1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
62 struct target_builtins *this_target_builtins = &default_target_builtins;
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
194 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
196 static rtx expand_builtin_object_size (tree);
197 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
198 enum built_in_function);
199 static void maybe_emit_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_free_warning (tree);
202 static tree fold_builtin_object_size (tree, tree);
203 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
204 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
205 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
209 static bool init_target_chars (void);
211 static unsigned HOST_WIDE_INT target_newline;
212 static unsigned HOST_WIDE_INT target_percent;
213 static unsigned HOST_WIDE_INT target_c;
214 static unsigned HOST_WIDE_INT target_s;
215 static char target_percent_c[3];
216 static char target_percent_s[3];
217 static char target_percent_s_newline[4];
218 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
219 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
220 static tree do_mpfr_arg2 (tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_arg3 (tree, tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_sincos (tree, tree, tree);
225 static tree do_mpfr_bessel_n (tree, tree, tree,
226 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_remquo (tree, tree, tree);
229 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_.

   NAME is a NUL-terminated identifier string.  Restored here: the
   excerpt had lost the return type, braces and the return statements
   of this function (visible only as the two strncmp conditions).  */

static bool
is_builtin_name (const char *name)
{
  /* The "__builtin_" prefix marks the internal name of a builtin.  */
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  /* Synchronization primitives use the "__sync_" prefix instead.  */
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
/* NOTE(review): the return type ("bool") and braces of this function
   appear to have been elided from this excerpt.  The visible body
   tests that DECL is a FUNCTION_DECL carrying the DECL_BUILT_IN flag.  */
244 /* Return true if DECL is a function symbol representing a built-in. */
247 is_builtin_fn (tree decl)
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
/* NOTE(review): return type and braces are elided from this excerpt.
   Delegates the prefix check to is_builtin_name above.  */
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
258 called_as_built_in (tree node)
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
/* NOTE(review): many interior lines (braces, some statements, the
   declarations of `offset', `next_offset' and `offset_bits') are
   missing from this excerpt; the visible lines are kept verbatim.
   Overall shape: find the innermost referenced object, derive its
   alignment from the DECL / constant / (TARGET_)MEM_REF address, then
   clamp by any variable offset and the known low bits of bitpos.  */
267 /* Return the alignment in bits of EXP, an object.
268 Don't return more than MAX_ALIGN no matter what. */
271 get_object_alignment (tree exp, unsigned int max_align)
273 HOST_WIDE_INT bitsize, bitpos;
275 enum machine_mode mode;
276 int unsignedp, volatilep;
277 unsigned int align, inner;
279 /* Get the innermost object and the constant (bitpos) and possibly
280 variable (offset) offset of the access. */
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 /* Extract alignment information from the innermost object and
285 possibly adjust bitpos and offset. */
286 if (TREE_CODE (exp) == CONST_DECL)
287 exp = DECL_INITIAL (exp);
289 && TREE_CODE (exp) != LABEL_DECL)
290 align = DECL_ALIGN (exp);
291 else if (CONSTANT_CLASS_P (exp))
293 align = TYPE_ALIGN (TREE_TYPE (exp));
294 #ifdef CONSTANT_ALIGNMENT
295 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
298 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
299 align = TYPE_ALIGN (TREE_TYPE (exp));
300 else if (TREE_CODE (exp) == INDIRECT_REF)
301 align = TYPE_ALIGN (TREE_TYPE (exp));
302 else if (TREE_CODE (exp) == MEM_REF)
304 tree addr = TREE_OPERAND (exp, 0);
305 struct ptr_info_def *pi;
306 if (TREE_CODE (addr) == BIT_AND_EXPR
307 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
/* (addr & mask): the low set bit of the mask bounds the alignment.  */
309 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
310 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
311 align *= BITS_PER_UNIT;
312 addr = TREE_OPERAND (addr, 0);
315 align = BITS_PER_UNIT;
316 if (TREE_CODE (addr) == SSA_NAME
317 && (pi = SSA_NAME_PTR_INFO (addr)))
319 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
320 align = MAX (pi->align * BITS_PER_UNIT, align);
322 else if (TREE_CODE (addr) == ADDR_EXPR)
323 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
325 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
327 else if (TREE_CODE (exp) == TARGET_MEM_REF)
/* TARGET_MEM_REF: same base-address analysis as MEM_REF, plus the
   effect of the offset, index*step and index2 components.  */
329 struct ptr_info_def *pi;
330 tree addr = TMR_BASE (exp);
331 if (TREE_CODE (addr) == BIT_AND_EXPR
332 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
334 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
335 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
336 align *= BITS_PER_UNIT;
337 addr = TREE_OPERAND (addr, 0);
340 align = BITS_PER_UNIT;
341 if (TREE_CODE (addr) == SSA_NAME
342 && (pi = SSA_NAME_PTR_INFO (addr)))
344 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
345 align = MAX (pi->align * BITS_PER_UNIT, align);
347 else if (TREE_CODE (addr) == ADDR_EXPR)
348 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
350 if (TMR_OFFSET (exp))
351 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
352 if (TMR_INDEX (exp) && TMR_STEP (exp))
354 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
355 align = MIN (align, (step & -step) * BITS_PER_UNIT);
357 else if (TMR_INDEX (exp))
358 align = BITS_PER_UNIT;
359 if (TMR_INDEX2 (exp))
360 align = BITS_PER_UNIT;
363 align = BITS_PER_UNIT;
365 /* If there is a non-constant offset part extract the maximum
366 alignment that can prevail. */
372 if (TREE_CODE (offset) == PLUS_EXPR)
374 next_offset = TREE_OPERAND (offset, 0);
375 offset = TREE_OPERAND (offset, 1);
379 if (host_integerp (offset, 1))
381 /* Any overflow in calculating offset_bits won't change
384 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
387 inner = MIN (inner, (offset_bits & -offset_bits));
389 else if (TREE_CODE (offset) == MULT_EXPR
390 && host_integerp (TREE_OPERAND (offset, 1), 1))
392 /* Any overflow in calculating offset_factor won't change
394 unsigned offset_factor
395 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
399 inner = MIN (inner, (offset_factor & -offset_factor));
403 inner = MIN (inner, BITS_PER_UNIT);
406 offset = next_offset;
409 /* Alignment is innermost object alignment adjusted by the constant
410 and non-constant offset parts. */
411 align = MIN (align, inner);
412 bitpos = bitpos & (align - 1);
414 /* align and bitpos now specify known low bits of the pointer.
415 ptr & (align - 1) == bitpos. */
418 align = (bitpos & -bitpos);
420 return MIN (align, max_align);
423 /* Returns true iff we can trust that alignment information has been
424 calculated properly. */
427 can_trust_pointer_alignment (void)
429 /* We rely on TER to compute accurate alignment information. */
430 return (optimize && flag_tree_ter);
/* NOTE(review): the return type, braces, and some statements (e.g. the
   declaration/initialization of `align' and the pi NULL check) are
   elided from this excerpt; visible lines kept verbatim.  */
433 /* Return the alignment in bits of EXP, a pointer valued expression.
434 But don't return more than MAX_ALIGN no matter what.
435 The alignment returned is, by default, the alignment of the thing that
436 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
438 Otherwise, look at the expression to see if we can do better, i.e., if the
439 expression is actually pointing at an object whose alignment is tighter. */
442 get_pointer_alignment (tree exp, unsigned int max_align)
446 if (TREE_CODE (exp) == ADDR_EXPR)
447 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
448 else if (TREE_CODE (exp) == SSA_NAME
449 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454 return BITS_PER_UNIT;
455 if (pi->misalign != 0)
/* A known misalignment caps the usable alignment at its low set bit.  */
456 align = (pi->misalign & -pi->misalign);
459 return MIN (max_align, align * BITS_PER_UNIT);
462 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
/* NOTE(review): several interior lines (braces, local declarations of
   loc/offset_node/max/ptr/i, some early returns) are elided from this
   excerpt; visible lines kept verbatim.  */
465 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
466 way, because it could contain a zero byte in the middle.
467 TREE_STRING_LENGTH is the size of the character array, not the string.
469 ONLY_VALUE should be nonzero if the result is not going to be emitted
470 into the instruction stream and zero if it is going to be expanded.
471 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
472 is returned, otherwise NULL, since
473 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
474 evaluate the side-effects.
476 The value returned is of type `ssizetype'.
478 Unfortunately, string_constant can't access the values of const char
479 arrays with initializers, so neither can we do so here. */
482 c_strlen (tree src, int only_value)
485 HOST_WIDE_INT offset;
/* Look through COND_EXPR when both arms have equal known lengths.  */
491 if (TREE_CODE (src) == COND_EXPR
492 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
496 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
497 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
498 if (tree_int_cst_equal (len1, len2))
502 if (TREE_CODE (src) == COMPOUND_EXPR
503 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
504 return c_strlen (TREE_OPERAND (src, 1), only_value);
506 loc = EXPR_LOC_OR_HERE (src);
508 src = string_constant (src, &offset_node);
512 max = TREE_STRING_LENGTH (src) - 1;
513 ptr = TREE_STRING_POINTER (src);
515 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
517 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
518 compute the offset to the following null if we don't know where to
519 start searching for it. */
522 for (i = 0; i < max; i++)
526 /* We don't know the starting offset, but we do know that the string
527 has no internal zero bytes. We can assume that the offset falls
528 within the bounds of the string; otherwise, the programmer deserves
529 what he gets. Subtract the offset from the length of the string,
530 and return that. This would perhaps not be valid if we were dealing
531 with named arrays in addition to literal string constants. */
533 return size_diffop_loc (loc, size_int (max), offset_node);
536 /* We have a known offset into the string. Start searching there for
537 a null character if we can represent it as a single HOST_WIDE_INT. */
538 if (offset_node == 0)
540 else if (! host_integerp (offset_node, 0))
543 offset = tree_low_cst (offset_node, 0);
545 /* If the offset is known to be out of bounds, warn, and call strlen at
547 if (offset < 0 || offset > max)
549 /* Suppress multiple warnings for propagated constant strings. */
550 if (! TREE_NO_WARNING (src))
552 warning_at (loc, 0, "offset outside bounds of constant string");
553 TREE_NO_WARNING (src) = 1;
558 /* Use strlen to search for the first zero byte. Since any strings
559 constructed with build_string will have nulls appended, we win even
560 if we get handed something like (char[4])"abcd".
562 Since OFFSET is our starting index into the string, no further
563 calculation is needed. */
564 return ssize_int (strlen (ptr + offset));
/* NOTE(review): the function header line, braces, and the NULL-return
   paths are elided from this excerpt; visible lines kept verbatim.
   Returns the host string for a string constant, applying any
   in-bounds constant offset.  */
567 /* Return a char pointer for a C string if it is a string constant
568 or sum of string constant and integer constant. */
575 src = string_constant (src, &offset_node);
579 if (offset_node == 0)
580 return TREE_STRING_POINTER (src);
581 else if (!host_integerp (offset_node, 1)
582 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
585 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
/* NOTE(review): the return type, braces and the declarations of
   c[]/i/j/ch are elided from this excerpt; visible lines kept
   verbatim.  Packs up to GET_MODE_SIZE (mode) bytes of STR into a
   two-word constant, honoring target byte/word endianness.  */
588 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
589 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
592 c_readstr (const char *str, enum machine_mode mode)
598 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
603 for (i = 0; i < GET_MODE_SIZE (mode); i++)
606 if (WORDS_BIG_ENDIAN)
607 j = GET_MODE_SIZE (mode) - i - 1;
608 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
609 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
/* Swap byte order within each word when byte and word endianness differ.  */
610 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
612 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
615 ch = (unsigned char) str[i];
616 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
618 return immed_double_const (c[0], c[1], mode);
/* NOTE(review): the tail of the leading comment, the return type,
   braces, the assignment to hostval, the comparison of val/hostval and
   the store through P are elided from this excerpt; visible lines kept
   verbatim.  */
621 /* Cast a target constant CST to target CHAR and if that value fits into
622 host char type, return zero and put that value into variable pointed to by
626 target_char_cast (tree cst, char *p)
628 unsigned HOST_WIDE_INT val, hostval;
630 if (TREE_CODE (cst) != INTEGER_CST
631 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
634 val = TREE_INT_CST_LOW (cst);
/* Truncate to the target character width.  */
635 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
636 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* Truncate to the host character width for comparison.  */
639 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
640 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
/* NOTE(review): the return type, braces, and the "return exp;" for
   the safe-to-reuse case are elided from this excerpt; visible lines
   kept verbatim.  SSA names and non-addressable locals/parms need no
   SAVE_EXPR wrapper; everything else falls through to save_expr.  */
649 /* Similar to save_expr, but assumes that arbitrary code is not executed
650 in between the multiple evaluations. In particular, we assume that a
651 non-addressable local variable will not be modified. */
654 builtin_save_expr (tree exp)
656 if (TREE_CODE (exp) == SSA_NAME
657 || (TREE_ADDRESSABLE (exp) == 0
658 && (TREE_CODE (exp) == PARM_DECL
659 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
662 return save_expr (exp);
/* NOTE(review): braces, several #else/#endif lines and some
   statements (e.g. the non-INITIAL_FRAME_ADDRESS_RTX declaration of
   tem, the final return) are elided from this excerpt; visible lines
   kept verbatim.  */
665 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
666 times to get the address of either a higher stack frame, or a return
667 address located within it (depending on FNDECL_CODE). */
670 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
674 #ifdef INITIAL_FRAME_ADDRESS_RTX
675 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
679 /* For a zero count with __builtin_return_address, we don't care what
680 frame address we return, because target-specific definitions will
681 override us. Therefore frame pointer elimination is OK, and using
682 the soft frame pointer is OK.
684 For a nonzero count, or a zero count with __builtin_frame_address,
685 we require a stable offset from the current frame pointer to the
686 previous one, so we must use the hard frame pointer, and
687 we must disable frame pointer elimination. */
688 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
689 tem = frame_pointer_rtx;
692 tem = hard_frame_pointer_rtx;
694 /* Tell reload not to eliminate the frame pointer. */
695 crtl->accesses_prior_frames = 1;
699 /* Some machines need special handling before we can access
700 arbitrary frames. For example, on the SPARC, we must first flush
701 all register windows to the stack. */
702 #ifdef SETUP_FRAME_ADDRESSES
704 SETUP_FRAME_ADDRESSES ();
707 /* On the SPARC, the return address is not in the frame, it is in a
708 register. There is no way to access it off of the current frame
709 pointer, but it can be accessed off the previous frame pointer by
710 reading the value from the register window save area. */
711 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
712 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
716 /* Scan back COUNT frames to the specified frame. */
717 for (i = 0; i < count; i++)
719 /* Assume the dynamic chain pointer is in the word that the
720 frame address points to, unless otherwise specified. */
721 #ifdef DYNAMIC_CHAIN_ADDRESS
722 tem = DYNAMIC_CHAIN_ADDRESS (tem);
724 tem = memory_address (Pmode, tem);
725 tem = gen_frame_mem (Pmode, tem);
726 tem = copy_to_reg (tem);
729 /* For __builtin_frame_address, return what we've got. But, on
730 the SPARC for example, we may have to add a bias. */
731 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
732 #ifdef FRAME_ADDR_RTX
733 return FRAME_ADDR_RTX (tem);
738 /* For __builtin_return_address, get the return address from that frame. */
739 #ifdef RETURN_ADDR_RTX
740 tem = RETURN_ADDR_RTX (count, tem);
742 tem = memory_address (Pmode,
743 plus_constant (tem, GET_MODE_SIZE (Pmode)));
744 tem = gen_frame_mem (Pmode, tem);
/* NOTE(review): the function's return type, braces and a few
   statements/#endif lines are elided from this excerpt; visible lines
   kept verbatim.  Buffer layout written here: word 0 = frame pointer,
   word 1 = receiver label, rest = machine-dependent stack save area.  */
749 /* Alias set used for setjmp buffer. */
750 static alias_set_type setjmp_alias_set = -1;
752 /* Construct the leading half of a __builtin_setjmp call. Control will
753 return to RECEIVER_LABEL. This is also called directly by the SJLJ
754 exception handling code. */
757 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
759 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
763 if (setjmp_alias_set == -1)
764 setjmp_alias_set = new_alias_set ();
766 buf_addr = convert_memory_address (Pmode, buf_addr);
768 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
770 /* We store the frame pointer and the address of receiver_label in
771 the buffer and use the rest of it for the stack save area, which
772 is machine-dependent. */
774 mem = gen_rtx_MEM (Pmode, buf_addr);
775 set_mem_alias_set (mem, setjmp_alias_set);
776 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
778 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
779 set_mem_alias_set (mem, setjmp_alias_set);
781 emit_move_insn (validize_mem (mem),
782 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
784 stack_save = gen_rtx_MEM (sa_mode,
785 plus_constant (buf_addr,
786 2 * GET_MODE_SIZE (Pmode)));
787 set_mem_alias_set (stack_save, setjmp_alias_set);
788 emit_stack_save (SAVE_NONLOCAL, &stack_save);
790 /* If there is further processing to do, do it. */
791 #ifdef HAVE_builtin_setjmp_setup
792 if (HAVE_builtin_setjmp_setup)
793 emit_insn (gen_builtin_setjmp_setup (buf_addr));
796 /* Tell optimize_save_area_alloca that extra work is going to
797 need to go on during alloca. */
798 cfun->calls_setjmp = 1;
800 /* We have a nonlocal label. */
801 cfun->has_nonlocal_label = 1;
/* NOTE(review): braces, #else/#endif pairs and some statements are
   elided from this excerpt; visible lines kept verbatim.  Emits the
   code executed when a setjmp buffer is longjmp'd to: restore FP,
   clobber the static chain, optionally restore the arg pointer, then
   a scheduling blockage.  */
804 /* Construct the trailing part of a __builtin_setjmp call. This is
805 also called directly by the SJLJ exception handling code. */
808 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
812 /* Clobber the FP when we get here, so we have to make sure it's
813 marked as used by this function. */
814 emit_use (hard_frame_pointer_rtx);
816 /* Mark the static chain as clobbered here so life information
817 doesn't get messed up for it. */
818 chain = targetm.calls.static_chain (current_function_decl, true);
819 if (chain && REG_P (chain))
820 emit_clobber (chain);
822 /* Now put in the code to restore the frame pointer, and argument
823 pointer, if needed. */
824 #ifdef HAVE_nonlocal_goto
825 if (! HAVE_nonlocal_goto)
828 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
829 /* This might change the hard frame pointer in ways that aren't
830 apparent to early optimization passes, so force a clobber. */
831 emit_clobber (hard_frame_pointer_rtx);
834 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
835 if (fixed_regs[ARG_POINTER_REGNUM])
837 #ifdef ELIMINABLE_REGS
839 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
841 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
842 if (elim_regs[i].from == ARG_POINTER_REGNUM
843 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
846 if (i == ARRAY_SIZE (elim_regs))
849 /* Now restore our arg pointer from the address at which it
850 was saved in our stack frame. */
851 emit_move_insn (crtl->args.internal_arg_pointer,
852 copy_to_reg (get_arg_pointer_save_area ()));
857 #ifdef HAVE_builtin_setjmp_receiver
858 if (HAVE_builtin_setjmp_receiver)
859 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
862 #ifdef HAVE_nonlocal_goto_receiver
863 if (HAVE_nonlocal_goto_receiver)
864 emit_insn (gen_nonlocal_goto_receiver ());
869 /* We must not allow the code we just generated to be reordered by
870 scheduling. Specifically, the update of the frame pointer must
871 happen immediately, not later. */
872 emit_insn (gen_blockage ());
/* NOTE(review): braces, #else/#endif lines and some statements (e.g.
   the JUMP_P test in the backward scan, the CALL_P abort path) are
   elided from this excerpt; visible lines kept verbatim.  */
875 /* __builtin_longjmp is passed a pointer to an array of five words (not
876 all will be used on all machines). It operates similarly to the C
877 library function of the same name, but is more efficient. Much of
878 the code below is copied from the handling of non-local gotos. */
881 expand_builtin_longjmp (rtx buf_addr, rtx value)
883 rtx fp, lab, stack, insn, last;
884 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
886 /* DRAP is needed for stack realign if longjmp is expanded to current
888 if (SUPPORTS_STACK_ALIGNMENT)
889 crtl->need_drap = true;
891 if (setjmp_alias_set == -1)
892 setjmp_alias_set = new_alias_set ();
894 buf_addr = convert_memory_address (Pmode, buf_addr);
896 buf_addr = force_reg (Pmode, buf_addr);
898 /* We require that the user must pass a second argument of 1, because
899 that is what builtin_setjmp will return. */
900 gcc_assert (value == const1_rtx);
902 last = get_last_insn ();
903 #ifdef HAVE_builtin_longjmp
904 if (HAVE_builtin_longjmp)
905 emit_insn (gen_builtin_longjmp (buf_addr));
/* Fallback path: reload FP, label and SP from the setjmp buffer.  */
909 fp = gen_rtx_MEM (Pmode, buf_addr);
910 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
911 GET_MODE_SIZE (Pmode)));
913 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
914 2 * GET_MODE_SIZE (Pmode)));
915 set_mem_alias_set (fp, setjmp_alias_set);
916 set_mem_alias_set (lab, setjmp_alias_set);
917 set_mem_alias_set (stack, setjmp_alias_set);
919 /* Pick up FP, label, and SP from the block and jump. This code is
920 from expand_goto in stmt.c; see there for detailed comments. */
921 #ifdef HAVE_nonlocal_goto
922 if (HAVE_nonlocal_goto)
923 /* We have to pass a value to the nonlocal_goto pattern that will
924 get copied into the static_chain pointer, but it does not matter
925 what that value is, because builtin_setjmp does not use it. */
926 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
930 lab = copy_to_reg (lab);
932 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
933 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
935 emit_move_insn (hard_frame_pointer_rtx, fp);
936 emit_stack_restore (SAVE_NONLOCAL, stack);
938 emit_use (hard_frame_pointer_rtx);
939 emit_use (stack_pointer_rtx);
940 emit_indirect_jump (lab);
944 /* Search backwards and mark the jump insn as a non-local goto.
945 Note that this precludes the use of __builtin_longjmp to a
946 __builtin_setjmp target in the same function. However, we've
947 already cautioned the user that these functions are for
948 internal exception handling use only. */
949 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
951 gcc_assert (insn != last);
955 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
958 else if (CALL_P (insn))
/* NOTE(review): braces, the early-return on validate_arglist failure,
   and some #else/#endif lines are elided from this excerpt; visible
   lines kept verbatim.  */
963 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
964 and the address of the save area. */
967 expand_builtin_nonlocal_goto (tree exp)
969 tree t_label, t_save_area;
970 rtx r_label, r_save_area, r_fp, r_sp, insn;
972 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
975 t_label = CALL_EXPR_ARG (exp, 0);
976 t_save_area = CALL_EXPR_ARG (exp, 1);
978 r_label = expand_normal (t_label);
979 r_label = convert_memory_address (Pmode, r_label);
980 r_save_area = expand_normal (t_save_area);
981 r_save_area = convert_memory_address (Pmode, r_save_area);
982 /* Copy the address of the save location to a register just in case it was based
983 on the frame pointer. */
984 r_save_area = copy_to_reg (r_save_area);
985 r_fp = gen_rtx_MEM (Pmode, r_save_area);
986 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
987 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
989 crtl->has_nonlocal_goto = 1;
991 #ifdef HAVE_nonlocal_goto
992 /* ??? We no longer need to pass the static chain value, afaik. */
993 if (HAVE_nonlocal_goto)
994 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
/* Fallback path when the target has no nonlocal_goto pattern.  */
998 r_label = copy_to_reg (r_label);
1000 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1001 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1003 /* Restore frame pointer for containing function.
1004 This sets the actual hard register used for the frame pointer
1005 to the location of the function's incoming static chain info.
1006 The non-local goto handler will then adjust it to contain the
1007 proper value and reload the argument pointer, if needed. */
1008 emit_move_insn (hard_frame_pointer_rtx, r_fp)
1009 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1011 /* USE of hard_frame_pointer_rtx added for consistency;
1012 not clear if really needed. */
1013 emit_use (hard_frame_pointer_rtx);
1014 emit_use (stack_pointer_rtx);
1016 /* If the architecture is using a GP register, we must
1017 conservatively assume that the target function makes use of it.
1018 The prologue of functions with nonlocal gotos must therefore
1019 initialize the GP register to the appropriate value, and we
1020 must then make sure that this value is live at the point
1021 of the jump. (Note that this doesn't necessarily apply
1022 to targets with a nonlocal_goto pattern; they are free
1023 to implement it in their own way. Note also that this is
1024 a no-op if the GP register is a global invariant.) */
1025 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1026 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1027 emit_use (pic_offset_table_rtx);
1029 emit_indirect_jump (r_label);
1032 /* Search backwards to the jump insn and mark it as a
1034 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1038 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1041 else if (CALL_P (insn))
1048 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1049 (not all will be used on all machines) that was passed to __builtin_setjmp.
1050 It updates the stack pointer in that block to correspond to the current
stack pointer.  */
1054 expand_builtin_update_setjmp_buf (rtx buf_addr)
/* Default save-area mode; overridden below by the target's
   save_stack_nonlocal pattern or STACK_SAVEAREA_MODE, if defined.  */
1056 enum machine_mode sa_mode = Pmode;
1060 #ifdef HAVE_save_stack_nonlocal
1061 if (HAVE_save_stack_nonlocal)
1062 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1064 #ifdef STACK_SAVEAREA_MODE
1065 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack-pointer slot lives at word 2 of the setjmp buffer.  */
1069 = gen_rtx_MEM (sa_mode,
1072 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
/* Let the target emit any bookkeeping its setjmp pattern needs.  */
1076 emit_insn (gen_setjmp ());
1079 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1082 /* Expand a call to __builtin_prefetch. For a target that does not support
1083 data prefetch, evaluate the memory address argument in case it has side
effects.  */
1087 expand_builtin_prefetch (tree exp)
1089 tree arg0, arg1, arg2;
/* Only the address argument is mandatory.  */
1093 if (!validate_arglist (exp, POINTER_TYPE, 0))
1096 arg0 = CALL_EXPR_ARG (exp, 0);
1098 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1099 zero (read) and argument 2 (locality) defaults to 3 (high degree of
temporal locality).  */
1101 nargs = call_expr_nargs (exp);
1103 arg1 = CALL_EXPR_ARG (exp, 1);
1105 arg1 = integer_zero_node;
1107 arg2 = CALL_EXPR_ARG (exp, 2);
1109 arg2 = integer_three_node;
1111 /* Argument 0 is an address. */
1112 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1114 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1115 if (TREE_CODE (arg1) != INTEGER_CST)
1117 error ("second argument to %<__builtin_prefetch%> must be a constant");
1118 arg1 = integer_zero_node;
1120 op1 = expand_normal (arg1);
1121 /* Argument 1 must be either zero or one. */
1122 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1124 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1129 /* Argument 2 (locality) must be a compile-time constant int. */
1130 if (TREE_CODE (arg2) != INTEGER_CST)
1132 error ("third argument to %<__builtin_prefetch%> must be a constant");
1133 arg2 = integer_zero_node;
1135 op2 = expand_normal (arg2);
1136 /* Argument 2 must be 0, 1, 2, or 3. */
1137 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1139 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
/* If the target has a prefetch pattern, coerce the address into the
   operand mode the pattern expects and emit it.  */
1143 #ifdef HAVE_prefetch
1146 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1148 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1149 || (GET_MODE (op0) != Pmode))
1151 op0 = convert_memory_address (Pmode, op0);
1152 op0 = force_reg (Pmode, op0);
1154 emit_insn (gen_prefetch (op0, op1, op2));
1158 /* Don't do anything with direct references to volatile memory, but
1159 generate code to handle other side effects. */
1160 if (!MEM_P (op0) && side_effects_p (op0))
1164 /* Get a MEM rtx for expression EXP which is the address of an operand
1165 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1166 the maximum length of the block of memory that might be accessed or
NULL if unknown.  */
1170 get_memory_rtx (tree exp, tree len)
1172 tree orig_exp = exp;
1176 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1177 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1178 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1179 exp = TREE_OPERAND (exp, 0);
/* Expand the (original, unresolved) address and wrap it in a BLKmode MEM.  */
1181 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1182 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1184 /* Get an expression we can use to find the attributes to assign to MEM.
1185 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1186 we can. First remove any nops. */
1187 while (CONVERT_EXPR_P (exp)
1188 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1189 exp = TREE_OPERAND (exp, 0);
/* &object + CST: remember the constant byte offset OFF and use the
   underlying object for the attributes.  */
1192 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1193 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1194 && host_integerp (TREE_OPERAND (exp, 1), 0)
1195 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1196 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1197 else if (TREE_CODE (exp) == ADDR_EXPR)
1198 exp = TREE_OPERAND (exp, 0);
1199 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1200 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1204 /* Honor attributes derived from exp, except for the alias set
1205 (as builtin stringops may alias with anything) and the size
1206 (as stringops may access multiple array elements). */
1209 set_mem_attributes (mem, exp, 0);
1212 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1214 /* Allow the string and memory builtins to overflow from one
1215 field into another, see http://gcc.gnu.org/PR23561.
1216 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1217 memory accessed by the string or memory builtin will fit
1218 within the field. */
1219 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1221 tree mem_expr = MEM_EXPR (mem);
1222 HOST_WIDE_INT offset = -1, length = -1;
/* Strip array refs, conversions and SAVE_EXPRs to reach the
   innermost COMPONENT_REF.  */
1225 while (TREE_CODE (inner) == ARRAY_REF
1226 || CONVERT_EXPR_P (inner)
1227 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1228 || TREE_CODE (inner) == SAVE_EXPR)
1229 inner = TREE_OPERAND (inner, 0);
1231 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1233 if (MEM_OFFSET (mem)
1234 && CONST_INT_P (MEM_OFFSET (mem)))
1235 offset = INTVAL (MEM_OFFSET (mem));
1237 if (offset >= 0 && len && host_integerp (len, 0))
1238 length = tree_low_cst (len, 0);
/* Walk outward through nested COMPONENT_REFs, checking at each level
   whether [offset, offset+length) provably fits in the field.  */
1240 while (TREE_CODE (inner) == COMPONENT_REF)
1242 tree field = TREE_OPERAND (inner, 1);
1243 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1244 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1246 /* Bitfields are generally not byte-addressable. */
1247 gcc_assert (!DECL_BIT_FIELD (field)
1248 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1249 % BITS_PER_UNIT) == 0
1250 && host_integerp (DECL_SIZE (field), 0)
1251 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1252 % BITS_PER_UNIT) == 0));
1254 /* If we can prove that the memory starting at XEXP (mem, 0) and
1255 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1256 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1257 fields without DECL_SIZE_UNIT like flexible array members. */
1259 && DECL_SIZE_UNIT (field)
1260 && host_integerp (DECL_SIZE_UNIT (field), 0))
1263 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1266 && offset + length <= size)
1271 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1272 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1273 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
/* Move one level outward and repeat.  */
1281 mem_expr = TREE_OPERAND (mem_expr, 0);
1282 inner = TREE_OPERAND (inner, 0);
1285 if (mem_expr == NULL)
1287 if (mem_expr != MEM_EXPR (mem))
1289 set_mem_expr (mem, mem_expr);
1290 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and may touch multiple elements, so
   drop the alias set and size attributes (see comment above).  */
1293 set_mem_alias_set (mem, 0);
1294 set_mem_size (mem, NULL_RTX);
1300 /* Built-in functions to perform an untyped call and return. */
1302 #define apply_args_mode \
1303 (this_target_builtins->x_apply_args_mode)
1304 #define apply_result_mode \
1305 (this_target_builtins->x_apply_result_mode)
1307 /* Return the size required for the block returned by __builtin_apply_args,
1308 and initialize apply_args_mode. */
1311 apply_args_size (void)
/* Cached across calls; see "never change" comment below.  */
1313 static int size = -1;
1316 enum machine_mode mode;
1318 /* The values computed by this function never change. */
1321 /* The first value is the incoming arg-pointer. */
1322 size = GET_MODE_SIZE (Pmode);
1324 /* The second value is the structure value address unless this is
1325 passed as an "invisible" first argument. */
1326 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1327 size += GET_MODE_SIZE (Pmode);
/* One slot per hard register that can carry an argument, each aligned
   to its mode's natural byte alignment; record the mode per regno.  */
1329 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1330 if (FUNCTION_ARG_REGNO_P (regno))
1332 mode = targetm.calls.get_raw_arg_mode (regno);
1334 gcc_assert (mode != VOIDmode);
1336 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1337 if (size % align != 0)
1338 size = CEIL (size, align) * align;
1339 size += GET_MODE_SIZE (mode);
1340 apply_args_mode[regno] = mode;
/* Non-argument registers get VOIDmode as a "not used" marker.  */
1344 apply_args_mode[regno] = VOIDmode;
1350 /* Return the size required for the block returned by __builtin_apply,
1351 and initialize apply_result_mode. */
1354 apply_result_size (void)
/* Cached across calls; see "never change" comment below.  */
1356 static int size = -1;
1358 enum machine_mode mode;
1360 /* The values computed by this function never change. */
/* One slot per hard register that can hold a function return value,
   each aligned to its mode's natural byte alignment.  */
1365 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1366 if (targetm.calls.function_value_regno_p (regno))
1368 mode = targetm.calls.get_raw_result_mode (regno);
1370 gcc_assert (mode != VOIDmode);
1372 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1373 if (size % align != 0)
1374 size = CEIL (size, align) * align;
1375 size += GET_MODE_SIZE (mode);
1376 apply_result_mode[regno] = mode;
/* Non-result registers get VOIDmode as a "not used" marker.  */
1379 apply_result_mode[regno] = VOIDmode;
1381 /* Allow targets that use untyped_call and untyped_return to override
1382 the size so that machine-specific information can be stored here. */
1383 #ifdef APPLY_RESULT_SIZE
1384 size = APPLY_RESULT_SIZE;
1390 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1391 /* Create a vector describing the result block RESULT. If SAVEP is true,
1392 the result block is used to save the values; otherwise it is used to
1393 restore the values. */
1396 result_vector (int savep, rtx result)
1398 int regno, size, align, nelts;
1399 enum machine_mode mode;
1401 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
/* Build one (set MEM REG) or (set REG MEM) per result register, laying
   the MEMs out at the same aligned offsets apply_result_size computed.  */
1404 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1405 if ((mode = apply_result_mode[regno]) != VOIDmode)
1407 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1408 if (size % align != 0)
1409 size = CEIL (size, align) * align;
/* When saving, use the outgoing regno; when restoring, the incoming one.  */
1410 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1411 mem = adjust_address (result, mode, size);
1412 savevec[nelts++] = (savep
1413 ? gen_rtx_SET (VOIDmode, mem, reg)
1414 : gen_rtx_SET (VOIDmode, reg, mem));
1415 size += GET_MODE_SIZE (mode);
1417 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1419 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1421 /* Save the state required to perform an untyped call with the same
1422 arguments as were passed to the current function. */
1425 expand_builtin_apply_args_1 (void)
1428 int size, align, regno;
1429 enum machine_mode mode;
1430 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1432 /* Create a block where the arg-pointer, structure value address,
1433 and argument registers can be saved. */
1434 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1436 /* Walk past the arg-pointer and structure value address. */
1437 size = GET_MODE_SIZE (Pmode);
1438 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1439 size += GET_MODE_SIZE (Pmode);
1441 /* Save each register used in calling a function to the block. */
1442 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1443 if ((mode = apply_args_mode[regno]) != VOIDmode)
1445 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1446 if (size % align != 0)
1447 size = CEIL (size, align) * align;
1449 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1451 emit_move_insn (adjust_address (registers, mode, size), tem);
1452 size += GET_MODE_SIZE (mode);
1455 /* Save the arg pointer to the block. */
1456 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1457 #ifdef STACK_GROWS_DOWNWARD
1458 /* We need the pointer as the caller actually passed them to us, not
1459 as we might have pretended they were passed. Make sure it's a valid
1460 operand, as emit_move_insn isn't expected to handle a PLUS. */
1462 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1465 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
/* The arg pointer occupied the first Pmode-sized slot.  */
1467 size = GET_MODE_SIZE (Pmode);
1469 /* Save the structure value address unless this is passed as an
1470 "invisible" first argument. */
1471 if (struct_incoming_value)
1473 emit_move_insn (adjust_address (registers, Pmode, size),
1474 copy_to_reg (struct_incoming_value));
1475 size += GET_MODE_SIZE (Pmode);
1478 /* Return the address of the block. */
1479 return copy_addr_to_reg (XEXP (registers, 0));
1482 /* __builtin_apply_args returns block of memory allocated on
1483 the stack into which is stored the arg pointer, structure
1484 value address, static chain, and all the registers that might
1485 possibly be used in performing a function call. The code is
1486 moved to the start of the function so the incoming values are
saved before any argument registers are clobbered.  */
1490 expand_builtin_apply_args (void)
1492 /* Don't do __builtin_apply_args more than once in a function.
1493 Save the result of the first call and reuse it. */
1494 if (apply_args_value != 0)
1495 return apply_args_value;
1497 /* When this function is called, it means that registers must be
1498 saved on entry to this function. So we migrate the
1499 call to the first insn of this function. */
/* Expand into a detached sequence, then splice that sequence in at
   the start of the function (see below).  */
1504 temp = expand_builtin_apply_args_1 ();
1508 apply_args_value = temp;
1510 /* Put the insns after the NOTE that starts the function.
1511 If this is inside a start_sequence, make the outer-level insn
1512 chain current, so the code is placed at the start of the
1513 function. If internal_arg_pointer is a non-virtual pseudo,
1514 it needs to be placed after the function that initializes
that pseudo.  */
1516 push_topmost_sequence ();
1517 if (REG_P (crtl->args.internal_arg_pointer)
1518 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1519 emit_insn_before (seq, parm_birth_insn);
1521 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1522 pop_topmost_sequence ();
1527 /* Perform an untyped call and save the state required to perform an
1528 untyped return of whatever value was returned by the given function. */
1531 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1533 int size, align, regno;
1534 enum machine_mode mode;
1535 rtx incoming_args, result, reg, dest, src, call_insn;
1536 rtx old_stack_level = 0;
1537 rtx call_fusage = 0;
1538 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1540 arguments = convert_memory_address (Pmode, arguments);
1542 /* Create a block where the return registers can be saved. */
1543 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1545 /* Fetch the arg pointer from the ARGUMENTS block. */
1546 incoming_args = gen_reg_rtx (Pmode);
1547 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1548 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved arg pointer is past the block;
   step back by ARGSIZE to reach its start.  */
1549 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1550 incoming_args, 0, OPTAB_LIB_WIDEN);
1553 /* Push a new argument block and copy the arguments. Do not allow
1554 the (potential) memcpy call below to interfere with our stack
1556 do_pending_stack_adjust ();
1559 /* Save the stack with nonlocal if available. */
1560 #ifdef HAVE_save_stack_nonlocal
1561 if (HAVE_save_stack_nonlocal)
1562 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1565 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1567 /* Allocate a block of memory onto the stack and copy the memory
1568 arguments to the outgoing arguments address. We can pass TRUE
1569 as the 4th argument because we just saved the stack pointer
1570 and will restore it right after the call. */
1571 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1573 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1574 may have already set current_function_calls_alloca to true.
1575 current_function_calls_alloca won't be set if argsize is zero,
1576 so we have to guarantee need_drap is true here. */
1577 if (SUPPORTS_STACK_ALIGNMENT)
1578 crtl->need_drap = true;
1580 dest = virtual_outgoing_args_rtx;
1581 #ifndef STACK_GROWS_DOWNWARD
1582 if (CONST_INT_P (argsize))
1583 dest = plus_constant (dest, -INTVAL (argsize));
1585 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
/* Block-copy the caller's saved argument area into the fresh
   outgoing-arguments area.  */
1587 dest = gen_rtx_MEM (BLKmode, dest);
1588 set_mem_align (dest, PARM_BOUNDARY);
1589 src = gen_rtx_MEM (BLKmode, incoming_args);
1590 set_mem_align (src, PARM_BOUNDARY);
1591 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1593 /* Refer to the argument block. */
1595 arguments = gen_rtx_MEM (BLKmode, arguments);
1596 set_mem_align (arguments, PARM_BOUNDARY);
1598 /* Walk past the arg-pointer and structure value address. */
1599 size = GET_MODE_SIZE (Pmode);
1601 size += GET_MODE_SIZE (Pmode);
1603 /* Restore each of the registers previously saved. Make USE insns
1604 for each of these registers for use in making the call. */
1605 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1606 if ((mode = apply_args_mode[regno]) != VOIDmode)
1608 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1609 if (size % align != 0)
1610 size = CEIL (size, align) * align;
1611 reg = gen_rtx_REG (mode, regno);
1612 emit_move_insn (reg, adjust_address (arguments, mode, size));
1613 use_reg (&call_fusage, reg);
1614 size += GET_MODE_SIZE (mode);
1617 /* Restore the structure value address unless this is passed as an
1618 "invisible" first argument. */
1619 size = GET_MODE_SIZE (Pmode);
1622 rtx value = gen_reg_rtx (Pmode);
1623 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1624 emit_move_insn (struct_value, value);
1625 if (REG_P (struct_value))
1626 use_reg (&call_fusage, struct_value);
1627 size += GET_MODE_SIZE (Pmode);
1630 /* All arguments and registers used for the call are set up by now! */
1631 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1633 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1634 and we don't want to load it into a register as an optimization,
1635 because prepare_call_address already did it if it should be done. */
1636 if (GET_CODE (function) != SYMBOL_REF)
1637 function = memory_address (FUNCTION_MODE, function);
1639 /* Generate the actual call instruction and save the return value. */
1640 #ifdef HAVE_untyped_call
1641 if (HAVE_untyped_call)
1642 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1643 result, result_vector (1, result)));
1646 #ifdef HAVE_call_value
1647 if (HAVE_call_value)
1651 /* Locate the unique return register. It is not possible to
1652 express a call that sets more than one return register using
1653 call_value; use untyped_call for that. In fact, untyped_call
1654 only needs to save the return registers in the given block. */
1655 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1656 if ((mode = apply_result_mode[regno]) != VOIDmode)
1658 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1660 valreg = gen_rtx_REG (mode, regno);
1663 emit_call_insn (GEN_CALL_VALUE (valreg,
1664 gen_rtx_MEM (FUNCTION_MODE, function),
1665 const0_rtx, NULL_RTX, const0_rtx));
1667 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1673 /* Find the CALL insn we just emitted, and attach the register usage
information (CALL_FUSAGE) to it.  */
1675 call_insn = last_call_insn ();
1676 add_function_usage_to (call_insn, call_fusage);
1678 /* Restore the stack. */
1679 #ifdef HAVE_save_stack_nonlocal
1680 if (HAVE_save_stack_nonlocal)
1681 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1684 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1688 /* Return the address of the result block. */
1689 result = copy_addr_to_reg (XEXP (result, 0));
1690 return convert_memory_address (ptr_mode, result);
1693 /* Perform an untyped return. */
1696 expand_builtin_return (rtx result)
1698 int size, align, regno;
1699 enum machine_mode mode;
1701 rtx call_fusage = 0;
1703 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1705 apply_result_size ();
1706 result = gen_rtx_MEM (BLKmode, result);
1708 #ifdef HAVE_untyped_return
1709 if (HAVE_untyped_return)
1711 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1717 /* Restore the return value and note that each value is used. */
1719 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1720 if ((mode = apply_result_mode[regno]) != VOIDmode)
1722 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1723 if (size % align != 0)
1724 size = CEIL (size, align) * align;
1725 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1726 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USE insns for the restored registers in a side sequence.  */
1728 push_to_sequence (call_fusage);
1730 call_fusage = get_insns ();
1732 size += GET_MODE_SIZE (mode);
1735 /* Put the USE insns before the return. */
1736 emit_insn (call_fusage);
1738 /* Return whatever values was restored by jumping directly to the end
of the function.  */
1740 expand_naked_return ();
1743 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1745 static enum type_class
1746 type_to_class (tree type)
1748 switch (TREE_CODE (type))
1750 case VOID_TYPE: return void_type_class;
1751 case INTEGER_TYPE: return integer_type_class;
1752 case ENUMERAL_TYPE: return enumeral_type_class;
1753 case BOOLEAN_TYPE: return boolean_type_class;
1754 case POINTER_TYPE: return pointer_type_class;
1755 case REFERENCE_TYPE: return reference_type_class;
1756 case OFFSET_TYPE: return offset_type_class;
1757 case REAL_TYPE: return real_type_class;
1758 case COMPLEX_TYPE: return complex_type_class;
1759 case FUNCTION_TYPE: return function_type_class;
1760 case METHOD_TYPE: return method_type_class;
1761 case RECORD_TYPE: return record_type_class;
/* NOTE(review): a plain UNION_TYPE case presumably precedes this line
   (not visible in this view) -- confirm against the full source.  */
1763 case QUAL_UNION_TYPE: return union_type_class;
/* Char arrays (TYPE_STRING_FLAG) classify as strings, not arrays.  */
1764 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1765 ? string_type_class : array_type_class);
1766 case LANG_TYPE: return lang_type_class;
1767 default: return no_type_class;
1771 /* Expand a call EXP to __builtin_classify_type. */
1774 expand_builtin_classify_type (tree exp)
/* With an argument, classify its type; with none, return no_type_class.  */
1776 if (call_expr_nargs (exp))
1777 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1778 return GEN_INT (no_type_class);
1781 /* This helper macro, meant to be used in mathfn_built_in below,
1782 determines which among a set of three builtin math functions is
1783 appropriate for a given type mode. The `F' and `L' cases are
1784 automatically generated from the `double' case. */
1785 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1786 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1787 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1788 fcodel = BUILT_IN_MATHFN##L ; break;
1789 /* Similar to above, but appends _R after any F/L suffix. */
1790 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1791 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1792 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1793 fcodel = BUILT_IN_MATHFN##L_R ; break;
1795 /* Return mathematic function equivalent to FN but operating directly
1796 on TYPE, if available. If IMPLICIT is true find the function in
1797 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1798 can't do the conversion, return zero. */
1801 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1803 tree const *const fn_arr
1804 = implicit ? implicit_built_in_decls : built_in_decls;
1805 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expands to the three cases (double/float/long double)
   and sets fcode/fcodef/fcodel accordingly; see the macro above.  */
1809 CASE_MATHFN (BUILT_IN_ACOS)
1810 CASE_MATHFN (BUILT_IN_ACOSH)
1811 CASE_MATHFN (BUILT_IN_ASIN)
1812 CASE_MATHFN (BUILT_IN_ASINH)
1813 CASE_MATHFN (BUILT_IN_ATAN)
1814 CASE_MATHFN (BUILT_IN_ATAN2)
1815 CASE_MATHFN (BUILT_IN_ATANH)
1816 CASE_MATHFN (BUILT_IN_CBRT)
1817 CASE_MATHFN (BUILT_IN_CEIL)
1818 CASE_MATHFN (BUILT_IN_CEXPI)
1819 CASE_MATHFN (BUILT_IN_COPYSIGN)
1820 CASE_MATHFN (BUILT_IN_COS)
1821 CASE_MATHFN (BUILT_IN_COSH)
1822 CASE_MATHFN (BUILT_IN_DREM)
1823 CASE_MATHFN (BUILT_IN_ERF)
1824 CASE_MATHFN (BUILT_IN_ERFC)
1825 CASE_MATHFN (BUILT_IN_EXP)
1826 CASE_MATHFN (BUILT_IN_EXP10)
1827 CASE_MATHFN (BUILT_IN_EXP2)
1828 CASE_MATHFN (BUILT_IN_EXPM1)
1829 CASE_MATHFN (BUILT_IN_FABS)
1830 CASE_MATHFN (BUILT_IN_FDIM)
1831 CASE_MATHFN (BUILT_IN_FLOOR)
1832 CASE_MATHFN (BUILT_IN_FMA)
1833 CASE_MATHFN (BUILT_IN_FMAX)
1834 CASE_MATHFN (BUILT_IN_FMIN)
1835 CASE_MATHFN (BUILT_IN_FMOD)
1836 CASE_MATHFN (BUILT_IN_FREXP)
1837 CASE_MATHFN (BUILT_IN_GAMMA)
1838 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1839 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1840 CASE_MATHFN (BUILT_IN_HYPOT)
1841 CASE_MATHFN (BUILT_IN_ILOGB)
1842 CASE_MATHFN (BUILT_IN_INF)
1843 CASE_MATHFN (BUILT_IN_ISINF)
1844 CASE_MATHFN (BUILT_IN_J0)
1845 CASE_MATHFN (BUILT_IN_J1)
1846 CASE_MATHFN (BUILT_IN_JN)
1847 CASE_MATHFN (BUILT_IN_LCEIL)
1848 CASE_MATHFN (BUILT_IN_LDEXP)
1849 CASE_MATHFN (BUILT_IN_LFLOOR)
1850 CASE_MATHFN (BUILT_IN_LGAMMA)
1851 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1852 CASE_MATHFN (BUILT_IN_LLCEIL)
1853 CASE_MATHFN (BUILT_IN_LLFLOOR)
1854 CASE_MATHFN (BUILT_IN_LLRINT)
1855 CASE_MATHFN (BUILT_IN_LLROUND)
1856 CASE_MATHFN (BUILT_IN_LOG)
1857 CASE_MATHFN (BUILT_IN_LOG10)
1858 CASE_MATHFN (BUILT_IN_LOG1P)
1859 CASE_MATHFN (BUILT_IN_LOG2)
1860 CASE_MATHFN (BUILT_IN_LOGB)
1861 CASE_MATHFN (BUILT_IN_LRINT)
1862 CASE_MATHFN (BUILT_IN_LROUND)
1863 CASE_MATHFN (BUILT_IN_MODF)
1864 CASE_MATHFN (BUILT_IN_NAN)
1865 CASE_MATHFN (BUILT_IN_NANS)
1866 CASE_MATHFN (BUILT_IN_NEARBYINT)
1867 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1868 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1869 CASE_MATHFN (BUILT_IN_POW)
1870 CASE_MATHFN (BUILT_IN_POWI)
1871 CASE_MATHFN (BUILT_IN_POW10)
1872 CASE_MATHFN (BUILT_IN_REMAINDER)
1873 CASE_MATHFN (BUILT_IN_REMQUO)
1874 CASE_MATHFN (BUILT_IN_RINT)
1875 CASE_MATHFN (BUILT_IN_ROUND)
1876 CASE_MATHFN (BUILT_IN_SCALB)
1877 CASE_MATHFN (BUILT_IN_SCALBLN)
1878 CASE_MATHFN (BUILT_IN_SCALBN)
1879 CASE_MATHFN (BUILT_IN_SIGNBIT)
1880 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1881 CASE_MATHFN (BUILT_IN_SIN)
1882 CASE_MATHFN (BUILT_IN_SINCOS)
1883 CASE_MATHFN (BUILT_IN_SINH)
1884 CASE_MATHFN (BUILT_IN_SQRT)
1885 CASE_MATHFN (BUILT_IN_TAN)
1886 CASE_MATHFN (BUILT_IN_TANH)
1887 CASE_MATHFN (BUILT_IN_TGAMMA)
1888 CASE_MATHFN (BUILT_IN_TRUNC)
1889 CASE_MATHFN (BUILT_IN_Y0)
1890 CASE_MATHFN (BUILT_IN_Y1)
1891 CASE_MATHFN (BUILT_IN_YN)
/* Pick the variant matching TYPE's main variant; unsupported types
   fall through (the NULL return is not visible in this view).  */
1897 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1898 return fn_arr[fcode];
1899 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1900 return fn_arr[fcodef];
1901 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1902 return fn_arr[fcodel];
1907 /* Like mathfn_built_in_1(), but always use the implicit array. */
1910 mathfn_built_in (tree type, enum built_in_function fn)
1912 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1915 /* If errno must be maintained, expand the RTL to check if the result,
1916 TARGET, of a built-in function call, EXP, is NaN, and if so set
errno to EDOM.  */
1920 expand_errno_check (tree exp, rtx target)
1922 rtx lab = gen_label_rtx ();
1924 /* Test the result; if it is NaN, set errno=EDOM because
1925 the argument was not in the domain. */
/* TARGET == TARGET is false only for NaN; jump over the errno store
   in the (very likely) non-NaN case.  */
1926 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1927 NULL_RTX, NULL_RTX, lab,
1928 /* The jump is very likely. */
1929 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1932 /* If this built-in doesn't throw an exception, set errno directly. */
1933 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1935 #ifdef GEN_ERRNO_RTX
1936 rtx errno_rtx = GEN_ERRNO_RTX;
/* Fallback when the target provides no errno location: reference the
   "errno" symbol directly.  */
1939 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1941 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1947 /* Make sure the library call isn't expanded as a tail call. */
1948 CALL_EXPR_TAILCALL (exp) = 0;
1950 /* We can't set errno=EDOM directly; let the library call do it.
1951 Pop the arguments right away in case the call gets deleted. */
1953 expand_call (exp, target, 0);
1958 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1959 Return NULL_RTX if a normal call should be emitted rather than expanding
1960 the function in-line. EXP is the expression that is a call to the builtin
1961 function; if convenient, the result should be placed in TARGET.
1962 SUBTARGET may be used as the target for computing one of EXP's operands. */
1965 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1967 optab builtin_optab;
1969 tree fndecl = get_callee_fndecl (exp);
1970 enum machine_mode mode;
1971 bool errno_set = false;
/* The single argument must be a floating-point value.  */
1974 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1977 arg = CALL_EXPR_ARG (exp, 0);
/* Map the builtin to its optab and note whether the math library
   function may set errno for it.  */
1979 switch (DECL_FUNCTION_CODE (fndecl))
1981 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets errno for negative arguments.  */
1982 errno_set = ! tree_expr_nonnegative_p (arg);
1983 builtin_optab = sqrt_optab;
1985 CASE_FLT_FN (BUILT_IN_EXP):
1986 errno_set = true; builtin_optab = exp_optab; break;
1987 CASE_FLT_FN (BUILT_IN_EXP10):
1988 CASE_FLT_FN (BUILT_IN_POW10):
1989 errno_set = true; builtin_optab = exp10_optab; break;
1990 CASE_FLT_FN (BUILT_IN_EXP2):
1991 errno_set = true; builtin_optab = exp2_optab; break;
1992 CASE_FLT_FN (BUILT_IN_EXPM1):
1993 errno_set = true; builtin_optab = expm1_optab; break;
1994 CASE_FLT_FN (BUILT_IN_LOGB):
1995 errno_set = true; builtin_optab = logb_optab; break;
1996 CASE_FLT_FN (BUILT_IN_LOG):
1997 errno_set = true; builtin_optab = log_optab; break;
1998 CASE_FLT_FN (BUILT_IN_LOG10):
1999 errno_set = true; builtin_optab = log10_optab; break;
2000 CASE_FLT_FN (BUILT_IN_LOG2):
2001 errno_set = true; builtin_optab = log2_optab; break;
2002 CASE_FLT_FN (BUILT_IN_LOG1P):
2003 errno_set = true; builtin_optab = log1p_optab; break;
2004 CASE_FLT_FN (BUILT_IN_ASIN):
2005 builtin_optab = asin_optab; break;
2006 CASE_FLT_FN (BUILT_IN_ACOS):
2007 builtin_optab = acos_optab; break;
2008 CASE_FLT_FN (BUILT_IN_TAN):
2009 builtin_optab = tan_optab; break;
2010 CASE_FLT_FN (BUILT_IN_ATAN):
2011 builtin_optab = atan_optab; break;
2012 CASE_FLT_FN (BUILT_IN_FLOOR):
2013 builtin_optab = floor_optab; break;
2014 CASE_FLT_FN (BUILT_IN_CEIL):
2015 builtin_optab = ceil_optab; break;
2016 CASE_FLT_FN (BUILT_IN_TRUNC):
2017 builtin_optab = btrunc_optab; break;
2018 CASE_FLT_FN (BUILT_IN_ROUND):
2019 builtin_optab = round_optab; break;
2020 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2021 builtin_optab = nearbyint_optab;
2022 if (flag_trapping_math)
2024 /* Else fallthrough and expand as rint. */
2025 CASE_FLT_FN (BUILT_IN_RINT):
2026 builtin_optab = rint_optab; break;
2027 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2028 builtin_optab = significand_optab; break;
2033 /* Make a suitable register to place result in. */
2034 mode = TYPE_MODE (TREE_TYPE (exp));
/* No errno handling needed when -fno-math-errno or NaNs are not honored.  */
2036 if (! flag_errno_math || ! HONOR_NANS (mode))
2039 /* Before working hard, check whether the instruction is available. */
2040 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2041 && (!errno_set || !optimize_insn_for_size_p ()))
2043 target = gen_reg_rtx (mode);
2045 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2046 need to expand the argument again. This way, we will not perform
2047 side-effects more the once. */
2048 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2050 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2054 /* Compute into TARGET.
2055 Set TARGET to wherever the result comes back. */
2056 target = expand_unop (mode, builtin_optab, op0, target, 0);
2061 expand_errno_check (exp, target);
2063 /* Output the entire sequence. */
2064 insns = get_insns ();
2070 /* If we were unable to expand via the builtin, stop the sequence
2071 (without outputting the insns) and call to the library function
2072 with the stabilized argument list. */
2076 return expand_call (exp, target, target == const0_rtx);
2079 /* Expand a call to the builtin binary math functions (pow and atan2).
2080 Return NULL_RTX if a normal call should be emitted rather than expanding the
2081 function in-line. EXP is the expression that is a call to the builtin
2082 function; if convenient, the result should be placed in TARGET.
2083 SUBTARGET may be used as the target for computing one of EXP's
2087 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
/* Expand a two-argument floating-point builtin (pow, atan2, scalb,
   scalbn/scalbln, ldexp, fmod, remainder/drem) through its optab.
   NOTE(review): this listing is elided -- blank lines, braces and some
   statements are missing relative to the upstream sources; the comments
   below describe only the visible code.  */
2089 optab builtin_optab;
2090 rtx op0, op1, insns;
/* The second argument is a float except for scalbn/scalbln/ldexp,
   which take an integer exponent (adjusted just below).  */
2091 int op1_type = REAL_TYPE;
2092 tree fndecl = get_callee_fndecl (exp);
2094 enum machine_mode mode;
2095 bool errno_set = true;
2097 switch (DECL_FUNCTION_CODE (fndecl))
2099 CASE_FLT_FN (BUILT_IN_SCALBN):
2100 CASE_FLT_FN (BUILT_IN_SCALBLN):
2101 CASE_FLT_FN (BUILT_IN_LDEXP):
2102 op1_type = INTEGER_TYPE;
2107 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2110 arg0 = CALL_EXPR_ARG (exp, 0);
2111 arg1 = CALL_EXPR_ARG (exp, 1);
/* Select the optab implementing this builtin.  */
2113 switch (DECL_FUNCTION_CODE (fndecl))
2115 CASE_FLT_FN (BUILT_IN_POW):
2116 builtin_optab = pow_optab; break;
2117 CASE_FLT_FN (BUILT_IN_ATAN2):
2118 builtin_optab = atan2_optab; break;
2119 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb/scalbn/scalbln only map to the optab when the format radix
   (FLT_RADIX) is 2.  */
2120 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2122 builtin_optab = scalb_optab; break;
2123 CASE_FLT_FN (BUILT_IN_SCALBN):
2124 CASE_FLT_FN (BUILT_IN_SCALBLN):
2125 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2127 /* Fall through... */
2128 CASE_FLT_FN (BUILT_IN_LDEXP):
2129 builtin_optab = ldexp_optab; break;
2130 CASE_FLT_FN (BUILT_IN_FMOD):
2131 builtin_optab = fmod_optab; break;
2132 CASE_FLT_FN (BUILT_IN_REMAINDER):
2133 CASE_FLT_FN (BUILT_IN_DREM):
2134 builtin_optab = remainder_optab; break;
2139 /* Make a suitable register to place result in. */
2140 mode = TYPE_MODE (TREE_TYPE (exp));
2142 /* Before working hard, check whether the instruction is available. */
2143 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2146 target = gen_reg_rtx (mode);
/* errno handling is unneeded when errno-math is off or the mode has no
   NaNs; and with errno set, inline expansion is skipped for size.  */
2148 if (! flag_errno_math || ! HONOR_NANS (mode))
2151 if (errno_set && optimize_insn_for_size_p ())
2154 /* Always stabilize the argument list. */
2155 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2156 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2158 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2159 op1 = expand_normal (arg1);
2163 /* Compute into TARGET.
2164 Set TARGET to wherever the result comes back. */
2165 target = expand_binop (mode, builtin_optab, op0, op1,
2166 target, 0, OPTAB_DIRECT);
2168 /* If we were unable to expand via the builtin, stop the sequence
2169 (without outputting the insns) and call to the library function
2170 with the stabilized argument list. */
2174 return expand_call (exp, target, target == const0_rtx);
2178 expand_errno_check (exp, target);
2180 /* Output the entire sequence. */
2181 insns = get_insns ();
2188 /* Expand a call to the builtin trinary math functions (fma).
2189 Return NULL_RTX if a normal call should be emitted rather than expanding the
2190 function in-line. EXP is the expression that is a call to the builtin
2191 function; if convenient, the result should be placed in TARGET.
2192 SUBTARGET may be used as the target for computing one of EXP's
2196 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
/* Expand a three-argument math builtin (currently only fma) through its
   optab; falls back to a library call (visible below) when the optab
   expansion fails.  NOTE(review): listing is elided -- braces and some
   statements are missing here.  */
2198 optab builtin_optab;
2199 rtx op0, op1, op2, insns;
2200 tree fndecl = get_callee_fndecl (exp);
2201 tree arg0, arg1, arg2;
2202 enum machine_mode mode;
2204 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2207 arg0 = CALL_EXPR_ARG (exp, 0);
2208 arg1 = CALL_EXPR_ARG (exp, 1);
2209 arg2 = CALL_EXPR_ARG (exp, 2);
2211 switch (DECL_FUNCTION_CODE (fndecl))
2213 CASE_FLT_FN (BUILT_IN_FMA):
2214 builtin_optab = fma_optab; break;
2219 /* Make a suitable register to place result in. */
2220 mode = TYPE_MODE (TREE_TYPE (exp));
2222 /* Before working hard, check whether the instruction is available. */
2223 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2226 target = gen_reg_rtx (mode);
/* Save-expr the arguments so re-expanding them (for the fallback call)
   does not duplicate side effects.  */
2228 /* Always stabilize the argument list. */
2229 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2230 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2231 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2233 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2234 op1 = expand_normal (arg1);
2235 op2 = expand_normal (arg2);
2239 /* Compute into TARGET.
2240 Set TARGET to wherever the result comes back. */
2241 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2244 /* If we were unable to expand via the builtin, stop the sequence
2245 (without outputting the insns) and call to the library function
2246 with the stabilized argument list. */
2250 return expand_call (exp, target, target == const0_rtx);
2253 /* Output the entire sequence. */
2254 insns = get_insns ();
2261 /* Expand a call to the builtin sin and cos math functions.
2262 Return NULL_RTX if a normal call should be emitted rather than expanding the
2263 function in-line. EXP is the expression that is a call to the builtin
2264 function; if convenient, the result should be placed in TARGET.
2265 SUBTARGET may be used as the target for computing one of EXP's
2269 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
/* Expand sin/cos: prefer the two-output sincos optab, falling back to
   the single-output sin/cos optabs, and finally to a library call.
   NOTE(review): listing is elided -- braces, breaks and returns are
   missing relative to upstream.  */
2271 optab builtin_optab;
2273 tree fndecl = get_callee_fndecl (exp);
2274 enum machine_mode mode;
2277 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2280 arg = CALL_EXPR_ARG (exp, 0);
2282 switch (DECL_FUNCTION_CODE (fndecl))
2284 CASE_FLT_FN (BUILT_IN_SIN):
2285 CASE_FLT_FN (BUILT_IN_COS):
2286 builtin_optab = sincos_optab; break;
2291 /* Make a suitable register to place result in. */
2292 mode = TYPE_MODE (TREE_TYPE (exp));
2294 /* Check if sincos insn is available, otherwise fallback
2295 to sin or cos insn. */
2296 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2297 switch (DECL_FUNCTION_CODE (fndecl))
2299 CASE_FLT_FN (BUILT_IN_SIN):
2300 builtin_optab = sin_optab; break;
2301 CASE_FLT_FN (BUILT_IN_COS):
2302 builtin_optab = cos_optab; break;
2307 /* Before working hard, check whether the instruction is available. */
2308 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2310 target = gen_reg_rtx (mode);
2312 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2313 need to expand the argument again. This way, we will not perform
2314 side-effects more than once. */
2315 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2317 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2321 /* Compute into TARGET.
2322 Set TARGET to wherever the result comes back. */
2323 if (builtin_optab == sincos_optab)
/* sincos produces both results; pass TARGET in the slot for the one
   we want and 0 for the one we discard.  */
2327 switch (DECL_FUNCTION_CODE (fndecl))
2329 CASE_FLT_FN (BUILT_IN_SIN):
2330 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2332 CASE_FLT_FN (BUILT_IN_COS):
2333 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2338 gcc_assert (result);
2342 target = expand_unop (mode, builtin_optab, op0, target, 0);
2347 /* Output the entire sequence. */
2348 insns = get_insns ();
2354 /* If we were unable to expand via the builtin, stop the sequence
2355 (without outputting the insns) and call to the library function
2356 with the stabilized argument list. */
2360 target = expand_call (exp, target, target == const0_rtx);
2365 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2366 return an RTL instruction code that implements the functionality.
2367 If that isn't possible or available return CODE_FOR_nothing. */
2369 static enum insn_code
2370 interclass_mathfn_icode (tree arg, tree fndecl)
/* Map an interclass math builtin (float operand, integer result:
   ilogb, isinf, isfinite/finite*, isnormal, isinfd*) to the insn code
   implementing it, or CODE_FOR_nothing when unavailable.  The optab is
   keyed on the mode of ARG, not of the result.  */
2372 bool errno_set = false;
2373 optab builtin_optab = 0;
2374 enum machine_mode mode;
2376 switch (DECL_FUNCTION_CODE (fndecl))
2378 CASE_FLT_FN (BUILT_IN_ILOGB):
2379 errno_set = true; builtin_optab = ilogb_optab; break;
2380 CASE_FLT_FN (BUILT_IN_ISINF):
2381 builtin_optab = isinf_optab; break;
2382 case BUILT_IN_ISNORMAL:
2383 case BUILT_IN_ISFINITE:
2384 CASE_FLT_FN (BUILT_IN_FINITE):
2385 case BUILT_IN_FINITED32:
2386 case BUILT_IN_FINITED64:
2387 case BUILT_IN_FINITED128:
2388 case BUILT_IN_ISINFD32:
2389 case BUILT_IN_ISINFD64:
2390 case BUILT_IN_ISINFD128:
2391 /* These builtins have no optabs (yet). */
2397 /* There's no easy way to detect the case we need to set EDOM. */
2398 if (flag_errno_math && errno_set)
2399 return CODE_FOR_nothing;
2401 /* Optab mode depends on the mode of the input argument. */
2402 mode = TYPE_MODE (TREE_TYPE (arg));
2405 return optab_handler (builtin_optab, mode);
2406 return CODE_FOR_nothing;
2409 /* Expand a call to one of the builtin math functions that operate on
2410 floating point argument and output an integer result (ilogb, isinf,
2412 Return 0 if a normal call should be emitted rather than expanding the
2413 function in-line. EXP is the expression that is a call to the builtin
2414 function; if convenient, the result should be placed in TARGET. */
2417 expand_builtin_interclass_mathfn (tree exp, rtx target)
/* Expand an interclass math builtin via the insn code chosen by
   interclass_mathfn_icode; on failure the emitted insns are deleted and
   the original argument is restored so a normal call can be emitted.
   NOTE(review): listing is elided -- braces and returns are missing.  */
2419 enum insn_code icode = CODE_FOR_nothing;
2421 tree fndecl = get_callee_fndecl (exp);
2422 enum machine_mode mode;
2425 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2428 arg = CALL_EXPR_ARG (exp, 0);
2429 icode = interclass_mathfn_icode (arg, fndecl);
2430 mode = TYPE_MODE (TREE_TYPE (arg));
2432 if (icode != CODE_FOR_nothing)
/* Remember the insn stream position so a failed expansion can be
   rolled back with delete_insns_since below.  */
2434 rtx last = get_last_insn ();
2435 tree orig_arg = arg;
2436 /* Make a suitable register to place result in. */
2438 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2439 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2440 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2442 gcc_assert (insn_data[icode].operand[0].predicate
2443 (target, GET_MODE (target)));
2445 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2446 need to expand the argument again. This way, we will not perform
2447 side-effects more than once. */
2448 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2450 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2452 if (mode != GET_MODE (op0))
2453 op0 = convert_to_mode (mode, op0, 0);
2455 /* Compute into TARGET.
2456 Set TARGET to wherever the result comes back. */
2457 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
/* Expansion failed: discard the partial sequence and undo the
   SAVE_EXPR wrapping before falling back to a library call.  */
2459 delete_insns_since (last);
2460 CALL_EXPR_ARG (exp, 0) = orig_arg;
2466 /* Expand a call to the builtin sincos math function.
2467 Return NULL_RTX if a normal call should be emitted rather than expanding the
2468 function in-line. EXP is the expression that is a call to the builtin
2472 expand_builtin_sincos (tree exp)
/* Expand sincos(x, *sinp, *cosp): compute both results with the sincos
   optab and store them through the two pointer arguments.  Requires the
   sincos insn; otherwise the caller emits a normal call.  */
2474 rtx op0, op1, op2, target1, target2;
2475 enum machine_mode mode;
2476 tree arg, sinp, cosp;
2478 location_t loc = EXPR_LOCATION (exp);
2479 tree alias_type, alias_off;
2481 if (!validate_arglist (exp, REAL_TYPE,
2482 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2485 arg = CALL_EXPR_ARG (exp, 0);
2486 sinp = CALL_EXPR_ARG (exp, 1);
2487 cosp = CALL_EXPR_ARG (exp, 2);
2489 /* Make a suitable register to place result in. */
2490 mode = TYPE_MODE (TREE_TYPE (arg));
2492 /* Check if sincos insn is available, otherwise emit the call. */
2493 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2496 target1 = gen_reg_rtx (mode);
2497 target2 = gen_reg_rtx (mode);
2499 op0 = expand_normal (arg);
/* Build MEM_REFs for *sinp and *cosp so the stores get the right
   aliasing information.  */
2500 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2501 alias_off = build_int_cst (alias_type, 0);
2502 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2504 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2507 /* Compute into target1 and target2.
2508 Set TARGET to wherever the result comes back. */
2509 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0)
2510 gcc_assert (result);
2512 /* Move target1 and target2 to the memory locations indicated
2514 emit_move_insn (op1, target1);
2515 emit_move_insn (op2, target2);
2520 /* Expand a call to the internal cexpi builtin to the sincos math function.
2521 EXP is the expression that is a call to the builtin function; if convenient,
2522 the result should be placed in TARGET. */
2525 expand_builtin_cexpi (tree exp, rtx target)
/* Expand the internal cexpi builtin (cos(x) + i*sin(x)) by one of three
   strategies, tried in order: the sincos optab, a call to the sincos
   library function, or a call to cexp with a constructed imaginary
   argument.  NOTE(review): listing is elided -- braces and some
   statements are missing relative to upstream.  */
2527 tree fndecl = get_callee_fndecl (exp);
2529 enum machine_mode mode;
2531 location_t loc = EXPR_LOCATION (exp);
2533 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2536 arg = CALL_EXPR_ARG (exp, 0);
2537 type = TREE_TYPE (arg);
2538 mode = TYPE_MODE (TREE_TYPE (arg));
2540 /* Try expanding via a sincos optab, fall back to emitting a libcall
2541 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2542 is only generated from sincos, cexp or if we have either of them. */
2543 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2545 op1 = gen_reg_rtx (mode);
2546 op2 = gen_reg_rtx (mode);
2548 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2550 /* Compute into op1 and op2. */
2551 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2553 else if (TARGET_HAS_SINCOS)
2555 tree call, fn = NULL_TREE;
/* Pick the sincos variant matching this cexpi's precision.  */
2559 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2560 fn = built_in_decls[BUILT_IN_SINCOSF];
2561 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2562 fn = built_in_decls[BUILT_IN_SINCOS];
2563 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2564 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Allocate stack temporaries to receive sin and cos, and build tree
   pointers to them to pass to sincos.  */
2568 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2569 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2570 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2571 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2572 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2573 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2575 /* Make sure not to fold the sincos call again. */
2576 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2577 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2578 call, 3, arg, top1, top2));
2582 tree call, fn = NULL_TREE, narg;
2583 tree ctype = build_complex_type (type);
2585 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2586 fn = built_in_decls[BUILT_IN_CEXPF];
2587 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2588 fn = built_in_decls[BUILT_IN_CEXP];
2589 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2590 fn = built_in_decls[BUILT_IN_CEXPL];
2594 /* If we don't have a decl for cexp create one. This is the
2595 friendliest fallback if the user calls __builtin_cexpi
2596 without full target C99 function support. */
2597 if (fn == NULL_TREE)
2600 const char *name = NULL;
2602 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2604 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2606 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2609 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2610 fn = build_fn_decl (name, fntype);
/* cexpi(x) == cexp(0 + x*i): build the complex argument.  */
2613 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2614 build_real (type, dconst0), arg);
2616 /* Make sure not to fold the cexp call again. */
2617 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2618 return expand_expr (build_call_nary (ctype, call, 1, narg),
2619 target, VOIDmode, EXPAND_NORMAL);
2622 /* Now build the proper return type. */
2623 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2624 make_tree (TREE_TYPE (arg), op2),
2625 make_tree (TREE_TYPE (arg), op1)),
2626 target, VOIDmode, EXPAND_NORMAL);
2629 /* Conveniently construct a function call expression. FNDECL names the
2630 function to be called, N is the number of arguments, and the "..."
2631 parameters are the argument expressions. Unlike build_call_expr
2632 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2635 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
/* Build a CALL_EXPR to FNDECL with N variadic tree arguments, without
   folding, and tag it with source location LOC.  */
2638 tree fntype = TREE_TYPE (fndecl);
2639 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2642 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2644 SET_EXPR_LOCATION (fn, loc);
2648 /* Expand a call to one of the builtin rounding functions gcc defines
2649 as an extension (lfloor and lceil). As these are gcc extensions we
2650 do not need to worry about setting errno to EDOM.
2651 If expanding via optab fails, lower expression to (int)(floor(x)).
2652 EXP is the expression that is a call to the builtin function;
2653 if convenient, the result should be placed in TARGET. */
2656 expand_builtin_int_roundingfn (tree exp, rtx target)
/* Expand lfloor/llfloor/lceil/llceil via the corresponding conversion
   optab; if that fails, lower to (int) floor(x) / (int) ceil(x),
   constructing a floor/ceil decl by hand for non-C99 targets.
   NOTE(review): listing is elided -- braces, breaks and the name
   assignments in the switch below are missing relative to upstream.  */
2658 convert_optab builtin_optab;
2659 rtx op0, insns, tmp;
2660 tree fndecl = get_callee_fndecl (exp);
2661 enum built_in_function fallback_fn;
2662 tree fallback_fndecl;
2663 enum machine_mode mode;
2666 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2669 arg = CALL_EXPR_ARG (exp, 0);
2671 switch (DECL_FUNCTION_CODE (fndecl))
2673 CASE_FLT_FN (BUILT_IN_LCEIL):
2674 CASE_FLT_FN (BUILT_IN_LLCEIL):
2675 builtin_optab = lceil_optab;
2676 fallback_fn = BUILT_IN_CEIL;
2679 CASE_FLT_FN (BUILT_IN_LFLOOR):
2680 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2681 builtin_optab = lfloor_optab;
2682 fallback_fn = BUILT_IN_FLOOR;
2689 /* Make a suitable register to place result in. */
2690 mode = TYPE_MODE (TREE_TYPE (exp));
2692 target = gen_reg_rtx (mode);
2694 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2695 need to expand the argument again. This way, we will not perform
2696 side-effects more than once. */
2697 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2699 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2703 /* Compute into TARGET. */
2704 if (expand_sfix_optab (target, op0, builtin_optab))
2706 /* Output the entire sequence. */
2707 insns = get_insns ();
2713 /* If we were unable to expand via the builtin, stop the sequence
2714 (without outputting the insns). */
2717 /* Fall back to floating point rounding optab. */
2718 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2720 /* For non-C99 targets we may end up without a fallback fndecl here
2721 if the user called __builtin_lfloor directly. In this case emit
2722 a call to the floor/ceil variants nevertheless. This should result
2723 in the best user experience for not full C99 targets. */
2724 if (fallback_fndecl == NULL_TREE)
2727 const char *name = NULL;
2729 switch (DECL_FUNCTION_CODE (fndecl))
2731 case BUILT_IN_LCEIL:
2732 case BUILT_IN_LLCEIL:
2735 case BUILT_IN_LCEILF:
2736 case BUILT_IN_LLCEILF:
2739 case BUILT_IN_LCEILL:
2740 case BUILT_IN_LLCEILL:
2743 case BUILT_IN_LFLOOR:
2744 case BUILT_IN_LLFLOOR:
2747 case BUILT_IN_LFLOORF:
2748 case BUILT_IN_LLFLOORF:
2751 case BUILT_IN_LFLOORL:
2752 case BUILT_IN_LLFLOORL:
/* Build a decl for the floor/ceil variant named above so it can be
   called even without C99 runtime support.  */
2759 fntype = build_function_type_list (TREE_TYPE (arg),
2760 TREE_TYPE (arg), NULL_TREE);
2761 fallback_fndecl = build_fn_decl (name, fntype);
2764 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2766 tmp = expand_normal (exp);
2768 /* Truncate the result of floating point optab to integer
2769 via expand_fix (). */
2770 target = gen_reg_rtx (mode);
2771 expand_fix (target, tmp, 0);
2776 /* Expand a call to one of the builtin math functions doing integer
2778 Return 0 if a normal call should be emitted rather than expanding the
2779 function in-line. EXP is the expression that is a call to the builtin
2780 function; if convenient, the result should be placed in TARGET. */
2783 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
/* Expand lrint/llrint/lround/llround via their conversion optabs;
   refuses when errno-math is on since the EDOM case cannot be detected,
   and falls back to a library call when the optab expansion fails.  */
2785 convert_optab builtin_optab;
2787 tree fndecl = get_callee_fndecl (exp);
2789 enum machine_mode mode;
2791 /* There's no easy way to detect the case we need to set EDOM. */
2792 if (flag_errno_math)
2795 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2798 arg = CALL_EXPR_ARG (exp, 0);
2800 switch (DECL_FUNCTION_CODE (fndecl))
2802 CASE_FLT_FN (BUILT_IN_LRINT):
2803 CASE_FLT_FN (BUILT_IN_LLRINT):
2804 builtin_optab = lrint_optab; break;
2805 CASE_FLT_FN (BUILT_IN_LROUND):
2806 CASE_FLT_FN (BUILT_IN_LLROUND):
2807 builtin_optab = lround_optab; break;
2812 /* Make a suitable register to place result in. */
2813 mode = TYPE_MODE (TREE_TYPE (exp));
2815 target = gen_reg_rtx (mode);
2817 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2818 need to expand the argument again. This way, we will not perform
2819 side-effects more than once. */
2820 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2822 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2826 if (expand_sfix_optab (target, op0, builtin_optab))
2828 /* Output the entire sequence. */
2829 insns = get_insns ();
2835 /* If we were unable to expand via the builtin, stop the sequence
2836 (without outputting the insns) and call to the library function
2837 with the stabilized argument list. */
2840 target = expand_call (exp, target, target == const0_rtx);
2845 /* To evaluate powi(x,n), the floating point value x raised to the
2846 constant integer exponent n, we use a hybrid algorithm that
2847 combines the "window method" with look-up tables. For an
2848 introduction to exponentiation algorithms and "addition chains",
2849 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2850 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2851 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2852 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2854 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2855 multiplications to inline before calling the system library's pow
2856 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2857 so this default never requires calling pow, powf or powl. */
2859 #ifndef POWI_MAX_MULTS
2860 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2863 /* The size of the "optimal power tree" lookup table. All
2864 exponents less than this value are simply looked up in the
2865 powi_table below. This threshold is also used to size the
2866 cache of pseudo registers that hold intermediate results. */
2867 #define POWI_TABLE_SIZE 256
2869 /* The size, in bits of the window, used in the "window method"
2870 exponentiation algorithm. This is equivalent to a radix of
2871 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2872 #define POWI_WINDOW_SIZE 3
2874 /* The following table is an efficient representation of an
2875 "optimal power tree". For each value, i, the corresponding
2876 value, j, in the table states than an optimal evaluation
2877 sequence for calculating pow(x,i) can be found by evaluating
2878 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2879 100 integers is given in Knuth's "Seminumerical algorithms". */
2881 static const unsigned char powi_table[POWI_TABLE_SIZE] =
/* powi_table[i] gives a split point j such that pow(x,i) is optimally
   evaluated as pow(x,j) * pow(x,i-j); used by powi_lookup_cost and
   expand_powi_1.  See the "optimal power tree" comment above.  */
2883 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2884 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2885 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2886 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2887 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2888 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2889 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2890 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2891 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2892 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2893 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2894 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2895 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2896 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2897 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2898 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2899 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2900 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2901 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2902 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2903 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2904 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2905 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2906 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2907 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2908 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2909 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2910 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2911 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2912 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2913 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2914 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2918 /* Return the number of multiplications required to calculate
2919 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2920 subroutine of powi_cost. CACHE is an array indicating
2921 which exponents have already been calculated. */
2924 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
/* Count the multiplications needed for powi(x,N), N < POWI_TABLE_SIZE,
   by recursing on the powi_table split; CACHE marks exponents already
   computed so shared subexpressions cost nothing extra.  */
2926 /* If we've already calculated this exponent, then this evaluation
2927 doesn't require any additional multiplications. */
2932 return powi_lookup_cost (n - powi_table[n], cache)
2933 + powi_lookup_cost (powi_table[n], cache) + 1;
2936 /* Return the number of multiplications required to calculate
2937 powi(x,n) for an arbitrary x, given the exponent N. This
2938 function needs to be kept in sync with expand_powi below. */
2941 powi_cost (HOST_WIDE_INT n)
/* Return the number of multiplications required for powi(x,N) for any
   x, mirroring the evaluation strategy of expand_powi below.  */
2943 bool cache[POWI_TABLE_SIZE];
2944 unsigned HOST_WIDE_INT digit;
2945 unsigned HOST_WIDE_INT val;
2951 /* Ignore the reciprocal when calculating the cost. */
2952 val = (n < 0) ? -n : n;
2954 /* Initialize the exponent cache. */
2955 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel POWI_WINDOW_SIZE bits off the exponent at a time
   until the residue fits the lookup table.  */
2960 while (val >= POWI_TABLE_SIZE)
2964 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2965 result += powi_lookup_cost (digit, cache)
2966 + POWI_WINDOW_SIZE + 1;
2967 val >>= POWI_WINDOW_SIZE;
2976 return result + powi_lookup_cost (val, cache);
2979 /* Recursive subroutine of expand_powi. This function takes the array,
2980 CACHE, of already calculated exponents and an exponent N and returns
2981 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2984 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
/* Recursive worker for expand_powi: return an rtx for CACHE[1]**N in
   MODE, memoizing small exponents in CACHE.  Three cases are visible:
   table-driven split for N < POWI_TABLE_SIZE, window split on the low
   POWI_WINDOW_SIZE bits, and squaring (N >> 1) otherwise.  */
2986 unsigned HOST_WIDE_INT digit;
2990 if (n < POWI_TABLE_SIZE)
2995 target = gen_reg_rtx (mode);
2998 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2999 op1 = expand_powi_1 (mode, powi_table[n], cache);
3003 target = gen_reg_rtx (mode);
3004 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
3005 op0 = expand_powi_1 (mode, n - digit, cache);
3006 op1 = expand_powi_1 (mode, digit, cache);
3010 target = gen_reg_rtx (mode);
3011 op0 = expand_powi_1 (mode, n >> 1, cache);
/* Multiply the two halves; make sure the product lands in TARGET.  */
3015 result = expand_mult (mode, op0, op1, target, 0);
3016 if (result != target)
3017 emit_move_insn (target, result);
3021 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
3022 floating point operand in mode MODE, and N is the exponent. This
3023 function needs to be kept in sync with powi_cost above. */
3026 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
/* Emit RTL computing powi(x,n): powers of |n| via expand_powi_1, then a
   division for negative exponents.  Must stay in sync with powi_cost.  */
3028 rtx cache[POWI_TABLE_SIZE];
3032 return CONST1_RTX (mode);
3034 memset (cache, 0, sizeof (cache));
3037 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
3039 /* If the original exponent was negative, reciprocate the result. */
3041 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3042 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3047 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
3048 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
3049 if we can simplify it. */
3051 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
/* Under -funsafe-math-optimizations, rewrite pow(x,c) for the constant
   exponents 0.5, 0.25, 0.75, 1/3 and 1/6 as combinations of sqrt and
   cbrt calls, then expand the rewritten tree; visible guards require
   a real constant exponent and (for most cases) a sqrt insn.  */
3054 if (TREE_CODE (arg1) == REAL_CST
3055 && !TREE_OVERFLOW (arg1)
3056 && flag_unsafe_math_optimizations)
3058 enum machine_mode mode = TYPE_MODE (type);
3059 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3060 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3061 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3062 tree op = NULL_TREE;
3066 /* Optimize pow (x, 0.5) into sqrt. */
3067 if (REAL_VALUES_EQUAL (c, dconsthalf))
3068 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3070 /* Don't do this optimization if we don't have a sqrt insn. */
3071 else if (optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
/* Build the constants 0.25 and 0.75 by exponent manipulation.  */
3073 REAL_VALUE_TYPE dconst1_4 = dconst1;
3074 REAL_VALUE_TYPE dconst3_4;
3075 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3077 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3078 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3080 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3081 machines that a builtin sqrt instruction is smaller than a
3082 call to pow with 0.25, so do this optimization even if
3084 if (REAL_VALUES_EQUAL (c, dconst1_4))
3086 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3087 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3090 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3091 are optimizing for space. */
3092 else if (optimize_insn_for_speed_p ()
3093 && !TREE_SIDE_EFFECTS (arg0)
3094 && REAL_VALUES_EQUAL (c, dconst3_4))
3096 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3097 tree sqrt2 = builtin_save_expr (sqrt1);
3098 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3099 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3104 /* Check whether we can do cbrt instead of pow (x, 1./3.) and
3105 cbrt/sqrts instead of pow (x, 1./6.). */
3107 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3109 /* First try 1/3. */
3110 REAL_VALUE_TYPE dconst1_3
3111 = real_value_truncate (mode, dconst_third ());
3113 if (REAL_VALUES_EQUAL (c, dconst1_3))
3114 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3117 else if (optimize_insn_for_speed_p ()
3118 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
3120 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3121 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3123 if (REAL_VALUES_EQUAL (c, dconst1_6))
/* pow (x, 1/6) == cbrt (sqrt (x)).  */
3125 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3126 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3132 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3138 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3139 a normal call should be emitted rather than expanding the function
3140 in-line. EXP is the expression that is a call to the builtin
3141 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): this listing is elided (the embedded source line numbers
   jump), so some statements, braces and declarations of this function are
   not visible here.  Comments describe only the visible code.  */
3144 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3148 tree type = TREE_TYPE (exp);
3149 REAL_VALUE_TYPE cint, c, c2;
3152 enum machine_mode mode = TYPE_MODE (type);
/* Require exactly (REAL_TYPE, REAL_TYPE) arguments; otherwise punt to a
   normal call (the return statement for the failure path is elided).  */
3154 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3157 arg0 = CALL_EXPR_ARG (exp, 0);
3158 arg1 = CALL_EXPR_ARG (exp, 1);
/* Only a usable constant exponent enables the special expansions below;
   anything else goes through the generic two-argument math expander.  */
3160 if (TREE_CODE (arg1) != REAL_CST
3161 || TREE_OVERFLOW (arg1))
3162 return expand_builtin_mathfn_2 (exp, target, subtarget);
3164 /* Handle constant exponents. */
3166 /* For integer valued exponents we can expand to an optimal multiplication
3167 sequence using expand_powi. */
3168 c = TREE_REAL_CST (arg1);
3169 n = real_to_integer (&c);
3170 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* Exponents -1, 0, 1, 2 are always worth expanding; larger integral
   exponents only under -funsafe-math-optimizations when the powi
   multiplication chain is cheap enough.  */
3171 if (real_identical (&c, &cint)
3172 && ((n >= -1 && n <= 2)
3173 || (flag_unsafe_math_optimizations
3174 && optimize_insn_for_speed_p ()
3175 && powi_cost (n) <= POWI_MAX_MULTS)))
3177 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3180 op = force_reg (mode, op);
3181 op = expand_powi (op, mode, n);
/* Save ARG0 so the sqrt/cbrt paths below can re-evaluate it safely.  */
3186 narg0 = builtin_save_expr (arg0);
3188 /* If the exponent is not integer valued, check if it is half of an integer.
3189 In this case we can expand to sqrt (x) * x**(n/2). */
3190 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3191 if (fn != NULL_TREE)
3193 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3194 n = real_to_integer (&c2);
3195 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3196 if (real_identical (&c2, &cint)
3197 && ((flag_unsafe_math_optimizations
3198 && optimize_insn_for_speed_p ()
3199 && powi_cost (n/2) <= POWI_MAX_MULTS)
3200 /* Even the c == 0.5 case cannot be done unconditionally
3201 when we need to preserve signed zeros, as
3202 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3203 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3204 /* For c == 1.5 we can assume that x * sqrt (x) is always
3205 smaller than pow (x, 1.5) if sqrt will not be expanded
3208 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3210 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3212 /* Use expand_expr in case the newly built call expression
3213 was folded to a non-call. */
3214 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
/* Multiply sqrt(x) by x**|n/2| computed as a powi chain.  */
3217 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3218 op2 = force_reg (mode, op2);
3219 op2 = expand_powi (op2, mode, abs (n / 2));
3220 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3221 0, OPTAB_LIB_WIDEN);
3222 /* If the original exponent was negative, reciprocate the
3225 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3226 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3232 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3234 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3239 /* Try if the exponent is a third of an integer. In this case
3240 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3241 different from pow (x, 1./3.) due to rounding and behavior
3242 with negative x we need to constrain this transformation to
3243 unsafe math and positive x or finite math. */
3244 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3246 && flag_unsafe_math_optimizations
3247 && (tree_expr_nonnegative_p (arg0)
3248 || !HONOR_NANS (mode)))
3250 REAL_VALUE_TYPE dconst3;
3251 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
/* Check that c == round(3*c)/3 exactly in MODE, i.e. the exponent is a
   representable third of an integer.  */
3252 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3253 real_round (&c2, mode, &c2);
3254 n = real_to_integer (&c2);
3255 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3256 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3257 real_convert (&c2, mode, &c2);
3258 if (real_identical (&c2, &c)
3259 && ((optimize_insn_for_speed_p ()
3260 && powi_cost (n/3) <= POWI_MAX_MULTS)
3263 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3265 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* n % 3 == 2 needs cbrt(x)**2; square the cbrt result in place.  */
3266 if (abs (n) % 3 == 2)
3267 op = expand_simple_binop (mode, MULT, op, op, op,
3268 0, OPTAB_LIB_WIDEN);
3271 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3272 op2 = force_reg (mode, op2);
3273 op2 = expand_powi (op2, mode, abs (n / 3));
3274 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3275 0, OPTAB_LIB_WIDEN);
3276 /* If the original exponent was negative, reciprocate the
3279 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3280 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3286 /* Fall back to optab expansion. */
3287 return expand_builtin_mathfn_2 (exp, target, subtarget);
3290 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3291 a normal call should be emitted rather than expanding the function
3292 in-line. EXP is the expression that is a call to the builtin
3293 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  */
3296 expand_builtin_powi (tree exp, rtx target)
3300 enum machine_mode mode;
3301 enum machine_mode mode2;
/* __builtin_powi takes (REAL_TYPE, INTEGER_TYPE); punt otherwise.  */
3303 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3306 arg0 = CALL_EXPR_ARG (exp, 0);
3307 arg1 = CALL_EXPR_ARG (exp, 1);
3308 mode = TYPE_MODE (TREE_TYPE (exp));
3310 /* Handle constant power. */
3312 if (TREE_CODE (arg1) == INTEGER_CST
3313 && !TREE_OVERFLOW (arg1))
3315 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3317 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3318 Otherwise, check the number of multiplications required. */
/* The HIGH-word test checks the constant fits in a HOST_WIDE_INT
   (non-negative or sign-extended negative).  */
3319 if ((TREE_INT_CST_HIGH (arg1) == 0
3320 || TREE_INT_CST_HIGH (arg1) == -1)
3321 && ((n >= -1 && n <= 2)
3322 || (optimize_insn_for_speed_p ()
3323 && powi_cost (n) <= POWI_MAX_MULTS)))
3325 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3326 op0 = force_reg (mode, op0);
3327 return expand_powi (op0, mode, n);
3331 /* Emit a libcall to libgcc. */
3333 /* Mode of the 2nd argument must match that of an int. */
3334 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3336 if (target == NULL_RTX)
3337 target = gen_reg_rtx (mode);
/* Coerce both operands into the modes the libcall expects.  */
3339 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL)
3340 if (GET_MODE (op0) != mode)
3341 op0 = convert_to_mode (mode, op0, 0);
3342 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3343 if (GET_MODE (op1) != mode2)
3344 op1 = convert_to_mode (mode2, op1, 0);
3346 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3347 target, LCT_CONST, mode, 2,
3348 op0, mode, op1, mode2);
3353 /* Expand expression EXP which is a call to the strlen builtin. Return
3354 NULL_RTX if we failed the caller should emit a normal call, otherwise
3355 try to get the result in TARGET, if convenient. */
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  */
3358 expand_builtin_strlen (tree exp, rtx target,
3359 enum machine_mode target_mode)
3361 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3367 tree src = CALL_EXPR_ARG (exp, 0);
3368 rtx result, src_reg, char_rtx, before_strlen;
3369 enum machine_mode insn_mode = target_mode, char_mode;
3370 enum insn_code icode = CODE_FOR_nothing;
3373 /* If the length can be computed at compile-time, return it. */
3374 len = c_strlen (src, 0);
3376 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3378 /* If the length can be computed at compile-time and is constant
3379 integer, but there are side-effects in src, evaluate
3380 src for side-effects, then return len.
3381 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3382 can be optimized into: i++; x = 3; */
3383 len = c_strlen (src, 1)
3384 if (len && TREE_CODE (len) == INTEGER_CST)
/* Expand SRC only for its side effects; discard the value.  */
3386 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3387 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3390 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3392 /* If SRC is not a pointer type, don't do this operation inline. */
3396 /* Bail out if we can't compute strlen in the right mode. */
/* Walk wider integer modes until a target strlen pattern exists.  */
3397 while (insn_mode != VOIDmode)
3399 icode = optab_handler (strlen_optab, insn_mode);
3400 if (icode != CODE_FOR_nothing)
3403 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3405 if (insn_mode == VOIDmode)
3408 /* Make a place to write the result of the instruction. */
3412 && GET_MODE (result) == insn_mode
3413 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3414 result = gen_reg_rtx (insn_mode);
3416 /* Make a place to hold the source address. We will not expand
3417 the actual source until we are sure that the expansion will
3418 not fail -- there are trees that cannot be expanded twice. */
3419 src_reg = gen_reg_rtx (Pmode);
3421 /* Mark the beginning of the strlen sequence so we can emit the
3422 source operand later. */
3423 before_strlen = get_last_insn ();
3425 char_rtx = const0_rtx;
3426 char_mode = insn_data[(int) icode].operand[2].mode;
3427 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3429 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3431 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3432 char_rtx, GEN_INT (align));
3437 /* Now that we are assured of success, expand the source. */
3439 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3441 emit_move_insn (src_reg, pat);
/* Place the source-address setup before the strlen pattern emitted
   above, or at the start of the sequence when there was no prior insn.  */
3446 emit_insn_after (pat, before_strlen);
3448 emit_insn_before (pat, get_insns ());
3450 /* Return the value in the proper mode for this function. */
3451 if (GET_MODE (result) == target_mode)
3453 else if (target != 0)
3454 convert_move (target, result, 0);
3456 target = convert_to_mode (target_mode, result, 0);
3462 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3463 bytes from constant string DATA + OFFSET and return it as target
/* DATA is a NUL-terminated C string; the assert guarantees the read
   (including the terminating NUL) stays inside the string.  */
3467 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3468 enum machine_mode mode)
3470 const char *str = (const char *) data;
3472 gcc_assert (offset >= 0
3473 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3474 <= strlen (str) + 1));
3476 return c_readstr (str + offset, mode);
3479 /* Expand a call EXP to the memcpy builtin.
3480 Return NULL_RTX if we failed, the caller should emit a normal call,
3481 otherwise try to get the result in TARGET, if convenient (and in
3482 mode MODE if that's convenient). */
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  */
3485 expand_builtin_memcpy (tree exp, rtx target)
3487 if (!validate_arglist (exp,
3488 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3492 tree dest = CALL_EXPR_ARG (exp, 0);
3493 tree src = CALL_EXPR_ARG (exp, 1);
3494 tree len = CALL_EXPR_ARG (exp, 2);
3495 const char *src_str;
3496 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3497 unsigned int dest_align
3498 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3499 rtx dest_mem, src_mem, dest_addr, len_rtx;
3500 HOST_WIDE_INT expected_size = -1;
3501 unsigned int expected_align = 0;
3503 /* If DEST is not a pointer type, call the normal function. */
3504 if (dest_align == 0)
3507 /* If either SRC is not a pointer type, don't do this
3508 operation in-line. */
/* Pull profile-feedback hints (expected block size/alignment) for the
   block-move expander, when expanding a gimple statement.  */
3512 if (currently_expanding_gimple_stmt)
3513 stringop_block_profile (currently_expanding_gimple_stmt,
3514 &expected_align, &expected_size);
3516 if (expected_align < dest_align)
3517 expected_align = dest_align;
3518 dest_mem = get_memory_rtx (dest, len);
3519 set_mem_align (dest_mem, dest_align);
3520 len_rtx = expand_normal (len);
3521 src_str = c_getstr (src);
3523 /* If SRC is a string constant and block move would be done
3524 by pieces, we can avoid loading the string from memory
3525 and only stored the computed constants. */
3527 && CONST_INT_P (len_rtx)
3528 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3529 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3530 CONST_CAST (char *, src_str),
3533 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3534 builtin_memcpy_read_str,
3535 CONST_CAST (char *, src_str),
3536 dest_align, false, 0);
3537 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3538 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3542 src_mem = get_memory_rtx (src, len);
3543 set_mem_align (src_mem, src_align);
3545 /* Copy word part most expediently. */
3546 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3547 CALL_EXPR_TAILCALL (exp)
3548 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3549 expected_align, expected_size);
/* memcpy returns DEST; materialize the destination address.  */
3553 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3554 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3560 /* Expand a call EXP to the mempcpy builtin.
3561 Return NULL_RTX if we failed; the caller should emit a normal call,
3562 otherwise try to get the result in TARGET, if convenient (and in
3563 mode MODE if that's convenient). If ENDP is 0 return the
3564 destination pointer, if ENDP is 1 return the end pointer ala
3565 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* Thin wrapper: validates the (ptr, ptr, int) argument list and forwards
   to expand_builtin_mempcpy_args with endp==1 (mempcpy semantics).  */
3569 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3576 tree dest = CALL_EXPR_ARG (exp, 0);
3577 tree src = CALL_EXPR_ARG (exp, 1);
3578 tree len = CALL_EXPR_ARG (exp, 2);
3579 return expand_builtin_mempcpy_args (dest, src, len,
3580 target, mode, /*endp=*/ 1);
3584 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3585 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3586 so that this can also be called without constructing an actual CALL_EXPR.
3587 The other arguments and return value are the same as for
3588 expand_builtin_mempcpy. */
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  */
3591 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3592 rtx target, enum machine_mode mode, int endp)
3594 /* If return value is ignored, transform mempcpy into memcpy. */
3595 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3597 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3598 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3600 return expand_expr (result, target, mode, EXPAND_NORMAL);
3604 const char *src_str;
3605 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3606 unsigned int dest_align
3607 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3608 rtx dest_mem, src_mem, len_rtx;
3610 /* If either SRC or DEST is not a pointer type, don't do this
3611 operation in-line. */
3612 if (dest_align == 0 || src_align == 0)
3615 /* If LEN is not constant, call the normal function. */
3616 if (! host_integerp (len, 1))
3619 len_rtx = expand_normal (len);
3620 src_str = c_getstr (src);
3622 /* If SRC is a string constant and block move would be done
3623 by pieces, we can avoid loading the string from memory
3624 and only stored the computed constants. */
3626 && CONST_INT_P (len_rtx)
3627 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3628 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3629 CONST_CAST (char *, src_str),
/* Store the constant bytes directly; ENDP selects which pointer
   (start / end / end-1) store_by_pieces returns.  */
3632 dest_mem = get_memory_rtx (dest, len);
3633 set_mem_align (dest_mem, dest_align);
3634 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3635 builtin_memcpy_read_str,
3636 CONST_CAST (char *, src_str),
3637 dest_align, false, endp);
3638 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3639 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise fall back to a piecewise move when the constant length is
   small enough for the given alignments.  */
3643 if (CONST_INT_P (len_rtx)
3644 && can_move_by_pieces (INTVAL (len_rtx),
3645 MIN (dest_align, src_align)))
3647 dest_mem = get_memory_rtx (dest, len);
3648 set_mem_align (dest_mem, dest_align);
3649 src_mem = get_memory_rtx (src, len);
3650 set_mem_align (src_mem, src_align);
3651 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3652 MIN (dest_align, src_align), endp);
3653 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3654 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Fallback definitions for targets that provide no movstr pattern
   (the guarding #ifndef is elided from this listing).  */
3663 # define HAVE_movstr 0
3664 # define CODE_FOR_movstr CODE_FOR_nothing
3667 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3668 we failed, the caller should emit a normal call, otherwise try to
3669 get the result in TARGET, if convenient. If ENDP is 0 return the
3670 destination pointer, if ENDP is 1 return the end pointer ala
3671 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  */
3675 expand_movstr (tree dest, tree src, rtx target, int endp)
3681 const struct insn_data_d * data;
3686 dest_mem = get_memory_rtx (dest, NULL);
3687 src_mem = get_memory_rtx (src, NULL);
3688 data = insn_data + CODE_FOR_movstr;
/* When the caller wants the destination pointer, keep it in TARGET and
   rewrite DEST_MEM to address through it.  */
3691 target = force_reg (Pmode, XEXP (dest_mem, 0));
3692 dest_mem = replace_equiv_address (dest_mem, target);
3693 end = gen_reg_rtx (Pmode);
3698 || target == const0_rtx
3699 || ! (*data->operand[0].predicate) (target, Pmode))
3701 end = gen_reg_rtx (Pmode);
3702 if (target != const0_rtx)
/* Narrow END to the mode operand 0 of the movstr pattern expects.  */
3709 if (data->operand[0].mode != VOIDmode)
3710 end = gen_lowpart (data->operand[0].mode, end);
3712 insn = data->genfun (end, dest_mem, src_mem);
3718 /* movstr is supposed to set end to the address of the NUL
3719 terminator. If the caller requested a mempcpy-like return value,
3721 if (endp == 1 && target != const0_rtx)
3723 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3724 emit_move_insn (target, force_operand (tem, NULL_RTX));
3730 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3731 NULL_RTX if we failed the caller should emit a normal call, otherwise
3732 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* Thin wrapper: validates the (ptr, ptr) argument list, then delegates
   to expand_builtin_strcpy_args.  */
3736 expand_builtin_strcpy (tree exp, rtx target)
3738 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3740 tree dest = CALL_EXPR_ARG (exp, 0);
3741 tree src = CALL_EXPR_ARG (exp, 1);
3742 return expand_builtin_strcpy_args (dest, src, target);
3747 /* Helper function to do the actual work for expand_builtin_strcpy. The
3748 arguments to the builtin_strcpy call DEST and SRC are broken out
3749 so that this can also be called without constructing an actual CALL_EXPR.
3750 The other arguments and return value are the same as for
3751 expand_builtin_strcpy. */
/* endp==0: strcpy returns the destination start pointer.  */
3754 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3756 return expand_movstr (dest, src, target, /*endp=*/0);
3759 /* Expand a call EXP to the stpcpy builtin.
3760 Return NULL_RTX if we failed the caller should emit a normal call,
3761 otherwise try to get the result in TARGET, if convenient (and in
3762 mode MODE if that's convenient). */
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  */
3765 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3768 location_t loc = EXPR_LOCATION (exp);
3770 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3773 dst = CALL_EXPR_ARG (exp, 0);
3774 src = CALL_EXPR_ARG (exp, 1);
3776 /* If return value is ignored, transform stpcpy into strcpy. */
3777 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3779 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3780 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3781 return expand_expr (result, target, mode, EXPAND_NORMAL);
3788 /* Ensure we get an actual string whose length can be evaluated at
3789 compile-time, not an expression containing a string. This is
3790 because the latter will potentially produce pessimized code
3791 when used to produce the return value. */
3792 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3793 return expand_movstr (dst, src, target, /*endp=*/2);
/* Known length: stpcpy(d, s) == mempcpy(d, s, strlen(s)+1) - 1,
   expressed via endp==2.  */
3795 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3796 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3797 target, mode, /*endp=*/2);
/* If mempcpy expansion failed but LEN is a constant, fall back to
   strcpy and compute the end pointer as DEST + LEN.  */
3802 if (TREE_CODE (len) == INTEGER_CST)
3804 rtx len_rtx = expand_normal (len);
3806 if (CONST_INT_P (len_rtx))
3808 ret = expand_builtin_strcpy_args (dst, src, target);
3814 if (mode != VOIDmode)
3815 target = gen_reg_rtx (mode);
3817 target = gen_reg_rtx (GET_MODE (ret));
3819 if (GET_MODE (target) != GET_MODE (ret))
3820 ret = gen_lowpart (GET_MODE (target), ret);
3822 ret = plus_constant (ret, INTVAL (len_rtx));
3823 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3831 return expand_movstr (dst, src, target, /*endp=*/2);
3835 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3836 bytes from constant string DATA + OFFSET and return it as target
/* Unlike builtin_memcpy_read_str, reads past the NUL return zero-filled
   words, which implements strncpy's mandatory zero padding (the return
   of the zero constant for that case is elided from this listing).  */
3840 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3841 enum machine_mode mode)
3843 const char *str = (const char *) data;
3845 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3848 return c_readstr (str + offset, mode);
3851 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3852 NULL_RTX if we failed the caller should emit a normal call. */
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  */
3855 expand_builtin_strncpy (tree exp, rtx target)
3857 location_t loc = EXPR_LOCATION (exp);
3859 if (validate_arglist (exp,
3860 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3862 tree dest = CALL_EXPR_ARG (exp, 0);
3863 tree src = CALL_EXPR_ARG (exp, 1);
3864 tree len = CALL_EXPR_ARG (exp, 2);
3865 tree slen = c_strlen (src, 1);
3867 /* We must be passed a constant len and src parameter. */
3868 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* SLEN becomes strlen(src) + 1, the number of bytes actually copied
   from SRC before zero padding starts.  */
3871 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3873 /* We're required to pad with trailing zeros if the requested
3874 len is greater than strlen(s2)+1. In that case try to
3875 use store_by_pieces, if it fails, punt. */
3876 if (tree_int_cst_lt (slen, len))
3878 unsigned int dest_align
3879 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3880 const char *p = c_getstr (src);
3883 if (!p || dest_align == 0 || !host_integerp (len, 1)
3884 || !can_store_by_pieces (tree_low_cst (len, 1),
3885 builtin_strncpy_read_str,
3886 CONST_CAST (char *, p),
3890 dest_mem = get_memory_rtx (dest, len);
3891 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3892 builtin_strncpy_read_str,
3893 CONST_CAST (char *, p), dest_align, false, 0);
/* strncpy returns DEST; hand back its address in ptr_mode.  */
3894 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3895 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3902 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3903 bytes from constant string DATA + OFFSET and return it as target
/* DATA points at a single fill byte; build a MODE-sized constant with
   every byte equal to it (OFFSET is irrelevant and unused).  */
3907 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3908 enum machine_mode mode)
3910 const char *c = (const char *) data;
3911 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3913 memset (p, *c, GET_MODE_SIZE (mode));
3915 return c_readstr (p, mode);
3918 /* Callback routine for store_by_pieces. Return the RTL of a register
3919 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3920 char value given in the RTL register data. For example, if mode is
3921 4 bytes wide, return the RTL for 0x01010101*data. */
3924 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3925 enum machine_mode mode)
3931 size = GET_MODE_SIZE (mode);
/* COEFF is the 0x0101...01 constant; multiplying the byte value by it
   replicates that byte into every byte of the word.  */
3935 p = XALLOCAVEC (char, size);
3936 memset (p, 1, size);
3937 coeff = c_readstr (p, mode);
3939 target = convert_to_mode (mode, (rtx) data, 1);
3940 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3941 return force_reg (mode, target);
3944 /* Expand expression EXP, which is a call to the memset builtin. Return
3945 NULL_RTX if we failed the caller should emit a normal call, otherwise
3946 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* Thin wrapper: validates the (ptr, int, int) argument list and forwards
   to expand_builtin_memset_args, passing EXP for location/tail-call info.  */
3950 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3952 if (!validate_arglist (exp,
3953 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3957 tree dest = CALL_EXPR_ARG (exp, 0);
3958 tree val = CALL_EXPR_ARG (exp, 1);
3959 tree len = CALL_EXPR_ARG (exp, 2);
3960 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3964 /* Helper function to do the actual work for expand_builtin_memset. The
3965 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3966 so that this can also be called without constructing an actual CALL_EXPR.
3967 The other arguments and return value are the same as for
3968 expand_builtin_memset. */
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  */
3971 expand_builtin_memset_args (tree dest, tree val, tree len,
3972 rtx target, enum machine_mode mode, tree orig_exp)
3975 enum built_in_function fcode;
3977 unsigned int dest_align;
3978 rtx dest_mem, dest_addr, len_rtx;
3979 HOST_WIDE_INT expected_size = -1;
3980 unsigned int expected_align = 0;
3982 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3984 /* If DEST is not a pointer type, don't do this operation in-line. */
3985 if (dest_align == 0)
/* Profile-feedback hints for the block-clear expander.  */
3988 if (currently_expanding_gimple_stmt)
3989 stringop_block_profile (currently_expanding_gimple_stmt,
3990 &expected_align, &expected_size);
3992 if (expected_align < dest_align)
3993 expected_align = dest_align;
3995 /* If the LEN parameter is zero, return DEST. */
3996 if (integer_zerop (len))
3998 /* Evaluate and ignore VAL in case it has side-effects. */
3999 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4000 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4003 /* Stabilize the arguments in case we fail. */
4004 dest = builtin_save_expr (dest);
4005 val = builtin_save_expr (val);
4006 len = builtin_save_expr (len);
4008 len_rtx = expand_normal (len);
4009 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: replicate it at run time.  */
4011 if (TREE_CODE (val) != INTEGER_CST)
4015 val_rtx = expand_normal (val);
4016 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4019 /* Assume that we can memset by pieces if we can store
4020 * the coefficients by pieces (in the required modes).
4021 * We can't pass builtin_memset_gen_str as that emits RTL. */
4023 if (host_integerp (len, 1)
4024 && can_store_by_pieces (tree_low_cst (len, 1),
4025 builtin_memset_read_str, &c, dest_align,
4028 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4030 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4031 builtin_memset_gen_str, val_rtx, dest_align,
4034 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4035 dest_align, expected_align,
4039 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4040 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value: reduce it to a target char C (punt on failure,
   the failure path is elided here).  */
4044 if (target_char_cast (val, &c))
4049 if (host_integerp (len, 1)
4050 && can_store_by_pieces (tree_low_cst (len, 1),
4051 builtin_memset_read_str, &c, dest_align,
4053 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4054 builtin_memset_read_str, &c, dest_align, true, 0);
4055 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4056 dest_align, expected_align,
4060 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4061 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Fill value is zero: use the generic block-clear expander.  */
4065 set_mem_align (dest_mem, dest_align);
4066 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4067 CALL_EXPR_TAILCALL (orig_exp)
4068 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4069 expected_align, expected_size);
4073 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4074 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* All inline strategies failed: rebuild the original memset/bzero call
   (preserving tail-call status) and expand it as a plain call.  */
4080 fndecl = get_callee_fndecl (orig_exp);
4081 fcode = DECL_FUNCTION_CODE (fndecl);
4082 if (fcode == BUILT_IN_MEMSET)
4083 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4085 else if (fcode == BUILT_IN_BZERO)
4086 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4090 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4091 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4092 return expand_call (fn, target, target == const0_rtx);
4095 /* Expand expression EXP, which is a call to the bzero builtin. Return
4096 NULL_RTX if we failed the caller should emit a normal call. */
4099 expand_builtin_bzero (tree exp)
4102 location_t loc = EXPR_LOCATION (exp);
4104 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4107 dest = CALL_EXPR_ARG (exp, 0);
4108 size = CALL_EXPR_ARG (exp, 1);
4110 /* New argument list transforming bzero(ptr x, int y) to
4111 memset(ptr x, int 0, size_t y). This is done this way
4112 so that if it isn't expanded inline, we fallback to
4113 calling bzero instead of memset. */
/* const0_rtx as TARGET marks the return value as unused, since bzero
   returns void.  */
4115 return expand_builtin_memset_args (dest, integer_zero_node,
4116 fold_convert_loc (loc, sizetype, size),
4117 const0_rtx, VOIDmode, exp);
4120 /* Expand expression EXP, which is a call to the memcmp built-in function.
4121 Return NULL_RTX if we failed and the
4122 caller should emit a normal call, otherwise try to get the result in
4123 TARGET, if convenient (and in mode MODE, if that's convenient). */
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  Inline expansion only exists when the target provides a
   cmpmemsi or cmpstrnsi pattern.  */
4126 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4127 ATTRIBUTE_UNUSED enum machine_mode mode)
4129 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4131 if (!validate_arglist (exp,
4132 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4135 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4137 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4140 tree arg1 = CALL_EXPR_ARG (exp, 0);
4141 tree arg2 = CALL_EXPR_ARG (exp, 1);
4142 tree len = CALL_EXPR_ARG (exp, 2);
4144 unsigned int arg1_align
4145 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4146 unsigned int arg2_align
4147 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4148 enum machine_mode insn_mode;
/* Prefer cmpmemsi; fall back to cmpstrnsi for the result mode.  */
4150 #ifdef HAVE_cmpmemsi
4152 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4155 #ifdef HAVE_cmpstrnsi
4157 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4162 /* If we don't have POINTER_TYPE, call the function. */
4163 if (arg1_align == 0 || arg2_align == 0)
4166 /* Make a place to write the result of the instruction. */
4169 && REG_P (result) && GET_MODE (result) == insn_mode
4170 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4171 result = gen_reg_rtx (insn_mode);
4173 arg1_rtx = get_memory_rtx (arg1, len);
4174 arg2_rtx = get_memory_rtx (arg2, len);
4175 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4177 /* Set MEM_SIZE as appropriate. */
4178 if (CONST_INT_P (arg3_rtx))
4180 set_mem_size (arg1_rtx, arg3_rtx);
4181 set_mem_size (arg2_rtx, arg3_rtx);
4184 #ifdef HAVE_cmpmemsi
4186 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4187 GEN_INT (MIN (arg1_align, arg2_align)));
4190 #ifdef HAVE_cmpstrnsi
4192 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4193 GEN_INT (MIN (arg1_align, arg2_align)));
/* No usable pattern at expansion time: emit a plain memcmp libcall on
   the already-stabilized operands.  */
4201 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4202 TYPE_MODE (integer_type_node), 3,
4203 XEXP (arg1_rtx, 0), Pmode,
4204 XEXP (arg2_rtx, 0), Pmode,
4205 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4206 TYPE_UNSIGNED (sizetype)),
4207 TYPE_MODE (sizetype));
4209 /* Return the value in the proper mode for this function. */
4210 mode = TYPE_MODE (TREE_TYPE (exp));
4211 if (GET_MODE (result) == mode)
4213 else if (target != 0)
4215 convert_move (target, result, 0);
4219 return convert_to_mode (mode, result, 0);
4226 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4227 if we failed the caller should emit a normal call, otherwise try to get
4228 the result in TARGET, if convenient. */
/* NOTE(review): listing is elided; some lines of this function are not
   visible here.  */
4231 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4233 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4236 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4237 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4238 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4240 rtx arg1_rtx, arg2_rtx;
4241 rtx result, insn = NULL_RTX;
4243 tree arg1 = CALL_EXPR_ARG (exp, 0);
4244 tree arg2 = CALL_EXPR_ARG (exp, 1);
4246 unsigned int arg1_align
4247 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4248 unsigned int arg2_align
4249 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4251 /* If we don't have POINTER_TYPE, call the function. */
4252 if (arg1_align == 0 || arg2_align == 0)
4255 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4256 arg1 = builtin_save_expr (arg1);
4257 arg2 = builtin_save_expr (arg2);
4259 arg1_rtx = get_memory_rtx (arg1, NULL);
4260 arg2_rtx = get_memory_rtx (arg2, NULL);
4262 #ifdef HAVE_cmpstrsi
4263 /* Try to call cmpstrsi. */
4266 enum machine_mode insn_mode
4267 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4269 /* Make a place to write the result of the instruction. */
4272 && REG_P (result) && GET_MODE (result) == insn_mode
4273 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4274 result = gen_reg_rtx (insn_mode);
4276 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4277 GEN_INT (MIN (arg1_align, arg2_align)));
4280 #ifdef HAVE_cmpstrnsi
4281 /* Try to determine at least one length and call cmpstrnsi. */
4282 if (!insn && HAVE_cmpstrnsi)
4287 enum machine_mode insn_mode
4288 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* c_strlen with 1 allows conditional expressions like i ? "a" : "bc";
   lengths are strlen+1 so the NUL is compared too.  */
4289 tree len1 = c_strlen (arg1, 1);
4290 tree len2 = c_strlen (arg2, 1);
4293 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4295 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4297 /* If we don't have a constant length for the first, use the length
4298 of the second, if we know it. We don't require a constant for
4299 this case; some cost analysis could be done if both are available
4300 but neither is constant. For now, assume they're equally cheap,
4301 unless one has side effects. If both strings have constant lengths,
/* Selection chain picking LEN from len1/len2; several branches of it
   are elided in this listing.  */
4308 else if (TREE_SIDE_EFFECTS (len1))
4310 else if (TREE_SIDE_EFFECTS (len2))
4312 else if (TREE_CODE (len1) != INTEGER_CST)
4314 else if (TREE_CODE (len2) != INTEGER_CST)
4316 else if (tree_int_cst_lt (len1, len2))
4321 /* If both arguments have side effects, we cannot optimize. */
4322 if (!len || TREE_SIDE_EFFECTS (len))
4325 arg3_rtx = expand_normal (len);
4327 /* Make a place to write the result of the instruction. */
4330 && REG_P (result) && GET_MODE (result) == insn_mode
4331 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4332 result = gen_reg_rtx (insn_mode);
4334 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4335 GEN_INT (MIN (arg1_align, arg2_align)));
/* An insn was generated: emit it and convert RESULT to the call's
   declared return mode.  */
4341 enum machine_mode mode;
4344 /* Return the value in the proper mode for this function. */
4345 mode = TYPE_MODE (TREE_TYPE (exp));
4346 if (GET_MODE (result) == mode)
4349 return convert_to_mode (mode, result, 0);
4350 convert_move (target, result, 0);
4354 /* Expand the library call ourselves using a stabilized argument
4355 list to avoid re-evaluating the function's arguments twice. */
4356 #ifdef HAVE_cmpstrnsi
4359 fndecl = get_callee_fndecl (exp);
4360 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4361 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4362 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4363 return expand_call (fn, target, target == const0_rtx);
4369 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4370    NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4371    the result in TARGET, if convenient.  */
4374 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4375 			ATTRIBUTE_UNUSED enum machine_mode mode)
4377   location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
/* strncmp takes (const char *, const char *, size_t); punt to a normal
   call on any other argument list.  */
4379   if (!validate_arglist (exp,
4380  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4383   /* If c_strlen can determine an expression for one of the string
4384      lengths, and it doesn't have side effects, then emit cmpstrnsi
4385      using length MIN(strlen(string)+1, arg3).  */
4386 #ifdef HAVE_cmpstrnsi
4389     tree len, len1, len2;
4390     rtx arg1_rtx, arg2_rtx, arg3_rtx;
4393     tree arg1 = CALL_EXPR_ARG (exp, 0);
4394     tree arg2 = CALL_EXPR_ARG (exp, 1);
4395     tree arg3 = CALL_EXPR_ARG (exp, 2);
/* Known pointer alignment in bytes; 0 means not a pointer (checked
   below before emitting the insn).  */
4397     unsigned int arg1_align
4398       = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4399     unsigned int arg2_align
4400       = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4401     enum machine_mode insn_mode
4402       = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4404     len1 = c_strlen (arg1, 1);
4405     len2 = c_strlen (arg2, 1);
/* Add one so the terminating NUL is included in the compared length.  */
4408       len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4410       len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4412     /* If we don't have a constant length for the first, use the length
4413        of the second, if we know it.  We don't require a constant for
4414        this case; some cost analysis could be done if both are available
4415        but neither is constant.  For now, assume they're equally cheap,
4416        unless one has side effects.  If both strings have constant lengths,
/* NOTE(review): alternatives of this cascade are elided in this listing;
   the visible arms prefer the side-effect-free and the smaller constant
   length, mirroring expand_builtin_strcmp.  */
4423     else if (TREE_SIDE_EFFECTS (len1))
4425     else if (TREE_SIDE_EFFECTS (len2))
4427     else if (TREE_CODE (len1) != INTEGER_CST)
4429     else if (TREE_CODE (len2) != INTEGER_CST)
4431     else if (tree_int_cst_lt (len1, len2))
4436     /* If both arguments have side effects, we cannot optimize.  */
4437     if (!len || TREE_SIDE_EFFECTS (len))
4440     /* The actual new length parameter is MIN(len,arg3).  */
4441     len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4442 			   fold_convert_loc (loc, TREE_TYPE (len), arg3));
4444     /* If we don't have POINTER_TYPE, call the function.  */
4445     if (arg1_align == 0 || arg2_align == 0)
4448     /* Make a place to write the result of the instruction.  */
4451 	  && REG_P (result) && GET_MODE (result) == insn_mode
4452 	  && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4453       result = gen_reg_rtx (insn_mode);
4455     /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
4456     arg1 = builtin_save_expr (arg1);
4457     arg2 = builtin_save_expr (arg2);
4458     len = builtin_save_expr (len);
4460     arg1_rtx = get_memory_rtx (arg1, len);
4461     arg2_rtx = get_memory_rtx (arg2, len);
4462     arg3_rtx = expand_normal (len);
4463     insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4464 			  GEN_INT (MIN (arg1_align, arg2_align)));
4469 	/* Return the value in the proper mode for this function.  */
4470 	mode = TYPE_MODE (TREE_TYPE (exp));
4471 	if (GET_MODE (result) == mode)
4474 	  return convert_to_mode (mode, result, 0);
4475 	convert_move (target, result, 0);
4479     /* Expand the library call ourselves using a stabilized argument
4480        list to avoid re-evaluating the function's arguments twice.  */
4481     fndecl = get_callee_fndecl (exp);
4482     fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4484     gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4485     CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4486     return expand_call (fn, target, target == const0_rtx);
4492 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4493    if that's convenient.  */
4496 expand_builtin_saveregs (void)
4500   /* Don't do __builtin_saveregs more than once in a function.
4501      Save the result of the first call and reuse it.  */
4502   if (saveregs_value != 0)
4503     return saveregs_value;
4505   /* When this function is called, it means that registers must be
4506      saved on entry to this function.  So we migrate the call to the
4507      first insn of this function.  */
4511   /* Do whatever the machine needs done in this case.  */
/* The real work is delegated to the target hook; the rtl it emits is
   captured in a sequence (the start_sequence/get_insns pair is elided in
   this listing — presumably around this call; verify against the full
   source).  */
4512   val = targetm.calls.expand_builtin_saveregs ();
/* Cache the result so a second __builtin_saveregs reuses it.  */
4517   saveregs_value = val;
4519   /* Put the insns after the NOTE that starts the function.  If this
4520      is inside a start_sequence, make the outer-level insn chain current, so
4521      the code is placed at the start of the function.  */
4522   push_topmost_sequence ();
4523   emit_insn_after (seq, entry_of_function ());
4524   pop_topmost_sequence ();
4529 /* Expand a call to __builtin_next_arg.  */
4532 expand_builtin_next_arg (void)
4534   /* Checking arguments is already done in fold_builtin_next_arg
4535      that must be called before this function.  */
/* The address of the first anonymous argument is the incoming argument
   pointer plus the offset of the named arguments.  */
4536   return expand_binop (ptr_mode, add_optab,
4537 		       crtl->args.internal_arg_pointer,
4538 		       crtl->args.arg_offset_rtx,
4539 		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
4542 /* Make it easier for the backends by protecting the valist argument
4543    from multiple evaluations.  */
4546 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4548   tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4550   /* The current way of determining the type of valist is completely
4551      bogus.  We should have the information on the va builtin instead.  */
4553     vatype = targetm.fn_abi_va_list (cfun->decl);
/* Array-type va_lists (e.g. the x86-64 ABI) decay to a pointer; hand the
   backend a pointer to the element type.  */
4555   if (TREE_CODE (vatype) == ARRAY_TYPE)
4557       if (TREE_SIDE_EFFECTS (valist))
4558 	valist = save_expr (valist);
4560       /* For this case, the backends will be expecting a pointer to
4561 	 vatype, but it's possible we've actually been given an array
4562 	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4564       if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4566 	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
4567 	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
/* Non-array va_list: when an lvalue is needed, take the address, mark it
   side-effecting so it is not re-evaluated, and dereference via MEM_REF.  */
4572       tree pt = build_pointer_type (vatype);
4576 	  if (! TREE_SIDE_EFFECTS (valist))
4579 	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4580 	  TREE_SIDE_EFFECTS (valist) = 1;
4583       if (TREE_SIDE_EFFECTS (valist))
4584 	valist = save_expr (valist);
4585       valist = fold_build2_loc (loc, MEM_REF,
4586 				vatype, valist, build_int_cst (pt, 0));
4592 /* The "standard" definition of va_list is void*.  */
4595 std_build_builtin_va_list (void)
/* Default TARGET_BUILD_BUILTIN_VA_LIST hook: plain void pointer.  */
4597   return ptr_type_node;
4600 /* The "standard" abi va_list is va_list_type_node.  */
/* Default TARGET_FN_ABI_VA_LIST hook; FNDECL is unused because the
   default ABI has a single va_list type for all functions.  */
4603 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4605   return va_list_type_node;
4608 /* The "standard" type of va_list is va_list_type_node.  */
/* Default TARGET_CANONICAL_VA_LIST_TYPE hook: return va_list_type_node
   if TYPE denotes (possibly decayed) va_list, else fall through
   (the NULL_TREE return is elided in this listing).  */
4611 std_canonical_va_list_type (tree type)
/* Strip one level of indirection so we can compare the underlying type.  */
4615   if (INDIRECT_REF_P (type))
4616     type = TREE_TYPE (type);
4617   else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4618     type = TREE_TYPE (type);
4619   wtype = va_list_type_node;
4621   /* Treat structure va_list types.  */
4622   if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4623     htype = TREE_TYPE (htype);
4624   else if (TREE_CODE (wtype) == ARRAY_TYPE)
4626       /* If va_list is an array type, the argument may have decayed
4627 	 to a pointer type, e.g. by being passed to another function.
4628 	 In that case, unwrap both types so that we can compare the
4629 	 underlying records.  */
4630       if (TREE_CODE (htype) == ARRAY_TYPE
4631 	  || POINTER_TYPE_P (htype))
4633 	  wtype = TREE_TYPE (wtype);
4634 	  htype = TREE_TYPE (htype);
/* Match on main variants so qualifiers/typedefs don't matter.  */
4637   if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4638     return va_list_type_node;
4643 /* The "standard" implementation of va_start: just assign `nextarg' to
/* Store NEXTARG (the address of the first anonymous argument) into the
   va_list object VALIST.  */
4647 std_expand_builtin_va_start (tree valist, rtx nextarg)
4649   rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4650   convert_move (va_r, nextarg, 0);
4653 /* Expand EXP, a call to __builtin_va_start.  */
4656 expand_builtin_va_start (tree exp)
4660   location_t loc = EXPR_LOCATION (exp);
/* va_start needs the va_list plus the last named parameter.  */
4662   if (call_expr_nargs (exp) < 2)
4664       error_at (loc, "too few arguments to function %<va_start%>");
/* fold_builtin_next_arg diagnoses a bad second argument; bail out if it
   reported an error.  */
4668   if (fold_builtin_next_arg (exp, true))
4671   nextarg = expand_builtin_next_arg ();
4672   valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
/* Prefer the target's own va_start expander; otherwise use the
   standard pointer-assignment implementation.  */
4674   if (targetm.expand_builtin_va_start)
4675     targetm.expand_builtin_va_start (valist, nextarg);
4677     std_expand_builtin_va_start (valist, nextarg);
4682 /* The "standard" implementation of va_arg: read the value from the
4683    current (padded) address and increment by the (padded) size.  */
4686 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4689   tree addr, t, type_size, rounded_size, valist_tmp;
4690   unsigned HOST_WIDE_INT align, boundary;
4693 #ifdef ARGS_GROW_DOWNWARD
4694   /* All of the alignment and movement below is for args-grow-up machines.
4695      As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4696      implement their own specialized gimplify_va_arg_expr routines.  */
/* Arguments passed by invisible reference: fetch a pointer instead, and
   dereference it at the end (see build_va_arg_indirect_ref below).  */
4700   indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4702     type = build_pointer_type (type);
4704   align = PARM_BOUNDARY / BITS_PER_UNIT;
4705   boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4707   /* When we align parameter on stack for caller, if the parameter
4708      alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4709      aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
4710      here with caller.  */
4711   if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4712     boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4714   boundary /= BITS_PER_UNIT;
4716   /* Hoist the valist value into a temporary for the moment.  */
4717   valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4719   /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
4720      requires greater alignment, we must perform dynamic alignment.  */
4721   if (boundary > align
4722       && !integer_zerop (TYPE_SIZE (type)))
/* Round up: tmp = (tmp + boundary - 1) & -boundary, done as two
   gimplified assignments.  */
4724       t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4725 		  fold_build2 (POINTER_PLUS_EXPR,
4727 			       valist_tmp, size_int (boundary - 1)));
4728       gimplify_and_add (t, pre_p);
4730       t = fold_convert (sizetype, valist_tmp);
4731       t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4732 		  fold_convert (TREE_TYPE (valist),
4733 				fold_build2 (BIT_AND_EXPR, sizetype, t,
4734 					     size_int (-boundary))));
4735       gimplify_and_add (t, pre_p);
4740   /* If the actual alignment is less than the alignment of the type,
4741      adjust the type accordingly so that we don't assume strict alignment
4742      when dereferencing the pointer.  */
4743   boundary *= BITS_PER_UNIT;
4744   if (boundary < TYPE_ALIGN (type))
4746       type = build_variant_type_copy (type);
4747       TYPE_ALIGN (type) = boundary;
4750   /* Compute the rounded size of the type.  */
4751   type_size = size_in_bytes (type);
4752   rounded_size = round_up (type_size, align);
4754   /* Reduce rounded_size so it's sharable with the postqueue.  */
4755   gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
/* On big-endian (PAD_VARARGS_DOWN) targets a small argument sits at the
   high end of its slot, so step the address forward by the padding.  */
4759   if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4761       /* Small args are padded downward.  */
4762       t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4763 		       rounded_size, size_int (align));
4764       t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4765 		       size_binop (MINUS_EXPR, rounded_size, type_size));
4766       addr = fold_build2 (POINTER_PLUS_EXPR,
4767 			  TREE_TYPE (addr), addr, t);
4770   /* Compute new value for AP.  */
4771   t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4772   t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4773   gimplify_and_add (t, pre_p);
4775   addr = fold_convert (build_pointer_type (type), addr);
/* For pass-by-reference arguments, ADDR holds a pointer to the pointer;
   add one more dereference.  */
4778     addr = build_va_arg_indirect_ref (addr);
4780   return build_va_arg_indirect_ref (addr);
4783 /* Build an indirect-ref expression over the given TREE, which represents a
4784    piece of a va_arg() expansion.  */
4786 build_va_arg_indirect_ref (tree addr)
4788   addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
/* Tell mudflap not to check this dereference: va_arg accesses are
   compiler-generated and known in-bounds.  */
4790   if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
4796 /* Return a dummy expression of type TYPE in order to keep going after an
/* Build *(TYPE *)0 as a MEM_REF — a placeholder with the right type/mode
   used only for error recovery; it is never meant to be executed.  */
4800 dummy_object (tree type)
4802   tree t = build_int_cst (build_pointer_type (type), 0);
4803   return build2 (MEM_REF, type, t, t);
4806 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4807    builtin function, but a very special sort of operator.  */
4809 enum gimplify_status
4810 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4812   tree promoted_type, have_va_type;
4813   tree valist = TREE_OPERAND (*expr_p, 0);
4814   tree type = TREE_TYPE (*expr_p);
4816   location_t loc = EXPR_LOCATION (*expr_p);
4818   /* Verify that valist is of the proper type.  */
4819   have_va_type = TREE_TYPE (valist);
4820   if (have_va_type == error_mark_node)
4822   have_va_type = targetm.canonical_va_list_type (have_va_type);
4824   if (have_va_type == NULL_TREE)
4826       error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4830   /* Generate a diagnostic for requesting data of a type that cannot
4831      be passed through `...' due to type promotion at the call site.  */
4832   if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* Emit the help note only once per compilation.  */
4835       static bool gave_help;
4838       /* Unfortunately, this is merely undefined, rather than a constraint
4839 	 violation, so we cannot make this an error.  If this call is never
4840 	 executed, the program is still strictly conforming.  */
4841       warned = warning_at (loc, 0,
4842 	  		   "%qT is promoted to %qT when passed through %<...%>",
4843 			   type, promoted_type);
4844       if (!gave_help && warned)
4847 	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4848 		  promoted_type, type);
4851       /* We can, however, treat "undefined" any way we please.
4852 	 Call abort to encourage the user to fix the program.  */
4854 	inform (loc, "if this code is reached, the program will abort");
4855       /* Before the abort, allow the evaluation of the va_list
4856 	 expression to exit or longjmp.  */
4857       gimplify_and_add (valist, pre_p);
4858       t = build_call_expr_loc (loc,
4859 			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
4860       gimplify_and_add (t, pre_p);
4862       /* This is dead code, but go ahead and finish so that the
4863 	 mode of the result comes out right.  */
4864       *expr_p = dummy_object (type);
4869   /* Make it easier for the backends by protecting the valist argument
4870      from multiple evaluations.  */
4871   if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4873       /* For this case, the backends will be expecting a pointer to
4874 	 TREE_TYPE (abi), but it's possible we've
4875 	 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4877       if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4879 	  tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4880 	  valist = fold_convert_loc (loc, p1,
4881 				     build_fold_addr_expr_loc (loc, valist));
/* Array va_list: gimplify the decayed pointer as an rvalue; otherwise
   the backend needs a modifiable lvalue.  */
4884       gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4887     gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4889   if (!targetm.gimplify_va_arg_expr)
4890     /* FIXME: Once most targets are converted we should merely
4891        assert this is non-null.  */
/* Delegate the actual argument fetch to the target hook.  */
4894   *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4899 /* Expand EXP, a call to __builtin_va_end.  */
4902 expand_builtin_va_end (tree exp)
4904   tree valist = CALL_EXPR_ARG (exp, 0);
4906   /* Evaluate for side effects, if needed.  I hate macros that don't
/* va_end itself is a no-op here; only the argument's side effects
   (if any) are expanded, with the result discarded.  */
4908   if (TREE_SIDE_EFFECTS (valist))
4909     expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4914 /* Expand EXP, a call to __builtin_va_copy.  We do this as a
4915    builtin rather than just as an assignment in stdarg.h because of the
4916    nastiness of array-type va_list types.  */
4919 expand_builtin_va_copy (tree exp)
4922   location_t loc = EXPR_LOCATION (exp);
4924   dst = CALL_EXPR_ARG (exp, 0);
4925   src = CALL_EXPR_ARG (exp, 1);
/* DST must be an lvalue (1); SRC is only read (0).  */
4927   dst = stabilize_va_list_loc (loc, dst, 1);
4928   src = stabilize_va_list_loc (loc, src, 0);
4930   gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Scalar va_list: a plain assignment suffices.  */
4932   if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4934       t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4935       TREE_SIDE_EFFECTS (t) = 1;
4936       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array va_list: both expressions are pointers; copy the whole object
   with a block move.  */
4940       rtx dstb, srcb, size;
4942       /* Evaluate to pointers.  */
4943       dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4944       srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4945       size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4946 			  NULL_RTX, VOIDmode, EXPAND_NORMAL);
4948       dstb = convert_memory_address (Pmode, dstb);
4949       srcb = convert_memory_address (Pmode, srcb);
4951       /* "Dereference" to BLKmode memories.  */
4952       dstb = gen_rtx_MEM (BLKmode, dstb);
4953       set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4954       set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4955       srcb = gen_rtx_MEM (BLKmode, srcb);
4956       set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4957       set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4960       emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4966 /* Expand a call to one of the builtin functions __builtin_frame_address or
4967    __builtin_return_address.  */
4970 expand_builtin_frame_address (tree fndecl, tree exp)
4972   /* The argument must be a nonnegative integer constant.
4973      It counts the number of frames to scan up the stack.
4974      The value is the return address saved in that frame.  */
4975   if (call_expr_nargs (exp) == 0)
4976     /* Warning about missing arg was already issued.  */
/* Reject arguments that are not nonnegative integer constants fitting
   in a host word.  */
4978   else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4980       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4981 	error ("invalid argument to %<__builtin_frame_address%>");
4983 	error ("invalid argument to %<__builtin_return_address%>");
4989 	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4990 				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4992       /* Some ports cannot access arbitrary stack frames.  */
4995 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4996 	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
4998 	    warning (0, "unsupported argument to %<__builtin_return_address%>");
5002       /* For __builtin_frame_address, return what we've got.  */
5003       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* For __builtin_return_address, force non-constant results into a
   register so the caller gets a usable rtx.  */
5007 	  && ! CONSTANT_P (tem))
5008 	tem = copy_to_mode_reg (Pmode, tem);
5013 /* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
5014    failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
5015    is the same as for allocate_dynamic_stack_space.  */
5018 expand_builtin_alloca (tree exp, bool cannot_accumulate)
5023   /* Emit normal call if marked not-inlineable.  */
5024   if (CALL_CANNOT_INLINE_P (exp))
/* alloca takes a single size_t-like argument.  */
5027   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5030   /* Compute the argument.  */
5031   op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5033   /* Allocate the desired space.  */
5034   result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
/* The stack allocator works in Pmode; give the caller a ptr_mode value.  */
5036   result = convert_memory_address (ptr_mode, result);
5041 /* Expand a call to a bswap builtin with argument ARG0.  MODE
5042    is the mode to expand with.  */
5045 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5047   enum machine_mode mode;
5051   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5054   arg = CALL_EXPR_ARG (exp, 0);
5055   mode = TYPE_MODE (TREE_TYPE (arg));
5056   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* Byte-swap via the bswap optab; expand_unop may legitimately place the
   result elsewhere, hence the reassignment and assert.  */
5058   target = expand_unop (mode, bswap_optab, op0, target, 1);
5060   gcc_assert (target);
5062   return convert_to_mode (mode, target, 0);
5065 /* Expand a call to a unary builtin in EXP.
5066    Return NULL_RTX if a normal call should be emitted rather than expanding the
5067    function in-line.  If convenient, the result should be placed in TARGET.
5068    SUBTARGET may be used as the target for computing one of EXP's operands.  */
5071 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5072 		     rtx subtarget, optab op_optab)
5076   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5079   /* Compute the argument.  */
/* Reuse SUBTARGET only when its mode matches the argument's mode
   (the guarding condition's first clause is elided in this listing).  */
5080   op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5082 		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5083 			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5084 		     VOIDmode, EXPAND_NORMAL);
5085   /* Compute op, into TARGET if possible.
5086      Set TARGET to wherever the result comes back.  */
5087   target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5088 			op_optab, op0, target, 1);
5089   gcc_assert (target);
/* Convert to the mode the builtin is declared to return.  */
5091   return convert_to_mode (target_mode, target, 0);
5094 /* Expand a call to __builtin_expect.  We just return our argument
5095    as the builtin_expect semantic should've been already executed by
5096    tree branch prediction pass.  */
5099 expand_builtin_expect (tree exp, rtx target)
5103   if (call_expr_nargs (exp) < 2)
5105   arg = CALL_EXPR_ARG (exp, 0);
/* By rtl-expansion time the hint has served its purpose; just evaluate
   and return the first argument.  */
5107   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5108   /* When guessing was done, the hints should be already stripped away.  */
5109   gcc_assert (!flag_guess_branch_prob
5110 	      || optimize == 0 || seen_error ());
/* Emit code that stops the program: the target's trap insn if it has
   one, otherwise a call to abort via the library.  */
5115 expand_builtin_trap (void)
5119     emit_insn (gen_trap ());
5122     emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5126 /* Expand a call to __builtin_unreachable.  We do nothing except emit
5127    a barrier saying that control flow will not pass here.
5129    It is the responsibility of the program being compiled to ensure
5130    that control flow does never reach __builtin_unreachable.  */
5132 expand_builtin_unreachable (void)
5137 /* Expand EXP, a call to fabs, fabsf or fabsl.
5138    Return NULL_RTX if a normal call should be emitted rather than expanding
5139    the function inline.  If convenient, the result should be placed
5140    in TARGET.  SUBTARGET may be used as the target for computing
5144 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5146   enum machine_mode mode;
5150   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5153   arg = CALL_EXPR_ARG (exp, 0);
/* Save the stabilized argument back into the call so a later fallback
   expansion does not re-evaluate it.  */
5154   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5155   mode = TYPE_MODE (TREE_TYPE (arg));
5156   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5157   return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5160 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5161    Return NULL is a normal call should be emitted rather than expanding the
5162    function inline.  If convenient, the result should be placed in TARGET.
5163    SUBTARGET may be used as the target for computing the operand.  */
5166 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5171   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
/* op0 supplies the magnitude, op1 supplies the sign.  */
5174   arg = CALL_EXPR_ARG (exp, 0);
5175   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5177   arg = CALL_EXPR_ARG (exp, 1);
5178   op1 = expand_normal (arg);
5180   return expand_copysign (op0, op1, target);
5183 /* Create a new constant string literal and return a char* pointer to it.
5184    The STRING_CST value is the LEN characters at STR.  */
5186 build_string_literal (int len, const char *str)
5188   tree t, elem, index, type;
5190   t = build_string (len, str);
/* Element type is "const char"; the array type is char[len] with a
   0-based index type.  */
5191   elem = build_type_variant (char_type_node, 1, 0);
5192   index = build_index_type (size_int (len - 1));
5193   type = build_array_type (elem, index);
5194   TREE_TYPE (t) = type;
5195   TREE_CONSTANT (t) = 1;
5196   TREE_READONLY (t) = 1;
5197   TREE_STATIC (t) = 1;
/* Return &literal[0] as a const char *.  */
5199   type = build_pointer_type (elem);
5200   t = build1 (ADDR_EXPR, type,
5201 	      build4 (ARRAY_REF, elem,
5202 		      t, integer_zero_node, NULL_TREE, NULL_TREE));
5206 /* Expand a call to __builtin___clear_cache.  */
5209 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5211 #ifndef HAVE_clear_cache
5212 #ifdef CLEAR_INSN_CACHE
5213   /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5214      does something.  Just do the default expansion to a call to
5218   /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5219      does nothing.  There is no need to call it.  Do nothing.  */
5221 #endif /* CLEAR_INSN_CACHE */
5223   /* We have a "clear_cache" insn, and it will handle everything.  */
5225   rtx begin_rtx, end_rtx;
5226   enum insn_code icode;
5228   /* We must not expand to a library call.  If we did, any
5229      fallback library function in libgcc that might contain a call to
5230      __builtin___clear_cache() would recurse infinitely.  */
5231   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5233       error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5237   if (HAVE_clear_cache)
5239       icode = CODE_FOR_clear_cache;
/* Expand each bound to Pmode and force it to satisfy the insn's
   operand predicate (copying to a register if needed).  */
5241       begin = CALL_EXPR_ARG (exp, 0);
5242       begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5243       begin_rtx = convert_memory_address (Pmode, begin_rtx);
5244       if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5245 	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5247       end = CALL_EXPR_ARG (exp, 1);
5248       end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5249       end_rtx = convert_memory_address (Pmode, end_rtx);
5250       if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5251 	end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5253       emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5256 #endif /* HAVE_clear_cache */
5259 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
5262 round_trampoline_addr (rtx tramp)
5264   rtx temp, addend, mask;
5266   /* If we don't need too much alignment, we'll have been guaranteed
5267      proper alignment by get_trampoline_type.  */
5268   if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5271   /* Round address up to desired boundary.  */
/* tramp = (tramp + align_bytes - 1) & -align_bytes, computed in Pmode.  */
5272   temp = gen_reg_rtx (Pmode);
5273   addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5274   mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5276   temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5277 			      temp, 0, OPTAB_LIB_WIDEN);
5278   tramp = expand_simple_binop (Pmode, AND, temp, mask,
5279 			       temp, 0, OPTAB_LIB_WIDEN);
/* Expand a call to __builtin_init_trampoline: fill in the trampoline at
   TRAMP so that calling it invokes nested function FUNC with static
   chain CHAIN.  */
5285 expand_builtin_init_trampoline (tree exp)
5287   tree t_tramp, t_func, t_chain;
5288   rtx m_tramp, r_tramp, r_chain, tmp;
5290   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5291 			 POINTER_TYPE, VOID_TYPE))
5294   t_tramp = CALL_EXPR_ARG (exp, 0);
5295   t_func = CALL_EXPR_ARG (exp, 1);
5296   t_chain = CALL_EXPR_ARG (exp, 2);
5298   r_tramp = expand_normal (t_tramp);
5299   m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5300   MEM_NOTRAP_P (m_tramp) = 1;
5302   /* The TRAMP argument should be the address of a field within the
5303      local function's FRAME decl.  Let's see if we can fill in the
5304      to fill in the MEM_ATTRs for this memory.  */
5305   if (TREE_CODE (t_tramp) == ADDR_EXPR)
5306     set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
/* Enforce TRAMPOLINE_ALIGNMENT on the address and record the alignment
   and size on the MEM so the target hook can rely on them.  */
5309   tmp = round_trampoline_addr (r_tramp);
5312       m_tramp = change_address (m_tramp, BLKmode, tmp);
5313       set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5314       set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5317   /* The FUNC argument should be the address of the nested function.
5318      Extract the actual function decl to pass to the hook.  */
5319   gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5320   t_func = TREE_OPERAND (t_func, 0);
5321   gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5323   r_chain = expand_normal (t_chain);
5325   /* Generate insns to initialize the trampoline.  */
5326   targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
/* Record that a trampoline was emitted (used e.g. for -Wtrampolines and
   executable-stack handling) and warn if requested.  */
5328   trampolines_created = 1;
5330   warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5331 	      "trampoline generated for nested function %qD", t_func);
/* Expand a call to __builtin_adjust_trampoline: turn the address of a
   trampoline into a callable function address.  */
5337 expand_builtin_adjust_trampoline (tree exp)
5341   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5344   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5345   tramp = round_trampoline_addr (tramp);
/* Some targets (e.g. those with split code/data addressing) must
   post-process the trampoline address.  */
5346   if (targetm.calls.trampoline_adjust_address)
5347     tramp = targetm.calls.trampoline_adjust_address (tramp);
5352 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5353    function.  The function first checks whether the back end provides
5354    an insn to implement signbit for the respective mode.  If not, it
5355    checks whether the floating point format of the value is such that
5356    the sign bit can be extracted.  If that is not the case, the
5357    function returns NULL_RTX to indicate that a normal call should be
5358    emitted rather than expanding the function in-line.  EXP is the
5359    expression that is a call to the builtin function; if convenient,
5360    the result should be placed in TARGET.  */
5362 expand_builtin_signbit (tree exp, rtx target)
5364   const struct real_format *fmt;
5365   enum machine_mode fmode, imode, rmode;
5368   enum insn_code icode;
5370   location_t loc = EXPR_LOCATION (exp);
5372   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5375   arg = CALL_EXPR_ARG (exp, 0);
5376   fmode = TYPE_MODE (TREE_TYPE (arg));
5377   rmode = TYPE_MODE (TREE_TYPE (exp));
5378   fmt = REAL_MODE_FORMAT (fmode);
5380   arg = builtin_save_expr (arg);
5382   /* Expand the argument yielding a RTX expression. */
5383   temp = expand_normal (arg);
5385   /* Check if the back end provides an insn that handles signbit for the
5387   icode = optab_handler (signbit_optab, fmode);
5388   if (icode != CODE_FOR_nothing)
5390       rtx last = get_last_insn ();
5391       target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
/* If the insn fails to match, roll back any insns it emitted and fall
   through to the bit-extraction path.  */
5392       if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5394       delete_insns_since (last);
5397   /* For floating point formats without a sign bit, implement signbit
5399   bitpos = fmt->signbit_ro;
/* NOTE(review): the check that bitpos is valid is elided in this
   listing; the fallback below compiles signbit(x) as x < 0.0.  */
5402       /* But we can't do this if the format supports signed zero.  */
5403       if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5406       arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5407 			 build_real (TREE_TYPE (arg), dconst0));
5408       return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Narrow value: view the whole float as one integer of mode IMODE.  */
5411   if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5413       imode = int_mode_for_mode (fmode);
5414       if (imode == BLKmode)
5416       temp = gen_lowpart (imode, temp);
/* Wide value: extract just the word that contains the sign bit.  */
5421       /* Handle targets with different FP word orders.  */
5422       if (FLOAT_WORDS_BIG_ENDIAN)
5423 	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5425 	word = bitpos / BITS_PER_WORD;
5426       temp = operand_subword_force (temp, word, fmode);
5427       bitpos = bitpos % BITS_PER_WORD;
5430   /* Force the intermediate word_mode (or narrower) result into a
5431      register.  This avoids attempting to create paradoxical SUBREGs
5432      of floating point modes below.  */
5433   temp = force_reg (imode, temp);
5435   /* If the bitpos is within the "result mode" lowpart, the operation
5436      can be implement with a single bitwise AND.  Otherwise, we need
5437      a right shift and an AND.  */
5439   if (bitpos < GET_MODE_BITSIZE (rmode))
5441       double_int mask = double_int_setbit (double_int_zero, bitpos);
5443       if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5444 	temp = gen_lowpart (rmode, temp);
5445       temp = expand_binop (rmode, and_optab, temp,
5446 			   immed_double_int_const (mask, rmode),
5447 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5451       /* Perform a logical right shift to place the signbit in the least
5452 	 significant bit, then truncate the result to the desired mode
5453 	 and mask just this bit.  */
5454       temp = expand_shift (RSHIFT_EXPR, imode, temp,
5455 			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5456       temp = gen_lowpart (rmode, temp);
5457       temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5458 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5464 /* Expand fork or exec calls.  TARGET is the desired target of the
5465 call.  EXP is the call. FN is the
5466 identificator of the actual function.  IGNORE is nonzero if the
5467 value is to be ignored.  */
5470 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5475 /* If we are not profiling, just call the function.  */
5476 if (!profile_arc_flag)
5479 /* Otherwise call the wrapper.  This should be equivalent for the rest of
5480 compiler, so the code does not diverge, and the wrapper may run the
5481 code necessary for keeping the profiling sane.  */
/* Map each fork/exec builtin to its __gcov_-prefixed profiling wrapper,
   which performs the same system operation while flushing/merging profile
   data around the process replacement.  */
5483 switch (DECL_FUNCTION_CODE (fn))
5486 id = get_identifier ("__gcov_fork");
5489 case BUILT_IN_EXECL:
5490 id = get_identifier ("__gcov_execl");
5493 case BUILT_IN_EXECV:
5494 id = get_identifier ("__gcov_execv");
5497 case BUILT_IN_EXECLP:
5498 id = get_identifier ("__gcov_execlp");
5501 case BUILT_IN_EXECLE:
5502 id = get_identifier ("__gcov_execle");
5505 case BUILT_IN_EXECVP:
5506 id = get_identifier ("__gcov_execvp");
5509 case BUILT_IN_EXECVE:
5510 id = get_identifier ("__gcov_execve");
/* Synthesize an external, public FUNCTION_DECL for the wrapper, reusing
   FN's function type so the rewritten call is type-compatible with the
   original builtin's arguments.  */
5517 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5518 FUNCTION_DECL, id, TREE_TYPE (fn))
5519 DECL_EXTERNAL (decl) = 1;
5520 TREE_PUBLIC (decl) = 1;
5521 DECL_ARTIFICIAL (decl) = 1;
5522 TREE_NOTHROW (decl) = 1;
5523 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5524 DECL_VISIBILITY_SPECIFIED (decl) = 1;
/* Re-point the original CALL_EXPR at the wrapper decl and expand the
   resulting call normally.  */
5525 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5526 return expand_call (call, target, ignore);
5531 /* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
5532 the pointer in these functions is void*, the tree optimizers may remove
5533 casts.  The mode computed in expand_builtin isn't reliable either, due
5534 to __sync_bool_compare_and_swap.
5536 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5537 group of builtins.  This gives us log2 of the mode size.  */
5539 static inline enum machine_mode
5540 get_builtin_sync_mode (int fcode_diff)
5542 /* The size is not negotiable, so ask not to get BLKmode in return
5543 if the target indicates that a smaller size would be better.  */
/* BITS_PER_UNIT << fcode_diff: the FOO_1 variant is 1 byte, FOO_2 is
   2 bytes, and so on, so the offset within the builtin group encodes
   log2 of the access size.  */
5544 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5547 /* Expand the memory expression LOC and return the appropriate memory operand
5548 for the builtin_sync operations.  */
5551 get_builtin_sync_mem (tree loc, enum machine_mode mode)
/* Compute the address in ptr_mode first, then widen/narrow to Pmode as
   required for an actual MEM address.  */
5555 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5556 addr = convert_memory_address (Pmode, addr);
5558 /* Note that we explicitly do not want any alias information for this
5559 memory, so that we kill all other live memories.  Otherwise we don't
5560 satisfy the full barrier semantics of the intrinsic.  */
5561 mem = validize_mem (gen_rtx_MEM (mode, addr));
5563 /* The alignment needs to be at least according to that of the mode.  */
5564 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5565 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
/* Using the memory-barrier alias set plus volatility prevents the
   optimizers from moving other memory accesses across this operand.  */
5566 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5567 MEM_VOLATILE_P (mem) = 1;
5572 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5573 EXP is the CALL_EXPR.  CODE is the rtx code
5574 that corresponds to the arithmetic or logical operation from the name;
5575 an exception here is that NOT actually means NAND.  TARGET is an optional
5576 place for us to store the results; AFTER is true if this is the
5577 fetch_and_xxx form.  IGNORE is true if we don't actually care about
5578 the result of the operation at all.  */
5581 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5582 enum rtx_code code, bool after,
5583 rtx target, bool ignore)
5586 enum machine_mode old_mode;
5587 location_t loc = EXPR_LOCATION (exp);
/* __sync_fetch_and_nand / __sync_nand_and_fetch changed meaning in
   GCC 4.4; warn once per direction (-Wsync-nand) so users relying on
   the pre-4.4 semantics are alerted.  */
5589 if (code == NOT && warn_sync_nand)
5591 tree fndecl = get_callee_fndecl (exp);
5592 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5594 static bool warned_f_a_n, warned_n_a_f;
5598 case BUILT_IN_FETCH_AND_NAND_1:
5599 case BUILT_IN_FETCH_AND_NAND_2:
5600 case BUILT_IN_FETCH_AND_NAND_4:
5601 case BUILT_IN_FETCH_AND_NAND_8:
5602 case BUILT_IN_FETCH_AND_NAND_16:
5607 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5608 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5609 warned_f_a_n = true;
5612 case BUILT_IN_NAND_AND_FETCH_1:
5613 case BUILT_IN_NAND_AND_FETCH_2:
5614 case BUILT_IN_NAND_AND_FETCH_4:
5615 case BUILT_IN_NAND_AND_FETCH_8:
5616 case BUILT_IN_NAND_AND_FETCH_16:
5621 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5622 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5623 warned_n_a_f = true;
5631 /* Expand the operands.  */
5632 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5634 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5635 /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5636 of CONST_INTs, where we know the old_mode only from the call argument.  */
5637 old_mode = GET_MODE (val);
5638 if (old_mode == VOIDmode)
5639 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5640 val = convert_modes (mode, old_mode, val, 1);
/* When the result is ignored, the simpler "operation" expansion suffices;
   otherwise emit the fetch-op form, distinguishing fetch-before (AFTER
   false) from op-then-fetch (AFTER true).  */
5643 return expand_sync_operation (mem, val, code);
5645 return expand_sync_fetch_operation (mem, val, code, after, target);
5648 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5649 intrinsics. EXP is the CALL_EXPR.  IS_BOOL is
5650 true if this is the boolean form.  TARGET is a place for us to store the
5651 results; this is NOT optional if IS_BOOL is true.  */
5654 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5655 bool is_bool, rtx target)
5657 rtx old_val, new_val, mem;
5658 enum machine_mode old_mode;
5660 /* Expand the operands.  */
5661 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5664 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5665 mode, EXPAND_NORMAL);
5666 /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5667 of CONST_INTs, where we know the old_mode only from the call argument.  */
5668 old_mode = GET_MODE (old_val);
5669 if (old_mode == VOIDmode)
5670 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5671 old_val = convert_modes (mode, old_mode, old_val, 1);
/* Same mode-normalization dance for the replacement value (argument 2);
   OLD_MODE is reused here as a scratch variable.  */
5673 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5674 mode, EXPAND_NORMAL);
5675 /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5676 of CONST_INTs, where we know the old_mode only from the call argument.  */
5677 old_mode = GET_MODE (new_val);
5678 if (old_mode == VOIDmode)
5679 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5680 new_val = convert_modes (mode, old_mode, new_val, 1);
/* Boolean form yields success/failure; value form yields the prior
   contents of *MEM.  */
5683 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5685 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5688 /* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
5689 general form is actually an atomic exchange, and some targets only
5690 support a reduced form with the second argument being a constant 1.
5691 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5695 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5699 enum machine_mode old_mode;
5701 /* Expand the operands.  */
5702 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5703 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5704 /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5705 of CONST_INTs, where we know the old_mode only from the call argument.  */
5706 old_mode = GET_MODE (val);
5707 if (old_mode == VOIDmode)
5708 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5709 val = convert_modes (mode, old_mode, val, 1);
/* Emit the (possibly reduced) atomic exchange itself.  */
5711 return expand_sync_lock_test_and_set (mem, val, target);
5714 /* Expand the __sync_synchronize intrinsic.  */
5717 expand_builtin_synchronize (void)
5720 VEC (tree, gc) *v_clobbers;
/* Preference order: a target memory-barrier insn, then a library
   barrier routine, and finally a volatile asm with a "memory" clobber
   as a compiler-only fallback.  */
5722 #ifdef HAVE_memory_barrier
5723 if (HAVE_memory_barrier)
5725 emit_insn (gen_memory_barrier ());
5730 if (synchronize_libfunc != NULL_RTX)
5732 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5736 /* If no explicit memory barrier instruction is available, create an
5737 empty asm stmt with a memory clobber.  */
5738 v_clobbers = VEC_alloc (tree, gc, 1);
5739 VEC_quick_push (tree, v_clobbers,
5740 tree_cons (NULL, build_string (6, "memory"), NULL));
5741 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5742 gimple_asm_set_volatile (x, true);
5743 expand_asm_stmt (x);
5746 /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
5749 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5751 enum insn_code icode;
/* A lock release always stores zero.  */
5753 rtx val = const0_rtx;
5755 /* Expand the operands.  */
5756 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5758 /* If there is an explicit operation in the md file, use it.  */
5759 icode = direct_optab_handler (sync_lock_release_optab, mode);
5760 if (icode != CODE_FOR_nothing)
/* Force the zero into a register if the insn's operand predicate
   rejects the bare constant.  */
5762 if (!insn_data[icode].operand[1].predicate (val, mode))
5763 val = force_reg (mode, val);
5765 insn = GEN_FCN (icode) (mem, val);
5773 /* Otherwise we can implement this operation by emitting a barrier
5774 followed by a store of zero.  */
5775 expand_builtin_synchronize ();
5776 emit_move_insn (mem, val);
5779 /* Expand an expression EXP that calls a built-in function,
5780 with result going to TARGET if that's convenient
5781 (and in mode MODE if that's convenient).
5782 SUBTARGET may be used as the target for computing one of EXP's operands.
5783 IGNORE is nonzero if the value is to be ignored.  */
5786 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5789 tree fndecl = get_callee_fndecl (exp);
5790 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5791 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
/* Builtins defined by the target's machine description are expanded
   entirely by the backend hook.  */
5794 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5795 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5797 /* When not optimizing, generate calls to library functions for a certain
5800 && !called_as_built_in (fndecl)
5801 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5802 && fcode != BUILT_IN_ALLOCA
5803 && fcode != BUILT_IN_FREE)
5804 return expand_call (exp, target, ignore);
5806 /* The built-in function expanders test for target == const0_rtx
5807 to determine whether the function's result will be ignored.  */
5809 target = const0_rtx;
5811 /* If the result of a pure or const built-in function is ignored, and
5812 none of its arguments are volatile, we can avoid expanding the
5813 built-in call and just evaluate the arguments for side-effects.  */
5814 if (target == const0_rtx
5815 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5816 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5818 bool volatilep = false;
5820 call_expr_arg_iterator iter;
5822 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5823 if (TREE_THIS_VOLATILE (arg))
/* No volatile argument found: evaluate each argument only for its
   side effects and skip the call itself.  */
5831 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5832 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Main dispatch on the builtin's function code.  Each case that sets
   TARGET falls through to a library call when the expander returns
   NULL (target unsupported, arguments unsuitable, etc.).  */
5839 CASE_FLT_FN (BUILT_IN_FABS):
5840 target = expand_builtin_fabs (exp, target, subtarget);
5845 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5846 target = expand_builtin_copysign (exp, target, subtarget);
5851 /* Just do a normal library call if we were unable to fold
5853 CASE_FLT_FN (BUILT_IN_CABS):
5856 CASE_FLT_FN (BUILT_IN_EXP):
5857 CASE_FLT_FN (BUILT_IN_EXP10):
5858 CASE_FLT_FN (BUILT_IN_POW10):
5859 CASE_FLT_FN (BUILT_IN_EXP2):
5860 CASE_FLT_FN (BUILT_IN_EXPM1):
5861 CASE_FLT_FN (BUILT_IN_LOGB):
5862 CASE_FLT_FN (BUILT_IN_LOG):
5863 CASE_FLT_FN (BUILT_IN_LOG10):
5864 CASE_FLT_FN (BUILT_IN_LOG2):
5865 CASE_FLT_FN (BUILT_IN_LOG1P):
5866 CASE_FLT_FN (BUILT_IN_TAN):
5867 CASE_FLT_FN (BUILT_IN_ASIN):
5868 CASE_FLT_FN (BUILT_IN_ACOS):
5869 CASE_FLT_FN (BUILT_IN_ATAN):
5870 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5871 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5872 because of possible accuracy problems.  */
5873 if (! flag_unsafe_math_optimizations)
5875 CASE_FLT_FN (BUILT_IN_SQRT):
5876 CASE_FLT_FN (BUILT_IN_FLOOR):
5877 CASE_FLT_FN (BUILT_IN_CEIL):
5878 CASE_FLT_FN (BUILT_IN_TRUNC):
5879 CASE_FLT_FN (BUILT_IN_ROUND):
5880 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5881 CASE_FLT_FN (BUILT_IN_RINT):
5882 target = expand_builtin_mathfn (exp, target, subtarget);
5887 CASE_FLT_FN (BUILT_IN_FMA):
5888 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5893 CASE_FLT_FN (BUILT_IN_ILOGB):
5894 if (! flag_unsafe_math_optimizations)
5896 CASE_FLT_FN (BUILT_IN_ISINF):
5897 CASE_FLT_FN (BUILT_IN_FINITE):
5898 case BUILT_IN_ISFINITE:
5899 case BUILT_IN_ISNORMAL:
5900 target = expand_builtin_interclass_mathfn (exp, target);
5905 CASE_FLT_FN (BUILT_IN_LCEIL):
5906 CASE_FLT_FN (BUILT_IN_LLCEIL):
5907 CASE_FLT_FN (BUILT_IN_LFLOOR):
5908 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5909 target = expand_builtin_int_roundingfn (exp, target);
5914 CASE_FLT_FN (BUILT_IN_LRINT):
5915 CASE_FLT_FN (BUILT_IN_LLRINT):
5916 CASE_FLT_FN (BUILT_IN_LROUND):
5917 CASE_FLT_FN (BUILT_IN_LLROUND):
5918 target = expand_builtin_int_roundingfn_2 (exp, target);
5923 CASE_FLT_FN (BUILT_IN_POW):
5924 target = expand_builtin_pow (exp, target, subtarget);
5929 CASE_FLT_FN (BUILT_IN_POWI):
5930 target = expand_builtin_powi (exp, target);
5935 CASE_FLT_FN (BUILT_IN_ATAN2):
5936 CASE_FLT_FN (BUILT_IN_LDEXP):
5937 CASE_FLT_FN (BUILT_IN_SCALB):
5938 CASE_FLT_FN (BUILT_IN_SCALBN):
5939 CASE_FLT_FN (BUILT_IN_SCALBLN):
5940 if (! flag_unsafe_math_optimizations)
5943 CASE_FLT_FN (BUILT_IN_FMOD):
5944 CASE_FLT_FN (BUILT_IN_REMAINDER):
5945 CASE_FLT_FN (BUILT_IN_DREM):
5946 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5951 CASE_FLT_FN (BUILT_IN_CEXPI):
5952 target = expand_builtin_cexpi (exp, target);
/* cexpi expansion must always succeed; there is no library fallback.  */
5953 gcc_assert (target);
5956 CASE_FLT_FN (BUILT_IN_SIN):
5957 CASE_FLT_FN (BUILT_IN_COS):
5958 if (! flag_unsafe_math_optimizations)
5960 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5965 CASE_FLT_FN (BUILT_IN_SINCOS):
5966 if (! flag_unsafe_math_optimizations)
5968 target = expand_builtin_sincos (exp);
5973 case BUILT_IN_APPLY_ARGS:
5974 return expand_builtin_apply_args ();
5976 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5977 FUNCTION with a copy of the parameters described by
5978 ARGUMENTS, and ARGSIZE.  It returns a block of memory
5979 allocated on the stack into which is stored all the registers
5980 that might possibly be used for returning the result of a
5981 function.  ARGUMENTS is the value returned by
5982 __builtin_apply_args.  ARGSIZE is the number of bytes of
5983 arguments that must be copied.  ??? How should this value be
5984 computed? We'll also need a safe worst case value for varargs
5986 case BUILT_IN_APPLY:
5987 if (!validate_arglist (exp, POINTER_TYPE,
5988 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5989 && !validate_arglist (exp, REFERENCE_TYPE,
5990 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5996 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5997 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5998 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6000 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6003 /* __builtin_return (RESULT) causes the function to return the
6004 value described by RESULT.  RESULT is address of the block of
6005 memory returned by __builtin_apply.  */
6006 case BUILT_IN_RETURN:
6007 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6008 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6011 case BUILT_IN_SAVEREGS:
6012 return expand_builtin_saveregs ();
6014 case BUILT_IN_VA_ARG_PACK:
6015 /* All valid uses of __builtin_va_arg_pack () are removed during
6017 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6020 case BUILT_IN_VA_ARG_PACK_LEN:
6021 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6023 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6026 /* Return the address of the first anonymous stack arg.  */
6027 case BUILT_IN_NEXT_ARG:
6028 if (fold_builtin_next_arg (exp, false))
6030 return expand_builtin_next_arg ();
6032 case BUILT_IN_CLEAR_CACHE:
6033 target = expand_builtin___clear_cache (exp);
6038 case BUILT_IN_CLASSIFY_TYPE:
6039 return expand_builtin_classify_type (exp);
6041 case BUILT_IN_CONSTANT_P:
6044 case BUILT_IN_FRAME_ADDRESS:
6045 case BUILT_IN_RETURN_ADDRESS:
6046 return expand_builtin_frame_address (fndecl, exp);
6048 /* Returns the address of the area where the structure is returned.
6050 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6051 if (call_expr_nargs (exp) != 0
6052 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6053 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6056 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6058 case BUILT_IN_ALLOCA:
6059 /* If the allocation stems from the declaration of a variable-sized
6060 object, it cannot accumulate.  */
6061 target = expand_builtin_alloca (exp, ALLOCA_FOR_VAR_P (exp));
6066 case BUILT_IN_STACK_SAVE:
6067 return expand_stack_save ();
6069 case BUILT_IN_STACK_RESTORE:
6070 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6073 case BUILT_IN_BSWAP32:
6074 case BUILT_IN_BSWAP64:
6075 target = expand_builtin_bswap (exp, target, subtarget);
/* Bit-counting builtins, each routed through expand_builtin_unop with
   the matching optab.  */
6081 CASE_INT_FN (BUILT_IN_FFS):
6082 case BUILT_IN_FFSIMAX:
6083 target = expand_builtin_unop (target_mode, exp, target,
6084 subtarget, ffs_optab);
6089 CASE_INT_FN (BUILT_IN_CLZ):
6090 case BUILT_IN_CLZIMAX:
6091 target = expand_builtin_unop (target_mode, exp, target,
6092 subtarget, clz_optab);
6097 CASE_INT_FN (BUILT_IN_CTZ):
6098 case BUILT_IN_CTZIMAX:
6099 target = expand_builtin_unop (target_mode, exp, target,
6100 subtarget, ctz_optab);
6105 CASE_INT_FN (BUILT_IN_POPCOUNT):
6106 case BUILT_IN_POPCOUNTIMAX:
6107 target = expand_builtin_unop (target_mode, exp, target,
6108 subtarget, popcount_optab);
6113 CASE_INT_FN (BUILT_IN_PARITY):
6114 case BUILT_IN_PARITYIMAX:
6115 target = expand_builtin_unop (target_mode, exp, target,
6116 subtarget, parity_optab);
6121 case BUILT_IN_STRLEN:
6122 target = expand_builtin_strlen (exp, target, target_mode);
6127 case BUILT_IN_STRCPY:
6128 target = expand_builtin_strcpy (exp, target);
6133 case BUILT_IN_STRNCPY:
6134 target = expand_builtin_strncpy (exp, target);
6139 case BUILT_IN_STPCPY:
6140 target = expand_builtin_stpcpy (exp, target, mode);
6145 case BUILT_IN_MEMCPY:
6146 target = expand_builtin_memcpy (exp, target);
6151 case BUILT_IN_MEMPCPY:
6152 target = expand_builtin_mempcpy (exp, target, mode);
6157 case BUILT_IN_MEMSET:
6158 target = expand_builtin_memset (exp, target, mode);
6163 case BUILT_IN_BZERO:
6164 target = expand_builtin_bzero (exp);
6169 case BUILT_IN_STRCMP:
6170 target = expand_builtin_strcmp (exp, target);
6175 case BUILT_IN_STRNCMP:
6176 target = expand_builtin_strncmp (exp, target, mode);
6182 case BUILT_IN_MEMCMP:
6183 target = expand_builtin_memcmp (exp, target, mode);
6188 case BUILT_IN_SETJMP:
6189 /* This should have been lowered to the builtins below.  */
6192 case BUILT_IN_SETJMP_SETUP:
6193 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6194 and the receiver label.  */
6195 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6197 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6198 VOIDmode, EXPAND_NORMAL);
6199 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6200 rtx label_r = label_rtx (label);
6202 /* This is copied from the handling of non-local gotos.  */
6203 expand_builtin_setjmp_setup (buf_addr, label_r);
6204 nonlocal_goto_handler_labels
6205 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6206 nonlocal_goto_handler_labels);
6207 /* ??? Do not let expand_label treat us as such since we would
6208 not want to be both on the list of non-local labels and on
6209 the list of forced labels.  */
6210 FORCED_LABEL (label) = 0;
6215 case BUILT_IN_SETJMP_DISPATCHER:
6216 /* __builtin_setjmp_dispatcher is passed the dispatcher label.  */
6217 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6219 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6220 rtx label_r = label_rtx (label);
6222 /* Remove the dispatcher label from the list of non-local labels
6223 since the receiver labels have been added to it above.  */
6224 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6229 case BUILT_IN_SETJMP_RECEIVER:
6230 /* __builtin_setjmp_receiver is passed the receiver label.  */
6231 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6233 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6234 rtx label_r = label_rtx (label);
6236 expand_builtin_setjmp_receiver (label_r);
6241 /* __builtin_longjmp is passed a pointer to an array of five words.
6242 It's similar to the C library longjmp function but works with
6243 __builtin_setjmp above.  */
6244 case BUILT_IN_LONGJMP:
6245 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6247 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6248 VOIDmode, EXPAND_NORMAL);
6249 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6251 if (value != const1_rtx)
6253 error ("%<__builtin_longjmp%> second argument must be 1");
6257 expand_builtin_longjmp (buf_addr, value);
6262 case BUILT_IN_NONLOCAL_GOTO:
6263 target = expand_builtin_nonlocal_goto (exp);
6268 /* This updates the setjmp buffer that is its argument with the value
6269 of the current stack pointer.  */
6270 case BUILT_IN_UPDATE_SETJMP_BUF:
6271 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6274 = expand_normal (CALL_EXPR_ARG (exp, 0));
6276 expand_builtin_update_setjmp_buf (buf_addr);
6282 expand_builtin_trap ();
6285 case BUILT_IN_UNREACHABLE:
6286 expand_builtin_unreachable ();
6289 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6290 case BUILT_IN_SIGNBITD32:
6291 case BUILT_IN_SIGNBITD64:
6292 case BUILT_IN_SIGNBITD128:
6293 target = expand_builtin_signbit (exp, target);
6298 /* Various hooks for the DWARF 2 __throw routine.  */
6299 case BUILT_IN_UNWIND_INIT:
6300 expand_builtin_unwind_init ();
6302 case BUILT_IN_DWARF_CFA:
6303 return virtual_cfa_rtx;
6304 #ifdef DWARF2_UNWIND_INFO
6305 case BUILT_IN_DWARF_SP_COLUMN:
6306 return expand_builtin_dwarf_sp_column ();
6307 case BUILT_IN_INIT_DWARF_REG_SIZES:
6308 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6311 case BUILT_IN_FROB_RETURN_ADDR:
6312 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6313 case BUILT_IN_EXTRACT_RETURN_ADDR:
6314 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6315 case BUILT_IN_EH_RETURN:
6316 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6317 CALL_EXPR_ARG (exp, 1));
6319 #ifdef EH_RETURN_DATA_REGNO
6320 case BUILT_IN_EH_RETURN_DATA_REGNO:
6321 return expand_builtin_eh_return_data_regno (exp);
6323 case BUILT_IN_EXTEND_POINTER:
6324 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6325 case BUILT_IN_EH_POINTER:
6326 return expand_builtin_eh_pointer (exp);
6327 case BUILT_IN_EH_FILTER:
6328 return expand_builtin_eh_filter (exp);
6329 case BUILT_IN_EH_COPY_VALUES:
6330 return expand_builtin_eh_copy_values (exp);
6332 case BUILT_IN_VA_START:
6333 return expand_builtin_va_start (exp);
6334 case BUILT_IN_VA_END:
6335 return expand_builtin_va_end (exp);
6336 case BUILT_IN_VA_COPY:
6337 return expand_builtin_va_copy (exp);
6338 case BUILT_IN_EXPECT:
6339 return expand_builtin_expect (exp, target);
6340 case BUILT_IN_PREFETCH:
6341 expand_builtin_prefetch (exp);
6344 case BUILT_IN_INIT_TRAMPOLINE:
6345 return expand_builtin_init_trampoline (exp);
6346 case BUILT_IN_ADJUST_TRAMPOLINE:
6347 return expand_builtin_adjust_trampoline (exp);
6350 case BUILT_IN_EXECL:
6351 case BUILT_IN_EXECV:
6352 case BUILT_IN_EXECLP:
6353 case BUILT_IN_EXECLE:
6354 case BUILT_IN_EXECVP:
6355 case BUILT_IN_EXECVE:
6356 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
/* __sync_fetch_and_* / __sync_*_and_fetch family: the mode is
   reconstituted from the offset of FCODE within each builtin group
   (see get_builtin_sync_mode), then the operation is expanded with the
   appropriate rtx code; NOT stands for NAND here.  */
6361 case BUILT_IN_FETCH_AND_ADD_1:
6362 case BUILT_IN_FETCH_AND_ADD_2:
6363 case BUILT_IN_FETCH_AND_ADD_4:
6364 case BUILT_IN_FETCH_AND_ADD_8:
6365 case BUILT_IN_FETCH_AND_ADD_16:
6366 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6367 target = expand_builtin_sync_operation (mode, exp, PLUS,
6368 false, target, ignore);
6373 case BUILT_IN_FETCH_AND_SUB_1:
6374 case BUILT_IN_FETCH_AND_SUB_2:
6375 case BUILT_IN_FETCH_AND_SUB_4:
6376 case BUILT_IN_FETCH_AND_SUB_8:
6377 case BUILT_IN_FETCH_AND_SUB_16:
6378 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6379 target = expand_builtin_sync_operation (mode, exp, MINUS,
6380 false, target, ignore);
6385 case BUILT_IN_FETCH_AND_OR_1:
6386 case BUILT_IN_FETCH_AND_OR_2:
6387 case BUILT_IN_FETCH_AND_OR_4:
6388 case BUILT_IN_FETCH_AND_OR_8:
6389 case BUILT_IN_FETCH_AND_OR_16:
6390 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6391 target = expand_builtin_sync_operation (mode, exp, IOR,
6392 false, target, ignore);
6397 case BUILT_IN_FETCH_AND_AND_1:
6398 case BUILT_IN_FETCH_AND_AND_2:
6399 case BUILT_IN_FETCH_AND_AND_4:
6400 case BUILT_IN_FETCH_AND_AND_8:
6401 case BUILT_IN_FETCH_AND_AND_16:
6402 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6403 target = expand_builtin_sync_operation (mode, exp, AND,
6404 false, target, ignore);
6409 case BUILT_IN_FETCH_AND_XOR_1:
6410 case BUILT_IN_FETCH_AND_XOR_2:
6411 case BUILT_IN_FETCH_AND_XOR_4:
6412 case BUILT_IN_FETCH_AND_XOR_8:
6413 case BUILT_IN_FETCH_AND_XOR_16:
6414 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6415 target = expand_builtin_sync_operation (mode, exp, XOR,
6416 false, target, ignore);
6421 case BUILT_IN_FETCH_AND_NAND_1:
6422 case BUILT_IN_FETCH_AND_NAND_2:
6423 case BUILT_IN_FETCH_AND_NAND_4:
6424 case BUILT_IN_FETCH_AND_NAND_8:
6425 case BUILT_IN_FETCH_AND_NAND_16:
6426 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6427 target = expand_builtin_sync_operation (mode, exp, NOT,
6428 false, target, ignore);
6433 case BUILT_IN_ADD_AND_FETCH_1:
6434 case BUILT_IN_ADD_AND_FETCH_2:
6435 case BUILT_IN_ADD_AND_FETCH_4:
6436 case BUILT_IN_ADD_AND_FETCH_8:
6437 case BUILT_IN_ADD_AND_FETCH_16:
6438 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6439 target = expand_builtin_sync_operation (mode, exp, PLUS,
6440 true, target, ignore);
6445 case BUILT_IN_SUB_AND_FETCH_1:
6446 case BUILT_IN_SUB_AND_FETCH_2:
6447 case BUILT_IN_SUB_AND_FETCH_4:
6448 case BUILT_IN_SUB_AND_FETCH_8:
6449 case BUILT_IN_SUB_AND_FETCH_16:
6450 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6451 target = expand_builtin_sync_operation (mode, exp, MINUS,
6452 true, target, ignore);
6457 case BUILT_IN_OR_AND_FETCH_1:
6458 case BUILT_IN_OR_AND_FETCH_2:
6459 case BUILT_IN_OR_AND_FETCH_4:
6460 case BUILT_IN_OR_AND_FETCH_8:
6461 case BUILT_IN_OR_AND_FETCH_16:
6462 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6463 target = expand_builtin_sync_operation (mode, exp, IOR,
6464 true, target, ignore);
6469 case BUILT_IN_AND_AND_FETCH_1:
6470 case BUILT_IN_AND_AND_FETCH_2:
6471 case BUILT_IN_AND_AND_FETCH_4:
6472 case BUILT_IN_AND_AND_FETCH_8:
6473 case BUILT_IN_AND_AND_FETCH_16:
6474 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6475 target = expand_builtin_sync_operation (mode, exp, AND,
6476 true, target, ignore);
6481 case BUILT_IN_XOR_AND_FETCH_1:
6482 case BUILT_IN_XOR_AND_FETCH_2:
6483 case BUILT_IN_XOR_AND_FETCH_4:
6484 case BUILT_IN_XOR_AND_FETCH_8:
6485 case BUILT_IN_XOR_AND_FETCH_16:
6486 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6487 target = expand_builtin_sync_operation (mode, exp, XOR,
6488 true, target, ignore);
6493 case BUILT_IN_NAND_AND_FETCH_1:
6494 case BUILT_IN_NAND_AND_FETCH_2:
6495 case BUILT_IN_NAND_AND_FETCH_4:
6496 case BUILT_IN_NAND_AND_FETCH_8:
6497 case BUILT_IN_NAND_AND_FETCH_16:
6498 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6499 target = expand_builtin_sync_operation (mode, exp, NOT,
6500 true, target, ignore);
6505 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6506 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6507 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6508 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6509 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
/* The boolean CAS requires a register target of the result mode; MODE
   is temporarily the result mode here before being reassigned to the
   operand mode below.  */
6510 if (mode == VOIDmode)
6511 mode = TYPE_MODE (boolean_type_node);
6512 if (!target || !register_operand (target, mode))
6513 target = gen_reg_rtx (mode);
6515 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6516 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6521 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6522 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6523 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6524 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6525 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6526 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6527 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6532 case BUILT_IN_LOCK_TEST_AND_SET_1:
6533 case BUILT_IN_LOCK_TEST_AND_SET_2:
6534 case BUILT_IN_LOCK_TEST_AND_SET_4:
6535 case BUILT_IN_LOCK_TEST_AND_SET_8:
6536 case BUILT_IN_LOCK_TEST_AND_SET_16:
6537 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6538 target = expand_builtin_lock_test_and_set (mode, exp, target);
6543 case BUILT_IN_LOCK_RELEASE_1:
6544 case BUILT_IN_LOCK_RELEASE_2:
6545 case BUILT_IN_LOCK_RELEASE_4:
6546 case BUILT_IN_LOCK_RELEASE_8:
6547 case BUILT_IN_LOCK_RELEASE_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6549 expand_builtin_lock_release (mode, exp);
6552 case BUILT_IN_SYNCHRONIZE:
6553 expand_builtin_synchronize ();
6556 case BUILT_IN_OBJECT_SIZE:
6557 return expand_builtin_object_size (exp);
/* Object-size checked (_chk) variants of the string/memory builtins.  */
6559 case BUILT_IN_MEMCPY_CHK:
6560 case BUILT_IN_MEMPCPY_CHK:
6561 case BUILT_IN_MEMMOVE_CHK:
6562 case BUILT_IN_MEMSET_CHK:
6563 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6568 case BUILT_IN_STRCPY_CHK:
6569 case BUILT_IN_STPCPY_CHK:
6570 case BUILT_IN_STRNCPY_CHK:
6571 case BUILT_IN_STRCAT_CHK:
6572 case BUILT_IN_STRNCAT_CHK:
6573 case BUILT_IN_SNPRINTF_CHK:
6574 case BUILT_IN_VSNPRINTF_CHK:
6575 maybe_emit_chk_warning (exp, fcode);
6578 case BUILT_IN_SPRINTF_CHK:
6579 case BUILT_IN_VSPRINTF_CHK:
6580 maybe_emit_sprintf_chk_warning (exp, fcode);
6584 maybe_emit_free_warning (exp);
6587 default: /* just do library call, if unknown builtin */
6591 /* The switch statement above can drop through to cause the function
6592 to be called normally.  */
6593 return expand_call (exp, target, ignore);
6596 /* Determine whether a tree node represents a call to a built-in
6597 function.  If the tree T is a call to a built-in function with
6598 the right number of arguments of the appropriate types, return
6599 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6600 Otherwise the return value is END_BUILTINS.  */
6602 enum built_in_function
6603 builtin_mathfn_code (const_tree t)
6605 const_tree fndecl, arg, parmlist;
6606 const_tree argtype, parmtype;
6607 const_call_expr_arg_iterator iter;
/* Only direct calls (ADDR_EXPR of the callee) are considered.  */
6609 if (TREE_CODE (t) != CALL_EXPR
6610 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6611 return END_BUILTINS;
6613 fndecl = get_callee_fndecl (t);
6614 if (fndecl == NULL_TREE
6615 || TREE_CODE (fndecl) != FUNCTION_DECL
6616 || ! DECL_BUILT_IN (fndecl)
6617 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6618 return END_BUILTINS;
/* Walk the declared parameter types and the actual arguments in
   lockstep, checking that each argument's type class matches the
   corresponding parameter's.  */
6620 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6621 init_const_call_expr_arg_iterator (t, &iter);
6622 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6624 /* If a function doesn't take a variable number of arguments,
6625 the last element in the list will have type `void'.  */
6626 parmtype = TREE_VALUE (parmlist);
6627 if (VOID_TYPE_P (parmtype))
/* Parameter list exhausted: extra actual arguments disqualify the
   call; otherwise it matches.  */
6629 if (more_const_call_expr_args_p (&iter))
6630 return END_BUILTINS;
6631 return DECL_FUNCTION_CODE (fndecl);
/* Too few actual arguments.  */
6634 if (! more_const_call_expr_args_p (&iter))
6635 return END_BUILTINS;
6637 arg = next_const_call_expr_arg (&iter);
6638 argtype = TREE_TYPE (arg);
6640 if (SCALAR_FLOAT_TYPE_P (parmtype))
6642 if (! SCALAR_FLOAT_TYPE_P (argtype))
6643 return END_BUILTINS;
6645 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6647 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6648 return END_BUILTINS;
6650 else if (POINTER_TYPE_P (parmtype))
6652 if (! POINTER_TYPE_P (argtype))
6653 return END_BUILTINS;
6655 else if (INTEGRAL_TYPE_P (parmtype))
6657 if (! INTEGRAL_TYPE_P (argtype))
6658 return END_BUILTINS;
/* Parameter of a type class not handled above.  */
6661 return END_BUILTINS;
6664 /* Variable-length argument list.  */
6665 return DECL_FUNCTION_CODE (fndecl);
6668 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6669 evaluate to a constant. */
6672 fold_builtin_constant_p (tree arg)
6674 /* We return 1 for a numeric type that's known to be a constant
6675 value at compile-time or for an aggregate type that's a
6676 literal constant. */
6679 /* If we know this is a constant, emit the constant of one. */
6680 if (CONSTANT_CLASS_P (arg)
6681 || (TREE_CODE (arg) == CONSTRUCTOR
6682 && TREE_CONSTANT (arg)))
6683 return integer_one_node;
6684 if (TREE_CODE (arg) == ADDR_EXPR)
6686 tree op = TREE_OPERAND (arg, 0);
6687 if (TREE_CODE (op) == STRING_CST
6688 || (TREE_CODE (op) == ARRAY_REF
6689 && integer_zerop (TREE_OPERAND (op, 1))
6690 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6691 return integer_one_node;
6694 /* If this expression has side effects, show we don't know it to be a
6695 constant. Likewise if it's a pointer or aggregate type since in
6696 those case we only want literals, since those are only optimized
6697 when generating RTL, not later.
6698 And finally, if we are compiling an initializer, not code, we
6699 need to return a definite result now; there's not going to be any
6700 more optimization done. */
6701 if (TREE_SIDE_EFFECTS (arg)
6702 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6703 || POINTER_TYPE_P (TREE_TYPE (arg))
6705 || folding_initializer)
6706 return integer_zero_node;
6711 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6712 return it as a truthvalue. */
6715 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6717 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6719 fn = built_in_decls[BUILT_IN_EXPECT];
6720 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6721 ret_type = TREE_TYPE (TREE_TYPE (fn));
6722 pred_type = TREE_VALUE (arg_types);
6723 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6725 pred = fold_convert_loc (loc, pred_type, pred);
6726 expected = fold_convert_loc (loc, expected_type, expected);
6727 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6729 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6730 build_int_cst (ret_type, 0));
6733 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6734 NULL_TREE if no simplification is possible. */
6737 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6740 enum tree_code code;
6742 /* If this is a builtin_expect within a builtin_expect keep the
6743 inner one. See through a comparison against a constant. It
6744 might have been added to create a thruthvalue. */
6746 if (COMPARISON_CLASS_P (inner)
6747 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6748 inner = TREE_OPERAND (inner, 0);
6750 if (TREE_CODE (inner) == CALL_EXPR
6751 && (fndecl = get_callee_fndecl (inner))
6752 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6753 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6756 /* Distribute the expected value over short-circuiting operators.
6757 See through the cast from truthvalue_type_node to long. */
6759 while (TREE_CODE (inner) == NOP_EXPR
6760 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6761 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6762 inner = TREE_OPERAND (inner, 0);
6764 code = TREE_CODE (inner);
6765 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6767 tree op0 = TREE_OPERAND (inner, 0);
6768 tree op1 = TREE_OPERAND (inner, 1);
6770 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6771 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6772 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6774 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6777 /* If the argument isn't invariant then there's nothing else we can do. */
6778 if (!TREE_CONSTANT (arg0))
6781 /* If we expect that a comparison against the argument will fold to
6782 a constant return the constant. In practice, this means a true
6783 constant or the address of a non-weak symbol. */
6786 if (TREE_CODE (inner) == ADDR_EXPR)
6790 inner = TREE_OPERAND (inner, 0);
6792 while (TREE_CODE (inner) == COMPONENT_REF
6793 || TREE_CODE (inner) == ARRAY_REF);
6794 if ((TREE_CODE (inner) == VAR_DECL
6795 || TREE_CODE (inner) == FUNCTION_DECL)
6796 && DECL_WEAK (inner))
6800 /* Otherwise, ARG0 already has the proper type for the return value. */
6804 /* Fold a call to __builtin_classify_type with argument ARG. */
6807 fold_builtin_classify_type (tree arg)
6810 return build_int_cst (NULL_TREE, no_type_class);
6812 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6815 /* Fold a call to __builtin_strlen with argument ARG. */
6818 fold_builtin_strlen (location_t loc, tree type, tree arg)
6820 if (!validate_arg (arg, POINTER_TYPE))
6824 tree len = c_strlen (arg, 0);
6827 return fold_convert_loc (loc, type, len);
6833 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6836 fold_builtin_inf (location_t loc, tree type, int warn)
6838 REAL_VALUE_TYPE real;
6840 /* __builtin_inff is intended to be usable to define INFINITY on all
6841 targets. If an infinity is not available, INFINITY expands "to a
6842 positive constant of type float that overflows at translation
6843 time", footnote "In this case, using INFINITY will violate the
6844 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6845 Thus we pedwarn to ensure this constraint violation is
6847 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6848 pedwarn (loc, 0, "target format does not support infinity");
6851 return build_real (type, real);
6854 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6857 fold_builtin_nan (tree arg, tree type, int quiet)
6859 REAL_VALUE_TYPE real;
6862 if (!validate_arg (arg, POINTER_TYPE))
6864 str = c_getstr (arg);
6868 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6871 return build_real (type, real);
6874 /* Return true if the floating point expression T has an integer value.
6875 We also allow +Inf, -Inf and NaN to be considered integer values. */
6878 integer_valued_real_p (tree t)
6880 switch (TREE_CODE (t))
6887 return integer_valued_real_p (TREE_OPERAND (t, 0));
6892 return integer_valued_real_p (TREE_OPERAND (t, 1));
6899 return integer_valued_real_p (TREE_OPERAND (t, 0))
6900 && integer_valued_real_p (TREE_OPERAND (t, 1));
6903 return integer_valued_real_p (TREE_OPERAND (t, 1))
6904 && integer_valued_real_p (TREE_OPERAND (t, 2));
6907 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6911 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6912 if (TREE_CODE (type) == INTEGER_TYPE)
6914 if (TREE_CODE (type) == REAL_TYPE)
6915 return integer_valued_real_p (TREE_OPERAND (t, 0));
6920 switch (builtin_mathfn_code (t))
6922 CASE_FLT_FN (BUILT_IN_CEIL):
6923 CASE_FLT_FN (BUILT_IN_FLOOR):
6924 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6925 CASE_FLT_FN (BUILT_IN_RINT):
6926 CASE_FLT_FN (BUILT_IN_ROUND):
6927 CASE_FLT_FN (BUILT_IN_TRUNC):
6930 CASE_FLT_FN (BUILT_IN_FMIN):
6931 CASE_FLT_FN (BUILT_IN_FMAX):
6932 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6933 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6946 /* FNDECL is assumed to be a builtin where truncation can be propagated
6947 across (for instance floor((double)f) == (double)floorf (f).
6948 Do the transformation for a call with argument ARG. */
6951 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6953 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6955 if (!validate_arg (arg, REAL_TYPE))
6958 /* Integer rounding functions are idempotent. */
6959 if (fcode == builtin_mathfn_code (arg))
6962 /* If argument is already integer valued, and we don't need to worry
6963 about setting errno, there's no need to perform rounding. */
6964 if (! flag_errno_math && integer_valued_real_p (arg))
6969 tree arg0 = strip_float_extensions (arg);
6970 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6971 tree newtype = TREE_TYPE (arg0);
6974 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6975 && (decl = mathfn_built_in (newtype, fcode)))
6976 return fold_convert_loc (loc, ftype,
6977 build_call_expr_loc (loc, decl, 1,
6978 fold_convert_loc (loc,
6985 /* FNDECL is assumed to be builtin which can narrow the FP type of
6986 the argument, for instance lround((double)f) -> lroundf (f).
6987 Do the transformation for a call with argument ARG. */
6990 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6992 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6994 if (!validate_arg (arg, REAL_TYPE))
6997 /* If argument is already integer valued, and we don't need to worry
6998 about setting errno, there's no need to perform rounding. */
6999 if (! flag_errno_math && integer_valued_real_p (arg))
7000 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7001 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7005 tree ftype = TREE_TYPE (arg);
7006 tree arg0 = strip_float_extensions (arg);
7007 tree newtype = TREE_TYPE (arg0);
7010 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7011 && (decl = mathfn_built_in (newtype, fcode)))
7012 return build_call_expr_loc (loc, decl, 1,
7013 fold_convert_loc (loc, newtype, arg0));
7016 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7017 sizeof (long long) == sizeof (long). */
7018 if (TYPE_PRECISION (long_long_integer_type_node)
7019 == TYPE_PRECISION (long_integer_type_node))
7021 tree newfn = NULL_TREE;
7024 CASE_FLT_FN (BUILT_IN_LLCEIL):
7025 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7028 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7029 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7032 CASE_FLT_FN (BUILT_IN_LLROUND):
7033 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7036 CASE_FLT_FN (BUILT_IN_LLRINT):
7037 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7046 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7047 return fold_convert_loc (loc,
7048 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7055 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7056 return type. Return NULL_TREE if no simplification can be made. */
7059 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7063 if (!validate_arg (arg, COMPLEX_TYPE)
7064 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7067 /* Calculate the result when the argument is a constant. */
7068 if (TREE_CODE (arg) == COMPLEX_CST
7069 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7073 if (TREE_CODE (arg) == COMPLEX_EXPR)
7075 tree real = TREE_OPERAND (arg, 0);
7076 tree imag = TREE_OPERAND (arg, 1);
7078 /* If either part is zero, cabs is fabs of the other. */
7079 if (real_zerop (real))
7080 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7081 if (real_zerop (imag))
7082 return fold_build1_loc (loc, ABS_EXPR, type, real);
7084 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7085 if (flag_unsafe_math_optimizations
7086 && operand_equal_p (real, imag, OEP_PURE_SAME))
7088 const REAL_VALUE_TYPE sqrt2_trunc
7089 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7091 return fold_build2_loc (loc, MULT_EXPR, type,
7092 fold_build1_loc (loc, ABS_EXPR, type, real),
7093 build_real (type, sqrt2_trunc));
7097 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7098 if (TREE_CODE (arg) == NEGATE_EXPR
7099 || TREE_CODE (arg) == CONJ_EXPR)
7100 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7102 /* Don't do this when optimizing for size. */
7103 if (flag_unsafe_math_optimizations
7104 && optimize && optimize_function_for_speed_p (cfun))
7106 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7108 if (sqrtfn != NULL_TREE)
7110 tree rpart, ipart, result;
7112 arg = builtin_save_expr (arg);
7114 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7115 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7117 rpart = builtin_save_expr (rpart);
7118 ipart = builtin_save_expr (ipart);
7120 result = fold_build2_loc (loc, PLUS_EXPR, type,
7121 fold_build2_loc (loc, MULT_EXPR, type,
7123 fold_build2_loc (loc, MULT_EXPR, type,
7126 return build_call_expr_loc (loc, sqrtfn, 1, result);
7133 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7134 complex tree type of the result. If NEG is true, the imaginary
7135 zero is negative. */
7138 build_complex_cproj (tree type, bool neg)
7140 REAL_VALUE_TYPE rinf, rzero = dconst0;
7144 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7145 build_real (TREE_TYPE (type), rzero));
7148 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7149 return type. Return NULL_TREE if no simplification can be made. */
7152 fold_builtin_cproj (location_t loc, tree arg, tree type)
7154 if (!validate_arg (arg, COMPLEX_TYPE)
7155 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7158 /* If there are no infinities, return arg. */
7159 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7160 return non_lvalue_loc (loc, arg);
7162 /* Calculate the result when the argument is a constant. */
7163 if (TREE_CODE (arg) == COMPLEX_CST)
7165 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7166 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7168 if (real_isinf (real) || real_isinf (imag))
7169 return build_complex_cproj (type, imag->sign);
7173 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7175 tree real = TREE_OPERAND (arg, 0);
7176 tree imag = TREE_OPERAND (arg, 1);
7181 /* If the real part is inf and the imag part is known to be
7182 nonnegative, return (inf + 0i). Remember side-effects are
7183 possible in the imag part. */
7184 if (TREE_CODE (real) == REAL_CST
7185 && real_isinf (TREE_REAL_CST_PTR (real))
7186 && tree_expr_nonnegative_p (imag))
7187 return omit_one_operand_loc (loc, type,
7188 build_complex_cproj (type, false),
7191 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7192 Remember side-effects are possible in the real part. */
7193 if (TREE_CODE (imag) == REAL_CST
7194 && real_isinf (TREE_REAL_CST_PTR (imag)))
7196 omit_one_operand_loc (loc, type,
7197 build_complex_cproj (type, TREE_REAL_CST_PTR
7198 (imag)->sign), arg);
7204 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7205 Return NULL_TREE if no simplification can be made. */
7208 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7211 enum built_in_function fcode;
7214 if (!validate_arg (arg, REAL_TYPE))
7217 /* Calculate the result when the argument is a constant. */
7218 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7221 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7222 fcode = builtin_mathfn_code (arg);
7223 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7225 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7226 arg = fold_build2_loc (loc, MULT_EXPR, type,
7227 CALL_EXPR_ARG (arg, 0),
7228 build_real (type, dconsthalf));
7229 return build_call_expr_loc (loc, expfn, 1, arg);
7232 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7233 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7235 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7239 tree arg0 = CALL_EXPR_ARG (arg, 0);
7241 /* The inner root was either sqrt or cbrt. */
7242 /* This was a conditional expression but it triggered a bug
7244 REAL_VALUE_TYPE dconstroot;
7245 if (BUILTIN_SQRT_P (fcode))
7246 dconstroot = dconsthalf;
7248 dconstroot = dconst_third ();
7250 /* Adjust for the outer root. */
7251 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7252 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7253 tree_root = build_real (type, dconstroot);
7254 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7258 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7259 if (flag_unsafe_math_optimizations
7260 && (fcode == BUILT_IN_POW
7261 || fcode == BUILT_IN_POWF
7262 || fcode == BUILT_IN_POWL))
7264 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7265 tree arg0 = CALL_EXPR_ARG (arg, 0);
7266 tree arg1 = CALL_EXPR_ARG (arg, 1);
7268 if (!tree_expr_nonnegative_p (arg0))
7269 arg0 = build1 (ABS_EXPR, type, arg0);
7270 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7271 build_real (type, dconsthalf));
7272 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7278 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7279 Return NULL_TREE if no simplification can be made. */
7282 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7284 const enum built_in_function fcode = builtin_mathfn_code (arg);
7287 if (!validate_arg (arg, REAL_TYPE))
7290 /* Calculate the result when the argument is a constant. */
7291 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7294 if (flag_unsafe_math_optimizations)
7296 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7297 if (BUILTIN_EXPONENT_P (fcode))
7299 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7300 const REAL_VALUE_TYPE third_trunc =
7301 real_value_truncate (TYPE_MODE (type), dconst_third ());
7302 arg = fold_build2_loc (loc, MULT_EXPR, type,
7303 CALL_EXPR_ARG (arg, 0),
7304 build_real (type, third_trunc));
7305 return build_call_expr_loc (loc, expfn, 1, arg);
7308 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7309 if (BUILTIN_SQRT_P (fcode))
7311 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7315 tree arg0 = CALL_EXPR_ARG (arg, 0);
7317 REAL_VALUE_TYPE dconstroot = dconst_third ();
7319 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7320 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7321 tree_root = build_real (type, dconstroot);
7322 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7326 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7327 if (BUILTIN_CBRT_P (fcode))
7329 tree arg0 = CALL_EXPR_ARG (arg, 0);
7330 if (tree_expr_nonnegative_p (arg0))
7332 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7337 REAL_VALUE_TYPE dconstroot;
7339 real_arithmetic (&dconstroot, MULT_EXPR,
7340 dconst_third_ptr (), dconst_third_ptr ());
7341 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7342 tree_root = build_real (type, dconstroot);
7343 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7348 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7349 if (fcode == BUILT_IN_POW
7350 || fcode == BUILT_IN_POWF
7351 || fcode == BUILT_IN_POWL)
7353 tree arg00 = CALL_EXPR_ARG (arg, 0);
7354 tree arg01 = CALL_EXPR_ARG (arg, 1);
7355 if (tree_expr_nonnegative_p (arg00))
7357 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7358 const REAL_VALUE_TYPE dconstroot
7359 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7360 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7361 build_real (type, dconstroot));
7362 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7369 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7370 TYPE is the type of the return value. Return NULL_TREE if no
7371 simplification can be made. */
7374 fold_builtin_cos (location_t loc,
7375 tree arg, tree type, tree fndecl)
7379 if (!validate_arg (arg, REAL_TYPE))
7382 /* Calculate the result when the argument is a constant. */
7383 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7386 /* Optimize cos(-x) into cos (x). */
7387 if ((narg = fold_strip_sign_ops (arg)))
7388 return build_call_expr_loc (loc, fndecl, 1, narg);
7393 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7394 Return NULL_TREE if no simplification can be made. */
7397 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7399 if (validate_arg (arg, REAL_TYPE))
7403 /* Calculate the result when the argument is a constant. */
7404 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7407 /* Optimize cosh(-x) into cosh (x). */
7408 if ((narg = fold_strip_sign_ops (arg)))
7409 return build_call_expr_loc (loc, fndecl, 1, narg);
7415 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7416 argument ARG. TYPE is the type of the return value. Return
7417 NULL_TREE if no simplification can be made. */
7420 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7423 if (validate_arg (arg, COMPLEX_TYPE)
7424 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7428 /* Calculate the result when the argument is a constant. */
7429 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7432 /* Optimize fn(-x) into fn(x). */
7433 if ((tmp = fold_strip_sign_ops (arg)))
7434 return build_call_expr_loc (loc, fndecl, 1, tmp);
7440 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7441 Return NULL_TREE if no simplification can be made. */
7444 fold_builtin_tan (tree arg, tree type)
7446 enum built_in_function fcode;
7449 if (!validate_arg (arg, REAL_TYPE))
7452 /* Calculate the result when the argument is a constant. */
7453 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7456 /* Optimize tan(atan(x)) = x. */
7457 fcode = builtin_mathfn_code (arg);
7458 if (flag_unsafe_math_optimizations
7459 && (fcode == BUILT_IN_ATAN
7460 || fcode == BUILT_IN_ATANF
7461 || fcode == BUILT_IN_ATANL))
7462 return CALL_EXPR_ARG (arg, 0);
7467 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7468 NULL_TREE if no simplification can be made. */
7471 fold_builtin_sincos (location_t loc,
7472 tree arg0, tree arg1, tree arg2)
7477 if (!validate_arg (arg0, REAL_TYPE)
7478 || !validate_arg (arg1, POINTER_TYPE)
7479 || !validate_arg (arg2, POINTER_TYPE))
7482 type = TREE_TYPE (arg0);
7484 /* Calculate the result when the argument is a constant. */
7485 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7488 /* Canonicalize sincos to cexpi. */
7489 if (!TARGET_C99_FUNCTIONS)
7491 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7495 call = build_call_expr_loc (loc, fn, 1, arg0);
7496 call = builtin_save_expr (call);
7498 return build2 (COMPOUND_EXPR, void_type_node,
7499 build2 (MODIFY_EXPR, void_type_node,
7500 build_fold_indirect_ref_loc (loc, arg1),
7501 build1 (IMAGPART_EXPR, type, call)),
7502 build2 (MODIFY_EXPR, void_type_node,
7503 build_fold_indirect_ref_loc (loc, arg2),
7504 build1 (REALPART_EXPR, type, call)));
7507 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7508 NULL_TREE if no simplification can be made. */
7511 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7514 tree realp, imagp, ifn;
7517 if (!validate_arg (arg0, COMPLEX_TYPE)
7518 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7521 /* Calculate the result when the argument is a constant. */
7522 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7525 rtype = TREE_TYPE (TREE_TYPE (arg0));
7527 /* In case we can figure out the real part of arg0 and it is constant zero
7529 if (!TARGET_C99_FUNCTIONS)
7531 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7535 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7536 && real_zerop (realp))
7538 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7539 return build_call_expr_loc (loc, ifn, 1, narg);
7542 /* In case we can easily decompose real and imaginary parts split cexp
7543 to exp (r) * cexpi (i). */
7544 if (flag_unsafe_math_optimizations
7547 tree rfn, rcall, icall;
7549 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7553 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7557 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7558 icall = builtin_save_expr (icall);
7559 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7560 rcall = builtin_save_expr (rcall);
7561 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7562 fold_build2_loc (loc, MULT_EXPR, rtype,
7564 fold_build1_loc (loc, REALPART_EXPR,
7566 fold_build2_loc (loc, MULT_EXPR, rtype,
7568 fold_build1_loc (loc, IMAGPART_EXPR,
7575 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7576 Return NULL_TREE if no simplification can be made. */
7579 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7581 if (!validate_arg (arg, REAL_TYPE))
7584 /* Optimize trunc of constant value. */
7585 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7587 REAL_VALUE_TYPE r, x;
7588 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7590 x = TREE_REAL_CST (arg);
7591 real_trunc (&r, TYPE_MODE (type), &x);
7592 return build_real (type, r);
7595 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7598 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7599 Return NULL_TREE if no simplification can be made. */
7602 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7604 if (!validate_arg (arg, REAL_TYPE))
7607 /* Optimize floor of constant value. */
7608 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7612 x = TREE_REAL_CST (arg);
7613 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7615 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7618 real_floor (&r, TYPE_MODE (type), &x);
7619 return build_real (type, r);
7623 /* Fold floor (x) where x is nonnegative to trunc (x). */
7624 if (tree_expr_nonnegative_p (arg))
7626 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7628 return build_call_expr_loc (loc, truncfn, 1, arg);
7631 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7634 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7635 Return NULL_TREE if no simplification can be made. */
7638 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7640 if (!validate_arg (arg, REAL_TYPE))
7643 /* Optimize ceil of constant value. */
7644 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7648 x = TREE_REAL_CST (arg);
7649 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7651 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7654 real_ceil (&r, TYPE_MODE (type), &x);
7655 return build_real (type, r);
7659 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7662 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7663 Return NULL_TREE if no simplification can be made. */
7666 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7668 if (!validate_arg (arg, REAL_TYPE))
7671 /* Optimize round of constant value. */
7672 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7676 x = TREE_REAL_CST (arg);
7677 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7679 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7682 real_round (&r, TYPE_MODE (type), &x);
7683 return build_real (type, r);
7687 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7690 /* Fold function call to builtin lround, lroundf or lroundl (or the
7691 corresponding long long versions) and other rounding functions. ARG
7692 is the argument to the call. Return NULL_TREE if no simplification
7696 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7698 if (!validate_arg (arg, REAL_TYPE))
7701 /* Optimize lround of constant value. */
7702 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7704 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7706 if (real_isfinite (&x))
7708 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7709 tree ftype = TREE_TYPE (arg);
7713 switch (DECL_FUNCTION_CODE (fndecl))
7715 CASE_FLT_FN (BUILT_IN_LFLOOR):
7716 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7717 real_floor (&r, TYPE_MODE (ftype), &x);
7720 CASE_FLT_FN (BUILT_IN_LCEIL):
7721 CASE_FLT_FN (BUILT_IN_LLCEIL):
7722 real_ceil (&r, TYPE_MODE (ftype), &x);
7725 CASE_FLT_FN (BUILT_IN_LROUND):
7726 CASE_FLT_FN (BUILT_IN_LLROUND):
7727 real_round (&r, TYPE_MODE (ftype), &x);
7734 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7735 if (double_int_fits_to_tree_p (itype, val))
7736 return double_int_to_tree (itype, val);
7740 switch (DECL_FUNCTION_CODE (fndecl))
7742 CASE_FLT_FN (BUILT_IN_LFLOOR):
7743 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7744 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7745 if (tree_expr_nonnegative_p (arg))
7746 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7747 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7752 return fold_fixed_mathfn (loc, fndecl, arg);
7755 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7756 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7757 the argument to the call. Return NULL_TREE if no simplification can
7761 fold_builtin_bitop (tree fndecl, tree arg)
7763 if (!validate_arg (arg, INTEGER_TYPE))
7766 /* Optimize for constant argument. */
7767 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7769 HOST_WIDE_INT hi, width, result;
7770 unsigned HOST_WIDE_INT lo;
7773 type = TREE_TYPE (arg);
7774 width = TYPE_PRECISION (type);
7775 lo = TREE_INT_CST_LOW (arg);
7777 /* Clear all the bits that are beyond the type's precision. */
7778 if (width > HOST_BITS_PER_WIDE_INT)
7780 hi = TREE_INT_CST_HIGH (arg);
7781 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7782 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7787 if (width < HOST_BITS_PER_WIDE_INT)
7788 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7791 switch (DECL_FUNCTION_CODE (fndecl))
7793 CASE_INT_FN (BUILT_IN_FFS):
7795 result = ffs_hwi (lo);
7797 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7802 CASE_INT_FN (BUILT_IN_CLZ):
7804 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7806 result = width - floor_log2 (lo) - 1;
7807 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7811 CASE_INT_FN (BUILT_IN_CTZ):
7813 result = ctz_hwi (lo);
7815 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7816 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7820 CASE_INT_FN (BUILT_IN_POPCOUNT):
7823 result++, lo &= lo - 1;
7825 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7828 CASE_INT_FN (BUILT_IN_PARITY):
7831 result++, lo &= lo - 1;
7833 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7841 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7847 /* Fold function call to builtin_bswap and the long and long long
7848 variants. Return NULL_TREE if no simplification can be made. */
7850 fold_builtin_bswap (tree fndecl, tree arg)
7852 if (! validate_arg (arg, INTEGER_TYPE))
7855 /* Optimize constant value. */
7856 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7858 HOST_WIDE_INT hi, width, r_hi = 0;
7859 unsigned HOST_WIDE_INT lo, r_lo = 0;
7862 type = TREE_TYPE (arg);
7863 width = TYPE_PRECISION (type);
7864 lo = TREE_INT_CST_LOW (arg);
7865 hi = TREE_INT_CST_HIGH (arg);
7867 switch (DECL_FUNCTION_CODE (fndecl))
7869 case BUILT_IN_BSWAP32:
7870 case BUILT_IN_BSWAP64:
7874 for (s = 0; s < width; s += 8)
7876 int d = width - s - 8;
7877 unsigned HOST_WIDE_INT byte;
7879 if (s < HOST_BITS_PER_WIDE_INT)
7880 byte = (lo >> s) & 0xff;
7882 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7884 if (d < HOST_BITS_PER_WIDE_INT)
7887 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7897 if (width < HOST_BITS_PER_WIDE_INT)
7898 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7900 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7906 /* A subroutine of fold_builtin to fold the various logarithmic
7907 functions. Return NULL_TREE if no simplification can be made.
7908 FUNC is the corresponding MPFR logarithm function. */
7911 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7912 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7914 if (validate_arg (arg, REAL_TYPE))
7916 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7918 const enum built_in_function fcode = builtin_mathfn_code (arg);
7920 /* Calculate the result when the argument is a constant. */
7921 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7924 /* Special case, optimize logN(expN(x)) = x.  FUNC identifies which
   logarithm we are folding, so only the matching exponential of the
   same base may be cancelled. */
7925 if (flag_unsafe_math_optimizations
7926 && ((func == mpfr_log
7927 && (fcode == BUILT_IN_EXP
7928 || fcode == BUILT_IN_EXPF
7929 || fcode == BUILT_IN_EXPL))
7930 || (func == mpfr_log2
7931 && (fcode == BUILT_IN_EXP2
7932 || fcode == BUILT_IN_EXP2F
7933 || fcode == BUILT_IN_EXP2L))
7934 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7935 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7937 /* Optimize logN(func()) for various exponential functions. We
7938 want to determine the value "x" and the power "exponent" in
7939 order to transform logN(x**exponent) into exponent*logN(x). */
7940 if (flag_unsafe_math_optimizations)
7942 tree exponent = 0, x = 0;
/* Each case below decomposes the inner call into base X and
   EXPONENT; the code after the switch emits exponent * logN(x). */
7946 CASE_FLT_FN (BUILT_IN_EXP):
7947 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7948 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7950 exponent = CALL_EXPR_ARG (arg, 0);
7952 CASE_FLT_FN (BUILT_IN_EXP2):
7953 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7954 x = build_real (type, dconst2);
7955 exponent = CALL_EXPR_ARG (arg, 0);
7957 CASE_FLT_FN (BUILT_IN_EXP10):
7958 CASE_FLT_FN (BUILT_IN_POW10):
7959 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7961 REAL_VALUE_TYPE dconst10;
7962 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7963 x = build_real (type, dconst10);
7965 exponent = CALL_EXPR_ARG (arg, 0);
7967 CASE_FLT_FN (BUILT_IN_SQRT):
7968 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7969 x = CALL_EXPR_ARG (arg, 0);
7970 exponent = build_real (type, dconsthalf);
7972 CASE_FLT_FN (BUILT_IN_CBRT):
7973 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7974 x = CALL_EXPR_ARG (arg, 0);
7975 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7978 CASE_FLT_FN (BUILT_IN_POW):
7979 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7980 x = CALL_EXPR_ARG (arg, 0);
7981 exponent = CALL_EXPR_ARG (arg, 1);
7987 /* Now perform the optimization. */
7990 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7991 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7999 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8000 NULL_TREE if no simplification can be made. */
8003 fold_builtin_hypot (location_t loc, tree fndecl,
8004 tree arg0, tree arg1, tree type)
8006 tree res, narg0, narg1;
8008 if (!validate_arg (arg0, REAL_TYPE)
8009 || !validate_arg (arg1, REAL_TYPE))
8012 /* Calculate the result when the argument is a constant. */
8013 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8016 /* If either argument to hypot has a negate or abs, strip that off.
8017 E.g. hypot(-x,fabs(y)) -> hypot(x,y).
   fold_strip_sign_ops returns NULL_TREE when there was nothing to
   strip, hence the narg ? narg : arg fallbacks below. */
8018 narg0 = fold_strip_sign_ops (arg0);
8019 narg1 = fold_strip_sign_ops (arg1);
8022 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8023 narg1 ? narg1 : arg1);
8026 /* If either argument is zero, hypot is fabs of the other. */
8027 if (real_zerop (arg0))
8028 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8029 else if (real_zerop (arg1))
8030 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8032 /* hypot(x,x) -> fabs(x)*sqrt(2).  sqrt(2) is truncated to the
   precision of TYPE before building the constant. */
8033 if (flag_unsafe_math_optimizations
8034 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8036 const REAL_VALUE_TYPE sqrt2_trunc
8037 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8038 return fold_build2_loc (loc, MULT_EXPR, type,
8039 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8040 build_real (type, sqrt2_trunc));
8047 /* Fold a builtin function call to pow, powf, or powl. Return
8048 NULL_TREE if no simplification can be made. */
8050 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8054 if (!validate_arg (arg0, REAL_TYPE)
8055 || !validate_arg (arg1, REAL_TYPE))
8058 /* Calculate the result when the argument is a constant. */
8059 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8062 /* Optimize pow(1.0,y) = 1.0. */
8063 if (real_onep (arg0))
8064 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* The simplifications below all require a constant exponent. */
8066 if (TREE_CODE (arg1) == REAL_CST
8067 && !TREE_OVERFLOW (arg1))
8069 REAL_VALUE_TYPE cint;
8073 c = TREE_REAL_CST (arg1);
8075 /* Optimize pow(x,0.0) = 1.0. */
8076 if (REAL_VALUES_EQUAL (c, dconst0))
8077 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8080 /* Optimize pow(x,1.0) = x. */
8081 if (REAL_VALUES_EQUAL (c, dconst1))
8084 /* Optimize pow(x,-1.0) = 1.0/x. */
8085 if (REAL_VALUES_EQUAL (c, dconstm1))
8086 return fold_build2_loc (loc, RDIV_EXPR, type,
8087 build_real (type, dconst1), arg0);
8089 /* Optimize pow(x,0.5) = sqrt(x). */
8090 if (flag_unsafe_math_optimizations
8091 && REAL_VALUES_EQUAL (c, dconsthalf))
8093 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8095 if (sqrtfn != NULL_TREE)
8096 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8099 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8100 if (flag_unsafe_math_optimizations)
8102 const REAL_VALUE_TYPE dconstroot
8103 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8105 if (REAL_VALUES_EQUAL (c, dconstroot))
8107 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8108 if (cbrtfn != NULL_TREE)
8109 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8113 /* Check for an integer exponent.  Round-trip C through an integer
   and compare bit-for-bit to decide whether it is integral. */
8114 n = real_to_integer (&c);
8115 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8116 if (real_identical (&c, &cint))
8118 /* Attempt to evaluate pow at compile-time, unless this should
8119 raise an exception. */
8120 if (TREE_CODE (arg0) == REAL_CST
8121 && !TREE_OVERFLOW (arg0)
8123 || (!flag_trapping_math && !flag_errno_math)
8124 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8129 x = TREE_REAL_CST (arg0);
/* real_powi reports whether the result was inexact; only fold an
   inexact result under -funsafe-math-optimizations. */
8130 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8131 if (flag_unsafe_math_optimizations || !inexact)
8132 return build_real (type, x);
8135 /* Strip sign ops from even integer powers. */
8136 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8138 tree narg0 = fold_strip_sign_ops (arg0);
8140 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8145 if (flag_unsafe_math_optimizations)
8147 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8149 /* Optimize pow(expN(x),y) = expN(x*y). */
8150 if (BUILTIN_EXPONENT_P (fcode))
8152 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8153 tree arg = CALL_EXPR_ARG (arg0, 0);
8154 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8155 return build_call_expr_loc (loc, expfn, 1, arg);
8158 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8159 if (BUILTIN_SQRT_P (fcode))
8161 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8162 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8163 build_real (type, dconsthalf));
8164 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8167 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8168 if (BUILTIN_CBRT_P (fcode))
8170 tree arg = CALL_EXPR_ARG (arg0, 0);
8171 if (tree_expr_nonnegative_p (arg))
8173 const REAL_VALUE_TYPE dconstroot
8174 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8175 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8176 build_real (type, dconstroot));
8177 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8181 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8182 if (fcode == BUILT_IN_POW
8183 || fcode == BUILT_IN_POWF
8184 || fcode == BUILT_IN_POWL)
8186 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8187 if (tree_expr_nonnegative_p (arg00))
8189 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8190 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8191 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8199 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8200 Return NULL_TREE if no simplification can be made. */
8202 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8203 tree arg0, tree arg1, tree type)
8205 if (!validate_arg (arg0, REAL_TYPE)
8206 || !validate_arg (arg1, INTEGER_TYPE))
8209 /* Optimize pow(1.0,y) = 1.0. */
8210 if (real_onep (arg0))
8211 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* The integer exponent must fit in a signed HOST_WIDE_INT for the
   constant-exponent simplifications below. */
8213 if (host_integerp (arg1, 0))
8215 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8217 /* Evaluate powi at compile-time. */
8218 if (TREE_CODE (arg0) == REAL_CST
8219 && !TREE_OVERFLOW (arg0))
8222 x = TREE_REAL_CST (arg0);
8223 real_powi (&x, TYPE_MODE (type), &x, c);
8224 return build_real (type, x);
8227 /* Optimize pow(x,0) = 1.0. */
8229 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8232 /* Optimize pow(x,1) = x. */
8236 /* Optimize pow(x,-1) = 1.0/x. */
8238 return fold_build2_loc (loc, RDIV_EXPR, type,
8239 build_real (type, dconst1), arg0);
8245 /* A subroutine of fold_builtin to fold the various exponent
8246 functions. Return NULL_TREE if no simplification can be made.
8247 FUNC is the corresponding MPFR exponent function. */
8250 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8251 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8253 if (validate_arg (arg, REAL_TYPE))
8255 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8258 /* Calculate the result when the argument is a constant. */
8259 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8262 /* Optimize expN(logN(x)) = x.  FUNC identifies the exponential
   being folded, so only the logarithm of the same base cancels. */
8263 if (flag_unsafe_math_optimizations)
8265 const enum built_in_function fcode = builtin_mathfn_code (arg);
8267 if ((func == mpfr_exp
8268 && (fcode == BUILT_IN_LOG
8269 || fcode == BUILT_IN_LOGF
8270 || fcode == BUILT_IN_LOGL))
8271 || (func == mpfr_exp2
8272 && (fcode == BUILT_IN_LOG2
8273 || fcode == BUILT_IN_LOG2F
8274 || fcode == BUILT_IN_LOG2L))
8275 || (func == mpfr_exp10
8276 && (fcode == BUILT_IN_LOG10
8277 || fcode == BUILT_IN_LOG10F
8278 || fcode == BUILT_IN_LOG10L)))
8279 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8286 /* Return true if VAR is a VAR_DECL or a component thereof. */
8289 var_decl_component_p (tree var)
/* Strip COMPONENT_REF/ARRAY_REF etc. wrappers down to the base object
   and test whether that base is an SSA variable/decl. */
8292 while (handled_component_p (inner))
8293 inner = TREE_OPERAND (inner, 0);
8294 return SSA_VAR_P (inner);
8297 /* Fold function call to builtin memset. Return
8298 NULL_TREE if no simplification can be made. */
8301 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8302 tree type, bool ignore)
8304 tree var, ret, etype;
8305 unsigned HOST_WIDE_INT length, cval;
8307 if (! validate_arg (dest, POINTER_TYPE)
8308 || ! validate_arg (c, INTEGER_TYPE)
8309 || ! validate_arg (len, INTEGER_TYPE))
8312 if (! host_integerp (len, 1))
8315 /* If the LEN parameter is zero, return DEST. */
8316 if (integer_zerop (len))
8317 return omit_one_operand_loc (loc, type, dest, c);
8319 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
/* Below: try to turn the memset into a single scalar store when DEST
   is the address of a whole, sufficiently aligned scalar object. */
8324 if (TREE_CODE (var) != ADDR_EXPR)
8327 var = TREE_OPERAND (var, 0);
8328 if (TREE_THIS_VOLATILE (var))
8331 etype = TREE_TYPE (var);
8332 if (TREE_CODE (etype) == ARRAY_TYPE)
8333 etype = TREE_TYPE (etype);
8335 if (!INTEGRAL_TYPE_P (etype)
8336 && !POINTER_TYPE_P (etype))
8339 if (! var_decl_component_p (var))
/* LEN must exactly cover the object and alignment must suffice. */
8342 length = tree_low_cst (len, 1);
8343 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8344 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8348 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8351 if (integer_zerop (c))
/* Only handle the common 8-bit-byte host/target combination. */
8355 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8358 cval = TREE_INT_CST_LOW (c);
/* Replicate the fill byte across the word; the double shift avoids
   an undefined shift by >= the width of the type. */
8362 cval |= (cval << 31) << 1;
8365 ret = build_int_cst_type (etype, cval);
8366 var = build_fold_indirect_ref_loc (loc,
8367 fold_convert_loc (loc,
8368 build_pointer_type (etype),
8370 ret = build2 (MODIFY_EXPR, etype, var, ret);
8374 return omit_one_operand_loc (loc, type, dest, ret);
8377 /* Fold function call to builtin bzero. Return
8378 NULL_TREE if no simplification can be made. */
8381 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8383 if (! validate_arg (dest, POINTER_TYPE)
8384 || ! validate_arg (size, INTEGER_TYPE))
8390 /* New argument list transforming bzero(ptr x, int y) to
8391 memset(ptr x, int 0, size_t y). This is done this way
8392 so that if it isn't expanded inline, we fallback to
8393 calling bzero instead of memset. */
8395 return fold_builtin_memset (loc, dest, integer_zero_node,
8396 fold_convert_loc (loc, sizetype, size),
8397 void_type_node, ignore);
8400 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8401 NULL_TREE if no simplification can be made.
8402 If ENDP is 0, return DEST (like memcpy).
8403 If ENDP is 1, return DEST+LEN (like mempcpy).
8404 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8405 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8409 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8410 tree len, tree type, bool ignore, int endp)
8412 tree destvar, srcvar, expr;
8414 if (! validate_arg (dest, POINTER_TYPE)
8415 || ! validate_arg (src, POINTER_TYPE)
8416 || ! validate_arg (len, INTEGER_TYPE))
8419 /* If the LEN parameter is zero, return DEST. */
8420 if (integer_zerop (len))
8421 return omit_one_operand_loc (loc, type, dest, src);
8423 /* If SRC and DEST are the same (and not volatile), return
8424 DEST{,+LEN,+LEN-1}. */
8425 if (operand_equal_p (src, dest, 0))
/* The branch below handles the memmove-like case (ENDP == 3): prove
   the regions cannot overlap and then downgrade to memcpy. */
8429 tree srctype, desttype;
8430 unsigned int src_align, dest_align;
8435 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8436 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8438 /* Both DEST and SRC must be pointer types.
8439 ??? This is what old code did. Is the testing for pointer types
8442 If either SRC is readonly or length is 1, we can use memcpy. */
8443 if (!dest_align || !src_align)
8445 if (readonly_data_expr (src)
8446 || (host_integerp (len, 1)
8447 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8448 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8450 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8453 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8456 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8457 if (TREE_CODE (src) == ADDR_EXPR
8458 && TREE_CODE (dest) == ADDR_EXPR)
8460 tree src_base, dest_base, fn;
8461 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8462 HOST_WIDE_INT size = -1;
8463 HOST_WIDE_INT maxsize = -1;
8465 srcvar = TREE_OPERAND (src, 0);
8466 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8468 destvar = TREE_OPERAND (dest, 0);
8469 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8471 if (host_integerp (len, 1))
8472 maxsize = tree_low_cst (len, 1);
/* get_ref_base_and_extent works in bits; convert to bytes. */
8475 src_offset /= BITS_PER_UNIT;
8476 dest_offset /= BITS_PER_UNIT;
8477 if (SSA_VAR_P (src_base)
8478 && SSA_VAR_P (dest_base))
8480 if (operand_equal_p (src_base, dest_base, 0)
8481 && ranges_overlap_p (src_offset, maxsize,
8482 dest_offset, maxsize))
8485 else if (TREE_CODE (src_base) == MEM_REF
8486 && TREE_CODE (dest_base) == MEM_REF)
/* Same underlying pointer: fold the MEM_REF offsets into the byte
   offsets (checking for HOST_WIDE_INT overflow) before the range
   overlap test. */
8489 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8490 TREE_OPERAND (dest_base, 0), 0))
8492 off = double_int_add (mem_ref_offset (src_base),
8493 shwi_to_double_int (src_offset));
8494 if (!double_int_fits_in_shwi_p (off))
8496 src_offset = off.low;
8497 off = double_int_add (mem_ref_offset (dest_base),
8498 shwi_to_double_int (dest_offset));
8499 if (!double_int_fits_in_shwi_p (off))
8501 dest_offset = off.low;
8502 if (ranges_overlap_p (src_offset, maxsize,
8503 dest_offset, maxsize))
8509 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8512 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8515 /* If the destination and source do not alias optimize into
8517 if ((is_gimple_min_invariant (dest)
8518 || TREE_CODE (dest) == SSA_NAME)
8519 && (is_gimple_min_invariant (src)
8520 || TREE_CODE (src) == SSA_NAME))
8523 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8524 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8525 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8528 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8531 return build_call_expr_loc (loc, fn, 3, dest, src, len);
/* From here on: attempt to replace the copy with a single scalar
   load/store pair when LEN equals the size of the pointed-to type. */
8538 if (!host_integerp (len, 0))
8541 This logic lose for arguments like (type *)malloc (sizeof (type)),
8542 since we strip the casts of up to VOID return value from malloc.
8543 Perhaps we ought to inherit type from non-VOID argument here? */
8546 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8547 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8549 tree tem = TREE_OPERAND (src, 0);
8551 if (tem != TREE_OPERAND (src, 0))
8552 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8554 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8556 tree tem = TREE_OPERAND (dest, 0);
8558 if (tem != TREE_OPERAND (dest, 0))
8559 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* Peel one array level off the pointed-to types when LEN does not
   match the whole array size. */
8561 srctype = TREE_TYPE (TREE_TYPE (src));
8563 && TREE_CODE (srctype) == ARRAY_TYPE
8564 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8566 srctype = TREE_TYPE (srctype);
8568 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8570 desttype = TREE_TYPE (TREE_TYPE (dest));
8572 && TREE_CODE (desttype) == ARRAY_TYPE
8573 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8575 desttype = TREE_TYPE (desttype);
8577 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8579 if (!srctype || !desttype
8580 || TREE_ADDRESSABLE (srctype)
8581 || TREE_ADDRESSABLE (desttype)
8582 || !TYPE_SIZE_UNIT (srctype)
8583 || !TYPE_SIZE_UNIT (desttype)
8584 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8585 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8588 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8589 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8590 if (dest_align < TYPE_ALIGN (desttype)
8591 || src_align < TYPE_ALIGN (srctype))
8595 dest = builtin_save_expr (dest);
8597 /* Build accesses at offset zero with a ref-all character type. */
8598 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8599 ptr_mode, true), 0);
8602 STRIP_NOPS (destvar);
8603 if (TREE_CODE (destvar) == ADDR_EXPR
8604 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8605 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8606 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8608 destvar = NULL_TREE;
8611 STRIP_NOPS (srcvar);
8612 if (TREE_CODE (srcvar) == ADDR_EXPR
8613 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8614 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8617 || src_align >= TYPE_ALIGN (desttype))
8618 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8620 else if (!STRICT_ALIGNMENT)
8622 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8624 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
/* If only one side resolved to a variable, synthesize the other
   side's access from the raw pointer, respecting alignment. */
8632 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8635 if (srcvar == NULL_TREE)
8638 if (src_align >= TYPE_ALIGN (desttype))
8639 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8642 if (STRICT_ALIGNMENT)
8644 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8646 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8649 else if (destvar == NULL_TREE)
8652 if (dest_align >= TYPE_ALIGN (srctype))
8653 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8656 if (STRICT_ALIGNMENT)
8658 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8660 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
/* The whole copy becomes a single scalar assignment. */
8664 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8670 if (endp == 0 || endp == 3)
8671 return omit_one_operand_loc (loc, type, dest, expr);
/* mempcpy/stpcpy-style results: return DEST advanced by LEN
   (adjusted per ENDP), sequenced after the store. */
8677 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8680 len = fold_convert_loc (loc, sizetype, len);
8681 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8682 dest = fold_convert_loc (loc, type, dest);
8684 dest = omit_one_operand_loc (loc, type, dest, expr);
8688 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8689 If LEN is not NULL, it represents the length of the string to be
8690 copied. Return NULL_TREE if no simplification can be made. */
8693 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8697 if (!validate_arg (dest, POINTER_TYPE)
8698 || !validate_arg (src, POINTER_TYPE))
8701 /* If SRC and DEST are the same (and not volatile), return DEST. */
8702 if (operand_equal_p (src, dest, 0))
8703 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* Do not expand into memcpy when optimizing for size. */
8705 if (optimize_function_for_size_p (cfun))
8708 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8714 len = c_strlen (src, 1);
8715 if (! len || TREE_SIDE_EFFECTS (len))
/* Transform into memcpy(dest, src, strlen(src) + 1) — the +1 copies
   the terminating NUL. */
8719 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8720 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8721 build_call_expr_loc (loc, fn, 3, dest, src, len));
8724 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8725 Return NULL_TREE if no simplification can be made. */
8728 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8730 tree fn, len, lenp1, call, type;
8732 if (!validate_arg (dest, POINTER_TYPE)
8733 || !validate_arg (src, POINTER_TYPE))
8736 len = c_strlen (src, 1);
8738 || TREE_CODE (len) != INTEGER_CST)
8741 if (optimize_function_for_size_p (cfun)
8742 /* If length is zero it's small enough. */
8743 && !integer_zerop (len))
8746 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* memcpy copies LEN + 1 bytes to include the terminating NUL. */
8750 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8751 /* We use dest twice in building our expression. Save it from
8752 multiple expansions. */
8753 dest = builtin_save_expr (dest);
8754 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
/* stpcpy returns DEST + LEN (a pointer to the NUL), sequenced after
   the copy. */
8756 type = TREE_TYPE (TREE_TYPE (fndecl));
8757 len = fold_convert_loc (loc, sizetype, len);
8758 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8759 dest = fold_convert_loc (loc, type, dest);
8760 dest = omit_one_operand_loc (loc, type, dest, call);
8764 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8765 If SLEN is not NULL, it represents the length of the source string.
8766 Return NULL_TREE if no simplification can be made. */
8769 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8770 tree src, tree len, tree slen)
8774 if (!validate_arg (dest, POINTER_TYPE)
8775 || !validate_arg (src, POINTER_TYPE)
8776 || !validate_arg (len, INTEGER_TYPE))
8779 /* If the LEN parameter is zero, return DEST. */
8780 if (integer_zerop (len))
8781 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8783 /* We can't compare slen with len as constants below if len is not a
8785 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8789 slen = c_strlen (src, 1);
8791 /* Now, we must be passed a constant src ptr parameter. */
8792 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* SLEN + 1 accounts for the terminating NUL of the source string. */
8795 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8797 /* We do not support simplification of this case, though we do
8798 support it when expanding trees into RTL. */
8799 /* FIXME: generate a call to __builtin_memset. */
8800 if (tree_int_cst_lt (slen, len))
8803 /* OK transform into builtin memcpy. */
8804 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8807 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8808 build_call_expr_loc (loc, fn, 3, dest, src, len));
8811 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8812 arguments to the call, and TYPE is its return type.
8813 Return NULL_TREE if no simplification can be made. */
8816 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8818 if (!validate_arg (arg1, POINTER_TYPE)
8819 || !validate_arg (arg2, INTEGER_TYPE)
8820 || !validate_arg (len, INTEGER_TYPE))
8826 if (TREE_CODE (arg2) != INTEGER_CST
8827 || !host_integerp (len, 1))
8830 p1 = c_getstr (arg1);
/* Evaluate at compile time when the haystack is a constant string
   and LEN does not read past its terminating NUL. */
8831 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* target_char_cast converts ARG2 to the target character set; bail
   out if that fails. */
8837 if (target_char_cast (arg2, &c))
8840 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8843 return build_int_cst (TREE_TYPE (arg1), 0);
8845 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8847 return fold_convert_loc (loc, type, tem);
8853 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8854 Return NULL_TREE if no simplification can be made. */
8857 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8859 const char *p1, *p2;
8861 if (!validate_arg (arg1, POINTER_TYPE)
8862 || !validate_arg (arg2, POINTER_TYPE)
8863 || !validate_arg (len, INTEGER_TYPE))
8866 /* If the LEN parameter is zero, return zero. */
8867 if (integer_zerop (len))
8868 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8871 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8872 if (operand_equal_p (arg1, arg2, 0))
8873 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8875 p1 = c_getstr (arg1);
8876 p2 = c_getstr (arg2);
8878 /* If all arguments are constant, and the value of len is not greater
8879 than the lengths of arg1 and arg2, evaluate at compile-time. */
8880 if (host_integerp (len, 1) && p1 && p2
8881 && compare_tree_int (len, strlen (p1) + 1) <= 0
8882 && compare_tree_int (len, strlen (p2) + 1) <= 0)
/* Normalize the host memcmp result to -1/0/1. */
8884 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8887 return integer_one_node;
8889 return integer_minus_one_node;
8891 return integer_zero_node;
8894 /* If len parameter is one, return an expression corresponding to
8895 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8896 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8898 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8899 tree cst_uchar_ptr_node
8900 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8903 = fold_convert_loc (loc, integer_type_node,
8904 build1 (INDIRECT_REF, cst_uchar_node,
8905 fold_convert_loc (loc,
8909 = fold_convert_loc (loc, integer_type_node,
8910 build1 (INDIRECT_REF, cst_uchar_node,
8911 fold_convert_loc (loc,
8914 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8920 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8921 Return NULL_TREE if no simplification can be made. */
8924 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8926 const char *p1, *p2;
8928 if (!validate_arg (arg1, POINTER_TYPE)
8929 || !validate_arg (arg2, POINTER_TYPE))
8932 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8933 if (operand_equal_p (arg1, arg2, 0))
8934 return integer_zero_node;
8936 p1 = c_getstr (arg1);
8937 p2 = c_getstr (arg2);
/* Both strings constant: evaluate at compile time, normalizing the
   host strcmp result to -1/0/1. */
8941 const int i = strcmp (p1, p2);
8943 return integer_minus_one_node;
8945 return integer_one_node;
8947 return integer_zero_node;
8950 /* If the second arg is "", return *(const unsigned char*)arg1. */
8951 if (p2 && *p2 == '\0')
8953 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8954 tree cst_uchar_ptr_node
8955 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8957 return fold_convert_loc (loc, integer_type_node,
8958 build1 (INDIRECT_REF, cst_uchar_node,
8959 fold_convert_loc (loc,
8964 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8965 if (p1 && *p1 == '\0')
8967 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8968 tree cst_uchar_ptr_node
8969 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8972 = fold_convert_loc (loc, integer_type_node,
8973 build1 (INDIRECT_REF, cst_uchar_node,
8974 fold_convert_loc (loc,
8977 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8983 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8984 Return NULL_TREE if no simplification can be made. */
8987 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8989 const char *p1, *p2;
8991 if (!validate_arg (arg1, POINTER_TYPE)
8992 || !validate_arg (arg2, POINTER_TYPE)
8993 || !validate_arg (len, INTEGER_TYPE))
8996 /* If the LEN parameter is zero, return zero. */
8997 if (integer_zerop (len))
8998 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9001 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9002 if (operand_equal_p (arg1, arg2, 0))
9003 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9005 p1 = c_getstr (arg1);
9006 p2 = c_getstr (arg2);
/* Both strings and LEN constant: evaluate at compile time,
   normalizing the host strncmp result to -1/0/1. */
9008 if (host_integerp (len, 1) && p1 && p2)
9010 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9012 return integer_one_node;
9014 return integer_minus_one_node;
9016 return integer_zero_node;
9019 /* If the second arg is "", and the length is greater than zero,
9020 return *(const unsigned char*)arg1. */
9021 if (p2 && *p2 == '\0'
9022 && TREE_CODE (len) == INTEGER_CST
9023 && tree_int_cst_sgn (len) == 1)
9025 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9026 tree cst_uchar_ptr_node
9027 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9029 return fold_convert_loc (loc, integer_type_node,
9030 build1 (INDIRECT_REF, cst_uchar_node,
9031 fold_convert_loc (loc,
9036 /* If the first arg is "", and the length is greater than zero,
9037 return -*(const unsigned char*)arg2. */
9038 if (p1 && *p1 == '\0'
9039 && TREE_CODE (len) == INTEGER_CST
9040 && tree_int_cst_sgn (len) == 1)
9042 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9043 tree cst_uchar_ptr_node
9044 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9046 tree temp = fold_convert_loc (loc, integer_type_node,
9047 build1 (INDIRECT_REF, cst_uchar_node,
9048 fold_convert_loc (loc,
9051 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9054 /* If len parameter is one, return an expression corresponding to
9055 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9056 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9058 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9059 tree cst_uchar_ptr_node
9060 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9062 tree ind1 = fold_convert_loc (loc, integer_type_node,
9063 build1 (INDIRECT_REF, cst_uchar_node,
9064 fold_convert_loc (loc,
9067 tree ind2 = fold_convert_loc (loc, integer_type_node,
9068 build1 (INDIRECT_REF, cst_uchar_node,
9069 fold_convert_loc (loc,
9072 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9078 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9079 ARG. Return NULL_TREE if no simplification can be made. */
9082 fold_builtin_signbit (location_t loc, tree arg, tree type)
9084 if (!validate_arg (arg, REAL_TYPE))
9087 /* If ARG is a compile-time constant, determine the result. */
9088 if (TREE_CODE (arg) == REAL_CST
9089 && !TREE_OVERFLOW (arg))
9093 c = TREE_REAL_CST (arg);
9094 return (REAL_VALUE_NEGATIVE (c)
9095 ? build_one_cst (type)
9096 : build_zero_cst (type))
9099 /* If ARG is non-negative, the result is always zero. */
9100 if (tree_expr_nonnegative_p (arg))
9101 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9103 /* If ARG's format doesn't have signed zeros, return "arg < 0.0".
   (With signed zeros, -0.0 < 0.0 is false but signbit(-0.0) is set,
   so the transformation would be wrong.) */
9104 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9105 return fold_build2_loc (loc, LT_EXPR, type, arg,
9106 build_real (TREE_TYPE (arg), dconst0));
9111 /* Fold function call to builtin copysign, copysignf or copysignl with
9112 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9116 fold_builtin_copysign (location_t loc, tree fndecl,
9117 tree arg1, tree arg2, tree type)
9121 if (!validate_arg (arg1, REAL_TYPE)
9122 || !validate_arg (arg2, REAL_TYPE))
9125 /* copysign(X,X) is X. */
9126 if (operand_equal_p (arg1, arg2, 0))
9127 return fold_convert_loc (loc, type, arg1);
9129 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9130 if (TREE_CODE (arg1) == REAL_CST
9131 && TREE_CODE (arg2) == REAL_CST
9132 && !TREE_OVERFLOW (arg1)
9133 && !TREE_OVERFLOW (arg2))
9135 REAL_VALUE_TYPE c1, c2;
9137 c1 = TREE_REAL_CST (arg1);
9138 c2 = TREE_REAL_CST (arg2);
9139 /* c1.sign := c2.sign. */
9140 real_copysign (&c1, &c2);
9141 return build_real (type, c1);
9144 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9145 Remember to evaluate Y for side-effects. */
9146 if (tree_expr_nonnegative_p (arg2))
9147 return omit_one_operand_loc (loc, type,
9148 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9151 /* Strip sign changing operations for the first argument.
   copysign overrides ARG1's sign anyway, so negate/abs on ARG1 is
   dead; fold_strip_sign_ops is non-NULL only when it stripped
   something. */
9152 tem = fold_strip_sign_ops (arg1);
9154 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9159 /* Fold a call to builtin isascii with argument ARG. */
9162 fold_builtin_isascii (location_t loc, tree arg)
9164 if (!validate_arg (arg, INTEGER_TYPE))
9168 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
/* The mask clears the low 7 bits; any remaining set bit means the value
   is outside the ASCII range.  Result is int-typed, matching isascii.  */
9169 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9170 build_int_cst (NULL_TREE,
9171 ~ (unsigned HOST_WIDE_INT) 0x7f));
9172 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9173 arg, integer_zero_node);
9177 /* Fold a call to builtin toascii with argument ARG. */
9180 fold_builtin_toascii (location_t loc, tree arg)
9182 if (!validate_arg (arg, INTEGER_TYPE))
9185 /* Transform toascii(c) -> (c & 0x7f). */
/* Masking to 7 bits is the whole of toascii's contract.  */
9186 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9187 build_int_cst (NULL_TREE, 0x7f));
9190 /* Fold a call to builtin isdigit with argument ARG. */
9193 fold_builtin_isdigit (location_t loc, tree arg)
9195 if (!validate_arg (arg, INTEGER_TYPE))
9199 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9200 /* According to the C standard, isdigit is unaffected by locale.
9201 However, it definitely is affected by the target character set. */
9202 unsigned HOST_WIDE_INT target_digit0
9203 = lang_hooks.to_target_charset ('0');
/* A zero here means the target charset lookup could not map '0';
   NOTE(review): the bail-out line after this test is elided in this
   excerpt.  */
9205 if (target_digit0 == 0)
/* The unsigned subtraction makes values below '0' wrap to large
   numbers, so the single <= 9 compare covers both range ends.  */
9208 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9209 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9210 build_int_cst (unsigned_type_node, target_digit0));
9211 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9212 build_int_cst (unsigned_type_node, 9));
9216 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9219 fold_builtin_fabs (location_t loc, tree arg, tree type)
9221 if (!validate_arg (arg, REAL_TYPE))
/* Constants fold to a constant; everything else becomes ABS_EXPR.  */
9224 arg = fold_convert_loc (loc, type, arg);
9225 if (TREE_CODE (arg) == REAL_CST)
9226 return fold_abs_const (arg, type);
9227 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9230 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9233 fold_builtin_abs (location_t loc, tree arg, tree type)
9235 if (!validate_arg (arg, INTEGER_TYPE))
/* Integer twin of fold_builtin_fabs above: constant-fold when possible,
   otherwise emit an ABS_EXPR.  */
9238 arg = fold_convert_loc (loc, type, arg)
9239 if (TREE_CODE (arg) == INTEGER_CST)
9240 return fold_abs_const (arg, type);
9241 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9244 /* Fold a fma operation with arguments ARG[012]. */
9247 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9248 tree type, tree arg0, tree arg1, tree arg2)
/* Only the all-constant case is folded, via MPFR's correctly-rounded
   fused multiply-add.  */
9250 if (TREE_CODE (arg0) == REAL_CST
9251 && TREE_CODE (arg1) == REAL_CST
9252 && TREE_CODE (arg2) == REAL_CST)
9253 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9258 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9261 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9263 if (validate_arg (arg0, REAL_TYPE)
9264 && validate_arg(arg1, REAL_TYPE)
9265 && validate_arg(arg2, REAL_TYPE))
/* First try constant folding through fold_fma.  */
9267 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9271 /* ??? Only expand to FMA_EXPR if it's directly supported. */
/* Without a native fma optab, keeping the library call preserves the
   single-rounding semantics the libm function guarantees.  */
9272 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9273 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9278 /* Fold a call to builtin fmin or fmax. */
/* MAX selects between fmax (true) and fmin (false) behavior.  */
9281 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9282 tree type, bool max)
9284 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9286 /* Calculate the result when the argument is a constant. */
9287 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9292 /* If either argument is NaN, return the other one. Avoid the
9293 transformation if we get (and honor) a signalling NaN. Using
9294 omit_one_operand() ensures we create a non-lvalue. */
9295 if (TREE_CODE (arg0) == REAL_CST
9296 && real_isnan (&TREE_REAL_CST (arg0))
9297 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9298 || ! TREE_REAL_CST (arg0).signalling))
9299 return omit_one_operand_loc (loc, type, arg1, arg0);
9300 if (TREE_CODE (arg1) == REAL_CST
9301 && real_isnan (&TREE_REAL_CST (arg1))
9302 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9303 || ! TREE_REAL_CST (arg1).signalling))
9304 return omit_one_operand_loc (loc, type, arg0, arg1);
9306 /* Transform fmin/fmax(x,x) -> x. */
/* OEP_PURE_SAME lets pure-call operands compare equal here.  */
9307 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9308 return omit_one_operand_loc (loc, type, arg0, arg1);
9310 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9311 functions to return the numeric arg if the other one is NaN.
9312 These tree codes don't honor that, so only transform if
9313 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9314 handled, so we don't have to worry about it either. */
9315 if (flag_finite_math_only)
9316 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9317 fold_convert_loc (loc, type, arg0),
9318 fold_convert_loc (loc, type, arg1));
9323 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9326 fold_builtin_carg (location_t loc, tree arg, tree type)
9328 if (validate_arg (arg, COMPLEX_TYPE)
9329 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
/* TYPE is the real result type, so it also names the right atan2
   flavor (atan2/atan2f/atan2l).  */
9331 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* Save ARG so the real and imaginary parts don't evaluate the complex
   expression (and any side effects) twice.  */
9335 tree new_arg = builtin_save_expr (arg);
9336 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9337 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9338 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9345 /* Fold a call to builtin logb/ilogb. */
/* RETTYPE distinguishes the two: real for logb, integer for ilogb.  */
9348 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9350 if (! validate_arg (arg, REAL_TYPE))
/* Only constant arguments are folded.  */
9355 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9357 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9363 /* If arg is Inf or NaN and we're logb, return it. */
9364 if (TREE_CODE (rettype) == REAL_TYPE)
9365 return fold_convert_loc (loc, rettype, arg);
9366 /* Fall through... */
9368 /* Zero may set errno and/or raise an exception for logb, also
9369 for ilogb we don't know FP_ILOGB0. */
9372 /* For normal numbers, proceed iff radix == 2. In GCC,
9373 normalized significands are in the range [0.5, 1.0). We
9374 want the exponent as if they were [1.0, 2.0) so get the
9375 exponent and subtract 1. */
9376 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9377 return fold_convert_loc (loc, rettype,
9378 build_int_cst (NULL_TREE,
9379 REAL_EXP (value)-1));
9387 /* Fold a call to builtin significand, if radix == 2. */
9390 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9392 if (! validate_arg (arg, REAL_TYPE))
9397 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9399 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9406 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9407 return fold_convert_loc (loc, rettype, arg);
9409 /* For normal numbers, proceed iff radix == 2. */
9410 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9412 REAL_VALUE_TYPE result = *value;
9413 /* In GCC, normalized significands are in the range [0.5,
9414 1.0). We want them to be [1.0, 2.0) so set the
/* Forcing the exponent to 1 rescales the value into [1.0, 2.0)
   while keeping the significand bits and sign unchanged.  */
9416 SET_REAL_EXP (&result, 1);
9417 return build_real (rettype, result);
9426 /* Fold a call to builtin frexp, we can assume the base is 2. */
/* ARG1 is the int* out-parameter receiving the exponent; the result
   becomes a COMPOUND_EXPR (*arg1 = exp, frac) so both effects survive.  */
9429 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9431 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9436 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9439 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9441 /* Proceed if a valid pointer type was passed in. */
9442 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9444 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9450 /* For +-0, return (*exp = 0, +-0). */
9451 exp = integer_zero_node;
9456 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9457 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9460 /* Since the frexp function always expects base 2, and in
9461 GCC normalized significands are already in the range
9462 [0.5, 1.0), we have exactly what frexp wants. */
9463 REAL_VALUE_TYPE frac_rvt = *value;
9464 SET_REAL_EXP (&frac_rvt, 0);
9465 frac = build_real (rettype, frac_rvt);
9466 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9473 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9474 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9475 TREE_SIDE_EFFECTS (arg1) = 1;
9476 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9482 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9483 then we can assume the base is two. If it's false, then we have to
9484 check the mode of the TYPE parameter in certain cases. */
9487 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9488 tree type, bool ldexp)
9490 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9495 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9496 if (real_zerop (arg0) || integer_zerop (arg1)
9497 || (TREE_CODE (arg0) == REAL_CST
9498 && !real_isfinite (&TREE_REAL_CST (arg0))))
9499 return omit_one_operand_loc (loc, type, arg0, arg1);
9501 /* If both arguments are constant, then try to evaluate it. */
/* For scalbn/scalbln (!ldexp) the transformation is only valid when
   the type's radix is 2, since real_ldexp scales by powers of two.  */
9502 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9503 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9504 && host_integerp (arg1, 0))
9506 /* Bound the maximum adjustment to twice the range of the
9507 mode's valid exponents. Use abs to ensure the range is
9508 positive as a sanity check. */
9509 const long max_exp_adj = 2 *
9510 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9511 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9513 /* Get the user-requested adjustment. */
9514 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9516 /* The requested adjustment must be inside this range. This
9517 is a preliminary cap to avoid things like overflow, we
9518 may still fail to compute the result for other reasons. */
9519 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9521 REAL_VALUE_TYPE initial_result;
9523 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9525 /* Ensure we didn't overflow. */
9526 if (! real_isinf (&initial_result))
9528 const REAL_VALUE_TYPE trunc_result
9529 = real_value_truncate (TYPE_MODE (type), initial_result);
9531 /* Only proceed if the target mode can hold the
/* A mismatch after truncation means the target mode cannot
   represent the value exactly (e.g. it underflowed), so leave
   the call alone.  */
9533 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9534 return build_real (type, trunc_result);
9543 /* Fold a call to builtin modf. */
/* ARG1 is the out-parameter receiving the integral part; RETTYPE is
   the real result type.  Result is (*arg1 = trunc, frac).  */
9546 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9548 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9553 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9556 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9558 /* Proceed if a valid pointer type was passed in. */
9559 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9561 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9562 REAL_VALUE_TYPE trunc, frac;
9568 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9569 trunc = frac = *value;
9572 /* For +-Inf, return (*arg1 = arg0, +-0). */
/* The fractional part of an infinity is a zero carrying Inf's sign.  */
9574 frac.sign = value->sign;
9578 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9579 real_trunc (&trunc, VOIDmode, value);
9580 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9581 /* If the original number was negative and already
9582 integral, then the fractional part is -0.0. */
9583 if (value->sign && frac.cl == rvc_zero)
9584 frac.sign = value->sign;
9588 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9589 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9590 build_real (rettype, trunc));
9591 TREE_SIDE_EFFECTS (arg1) = 1;
9592 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9593 build_real (rettype, frac));
9599 /* Given a location LOC, an interclass builtin function decl FNDECL
9600 and its single argument ARG, return an folded expression computing
9601 the same, or NULL_TREE if we either couldn't or didn't want to fold
9602 (the latter happen if there's an RTL instruction available). */
9605 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9607 enum machine_mode mode;
9609 if (!validate_arg (arg, REAL_TYPE))
/* Prefer the target's native instruction when one exists; folding
   here would lose it.  */
9612 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9615 mode = TYPE_MODE (TREE_TYPE (arg));
9617 /* If there is no optab, try generic code. */
9618 switch (DECL_FUNCTION_CODE (fndecl))
9622 CASE_FLT_FN (BUILT_IN_ISINF):
9624 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9625 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9626 tree const type = TREE_TYPE (arg);
/* BUF receives the mode's largest finite value as a hex-float
   string, parsed back into R.  */
9630 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9631 real_from_string (&r, buf);
9632 result = build_call_expr (isgr_fn, 2,
9633 fold_build1_loc (loc, ABS_EXPR, type, arg),
9634 build_real (type, r));
9637 CASE_FLT_FN (BUILT_IN_FINITE):
9638 case BUILT_IN_ISFINITE:
9640 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9641 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9642 tree const type = TREE_TYPE (arg);
9646 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9647 real_from_string (&r, buf);
9648 result = build_call_expr (isle_fn, 2,
9649 fold_build1_loc (loc, ABS_EXPR, type, arg),
9650 build_real (type, r));
9651 /*result = fold_build2_loc (loc, UNGT_EXPR,
9652 TREE_TYPE (TREE_TYPE (fndecl)),
9653 fold_build1_loc (loc, ABS_EXPR, type, arg),
9654 build_real (type, r));
9655 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9656 TREE_TYPE (TREE_TYPE (fndecl)),
9660 case BUILT_IN_ISNORMAL:
9662 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9663 islessequal(fabs(x),DBL_MAX). */
9664 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9665 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9666 tree const type = TREE_TYPE (arg);
9667 REAL_VALUE_TYPE rmax, rmin;
/* RMIN is the smallest normal (0x1p(emin-1)); values below it are
   subnormal or zero, values above RMAX are infinite or NaN.  */
9670 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9671 real_from_string (&rmax, buf);
9672 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9673 real_from_string (&rmin, buf);
/* Save fabs(arg) once; it is used by both comparisons below.  */
9674 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9675 result = build_call_expr (isle_fn, 2, arg,
9676 build_real (type, rmax));
9677 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9678 build_call_expr (isge_fn, 2, arg,
9679 build_real (type, rmin)));
9689 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9690 ARG is the argument for the call. */
/* BUILTIN_INDEX selects which classification to fold; FNDECL supplies
   the call's result type.  */
9693 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9695 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9698 if (!validate_arg (arg, REAL_TYPE))
9701 switch (builtin_index)
9703 case BUILT_IN_ISINF:
/* If the mode has no infinities the answer is statically 0; keep ARG
   alive for its side effects via omit_one_operand.  */
9704 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9705 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9707 if (TREE_CODE (arg) == REAL_CST)
9709 r = TREE_REAL_CST (arg);
9710 if (real_isinf (&r))
9711 return real_compare (GT_EXPR, &r, &dconst0)
9712 ? integer_one_node : integer_minus_one_node;
9714 return integer_zero_node;
9719 case BUILT_IN_ISINF_SIGN:
9721 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9722 /* In a boolean context, GCC will fold the inner COND_EXPR to
9723 1. So e.g. "if (isinf_sign(x))" would be folded to just
9724 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9725 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9726 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9727 tree tmp = NULL_TREE;
/* ARG feeds two calls below, so evaluate it only once.  */
9729 arg = builtin_save_expr (arg);
9731 if (signbit_fn && isinf_fn)
9733 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9734 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
/* Normalize both helper results to 0/1 before building the nested
   COND_EXPRs.  */
9736 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9737 signbit_call, integer_zero_node);
9738 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9739 isinf_call, integer_zero_node);
9741 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9742 integer_minus_one_node, integer_one_node);
9743 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9751 case BUILT_IN_ISFINITE:
9752 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9753 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9754 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9756 if (TREE_CODE (arg) == REAL_CST)
9758 r = TREE_REAL_CST (arg);
9759 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9764 case BUILT_IN_ISNAN:
9765 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9766 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9768 if (TREE_CODE (arg) == REAL_CST)
9770 r = TREE_REAL_CST (arg);
9771 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* isnan(x) <=> x unordered with itself; save ARG so it is evaluated
   once for both operands.  */
9774 arg = builtin_save_expr (arg);
9775 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9782 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9783 This builtin will generate code to return the appropriate floating
9784 point classification depending on the value of the floating point
9785 number passed in. The possible return values must be supplied as
9786 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9787 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9788 one floating point argument which is "type generic". */
9791 fold_builtin_fpclassify (location_t loc, tree exp)
9793 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9794 arg, type, res, tmp;
9795 enum machine_mode mode;
9799 /* Verify the required arguments in the original call. */
9800 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9801 INTEGER_TYPE, INTEGER_TYPE,
9802 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9805 fp_nan = CALL_EXPR_ARG (exp, 0);
9806 fp_infinite = CALL_EXPR_ARG (exp, 1);
9807 fp_normal = CALL_EXPR_ARG (exp, 2);
9808 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9809 fp_zero = CALL_EXPR_ARG (exp, 4);
9810 arg = CALL_EXPR_ARG (exp, 5);
9811 type = TREE_TYPE (arg);
9812 mode = TYPE_MODE (type);
/* Work on fabs(arg); saved so the expression is evaluated once even
   though it appears in several comparisons below.  */
9813 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9817 (fabs(x) == Inf ? FP_INFINITE :
9818 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9819 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
/* The COND_EXPR chain is built innermost-first: zero/subnormal, then
   normal, then (conditionally) infinite and NaN wrap around it.  */
9821 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9822 build_real (type, dconst0));
9823 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9824 tmp, fp_zero, fp_subnormal);
/* 0x1p(emin-1) is the mode's smallest normal value.  */
9826 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9827 real_from_string (&r, buf);
9828 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9829 arg, build_real (type, r));
9830 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
/* Skip the Inf/NaN tests entirely when the mode honors neither.  */
9832 if (HONOR_INFINITIES (mode))
9835 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9836 build_real (type, r));
9837 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9841 if (HONOR_NANS (mode))
9843 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9844 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9850 /* Fold a call to an unordered comparison function such as
9851 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9852 being called and ARG0 and ARG1 are the arguments for the call.
9853 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9854 the opposite of the desired result. UNORDERED_CODE is used
9855 for modes that can hold NaNs and ORDERED_CODE is used for
9859 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9860 enum tree_code unordered_code,
9861 enum tree_code ordered_code)
9863 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9864 enum tree_code code;
9866 enum tree_code code0, code1;
9867 tree cmp_type = NULL_TREE;
9869 type0 = TREE_TYPE (arg0);
9870 type1 = TREE_TYPE (arg1);
9872 code0 = TREE_CODE (type0);
9873 code1 = TREE_CODE (type1);
/* Pick a common comparison type: the wider of two reals, or the real
   type when mixing real and integer operands.  */
9875 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9876 /* Choose the wider of two real types. */
9877 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9879 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9881 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9884 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9885 arg1 = fold_convert_loc (loc, cmp_type, arg1);
/* __builtin_isunordered is handled directly; without NaNs it is
   statically false (both operands kept for side effects).  */
9887 if (unordered_code == UNORDERED_EXPR)
9889 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9890 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9891 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
/* The codes passed in compute the OPPOSITE of the desired result
   (see the header comment), hence the TRUTH_NOT_EXPR wrapper.  */
9894 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9896 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9897 fold_build2_loc (loc, code, type, arg0, arg1));
9900 /* Fold a call to built-in function FNDECL with 0 arguments.
9901 IGNORE is true if the result of the function call is ignored. This
9902 function returns NULL_TREE if no simplification was possible. */
9905 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9907 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9908 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* inf/huge_val builtins fold to a constant; the bool argument to
   fold_builtin_inf selects whether a warning applies (true for INF,
   false for HUGE_VAL).  */
9911 CASE_FLT_FN (BUILT_IN_INF):
9912 case BUILT_IN_INFD32:
9913 case BUILT_IN_INFD64:
9914 case BUILT_IN_INFD128:
9915 return fold_builtin_inf (loc, type, true);
9917 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9918 return fold_builtin_inf (loc, type, false);
9920 case BUILT_IN_CLASSIFY_TYPE:
/* Zero-argument classify_type: NULL_TREE stands for "no argument".  */
9921 return fold_builtin_classify_type (NULL_TREE);
9929 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9930 IGNORE is true if the result of the function call is ignored. This
9931 function returns NULL_TREE if no simplification was possible. */
/* Central one-argument dispatcher: switches on DECL_FUNCTION_CODE and
   delegates to the per-builtin folders above, or folds complex/real
   math on constant arguments directly via MPFR/MPC.  */
9934 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9936 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9937 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9940 case BUILT_IN_CONSTANT_P:
9942 tree val = fold_builtin_constant_p (arg0);
9944 /* Gimplification will pull the CALL_EXPR for the builtin out of
9945 an if condition. When not optimizing, we'll not CSE it back.
9946 To avoid link error types of regressions, return false now. */
9947 if (!val && !optimize)
9948 val = integer_zero_node;
9953 case BUILT_IN_CLASSIFY_TYPE:
9954 return fold_builtin_classify_type (arg0);
9956 case BUILT_IN_STRLEN:
9957 return fold_builtin_strlen (loc, type, arg0);
9959 CASE_FLT_FN (BUILT_IN_FABS):
9960 return fold_builtin_fabs (loc, arg0, type);
9964 case BUILT_IN_LLABS:
9965 case BUILT_IN_IMAXABS:
9966 return fold_builtin_abs (loc, arg0, type);
/* Complex builtins: each case first checks for a complex argument
   with a real component type, then folds constants through MPC.  */
9968 CASE_FLT_FN (BUILT_IN_CONJ):
9969 if (validate_arg (arg0, COMPLEX_TYPE)
9970 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9971 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9974 CASE_FLT_FN (BUILT_IN_CREAL):
9975 if (validate_arg (arg0, COMPLEX_TYPE)
9976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9977 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9980 CASE_FLT_FN (BUILT_IN_CIMAG):
9981 if (validate_arg (arg0, COMPLEX_TYPE)
9982 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9983 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9986 CASE_FLT_FN (BUILT_IN_CCOS):
9987 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9989 CASE_FLT_FN (BUILT_IN_CCOSH):
9990 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9992 CASE_FLT_FN (BUILT_IN_CPROJ):
9993 return fold_builtin_cproj(loc, arg0, type);
9995 CASE_FLT_FN (BUILT_IN_CSIN):
9996 if (validate_arg (arg0, COMPLEX_TYPE)
9997 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9998 return do_mpc_arg1 (arg0, type, mpc_sin);
10001 CASE_FLT_FN (BUILT_IN_CSINH):
10002 if (validate_arg (arg0, COMPLEX_TYPE)
10003 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10004 return do_mpc_arg1 (arg0, type, mpc_sinh);
10007 CASE_FLT_FN (BUILT_IN_CTAN):
10008 if (validate_arg (arg0, COMPLEX_TYPE)
10009 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10010 return do_mpc_arg1 (arg0, type, mpc_tan);
10013 CASE_FLT_FN (BUILT_IN_CTANH):
10014 if (validate_arg (arg0, COMPLEX_TYPE)
10015 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10016 return do_mpc_arg1 (arg0, type, mpc_tanh);
10019 CASE_FLT_FN (BUILT_IN_CLOG):
10020 if (validate_arg (arg0, COMPLEX_TYPE)
10021 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10022 return do_mpc_arg1 (arg0, type, mpc_log);
10025 CASE_FLT_FN (BUILT_IN_CSQRT):
10026 if (validate_arg (arg0, COMPLEX_TYPE)
10027 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10028 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10031 CASE_FLT_FN (BUILT_IN_CASIN):
10032 if (validate_arg (arg0, COMPLEX_TYPE)
10033 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10034 return do_mpc_arg1 (arg0, type, mpc_asin);
10037 CASE_FLT_FN (BUILT_IN_CACOS):
10038 if (validate_arg (arg0, COMPLEX_TYPE)
10039 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10040 return do_mpc_arg1 (arg0, type, mpc_acos);
10043 CASE_FLT_FN (BUILT_IN_CATAN):
10044 if (validate_arg (arg0, COMPLEX_TYPE)
10045 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10046 return do_mpc_arg1 (arg0, type, mpc_atan);
10049 CASE_FLT_FN (BUILT_IN_CASINH):
10050 if (validate_arg (arg0, COMPLEX_TYPE)
10051 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10052 return do_mpc_arg1 (arg0, type, mpc_asinh);
10055 CASE_FLT_FN (BUILT_IN_CACOSH):
10056 if (validate_arg (arg0, COMPLEX_TYPE)
10057 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10058 return do_mpc_arg1 (arg0, type, mpc_acosh);
10061 CASE_FLT_FN (BUILT_IN_CATANH):
10062 if (validate_arg (arg0, COMPLEX_TYPE)
10063 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10064 return do_mpc_arg1 (arg0, type, mpc_atanh);
10067 CASE_FLT_FN (BUILT_IN_CABS):
10068 return fold_builtin_cabs (loc, arg0, type, fndecl);
10070 CASE_FLT_FN (BUILT_IN_CARG):
10071 return fold_builtin_carg (loc, arg0, type);
10073 CASE_FLT_FN (BUILT_IN_SQRT):
10074 return fold_builtin_sqrt (loc, arg0, type);
10076 CASE_FLT_FN (BUILT_IN_CBRT):
10077 return fold_builtin_cbrt (loc, arg0, type);
/* Real math builtins: constant-fold through MPFR.  The trailing
   arguments of do_mpfr_arg1 give the valid input domain (lower
   bound, upper bound, bounds-inclusive flag) where applicable.  */
10079 CASE_FLT_FN (BUILT_IN_ASIN):
10080 if (validate_arg (arg0, REAL_TYPE))
10081 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10082 &dconstm1, &dconst1, true);
10085 CASE_FLT_FN (BUILT_IN_ACOS):
10086 if (validate_arg (arg0, REAL_TYPE))
10087 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10088 &dconstm1, &dconst1, true);
10091 CASE_FLT_FN (BUILT_IN_ATAN):
10092 if (validate_arg (arg0, REAL_TYPE))
10093 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10096 CASE_FLT_FN (BUILT_IN_ASINH):
10097 if (validate_arg (arg0, REAL_TYPE))
10098 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10101 CASE_FLT_FN (BUILT_IN_ACOSH):
10102 if (validate_arg (arg0, REAL_TYPE))
10103 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10104 &dconst1, NULL, true);
10107 CASE_FLT_FN (BUILT_IN_ATANH):
10108 if (validate_arg (arg0, REAL_TYPE))
10109 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10110 &dconstm1, &dconst1, false);
10113 CASE_FLT_FN (BUILT_IN_SIN):
10114 if (validate_arg (arg0, REAL_TYPE))
10115 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10118 CASE_FLT_FN (BUILT_IN_COS):
10119 return fold_builtin_cos (loc, arg0, type, fndecl);
10121 CASE_FLT_FN (BUILT_IN_TAN):
10122 return fold_builtin_tan (arg0, type);
10124 CASE_FLT_FN (BUILT_IN_CEXP):
10125 return fold_builtin_cexp (loc, arg0, type);
10127 CASE_FLT_FN (BUILT_IN_CEXPI):
10128 if (validate_arg (arg0, REAL_TYPE))
10129 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10132 CASE_FLT_FN (BUILT_IN_SINH):
10133 if (validate_arg (arg0, REAL_TYPE))
10134 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10137 CASE_FLT_FN (BUILT_IN_COSH):
10138 return fold_builtin_cosh (loc, arg0, type, fndecl);
10140 CASE_FLT_FN (BUILT_IN_TANH):
10141 if (validate_arg (arg0, REAL_TYPE))
10142 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10145 CASE_FLT_FN (BUILT_IN_ERF):
10146 if (validate_arg (arg0, REAL_TYPE))
10147 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10150 CASE_FLT_FN (BUILT_IN_ERFC):
10151 if (validate_arg (arg0, REAL_TYPE))
10152 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10155 CASE_FLT_FN (BUILT_IN_TGAMMA):
10156 if (validate_arg (arg0, REAL_TYPE))
10157 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10160 CASE_FLT_FN (BUILT_IN_EXP):
10161 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10163 CASE_FLT_FN (BUILT_IN_EXP2):
10164 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10166 CASE_FLT_FN (BUILT_IN_EXP10):
10167 CASE_FLT_FN (BUILT_IN_POW10):
10168 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10170 CASE_FLT_FN (BUILT_IN_EXPM1):
10171 if (validate_arg (arg0, REAL_TYPE))
10172 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10175 CASE_FLT_FN (BUILT_IN_LOG):
10176 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10178 CASE_FLT_FN (BUILT_IN_LOG2):
10179 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10181 CASE_FLT_FN (BUILT_IN_LOG10):
10182 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10184 CASE_FLT_FN (BUILT_IN_LOG1P):
10185 if (validate_arg (arg0, REAL_TYPE))
10186 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10187 &dconstm1, NULL, false);
10190 CASE_FLT_FN (BUILT_IN_J0):
10191 if (validate_arg (arg0, REAL_TYPE))
10192 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10196 CASE_FLT_FN (BUILT_IN_J1):
10197 if (validate_arg (arg0, REAL_TYPE))
10198 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10202 CASE_FLT_FN (BUILT_IN_Y0):
10203 if (validate_arg (arg0, REAL_TYPE))
10204 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10205 &dconst0, NULL, false);
10208 CASE_FLT_FN (BUILT_IN_Y1):
10209 if (validate_arg (arg0, REAL_TYPE))
10210 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10211 &dconst0, NULL, false);
10214 CASE_FLT_FN (BUILT_IN_NAN):
10215 case BUILT_IN_NAND32:
10216 case BUILT_IN_NAND64:
10217 case BUILT_IN_NAND128:
/* The bool selects quiet (true) vs. signalling (false) NaN.  */
10218 return fold_builtin_nan (arg0, type, true);
10220 CASE_FLT_FN (BUILT_IN_NANS):
10221 return fold_builtin_nan (arg0, type, false);
10223 CASE_FLT_FN (BUILT_IN_FLOOR):
10224 return fold_builtin_floor (loc, fndecl, arg0);
10226 CASE_FLT_FN (BUILT_IN_CEIL):
10227 return fold_builtin_ceil (loc, fndecl, arg0);
10229 CASE_FLT_FN (BUILT_IN_TRUNC):
10230 return fold_builtin_trunc (loc, fndecl, arg0);
10232 CASE_FLT_FN (BUILT_IN_ROUND):
10233 return fold_builtin_round (loc, fndecl, arg0);
10235 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10236 CASE_FLT_FN (BUILT_IN_RINT):
10237 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10239 CASE_FLT_FN (BUILT_IN_LCEIL):
10240 CASE_FLT_FN (BUILT_IN_LLCEIL):
10241 CASE_FLT_FN (BUILT_IN_LFLOOR):
10242 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10243 CASE_FLT_FN (BUILT_IN_LROUND):
10244 CASE_FLT_FN (BUILT_IN_LLROUND):
10245 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10247 CASE_FLT_FN (BUILT_IN_LRINT):
10248 CASE_FLT_FN (BUILT_IN_LLRINT):
10249 return fold_fixed_mathfn (loc, fndecl, arg0);
10251 case BUILT_IN_BSWAP32:
10252 case BUILT_IN_BSWAP64:
10253 return fold_builtin_bswap (fndecl, arg0);
10255 CASE_INT_FN (BUILT_IN_FFS):
10256 CASE_INT_FN (BUILT_IN_CLZ):
10257 CASE_INT_FN (BUILT_IN_CTZ):
10258 CASE_INT_FN (BUILT_IN_POPCOUNT):
10259 CASE_INT_FN (BUILT_IN_PARITY):
10260 return fold_builtin_bitop (fndecl, arg0);
10262 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10263 return fold_builtin_signbit (loc, arg0, type);
10265 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10266 return fold_builtin_significand (loc, arg0, type);
10268 CASE_FLT_FN (BUILT_IN_ILOGB):
10269 CASE_FLT_FN (BUILT_IN_LOGB):
10270 return fold_builtin_logb (loc, arg0, type);
10272 case BUILT_IN_ISASCII:
10273 return fold_builtin_isascii (loc, arg0);
10275 case BUILT_IN_TOASCII:
10276 return fold_builtin_toascii (loc, arg0);
10278 case BUILT_IN_ISDIGIT:
10279 return fold_builtin_isdigit (loc, arg0);
/* Classification builtins: try fold_builtin_classify first, then fall
   back to the interclass (comparison-based) expansion.  */
10281 CASE_FLT_FN (BUILT_IN_FINITE):
10282 case BUILT_IN_FINITED32:
10283 case BUILT_IN_FINITED64:
10284 case BUILT_IN_FINITED128:
10285 case BUILT_IN_ISFINITE:
10287 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10290 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10293 CASE_FLT_FN (BUILT_IN_ISINF):
10294 case BUILT_IN_ISINFD32:
10295 case BUILT_IN_ISINFD64:
10296 case BUILT_IN_ISINFD128:
10298 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10301 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10304 case BUILT_IN_ISNORMAL:
10305 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10307 case BUILT_IN_ISINF_SIGN:
10308 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10310 CASE_FLT_FN (BUILT_IN_ISNAN):
10311 case BUILT_IN_ISNAND32:
10312 case BUILT_IN_ISNAND64:
10313 case BUILT_IN_ISNAND128:
10314 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10316 case BUILT_IN_PRINTF:
10317 case BUILT_IN_PRINTF_UNLOCKED:
10318 case BUILT_IN_VPRINTF:
10319 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10321 case BUILT_IN_FREE:
/* free(NULL) is a no-op, so drop the call entirely.  */
10322 if (integer_zerop (arg0))
10323 return build_empty_stmt (loc);
10334 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10335 IGNORE is true if the result of the function call is ignored. This
10336 function returns NULL_TREE if no simplification was possible. */
10339 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
/* TYPE is the return type of the builtin being folded.  */
10341 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10342 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Dispatch on the builtin's function code.  Each case either returns a
   simplified tree or yields no simplification (NULL_TREE).  Math cases
   constant-fold via the MPFR/MPC helpers when the arguments validate.  */
10346 CASE_FLT_FN (BUILT_IN_JN):
10347 if (validate_arg (arg0, INTEGER_TYPE)
10348 && validate_arg (arg1, REAL_TYPE))
10349 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10352 CASE_FLT_FN (BUILT_IN_YN):
10353 if (validate_arg (arg0, INTEGER_TYPE)
10354 && validate_arg (arg1, REAL_TYPE))
10355 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10359 CASE_FLT_FN (BUILT_IN_DREM):
10360 CASE_FLT_FN (BUILT_IN_REMAINDER):
/* drem is the historical BSD name for remainder; both fold identically.  */
10361 if (validate_arg (arg0, REAL_TYPE)
10362 && validate_arg(arg1, REAL_TYPE)
10363 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10366 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10367 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10368 if (validate_arg (arg0, REAL_TYPE)
10369 && validate_arg(arg1, POINTER_TYPE))
10370 return do_mpfr_lgamma_r (arg0, arg1, type);
10373 CASE_FLT_FN (BUILT_IN_ATAN2):
10374 if (validate_arg (arg0, REAL_TYPE)
10375 && validate_arg(arg1, REAL_TYPE))
10376 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10379 CASE_FLT_FN (BUILT_IN_FDIM):
10380 if (validate_arg (arg0, REAL_TYPE)
10381 && validate_arg(arg1, REAL_TYPE))
10382 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10385 CASE_FLT_FN (BUILT_IN_HYPOT):
10386 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10388 CASE_FLT_FN (BUILT_IN_CPOW):
/* Complex power: both operands must be complex with real components.  */
10389 if (validate_arg (arg0, COMPLEX_TYPE)
10390 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10391 && validate_arg (arg1, COMPLEX_TYPE)
10392 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10393 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10396 CASE_FLT_FN (BUILT_IN_LDEXP):
10397 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10398 CASE_FLT_FN (BUILT_IN_SCALBN):
10399 CASE_FLT_FN (BUILT_IN_SCALBLN):
10400 return fold_builtin_load_exponent (loc, arg0, arg1,
10401 type, /*ldexp=*/false);
10403 CASE_FLT_FN (BUILT_IN_FREXP):
10404 return fold_builtin_frexp (loc, arg0, arg1, type);
10406 CASE_FLT_FN (BUILT_IN_MODF):
10407 return fold_builtin_modf (loc, arg0, arg1, type);
10409 case BUILT_IN_BZERO:
10410 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10412 case BUILT_IN_FPUTS:
10413 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10415 case BUILT_IN_FPUTS_UNLOCKED:
10416 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10418 case BUILT_IN_STRSTR:
10419 return fold_builtin_strstr (loc, arg0, arg1, type);
10421 case BUILT_IN_STRCAT:
10422 return fold_builtin_strcat (loc, arg0, arg1);
10424 case BUILT_IN_STRSPN:
10425 return fold_builtin_strspn (loc, arg0, arg1);
10427 case BUILT_IN_STRCSPN:
10428 return fold_builtin_strcspn (loc, arg0, arg1);
10430 case BUILT_IN_STRCHR:
10431 case BUILT_IN_INDEX:
/* index is the historical BSD synonym for strchr.  */
10432 return fold_builtin_strchr (loc, arg0, arg1, type);
10434 case BUILT_IN_STRRCHR:
10435 case BUILT_IN_RINDEX:
10436 return fold_builtin_strrchr (loc, arg0, arg1, type);
10438 case BUILT_IN_STRCPY:
10439 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10441 case BUILT_IN_STPCPY:
/* NOTE(review): visible code substitutes an implicit strcpy decl for
   stpcpy; presumably this path is taken when the result is unused
   (stpcpy's return value is the only difference) -- guard lines are
   elided in this listing, confirm against the full source.  */
10444 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10448 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10451 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10454 case BUILT_IN_STRCMP:
10455 return fold_builtin_strcmp (loc, arg0, arg1);
10457 case BUILT_IN_STRPBRK:
10458 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10460 case BUILT_IN_EXPECT:
10461 return fold_builtin_expect (loc, arg0, arg1);
10463 CASE_FLT_FN (BUILT_IN_POW):
10464 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10466 CASE_FLT_FN (BUILT_IN_POWI):
10467 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10469 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10470 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10472 CASE_FLT_FN (BUILT_IN_FMIN):
10473 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10475 CASE_FLT_FN (BUILT_IN_FMAX):
10476 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
/* The ordered comparison builtins fold to a pair of tree codes:
   the unordered code used when either operand may be a NaN, and the
   plain code used otherwise.  */
10478 case BUILT_IN_ISGREATER:
10479 return fold_builtin_unordered_cmp (loc, fndecl,
10480 arg0, arg1, UNLE_EXPR, LE_EXPR);
10481 case BUILT_IN_ISGREATEREQUAL:
10482 return fold_builtin_unordered_cmp (loc, fndecl,
10483 arg0, arg1, UNLT_EXPR, LT_EXPR);
10484 case BUILT_IN_ISLESS:
10485 return fold_builtin_unordered_cmp (loc, fndecl,
10486 arg0, arg1, UNGE_EXPR, GE_EXPR);
10487 case BUILT_IN_ISLESSEQUAL:
10488 return fold_builtin_unordered_cmp (loc, fndecl,
10489 arg0, arg1, UNGT_EXPR, GT_EXPR);
10490 case BUILT_IN_ISLESSGREATER:
10491 return fold_builtin_unordered_cmp (loc, fndecl,
10492 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10493 case BUILT_IN_ISUNORDERED:
10494 return fold_builtin_unordered_cmp (loc, fndecl,
10495 arg0, arg1, UNORDERED_EXPR,
10498 /* We do the folding for va_start in the expander. */
10499 case BUILT_IN_VA_START:
10502 case BUILT_IN_SPRINTF:
10503 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10505 case BUILT_IN_OBJECT_SIZE:
10506 return fold_builtin_object_size (arg0, arg1);
10508 case BUILT_IN_PRINTF:
10509 case BUILT_IN_PRINTF_UNLOCKED:
10510 case BUILT_IN_VPRINTF:
10511 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10513 case BUILT_IN_PRINTF_CHK:
10514 case BUILT_IN_VPRINTF_CHK:
/* For the _chk variants ARG0 is the fortification flag; only fold it
   away when it is a side-effect-free integer.  */
10515 if (!validate_arg (arg0, INTEGER_TYPE)
10516 || TREE_SIDE_EFFECTS (arg0))
10519 return fold_builtin_printf (loc, fndecl,
10520 arg1, NULL_TREE, ignore, fcode);
10523 case BUILT_IN_FPRINTF:
10524 case BUILT_IN_FPRINTF_UNLOCKED:
10525 case BUILT_IN_VFPRINTF:
10526 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10535 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10536 and ARG2. IGNORE is true if the result of the function call is ignored.
10537 This function returns NULL_TREE if no simplification was possible. */
10540 fold_builtin_3 (location_t loc, tree fndecl,
10541 tree arg0, tree arg1, tree arg2, bool ignore)
/* TYPE is the return type of the builtin being folded.  */
10543 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10544 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Dispatch on the builtin's function code; each case returns a folded
   tree or yields no simplification (NULL_TREE).  */
10548 CASE_FLT_FN (BUILT_IN_SINCOS):
10549 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10551 CASE_FLT_FN (BUILT_IN_FMA):
10552 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10555 CASE_FLT_FN (BUILT_IN_REMQUO):
10556 if (validate_arg (arg0, REAL_TYPE)
10557 && validate_arg(arg1, REAL_TYPE)
10558 && validate_arg(arg2, POINTER_TYPE))
10559 return do_mpfr_remquo (arg0, arg1, arg2);
10562 case BUILT_IN_MEMSET:
10563 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10565 case BUILT_IN_BCOPY:
/* bcopy (src, dst, len) has its pointer arguments swapped relative to
   the mem* functions, hence arg1/arg0 order below.  */
10566 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10567 void_type_node, true, /*endp=*/3);
10569 case BUILT_IN_MEMCPY:
10570 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10571 type, ignore, /*endp=*/0);
10573 case BUILT_IN_MEMPCPY:
10574 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10575 type, ignore, /*endp=*/1);
10577 case BUILT_IN_MEMMOVE:
10578 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10579 type, ignore, /*endp=*/3);
10581 case BUILT_IN_STRNCAT:
10582 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10584 case BUILT_IN_STRNCPY:
10585 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10587 case BUILT_IN_STRNCMP:
10588 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10590 case BUILT_IN_MEMCHR:
10591 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10593 case BUILT_IN_BCMP:
10594 case BUILT_IN_MEMCMP:
/* Fixed: dropped a stray second semicolon that formed an empty
   statement after this return.  */
10595 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10597 case BUILT_IN_SPRINTF:
10598 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10600 case BUILT_IN_SNPRINTF:
10601 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10603 case BUILT_IN_STRCPY_CHK:
10604 case BUILT_IN_STPCPY_CHK:
10605 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10608 case BUILT_IN_STRCAT_CHK:
10609 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10611 case BUILT_IN_PRINTF_CHK:
10612 case BUILT_IN_VPRINTF_CHK:
/* ARG0 is the fortification flag; require a side-effect-free integer
   before folding it away.  */
10613 if (!validate_arg (arg0, INTEGER_TYPE)
10614 || TREE_SIDE_EFFECTS (arg0))
10617 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10620 case BUILT_IN_FPRINTF:
10621 case BUILT_IN_FPRINTF_UNLOCKED:
10622 case BUILT_IN_VFPRINTF:
10623 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10626 case BUILT_IN_FPRINTF_CHK:
10627 case BUILT_IN_VFPRINTF_CHK:
/* Here ARG1 is the flag (ARG0 is the stream).  */
10628 if (!validate_arg (arg1, INTEGER_TYPE)
10629 || TREE_SIDE_EFFECTS (arg1))
10632 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10641 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10642 ARG2, and ARG3. IGNORE is true if the result of the function call is
10643 ignored. This function returns NULL_TREE if no simplification was
10647 fold_builtin_4 (location_t loc, tree fndecl,
10648 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10650 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Only the object-size-checked (_chk) builtins and snprintf take four
   fixed arguments; everything else is handled by the lower-arity
   folders or fold_builtin_varargs.  */
10654 case BUILT_IN_MEMCPY_CHK:
10655 case BUILT_IN_MEMPCPY_CHK:
10656 case BUILT_IN_MEMMOVE_CHK:
10657 case BUILT_IN_MEMSET_CHK:
10658 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10660 DECL_FUNCTION_CODE (fndecl));
10662 case BUILT_IN_STRNCPY_CHK:
10663 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10665 case BUILT_IN_STRNCAT_CHK:
10666 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10668 case BUILT_IN_SNPRINTF:
10669 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10671 case BUILT_IN_FPRINTF_CHK:
10672 case BUILT_IN_VFPRINTF_CHK:
/* ARG1 is the fortification flag; only fold when it is a
   side-effect-free integer.  */
10673 if (!validate_arg (arg1, INTEGER_TYPE)
10674 || TREE_SIDE_EFFECTS (arg1))
10677 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10687 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10688 arguments, where NARGS <= 4. IGNORE is true if the result of the
10689 function call is ignored. This function returns NULL_TREE if no
10690 simplification was possible. Note that this only folds builtins with
10691 fixed argument patterns. Foldings that do varargs-to-varargs
10692 transformations, or that match calls with more than 4 arguments,
10693 need to be handled with fold_builtin_varargs instead. */
10695 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10698 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10700 tree ret = NULL_TREE;
/* Dispatch to the arity-specific folder (0..4 fixed arguments).  */
10705 ret = fold_builtin_0 (loc, fndecl, ignore);
10708 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10711 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10714 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10717 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
/* Wrap a successful fold in a NOP_EXPR carrying the call's location and
   set TREE_NO_WARNING so later passes do not warn about the
   replacement expression.  */
10725 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10726 SET_EXPR_LOCATION (ret, loc);
10727 TREE_NO_WARNING (ret) = 1;
10733 /* Builtins with folding operations that operate on "..." arguments
10734 need special handling; we need to store the arguments in a convenient
10735 data structure before attempting any folding. Fortunately there are
10736 only a few builtins that fall into this category. FNDECL is the
10737 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10738 result of the function call is ignored. */
10741 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10742 bool ignore ATTRIBUTE_UNUSED)
10744 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10745 tree ret = NULL_TREE;
/* Only these variadic builtins have varargs-aware folders; they take
   the whole CALL_EXPR rather than individual argument trees.  */
10749 case BUILT_IN_SPRINTF_CHK:
10750 case BUILT_IN_VSPRINTF_CHK:
10751 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10754 case BUILT_IN_SNPRINTF_CHK:
10755 case BUILT_IN_VSNPRINTF_CHK:
10756 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10759 case BUILT_IN_FPCLASSIFY:
10760 ret = fold_builtin_fpclassify (loc, exp);
/* As in fold_builtin_n: wrap the result in a located NOP_EXPR and
   suppress warnings on the replacement.  */
10768 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10769 SET_EXPR_LOCATION (ret, loc);
10770 TREE_NO_WARNING (ret) = 1;
10776 /* Return true if FNDECL shouldn't be folded right now.
10777 If a built-in function has an inline attribute always_inline
10778 wrapper, defer folding it after always_inline functions have
10779 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10780 might not be performed. */
10783 avoid_folding_inline_builtin (tree fndecl)
/* All four conditions must hold: declared inline, exempt from inline
   limits (i.e. always_inline semantics), always_inline bodies not yet
   inlined into the current function, and the attribute present.  */
10785 return (DECL_DECLARED_INLINE_P (fndecl)
10786 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10788 && !cfun->always_inline_functions_inlined
10789 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10792 /* A wrapper function for builtin folding that prevents warnings for
10793 "statement without effect" and the like, caused by removing the
10794 call node earlier than the warning is generated. */
10797 fold_call_expr (location_t loc, tree exp, bool ignore)
10799 tree ret = NULL_TREE;
10800 tree fndecl = get_callee_fndecl (exp);
/* Only attempt folding for direct calls to builtins whose argument
   list is final.  */
10802 && TREE_CODE (fndecl) == FUNCTION_DECL
10803 && DECL_BUILT_IN (fndecl)
10804 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10805 yet. Defer folding until we see all the arguments
10806 (after inlining). */
10807 && !CALL_EXPR_VA_ARG_PACK (exp))
10809 int nargs = call_expr_nargs (exp);
10811 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10812 instead last argument is __builtin_va_arg_pack (). Defer folding
10813 even in that case, until arguments are finalized. */
10814 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10816 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10818 && TREE_CODE (fndecl2) == FUNCTION_DECL
10819 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10820 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
/* Defer always_inline fortification wrappers; see
   avoid_folding_inline_builtin.  */
10824 if (avoid_folding_inline_builtin (fndecl))
/* Machine-specific builtins are folded by the target hook.  */
10827 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10828 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10829 CALL_EXPR_ARGP (exp), ignore);
/* Fixed-arity calls go through fold_builtin_n; longer or variadic
   calls through fold_builtin_varargs.  */
10832 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10834 tree *args = CALL_EXPR_ARGP (exp);
10835 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10838 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10846 /* Conveniently construct a function call expression. FNDECL names the
10847 function to be called and N arguments are passed in the array
10851 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10853 tree fntype = TREE_TYPE (fndecl);
/* Take the function's address and hand the call to
   fold_builtin_call_array, which folds builtins where possible before
   building a plain CALL_EXPR.  */
10854 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10856 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10859 /* Conveniently construct a function call expression. FNDECL names the
10860 function to be called and the arguments are passed in the vector
/* Thin wrapper: unpack the GC vector into (length, address) form.  */
10864 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10866 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10867 VEC_address (tree, vec));
10871 /* Conveniently construct a function call expression. FNDECL names the
10872 function to be called, N is the number of arguments, and the "..."
10873 parameters are the argument expressions. */
10876 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
/* Collect the N variadic tree arguments into a stack array, then defer
   to the array-based builder.  */
10879 tree *argarray = XALLOCAVEC (tree, n);
10883 for (i = 0; i < n; i++)
10884 argarray[i] = va_arg (ap, tree);
10886 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10889 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10890 varargs macros aren't supported by all bootstrap compilers. */
10893 build_call_expr (tree fndecl, int n, ...)
/* Same argument-gathering dance as build_call_expr_loc, but with no
   source location attached.  */
10896 tree *argarray = XALLOCAVEC (tree, n);
10900 for (i = 0; i < n; i++)
10901 argarray[i] = va_arg (ap, tree);
10903 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10906 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10907 N arguments are passed in the array ARGARRAY. */
10910 fold_builtin_call_array (location_t loc, tree type,
10915 tree ret = NULL_TREE;
/* Folding is only attempted for direct calls (&fndecl) to builtins;
   anything else just gets a plain CALL_EXPR built.  */
10918 if (TREE_CODE (fn) == ADDR_EXPR)
10920 tree fndecl = TREE_OPERAND (fn, 0);
10921 if (TREE_CODE (fndecl) == FUNCTION_DECL
10922 && DECL_BUILT_IN (fndecl))
10924 /* If last argument is __builtin_va_arg_pack (), arguments to this
10925 function are not finalized yet. Defer folding until they are. */
10926 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10928 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10930 && TREE_CODE (fndecl2) == FUNCTION_DECL
10931 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10932 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10933 return build_call_array_loc (loc, type, fn, n, argarray);
/* Defer always_inline fortification wrappers.  */
10935 if (avoid_folding_inline_builtin (fndecl))
10936 return build_call_array_loc (loc, type, fn, n, argarray);
10937 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10939 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10943 return build_call_array_loc (loc, type, fn, n, argarray);
10945 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10947 /* First try the transformations that don't require consing up
10949 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10954 /* If we got this far, we need to build an exp. */
10955 exp = build_call_array_loc (loc, type, fn, n, argarray);
10956 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10957 return ret ? ret : exp;
/* Not a foldable builtin call: build the CALL_EXPR unchanged.  */
10961 return build_call_array_loc (loc, type, fn, n, argarray);
10964 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10965 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10966 of arguments in ARGS to be omitted. OLDNARGS is the number of
10967 elements in ARGS. */
10970 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10971 int skip, tree fndecl, int n, va_list newargs)
10973 int nargs = oldnargs - skip + n;
/* Gather the N fresh arguments first, then the surviving tail of the
   old argument list.  NOTE(review): the branch selecting between the
   allocated buffer and the 'args + skip' alias (presumably on n != 0)
   is elided in this listing -- confirm against the full source.  */
10980 buffer = XALLOCAVEC (tree, nargs);
10981 for (i = 0; i < n; i++)
10982 buffer[i] = va_arg (newargs, tree);
10983 for (j = skip; j < oldnargs; j++, i++)
10984 buffer[i] = args[j];
10987 buffer = args + skip;
10989 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10992 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10993 list ARGS along with N new arguments specified as the "..."
10994 parameters. SKIP is the number of arguments in ARGS to be omitted.
10995 OLDNARGS is the number of elements in ARGS. */
10998 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10999 int skip, tree fndecl, int n, ...)
/* Variadic front end for rewrite_call_expr_valist.  */
11005 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11011 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11012 along with N new arguments specified as the "..." parameters. SKIP
11013 is the number of arguments in EXP to be omitted. This function is used
11014 to do varargs-to-varargs transformations. */
11017 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
/* Extract the CALL_EXPR's argument count/array and defer to the
   valist-based rewriter.  */
11023 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11024 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11030 /* Validate a single argument ARG against a tree code CODE representing
/* POINTER_TYPE and INTEGER_TYPE requests are satisfied by any
   pointer-like or any integral type respectively; every other code must
   match the argument's type code exactly.  */
11034 validate_arg (const_tree arg, enum tree_code code)
11038 else if (code == POINTER_TYPE)
11039 return POINTER_TYPE_P (TREE_TYPE (arg));
11040 else if (code == INTEGER_TYPE)
11041 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11042 return code == TREE_CODE (TREE_TYPE (arg));
11045 /* This function validates the types of a function call argument list
11046 against a specified list of tree_codes. If the last specifier is a 0,
11047 that represents an ellipses, otherwise the last specifier must be a
11050 This is the GIMPLE version of validate_arglist. Eventually we want to
11051 completely convert builtins.c to work from GIMPLEs and the tree based
11052 validate_arglist will then be removed. */
11055 validate_gimple_arglist (const_gimple call, ...)
11057 enum tree_code code;
11063 va_start (ap, call);
/* Walk the variadic specifier list in parallel with the call's
   arguments; tree codes are passed through varargs as int.  */
11068 code = (enum tree_code) va_arg (ap, int);
11072 /* This signifies an ellipses, any further arguments are all ok. */
11076 /* This signifies an endlink, if no arguments remain, return
11077 true, otherwise return false. */
11078 res = (i == gimple_call_num_args (call));
11081 /* If no parameters remain or the parameter's code does not
11082 match the specified code, return false. Otherwise continue
11083 checking any remaining arguments. */
11084 arg = gimple_call_arg (call, i++);
11085 if (!validate_arg (arg, code))
11092 /* We need gotos here since we can only have one VA_CLOSE in a
11100 /* This function validates the types of a function call argument list
11101 against a specified list of tree_codes. If the last specifier is a 0,
11102 that represents an ellipses, otherwise the last specifier must be a
11106 validate_arglist (const_tree callexpr, ...)
11108 enum tree_code code;
11111 const_call_expr_arg_iterator iter;
11114 va_start (ap, callexpr);
11115 init_const_call_expr_arg_iterator (callexpr, &iter);
/* Walk the variadic specifier list in parallel with the CALL_EXPR's
   arguments; tree codes are passed through varargs as int.  */
11119 code = (enum tree_code) va_arg (ap, int);
11123 /* This signifies an ellipses, any further arguments are all ok. */
11127 /* This signifies an endlink, if no arguments remain, return
11128 true, otherwise return false. */
11129 res = !more_const_call_expr_args_p (&iter);
11132 /* If no parameters remain or the parameter's code does not
11133 match the specified code, return false. Otherwise continue
11134 checking any remaining arguments. */
11135 arg = next_const_call_expr_arg (&iter);
11136 if (!validate_arg (arg, code))
11143 /* We need gotos here since we can only have one VA_CLOSE in a
11151 /* Default target-specific builtin expander that does nothing. */
11154 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11155 rtx target ATTRIBUTE_UNUSED,
11156 rtx subtarget ATTRIBUTE_UNUSED,
11157 enum machine_mode mode ATTRIBUTE_UNUSED,
11158 int ignore ATTRIBUTE_UNUSED)
/* Default hook for targetm.expand_builtin: all parameters unused, the
   (elided) body performs no expansion.  */
11163 /* Returns true is EXP represents data that would potentially reside
11164 in a readonly section. */
11167 readonly_data_expr (tree exp)
/* Only address expressions can point at read-only data.  */
11171 if (TREE_CODE (exp) != ADDR_EXPR)
11174 exp = get_base_address (TREE_OPERAND (exp, 0));
11178 /* Make sure we call decl_readonly_section only for trees it
11179 can handle (since it returns true for everything it doesn't
11181 if (TREE_CODE (exp) == STRING_CST
11182 || TREE_CODE (exp) == CONSTRUCTOR
11183 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11184 return decl_readonly_section (exp, 0);
11189 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11190 to the call, and TYPE is its return type.
11192 Return NULL_TREE if no simplification was possible, otherwise return the
11193 simplified form of the call as a tree.
11195 The simplified form may be a constant or other expression which
11196 computes the same value, but in a more efficient manner (including
11197 calls to other builtin functions).
11199 The call may contain arguments which need to be evaluated, but
11200 which are not useful to determine the result of the call. In
11201 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11202 COMPOUND_EXPR will be an argument which must be evaluated.
11203 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11204 COMPOUND_EXPR in the chain will contain the tree for the simplified
11205 form of the builtin function call. */
11208 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11210 if (!validate_arg (s1, POINTER_TYPE)
11211 || !validate_arg (s2, POINTER_TYPE))
11216 const char *p1, *p2;
/* c_getstr yields the constant C string behind a tree, or NULL.  */
11218 p2 = c_getstr (s2);
11222 p1 = c_getstr (s1);
/* Both strings constant: compute the result at compile time with the
   host strstr.  */
11225 const char *r = strstr (p1, p2);
11229 return build_int_cst (TREE_TYPE (s1), 0);
11231 /* Return an offset into the constant string argument. */
11232 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11233 s1, size_int (r - p1));
11234 return fold_convert_loc (loc, type, tem);
11237 /* The argument is const char *, and the result is char *, so we need
11238 a type conversion here to avoid a warning. */
11240 return fold_convert_loc (loc, type, s1);
/* NOTE(review): guards for NULL c_getstr results and for p2 being a
   single character appear elided in this listing -- the strchr
   transformation below presumably requires strlen (p2) == 1.  */
11245 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11249 /* New argument list transforming strstr(s1, s2) to
11250 strchr(s1, s2[0]). */
11251 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11255 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11256 the call, and TYPE is its return type.
11258 Return NULL_TREE if no simplification was possible, otherwise return the
11259 simplified form of the call as a tree.
11261 The simplified form may be a constant or other expression which
11262 computes the same value, but in a more efficient manner (including
11263 calls to other builtin functions).
11265 The call may contain arguments which need to be evaluated, but
11266 which are not useful to determine the result of the call. In
11267 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11268 COMPOUND_EXPR will be an argument which must be evaluated.
11269 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11270 COMPOUND_EXPR in the chain will contain the tree for the simplified
11271 form of the builtin function call. */
11274 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11276 if (!validate_arg (s1, POINTER_TYPE)
11277 || !validate_arg (s2, INTEGER_TYPE))
/* Can only fold when the character argument is a compile-time
   constant.  */
11283 if (TREE_CODE (s2) != INTEGER_CST)
11286 p1 = c_getstr (s1);
/* target_char_cast converts S2 to a host char, failing (nonzero) when
   it does not fit a target character.  */
11293 if (target_char_cast (s2, &c))
11296 r = strchr (p1, c);
/* Character not found: fold to a null pointer of S1's type.  */
11299 return build_int_cst (TREE_TYPE (s1), 0);
11301 /* Return an offset into the constant string argument. */
11302 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11303 s1, size_int (r - p1));
11304 return fold_convert_loc (loc, type, tem);
11310 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11311 the call, and TYPE is its return type.
11313 Return NULL_TREE if no simplification was possible, otherwise return the
11314 simplified form of the call as a tree.
11316 The simplified form may be a constant or other expression which
11317 computes the same value, but in a more efficient manner (including
11318 calls to other builtin functions).
11320 The call may contain arguments which need to be evaluated, but
11321 which are not useful to determine the result of the call. In
11322 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11323 COMPOUND_EXPR will be an argument which must be evaluated.
11324 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11325 COMPOUND_EXPR in the chain will contain the tree for the simplified
11326 form of the builtin function call. */
11329 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11331 if (!validate_arg (s1, POINTER_TYPE)
11332 || !validate_arg (s2, INTEGER_TYPE))
/* Folding requires a compile-time-constant character argument.  */
11339 if (TREE_CODE (s2) != INTEGER_CST)
11342 p1 = c_getstr (s1);
11349 if (target_char_cast (s2, &c))
/* Constant string: evaluate with the host strrchr.  */
11352 r = strrchr (p1, c)
11355 return build_int_cst (TREE_TYPE (s1), 0);
11357 /* Return an offset into the constant string argument. */
11358 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11359 s1, size_int (r - p1));
11360 return fold_convert_loc (loc, type, tem);
/* Non-constant string: only strrchr (s1, 0) can be simplified, into a
   strchr call for the terminating NUL.  */
11363 if (! integer_zerop (s2))
11366 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11370 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11371 return build_call_expr_loc (loc, fn, 2, s1, s2);
11375 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11376 to the call, and TYPE is its return type.
11378 Return NULL_TREE if no simplification was possible, otherwise return the
11379 simplified form of the call as a tree.
11381 The simplified form may be a constant or other expression which
11382 computes the same value, but in a more efficient manner (including
11383 calls to other builtin functions).
11385 The call may contain arguments which need to be evaluated, but
11386 which are not useful to determine the result of the call. In
11387 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11388 COMPOUND_EXPR will be an argument which must be evaluated.
11389 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11390 COMPOUND_EXPR in the chain will contain the tree for the simplified
11391 form of the builtin function call. */
11394 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11396 if (!validate_arg (s1, POINTER_TYPE)
11397 || !validate_arg (s2, POINTER_TYPE))
11402 const char *p1, *p2;
11404 p2 = c_getstr (s2);
11408 p1 = c_getstr (s1);
/* Both strings constant: compute the result with the host strpbrk.  */
11411 const char *r = strpbrk (p1, p2);
11415 return build_int_cst (TREE_TYPE (s1), 0);
11417 /* Return an offset into the constant string argument. */
11418 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11419 s1, size_int (r - p1));
11420 return fold_convert_loc (loc, type, tem);
11424 /* strpbrk(x, "") == NULL.
11425 Evaluate and ignore s1 in case it had side-effects. */
11426 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
/* NOTE(review): the single-character check on p2 appears elided here;
   presumably the strchr transformation below requires
   strlen (p2) == 1.  */
11429 return NULL_TREE; /* Really call strpbrk. */
11431 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11435 /* New argument list transforming strpbrk(s1, s2) to
11436 strchr(s1, s2[0]). */
11437 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11441 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11444 Return NULL_TREE if no simplification was possible, otherwise return the
11445 simplified form of the call as a tree.
11447 The simplified form may be a constant or other expression which
11448 computes the same value, but in a more efficient manner (including
11449 calls to other builtin functions).
11451 The call may contain arguments which need to be evaluated, but
11452 which are not useful to determine the result of the call. In
11453 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11454 COMPOUND_EXPR will be an argument which must be evaluated.
11455 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11456 COMPOUND_EXPR in the chain will contain the tree for the simplified
11457 form of the builtin function call. */
11460 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11462 if (!validate_arg (dst, POINTER_TYPE)
11463 || !validate_arg (src, POINTER_TYPE))
11467 const char *p = c_getstr (src);
11469 /* If the string length is zero, return the dst parameter. */
11470 if (p && *p == '\0')
/* Only expand inline when optimizing for speed; the expansion trades
   code size for avoiding a strcat call.  */
11473 if (optimize_insn_for_speed_p ())
11475 /* See if we can store by pieces into (dst + strlen(dst)). */
11477 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11478 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
/* Both helper builtins must be available to rewrite the call.  */
11480 if (!strlen_fn || !strcpy_fn)
11483 /* If we don't have a movstr we don't want to emit an strcpy
11484 call. We have to do that if the length of the source string
11485 isn't computable (in that case we can use memcpy probably
11486 later expanding to a sequence of mov instructions). If we
11487 have movstr instructions we can emit strcpy calls. */
11490 tree len = c_strlen (src, 1);
11491 if (! len || TREE_SIDE_EFFECTS (len))
11495 /* Stabilize the argument list. */
11496 dst = builtin_save_expr (dst);
11498 /* Create strlen (dst). */
11499 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11500 /* Create (dst p+ strlen (dst)). */
11502 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11503 TREE_TYPE (dst), dst, newdst);
11504 newdst = builtin_save_expr (newdst);
/* Emit strcpy (dst + strlen (dst), src) and yield DST as the value of
   the whole expression, matching strcat's return value.  */
11506 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11507 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11513 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11514 arguments to the call.
11516 Return NULL_TREE if no simplification was possible, otherwise return the
11517 simplified form of the call as a tree.
11519 The simplified form may be a constant or other expression which
11520 computes the same value, but in a more efficient manner (including
11521 calls to other builtin functions).
11523 The call may contain arguments which need to be evaluated, but
11524 which are not useful to determine the result of the call. In
11525 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11526 COMPOUND_EXPR will be an argument which must be evaluated.
11527 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11528 COMPOUND_EXPR in the chain will contain the tree for the simplified
11529 form of the builtin function call. */
11532 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11534 if (!validate_arg (dst, POINTER_TYPE)
11535 || !validate_arg (src, POINTER_TYPE)
11536 || !validate_arg (len, INTEGER_TYPE))
11540 const char *p = c_getstr (src);
11542 /* If the requested length is zero, or the src parameter string
11543 length is zero, return the dst parameter. */
11544 if (integer_zerop (len) || (p && *p == '\0'))
11545 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11547 /* If the requested len is greater than or equal to the string
11548 length, call strcat. */
11549 if (TREE_CODE (len) == INTEGER_CST && p
11550 && compare_tree_int (len, strlen (p)) >= 0)
11552 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11554 /* If the replacement _DECL isn't initialized, don't do the
/* (transformation -- continuation of the comment above is elided in
   this sampled listing.)  */
11559 return build_call_expr_loc (loc, fn, 2, dst, src);
11565 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11568 Return NULL_TREE if no simplification was possible, otherwise return the
11569 simplified form of the call as a tree.
11571 The simplified form may be a constant or other expression which
11572 computes the same value, but in a more efficient manner (including
11573 calls to other builtin functions).
11575 The call may contain arguments which need to be evaluated, but
11576 which are not useful to determine the result of the call. In
11577 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11578 COMPOUND_EXPR will be an argument which must be evaluated.
11579 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11580 COMPOUND_EXPR in the chain will contain the tree for the simplified
11581 form of the builtin function call. */
11584 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11586 if (!validate_arg (s1, POINTER_TYPE)
11587 || !validate_arg (s2, POINTER_TYPE))
11591 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11593 /* If both arguments are constants, evaluate at compile-time. */
/* Host strspn is safe here: both operands are compile-time string
   literals.  */
11596 const size_t r = strspn (p1, p2);
11597 return size_int (r);
11600 /* If either argument is "", return NULL_TREE. */
11601 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11602 /* Evaluate and ignore both arguments in case either one has
/* (Fold to (size_t) 0 while keeping both argument evaluations.)  */
11604 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11610 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11613 Return NULL_TREE if no simplification was possible, otherwise return the
11614 simplified form of the call as a tree.
11616 The simplified form may be a constant or other expression which
11617 computes the same value, but in a more efficient manner (including
11618 calls to other builtin functions).
11620 The call may contain arguments which need to be evaluated, but
11621 which are not useful to determine the result of the call. In
11622 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11623 COMPOUND_EXPR will be an argument which must be evaluated.
11624 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11625 COMPOUND_EXPR in the chain will contain the tree for the simplified
11626 form of the builtin function call. */
11629 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11631 if (!validate_arg (s1, POINTER_TYPE)
11632 || !validate_arg (s2, POINTER_TYPE))
11636 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11638 /* If both arguments are constants, evaluate at compile-time. */
/* Both operands are literals: run strcspn on the host.  */
11641 const size_t r = strcspn (p1, p2);
11642 return size_int (r);
11645 /* If the first argument is "", return NULL_TREE. */
11646 if (p1 && *p1 == '\0')
11648 /* Evaluate and ignore argument s2 in case it has
11650 return omit_one_operand_loc (loc, size_type_node,
11651 size_zero_node, s2);
11654 /* If the second argument is "", return __builtin_strlen(s1). */
11655 if (p2 && *p2 == '\0')
11657 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11659 /* If the replacement _DECL isn't initialized, don't do the
11664 return build_call_expr_loc (loc, fn, 1, s1);
11670 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11671 to the call. IGNORE is true if the value returned
11672 by the builtin will be ignored. UNLOCKED is true if this is
11673 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11674 the known length of the string. Return NULL_TREE if no simplification
11678 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11679 bool ignore, bool unlocked, tree len)
11681 /* If we're using an unlocked function, assume the other unlocked
11682 functions exist explicitly. */
11683 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11684 : implicit_built_in_decls[BUILT_IN_FPUTC]
11685 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11686 : implicit_built_in_decls[BUILT_IN_FWRITE];
11688 /* If the return value is used, don't do the transformation. */
11692 /* Verify the arguments in the original call. */
11693 if (!validate_arg (arg0, POINTER_TYPE)
11694 || !validate_arg (arg1, POINTER_TYPE))
11698 len = c_strlen (arg0, 0);
11700 /* Get the length of the string passed to fputs. If the length
11701 can't be determined, punt. */
11703 || TREE_CODE (len) != INTEGER_CST)
/* Dispatch on the compile-time string length: 0 -> drop the call,
   1 -> fputc, >1 -> fwrite.  */
11706 switch (compare_tree_int (len, 1))
11708 case -1: /* length is 0, delete the call entirely . */
11709 return omit_one_operand_loc (loc, integer_type_node,
11710 integer_zero_node, arg1);;
/* NOTE(review): stray second ';' on the line above -- a harmless
   empty statement, but worth cleaning up.  */
11712 case 0: /* length is 1, call fputc. */
11714 const char *p = c_getstr (arg0);
11719 return build_call_expr_loc (loc, fn_fputc, 2,
11720 build_int_cst (NULL_TREE, p[0]), arg1);
11726 case 1: /* length is greater than 1, call fwrite. */
11728 /* If optimizing for size keep fputs. */
11729 if (optimize_function_for_size_p (cfun))
11731 /* New argument list transforming fputs(string, stream) to
11732 fwrite(string, 1, len, stream). */
11734 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11735 size_one_node, len, arg1);
11740 gcc_unreachable ();
11745 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11746 produced. False otherwise. This is done so that we don't output the error
11747 or warning twice or three times. */
11750 fold_builtin_next_arg (tree exp, bool va_start_p)
11752 tree fntype = TREE_TYPE (current_function_decl);
11753 int nargs = call_expr_nargs (exp);
/* va_start/next_arg only make sense in a varargs function.  */
11756 if (!stdarg_p (fntype))
11758 error ("%<va_start%> used in function with fixed args");
11764 if (va_start_p && (nargs != 2))
11766 error ("wrong number of arguments to function %<va_start%>");
11769 arg = CALL_EXPR_ARG (exp, 1);
11771 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11772 when we checked the arguments and if needed issued a warning. */
11777 /* Evidently an out of date version of <stdarg.h>; can't validate
11778 va_start's second argument, but can still work as intended. */
11779 warning (0, "%<__builtin_next_arg%> called without an argument")
11782 else if (nargs > 1)
11784 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11787 arg = CALL_EXPR_ARG (exp, 0);
11790 if (TREE_CODE (arg) == SSA_NAME)
11791 arg = SSA_NAME_VAR (arg);
11793 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11794 or __builtin_next_arg (0) the first time we see it, after checking
11795 the arguments and if needed issuing a warning. */
11796 if (!integer_zerop (arg))
11798 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11800 /* Strip off all nops for the sake of the comparison. This
11801 is not quite the same as STRIP_NOPS. It does more.
11802 We must also strip off INDIRECT_EXPR for C++ reference
11804 while (CONVERT_EXPR_P (arg)
11805 || TREE_CODE (arg) == INDIRECT_REF)
11806 arg = TREE_OPERAND (arg, 0);
11807 if (arg != last_parm)
11809 /* FIXME: Sometimes with the tree optimizers we are handed
11810 something other than the last argument even though the user
11811 used the last argument. We just warn and set the arg to be
11812 the last argument, so we may still generate wrong code because of
11814 warning (0, "second parameter of %<va_start%> not last named argument");
11817 /* Undefined by C99 7.15.1.4p4 (va_start):
11818 "If the parameter parmN is declared with the register storage
11819 class, with a function or array type, or with a type that is
11820 not compatible with the type that results after application of
11821 the default argument promotions, the behavior is undefined."
11823 else if (DECL_REGISTER (arg))
11824 warning (0, "undefined behaviour when second parameter of "
11825 "%<va_start%> is declared with %<register%> storage");
11827 /* We want to verify the second parameter just once before the tree
11828 optimizers are run and then avoid keeping it in the tree,
11829 as otherwise we could warn even for correct code like:
11830 void foo (int i, ...)
11831 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11833 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11835 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11841 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11842 ORIG may be null if this is a 2-argument call. We don't attempt to
11843 simplify calls with more than 3 arguments.
11845 Return NULL_TREE if no simplification was possible, otherwise return the
11846 simplified form of the call as a tree. If IGNORED is true, it means that
11847 the caller does not use the returned value of the function. */
11850 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11851 tree orig, int ignored)
11854 const char *fmt_str = NULL;
11856 /* Verify the required arguments in the original call. We deal with two
11857 types of sprintf() calls: 'sprintf (str, fmt)' and
11858 'sprintf (dest, "%s", orig)'. */
11859 if (!validate_arg (dest, POINTER_TYPE)
11860 || !validate_arg (fmt, POINTER_TYPE))
11862 if (orig && !validate_arg (orig, POINTER_TYPE))
11865 /* Check whether the format is a literal string constant. */
11866 fmt_str = c_getstr (fmt);
11867 if (fmt_str == NULL)
11871 retval = NULL_TREE;
11873 if (!init_target_chars ())
11876 /* If the format doesn't contain % args or %%, use strcpy. */
11877 if (strchr (fmt_str, target_percent) == NULL)
11879 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11884 /* Don't optimize sprintf (buf, "abc", ptr++). */
11888 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11889 'format' is known to contain no % formats. */
11890 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
/* sprintf returns the number of characters written, which here is
   the format string's length.  */
11892 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11895 /* If the format is "%s", use strcpy if the result isn't used. */
11896 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11899 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11904 /* Don't crash on sprintf (str1, "%s"). */
11908 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
/* The return value is only representable when strlen (orig) folds
   to an integer constant.  */
11911 retval = c_strlen (orig, 1);
11912 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11915 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11918 if (call && retval)
/* Pair the strcpy call with the known return value via a
   COMPOUND_EXPR, converted to sprintf's declared return type.  */
11920 retval = fold_convert_loc
11921 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11923 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11929 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11930 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11931 attempt to simplify calls with more than 4 arguments.
11933 Return NULL_TREE if no simplification was possible, otherwise return the
11934 simplified form of the call as a tree. If IGNORED is true, it means that
11935 the caller does not use the returned value of the function. */
11938 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
11939 tree orig, int ignored)
11942 const char *fmt_str = NULL;
11943 unsigned HOST_WIDE_INT destlen;
11945 /* Verify the required arguments in the original call. We deal with two
11946 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
11947 'snprintf (dest, cst, "%s", orig)'. */
11948 if (!validate_arg (dest, POINTER_TYPE)
11949 || !validate_arg (destsize, INTEGER_TYPE)
11950 || !validate_arg (fmt, POINTER_TYPE))
11952 if (orig && !validate_arg (orig, POINTER_TYPE))
/* DESTSIZE must be a compile-time constant for any of the folds
   below to be safe.  */
11955 if (!host_integerp (destsize, 1))
11958 /* Check whether the format is a literal string constant. */
11959 fmt_str = c_getstr (fmt);
11960 if (fmt_str == NULL)
11964 retval = NULL_TREE;
11966 if (!init_target_chars ())
11969 destlen = tree_low_cst (destsize, 1);
11971 /* If the format doesn't contain % args or %%, use strcpy. */
11972 if (strchr (fmt_str, target_percent) == NULL)
11974 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11975 size_t len = strlen (fmt_str);
11977 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
11981 /* We could expand this as
11982 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
11984 memcpy (str, fmt_with_nul_at_cstm1, cst);
11985 but in the former case that might increase code size
11986 and in the latter case grow .rodata section too much.
11987 So punt for now. */
11988 if (len >= destlen)
11994 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
11995 'format' is known to contain no % formats and
11996 strlen (fmt) < cst. */
11997 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12000 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
12003 /* If the format is "%s", use strcpy if the result isn't used. */
12004 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12006 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
12007 unsigned HOST_WIDE_INT origlen;
12009 /* Don't crash on snprintf (str1, cst, "%s"). */
12013 retval = c_strlen (orig, 1);
12014 if (!retval || !host_integerp (retval, 1))
12017 origlen = tree_low_cst (retval, 1);
12018 /* We could expand this as
12019 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12021 memcpy (str1, str2_with_nul_at_cstm1, cst);
12022 but in the former case that might increase code size
12023 and in the latter case grow .rodata section too much.
12024 So punt for now. */
12025 if (origlen >= destlen)
12028 /* Convert snprintf (str1, cst, "%s", str2) into
12029 strcpy (str1, str2) if strlen (str2) < cst. */
12033 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12036 retval = NULL_TREE;
12039 if (call && retval)
/* Combine the strcpy with the known return value, converted to
   snprintf's declared return type.  */
12041 tree fn = built_in_decls[BUILT_IN_SNPRINTF];
12042 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12043 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12049 /* Expand a call EXP to __builtin_object_size. */
12052 expand_builtin_object_size (tree exp)
12055 int object_size_type;
12056 tree fndecl = get_callee_fndecl (exp);
/* Both misuse cases below report an error and expand to a trap
   instead of a normal call.  */
12058 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12060 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12062 expand_builtin_trap ();
12066 ost = CALL_EXPR_ARG (exp, 1);
12069 if (TREE_CODE (ost) != INTEGER_CST
12070 || tree_int_cst_sgn (ost) < 0
12071 || compare_tree_int (ost, 3) > 0)
12073 error ("%Klast argument of %D is not integer constant between 0 and 3",
12075 expand_builtin_trap ();
12079 object_size_type = tree_low_cst (ost, 0);
/* Unknown object size: types 0 and 1 default to (size_t) -1,
   types 2 and 3 default to 0.  */
12081 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12084 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12085 FCODE is the BUILT_IN_* to use.
12086 Return NULL_RTX if we failed; the caller should emit a normal call,
12087 otherwise try to get the result in TARGET, if convenient (and in
12088 mode MODE if that's convenient). */
12091 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12092 enum built_in_function fcode)
12094 tree dest, src, len, size;
12096 if (!validate_arglist (exp,
12098 fcode == BUILT_IN_MEMSET_CHK
12099 ? INTEGER_TYPE : POINTER_TYPE,
12100 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12103 dest = CALL_EXPR_ARG (exp, 0);
12104 src = CALL_EXPR_ARG (exp, 1);
12105 len = CALL_EXPR_ARG (exp, 2);
12106 size = CALL_EXPR_ARG (exp, 3);
/* SIZE (the object size) must be a compile-time constant; otherwise
   leave the checked call alone.  */
12108 if (! host_integerp (size, 1))
12111 if (host_integerp (len, 1) || integer_all_onesp (size))
/* Compile-time detectable overflow: warn, keep the checked call.  */
12115 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12117 warning_at (tree_nonartificial_location (exp),
12118 0, "%Kcall to %D will always overflow destination buffer",
12119 exp, get_callee_fndecl (exp));
12124 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12125 mem{cpy,pcpy,move,set} is available. */
12128 case BUILT_IN_MEMCPY_CHK:
12129 fn = built_in_decls[BUILT_IN_MEMCPY];
12131 case BUILT_IN_MEMPCPY_CHK:
12132 fn = built_in_decls[BUILT_IN_MEMPCPY];
12134 case BUILT_IN_MEMMOVE_CHK:
12135 fn = built_in_decls[BUILT_IN_MEMMOVE];
12137 case BUILT_IN_MEMSET_CHK:
12138 fn = built_in_decls[BUILT_IN_MEMSET];
/* Drop the SIZE argument and expand the unchecked 3-argument
   variant, preserving the tail-call flag.  */
12147 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12148 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12149 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12150 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12152 else if (fcode == BUILT_IN_MEMSET_CHK
12156 unsigned int dest_align
12157 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12159 /* If DEST is not a pointer type, call the normal function. */
12160 if (dest_align == 0)
12163 /* If SRC and DEST are the same (and not volatile), do nothing. */
12164 if (operand_equal_p (src, dest, 0))
12168 if (fcode != BUILT_IN_MEMPCPY_CHK)
12170 /* Evaluate and ignore LEN in case it has side-effects. */
12171 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12172 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN rather than DEST.  */
12175 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12176 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12179 /* __memmove_chk special case. */
12180 if (fcode == BUILT_IN_MEMMOVE_CHK)
12182 unsigned int src_align
12183 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12185 if (src_align == 0)
12188 /* If src is categorized for a readonly section we can use
12189 normal __memcpy_chk. */
12190 if (readonly_data_expr (src))
12192 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12195 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12196 dest, src, len, size);
12197 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12198 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12199 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12206 /* Emit warning if a buffer overflow is detected at compile time. */
12209 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12213 location_t loc = tree_nonartificial_location (exp);
/* Pick out the length-like and object-size arguments; their call
   positions differ per builtin.  */
12217 case BUILT_IN_STRCPY_CHK:
12218 case BUILT_IN_STPCPY_CHK:
12219 /* For __strcat_chk the warning will be emitted only if overflowing
12220 by at least strlen (dest) + 1 bytes. */
12221 case BUILT_IN_STRCAT_CHK:
12222 len = CALL_EXPR_ARG (exp, 1);
12223 size = CALL_EXPR_ARG (exp, 2);
12226 case BUILT_IN_STRNCAT_CHK:
12227 case BUILT_IN_STRNCPY_CHK:
12228 len = CALL_EXPR_ARG (exp, 2);
12229 size = CALL_EXPR_ARG (exp, 3);
12231 case BUILT_IN_SNPRINTF_CHK:
12232 case BUILT_IN_VSNPRINTF_CHK:
12233 len = CALL_EXPR_ARG (exp, 1);
12234 size = CALL_EXPR_ARG (exp, 3);
12237 gcc_unreachable ();
/* SIZE == (size_t) -1 means the object size is unknown; nothing to
   check then.  */
12243 if (! host_integerp (size, 1) || integer_all_onesp (size))
12248 len = c_strlen (len, 1);
12249 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12252 else if (fcode == BUILT_IN_STRNCAT_CHK)
12254 tree src = CALL_EXPR_ARG (exp, 1);
12255 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12257 src = c_strlen (src, 1);
12258 if (! src || ! host_integerp (src, 1))
/* Source length unknown: overflow is only possible, not certain.  */
12260 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12261 exp, get_callee_fndecl (exp));
12264 else if (tree_int_cst_lt (src, size))
12267 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12270 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12271 exp, get_callee_fndecl (exp));
12274 /* Emit warning if a buffer overflow is detected at compile time
12275 in __sprintf_chk/__vsprintf_chk calls. */
12278 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12280 tree size, len, fmt;
12281 const char *fmt_str;
12282 int nargs = call_expr_nargs (exp);
12284 /* Verify the required arguments in the original call. */
12288 size = CALL_EXPR_ARG (exp, 2);
12289 fmt = CALL_EXPR_ARG (exp, 3);
/* An unknown object size ((size_t) -1) disables the check.  */
12291 if (! host_integerp (size, 1) || integer_all_onesp (size))
12294 /* Check whether the format is a literal string constant. */
12295 fmt_str = c_getstr (fmt);
12296 if (fmt_str == NULL)
12299 if (!init_target_chars ())
12302 /* If the format doesn't contain % args or %%, we know its size. */
12303 if (strchr (fmt_str, target_percent) == 0)
12304 len = build_int_cstu (size_type_node, strlen (fmt_str));
12305 /* If the format is "%s" and first ... argument is a string literal,
12307 else if (fcode == BUILT_IN_SPRINTF_CHK
12308 && strcmp (fmt_str, target_percent_s) == 0)
12314 arg = CALL_EXPR_ARG (exp, 4);
12315 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12318 len = c_strlen (arg, 1);
12319 if (!len || ! host_integerp (len, 1))
/* LEN here excludes the terminating NUL, so LEN == SIZE already
   overflows -- hence the non-strict comparison.  */
12325 if (! tree_int_cst_lt (len, size))
12326 warning_at (tree_nonartificial_location (exp),
12327 0, "%Kcall to %D will always overflow destination buffer",
12328 exp, get_callee_fndecl (exp));
12331 /* Emit warning if a free is called with address of a variable. */
12334 maybe_emit_free_warning (tree exp)
12336 tree arg = CALL_EXPR_ARG (exp, 0);
/* Only warn when the argument is syntactically the address of an
   object (&x, &a[i], ...).  */
12339 if (TREE_CODE (arg) != ADDR_EXPR)
12342 arg = get_base_address (TREE_OPERAND (arg, 0));
12343 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
/* Name the variable in the diagnostic when the base is one.  */
12346 if (SSA_VAR_P (arg))
12347 warning_at (tree_nonartificial_location (exp),
12348 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12350 warning_at (tree_nonartificial_location (exp),
12351 0, "%Kattempt to free a non-heap object", exp);
12354 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12358 fold_builtin_object_size (tree ptr, tree ost)
12360 unsigned HOST_WIDE_INT bytes;
12361 int object_size_type;
12363 if (!validate_arg (ptr, POINTER_TYPE)
12364 || !validate_arg (ost, INTEGER_TYPE)
/* OST must be a constant in [0, 3]; otherwise punt to runtime.  */
12369 if (TREE_CODE (ost) != INTEGER_CST
12370 || tree_int_cst_sgn (ost) < 0
12371 || compare_tree_int (ost, 3) > 0)
12374 object_size_type = tree_low_cst (ost, 0);
12376 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12377 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12378 and (size_t) 0 for types 2 and 3. */
12379 if (TREE_SIDE_EFFECTS (ptr))
12380 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12382 if (TREE_CODE (ptr) == ADDR_EXPR)
12384 bytes = compute_builtin_object_size (ptr, object_size_type);
/* Only fold when the computed size fits size_type_node.  */
12385 if (double_int_fits_to_tree_p (size_type_node,
12386 uhwi_to_double_int (bytes)))
12387 return build_int_cstu (size_type_node, bytes);
12389 else if (TREE_CODE (ptr) == SSA_NAME)
12391 /* If object size is not known yet, delay folding until
12392 later. Maybe subsequent passes will help determining
12394 bytes = compute_builtin_object_size (ptr, object_size_type);
/* The "unknown" sentinel (-1 for types 0/1, 0 for types 2/3) is
   deliberately not folded here.  */
12395 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12396 && double_int_fits_to_tree_p (size_type_node,
12397 uhwi_to_double_int (bytes)))
12398 return build_int_cstu (size_type_node, bytes);
12404 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12405 DEST, SRC, LEN, and SIZE are the arguments to the call.
12406 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12407 code of the builtin. If MAXLEN is not NULL, it is maximum length
12408 passed as third argument. */
12411 fold_builtin_memory_chk (location_t loc, tree fndecl,
12412 tree dest, tree src, tree len, tree size,
12413 tree maxlen, bool ignore,
12414 enum built_in_function fcode)
12418 if (!validate_arg (dest, POINTER_TYPE)
12419 || !validate_arg (src,
12420 (fcode == BUILT_IN_MEMSET_CHK
12421 ? INTEGER_TYPE : POINTER_TYPE))
12422 || !validate_arg (len, INTEGER_TYPE)
12423 || !validate_arg (size, INTEGER_TYPE))
12426 /* If SRC and DEST are the same (and not volatile), return DEST
12427 (resp. DEST+LEN for __mempcpy_chk). */
12428 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12430 if (fcode != BUILT_IN_MEMPCPY_CHK)
12431 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12435 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12437 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
/* The object size must be a compile-time constant to fold away the
   check.  */
12441 if (! host_integerp (size, 1))
12444 if (! integer_all_onesp (size))
12446 if (! host_integerp (len, 1))
12448 /* If LEN is not constant, try MAXLEN too.
12449 For MAXLEN only allow optimizing into non-_ocs function
12450 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12451 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12453 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12455 /* (void) __mempcpy_chk () can be optimized into
12456 (void) __memcpy_chk (). */
12457 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12461 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
/* Known-too-small destination: keep the checked call so the runtime
   check (or the warning path) still fires.  */
12469 if (tree_int_cst_lt (size, maxlen))
12474 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12475 mem{cpy,pcpy,move,set} is available. */
12478 case BUILT_IN_MEMCPY_CHK:
12479 fn = built_in_decls[BUILT_IN_MEMCPY];
12481 case BUILT_IN_MEMPCPY_CHK:
12482 fn = built_in_decls[BUILT_IN_MEMPCPY];
12484 case BUILT_IN_MEMMOVE_CHK:
12485 fn = built_in_decls[BUILT_IN_MEMMOVE];
12487 case BUILT_IN_MEMSET_CHK:
12488 fn = built_in_decls[BUILT_IN_MEMSET];
12497 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12500 /* Fold a call to the __st[rp]cpy_chk builtin.
12501 DEST, SRC, and SIZE are the arguments to the call.
12502 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12503 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12504 strings passed as second argument. */
12507 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12508 tree src, tree size,
12509 tree maxlen, bool ignore,
12510 enum built_in_function fcode)
12514 if (!validate_arg (dest, POINTER_TYPE)
12515 || !validate_arg (src, POINTER_TYPE)
12516 || !validate_arg (size, INTEGER_TYPE))
12519 /* If SRC and DEST are the same (and not volatile), return DEST. */
12520 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12521 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12523 if (! host_integerp (size, 1))
12526 if (! integer_all_onesp (size))
12528 len = c_strlen (src, 1);
12529 if (! len || ! host_integerp (len, 1))
12531 /* If LEN is not constant, try MAXLEN too.
12532 For MAXLEN only allow optimizing into non-_ocs function
12533 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12534 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12536 if (fcode == BUILT_IN_STPCPY_CHK)
12541 /* If return value of __stpcpy_chk is ignored,
12542 optimize into __strcpy_chk. */
12543 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12547 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12550 if (! len || TREE_SIDE_EFFECTS (len))
12553 /* If c_strlen returned something, but not a constant,
12554 transform __strcpy_chk into __memcpy_chk. */
12555 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* Copy LEN + 1 bytes so the terminating NUL is included.  */
12559 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12560 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12561 build_call_expr_loc (loc, fn, 4,
12562 dest, src, len, size));
12568 if (! tree_int_cst_lt (maxlen, size))
12572 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12573 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12574 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12578 return build_call_expr_loc (loc, fn, 2, dest, src);
12581 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12582 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12583 length passed as third argument. */
12586 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12587 tree len, tree size, tree maxlen)
12591 if (!validate_arg (dest, POINTER_TYPE)
12592 || !validate_arg (src, POINTER_TYPE)
12593 || !validate_arg (len, INTEGER_TYPE)
12594 || !validate_arg (size, INTEGER_TYPE))
12597 if (! host_integerp (size, 1))
12600 if (! integer_all_onesp (size))
12602 if (! host_integerp (len, 1))
12604 /* If LEN is not constant, try MAXLEN too.
12605 For MAXLEN only allow optimizing into non-_ocs function
12606 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12607 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* Destination provably too small: keep the checked variant.  */
12613 if (tree_int_cst_lt (size, maxlen))
12617 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12618 fn = built_in_decls[BUILT_IN_STRNCPY];
12622 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12625 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12626 are the arguments to the call. */
12629 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12630 tree src, tree size)
12635 if (!validate_arg (dest, POINTER_TYPE)
12636 || !validate_arg (src, POINTER_TYPE)
12637 || !validate_arg (size, INTEGER_TYPE))
12640 p = c_getstr (src);
12641 /* If the SRC parameter is "", return DEST. */
12642 if (p && *p == '\0')
12643 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only drop the check when the object size is the "unknown"
   sentinel (size_t) -1.  */
12645 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12648 /* If __builtin_strcat_chk is used, assume strcat is available. */
12649 fn = built_in_decls[BUILT_IN_STRCAT];
12653 return build_call_expr_loc (loc, fn, 2, dest, src);
12656 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12660 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12661 tree dest, tree src, tree len, tree size)
12666 if (!validate_arg (dest, POINTER_TYPE)
12667 || !validate_arg (src, POINTER_TYPE)
12668 || !validate_arg (size, INTEGER_TYPE)
12669 || !validate_arg (size, INTEGER_TYPE))
/* NOTE(review): the line above re-validates SIZE; it almost certainly
   was meant to be validate_arg (len, INTEGER_TYPE) -- as written, LEN
   is never type-checked.  Compare fold_builtin_strncpy_chk above.  */
12672 p = c_getstr (src);
12673 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12674 if (p && *p == '\0')
12675 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12676 else if (integer_zerop (len))
12677 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12679 if (! host_integerp (size, 1))
12682 if (! integer_all_onesp (size))
12684 tree src_len = c_strlen (src, 1);
12686 && host_integerp (src_len, 1)
12687 && host_integerp (len, 1)
12688 && ! tree_int_cst_lt (len, src_len)
12690 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12691 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12695 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12700 /* If __builtin_strncat_chk is used, assume strncat is available. */
12701 fn = built_in_decls[BUILT_IN_STRNCAT];
12705 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12708 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12709 Return NULL_TREE if a normal call should be emitted rather than
12710 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12711 or BUILT_IN_VSPRINTF_CHK. */
12714 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12715 enum built_in_function fcode)
12717 tree dest, size, len, fn, fmt, flag;
12718 const char *fmt_str;
12720 /* Verify the required arguments in the original call. */
12724 if (!validate_arg (dest, POINTER_TYPE))
12727 if (!validate_arg (flag, INTEGER_TYPE))
12730 if (!validate_arg (size, INTEGER_TYPE))
12733 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE must be a compile-time constant to compare against LEN below.  */
12736 if (! host_integerp (size, 1))
/* Format characters must be translated to the target charset first.  */
12741 if (!init_target_chars ())
12744 /* Check whether the format is a literal string constant. */
12745 fmt_str = c_getstr (fmt);
12746 if (fmt_str != NULL)
12748 /* If the format doesn't contain % args or %%, we know the size. */
12749 if (strchr (fmt_str, target_percent) == 0)
12751 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12752 len = build_int_cstu (size_type_node, strlen (fmt_str));
12754 /* If the format is "%s" and first ... argument is a string literal,
12755 we know the size too. */
12756 else if (fcode == BUILT_IN_SPRINTF_CHK
12757 && strcmp (fmt_str, target_percent_s) == 0)
12764 if (validate_arg (arg, POINTER_TYPE))
12766 len = c_strlen (arg, 1);
12767 if (! len || ! host_integerp (len, 1))
/* Unless SIZE is the all-ones "unknown" marker, require the computed
   LEN to be strictly below SIZE before dropping the check.  */
12774 if (! integer_all_onesp (size))
12776 if (! len || ! tree_int_cst_lt (len, size))
12780 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12781 or if format doesn't contain % chars or is "%s". */
12782 if (! integer_zerop (flag))
12784 if (fmt_str == NULL)
12786 if (strchr (fmt_str, target_percent) != NULL
12787 && strcmp (fmt_str, target_percent_s))
12791 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12792 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12793 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Rebuild the call, dropping the FLAG and SIZE arguments.  */
12797 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12800 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12801 a normal call should be emitted rather than expanding the function
12802 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12805 fold_builtin_sprintf_chk (location_t loc, tree exp,
12806 enum built_in_function fcode)
/* Thin wrapper: unpack EXP's argument count/array and delegate.  */
12808 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12809 CALL_EXPR_ARGP (exp), fcode);
12812 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12813 NULL_TREE if a normal call should be emitted rather than expanding
12814 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12815 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12816 passed as second argument. */
12819 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12820 tree maxlen, enum built_in_function fcode)
12822 tree dest, size, len, fn, fmt, flag;
12823 const char *fmt_str;
12825 /* Verify the required arguments in the original call. */
12829 if (!validate_arg (dest, POINTER_TYPE))
12832 if (!validate_arg (len, INTEGER_TYPE))
12835 if (!validate_arg (flag, INTEGER_TYPE))
12838 if (!validate_arg (size, INTEGER_TYPE))
12841 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE must be a compile-time constant to compare against LEN.  */
12844 if (! host_integerp (size, 1))
/* Unless SIZE is the all-ones "unknown" marker, prove LEN (or MAXLEN)
   fits in SIZE before dropping the check.  */
12847 if (! integer_all_onesp (size))
12849 if (! host_integerp (len, 1))
12851 /* If LEN is not constant, try MAXLEN too.
12852 For MAXLEN only allow optimizing into non-_ocs function
12853 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12854 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12860 if (tree_int_cst_lt (size, maxlen))
12864 if (!init_target_chars ())
12867 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12868 or if format doesn't contain % chars or is "%s". */
12869 if (! integer_zerop (flag))
12871 fmt_str = c_getstr (fmt);
12872 if (fmt_str == NULL)
12874 if (strchr (fmt_str, target_percent) != NULL
12875 && strcmp (fmt_str, target_percent_s))
12879 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12881 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12882 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Rebuild the call, dropping the FLAG and SIZE arguments.  */
12886 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12889 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12890 a normal call should be emitted rather than expanding the function
12891 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12892 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12893 passed as second argument. */
12896 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12897 enum built_in_function fcode)
/* Thin wrapper: unpack EXP's argument count/array and delegate.  */
12899 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12900 CALL_EXPR_ARGP (exp), maxlen, fcode);
12903 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12904 FMT and ARG are the arguments to the call; we don't fold cases with
12905 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12907 Return NULL_TREE if no simplification was possible, otherwise return the
12908 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12909 code of the function to be simplified. */
12912 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12913 tree arg, bool ignore,
12914 enum built_in_function fcode)
12916 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12917 const char *fmt_str = NULL;
12919 /* If the return value is used, don't do the transformation. */
12923 /* Verify the required arguments in the original call. */
12924 if (!validate_arg (fmt, POINTER_TYPE))
12927 /* Check whether the format is a literal string constant. */
12928 fmt_str = c_getstr (fmt);
12929 if (fmt_str == NULL)
/* Choose the putchar/puts replacements: the unlocked variants for
   printf_unlocked, otherwise the implicitly-declared versions.  */
12932 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12934 /* If we're using an unlocked function, assume the other
12935 unlocked functions exist explicitly. */
12936 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12937 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12941 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12942 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12945 if (!init_target_chars ())
12948 if (strcmp (fmt_str, target_percent_s) == 0
12949 || strchr (fmt_str, target_percent) == NULL)
12953 if (strcmp (fmt_str, target_percent_s) == 0)
/* printf ("%s", arg): the va_list variants cannot be folded here.  */
12955 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12958 if (!arg || !validate_arg (arg, POINTER_TYPE))
12961 str = c_getstr (arg);
12967 /* The format specifier doesn't contain any '%' characters. */
12968 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12974 /* If the string was "", printf does nothing. */
12975 if (str[0] == '\0')
12976 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12978 /* If the string has length of 1, call putchar. */
12979 if (str[1] == '\0')
12981 /* Given printf("c"), (where c is any one character,)
12982 convert "c"[0] to an int and pass that to the replacement
12984 newarg = build_int_cst (NULL_TREE, str[0]);
12986 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12990 /* If the string was "string\n", call puts("string"). */
12991 size_t len = strlen (str);
12992 if ((unsigned char)str[len - 1] == target_newline
12993 && (size_t) (int) len == len
12997 tree offset_node, string_cst;
12999 /* Create a NUL-terminated string that's one char shorter
13000 than the original, stripping off the trailing '\n'. */
13001 newarg = build_string_literal (len, str);
13002 string_cst = string_constant (newarg, &offset_node);
13003 gcc_checking_assert (string_cst
13004 && (TREE_STRING_LENGTH (string_cst)
13006 && integer_zerop (offset_node)
13008 TREE_STRING_POINTER (string_cst)[len - 1]
13009 == target_newline);
13010 /* build_string_literal creates a new STRING_CST,
13011 modify it in place to avoid double copying. */
13012 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13013 newstr[len - 1] = '\0';
13015 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13018 /* We'd like to arrange to call fputs(string,stdout) here,
13019 but we need stdout and don't have a way to get it yet. */
13024 /* The other optimizations can be done only on the non-va_list variants. */
13025 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13028 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13029 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13031 if (!arg || !validate_arg (arg, POINTER_TYPE))
13034 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13037 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13038 else if (strcmp (fmt_str, target_percent_c) == 0)
13040 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13043 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
/* Convert the replacement's result to the original return type.  */
13049 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13052 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13053 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13054 more than 3 arguments, and ARG may be null in the 2-argument case.
13056 Return NULL_TREE if no simplification was possible, otherwise return the
13057 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13058 code of the function to be simplified. */
13061 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13062 tree fmt, tree arg, bool ignore,
13063 enum built_in_function fcode)
13065 tree fn_fputc, fn_fputs, call = NULL_TREE;
13066 const char *fmt_str = NULL;
13068 /* If the return value is used, don't do the transformation. */
13072 /* Verify the required arguments in the original call. */
13073 if (!validate_arg (fp, POINTER_TYPE))
13075 if (!validate_arg (fmt, POINTER_TYPE))
13078 /* Check whether the format is a literal string constant. */
13079 fmt_str = c_getstr (fmt);
13080 if (fmt_str == NULL)
/* Choose the fputc/fputs replacements: the unlocked variants for
   fprintf_unlocked, otherwise the implicitly-declared versions.  */
13083 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13085 /* If we're using an unlocked function, assume the other
13086 unlocked functions exist explicitly. */
13087 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13088 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13092 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13093 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13096 if (!init_target_chars ())
13099 /* If the format doesn't contain % args or %%, use strcpy. */
13100 if (strchr (fmt_str, target_percent) == NULL)
13102 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13106 /* If the format specifier was "", fprintf does nothing. */
13107 if (fmt_str[0] == '\0')
13109 /* If FP has side-effects, just wait until gimplification is
13111 if (TREE_SIDE_EFFECTS (fp))
13114 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13117 /* When "string" doesn't contain %, replace all cases of
13118 fprintf (fp, string) with fputs (string, fp). The fputs
13119 builtin will take care of special cases like length == 1. */
13121 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13124 /* The other optimizations can be done only on the non-va_list variants. */
13125 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13128 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13129 else if (strcmp (fmt_str, target_percent_s) == 0)
13131 if (!arg || !validate_arg (arg, POINTER_TYPE))
13134 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13137 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13138 else if (strcmp (fmt_str, target_percent_c) == 0)
13140 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13143 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
/* Convert the replacement's result to the original return type.  */
13148 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13151 /* Initialize format string characters in the target charset. */
13154 init_target_chars (void)
/* Map the characters we pattern-match ('\n', '%', 'c', 's') into the
   target character set via the language hook.  */
13159 target_newline = lang_hooks.to_target_charset ('\n');
13160 target_percent = lang_hooks.to_target_charset ('%');
13161 target_c = lang_hooks.to_target_charset ('c');
13162 target_s = lang_hooks.to_target_charset ('s');
/* A zero result means the hook could not translate the character, so
   format matching is impossible.  */
13163 if (target_newline == 0 || target_percent == 0 || target_c == 0
/* Precompute the "%c", "%s" and "%s\n" strings in the target charset.  */
13167 target_percent_c[0] = target_percent;
13168 target_percent_c[1] = target_c;
13169 target_percent_c[2] = '\0';
13171 target_percent_s[0] = target_percent;
13172 target_percent_s[1] = target_s;
13173 target_percent_s[2] = '\0';
13175 target_percent_s_newline[0] = target_percent;
13176 target_percent_s_newline[1] = target_s;
13177 target_percent_s_newline[2] = target_newline;
13178 target_percent_s_newline[3] = '\0';
13185 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13186 and no overflow/underflow occurred. INEXACT is true if M was not
13187 exactly calculated. TYPE is the tree type for the result. This
13188 function assumes that you cleared the MPFR flags and then
13189 calculated M to see if anything subsequently set a flag prior to
13190 entering this function. Return NULL_TREE if any checks fail. */
13193 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13195 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13196 overflow/underflow occurred. If -frounding-math, proceed iff the
13197 result of calling FUNC was exact. */
13198 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13199 && (!flag_rounding_math || !inexact))
13201 REAL_VALUE_TYPE rr;
13203 real_from_mpfr (&rr, m, type, GMP_RNDN);
13204 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13205 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13206 but the mpft_t is not, then we underflowed in the
13208 if (real_isfinite (&rr)
13209 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13211 REAL_VALUE_TYPE rmode;
/* Round-trip through the target mode and only accept the value if
   nothing was lost in the conversion.  */
13213 real_convert (&rmode, TYPE_MODE (type), &rr);
13214 /* Proceed iff the specified mode can hold the value. */
13215 if (real_identical (&rmode, &rr))
13216 return build_real (type, rmode);
13222 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13223 number and no overflow/underflow occurred. INEXACT is true if M
13224 was not exactly calculated. TYPE is the tree type for the result.
13225 This function assumes that you cleared the MPFR flags and then
13226 calculated M to see if anything subsequently set a flag prior to
13227 entering this function. Return NULL_TREE if any checks fail, if
13228 FORCE_CONVERT is true, then bypass the checks. */
13231 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13233 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13234 overflow/underflow occurred. If -frounding-math, proceed iff the
13235 result of calling FUNC was exact. */
13237 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13238 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13239 && (!flag_rounding_math || !inexact)))
13241 REAL_VALUE_TYPE re, im;
/* Convert real and imaginary parts separately; TYPE is the complex
   type, so its element type is TREE_TYPE (type).  */
13243 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13244 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13245 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13246 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13247 but the mpft_t is not, then we underflowed in the
13250 || (real_isfinite (&re) && real_isfinite (&im)
13251 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13252 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13254 REAL_VALUE_TYPE re_mode, im_mode;
/* Round-trip through the element mode and require a lossless
   conversion (unless FORCE_CONVERT bypasses the check).  */
13256 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13257 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13258 /* Proceed iff the specified mode can hold the value. */
13260 || (real_identical (&re_mode, &re)
13261 && real_identical (&im_mode, &im)))
13262 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13263 build_real (TREE_TYPE (type), im_mode));
13269 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13270 FUNC on it and return the resulting value as a tree with type TYPE.
13271 If MIN and/or MAX are not NULL, then the supplied ARG must be
13272 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13273 acceptable values, otherwise they are not. The mpfr precision is
13274 set to the precision of TYPE. We assume that function FUNC returns
13275 zero if the result could be calculated exactly within the requested
13279 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13280 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13283 tree result = NULL_TREE;
13287 /* To proceed, MPFR must exactly represent the target floating point
13288 format, which only happens when the target base equals two. */
13289 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13290 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13292 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Enforce the optional MIN/MAX domain bounds, strictly or
   inclusively as requested.  */
13294 if (real_isfinite (ra)
13295 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13296 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13298 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13299 const int prec = fmt->p;
13300 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13304 mpfr_init2 (m, prec);
13305 mpfr_from_real (m, ra, GMP_RNDN);
/* Clear the MPFR exception flags so do_mpfr_ckconv can detect any
   overflow/underflow raised by FUNC.  */
13306 mpfr_clear_flags ();
13307 inexact = func (m, m, rnd);
13308 result = do_mpfr_ckconv (m, type, inexact);
13316 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13317 FUNC on it and return the resulting value as a tree with type TYPE.
13318 The mpfr precision is set to the precision of TYPE. We assume that
13319 function FUNC returns zero if the result could be calculated
13320 exactly within the requested precision. */
13323 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13324 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13326 tree result = NULL_TREE;
13331 /* To proceed, MPFR must exactly represent the target floating point
13332 format, which only happens when the target base equals two. */
13333 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13334 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13335 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13337 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13338 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13340 if (real_isfinite (ra1) && real_isfinite (ra2))
13342 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13343 const int prec = fmt->p;
13344 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13348 mpfr_inits2 (prec, m1, m2, NULL);
13349 mpfr_from_real (m1, ra1, GMP_RNDN);
13350 mpfr_from_real (m2, ra2, GMP_RNDN);
/* Clear MPFR flags so do_mpfr_ckconv can detect overflow/underflow.  */
13351 mpfr_clear_flags ();
13352 inexact = func (m1, m1, m2, rnd);
13353 result = do_mpfr_ckconv (m1, type, inexact);
13354 mpfr_clears (m1, m2, NULL);
13361 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13362 FUNC on it and return the resulting value as a tree with type TYPE.
13363 The mpfr precision is set to the precision of TYPE. We assume that
13364 function FUNC returns zero if the result could be calculated
13365 exactly within the requested precision. */
13368 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13369 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13371 tree result = NULL_TREE;
13377 /* To proceed, MPFR must exactly represent the target floating point
13378 format, which only happens when the target base equals two. */
13379 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13380 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13381 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13382 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13384 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13385 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13386 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13388 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13390 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13391 const int prec = fmt->p;
13392 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13396 mpfr_inits2 (prec, m1, m2, m3, NULL);
13397 mpfr_from_real (m1, ra1, GMP_RNDN);
13398 mpfr_from_real (m2, ra2, GMP_RNDN);
13399 mpfr_from_real (m3, ra3, GMP_RNDN);
/* Clear MPFR flags so do_mpfr_ckconv can detect overflow/underflow.  */
13400 mpfr_clear_flags ();
13401 inexact = func (m1, m1, m2, m3, rnd);
13402 result = do_mpfr_ckconv (m1, type, inexact);
13403 mpfr_clears (m1, m2, m3, NULL);
13410 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13411 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13412 If ARG_SINP and ARG_COSP are NULL then the result is returned
13413 as a complex value.
13414 The type is taken from the type of ARG and is used for setting the
13415 precision of the calculation and results. */
13418 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13420 tree const type = TREE_TYPE (arg);
13421 tree result = NULL_TREE;
13425 /* To proceed, MPFR must exactly represent the target floating point
13426 format, which only happens when the target base equals two. */
13427 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13428 && TREE_CODE (arg) == REAL_CST
13429 && !TREE_OVERFLOW (arg))
13431 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13433 if (real_isfinite (ra))
13435 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13436 const int prec = fmt->p;
13437 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13438 tree result_s, result_c;
/* One mpfr_sin_cos call yields both results; both must convert
   cleanly before any folding happens.  */
13442 mpfr_inits2 (prec, m, ms, mc, NULL);
13443 mpfr_from_real (m, ra, GMP_RNDN);
13444 mpfr_clear_flags ();
13445 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13446 result_s = do_mpfr_ckconv (ms, type, inexact);
13447 result_c = do_mpfr_ckconv (mc, type, inexact);
13448 mpfr_clears (m, ms, mc, NULL);
13449 if (result_s && result_c)
13451 /* If we are to return in a complex value do so. */
13452 if (!arg_sinp && !arg_cosp)
13453 return build_complex (build_complex_type (type),
13454 result_c, result_s);
13456 /* Dereference the sin/cos pointer arguments. */
13457 arg_sinp = build_fold_indirect_ref (arg_sinp);
13458 arg_cosp = build_fold_indirect_ref (arg_cosp);
13459 /* Proceed if valid pointer type were passed in. */
13460 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13461 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13463 /* Set the values. */
13464 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
/* Mark the assignments as having side effects so they are not
   dropped by later folding.  */
13466 TREE_SIDE_EFFECTS (result_s) = 1;
13467 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13469 TREE_SIDE_EFFECTS (result_c) = 1;
13470 /* Combine the assignments into a compound expr. */
13471 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13472 result_s, result_c));
13480 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13481 two-argument mpfr order N Bessel function FUNC on them and return
13482 the resulting value as a tree with type TYPE. The mpfr precision
13483 is set to the precision of TYPE. We assume that function FUNC
13484 returns zero if the result could be calculated exactly within the
13485 requested precision. */
13487 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13488 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13489 const REAL_VALUE_TYPE *min, bool inclusive)
13491 tree result = NULL_TREE;
13496 /* To proceed, MPFR must exactly represent the target floating point
13497 format, which only happens when the target base equals two. */
13498 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13499 && host_integerp (arg1, 0)
13500 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
/* N is the Bessel order, taken from the signed integer constant ARG1.  */
13502 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13503 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13506 && real_isfinite (ra)
13507 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13509 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13510 const int prec = fmt->p;
13511 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13515 mpfr_init2 (m, prec);
13516 mpfr_from_real (m, ra, GMP_RNDN);
/* Clear MPFR flags so do_mpfr_ckconv can detect overflow/underflow.  */
13517 mpfr_clear_flags ();
13518 inexact = func (m, n, m, rnd);
13519 result = do_mpfr_ckconv (m, type, inexact);
13527 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13528 the pointer *(ARG_QUO) and return the result. The type is taken
13529 from the type of ARG0 and is used for setting the precision of the
13530 calculation and results. */
13533 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13535 tree const type = TREE_TYPE (arg0);
13536 tree result = NULL_TREE;
13541 /* To proceed, MPFR must exactly represent the target floating point
13542 format, which only happens when the target base equals two. */
13543 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13544 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13545 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13547 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13548 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13550 if (real_isfinite (ra0) && real_isfinite (ra1))
13552 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13553 const int prec = fmt->p;
13554 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13559 mpfr_inits2 (prec, m0, m1, NULL);
13560 mpfr_from_real (m0, ra0, GMP_RNDN);
13561 mpfr_from_real (m1, ra1, GMP_RNDN);
13562 mpfr_clear_flags ();
13563 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13564 /* Remquo is independent of the rounding mode, so pass
13565 inexact=0 to do_mpfr_ckconv(). */
13566 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13567 mpfr_clears (m0, m1, NULL);
13570 /* MPFR calculates quo in the host's long so it may
13571 return more bits in quo than the target int can hold
13572 if sizeof(host long) > sizeof(target int). This can
13573 happen even for native compilers in LP64 mode. In
13574 these cases, modulo the quo value with the largest
13575 number that the target int can hold while leaving one
13576 bit for the sign. */
13577 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13578 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13580 /* Dereference the quo pointer argument. */
13581 arg_quo = build_fold_indirect_ref (arg_quo);
13582 /* Proceed iff a valid pointer type was passed in. */
13583 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13585 /* Set the value. */
13586 tree result_quo = fold_build2 (MODIFY_EXPR,
13587 TREE_TYPE (arg_quo), arg_quo,
13588 build_int_cst (NULL, integer_quo));
/* Keep the store from being folded away.  */
13589 TREE_SIDE_EFFECTS (result_quo) = 1;
13590 /* Combine the quo assignment with the rem. */
13591 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13592 result_quo, result_rem));
13600 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13601 resulting value as a tree with type TYPE. The mpfr precision is
13602 set to the precision of TYPE. We assume that this mpfr function
13603 returns zero if the result could be calculated exactly within the
13604 requested precision. In addition, the integer pointer represented
13605 by ARG_SG will be dereferenced and set to the appropriate signgam
13609 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13611 tree result = NULL_TREE;
13615 /* To proceed, MPFR must exactly represent the target floating point
13616 format, which only happens when the target base equals two. Also
13617 verify ARG is a constant and that ARG_SG is an int pointer. */
13618 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13619 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13620 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13621 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13623 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13625 /* In addition to NaN and Inf, the argument cannot be zero or a
13626 negative integer. */
13627 if (real_isfinite (ra)
13628 && ra->cl != rvc_zero
13629 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13631 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13632 const int prec = fmt->p;
13633 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13638 mpfr_init2 (m, prec);
13639 mpfr_from_real (m, ra, GMP_RNDN);
/* Clear MPFR flags so do_mpfr_ckconv can detect overflow/underflow;
   mpfr_lgamma also produces the sign of gamma in SG.  */
13640 mpfr_clear_flags ();
13641 inexact = mpfr_lgamma (m, &sg, m, rnd);
13642 result_lg = do_mpfr_ckconv (m, type, inexact);
13648 /* Dereference the arg_sg pointer argument. */
13649 arg_sg = build_fold_indirect_ref (arg_sg);
13650 /* Assign the signgam value into *arg_sg. */
13651 result_sg = fold_build2 (MODIFY_EXPR,
13652 TREE_TYPE (arg_sg), arg_sg,
13653 build_int_cst (NULL, sg));
/* Keep the store from being folded away.  */
13654 TREE_SIDE_EFFECTS (result_sg) = 1;
13655 /* Combine the signgam assignment with the lgamma result. */
13656 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13657 result_sg, result_lg));
13665 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13666 function FUNC on it and return the resulting value as a tree with
13667 type TYPE. The mpfr precision is set to the precision of TYPE. We
13668 assume that function FUNC returns zero if the result could be
13669 calculated exactly within the requested precision. */
13672 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13674 tree result = NULL_TREE;
13678 /* To proceed, MPFR must exactly represent the target floating point
13679 format, which only happens when the target base equals two. */
13680 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13681 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13682 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13684 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13685 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13687 if (real_isfinite (re) && real_isfinite (im))
13689 const struct real_format *const fmt =
13690 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13691 const int prec = fmt->p;
/* Matching MPFR and MPC rounding modes for the element format.  */
13692 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13693 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13697 mpc_init2 (m, prec);
13698 mpfr_from_real (mpc_realref(m), re, rnd);
13699 mpfr_from_real (mpc_imagref(m), im, rnd);
/* Clear MPFR flags so do_mpc_ckconv can detect overflow/underflow.  */
13700 mpfr_clear_flags ();
13701 inexact = func (m, m, crnd);
13702 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13710 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13711 mpc function FUNC on it and return the resulting value as a tree
13712 with type TYPE. The mpfr precision is set to the precision of
13713 TYPE. We assume that function FUNC returns zero if the result
13714 could be calculated exactly within the requested precision. If
13715 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13716 in the arguments and/or results. */
13719 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13720 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13722 tree result = NULL_TREE;
13727 /* To proceed, MPFR must exactly represent the target floating point
13728 format, which only happens when the target base equals two. */
13729 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13730 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13731 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13732 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13733 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13735 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13736 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13737 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13738 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
/* With DO_NONFINITE, Inf/NaN parts are allowed through; otherwise
   all four components must be finite.  */
13741 || (real_isfinite (re0) && real_isfinite (im0)
13742 && real_isfinite (re1) && real_isfinite (im1)))
13744 const struct real_format *const fmt =
13745 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13746 const int prec = fmt->p;
/* Matching MPFR and MPC rounding modes for the element format.  */
13747 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13748 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13752 mpc_init2 (m0, prec);
13753 mpc_init2 (m1, prec);
13754 mpfr_from_real (mpc_realref(m0), re0, rnd);
13755 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13756 mpfr_from_real (mpc_realref(m1), re1, rnd);
13757 mpfr_from_real (mpc_imagref(m1), im1, rnd);
/* Clear MPFR flags so do_mpc_ckconv can detect overflow/underflow.  */
13758 mpfr_clear_flags ();
13759 inexact = func (m0, m0, m1, crnd);
13760 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13769 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13770 a normal call should be emitted rather than expanding the function
13771 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13774 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13776 int nargs = gimple_call_num_args (stmt);
/* Delegate to the tree-level folder, handing it the call's argument
   vector — or a pointer to error_mark_node when there are no
   arguments, so the callee never dereferences a null vector.  */
13778 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13780 ? gimple_call_arg_ptr (stmt, 0)
13781 : &error_mark_node), fcode);
13784 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13785 a normal call should be emitted rather than expanding the function
13786 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13787 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13788 passed as second argument. */
13791 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13792 enum built_in_function fcode)
13794 int nargs = gimple_call_num_args (stmt);
/* Delegate to the tree-level folder, handing it the call's argument
   vector — or a pointer to error_mark_node when there are no
   arguments — together with the known MAXLEN bound (may be NULL).  */
13796 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13798 ? gimple_call_arg_ptr (stmt, 0)
13799 : &error_mark_node), maxlen, fcode);
13802 /* Builtins with folding operations that operate on "..." arguments
13803 need special handling; we need to store the arguments in a convenient
13804 data structure before attempting any folding. Fortunately there are
13805 only a few builtins that fall into this category. FNDECL is the
13806 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13807 result of the function call is ignored. */
13810 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13811 bool ignore ATTRIBUTE_UNUSED)
13813 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13814 tree ret = NULL_TREE;
/* Dispatch on the builtin code; only the *printf_chk family needs
   this varargs-aware path.  */
13818 case BUILT_IN_SPRINTF_CHK:
13819 case BUILT_IN_VSPRINTF_CHK:
13820 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13823 case BUILT_IN_SNPRINTF_CHK:
13824 case BUILT_IN_VSNPRINTF_CHK:
/* No length bound is known at this point, hence NULL_TREE.  */
13825 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
/* Wrap the folded result in a NOP_EXPR marked TREE_NO_WARNING so the
   replacement does not trigger "statement with no effect" style
   diagnostics for the removed call.  */
13832 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13833 TREE_NO_WARNING (ret) = 1;
13839 /* A wrapper function for builtin folding that prevents warnings for
13840 "statement without effect" and the like, caused by removing the
13841 call node earlier than the warning is generated. */
13844 fold_call_stmt (gimple stmt, bool ignore)
13846 tree ret = NULL_TREE;
13847 tree fndecl = gimple_call_fndecl (stmt);
13848 location_t loc = gimple_location (stmt);
/* Only attempt folding for direct calls to builtins; calls carrying
   __builtin_va_arg_pack must be kept as calls.  */
13850 && TREE_CODE (fndecl) == FUNCTION_DECL
13851 && DECL_BUILT_IN (fndecl)
13852 && !gimple_call_va_arg_pack_p (stmt))
13854 int nargs = gimple_call_num_args (stmt);
/* Point at the argument vector, or at error_mark_node when there are
   no arguments, so downstream folders always get a valid pointer.  */
13855 tree *args = (nargs > 0
13856 ? gimple_call_arg_ptr (stmt, 0)
13857 : &error_mark_node);
13859 if (avoid_folding_inline_builtin (fndecl))
/* Machine-dependent builtins are folded by the target hook.  */
13861 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13863 return targetm.fold_builtin (fndecl, nargs, args, ignore);
/* Fixed-arity builtins go through fold_builtin_n; longer argument
   lists use the varargs-aware gimple folder.  */
13867 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13868 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13870 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13873 /* Propagate location information from original call to
13874 expansion of builtin. Otherwise things like
13875 maybe_emit_chk_warning, that operate on the expansion
13876 of a builtin, will use the wrong location information. */
13877 if (gimple_has_location (stmt))
13879 tree realret = ret;
/* Look through the no-warning NOP_EXPR wrapper added by
   gimple_fold_builtin_varargs before setting the location.  */
13880 if (TREE_CODE (ret) == NOP_EXPR)
13881 realret = TREE_OPERAND (ret, 0);
13882 if (CAN_HAVE_LOCATION_P (realret)
13883 && !EXPR_HAS_LOCATION (realret))
13884 SET_EXPR_LOCATION (realret, loc);
13894 /* Look up the function in built_in_decls that corresponds to DECL
13895 and set ASMSPEC as its user assembler name. DECL must be a
13896 function decl that declares a builtin. */
13899 set_builtin_user_assembler_name (tree decl, const char *asmspec)
/* Precondition: DECL is a normal (not MD/frontend) builtin decl.  */
13902 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13903 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
/* Rename the canonical builtin decl, then also redirect the RTL
   libfuncs the expanders use, so expanded block moves etc. call the
   user-specified symbol.  */
13906 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13907 set_user_assembler_name (builtin, asmspec);
13908 switch (DECL_FUNCTION_CODE (decl))
13910 case BUILT_IN_MEMCPY:
13911 init_block_move_fn (asmspec);
13912 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13914 case BUILT_IN_MEMSET:
13915 init_block_clear_fn (asmspec);
13916 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13918 case BUILT_IN_MEMMOVE:
13919 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13921 case BUILT_IN_MEMCMP:
13922 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13924 case BUILT_IN_ABORT:
13925 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
/* NOTE(review): case label elided in this excerpt — presumably
   BUILT_IN_FFS; confirm against the full file.  When int is narrower
   than a word the ffs optab entry must also be redirected.  */
13928 if (INT_TYPE_SIZE < BITS_PER_WORD)
13930 set_user_assembler_libfunc ("ffs", asmspec);
13931 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13932 MODE_INT, 0), "ffs");
13940 /* Return true if DECL is a builtin that expands to a constant or similarly
/* Only normal builtins are classified; anything else is not "simple".  */
13943 is_simple_builtin (tree decl)
13945 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13946 switch (DECL_FUNCTION_CODE (decl))
13948 /* Builtins that expand to constants. */
13949 case BUILT_IN_CONSTANT_P:
13950 case BUILT_IN_EXPECT:
13951 case BUILT_IN_OBJECT_SIZE:
13952 case BUILT_IN_UNREACHABLE:
13953 /* Simple register moves or loads from stack. */
13954 case BUILT_IN_RETURN_ADDRESS:
13955 case BUILT_IN_EXTRACT_RETURN_ADDR:
13956 case BUILT_IN_FROB_RETURN_ADDR:
13957 case BUILT_IN_RETURN:
13958 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13959 case BUILT_IN_FRAME_ADDRESS:
13960 case BUILT_IN_VA_END:
13961 case BUILT_IN_STACK_SAVE:
13962 case BUILT_IN_STACK_RESTORE:
13963 /* Exception state returns or moves registers around. */
13964 case BUILT_IN_EH_FILTER:
13965 case BUILT_IN_EH_POINTER:
13966 case BUILT_IN_EH_COPY_VALUES:
13976 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13977 most probably expanded inline into reasonably simple code. This is a
13978 superset of is_simple_builtin. */
13980 is_inexpensive_builtin (tree decl)
13984 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13986 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13987 switch (DECL_FUNCTION_CODE (decl))
13990 case BUILT_IN_ALLOCA:
13991 case BUILT_IN_BSWAP32:
13992 case BUILT_IN_BSWAP64:
13994 case BUILT_IN_CLZIMAX:
13995 case BUILT_IN_CLZL:
13996 case BUILT_IN_CLZLL:
13998 case BUILT_IN_CTZIMAX:
13999 case BUILT_IN_CTZL:
14000 case BUILT_IN_CTZLL:
14002 case BUILT_IN_FFSIMAX:
14003 case BUILT_IN_FFSL:
14004 case BUILT_IN_FFSLL:
14005 case BUILT_IN_IMAXABS:
14006 case BUILT_IN_FINITE:
14007 case BUILT_IN_FINITEF:
14008 case BUILT_IN_FINITEL:
14009 case BUILT_IN_FINITED32:
14010 case BUILT_IN_FINITED64:
14011 case BUILT_IN_FINITED128:
14012 case BUILT_IN_FPCLASSIFY:
14013 case BUILT_IN_ISFINITE:
14014 case BUILT_IN_ISINF_SIGN:
14015 case BUILT_IN_ISINF:
14016 case BUILT_IN_ISINFF:
14017 case BUILT_IN_ISINFL:
14018 case BUILT_IN_ISINFD32:
14019 case BUILT_IN_ISINFD64:
14020 case BUILT_IN_ISINFD128:
14021 case BUILT_IN_ISNAN:
14022 case BUILT_IN_ISNANF:
14023 case BUILT_IN_ISNANL:
14024 case BUILT_IN_ISNAND32:
14025 case BUILT_IN_ISNAND64:
14026 case BUILT_IN_ISNAND128:
14027 case BUILT_IN_ISNORMAL:
14028 case BUILT_IN_ISGREATER:
14029 case BUILT_IN_ISGREATEREQUAL:
14030 case BUILT_IN_ISLESS:
14031 case BUILT_IN_ISLESSEQUAL:
14032 case BUILT_IN_ISLESSGREATER:
14033 case BUILT_IN_ISUNORDERED:
14034 case BUILT_IN_VA_ARG_PACK:
14035 case BUILT_IN_VA_ARG_PACK_LEN:
14036 case BUILT_IN_VA_COPY:
14037 case BUILT_IN_TRAP:
14038 case BUILT_IN_SAVEREGS:
14039 case BUILT_IN_POPCOUNTL:
14040 case BUILT_IN_POPCOUNTLL:
14041 case BUILT_IN_POPCOUNTIMAX:
14042 case BUILT_IN_POPCOUNT:
14043 case BUILT_IN_PARITYL:
14044 case BUILT_IN_PARITYLL:
14045 case BUILT_IN_PARITYIMAX:
14046 case BUILT_IN_PARITY:
14047 case BUILT_IN_LABS:
14048 case BUILT_IN_LLABS:
14049 case BUILT_IN_PREFETCH:
14053 return is_simple_builtin (decl);