1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
62 struct target_builtins *this_target_builtins = &default_target_builtins;
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
195 static rtx expand_builtin_object_size (tree);
196 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
197 enum built_in_function);
198 static void maybe_emit_chk_warning (tree, enum built_in_function);
199 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_free_warning (tree);
201 static tree fold_builtin_object_size (tree, tree);
202 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
203 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
204 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
205 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
206 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
207 enum built_in_function);
208 static bool init_target_chars (void);
210 static unsigned HOST_WIDE_INT target_newline;
211 static unsigned HOST_WIDE_INT target_percent;
212 static unsigned HOST_WIDE_INT target_c;
213 static unsigned HOST_WIDE_INT target_s;
214 static char target_percent_c[3];
215 static char target_percent_s[3];
216 static char target_percent_s_newline[4];
217 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
218 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
219 static tree do_mpfr_arg2 (tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_arg3 (tree, tree, tree, tree,
222 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
223 static tree do_mpfr_sincos (tree, tree, tree);
224 static tree do_mpfr_bessel_n (tree, tree, tree,
225 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
226 const REAL_VALUE_TYPE *, bool);
227 static tree do_mpfr_remquo (tree, tree, tree);
228 static tree do_mpfr_lgamma_r (tree, tree, tree);
230 /* Return true if NAME starts with __builtin_ or __sync_. */
233 is_builtin_name (const char *name)
235 if (strncmp (name, "__builtin_", 10) == 0)
237 if (strncmp (name, "__sync_", 7) == 0)
243 /* Return true if DECL is a function symbol representing a built-in. */
246 is_builtin_fn (tree decl)
248 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252 /* Return true if NODE should be considered for inline expansion regardless
253 of the optimization level. This means whenever a function is invoked with
254 its "internal" name, which normally contains the prefix "__builtin". */
257 called_as_built_in (tree node)
259 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
260 we want the name used to call the function, not the name it
262 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
263 return is_builtin_name (name);
266 /* Return the alignment in bits of EXP, an object.
267 Don't return more than MAX_ALIGN no matter what. */
270 get_object_alignment (tree exp, unsigned int max_align)
272 HOST_WIDE_INT bitsize, bitpos;
274 enum machine_mode mode;
275 int unsignedp, volatilep;
276 unsigned int align, inner;
278 /* Get the innermost object and the constant (bitpos) and possibly
279 variable (offset) offset of the access. */
280 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
281 &mode, &unsignedp, &volatilep, true);
283 /* Extract alignment information from the innermost object and
284 possibly adjust bitpos and offset. */
285 if (TREE_CODE (exp) == CONST_DECL)
286 exp = DECL_INITIAL (exp);
288 && TREE_CODE (exp) != LABEL_DECL)
289 align = DECL_ALIGN (exp);
290 else if (CONSTANT_CLASS_P (exp))
292 align = TYPE_ALIGN (TREE_TYPE (exp));
293 #ifdef CONSTANT_ALIGNMENT
294 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
297 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
298 align = TYPE_ALIGN (TREE_TYPE (exp));
299 else if (TREE_CODE (exp) == INDIRECT_REF)
300 align = TYPE_ALIGN (TREE_TYPE (exp));
301 else if (TREE_CODE (exp) == MEM_REF)
303 tree addr = TREE_OPERAND (exp, 0);
304 struct ptr_info_def *pi;
305 if (TREE_CODE (addr) == BIT_AND_EXPR
306 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
308 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
309 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
310 align *= BITS_PER_UNIT;
311 addr = TREE_OPERAND (addr, 0);
314 align = BITS_PER_UNIT;
315 if (TREE_CODE (addr) == SSA_NAME
316 && (pi = SSA_NAME_PTR_INFO (addr)))
318 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
319 align = MAX (pi->align * BITS_PER_UNIT, align);
321 else if (TREE_CODE (addr) == ADDR_EXPR)
322 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
324 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
326 else if (TREE_CODE (exp) == TARGET_MEM_REF)
328 struct ptr_info_def *pi;
329 tree addr = TMR_BASE (exp);
330 if (TREE_CODE (addr) == BIT_AND_EXPR
331 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
333 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
334 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
335 align *= BITS_PER_UNIT;
336 addr = TREE_OPERAND (addr, 0);
339 align = BITS_PER_UNIT;
340 if (TREE_CODE (addr) == SSA_NAME
341 && (pi = SSA_NAME_PTR_INFO (addr)))
343 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
344 align = MAX (pi->align * BITS_PER_UNIT, align);
346 else if (TREE_CODE (addr) == ADDR_EXPR)
347 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
349 if (TMR_OFFSET (exp))
350 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
351 if (TMR_INDEX (exp) && TMR_STEP (exp))
353 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
354 align = MIN (align, (step & -step) * BITS_PER_UNIT);
356 else if (TMR_INDEX (exp))
357 align = BITS_PER_UNIT;
358 if (TMR_INDEX2 (exp))
359 align = BITS_PER_UNIT;
362 align = BITS_PER_UNIT;
364 /* If there is a non-constant offset part extract the maximum
365 alignment that can prevail. */
371 if (TREE_CODE (offset) == PLUS_EXPR)
373 next_offset = TREE_OPERAND (offset, 0);
374 offset = TREE_OPERAND (offset, 1);
378 if (host_integerp (offset, 1))
380 /* Any overflow in calculating offset_bits won't change
383 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
386 inner = MIN (inner, (offset_bits & -offset_bits));
388 else if (TREE_CODE (offset) == MULT_EXPR
389 && host_integerp (TREE_OPERAND (offset, 1), 1))
391 /* Any overflow in calculating offset_factor won't change
393 unsigned offset_factor
394 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
398 inner = MIN (inner, (offset_factor & -offset_factor));
402 inner = MIN (inner, BITS_PER_UNIT);
405 offset = next_offset;
408 /* Alignment is innermost object alignment adjusted by the constant
409 and non-constant offset parts. */
410 align = MIN (align, inner);
411 bitpos = bitpos & (align - 1);
413 /* align and bitpos now specify known low bits of the pointer.
414 ptr & (align - 1) == bitpos. */
417 align = (bitpos & -bitpos);
419 return MIN (align, max_align);
422 /* Returns true iff we can trust that alignment information has been
423 calculated properly. */
426 can_trust_pointer_alignment (void)
428 /* We rely on TER to compute accurate alignment information. */
429 return (optimize && flag_tree_ter);
432 /* Return the alignment in bits of EXP, a pointer valued expression.
433 But don't return more than MAX_ALIGN no matter what.
434 The alignment returned is, by default, the alignment of the thing that
435 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
437 Otherwise, look at the expression to see if we can do better, i.e., if the
438 expression is actually pointing at an object whose alignment is tighter. */
441 get_pointer_alignment (tree exp, unsigned int max_align)
445 if (TREE_CODE (exp) == ADDR_EXPR)
446 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
447 else if (TREE_CODE (exp) == SSA_NAME
448 && POINTER_TYPE_P (TREE_TYPE (exp)))
450 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
453 return BITS_PER_UNIT;
454 if (pi->misalign != 0)
455 align = (pi->misalign & -pi->misalign);
458 return MIN (max_align, align * BITS_PER_UNIT);
461 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
464 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
465 way, because it could contain a zero byte in the middle.
466 TREE_STRING_LENGTH is the size of the character array, not the string.
468 ONLY_VALUE should be nonzero if the result is not going to be emitted
469 into the instruction stream and zero if it is going to be expanded.
470 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
471 is returned, otherwise NULL, since
472 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
473 evaluate the side-effects.
475 The value returned is of type `ssizetype'.
477 Unfortunately, string_constant can't access the values of const char
478 arrays with initializers, so neither can we do so here. */
481 c_strlen (tree src, int only_value)
484 HOST_WIDE_INT offset;
490 if (TREE_CODE (src) == COND_EXPR
491 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
495 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
496 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
497 if (tree_int_cst_equal (len1, len2))
501 if (TREE_CODE (src) == COMPOUND_EXPR
502 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
503 return c_strlen (TREE_OPERAND (src, 1), only_value);
505 loc = EXPR_LOC_OR_HERE (src);
507 src = string_constant (src, &offset_node);
511 max = TREE_STRING_LENGTH (src) - 1;
512 ptr = TREE_STRING_POINTER (src);
514 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
516 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
517 compute the offset to the following null if we don't know where to
518 start searching for it. */
521 for (i = 0; i < max; i++)
525 /* We don't know the starting offset, but we do know that the string
526 has no internal zero bytes. We can assume that the offset falls
527 within the bounds of the string; otherwise, the programmer deserves
528 what he gets. Subtract the offset from the length of the string,
529 and return that. This would perhaps not be valid if we were dealing
530 with named arrays in addition to literal string constants. */
532 return size_diffop_loc (loc, size_int (max), offset_node);
535 /* We have a known offset into the string. Start searching there for
536 a null character if we can represent it as a single HOST_WIDE_INT. */
537 if (offset_node == 0)
539 else if (! host_integerp (offset_node, 0))
542 offset = tree_low_cst (offset_node, 0);
544 /* If the offset is known to be out of bounds, warn, and call strlen at
546 if (offset < 0 || offset > max)
548 /* Suppress multiple warnings for propagated constant strings. */
549 if (! TREE_NO_WARNING (src))
551 warning_at (loc, 0, "offset outside bounds of constant string");
552 TREE_NO_WARNING (src) = 1;
557 /* Use strlen to search for the first zero byte. Since any strings
558 constructed with build_string will have nulls appended, we win even
559 if we get handed something like (char[4])"abcd".
561 Since OFFSET is our starting index into the string, no further
562 calculation is needed. */
563 return ssize_int (strlen (ptr + offset));
566 /* Return a char pointer for a C string if it is a string constant
567 or sum of string constant and integer constant. */
574 src = string_constant (src, &offset_node);
578 if (offset_node == 0)
579 return TREE_STRING_POINTER (src);
580 else if (!host_integerp (offset_node, 1)
581 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
584 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
587 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
588 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
591 c_readstr (const char *str, enum machine_mode mode)
597 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
602 for (i = 0; i < GET_MODE_SIZE (mode); i++)
605 if (WORDS_BIG_ENDIAN)
606 j = GET_MODE_SIZE (mode) - i - 1;
607 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
608 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
609 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
611 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
614 ch = (unsigned char) str[i];
615 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
617 return immed_double_const (c[0], c[1], mode);
620 /* Cast a target constant CST to target CHAR and if that value fits into
621 host char type, return zero and put that value into variable pointed to by
625 target_char_cast (tree cst, char *p)
627 unsigned HOST_WIDE_INT val, hostval;
629 if (TREE_CODE (cst) != INTEGER_CST
630 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
633 val = TREE_INT_CST_LOW (cst);
634 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
635 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
638 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
639 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
648 /* Similar to save_expr, but assumes that arbitrary code is not executed
649 in between the multiple evaluations. In particular, we assume that a
650 non-addressable local variable will not be modified. */
653 builtin_save_expr (tree exp)
655 if (TREE_CODE (exp) == SSA_NAME
656 || (TREE_ADDRESSABLE (exp) == 0
657 && (TREE_CODE (exp) == PARM_DECL
658 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
661 return save_expr (exp);
664 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
665 times to get the address of either a higher stack frame, or a return
666 address located within it (depending on FNDECL_CODE). */
669 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
673 #ifdef INITIAL_FRAME_ADDRESS_RTX
674 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
678 /* For a zero count with __builtin_return_address, we don't care what
679 frame address we return, because target-specific definitions will
680 override us. Therefore frame pointer elimination is OK, and using
681 the soft frame pointer is OK.
683 For a nonzero count, or a zero count with __builtin_frame_address,
684 we require a stable offset from the current frame pointer to the
685 previous one, so we must use the hard frame pointer, and
686 we must disable frame pointer elimination. */
687 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
688 tem = frame_pointer_rtx;
691 tem = hard_frame_pointer_rtx;
693 /* Tell reload not to eliminate the frame pointer. */
694 crtl->accesses_prior_frames = 1;
698 /* Some machines need special handling before we can access
699 arbitrary frames. For example, on the SPARC, we must first flush
700 all register windows to the stack. */
701 #ifdef SETUP_FRAME_ADDRESSES
703 SETUP_FRAME_ADDRESSES ();
706 /* On the SPARC, the return address is not in the frame, it is in a
707 register. There is no way to access it off of the current frame
708 pointer, but it can be accessed off the previous frame pointer by
709 reading the value from the register window save area. */
710 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
711 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
715 /* Scan back COUNT frames to the specified frame. */
716 for (i = 0; i < count; i++)
718 /* Assume the dynamic chain pointer is in the word that the
719 frame address points to, unless otherwise specified. */
720 #ifdef DYNAMIC_CHAIN_ADDRESS
721 tem = DYNAMIC_CHAIN_ADDRESS (tem);
723 tem = memory_address (Pmode, tem);
724 tem = gen_frame_mem (Pmode, tem);
725 tem = copy_to_reg (tem);
728 /* For __builtin_frame_address, return what we've got. But, on
729 the SPARC for example, we may have to add a bias. */
730 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
731 #ifdef FRAME_ADDR_RTX
732 return FRAME_ADDR_RTX (tem);
737 /* For __builtin_return_address, get the return address from that frame. */
738 #ifdef RETURN_ADDR_RTX
739 tem = RETURN_ADDR_RTX (count, tem);
741 tem = memory_address (Pmode,
742 plus_constant (tem, GET_MODE_SIZE (Pmode)));
743 tem = gen_frame_mem (Pmode, tem);
748 /* Alias set used for setjmp buffer. */
749 static alias_set_type setjmp_alias_set = -1;
751 /* Construct the leading half of a __builtin_setjmp call. Control will
752 return to RECEIVER_LABEL. This is also called directly by the SJLJ
753 exception handling code. */
756 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
758 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
762 if (setjmp_alias_set == -1)
763 setjmp_alias_set = new_alias_set ();
765 buf_addr = convert_memory_address (Pmode, buf_addr);
767 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
769 /* We store the frame pointer and the address of receiver_label in
770 the buffer and use the rest of it for the stack save area, which
771 is machine-dependent. */
773 mem = gen_rtx_MEM (Pmode, buf_addr);
774 set_mem_alias_set (mem, setjmp_alias_set);
775 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
777 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
778 set_mem_alias_set (mem, setjmp_alias_set);
780 emit_move_insn (validize_mem (mem),
781 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
783 stack_save = gen_rtx_MEM (sa_mode,
784 plus_constant (buf_addr,
785 2 * GET_MODE_SIZE (Pmode)));
786 set_mem_alias_set (stack_save, setjmp_alias_set);
787 emit_stack_save (SAVE_NONLOCAL, &stack_save);
789 /* If there is further processing to do, do it. */
790 #ifdef HAVE_builtin_setjmp_setup
791 if (HAVE_builtin_setjmp_setup)
792 emit_insn (gen_builtin_setjmp_setup (buf_addr));
795 /* Tell optimize_save_area_alloca that extra work is going to
796 need to go on during alloca. */
797 cfun->calls_setjmp = 1;
799 /* We have a nonlocal label. */
800 cfun->has_nonlocal_label = 1;
803 /* Construct the trailing part of a __builtin_setjmp call. This is
804 also called directly by the SJLJ exception handling code. */
807 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
811 /* Clobber the FP when we get here, so we have to make sure it's
812 marked as used by this function. */
813 emit_use (hard_frame_pointer_rtx);
815 /* Mark the static chain as clobbered here so life information
816 doesn't get messed up for it. */
817 chain = targetm.calls.static_chain (current_function_decl, true);
818 if (chain && REG_P (chain))
819 emit_clobber (chain);
821 /* Now put in the code to restore the frame pointer, and argument
822 pointer, if needed. */
823 #ifdef HAVE_nonlocal_goto
824 if (! HAVE_nonlocal_goto)
827 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
828 /* This might change the hard frame pointer in ways that aren't
829 apparent to early optimization passes, so force a clobber. */
830 emit_clobber (hard_frame_pointer_rtx);
833 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
834 if (fixed_regs[ARG_POINTER_REGNUM])
836 #ifdef ELIMINABLE_REGS
838 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
840 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
841 if (elim_regs[i].from == ARG_POINTER_REGNUM
842 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
845 if (i == ARRAY_SIZE (elim_regs))
848 /* Now restore our arg pointer from the address at which it
849 was saved in our stack frame. */
850 emit_move_insn (crtl->args.internal_arg_pointer,
851 copy_to_reg (get_arg_pointer_save_area ()));
856 #ifdef HAVE_builtin_setjmp_receiver
857 if (HAVE_builtin_setjmp_receiver)
858 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
861 #ifdef HAVE_nonlocal_goto_receiver
862 if (HAVE_nonlocal_goto_receiver)
863 emit_insn (gen_nonlocal_goto_receiver ());
868 /* We must not allow the code we just generated to be reordered by
869 scheduling. Specifically, the update of the frame pointer must
870 happen immediately, not later. */
871 emit_insn (gen_blockage ());
874 /* __builtin_longjmp is passed a pointer to an array of five words (not
875 all will be used on all machines). It operates similarly to the C
876 library function of the same name, but is more efficient. Much of
877 the code below is copied from the handling of non-local gotos. */
880 expand_builtin_longjmp (rtx buf_addr, rtx value)
882 rtx fp, lab, stack, insn, last;
883 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
885 /* DRAP is needed for stack realign if longjmp is expanded to current
887 if (SUPPORTS_STACK_ALIGNMENT)
888 crtl->need_drap = true;
890 if (setjmp_alias_set == -1)
891 setjmp_alias_set = new_alias_set ();
893 buf_addr = convert_memory_address (Pmode, buf_addr);
895 buf_addr = force_reg (Pmode, buf_addr);
897 /* We require that the user must pass a second argument of 1, because
898 that is what builtin_setjmp will return. */
899 gcc_assert (value == const1_rtx);
901 last = get_last_insn ();
902 #ifdef HAVE_builtin_longjmp
903 if (HAVE_builtin_longjmp)
904 emit_insn (gen_builtin_longjmp (buf_addr));
908 fp = gen_rtx_MEM (Pmode, buf_addr);
909 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
910 GET_MODE_SIZE (Pmode)));
912 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
913 2 * GET_MODE_SIZE (Pmode)));
914 set_mem_alias_set (fp, setjmp_alias_set);
915 set_mem_alias_set (lab, setjmp_alias_set);
916 set_mem_alias_set (stack, setjmp_alias_set);
918 /* Pick up FP, label, and SP from the block and jump. This code is
919 from expand_goto in stmt.c; see there for detailed comments. */
920 #ifdef HAVE_nonlocal_goto
921 if (HAVE_nonlocal_goto)
922 /* We have to pass a value to the nonlocal_goto pattern that will
923 get copied into the static_chain pointer, but it does not matter
924 what that value is, because builtin_setjmp does not use it. */
925 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
929 lab = copy_to_reg (lab);
931 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
932 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
934 emit_move_insn (hard_frame_pointer_rtx, fp);
935 emit_stack_restore (SAVE_NONLOCAL, stack);
937 emit_use (hard_frame_pointer_rtx);
938 emit_use (stack_pointer_rtx);
939 emit_indirect_jump (lab);
943 /* Search backwards and mark the jump insn as a non-local goto.
944 Note that this precludes the use of __builtin_longjmp to a
945 __builtin_setjmp target in the same function. However, we've
946 already cautioned the user that these functions are for
947 internal exception handling use only. */
948 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
950 gcc_assert (insn != last);
954 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
957 else if (CALL_P (insn))
962 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
963 and the address of the save area. */
966 expand_builtin_nonlocal_goto (tree exp)
968 tree t_label, t_save_area;
969 rtx r_label, r_save_area, r_fp, r_sp, insn;
971 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
974 t_label = CALL_EXPR_ARG (exp, 0);
975 t_save_area = CALL_EXPR_ARG (exp, 1);
977 r_label = expand_normal (t_label);
978 r_label = convert_memory_address (Pmode, r_label);
979 r_save_area = expand_normal (t_save_area);
980 r_save_area = convert_memory_address (Pmode, r_save_area);
981 /* Copy the address of the save location to a register just in case it was based
982 on the frame pointer. */
983 r_save_area = copy_to_reg (r_save_area);
984 r_fp = gen_rtx_MEM (Pmode, r_save_area);
985 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
986 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
988 crtl->has_nonlocal_goto = 1;
990 #ifdef HAVE_nonlocal_goto
991 /* ??? We no longer need to pass the static chain value, afaik. */
992 if (HAVE_nonlocal_goto)
993 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
997 r_label = copy_to_reg (r_label);
999 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1000 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1002 /* Restore frame pointer for containing function.
1003 This sets the actual hard register used for the frame pointer
1004 to the location of the function's incoming static chain info.
1005 The non-local goto handler will then adjust it to contain the
1006 proper value and reload the argument pointer, if needed. */
1007 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1008 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1010 /* USE of hard_frame_pointer_rtx added for consistency;
1011 not clear if really needed. */
1012 emit_use (hard_frame_pointer_rtx);
1013 emit_use (stack_pointer_rtx);
1015 /* If the architecture is using a GP register, we must
1016 conservatively assume that the target function makes use of it.
1017 The prologue of functions with nonlocal gotos must therefore
1018 initialize the GP register to the appropriate value, and we
1019 must then make sure that this value is live at the point
1020 of the jump. (Note that this doesn't necessarily apply
1021 to targets with a nonlocal_goto pattern; they are free
1022 to implement it in their own way. Note also that this is
1023 a no-op if the GP register is a global invariant.) */
1024 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1025 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1026 emit_use (pic_offset_table_rtx);
1028 emit_indirect_jump (r_label);
1031 /* Search backwards to the jump insn and mark it as a
1033 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1037 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1040 else if (CALL_P (insn))
1047 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1048 (not all will be used on all machines) that was passed to __builtin_setjmp.
1049 It updates the stack pointer in that block to correspond to the current
1053 expand_builtin_update_setjmp_buf (rtx buf_addr)
1055 enum machine_mode sa_mode = Pmode;
1059 #ifdef HAVE_save_stack_nonlocal
1060 if (HAVE_save_stack_nonlocal)
1061 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1063 #ifdef STACK_SAVEAREA_MODE
1064 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1068 = gen_rtx_MEM (sa_mode,
1071 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1075 emit_insn (gen_setjmp ());
1078 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1081 /* Expand a call to __builtin_prefetch. For a target that does not support
1082 data prefetch, evaluate the memory address argument in case it has side
1086 expand_builtin_prefetch (tree exp)
1088 tree arg0, arg1, arg2;
1092 if (!validate_arglist (exp, POINTER_TYPE, 0))
1095 arg0 = CALL_EXPR_ARG (exp, 0);
1097 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1098 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1100 nargs = call_expr_nargs (exp);
1102 arg1 = CALL_EXPR_ARG (exp, 1);
1104 arg1 = integer_zero_node;
1106 arg2 = CALL_EXPR_ARG (exp, 2);
1108 arg2 = integer_three_node;
1110 /* Argument 0 is an address. */
1111 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1113 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1114 if (TREE_CODE (arg1) != INTEGER_CST)
1116 error ("second argument to %<__builtin_prefetch%> must be a constant");
1117 arg1 = integer_zero_node;
1119 op1 = expand_normal (arg1);
1120 /* Argument 1 must be either zero or one. */
1121 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1123 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1128 /* Argument 2 (locality) must be a compile-time constant int. */
1129 if (TREE_CODE (arg2) != INTEGER_CST)
1131 error ("third argument to %<__builtin_prefetch%> must be a constant");
1132 arg2 = integer_zero_node;
1134 op2 = expand_normal (arg2);
1135 /* Argument 2 must be 0, 1, 2, or 3. */
1136 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1138 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1142 #ifdef HAVE_prefetch
1145 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1147 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1148 || (GET_MODE (op0) != Pmode))
1150 op0 = convert_memory_address (Pmode, op0);
1151 op0 = force_reg (Pmode, op0);
1153 emit_insn (gen_prefetch (op0, op1, op2));
1157 /* Don't do anything with direct references to volatile memory, but
1158 generate code to handle other side effects. */
1159 if (!MEM_P (op0) && side_effects_p (op0))
1163 /* Get a MEM rtx for expression EXP which is the address of an operand
1164 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1165 the maximum length of the block of memory that might be accessed or
1169 get_memory_rtx (tree exp, tree len)
1171 tree orig_exp = exp;
1175 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1176 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1177 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1178 exp = TREE_OPERAND (exp, 0);
1180 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1181 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1183 /* Get an expression we can use to find the attributes to assign to MEM.
1184 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1185 we can. First remove any nops. */
1186 while (CONVERT_EXPR_P (exp)
1187 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1188 exp = TREE_OPERAND (exp, 0);
1191 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1192 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1193 && host_integerp (TREE_OPERAND (exp, 1), 0)
1194 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1195 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1196 else if (TREE_CODE (exp) == ADDR_EXPR)
1197 exp = TREE_OPERAND (exp, 0);
1198 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1199 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1203 /* Honor attributes derived from exp, except for the alias set
1204 (as builtin stringops may alias with anything) and the size
1205 (as stringops may access multiple array elements). */
1208 set_mem_attributes (mem, exp, 0);
1211 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1213 /* Allow the string and memory builtins to overflow from one
1214 field into another, see http://gcc.gnu.org/PR23561.
1215 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1216 memory accessed by the string or memory builtin will fit
1217 within the field. */
1218 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1220 tree mem_expr = MEM_EXPR (mem);
1221 HOST_WIDE_INT offset = -1, length = -1;
1224 while (TREE_CODE (inner) == ARRAY_REF
1225 || CONVERT_EXPR_P (inner)
1226 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1227 || TREE_CODE (inner) == SAVE_EXPR)
1228 inner = TREE_OPERAND (inner, 0);
1230 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1232 if (MEM_OFFSET (mem)
1233 && CONST_INT_P (MEM_OFFSET (mem)))
1234 offset = INTVAL (MEM_OFFSET (mem));
1236 if (offset >= 0 && len && host_integerp (len, 0))
1237 length = tree_low_cst (len, 0);
1239 while (TREE_CODE (inner) == COMPONENT_REF)
1241 tree field = TREE_OPERAND (inner, 1);
1242 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1243 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1245 /* Bitfields are generally not byte-addressable. */
1246 gcc_assert (!DECL_BIT_FIELD (field)
1247 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1248 % BITS_PER_UNIT) == 0
1249 && host_integerp (DECL_SIZE (field), 0)
1250 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1251 % BITS_PER_UNIT) == 0));
1253 /* If we can prove that the memory starting at XEXP (mem, 0) and
1254 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1255 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1256 fields without DECL_SIZE_UNIT like flexible array members. */
1258 && DECL_SIZE_UNIT (field)
1259 && host_integerp (DECL_SIZE_UNIT (field), 0))
1262 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1265 && offset + length <= size)
1270 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1271 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1272 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1280 mem_expr = TREE_OPERAND (mem_expr, 0);
1281 inner = TREE_OPERAND (inner, 0);
1284 if (mem_expr == NULL)
1286 if (mem_expr != MEM_EXPR (mem))
1288 set_mem_expr (mem, mem_expr);
1289 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1292 set_mem_alias_set (mem, 0);
1293 set_mem_size (mem, NULL_RTX);
1299 /* Built-in functions to perform an untyped call and return. */
1301 #define apply_args_mode \
1302 (this_target_builtins->x_apply_args_mode)
1303 #define apply_result_mode \
1304 (this_target_builtins->x_apply_result_mode)
1306 /* Return the size required for the block returned by __builtin_apply_args,
1307 and initialize apply_args_mode. */
1310 apply_args_size (void)
1312 static int size = -1;
1315 enum machine_mode mode;
1317 /* The values computed by this function never change. */
1320 /* The first value is the incoming arg-pointer. */
1321 size = GET_MODE_SIZE (Pmode);
1323 /* The second value is the structure value address unless this is
1324 passed as an "invisible" first argument. */
1325 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1326 size += GET_MODE_SIZE (Pmode);
1328 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1329 if (FUNCTION_ARG_REGNO_P (regno))
1331 mode = targetm.calls.get_raw_arg_mode (regno);
1333 gcc_assert (mode != VOIDmode);
1335 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1336 if (size % align != 0)
1337 size = CEIL (size, align) * align;
1338 size += GET_MODE_SIZE (mode);
1339 apply_args_mode[regno] = mode;
1343 apply_args_mode[regno] = VOIDmode;
1349 /* Return the size required for the block returned by __builtin_apply,
1350 and initialize apply_result_mode. */
1353 apply_result_size (void)
1355 static int size = -1;
1357 enum machine_mode mode;
1359 /* The values computed by this function never change. */
1364 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1365 if (targetm.calls.function_value_regno_p (regno))
1367 mode = targetm.calls.get_raw_result_mode (regno);
1369 gcc_assert (mode != VOIDmode);
1371 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1372 if (size % align != 0)
1373 size = CEIL (size, align) * align;
1374 size += GET_MODE_SIZE (mode);
1375 apply_result_mode[regno] = mode;
1378 apply_result_mode[regno] = VOIDmode;
1380 /* Allow targets that use untyped_call and untyped_return to override
1381 the size so that machine-specific information can be stored here. */
1382 #ifdef APPLY_RESULT_SIZE
1383 size = APPLY_RESULT_SIZE;
1389 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1390 /* Create a vector describing the result block RESULT. If SAVEP is true,
1391 the result block is used to save the values; otherwise it is used to
1392 restore the values. */
1395 result_vector (int savep, rtx result)
1397 int regno, size, align, nelts;
1398 enum machine_mode mode;
1400 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1403 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1404 if ((mode = apply_result_mode[regno]) != VOIDmode)
1406 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1407 if (size % align != 0)
1408 size = CEIL (size, align) * align;
1409 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1410 mem = adjust_address (result, mode, size);
1411 savevec[nelts++] = (savep
1412 ? gen_rtx_SET (VOIDmode, mem, reg)
1413 : gen_rtx_SET (VOIDmode, reg, mem));
1414 size += GET_MODE_SIZE (mode);
1416 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1418 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1420 /* Save the state required to perform an untyped call with the same
1421 arguments as were passed to the current function. */
1424 expand_builtin_apply_args_1 (void)
1427 int size, align, regno;
1428 enum machine_mode mode;
1429 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1431 /* Create a block where the arg-pointer, structure value address,
1432 and argument registers can be saved. */
1433 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1435 /* Walk past the arg-pointer and structure value address. */
1436 size = GET_MODE_SIZE (Pmode);
1437 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1438 size += GET_MODE_SIZE (Pmode);
1440 /* Save each register used in calling a function to the block. */
1441 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1442 if ((mode = apply_args_mode[regno]) != VOIDmode)
1444 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1445 if (size % align != 0)
1446 size = CEIL (size, align) * align;
1448 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1450 emit_move_insn (adjust_address (registers, mode, size), tem);
1451 size += GET_MODE_SIZE (mode);
1454 /* Save the arg pointer to the block. */
1455 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1456 #ifdef STACK_GROWS_DOWNWARD
1457 /* We need the pointer as the caller actually passed them to us, not
1458 as we might have pretended they were passed. Make sure it's a valid
1459 operand, as emit_move_insn isn't expected to handle a PLUS. */
1461 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1464 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1466 size = GET_MODE_SIZE (Pmode);
1468 /* Save the structure value address unless this is passed as an
1469 "invisible" first argument. */
1470 if (struct_incoming_value)
1472 emit_move_insn (adjust_address (registers, Pmode, size),
1473 copy_to_reg (struct_incoming_value));
1474 size += GET_MODE_SIZE (Pmode);
1477 /* Return the address of the block. */
1478 return copy_addr_to_reg (XEXP (registers, 0));
1481 /* __builtin_apply_args returns block of memory allocated on
1482 the stack into which is stored the arg pointer, structure
1483 value address, static chain, and all the registers that might
1484 possibly be used in performing a function call. The code is
1485 moved to the start of the function so the incoming values are
1489 expand_builtin_apply_args (void)
1491 /* Don't do __builtin_apply_args more than once in a function.
1492 Save the result of the first call and reuse it. */
1493 if (apply_args_value != 0)
1494 return apply_args_value;
1496 /* When this function is called, it means that registers must be
1497 saved on entry to this function. So we migrate the
1498 call to the first insn of this function. */
1503 temp = expand_builtin_apply_args_1 ();
1507 apply_args_value = temp;
1509 /* Put the insns after the NOTE that starts the function.
1510 If this is inside a start_sequence, make the outer-level insn
1511 chain current, so the code is placed at the start of the
1512 function. If internal_arg_pointer is a non-virtual pseudo,
1513 it needs to be placed after the function that initializes
1515 push_topmost_sequence ();
1516 if (REG_P (crtl->args.internal_arg_pointer)
1517 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1518 emit_insn_before (seq, parm_birth_insn);
1520 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1521 pop_topmost_sequence ();
1526 /* Perform an untyped call and save the state required to perform an
1527 untyped return of whatever value was returned by the given function. */
1530 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1532 int size, align, regno;
1533 enum machine_mode mode;
1534 rtx incoming_args, result, reg, dest, src, call_insn;
1535 rtx old_stack_level = 0;
1536 rtx call_fusage = 0;
1537 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1539 arguments = convert_memory_address (Pmode, arguments);
1541 /* Create a block where the return registers can be saved. */
1542 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1544 /* Fetch the arg pointer from the ARGUMENTS block. */
1545 incoming_args = gen_reg_rtx (Pmode);
1546 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1547 #ifndef STACK_GROWS_DOWNWARD
1548 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1549 incoming_args, 0, OPTAB_LIB_WIDEN);
1552 /* Push a new argument block and copy the arguments. Do not allow
1553 the (potential) memcpy call below to interfere with our stack
1555 do_pending_stack_adjust ();
1558 /* Save the stack with nonlocal if available. */
1559 #ifdef HAVE_save_stack_nonlocal
1560 if (HAVE_save_stack_nonlocal)
1561 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1564 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1566 /* Allocate a block of memory onto the stack and copy the memory
1567 arguments to the outgoing arguments address. We can pass TRUE
1568 as the 4th argument because we just saved the stack pointer
1569 and will restore it right after the call. */
1570 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1572 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1573 may have already set current_function_calls_alloca to true.
1574 current_function_calls_alloca won't be set if argsize is zero,
1575 so we have to guarantee need_drap is true here. */
1576 if (SUPPORTS_STACK_ALIGNMENT)
1577 crtl->need_drap = true;
1579 dest = virtual_outgoing_args_rtx;
1580 #ifndef STACK_GROWS_DOWNWARD
1581 if (CONST_INT_P (argsize))
1582 dest = plus_constant (dest, -INTVAL (argsize));
1584 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1586 dest = gen_rtx_MEM (BLKmode, dest);
1587 set_mem_align (dest, PARM_BOUNDARY);
1588 src = gen_rtx_MEM (BLKmode, incoming_args);
1589 set_mem_align (src, PARM_BOUNDARY);
1590 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1592 /* Refer to the argument block. */
1594 arguments = gen_rtx_MEM (BLKmode, arguments);
1595 set_mem_align (arguments, PARM_BOUNDARY);
1597 /* Walk past the arg-pointer and structure value address. */
1598 size = GET_MODE_SIZE (Pmode);
1600 size += GET_MODE_SIZE (Pmode);
1602 /* Restore each of the registers previously saved. Make USE insns
1603 for each of these registers for use in making the call. */
1604 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1605 if ((mode = apply_args_mode[regno]) != VOIDmode)
1607 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1608 if (size % align != 0)
1609 size = CEIL (size, align) * align;
1610 reg = gen_rtx_REG (mode, regno);
1611 emit_move_insn (reg, adjust_address (arguments, mode, size));
1612 use_reg (&call_fusage, reg);
1613 size += GET_MODE_SIZE (mode);
1616 /* Restore the structure value address unless this is passed as an
1617 "invisible" first argument. */
1618 size = GET_MODE_SIZE (Pmode);
1621 rtx value = gen_reg_rtx (Pmode);
1622 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1623 emit_move_insn (struct_value, value);
1624 if (REG_P (struct_value))
1625 use_reg (&call_fusage, struct_value);
1626 size += GET_MODE_SIZE (Pmode);
1629 /* All arguments and registers used for the call are set up by now! */
1630 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1632 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1633 and we don't want to load it into a register as an optimization,
1634 because prepare_call_address already did it if it should be done. */
1635 if (GET_CODE (function) != SYMBOL_REF)
1636 function = memory_address (FUNCTION_MODE, function);
1638 /* Generate the actual call instruction and save the return value. */
1639 #ifdef HAVE_untyped_call
1640 if (HAVE_untyped_call)
1641 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1642 result, result_vector (1, result)));
1645 #ifdef HAVE_call_value
1646 if (HAVE_call_value)
1650 /* Locate the unique return register. It is not possible to
1651 express a call that sets more than one return register using
1652 call_value; use untyped_call for that. In fact, untyped_call
1653 only needs to save the return registers in the given block. */
1654 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1655 if ((mode = apply_result_mode[regno]) != VOIDmode)
1657 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1659 valreg = gen_rtx_REG (mode, regno);
1662 emit_call_insn (GEN_CALL_VALUE (valreg,
1663 gen_rtx_MEM (FUNCTION_MODE, function),
1664 const0_rtx, NULL_RTX, const0_rtx));
1666 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1672 /* Find the CALL insn we just emitted, and attach the register usage
1674 call_insn = last_call_insn ();
1675 add_function_usage_to (call_insn, call_fusage);
1677 /* Restore the stack. */
1678 #ifdef HAVE_save_stack_nonlocal
1679 if (HAVE_save_stack_nonlocal)
1680 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1683 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1687 /* Return the address of the result block. */
1688 result = copy_addr_to_reg (XEXP (result, 0));
1689 return convert_memory_address (ptr_mode, result);
1692 /* Perform an untyped return. */
1695 expand_builtin_return (rtx result)
1697 int size, align, regno;
1698 enum machine_mode mode;
1700 rtx call_fusage = 0;
1702 result = convert_memory_address (Pmode, result);
1704 apply_result_size ();
1705 result = gen_rtx_MEM (BLKmode, result);
1707 #ifdef HAVE_untyped_return
1708 if (HAVE_untyped_return)
1710 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1716 /* Restore the return value and note that each value is used. */
1718 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1719 if ((mode = apply_result_mode[regno]) != VOIDmode)
1721 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1722 if (size % align != 0)
1723 size = CEIL (size, align) * align;
1724 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1725 emit_move_insn (reg, adjust_address (result, mode, size));
1727 push_to_sequence (call_fusage);
1729 call_fusage = get_insns ();
1731 size += GET_MODE_SIZE (mode);
1734 /* Put the USE insns before the return. */
1735 emit_insn (call_fusage);
1737 /* Return whatever values was restored by jumping directly to the end
1739 expand_naked_return ();
1742 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1744 static enum type_class
1745 type_to_class (tree type)
1747 switch (TREE_CODE (type))
1749 case VOID_TYPE: return void_type_class;
1750 case INTEGER_TYPE: return integer_type_class;
1751 case ENUMERAL_TYPE: return enumeral_type_class;
1752 case BOOLEAN_TYPE: return boolean_type_class;
1753 case POINTER_TYPE: return pointer_type_class;
1754 case REFERENCE_TYPE: return reference_type_class;
1755 case OFFSET_TYPE: return offset_type_class;
1756 case REAL_TYPE: return real_type_class;
1757 case COMPLEX_TYPE: return complex_type_class;
1758 case FUNCTION_TYPE: return function_type_class;
1759 case METHOD_TYPE: return method_type_class;
1760 case RECORD_TYPE: return record_type_class;
1762 case QUAL_UNION_TYPE: return union_type_class;
1763 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1764 ? string_type_class : array_type_class);
1765 case LANG_TYPE: return lang_type_class;
1766 default: return no_type_class;
1770 /* Expand a call EXP to __builtin_classify_type. */
1773 expand_builtin_classify_type (tree exp)
1775 if (call_expr_nargs (exp))
1776 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1777 return GEN_INT (no_type_class);
1780 /* This helper macro, meant to be used in mathfn_built_in below,
1781 determines which among a set of three builtin math functions is
1782 appropriate for a given type mode. The `F' and `L' cases are
1783 automatically generated from the `double' case. */
1784 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1785 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1786 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1787 fcodel = BUILT_IN_MATHFN##L ; break;
1788 /* Similar to above, but appends _R after any F/L suffix. */
1789 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1790 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1791 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1792 fcodel = BUILT_IN_MATHFN##L_R ; break;
1794 /* Return mathematic function equivalent to FN but operating directly
1795 on TYPE, if available. If IMPLICIT is true find the function in
1796 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1797 can't do the conversion, return zero. */
1800 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1802 tree const *const fn_arr
1803 = implicit ? implicit_built_in_decls : built_in_decls;
1804 enum built_in_function fcode, fcodef, fcodel;
1808 CASE_MATHFN (BUILT_IN_ACOS)
1809 CASE_MATHFN (BUILT_IN_ACOSH)
1810 CASE_MATHFN (BUILT_IN_ASIN)
1811 CASE_MATHFN (BUILT_IN_ASINH)
1812 CASE_MATHFN (BUILT_IN_ATAN)
1813 CASE_MATHFN (BUILT_IN_ATAN2)
1814 CASE_MATHFN (BUILT_IN_ATANH)
1815 CASE_MATHFN (BUILT_IN_CBRT)
1816 CASE_MATHFN (BUILT_IN_CEIL)
1817 CASE_MATHFN (BUILT_IN_CEXPI)
1818 CASE_MATHFN (BUILT_IN_COPYSIGN)
1819 CASE_MATHFN (BUILT_IN_COS)
1820 CASE_MATHFN (BUILT_IN_COSH)
1821 CASE_MATHFN (BUILT_IN_DREM)
1822 CASE_MATHFN (BUILT_IN_ERF)
1823 CASE_MATHFN (BUILT_IN_ERFC)
1824 CASE_MATHFN (BUILT_IN_EXP)
1825 CASE_MATHFN (BUILT_IN_EXP10)
1826 CASE_MATHFN (BUILT_IN_EXP2)
1827 CASE_MATHFN (BUILT_IN_EXPM1)
1828 CASE_MATHFN (BUILT_IN_FABS)
1829 CASE_MATHFN (BUILT_IN_FDIM)
1830 CASE_MATHFN (BUILT_IN_FLOOR)
1831 CASE_MATHFN (BUILT_IN_FMA)
1832 CASE_MATHFN (BUILT_IN_FMAX)
1833 CASE_MATHFN (BUILT_IN_FMIN)
1834 CASE_MATHFN (BUILT_IN_FMOD)
1835 CASE_MATHFN (BUILT_IN_FREXP)
1836 CASE_MATHFN (BUILT_IN_GAMMA)
1837 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1838 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1839 CASE_MATHFN (BUILT_IN_HYPOT)
1840 CASE_MATHFN (BUILT_IN_ILOGB)
1841 CASE_MATHFN (BUILT_IN_INF)
1842 CASE_MATHFN (BUILT_IN_ISINF)
1843 CASE_MATHFN (BUILT_IN_J0)
1844 CASE_MATHFN (BUILT_IN_J1)
1845 CASE_MATHFN (BUILT_IN_JN)
1846 CASE_MATHFN (BUILT_IN_LCEIL)
1847 CASE_MATHFN (BUILT_IN_LDEXP)
1848 CASE_MATHFN (BUILT_IN_LFLOOR)
1849 CASE_MATHFN (BUILT_IN_LGAMMA)
1850 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1851 CASE_MATHFN (BUILT_IN_LLCEIL)
1852 CASE_MATHFN (BUILT_IN_LLFLOOR)
1853 CASE_MATHFN (BUILT_IN_LLRINT)
1854 CASE_MATHFN (BUILT_IN_LLROUND)
1855 CASE_MATHFN (BUILT_IN_LOG)
1856 CASE_MATHFN (BUILT_IN_LOG10)
1857 CASE_MATHFN (BUILT_IN_LOG1P)
1858 CASE_MATHFN (BUILT_IN_LOG2)
1859 CASE_MATHFN (BUILT_IN_LOGB)
1860 CASE_MATHFN (BUILT_IN_LRINT)
1861 CASE_MATHFN (BUILT_IN_LROUND)
1862 CASE_MATHFN (BUILT_IN_MODF)
1863 CASE_MATHFN (BUILT_IN_NAN)
1864 CASE_MATHFN (BUILT_IN_NANS)
1865 CASE_MATHFN (BUILT_IN_NEARBYINT)
1866 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1867 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1868 CASE_MATHFN (BUILT_IN_POW)
1869 CASE_MATHFN (BUILT_IN_POWI)
1870 CASE_MATHFN (BUILT_IN_POW10)
1871 CASE_MATHFN (BUILT_IN_REMAINDER)
1872 CASE_MATHFN (BUILT_IN_REMQUO)
1873 CASE_MATHFN (BUILT_IN_RINT)
1874 CASE_MATHFN (BUILT_IN_ROUND)
1875 CASE_MATHFN (BUILT_IN_SCALB)
1876 CASE_MATHFN (BUILT_IN_SCALBLN)
1877 CASE_MATHFN (BUILT_IN_SCALBN)
1878 CASE_MATHFN (BUILT_IN_SIGNBIT)
1879 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1880 CASE_MATHFN (BUILT_IN_SIN)
1881 CASE_MATHFN (BUILT_IN_SINCOS)
1882 CASE_MATHFN (BUILT_IN_SINH)
1883 CASE_MATHFN (BUILT_IN_SQRT)
1884 CASE_MATHFN (BUILT_IN_TAN)
1885 CASE_MATHFN (BUILT_IN_TANH)
1886 CASE_MATHFN (BUILT_IN_TGAMMA)
1887 CASE_MATHFN (BUILT_IN_TRUNC)
1888 CASE_MATHFN (BUILT_IN_Y0)
1889 CASE_MATHFN (BUILT_IN_Y1)
1890 CASE_MATHFN (BUILT_IN_YN)
1896 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1897 return fn_arr[fcode];
1898 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1899 return fn_arr[fcodef];
1900 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1901 return fn_arr[fcodel];
1906 /* Like mathfn_built_in_1(), but always use the implicit array. */
1909 mathfn_built_in (tree type, enum built_in_function fn)
1911 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1914 /* If errno must be maintained, expand the RTL to check if the result,
1915 TARGET, of a built-in function call, EXP, is NaN, and if so set
1919 expand_errno_check (tree exp, rtx target)
1921 rtx lab = gen_label_rtx ();
1923 /* Test the result; if it is NaN, set errno=EDOM because
1924 the argument was not in the domain. */
1925 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1926 NULL_RTX, NULL_RTX, lab,
1927 /* The jump is very likely. */
1928 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1931 /* If this built-in doesn't throw an exception, set errno directly. */
1932 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1934 #ifdef GEN_ERRNO_RTX
1935 rtx errno_rtx = GEN_ERRNO_RTX;
1938 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1940 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1946 /* Make sure the library call isn't expanded as a tail call. */
1947 CALL_EXPR_TAILCALL (exp) = 0;
1949 /* We can't set errno=EDOM directly; let the library call do it.
1950 Pop the arguments right away in case the call gets deleted. */
1952 expand_call (exp, target, 0);
1957 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1958 Return NULL_RTX if a normal call should be emitted rather than expanding
1959 the function in-line. EXP is the expression that is a call to the builtin
1960 function; if convenient, the result should be placed in TARGET.
1961 SUBTARGET may be used as the target for computing one of EXP's operands. */
1964 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1966 optab builtin_optab;
1968 tree fndecl = get_callee_fndecl (exp);
1969 enum machine_mode mode;
1970 bool errno_set = false;
1973 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1976 arg = CALL_EXPR_ARG (exp, 0);
1978 switch (DECL_FUNCTION_CODE (fndecl))
1980 CASE_FLT_FN (BUILT_IN_SQRT):
1981 errno_set = ! tree_expr_nonnegative_p (arg);
1982 builtin_optab = sqrt_optab;
1984 CASE_FLT_FN (BUILT_IN_EXP):
1985 errno_set = true; builtin_optab = exp_optab; break;
1986 CASE_FLT_FN (BUILT_IN_EXP10):
1987 CASE_FLT_FN (BUILT_IN_POW10):
1988 errno_set = true; builtin_optab = exp10_optab; break;
1989 CASE_FLT_FN (BUILT_IN_EXP2):
1990 errno_set = true; builtin_optab = exp2_optab; break;
1991 CASE_FLT_FN (BUILT_IN_EXPM1):
1992 errno_set = true; builtin_optab = expm1_optab; break;
1993 CASE_FLT_FN (BUILT_IN_LOGB):
1994 errno_set = true; builtin_optab = logb_optab; break;
1995 CASE_FLT_FN (BUILT_IN_LOG):
1996 errno_set = true; builtin_optab = log_optab; break;
1997 CASE_FLT_FN (BUILT_IN_LOG10):
1998 errno_set = true; builtin_optab = log10_optab; break;
1999 CASE_FLT_FN (BUILT_IN_LOG2):
2000 errno_set = true; builtin_optab = log2_optab; break;
2001 CASE_FLT_FN (BUILT_IN_LOG1P):
2002 errno_set = true; builtin_optab = log1p_optab; break;
2003 CASE_FLT_FN (BUILT_IN_ASIN):
2004 builtin_optab = asin_optab; break;
2005 CASE_FLT_FN (BUILT_IN_ACOS):
2006 builtin_optab = acos_optab; break;
2007 CASE_FLT_FN (BUILT_IN_TAN):
2008 builtin_optab = tan_optab; break;
2009 CASE_FLT_FN (BUILT_IN_ATAN):
2010 builtin_optab = atan_optab; break;
2011 CASE_FLT_FN (BUILT_IN_FLOOR):
2012 builtin_optab = floor_optab; break;
2013 CASE_FLT_FN (BUILT_IN_CEIL):
2014 builtin_optab = ceil_optab; break;
2015 CASE_FLT_FN (BUILT_IN_TRUNC):
2016 builtin_optab = btrunc_optab; break;
2017 CASE_FLT_FN (BUILT_IN_ROUND):
2018 builtin_optab = round_optab; break;
2019 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2020 builtin_optab = nearbyint_optab;
2021 if (flag_trapping_math)
2023 /* Else fallthrough and expand as rint. */
2024 CASE_FLT_FN (BUILT_IN_RINT):
2025 builtin_optab = rint_optab; break;
2026 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2027 builtin_optab = significand_optab; break;
2032 /* Make a suitable register to place result in. */
2033 mode = TYPE_MODE (TREE_TYPE (exp));
2035 if (! flag_errno_math || ! HONOR_NANS (mode))
2038 /* Before working hard, check whether the instruction is available. */
2039 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2040 && (!errno_set || !optimize_insn_for_size_p ()))
2042 target = gen_reg_rtx (mode);
2044 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2045 need to expand the argument again. This way, we will not perform
2046 side-effects more the once. */
2047 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2049 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2053 /* Compute into TARGET.
2054 Set TARGET to wherever the result comes back. */
2055 target = expand_unop (mode, builtin_optab, op0, target, 0);
2060 expand_errno_check (exp, target);
2062 /* Output the entire sequence. */
2063 insns = get_insns ();
2069 /* If we were unable to expand via the builtin, stop the sequence
2070 (without outputting the insns) and call to the library function
2071 with the stabilized argument list. */
2075 return expand_call (exp, target, target == const0_rtx);
2078 /* Expand a call to the builtin binary math functions (pow and atan2).
2079 Return NULL_RTX if a normal call should be emitted rather than expanding the
2080 function in-line. EXP is the expression that is a call to the builtin
2081 function; if convenient, the result should be placed in TARGET.
2082 SUBTARGET may be used as the target for computing one of EXP's
2086 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2088 optab builtin_optab;
2089 rtx op0, op1, insns;
2090 int op1_type = REAL_TYPE;
2091 tree fndecl = get_callee_fndecl (exp);
2093 enum machine_mode mode;
2094 bool errno_set = true;
2096 switch (DECL_FUNCTION_CODE (fndecl))
2098 CASE_FLT_FN (BUILT_IN_SCALBN):
2099 CASE_FLT_FN (BUILT_IN_SCALBLN):
2100 CASE_FLT_FN (BUILT_IN_LDEXP):
2101 op1_type = INTEGER_TYPE;
2106 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2109 arg0 = CALL_EXPR_ARG (exp, 0);
2110 arg1 = CALL_EXPR_ARG (exp, 1);
2112 switch (DECL_FUNCTION_CODE (fndecl))
2114 CASE_FLT_FN (BUILT_IN_POW):
2115 builtin_optab = pow_optab; break;
2116 CASE_FLT_FN (BUILT_IN_ATAN2):
2117 builtin_optab = atan2_optab; break;
2118 CASE_FLT_FN (BUILT_IN_SCALB):
2119 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2121 builtin_optab = scalb_optab; break;
2122 CASE_FLT_FN (BUILT_IN_SCALBN):
2123 CASE_FLT_FN (BUILT_IN_SCALBLN):
2124 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2126 /* Fall through... */
2127 CASE_FLT_FN (BUILT_IN_LDEXP):
2128 builtin_optab = ldexp_optab; break;
2129 CASE_FLT_FN (BUILT_IN_FMOD):
2130 builtin_optab = fmod_optab; break;
2131 CASE_FLT_FN (BUILT_IN_REMAINDER):
2132 CASE_FLT_FN (BUILT_IN_DREM):
2133 builtin_optab = remainder_optab; break;
2138 /* Make a suitable register to place result in. */
2139 mode = TYPE_MODE (TREE_TYPE (exp));
2141 /* Before working hard, check whether the instruction is available. */
2142 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2145 target = gen_reg_rtx (mode);
2147 if (! flag_errno_math || ! HONOR_NANS (mode))
2150 if (errno_set && optimize_insn_for_size_p ())
2153 /* Always stabilize the argument list. */
2154 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2155 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2157 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2158 op1 = expand_normal (arg1);
2162 /* Compute into TARGET.
2163 Set TARGET to wherever the result comes back. */
2164 target = expand_binop (mode, builtin_optab, op0, op1,
2165 target, 0, OPTAB_DIRECT);
2167 /* If we were unable to expand via the builtin, stop the sequence
2168 (without outputting the insns) and call to the library function
2169 with the stabilized argument list. */
2173 return expand_call (exp, target, target == const0_rtx);
2177 expand_errno_check (exp, target);
2179 /* Output the entire sequence. */
2180 insns = get_insns ();
2187 /* Expand a call to the builtin trinary math functions (fma).
2188 Return NULL_RTX if a normal call should be emitted rather than expanding the
2189 function in-line. EXP is the expression that is a call to the builtin
2190 function; if convenient, the result should be placed in TARGET.
2191 SUBTARGET may be used as the target for computing one of EXP's
2195 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2197 optab builtin_optab;
2198 rtx op0, op1, op2, insns;
2199 tree fndecl = get_callee_fndecl (exp);
2200 tree arg0, arg1, arg2;
2201 enum machine_mode mode;
2203 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2206 arg0 = CALL_EXPR_ARG (exp, 0);
2207 arg1 = CALL_EXPR_ARG (exp, 1);
2208 arg2 = CALL_EXPR_ARG (exp, 2);
2210 switch (DECL_FUNCTION_CODE (fndecl))
2212 CASE_FLT_FN (BUILT_IN_FMA):
2213 builtin_optab = fma_optab; break;
2218 /* Make a suitable register to place result in. */
2219 mode = TYPE_MODE (TREE_TYPE (exp));
2221 /* Before working hard, check whether the instruction is available. */
2222 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2225 target = gen_reg_rtx (mode);
2227 /* Always stabilize the argument list. */
2228 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2229 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2230 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2232 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2233 op1 = expand_normal (arg1);
2234 op2 = expand_normal (arg2);
2238 /* Compute into TARGET.
2239 Set TARGET to wherever the result comes back. */
2240 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2243 /* If we were unable to expand via the builtin, stop the sequence
2244 (without outputting the insns) and call to the library function
2245 with the stabilized argument list. */
2249 return expand_call (exp, target, target == const0_rtx);
2252 /* Output the entire sequence. */
2253 insns = get_insns ();
2260 /* Expand a call to the builtin sin and cos math functions.
2261 Return NULL_RTX if a normal call should be emitted rather than expanding the
2262 function in-line. EXP is the expression that is a call to the builtin
2263 function; if convenient, the result should be placed in TARGET.
2264 SUBTARGET may be used as the target for computing one of EXP's
2268 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2270 optab builtin_optab;
2272 tree fndecl = get_callee_fndecl (exp);
2273 enum machine_mode mode;
2276 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2279 arg = CALL_EXPR_ARG (exp, 0);
2281 switch (DECL_FUNCTION_CODE (fndecl))
2283 CASE_FLT_FN (BUILT_IN_SIN):
2284 CASE_FLT_FN (BUILT_IN_COS):
2285 builtin_optab = sincos_optab; break;
2290 /* Make a suitable register to place result in. */
2291 mode = TYPE_MODE (TREE_TYPE (exp));
2293 /* Check if sincos insn is available, otherwise fallback
2294 to sin or cos insn. */
2295 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2296 switch (DECL_FUNCTION_CODE (fndecl))
2298 CASE_FLT_FN (BUILT_IN_SIN):
2299 builtin_optab = sin_optab; break;
2300 CASE_FLT_FN (BUILT_IN_COS):
2301 builtin_optab = cos_optab; break;
2306 /* Before working hard, check whether the instruction is available. */
2307 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2309 target = gen_reg_rtx (mode);
2311 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2312 need to expand the argument again. This way, we will not perform
2313 side-effects more the once. */
2314 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2316 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2320 /* Compute into TARGET.
2321 Set TARGET to wherever the result comes back. */
2322 if (builtin_optab == sincos_optab)
2326 switch (DECL_FUNCTION_CODE (fndecl))
2328 CASE_FLT_FN (BUILT_IN_SIN):
2329 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2331 CASE_FLT_FN (BUILT_IN_COS):
2332 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2337 gcc_assert (result);
2341 target = expand_unop (mode, builtin_optab, op0, target, 0);
2346 /* Output the entire sequence. */
2347 insns = get_insns ();
2353 /* If we were unable to expand via the builtin, stop the sequence
2354 (without outputting the insns) and call to the library function
2355 with the stabilized argument list. */
2359 target = expand_call (exp, target, target == const0_rtx);
2364 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2365 return an RTL instruction code that implements the functionality.
2366 If that isn't possible or available return CODE_FOR_nothing. */
2368 static enum insn_code
2369 interclass_mathfn_icode (tree arg, tree fndecl)
2371 bool errno_set = false;
2372 optab builtin_optab = 0;
2373 enum machine_mode mode;
2375 switch (DECL_FUNCTION_CODE (fndecl))
2377 CASE_FLT_FN (BUILT_IN_ILOGB):
2378 errno_set = true; builtin_optab = ilogb_optab; break;
2379 CASE_FLT_FN (BUILT_IN_ISINF):
2380 builtin_optab = isinf_optab; break;
2381 case BUILT_IN_ISNORMAL:
2382 case BUILT_IN_ISFINITE:
2383 CASE_FLT_FN (BUILT_IN_FINITE):
2384 case BUILT_IN_FINITED32:
2385 case BUILT_IN_FINITED64:
2386 case BUILT_IN_FINITED128:
2387 case BUILT_IN_ISINFD32:
2388 case BUILT_IN_ISINFD64:
2389 case BUILT_IN_ISINFD128:
2390 /* These builtins have no optabs (yet). */
2396 /* There's no easy way to detect the case we need to set EDOM. */
2397 if (flag_errno_math && errno_set)
2398 return CODE_FOR_nothing;
2400 /* Optab mode depends on the mode of the input argument. */
2401 mode = TYPE_MODE (TREE_TYPE (arg));
2404 return optab_handler (builtin_optab, mode);
2405 return CODE_FOR_nothing;
2408 /* Expand a call to one of the builtin math functions that operate on
2409 floating point argument and output an integer result (ilogb, isinf,
2411 Return 0 if a normal call should be emitted rather than expanding the
2412 function in-line. EXP is the expression that is a call to the builtin
2413 function; if convenient, the result should be placed in TARGET. */
2416 expand_builtin_interclass_mathfn (tree exp, rtx target)
2418 enum insn_code icode = CODE_FOR_nothing;
2420 tree fndecl = get_callee_fndecl (exp);
2421 enum machine_mode mode;
2424 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2427 arg = CALL_EXPR_ARG (exp, 0);
2428 icode = interclass_mathfn_icode (arg, fndecl);
2429 mode = TYPE_MODE (TREE_TYPE (arg));
2431 if (icode != CODE_FOR_nothing)
2433 rtx last = get_last_insn ();
2434 tree orig_arg = arg;
2435 /* Make a suitable register to place result in. */
2437 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2438 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2439 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2441 gcc_assert (insn_data[icode].operand[0].predicate
2442 (target, GET_MODE (target)));
2444 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2445 need to expand the argument again. This way, we will not perform
2446 side-effects more the once. */
2447 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2449 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2451 if (mode != GET_MODE (op0))
2452 op0 = convert_to_mode (mode, op0, 0);
2454 /* Compute into TARGET.
2455 Set TARGET to wherever the result comes back. */
2456 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2458 delete_insns_since (last);
2459 CALL_EXPR_ARG (exp, 0) = orig_arg;
2465 /* Expand a call to the builtin sincos math function.
2466 Return NULL_RTX if a normal call should be emitted rather than expanding the
2467 function in-line. EXP is the expression that is a call to the builtin
2471 expand_builtin_sincos (tree exp)
2473 rtx op0, op1, op2, target1, target2;
2474 enum machine_mode mode;
2475 tree arg, sinp, cosp;
2477 location_t loc = EXPR_LOCATION (exp);
2478 tree alias_type, alias_off;
2480 if (!validate_arglist (exp, REAL_TYPE,
2481 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2484 arg = CALL_EXPR_ARG (exp, 0);
2485 sinp = CALL_EXPR_ARG (exp, 1);
2486 cosp = CALL_EXPR_ARG (exp, 2);
2488 /* Make a suitable register to place result in. */
2489 mode = TYPE_MODE (TREE_TYPE (arg));
2491 /* Check if sincos insn is available, otherwise emit the call. */
2492 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2495 target1 = gen_reg_rtx (mode);
2496 target2 = gen_reg_rtx (mode);
2498 op0 = expand_normal (arg);
2499 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2500 alias_off = build_int_cst (alias_type, 0);
2501 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2503 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2506 /* Compute into target1 and target2.
2507 Set TARGET to wherever the result comes back. */
2508 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2509 gcc_assert (result);
2511 /* Move target1 and target2 to the memory locations indicated
2513 emit_move_insn (op1, target1);
2514 emit_move_insn (op2, target2);
2519 /* Expand a call to the internal cexpi builtin to the sincos math function.
2520 EXP is the expression that is a call to the builtin function; if convenient,
2521 the result should be placed in TARGET. */
2524 expand_builtin_cexpi (tree exp, rtx target)
2526 tree fndecl = get_callee_fndecl (exp);
2528 enum machine_mode mode;
2530 location_t loc = EXPR_LOCATION (exp);
2532 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2535 arg = CALL_EXPR_ARG (exp, 0);
2536 type = TREE_TYPE (arg);
2537 mode = TYPE_MODE (TREE_TYPE (arg));
2539 /* Try expanding via a sincos optab, fall back to emitting a libcall
2540 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2541 is only generated from sincos, cexp or if we have either of them. */
2542 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2544 op1 = gen_reg_rtx (mode);
2545 op2 = gen_reg_rtx (mode);
2547 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2549 /* Compute into op1 and op2. */
2550 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2552 else if (TARGET_HAS_SINCOS)
2554 tree call, fn = NULL_TREE;
2558 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2559 fn = built_in_decls[BUILT_IN_SINCOSF];
2560 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2561 fn = built_in_decls[BUILT_IN_SINCOS];
2562 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2563 fn = built_in_decls[BUILT_IN_SINCOSL];
2567 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2568 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2569 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2570 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2571 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2572 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2574 /* Make sure not to fold the sincos call again. */
2575 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2576 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2577 call, 3, arg, top1, top2));
2581 tree call, fn = NULL_TREE, narg;
2582 tree ctype = build_complex_type (type);
2584 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2585 fn = built_in_decls[BUILT_IN_CEXPF];
2586 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2587 fn = built_in_decls[BUILT_IN_CEXP];
2588 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2589 fn = built_in_decls[BUILT_IN_CEXPL];
2593 /* If we don't have a decl for cexp create one. This is the
2594 friendliest fallback if the user calls __builtin_cexpi
2595 without full target C99 function support. */
2596 if (fn == NULL_TREE)
2599 const char *name = NULL;
2601 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2603 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2605 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2608 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2609 fn = build_fn_decl (name, fntype);
2612 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2613 build_real (type, dconst0), arg);
2615 /* Make sure not to fold the cexp call again. */
2616 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2617 return expand_expr (build_call_nary (ctype, call, 1, narg),
2618 target, VOIDmode, EXPAND_NORMAL);
2621 /* Now build the proper return type. */
2622 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2623 make_tree (TREE_TYPE (arg), op2),
2624 make_tree (TREE_TYPE (arg), op1)),
2625 target, VOIDmode, EXPAND_NORMAL);
2628 /* Conveniently construct a function call expression. FNDECL names the
2629 function to be called, N is the number of arguments, and the "..."
2630 parameters are the argument expressions. Unlike build_call_exr
2631 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2634 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2637 tree fntype = TREE_TYPE (fndecl);
2638 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2641 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2643 SET_EXPR_LOCATION (fn, loc);
2647 /* Expand a call to one of the builtin rounding functions gcc defines
2648 as an extension (lfloor and lceil). As these are gcc extensions we
2649 do not need to worry about setting errno to EDOM.
2650 If expanding via optab fails, lower expression to (int)(floor(x)).
2651 EXP is the expression that is a call to the builtin function;
2652 if convenient, the result should be placed in TARGET. */
2655 expand_builtin_int_roundingfn (tree exp, rtx target)
2657 convert_optab builtin_optab;
2658 rtx op0, insns, tmp;
2659 tree fndecl = get_callee_fndecl (exp);
2660 enum built_in_function fallback_fn;
2661 tree fallback_fndecl;
2662 enum machine_mode mode;
2665 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2668 arg = CALL_EXPR_ARG (exp, 0);
2670 switch (DECL_FUNCTION_CODE (fndecl))
2672 CASE_FLT_FN (BUILT_IN_LCEIL):
2673 CASE_FLT_FN (BUILT_IN_LLCEIL):
2674 builtin_optab = lceil_optab;
2675 fallback_fn = BUILT_IN_CEIL;
2678 CASE_FLT_FN (BUILT_IN_LFLOOR):
2679 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2680 builtin_optab = lfloor_optab;
2681 fallback_fn = BUILT_IN_FLOOR;
2688 /* Make a suitable register to place result in. */
2689 mode = TYPE_MODE (TREE_TYPE (exp));
2691 target = gen_reg_rtx (mode);
2693 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2694 need to expand the argument again. This way, we will not perform
2695 side-effects more the once. */
2696 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2698 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2702 /* Compute into TARGET. */
2703 if (expand_sfix_optab (target, op0, builtin_optab))
2705 /* Output the entire sequence. */
2706 insns = get_insns ();
2712 /* If we were unable to expand via the builtin, stop the sequence
2713 (without outputting the insns). */
2716 /* Fall back to floating point rounding optab. */
2717 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2719 /* For non-C99 targets we may end up without a fallback fndecl here
2720 if the user called __builtin_lfloor directly. In this case emit
2721 a call to the floor/ceil variants nevertheless. This should result
2722 in the best user experience for not full C99 targets. */
2723 if (fallback_fndecl == NULL_TREE)
2726 const char *name = NULL;
2728 switch (DECL_FUNCTION_CODE (fndecl))
2730 case BUILT_IN_LCEIL:
2731 case BUILT_IN_LLCEIL:
2734 case BUILT_IN_LCEILF:
2735 case BUILT_IN_LLCEILF:
2738 case BUILT_IN_LCEILL:
2739 case BUILT_IN_LLCEILL:
2742 case BUILT_IN_LFLOOR:
2743 case BUILT_IN_LLFLOOR:
2746 case BUILT_IN_LFLOORF:
2747 case BUILT_IN_LLFLOORF:
2750 case BUILT_IN_LFLOORL:
2751 case BUILT_IN_LLFLOORL:
2758 fntype = build_function_type_list (TREE_TYPE (arg),
2759 TREE_TYPE (arg), NULL_TREE);
2760 fallback_fndecl = build_fn_decl (name, fntype);
2763 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2765 tmp = expand_normal (exp);
2767 /* Truncate the result of floating point optab to integer
2768 via expand_fix (). */
2769 target = gen_reg_rtx (mode);
2770 expand_fix (target, tmp, 0);
2775 /* Expand a call to one of the builtin math functions doing integer
2777 Return 0 if a normal call should be emitted rather than expanding the
2778 function in-line. EXP is the expression that is a call to the builtin
2779 function; if convenient, the result should be placed in TARGET. */
2782 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2784 convert_optab builtin_optab;
2786 tree fndecl = get_callee_fndecl (exp);
2788 enum machine_mode mode;
2790 /* There's no easy way to detect the case we need to set EDOM. */
2791 if (flag_errno_math)
2794 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2797 arg = CALL_EXPR_ARG (exp, 0);
2799 switch (DECL_FUNCTION_CODE (fndecl))
2801 CASE_FLT_FN (BUILT_IN_LRINT):
2802 CASE_FLT_FN (BUILT_IN_LLRINT):
2803 builtin_optab = lrint_optab; break;
2804 CASE_FLT_FN (BUILT_IN_LROUND):
2805 CASE_FLT_FN (BUILT_IN_LLROUND):
2806 builtin_optab = lround_optab; break;
2811 /* Make a suitable register to place result in. */
2812 mode = TYPE_MODE (TREE_TYPE (exp));
2814 target = gen_reg_rtx (mode);
2816 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2817 need to expand the argument again. This way, we will not perform
2818 side-effects more the once. */
2819 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2821 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2825 if (expand_sfix_optab (target, op0, builtin_optab))
2827 /* Output the entire sequence. */
2828 insns = get_insns ();
2834 /* If we were unable to expand via the builtin, stop the sequence
2835 (without outputting the insns) and call to the library function
2836 with the stabilized argument list. */
2839 target = expand_call (exp, target, target == const0_rtx);
2844 /* To evaluate powi(x,n), the floating point value x raised to the
2845 constant integer exponent n, we use a hybrid algorithm that
2846 combines the "window method" with look-up tables. For an
2847 introduction to exponentiation algorithms and "addition chains",
2848 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2849 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2850 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2851 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2853 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2854 multiplications to inline before calling the system library's pow
2855 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2856 so this default never requires calling pow, powf or powl. */
2858 #ifndef POWI_MAX_MULTS
2859 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2862 /* The size of the "optimal power tree" lookup table. All
2863 exponents less than this value are simply looked up in the
2864 powi_table below. This threshold is also used to size the
2865 cache of pseudo registers that hold intermediate results. */
2866 #define POWI_TABLE_SIZE 256
2868 /* The size, in bits of the window, used in the "window method"
2869 exponentiation algorithm. This is equivalent to a radix of
2870 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2871 #define POWI_WINDOW_SIZE 3
2873 /* The following table is an efficient representation of an
2874 "optimal power tree". For each value, i, the corresponding
2875 value, j, in the table states than an optimal evaluation
2876 sequence for calculating pow(x,i) can be found by evaluating
2877 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2878 100 integers is given in Knuth's "Seminumerical algorithms". */
2880 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2882 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2883 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2884 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2885 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2886 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2887 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2888 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2889 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2890 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2891 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2892 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2893 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2894 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2895 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2896 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2897 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2898 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2899 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2900 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2901 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2902 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2903 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2904 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2905 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2906 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2907 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2908 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2909 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2910 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2911 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2912 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2913 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2917 /* Return the number of multiplications required to calculate
2918 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2919 subroutine of powi_cost. CACHE is an array indicating
2920 which exponents have already been calculated. */
2923 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2925 /* If we've already calculated this exponent, then this evaluation
2926 doesn't require any additional multiplications. */
2931 return powi_lookup_cost (n - powi_table[n], cache)
2932 + powi_lookup_cost (powi_table[n], cache) + 1;
2935 /* Return the number of multiplications required to calculate
2936 powi(x,n) for an arbitrary x, given the exponent N. This
2937 function needs to be kept in sync with expand_powi below. */
2940 powi_cost (HOST_WIDE_INT n)
2942 bool cache[POWI_TABLE_SIZE];
2943 unsigned HOST_WIDE_INT digit;
2944 unsigned HOST_WIDE_INT val;
2950 /* Ignore the reciprocal when calculating the cost. */
2951 val = (n < 0) ? -n : n;
2953 /* Initialize the exponent cache. */
2954 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2959 while (val >= POWI_TABLE_SIZE)
2963 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2964 result += powi_lookup_cost (digit, cache)
2965 + POWI_WINDOW_SIZE + 1;
2966 val >>= POWI_WINDOW_SIZE;
2975 return result + powi_lookup_cost (val, cache);
2978 /* Recursive subroutine of expand_powi. This function takes the array,
2979 CACHE, of already calculated exponents and an exponent N and returns
2980 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2983 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2985 unsigned HOST_WIDE_INT digit;
2989 if (n < POWI_TABLE_SIZE)
2994 target = gen_reg_rtx (mode);
2997 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2998 op1 = expand_powi_1 (mode, powi_table[n], cache);
3002 target = gen_reg_rtx (mode);
3003 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
3004 op0 = expand_powi_1 (mode, n - digit, cache);
3005 op1 = expand_powi_1 (mode, digit, cache);
3009 target = gen_reg_rtx (mode);
3010 op0 = expand_powi_1 (mode, n >> 1, cache);
3014 result = expand_mult (mode, op0, op1, target, 0);
3015 if (result != target)
3016 emit_move_insn (target, result);
3020 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
3021 floating point operand in mode MODE, and N is the exponent. This
3022 function needs to be kept in sync with powi_cost above. */
3025 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
3027 rtx cache[POWI_TABLE_SIZE];
3031 return CONST1_RTX (mode);
3033 memset (cache, 0, sizeof (cache));
3036 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
3038 /* If the original exponent was negative, reciprocate the result. */
3040 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3041 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3046 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
3047 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
3048 if we can simplify it. */
3050 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
3053 if (TREE_CODE (arg1) == REAL_CST
3054 && !TREE_OVERFLOW (arg1)
3055 && flag_unsafe_math_optimizations)
3057 enum machine_mode mode = TYPE_MODE (type);
3058 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3059 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3060 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3061 tree op = NULL_TREE;
3065 /* Optimize pow (x, 0.5) into sqrt. */
3066 if (REAL_VALUES_EQUAL (c, dconsthalf))
3067 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3069 /* Don't do this optimization if we don't have a sqrt insn. */
3070 else if (optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
3072 REAL_VALUE_TYPE dconst1_4 = dconst1;
3073 REAL_VALUE_TYPE dconst3_4;
3074 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3076 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3077 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3079 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3080 machines that a builtin sqrt instruction is smaller than a
3081 call to pow with 0.25, so do this optimization even if
3083 if (REAL_VALUES_EQUAL (c, dconst1_4))
3085 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3086 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3089 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3090 are optimizing for space. */
3091 else if (optimize_insn_for_speed_p ()
3092 && !TREE_SIDE_EFFECTS (arg0)
3093 && REAL_VALUES_EQUAL (c, dconst3_4))
3095 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3096 tree sqrt2 = builtin_save_expr (sqrt1);
3097 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3098 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3103 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3104 cbrt/sqrts instead of pow (x, 1./6.). */
3106 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3108 /* First try 1/3. */
3109 REAL_VALUE_TYPE dconst1_3
3110 = real_value_truncate (mode, dconst_third ());
3112 if (REAL_VALUES_EQUAL (c, dconst1_3))
3113 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3116 else if (optimize_insn_for_speed_p ()
3117 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
3119 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3120 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3122 if (REAL_VALUES_EQUAL (c, dconst1_6))
3124 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3125 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3131 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3137 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3138 a normal call should be emitted rather than expanding the function
3139 in-line. EXP is the expression that is a call to the builtin
3140 function; if convenient, the result should be placed in TARGET. */
3143 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3147 tree type = TREE_TYPE (exp);
3148 REAL_VALUE_TYPE cint, c, c2;
3151 enum machine_mode mode = TYPE_MODE (type);
3153 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3156 arg0 = CALL_EXPR_ARG (exp, 0);
3157 arg1 = CALL_EXPR_ARG (exp, 1);
3159 if (TREE_CODE (arg1) != REAL_CST
3160 || TREE_OVERFLOW (arg1))
3161 return expand_builtin_mathfn_2 (exp, target, subtarget);
3163 /* Handle constant exponents. */
3165 /* For integer valued exponents we can expand to an optimal multiplication
3166 sequence using expand_powi. */
3167 c = TREE_REAL_CST (arg1);
3168 n = real_to_integer (&c);
3169 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3170 if (real_identical (&c, &cint)
3171 && ((n >= -1 && n <= 2)
3172 || (flag_unsafe_math_optimizations
3173 && optimize_insn_for_speed_p ()
3174 && powi_cost (n) <= POWI_MAX_MULTS)))
3176 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3179 op = force_reg (mode, op);
3180 op = expand_powi (op, mode, n);
3185 narg0 = builtin_save_expr (arg0);
3187 /* If the exponent is not integer valued, check if it is half of an integer.
3188 In this case we can expand to sqrt (x) * x**(n/2). */
3189 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3190 if (fn != NULL_TREE)
3192 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3193 n = real_to_integer (&c2);
3194 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3195 if (real_identical (&c2, &cint)
3196 && ((flag_unsafe_math_optimizations
3197 && optimize_insn_for_speed_p ()
3198 && powi_cost (n/2) <= POWI_MAX_MULTS)
3199 /* Even the c == 0.5 case cannot be done unconditionally
3200 when we need to preserve signed zeros, as
3201 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3202 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3203 /* For c == 1.5 we can assume that x * sqrt (x) is always
3204 smaller than pow (x, 1.5) if sqrt will not be expanded
3207 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3209 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3211 /* Use expand_expr in case the newly built call expression
3212 was folded to a non-call. */
3213 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3216 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3217 op2 = force_reg (mode, op2);
3218 op2 = expand_powi (op2, mode, abs (n / 2));
3219 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3220 0, OPTAB_LIB_WIDEN);
3221 /* If the original exponent was negative, reciprocate the
3224 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3225 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3231 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3233 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3238 /* Try if the exponent is a third of an integer. In this case
3239 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3240 different from pow (x, 1./3.) due to rounding and behavior
3241 with negative x we need to constrain this transformation to
3242 unsafe math and positive x or finite math. */
3243 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3245 && flag_unsafe_math_optimizations
3246 && (tree_expr_nonnegative_p (arg0)
3247 || !HONOR_NANS (mode)))
3249 REAL_VALUE_TYPE dconst3;
3250 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3251 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3252 real_round (&c2, mode, &c2);
3253 n = real_to_integer (&c2);
3254 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3255 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3256 real_convert (&c2, mode, &c2);
3257 if (real_identical (&c2, &c)
3258 && ((optimize_insn_for_speed_p ()
3259 && powi_cost (n/3) <= POWI_MAX_MULTS)
3262 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3264 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3265 if (abs (n) % 3 == 2)
3266 op = expand_simple_binop (mode, MULT, op, op, op,
3267 0, OPTAB_LIB_WIDEN);
3270 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3271 op2 = force_reg (mode, op2);
3272 op2 = expand_powi (op2, mode, abs (n / 3));
3273 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3274 0, OPTAB_LIB_WIDEN);
3275 /* If the original exponent was negative, reciprocate the
3278 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3279 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3285 /* Fall back to optab expansion. */
3286 return expand_builtin_mathfn_2 (exp, target, subtarget);
3289 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3290 a normal call should be emitted rather than expanding the function
3291 in-line. EXP is the expression that is a call to the builtin
3292 function; if convenient, the result should be placed in TARGET. */
3295 expand_builtin_powi (tree exp, rtx target)
3299 enum machine_mode mode;
3300 enum machine_mode mode2;
3302 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3305 arg0 = CALL_EXPR_ARG (exp, 0);
3306 arg1 = CALL_EXPR_ARG (exp, 1);
3307 mode = TYPE_MODE (TREE_TYPE (exp));
3309 /* Handle constant power. */
3311 if (TREE_CODE (arg1) == INTEGER_CST
3312 && !TREE_OVERFLOW (arg1))
3314 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3316 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3317 Otherwise, check the number of multiplications required. */
3318 if ((TREE_INT_CST_HIGH (arg1) == 0
3319 || TREE_INT_CST_HIGH (arg1) == -1)
3320 && ((n >= -1 && n <= 2)
3321 || (optimize_insn_for_speed_p ()
3322 && powi_cost (n) <= POWI_MAX_MULTS)))
3324 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3325 op0 = force_reg (mode, op0);
3326 return expand_powi (op0, mode, n);
3330 /* Emit a libcall to libgcc. */
3332 /* Mode of the 2nd argument must match that of an int. */
3333 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3335 if (target == NULL_RTX)
3336 target = gen_reg_rtx (mode);
3338 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3339 if (GET_MODE (op0) != mode)
3340 op0 = convert_to_mode (mode, op0, 0);
3341 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3342 if (GET_MODE (op1) != mode2)
3343 op1 = convert_to_mode (mode2, op1, 0);
3345 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3346 target, LCT_CONST, mode, 2,
3347 op0, mode, op1, mode2);
3352 /* Expand expression EXP which is a call to the strlen builtin. Return
3353 NULL_RTX if we failed the caller should emit a normal call, otherwise
3354 try to get the result in TARGET, if convenient. */
3357 expand_builtin_strlen (tree exp, rtx target,
3358 enum machine_mode target_mode)
3360 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3366 tree src = CALL_EXPR_ARG (exp, 0);
3367 rtx result, src_reg, char_rtx, before_strlen;
3368 enum machine_mode insn_mode = target_mode, char_mode;
3369 enum insn_code icode = CODE_FOR_nothing;
3372 /* If the length can be computed at compile-time, return it. */
3373 len = c_strlen (src, 0);
3375 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3377 /* If the length can be computed at compile-time and is constant
3378 integer, but there are side-effects in src, evaluate
3379 src for side-effects, then return len.
3380 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3381 can be optimized into: i++; x = 3; */
3382 len = c_strlen (src, 1);
3383 if (len && TREE_CODE (len) == INTEGER_CST)
3385 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3386 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3389 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3391 /* If SRC is not a pointer type, don't do this operation inline. */
3395 /* Bail out if we can't compute strlen in the right mode. */
3396 while (insn_mode != VOIDmode)
3398 icode = optab_handler (strlen_optab, insn_mode);
3399 if (icode != CODE_FOR_nothing)
3402 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3404 if (insn_mode == VOIDmode)
3407 /* Make a place to write the result of the instruction. */
3411 && GET_MODE (result) == insn_mode
3412 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3413 result = gen_reg_rtx (insn_mode);
3415 /* Make a place to hold the source address. We will not expand
3416 the actual source until we are sure that the expansion will
3417 not fail -- there are trees that cannot be expanded twice. */
3418 src_reg = gen_reg_rtx (Pmode);
3420 /* Mark the beginning of the strlen sequence so we can emit the
3421 source operand later. */
3422 before_strlen = get_last_insn ();
3424 char_rtx = const0_rtx;
3425 char_mode = insn_data[(int) icode].operand[2].mode;
3426 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3428 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3430 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3431 char_rtx, GEN_INT (align));
3436 /* Now that we are assured of success, expand the source. */
3438 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3440 emit_move_insn (src_reg, pat);
3445 emit_insn_after (pat, before_strlen);
3447 emit_insn_before (pat, get_insns ());
3449 /* Return the value in the proper mode for this function. */
3450 if (GET_MODE (result) == target_mode)
3452 else if (target != 0)
3453 convert_move (target, result, 0);
3455 target = convert_to_mode (target_mode, result, 0);
3461 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3462 bytes from constant string DATA + OFFSET and return it as target
3466 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3467 enum machine_mode mode)
3469 const char *str = (const char *) data;
3471 gcc_assert (offset >= 0
3472 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3473 <= strlen (str) + 1));
3475 return c_readstr (str + offset, mode);
3478 /* Expand a call EXP to the memcpy builtin.
3479 Return NULL_RTX if we failed, the caller should emit a normal call,
3480 otherwise try to get the result in TARGET, if convenient (and in
3481 mode MODE if that's convenient). */
3484 expand_builtin_memcpy (tree exp, rtx target)
3486 if (!validate_arglist (exp,
3487 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3491 tree dest = CALL_EXPR_ARG (exp, 0);
3492 tree src = CALL_EXPR_ARG (exp, 1);
3493 tree len = CALL_EXPR_ARG (exp, 2);
3494 const char *src_str;
3495 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3496 unsigned int dest_align
3497 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3498 rtx dest_mem, src_mem, dest_addr, len_rtx;
3499 HOST_WIDE_INT expected_size = -1;
3500 unsigned int expected_align = 0;
3502 /* If DEST is not a pointer type, call the normal function. */
3503 if (dest_align == 0)
3506 /* If either SRC is not a pointer type, don't do this
3507 operation in-line. */
3511 if (currently_expanding_gimple_stmt)
3512 stringop_block_profile (currently_expanding_gimple_stmt,
3513 &expected_align, &expected_size);
3515 if (expected_align < dest_align)
3516 expected_align = dest_align;
3517 dest_mem = get_memory_rtx (dest, len);
3518 set_mem_align (dest_mem, dest_align);
3519 len_rtx = expand_normal (len);
3520 src_str = c_getstr (src);
3522 /* If SRC is a string constant and block move would be done
3523 by pieces, we can avoid loading the string from memory
3524 and only stored the computed constants. */
3526 && CONST_INT_P (len_rtx)
3527 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3528 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3529 CONST_CAST (char *, src_str),
3532 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3533 builtin_memcpy_read_str,
3534 CONST_CAST (char *, src_str),
3535 dest_align, false, 0);
3536 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3537 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3541 src_mem = get_memory_rtx (src, len);
3542 set_mem_align (src_mem, src_align);
3544 /* Copy word part most expediently. */
3545 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3546 CALL_EXPR_TAILCALL (exp)
3547 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3548 expected_align, expected_size);
3552 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3553 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3559 /* Expand a call EXP to the mempcpy builtin.
3560 Return NULL_RTX if we failed; the caller should emit a normal call,
3561 otherwise try to get the result in TARGET, if convenient (and in
3562 mode MODE if that's convenient). If ENDP is 0 return the
3563 destination pointer, if ENDP is 1 return the end pointer ala
3564 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3568 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3570 if (!validate_arglist (exp,
3571 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3575 tree dest = CALL_EXPR_ARG (exp, 0);
3576 tree src = CALL_EXPR_ARG (exp, 1);
3577 tree len = CALL_EXPR_ARG (exp, 2);
3578 return expand_builtin_mempcpy_args (dest, src, len,
3579 target, mode, /*endp=*/ 1);
3583 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3584 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3585 so that this can also be called without constructing an actual CALL_EXPR.
3586 The other arguments and return value are the same as for
3587 expand_builtin_mempcpy. */
3590 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3591 rtx target, enum machine_mode mode, int endp)
3593 /* If return value is ignored, transform mempcpy into memcpy. */
3594 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3596 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3597 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3599 return expand_expr (result, target, mode, EXPAND_NORMAL);
3603 const char *src_str;
3604 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3605 unsigned int dest_align
3606 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3607 rtx dest_mem, src_mem, len_rtx;
3609 /* If either SRC or DEST is not a pointer type, don't do this
3610 operation in-line. */
3611 if (dest_align == 0 || src_align == 0)
3614 /* If LEN is not constant, call the normal function. */
3615 if (! host_integerp (len, 1))
3618 len_rtx = expand_normal (len);
3619 src_str = c_getstr (src);
3621 /* If SRC is a string constant and block move would be done
3622 by pieces, we can avoid loading the string from memory
3623 and only stored the computed constants. */
3625 && CONST_INT_P (len_rtx)
3626 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3627 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3628 CONST_CAST (char *, src_str),
3631 dest_mem = get_memory_rtx (dest, len);
3632 set_mem_align (dest_mem, dest_align);
3633 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3634 builtin_memcpy_read_str,
3635 CONST_CAST (char *, src_str),
3636 dest_align, false, endp);
3637 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3638 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3642 if (CONST_INT_P (len_rtx)
3643 && can_move_by_pieces (INTVAL (len_rtx),
3644 MIN (dest_align, src_align)))
3646 dest_mem = get_memory_rtx (dest, len);
3647 set_mem_align (dest_mem, dest_align);
3648 src_mem = get_memory_rtx (src, len);
3649 set_mem_align (src_mem, src_align);
3650 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3651 MIN (dest_align, src_align), endp);
3652 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3653 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3662 # define HAVE_movstr 0
3663 # define CODE_FOR_movstr CODE_FOR_nothing
3666 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3667 we failed, the caller should emit a normal call, otherwise try to
3668 get the result in TARGET, if convenient. If ENDP is 0 return the
3669 destination pointer, if ENDP is 1 return the end pointer ala
3670 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3674 expand_movstr (tree dest, tree src, rtx target, int endp)
3680 const struct insn_data_d * data;
3685 dest_mem = get_memory_rtx (dest, NULL);
3686 src_mem = get_memory_rtx (src, NULL);
3687 data = insn_data + CODE_FOR_movstr;
3690 target = force_reg (Pmode, XEXP (dest_mem, 0));
3691 dest_mem = replace_equiv_address (dest_mem, target);
3692 end = gen_reg_rtx (Pmode);
3697 || target == const0_rtx
3698 || ! (*data->operand[0].predicate) (target, Pmode))
3700 end = gen_reg_rtx (Pmode);
3701 if (target != const0_rtx)
3708 if (data->operand[0].mode != VOIDmode)
3709 end = gen_lowpart (data->operand[0].mode, end);
3711 insn = data->genfun (end, dest_mem, src_mem);
3717 /* movstr is supposed to set end to the address of the NUL
3718 terminator. If the caller requested a mempcpy-like return value,
3720 if (endp == 1 && target != const0_rtx)
3722 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3723 emit_move_insn (target, force_operand (tem, NULL_RTX));
3729 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3730 NULL_RTX if we failed the caller should emit a normal call, otherwise
3731 try to get the result in TARGET, if convenient (and in mode MODE if that's
3735 expand_builtin_strcpy (tree exp, rtx target)
3737 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3739 tree dest = CALL_EXPR_ARG (exp, 0);
3740 tree src = CALL_EXPR_ARG (exp, 1);
3741 return expand_builtin_strcpy_args (dest, src, target);
3746 /* Helper function to do the actual work for expand_builtin_strcpy. The
3747 arguments to the builtin_strcpy call DEST and SRC are broken out
3748 so that this can also be called without constructing an actual CALL_EXPR.
3749 The other arguments and return value are the same as for
3750 expand_builtin_strcpy. */
3753 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3755 return expand_movstr (dest, src, target, /*endp=*/0);
3758 /* Expand a call EXP to the stpcpy builtin.
3759 Return NULL_RTX if we failed the caller should emit a normal call,
3760 otherwise try to get the result in TARGET, if convenient (and in
3761 mode MODE if that's convenient). */
3764 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3767 location_t loc = EXPR_LOCATION (exp);
3769 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3772 dst = CALL_EXPR_ARG (exp, 0);
3773 src = CALL_EXPR_ARG (exp, 1);
3775 /* If return value is ignored, transform stpcpy into strcpy. */
3776 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3778 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3779 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3780 return expand_expr (result, target, mode, EXPAND_NORMAL);
3787 /* Ensure we get an actual string whose length can be evaluated at
3788 compile-time, not an expression containing a string. This is
3789 because the latter will potentially produce pessimized code
3790 when used to produce the return value. */
3791 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3792 return expand_movstr (dst, src, target, /*endp=*/2);
3794 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3795 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3796 target, mode, /*endp=*/2);
3801 if (TREE_CODE (len) == INTEGER_CST)
3803 rtx len_rtx = expand_normal (len);
3805 if (CONST_INT_P (len_rtx))
3807 ret = expand_builtin_strcpy_args (dst, src, target);
3813 if (mode != VOIDmode)
3814 target = gen_reg_rtx (mode);
3816 target = gen_reg_rtx (GET_MODE (ret));
3818 if (GET_MODE (target) != GET_MODE (ret))
3819 ret = gen_lowpart (GET_MODE (target), ret);
3821 ret = plus_constant (ret, INTVAL (len_rtx));
3822 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3830 return expand_movstr (dst, src, target, /*endp=*/2);
3834 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3835 bytes from constant string DATA + OFFSET and return it as target
3839 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3840 enum machine_mode mode)
3842 const char *str = (const char *) data;
3844 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3847 return c_readstr (str + offset, mode);
3850 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3851 NULL_RTX if we failed the caller should emit a normal call. */
3854 expand_builtin_strncpy (tree exp, rtx target)
3856 location_t loc = EXPR_LOCATION (exp);
3858 if (validate_arglist (exp,
3859 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3861 tree dest = CALL_EXPR_ARG (exp, 0);
3862 tree src = CALL_EXPR_ARG (exp, 1);
3863 tree len = CALL_EXPR_ARG (exp, 2);
3864 tree slen = c_strlen (src, 1);
3866 /* We must be passed a constant len and src parameter. */
3867 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3870 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3872 /* We're required to pad with trailing zeros if the requested
3873 len is greater than strlen(s2)+1. In that case try to
3874 use store_by_pieces, if it fails, punt. */
3875 if (tree_int_cst_lt (slen, len))
3877 unsigned int dest_align
3878 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3879 const char *p = c_getstr (src);
3882 if (!p || dest_align == 0 || !host_integerp (len, 1)
3883 || !can_store_by_pieces (tree_low_cst (len, 1),
3884 builtin_strncpy_read_str,
3885 CONST_CAST (char *, p),
3889 dest_mem = get_memory_rtx (dest, len);
3890 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3891 builtin_strncpy_read_str,
3892 CONST_CAST (char *, p), dest_align, false, 0);
3893 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3894 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3901 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3902 bytes from constant string DATA + OFFSET and return it as target
3906 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3907 enum machine_mode mode)
3909 const char *c = (const char *) data;
3910 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3912 memset (p, *c, GET_MODE_SIZE (mode));
3914 return c_readstr (p, mode);
3917 /* Callback routine for store_by_pieces. Return the RTL of a register
3918 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3919 char value given in the RTL register data. For example, if mode is
3920 4 bytes wide, return the RTL for 0x01010101*data. */
3923 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3924 enum machine_mode mode)
3930 size = GET_MODE_SIZE (mode);
3934 p = XALLOCAVEC (char, size);
3935 memset (p, 1, size);
3936 coeff = c_readstr (p, mode);
3938 target = convert_to_mode (mode, (rtx) data, 1);
3939 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3940 return force_reg (mode, target);
3943 /* Expand expression EXP, which is a call to the memset builtin. Return
3944 NULL_RTX if we failed the caller should emit a normal call, otherwise
3945 try to get the result in TARGET, if convenient (and in mode MODE if that's
3949 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3951 if (!validate_arglist (exp,
3952 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3956 tree dest = CALL_EXPR_ARG (exp, 0);
3957 tree val = CALL_EXPR_ARG (exp, 1);
3958 tree len = CALL_EXPR_ARG (exp, 2);
3959 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3963 /* Helper function to do the actual work for expand_builtin_memset. The
3964 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3965 so that this can also be called without constructing an actual CALL_EXPR.
3966 The other arguments and return value are the same as for
3967 expand_builtin_memset. */
3970 expand_builtin_memset_args (tree dest, tree val, tree len,
3971 rtx target, enum machine_mode mode, tree orig_exp)
3974 enum built_in_function fcode;
3976 unsigned int dest_align;
3977 rtx dest_mem, dest_addr, len_rtx;
3978 HOST_WIDE_INT expected_size = -1;
3979 unsigned int expected_align = 0;
3981 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3983 /* If DEST is not a pointer type, don't do this operation in-line. */
3984 if (dest_align == 0)
3987 if (currently_expanding_gimple_stmt)
3988 stringop_block_profile (currently_expanding_gimple_stmt,
3989 &expected_align, &expected_size);
3991 if (expected_align < dest_align)
3992 expected_align = dest_align;
3994 /* If the LEN parameter is zero, return DEST. */
3995 if (integer_zerop (len))
3997 /* Evaluate and ignore VAL in case it has side-effects. */
3998 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3999 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4002 /* Stabilize the arguments in case we fail. */
4003 dest = builtin_save_expr (dest);
4004 val = builtin_save_expr (val);
4005 len = builtin_save_expr (len);
4007 len_rtx = expand_normal (len);
4008 dest_mem = get_memory_rtx (dest, len);
4010 if (TREE_CODE (val) != INTEGER_CST)
4014 val_rtx = expand_normal (val);
4015 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4018 /* Assume that we can memset by pieces if we can store
4019 * the coefficients by pieces (in the required modes).
4020 * We can't pass builtin_memset_gen_str as that emits RTL. */
4022 if (host_integerp (len, 1)
4023 && can_store_by_pieces (tree_low_cst (len, 1),
4024 builtin_memset_read_str, &c, dest_align,
4027 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4029 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4030 builtin_memset_gen_str, val_rtx, dest_align,
4033 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4034 dest_align, expected_align,
4038 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4039 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4043 if (target_char_cast (val, &c))
4048 if (host_integerp (len, 1)
4049 && can_store_by_pieces (tree_low_cst (len, 1),
4050 builtin_memset_read_str, &c, dest_align,
4052 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4053 builtin_memset_read_str, &c, dest_align, true, 0);
4054 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4055 dest_align, expected_align,
4059 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4060 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4064 set_mem_align (dest_mem, dest_align);
4065 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4066 CALL_EXPR_TAILCALL (orig_exp)
4067 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4068 expected_align, expected_size);
4072 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4073 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4079 fndecl = get_callee_fndecl (orig_exp);
4080 fcode = DECL_FUNCTION_CODE (fndecl);
4081 if (fcode == BUILT_IN_MEMSET)
4082 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4084 else if (fcode == BUILT_IN_BZERO)
4085 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4089 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4090 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4091 return expand_call (fn, target, target == const0_rtx);
4094 /* Expand expression EXP, which is a call to the bzero builtin. Return
4095 NULL_RTX if we failed the caller should emit a normal call. */
4098 expand_builtin_bzero (tree exp)
4101 location_t loc = EXPR_LOCATION (exp);
4103 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4106 dest = CALL_EXPR_ARG (exp, 0);
4107 size = CALL_EXPR_ARG (exp, 1);
4109 /* New argument list transforming bzero(ptr x, int y) to
4110 memset(ptr x, int 0, size_t y). This is done this way
4111 so that if it isn't expanded inline, we fallback to
4112 calling bzero instead of memset. */
4114 return expand_builtin_memset_args (dest, integer_zero_node,
4115 fold_convert_loc (loc, sizetype, size),
4116 const0_rtx, VOIDmode, exp);
4119 /* Expand expression EXP, which is a call to the memcmp built-in function.
4120 Return NULL_RTX if we failed and the caller should emit a normal call,
4121 otherwise try to get the result in TARGET, if convenient (and in mode
4122 MODE, if that's convenient). */
4125 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4126 ATTRIBUTE_UNUSED enum machine_mode mode)
4128 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4130 if (!validate_arglist (exp,
4131 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4134 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
4135 implementing memcmp because it will stop if it encounters two
4137 #if defined HAVE_cmpmemsi
4139 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4142 tree arg1 = CALL_EXPR_ARG (exp, 0);
4143 tree arg2 = CALL_EXPR_ARG (exp, 1);
4144 tree len = CALL_EXPR_ARG (exp, 2);
4146 unsigned int arg1_align
4147 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4148 unsigned int arg2_align
4149 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4150 enum machine_mode insn_mode;
4153 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4157 /* If we don't have POINTER_TYPE, call the function. */
4158 if (arg1_align == 0 || arg2_align == 0)
4161 /* Make a place to write the result of the instruction. */
4164 && REG_P (result) && GET_MODE (result) == insn_mode
4165 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4166 result = gen_reg_rtx (insn_mode);
4168 arg1_rtx = get_memory_rtx (arg1, len);
4169 arg2_rtx = get_memory_rtx (arg2, len);
4170 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4172 /* Set MEM_SIZE as appropriate. */
4173 if (CONST_INT_P (arg3_rtx))
4175 set_mem_size (arg1_rtx, arg3_rtx);
4176 set_mem_size (arg2_rtx, arg3_rtx);
4180 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4181 GEN_INT (MIN (arg1_align, arg2_align)));
4188 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4189 TYPE_MODE (integer_type_node), 3,
4190 XEXP (arg1_rtx, 0), Pmode,
4191 XEXP (arg2_rtx, 0), Pmode,
4192 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4193 TYPE_UNSIGNED (sizetype)),
4194 TYPE_MODE (sizetype));
4196 /* Return the value in the proper mode for this function. */
4197 mode = TYPE_MODE (TREE_TYPE (exp));
4198 if (GET_MODE (result) == mode)
4200 else if (target != 0)
4202 convert_move (target, result, 0);
4206 return convert_to_mode (mode, result, 0);
4208 #endif /* HAVE_cmpmemsi. */
4213 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4214 if we failed the caller should emit a normal call, otherwise try to get
4215 the result in TARGET, if convenient. */
4218 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4220 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4223 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4224 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4225 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4227 rtx arg1_rtx, arg2_rtx;
4228 rtx result, insn = NULL_RTX;
4230 tree arg1 = CALL_EXPR_ARG (exp, 0);
4231 tree arg2 = CALL_EXPR_ARG (exp, 1);
4233 unsigned int arg1_align
4234 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4235 unsigned int arg2_align
4236 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4238 /* If we don't have POINTER_TYPE, call the function. */
4239 if (arg1_align == 0 || arg2_align == 0)
4242 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4243 arg1 = builtin_save_expr (arg1);
4244 arg2 = builtin_save_expr (arg2);
4246 arg1_rtx = get_memory_rtx (arg1, NULL);
4247 arg2_rtx = get_memory_rtx (arg2, NULL);
4249 #ifdef HAVE_cmpstrsi
4250 /* Try to call cmpstrsi. */
4253 enum machine_mode insn_mode
4254 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4256 /* Make a place to write the result of the instruction. */
4259 && REG_P (result) && GET_MODE (result) == insn_mode
4260 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4261 result = gen_reg_rtx (insn_mode);
4263 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4264 GEN_INT (MIN (arg1_align, arg2_align)));
4267 #ifdef HAVE_cmpstrnsi
4268 /* Try to determine at least one length and call cmpstrnsi. */
4269 if (!insn && HAVE_cmpstrnsi)
4274 enum machine_mode insn_mode
4275 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4276 tree len1 = c_strlen (arg1, 1);
4277 tree len2 = c_strlen (arg2, 1);
4280 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4282 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4284 /* If we don't have a constant length for the first, use the length
4285 of the second, if we know it. We don't require a constant for
4286 this case; some cost analysis could be done if both are available
4287 but neither is constant. For now, assume they're equally cheap,
4288 unless one has side effects. If both strings have constant lengths,
4295 else if (TREE_SIDE_EFFECTS (len1))
4297 else if (TREE_SIDE_EFFECTS (len2))
4299 else if (TREE_CODE (len1) != INTEGER_CST)
4301 else if (TREE_CODE (len2) != INTEGER_CST)
4303 else if (tree_int_cst_lt (len1, len2))
4308 /* If both arguments have side effects, we cannot optimize. */
4309 if (!len || TREE_SIDE_EFFECTS (len))
4312 arg3_rtx = expand_normal (len);
4314 /* Make a place to write the result of the instruction. */
4317 && REG_P (result) && GET_MODE (result) == insn_mode
4318 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4319 result = gen_reg_rtx (insn_mode);
4321 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4322 GEN_INT (MIN (arg1_align, arg2_align)));
4328 enum machine_mode mode;
4331 /* Return the value in the proper mode for this function. */
4332 mode = TYPE_MODE (TREE_TYPE (exp));
4333 if (GET_MODE (result) == mode)
4336 return convert_to_mode (mode, result, 0);
4337 convert_move (target, result, 0);
4341 /* Expand the library call ourselves using a stabilized argument
4342 list to avoid re-evaluating the function's arguments twice. */
4343 #ifdef HAVE_cmpstrnsi
4346 fndecl = get_callee_fndecl (exp);
4347 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4348 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4349 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4350 return expand_call (fn, target, target == const0_rtx);
4356 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4357 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4358 the result in TARGET, if convenient. */
4361 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4362 ATTRIBUTE_UNUSED enum machine_mode mode)
4364 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4366 if (!validate_arglist (exp,
4367 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4370 /* If c_strlen can determine an expression for one of the string
4371 lengths, and it doesn't have side effects, then emit cmpstrnsi
4372 using length MIN(strlen(string)+1, arg3). */
4373 #ifdef HAVE_cmpstrnsi
4376 tree len, len1, len2;
4377 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4380 tree arg1 = CALL_EXPR_ARG (exp, 0);
4381 tree arg2 = CALL_EXPR_ARG (exp, 1);
4382 tree arg3 = CALL_EXPR_ARG (exp, 2);
4384 unsigned int arg1_align
4385 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4386 unsigned int arg2_align
4387 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4388 enum machine_mode insn_mode
4389 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4391 len1 = c_strlen (arg1, 1);
4392 len2 = c_strlen (arg2, 1);
4395 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4397 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4399 /* If we don't have a constant length for the first, use the length
4400 of the second, if we know it. We don't require a constant for
4401 this case; some cost analysis could be done if both are available
4402 but neither is constant. For now, assume they're equally cheap,
4403 unless one has side effects. If both strings have constant lengths,
4410 else if (TREE_SIDE_EFFECTS (len1))
4412 else if (TREE_SIDE_EFFECTS (len2))
4414 else if (TREE_CODE (len1) != INTEGER_CST)
4416 else if (TREE_CODE (len2) != INTEGER_CST)
4418 else if (tree_int_cst_lt (len1, len2))
4423 /* If both arguments have side effects, we cannot optimize. */
4424 if (!len || TREE_SIDE_EFFECTS (len))
4427 /* The actual new length parameter is MIN(len,arg3). */
4428 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4429 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4431 /* If we don't have POINTER_TYPE, call the function. */
4432 if (arg1_align == 0 || arg2_align == 0)
4435 /* Make a place to write the result of the instruction. */
4438 && REG_P (result) && GET_MODE (result) == insn_mode
4439 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4440 result = gen_reg_rtx (insn_mode);
4442 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4443 arg1 = builtin_save_expr (arg1);
4444 arg2 = builtin_save_expr (arg2);
4445 len = builtin_save_expr (len);
4447 arg1_rtx = get_memory_rtx (arg1, len);
4448 arg2_rtx = get_memory_rtx (arg2, len);
4449 arg3_rtx = expand_normal (len);
4450 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4451 GEN_INT (MIN (arg1_align, arg2_align)));
4456 /* Return the value in the proper mode for this function. */
4457 mode = TYPE_MODE (TREE_TYPE (exp));
4458 if (GET_MODE (result) == mode)
4461 return convert_to_mode (mode, result, 0);
4462 convert_move (target, result, 0);
4466 /* Expand the library call ourselves using a stabilized argument
4467 list to avoid re-evaluating the function's arguments twice. */
4468 fndecl = get_callee_fndecl (exp);
4469 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4471 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4472 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4473 return expand_call (fn, target, target == const0_rtx);
4479 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4480 if that's convenient. */
4483 expand_builtin_saveregs (void)
4487 /* Don't do __builtin_saveregs more than once in a function.
4488 Save the result of the first call and reuse it. */
4489 if (saveregs_value != 0)
4490 return saveregs_value;
4492 /* When this function is called, it means that registers must be
4493 saved on entry to this function. So we migrate the call to the
4494 first insn of this function. */
4498 /* Do whatever the machine needs done in this case. */
4499 val = targetm.calls.expand_builtin_saveregs ();
4504 saveregs_value = val;
4506 /* Put the insns after the NOTE that starts the function. If this
4507 is inside a start_sequence, make the outer-level insn chain current, so
4508 the code is placed at the start of the function. */
4509 push_topmost_sequence ();
4510 emit_insn_after (seq, entry_of_function ());
4511 pop_topmost_sequence ();
4516 /* Expand a call to __builtin_next_arg. */
4519 expand_builtin_next_arg (void)
4521 /* Checking arguments is already done in fold_builtin_next_arg
4522 that must be called before this function. */
4523 return expand_binop (ptr_mode, add_optab,
4524 crtl->args.internal_arg_pointer,
4525 crtl->args.arg_offset_rtx,
4526 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4529 /* Make it easier for the backends by protecting the valist argument
4530 from multiple evaluations. */
4533 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4535 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4537 /* The current way of determining the type of valist is completely
4538 bogus. We should have the information on the va builtin instead. */
4540 vatype = targetm.fn_abi_va_list (cfun->decl);
4542 if (TREE_CODE (vatype) == ARRAY_TYPE)
4544 if (TREE_SIDE_EFFECTS (valist))
4545 valist = save_expr (valist);
4547 /* For this case, the backends will be expecting a pointer to
4548 vatype, but it's possible we've actually been given an array
4549 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4551 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4553 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4554 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4559 tree pt = build_pointer_type (vatype);
4563 if (! TREE_SIDE_EFFECTS (valist))
4566 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4567 TREE_SIDE_EFFECTS (valist) = 1;
4570 if (TREE_SIDE_EFFECTS (valist))
4571 valist = save_expr (valist);
4572 valist = fold_build2_loc (loc, MEM_REF,
4573 vatype, valist, build_int_cst (pt, 0));
4579 /* The "standard" definition of va_list is void*. */
4582 std_build_builtin_va_list (void)
4584 return ptr_type_node;
4587 /* The "standard" abi va_list is va_list_type_node. */
4590 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4592 return va_list_type_node;
4595 /* The "standard" type of va_list is va_list_type_node. */
4598 std_canonical_va_list_type (tree type)
4602 if (INDIRECT_REF_P (type))
4603 type = TREE_TYPE (type);
4604 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4605 type = TREE_TYPE (type);
4606 wtype = va_list_type_node;
4608 /* Treat structure va_list types. */
4609 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4610 htype = TREE_TYPE (htype);
4611 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4613 /* If va_list is an array type, the argument may have decayed
4614 to a pointer type, e.g. by being passed to another function.
4615 In that case, unwrap both types so that we can compare the
4616 underlying records. */
4617 if (TREE_CODE (htype) == ARRAY_TYPE
4618 || POINTER_TYPE_P (htype))
4620 wtype = TREE_TYPE (wtype);
4621 htype = TREE_TYPE (htype);
4624 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4625 return va_list_type_node;
4630 /* The "standard" implementation of va_start: just assign `nextarg' to
4634 std_expand_builtin_va_start (tree valist, rtx nextarg)
4636 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4637 convert_move (va_r, nextarg, 0);
4640 /* Expand EXP, a call to __builtin_va_start. */
4643 expand_builtin_va_start (tree exp)
4647 location_t loc = EXPR_LOCATION (exp);
4649 if (call_expr_nargs (exp) < 2)
4651 error_at (loc, "too few arguments to function %<va_start%>");
4655 if (fold_builtin_next_arg (exp, true))
4658 nextarg = expand_builtin_next_arg ();
4659 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4661 if (targetm.expand_builtin_va_start)
4662 targetm.expand_builtin_va_start (valist, nextarg);
4664 std_expand_builtin_va_start (valist, nextarg);
4669 /* The "standard" implementation of va_arg: read the value from the
4670 current (padded) address and increment by the (padded) size. */
4673 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4676 tree addr, t, type_size, rounded_size, valist_tmp;
4677 unsigned HOST_WIDE_INT align, boundary;
4680 #ifdef ARGS_GROW_DOWNWARD
4681 /* All of the alignment and movement below is for args-grow-up machines.
4682 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4683 implement their own specialized gimplify_va_arg_expr routines. */
4687 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4689 type = build_pointer_type (type);
4691 align = PARM_BOUNDARY / BITS_PER_UNIT;
4692 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4694 /* When we align parameter on stack for caller, if the parameter
4695 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4696 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4697 here with caller. */
4698 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4699 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4701 boundary /= BITS_PER_UNIT;
4703 /* Hoist the valist value into a temporary for the moment. */
4704 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4706 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4707 requires greater alignment, we must perform dynamic alignment. */
4708 if (boundary > align
4709 && !integer_zerop (TYPE_SIZE (type)))
4711 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4712 fold_build2 (POINTER_PLUS_EXPR,
4714 valist_tmp, size_int (boundary - 1)));
4715 gimplify_and_add (t, pre_p);
4717 t = fold_convert (sizetype, valist_tmp);
4718 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4719 fold_convert (TREE_TYPE (valist),
4720 fold_build2 (BIT_AND_EXPR, sizetype, t,
4721 size_int (-boundary))));
4722 gimplify_and_add (t, pre_p);
4727 /* If the actual alignment is less than the alignment of the type,
4728 adjust the type accordingly so that we don't assume strict alignment
4729 when dereferencing the pointer. */
4730 boundary *= BITS_PER_UNIT;
4731 if (boundary < TYPE_ALIGN (type))
4733 type = build_variant_type_copy (type);
4734 TYPE_ALIGN (type) = boundary;
4737 /* Compute the rounded size of the type. */
4738 type_size = size_in_bytes (type);
4739 rounded_size = round_up (type_size, align);
4741 /* Reduce rounded_size so it's sharable with the postqueue. */
4742 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4746 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4748 /* Small args are padded downward. */
4749 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4750 rounded_size, size_int (align));
4751 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4752 size_binop (MINUS_EXPR, rounded_size, type_size));
4753 addr = fold_build2 (POINTER_PLUS_EXPR,
4754 TREE_TYPE (addr), addr, t);
4757 /* Compute new value for AP. */
4758 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4759 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4760 gimplify_and_add (t, pre_p);
4762 addr = fold_convert (build_pointer_type (type), addr);
4765 addr = build_va_arg_indirect_ref (addr);
4767 return build_va_arg_indirect_ref (addr);
4770 /* Build an indirect-ref expression over the given TREE, which represents a
4771 piece of a va_arg() expansion. */
4773 build_va_arg_indirect_ref (tree addr)
4775 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4777 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4783 /* Return a dummy expression of type TYPE in order to keep going after an
4787 dummy_object (tree type)
4789 tree t = build_int_cst (build_pointer_type (type), 0);
4790 return build2 (MEM_REF, type, t, t);
4793 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4794 builtin function, but a very special sort of operator. */
4796 enum gimplify_status
4797 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4799 tree promoted_type, have_va_type;
4800 tree valist = TREE_OPERAND (*expr_p, 0);
4801 tree type = TREE_TYPE (*expr_p);
4803 location_t loc = EXPR_LOCATION (*expr_p);
4805 /* Verify that valist is of the proper type. */
4806 have_va_type = TREE_TYPE (valist);
4807 if (have_va_type == error_mark_node)
4809 have_va_type = targetm.canonical_va_list_type (have_va_type);
4811 if (have_va_type == NULL_TREE)
4813 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4817 /* Generate a diagnostic for requesting data of a type that cannot
4818 be passed through `...' due to type promotion at the call site. */
4819 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4822 static bool gave_help;
4825 /* Unfortunately, this is merely undefined, rather than a constraint
4826 violation, so we cannot make this an error. If this call is never
4827 executed, the program is still strictly conforming. */
4828 warned = warning_at (loc, 0,
4829 "%qT is promoted to %qT when passed through %<...%>",
4830 type, promoted_type);
4831 if (!gave_help && warned)
4834 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4835 promoted_type, type);
4838 /* We can, however, treat "undefined" any way we please.
4839 Call abort to encourage the user to fix the program. */
4841 inform (loc, "if this code is reached, the program will abort");
4842 /* Before the abort, allow the evaluation of the va_list
4843 expression to exit or longjmp. */
4844 gimplify_and_add (valist, pre_p);
4845 t = build_call_expr_loc (loc,
4846 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4847 gimplify_and_add (t, pre_p);
4849 /* This is dead code, but go ahead and finish so that the
4850 mode of the result comes out right. */
4851 *expr_p = dummy_object (type);
4856 /* Make it easier for the backends by protecting the valist argument
4857 from multiple evaluations. */
4858 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4860 /* For this case, the backends will be expecting a pointer to
4861 TREE_TYPE (abi), but it's possible we've
4862 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4864 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4866 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4867 valist = fold_convert_loc (loc, p1,
4868 build_fold_addr_expr_loc (loc, valist));
4871 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4874 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4876 if (!targetm.gimplify_va_arg_expr)
4877 /* FIXME: Once most targets are converted we should merely
4878 assert this is non-null. */
4881 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4886 /* Expand EXP, a call to __builtin_va_end. */
4889 expand_builtin_va_end (tree exp)
4891 tree valist = CALL_EXPR_ARG (exp, 0);
4893 /* Evaluate for side effects, if needed. I hate macros that don't
4895 if (TREE_SIDE_EFFECTS (valist))
4896 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4901 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4902 builtin rather than just as an assignment in stdarg.h because of the
4903 nastiness of array-type va_list types. */
4906 expand_builtin_va_copy (tree exp)
4909 location_t loc = EXPR_LOCATION (exp);
4911 dst = CALL_EXPR_ARG (exp, 0);
4912 src = CALL_EXPR_ARG (exp, 1);
4914 dst = stabilize_va_list_loc (loc, dst, 1);
4915 src = stabilize_va_list_loc (loc, src, 0);
4917 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4919 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4921 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4922 TREE_SIDE_EFFECTS (t) = 1;
4923 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4927 rtx dstb, srcb, size;
4929 /* Evaluate to pointers. */
4930 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4931 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4932 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4933 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4935 dstb = convert_memory_address (Pmode, dstb);
4936 srcb = convert_memory_address (Pmode, srcb);
4938 /* "Dereference" to BLKmode memories. */
4939 dstb = gen_rtx_MEM (BLKmode, dstb);
4940 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4941 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4942 srcb = gen_rtx_MEM (BLKmode, srcb);
4943 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4944 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4947 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4953 /* Expand a call to one of the builtin functions __builtin_frame_address or
4954 __builtin_return_address. */
4957 expand_builtin_frame_address (tree fndecl, tree exp)
4959 /* The argument must be a nonnegative integer constant.
4960 It counts the number of frames to scan up the stack.
4961 The value is the return address saved in that frame. */
4962 if (call_expr_nargs (exp) == 0)
4963 /* Warning about missing arg was already issued. */
4965 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4967 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4968 error ("invalid argument to %<__builtin_frame_address%>");
4970 error ("invalid argument to %<__builtin_return_address%>");
4976 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4977 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4979 /* Some ports cannot access arbitrary stack frames. */
4982 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4983 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4985 warning (0, "unsupported argument to %<__builtin_return_address%>");
4989 /* For __builtin_frame_address, return what we've got. */
4990 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4994 && ! CONSTANT_P (tem))
4995 tem = copy_to_mode_reg (Pmode, tem);
5000 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5001 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
5002 is the same as for allocate_dynamic_stack_space. */
5005 expand_builtin_alloca (tree exp, bool cannot_accumulate)
5010 /* Emit normal call if marked not-inlineable. */
5011 if (CALL_CANNOT_INLINE_P (exp))
5014 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5017 /* Compute the argument. */
5018 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5020 /* Allocate the desired space. */
5021 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
5023 result = convert_memory_address (ptr_mode, result);
5028 /* Expand a call to a bswap builtin with argument ARG0. MODE
5029 is the mode to expand with. */
5032 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5034 enum machine_mode mode;
5038 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5041 arg = CALL_EXPR_ARG (exp, 0);
5042 mode = TYPE_MODE (TREE_TYPE (arg));
5043 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5045 target = expand_unop (mode, bswap_optab, op0, target, 1);
5047 gcc_assert (target);
5049 return convert_to_mode (mode, target, 0);
5052 /* Expand a call to a unary builtin in EXP.
5053 Return NULL_RTX if a normal call should be emitted rather than expanding the
5054 function in-line. If convenient, the result should be placed in TARGET.
5055 SUBTARGET may be used as the target for computing one of EXP's operands. */
5058 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5059 rtx subtarget, optab op_optab)
5063 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5066 /* Compute the argument. */
5067 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5069 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5070 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5071 VOIDmode, EXPAND_NORMAL);
5072 /* Compute op, into TARGET if possible.
5073 Set TARGET to wherever the result comes back. */
5074 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5075 op_optab, op0, target, 1);
5076 gcc_assert (target);
5078 return convert_to_mode (target_mode, target, 0);
5081 /* Expand a call to __builtin_expect. We just return our argument
5082 as the builtin_expect semantic should've been already executed by
5083 tree branch prediction pass. */
5086 expand_builtin_expect (tree exp, rtx target)
5090 if (call_expr_nargs (exp) < 2)
5092 arg = CALL_EXPR_ARG (exp, 0);
5094 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5095 /* When guessing was done, the hints should be already stripped away. */
5096 gcc_assert (!flag_guess_branch_prob
5097 || optimize == 0 || seen_error ());
5102 expand_builtin_trap (void)
5106 emit_insn (gen_trap ());
5109 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5113 /* Expand a call to __builtin_unreachable. We do nothing except emit
5114 a barrier saying that control flow will not pass here.
5116 It is the responsibility of the program being compiled to ensure
5117 that control flow does never reach __builtin_unreachable. */
5119 expand_builtin_unreachable (void)
5124 /* Expand EXP, a call to fabs, fabsf or fabsl.
5125 Return NULL_RTX if a normal call should be emitted rather than expanding
5126 the function inline. If convenient, the result should be placed
5127 in TARGET. SUBTARGET may be used as the target for computing
5131 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5133 enum machine_mode mode;
5137 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5140 arg = CALL_EXPR_ARG (exp, 0);
5141 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5142 mode = TYPE_MODE (TREE_TYPE (arg));
5143 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5144 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5147 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5148 Return NULL is a normal call should be emitted rather than expanding the
5149 function inline. If convenient, the result should be placed in TARGET.
5150 SUBTARGET may be used as the target for computing the operand. */
5153 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5158 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5161 arg = CALL_EXPR_ARG (exp, 0);
5162 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5164 arg = CALL_EXPR_ARG (exp, 1);
5165 op1 = expand_normal (arg);
5167 return expand_copysign (op0, op1, target);
5170 /* Create a new constant string literal and return a char* pointer to it.
5171 The STRING_CST value is the LEN characters at STR. */
5173 build_string_literal (int len, const char *str)
5175 tree t, elem, index, type;
5177 t = build_string (len, str);
5178 elem = build_type_variant (char_type_node, 1, 0);
5179 index = build_index_type (size_int (len - 1));
5180 type = build_array_type (elem, index);
5181 TREE_TYPE (t) = type;
5182 TREE_CONSTANT (t) = 1;
5183 TREE_READONLY (t) = 1;
5184 TREE_STATIC (t) = 1;
5186 type = build_pointer_type (elem);
5187 t = build1 (ADDR_EXPR, type,
5188 build4 (ARRAY_REF, elem,
5189 t, integer_zero_node, NULL_TREE, NULL_TREE));
5193 /* Expand a call to __builtin___clear_cache. */
5196 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5198 #ifndef HAVE_clear_cache
5199 #ifdef CLEAR_INSN_CACHE
5200 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5201 does something. Just do the default expansion to a call to
5205 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5206 does nothing. There is no need to call it. Do nothing. */
5208 #endif /* CLEAR_INSN_CACHE */
5210 /* We have a "clear_cache" insn, and it will handle everything. */
5212 rtx begin_rtx, end_rtx;
5213 enum insn_code icode;
5215 /* We must not expand to a library call. If we did, any
5216 fallback library function in libgcc that might contain a call to
5217 __builtin___clear_cache() would recurse infinitely. */
5218 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5220 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5224 if (HAVE_clear_cache)
5226 icode = CODE_FOR_clear_cache;
5228 begin = CALL_EXPR_ARG (exp, 0);
5229 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5230 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5231 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5232 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5234 end = CALL_EXPR_ARG (exp, 1);
5235 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5236 end_rtx = convert_memory_address (Pmode, end_rtx);
5237 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5238 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5240 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5243 #endif /* HAVE_clear_cache */
5246 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5249 round_trampoline_addr (rtx tramp)
5251 rtx temp, addend, mask;
5253 /* If we don't need too much alignment, we'll have been guaranteed
5254 proper alignment by get_trampoline_type. */
5255 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5258 /* Round address up to desired boundary. */
5259 temp = gen_reg_rtx (Pmode);
5260 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5261 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5263 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5264 temp, 0, OPTAB_LIB_WIDEN);
5265 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5266 temp, 0, OPTAB_LIB_WIDEN);
5272 expand_builtin_init_trampoline (tree exp)
5274 tree t_tramp, t_func, t_chain;
5275 rtx m_tramp, r_tramp, r_chain, tmp;
5277 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5278 POINTER_TYPE, VOID_TYPE))
5281 t_tramp = CALL_EXPR_ARG (exp, 0);
5282 t_func = CALL_EXPR_ARG (exp, 1);
5283 t_chain = CALL_EXPR_ARG (exp, 2);
5285 r_tramp = expand_normal (t_tramp);
5286 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5287 MEM_NOTRAP_P (m_tramp) = 1;
5289 /* The TRAMP argument should be the address of a field within the
5290 local function's FRAME decl. Let's see if we can fill in the
5291 to fill in the MEM_ATTRs for this memory. */
5292 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5293 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5296 tmp = round_trampoline_addr (r_tramp);
5299 m_tramp = change_address (m_tramp, BLKmode, tmp);
5300 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5301 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5304 /* The FUNC argument should be the address of the nested function.
5305 Extract the actual function decl to pass to the hook. */
5306 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5307 t_func = TREE_OPERAND (t_func, 0);
5308 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5310 r_chain = expand_normal (t_chain);
5312 /* Generate insns to initialize the trampoline. */
5313 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5315 trampolines_created = 1;
5317 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5318 "trampoline generated for nested function %qD", t_func);
5324 expand_builtin_adjust_trampoline (tree exp)
5328 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5331 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5332 tramp = round_trampoline_addr (tramp);
5333 if (targetm.calls.trampoline_adjust_address)
5334 tramp = targetm.calls.trampoline_adjust_address (tramp);
5339 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5340 function. The function first checks whether the back end provides
5341 an insn to implement signbit for the respective mode. If not, it
5342 checks whether the floating point format of the value is such that
5343 the sign bit can be extracted. If that is not the case, the
5344 function returns NULL_RTX to indicate that a normal call should be
5345 emitted rather than expanding the function in-line. EXP is the
5346 expression that is a call to the builtin function; if convenient,
5347 the result should be placed in TARGET. */
5349 expand_builtin_signbit (tree exp, rtx target)
5351 const struct real_format *fmt;
5352 enum machine_mode fmode, imode, rmode;
5355 enum insn_code icode;
5357 location_t loc = EXPR_LOCATION (exp);
5359 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5362 arg = CALL_EXPR_ARG (exp, 0);
5363 fmode = TYPE_MODE (TREE_TYPE (arg));
5364 rmode = TYPE_MODE (TREE_TYPE (exp));
5365 fmt = REAL_MODE_FORMAT (fmode);
5367 arg = builtin_save_expr (arg);
5369 /* Expand the argument yielding a RTX expression. */
5370 temp = expand_normal (arg);
5372 /* Check if the back end provides an insn that handles signbit for the
5374 icode = optab_handler (signbit_optab, fmode);
5375 if (icode != CODE_FOR_nothing)
5377 rtx last = get_last_insn ();
5378 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5379 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5381 delete_insns_since (last);
5384 /* For floating point formats without a sign bit, implement signbit
5386 bitpos = fmt->signbit_ro;
5389 /* But we can't do this if the format supports signed zero. */
5390 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5393 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5394 build_real (TREE_TYPE (arg), dconst0));
5395 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5398 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5400 imode = int_mode_for_mode (fmode);
5401 if (imode == BLKmode)
5403 temp = gen_lowpart (imode, temp);
5408 /* Handle targets with different FP word orders. */
5409 if (FLOAT_WORDS_BIG_ENDIAN)
5410 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5412 word = bitpos / BITS_PER_WORD;
5413 temp = operand_subword_force (temp, word, fmode);
5414 bitpos = bitpos % BITS_PER_WORD;
5417 /* Force the intermediate word_mode (or narrower) result into a
5418 register. This avoids attempting to create paradoxical SUBREGs
5419 of floating point modes below. */
5420 temp = force_reg (imode, temp);
5422 /* If the bitpos is within the "result mode" lowpart, the operation
5423 can be implement with a single bitwise AND. Otherwise, we need
5424 a right shift and an AND. */
5426 if (bitpos < GET_MODE_BITSIZE (rmode))
5428 double_int mask = double_int_setbit (double_int_zero, bitpos);
5430 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5431 temp = gen_lowpart (rmode, temp);
5432 temp = expand_binop (rmode, and_optab, temp,
5433 immed_double_int_const (mask, rmode),
5434 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5438 /* Perform a logical right shift to place the signbit in the least
5439 significant bit, then truncate the result to the desired mode
5440 and mask just this bit. */
5441 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5442 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5443 temp = gen_lowpart (rmode, temp);
5444 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5445 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5451 /* Expand fork or exec calls. TARGET is the desired target of the
5452 call. EXP is the call. FN is the
5453 identificator of the actual function. IGNORE is nonzero if the
5454 value is to be ignored. */
5457 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5462 /* If we are not profiling, just call the function. */
5463 if (!profile_arc_flag)
5466 /* Otherwise call the wrapper. This should be equivalent for the rest of
5467 compiler, so the code does not diverge, and the wrapper may run the
5468 code necessary for keeping the profiling sane. */
5470 switch (DECL_FUNCTION_CODE (fn))
5473 id = get_identifier ("__gcov_fork");
5476 case BUILT_IN_EXECL:
5477 id = get_identifier ("__gcov_execl");
5480 case BUILT_IN_EXECV:
5481 id = get_identifier ("__gcov_execv");
5484 case BUILT_IN_EXECLP:
5485 id = get_identifier ("__gcov_execlp");
5488 case BUILT_IN_EXECLE:
5489 id = get_identifier ("__gcov_execle");
5492 case BUILT_IN_EXECVP:
5493 id = get_identifier ("__gcov_execvp");
5496 case BUILT_IN_EXECVE:
5497 id = get_identifier ("__gcov_execve");
5504 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5505 FUNCTION_DECL, id, TREE_TYPE (fn));
5506 DECL_EXTERNAL (decl) = 1;
5507 TREE_PUBLIC (decl) = 1;
5508 DECL_ARTIFICIAL (decl) = 1;
5509 TREE_NOTHROW (decl) = 1;
5510 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5511 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5512 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5513 return expand_call (call, target, ignore);
5518 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5519 the pointer in these functions is void*, the tree optimizers may remove
5520 casts. The mode computed in expand_builtin isn't reliable either, due
5521 to __sync_bool_compare_and_swap.
5523 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5524 group of builtins. This gives us log2 of the mode size. */
5526 static inline enum machine_mode
5527 get_builtin_sync_mode (int fcode_diff)
5529 /* The size is not negotiable, so ask not to get BLKmode in return
5530 if the target indicates that a smaller size would be better. */
5531 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5534 /* Expand the memory expression LOC and return the appropriate memory operand
5535 for the builtin_sync operations. */
5538 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5542 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5543 addr = convert_memory_address (Pmode, addr);
5545 /* Note that we explicitly do not want any alias information for this
5546 memory, so that we kill all other live memories. Otherwise we don't
5547 satisfy the full barrier semantics of the intrinsic. */
5548 mem = validize_mem (gen_rtx_MEM (mode, addr));
5550 /* The alignment needs to be at least according to that of the mode. */
5551 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5552 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5553 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5554 MEM_VOLATILE_P (mem) = 1;
5559 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5560 EXP is the CALL_EXPR. CODE is the rtx code
5561 that corresponds to the arithmetic or logical operation from the name;
5562 an exception here is that NOT actually means NAND. TARGET is an optional
5563 place for us to store the results; AFTER is true if this is the
5564 fetch_and_xxx form. IGNORE is true if we don't actually care about
5565 the result of the operation at all. */
5568 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5569 enum rtx_code code, bool after,
5570 rtx target, bool ignore)
5573 enum machine_mode old_mode;
5574 location_t loc = EXPR_LOCATION (exp);
5576 if (code == NOT && warn_sync_nand)
5578 tree fndecl = get_callee_fndecl (exp);
5579 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5581 static bool warned_f_a_n, warned_n_a_f;
5585 case BUILT_IN_FETCH_AND_NAND_1:
5586 case BUILT_IN_FETCH_AND_NAND_2:
5587 case BUILT_IN_FETCH_AND_NAND_4:
5588 case BUILT_IN_FETCH_AND_NAND_8:
5589 case BUILT_IN_FETCH_AND_NAND_16:
5594 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5595 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5596 warned_f_a_n = true;
5599 case BUILT_IN_NAND_AND_FETCH_1:
5600 case BUILT_IN_NAND_AND_FETCH_2:
5601 case BUILT_IN_NAND_AND_FETCH_4:
5602 case BUILT_IN_NAND_AND_FETCH_8:
5603 case BUILT_IN_NAND_AND_FETCH_16:
5608 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5609 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5610 warned_n_a_f = true;
5618 /* Expand the operands. */
5619 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5621 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5622 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5623 of CONST_INTs, where we know the old_mode only from the call argument. */
5624 old_mode = GET_MODE (val);
5625 if (old_mode == VOIDmode)
5626 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5627 val = convert_modes (mode, old_mode, val, 1);
5630 return expand_sync_operation (mem, val, code);
5632 return expand_sync_fetch_operation (mem, val, code, after, target);
5635 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5636 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5637 true if this is the boolean form. TARGET is a place for us to store the
5638 results; this is NOT optional if IS_BOOL is true. */
5641 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5642 bool is_bool, rtx target)
5644 rtx old_val, new_val, mem;
5645 enum machine_mode old_mode;
5647 /* Expand the operands. */
5648 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5651 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5652 mode, EXPAND_NORMAL);
5653 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5654 of CONST_INTs, where we know the old_mode only from the call argument. */
5655 old_mode = GET_MODE (old_val);
5656 if (old_mode == VOIDmode)
5657 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5658 old_val = convert_modes (mode, old_mode, old_val, 1);
5660 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5661 mode, EXPAND_NORMAL);
5662 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5663 of CONST_INTs, where we know the old_mode only from the call argument. */
5664 old_mode = GET_MODE (new_val);
5665 if (old_mode == VOIDmode)
5666 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5667 new_val = convert_modes (mode, old_mode, new_val, 1);
5670 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5672 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5675 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5676 general form is actually an atomic exchange, and some targets only
5677 support a reduced form with the second argument being a constant 1.
5678 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5682 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5686 enum machine_mode old_mode;
5688 /* Expand the operands. */
5689 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5690 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5691 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5692 of CONST_INTs, where we know the old_mode only from the call argument. */
5693 old_mode = GET_MODE (val);
5694 if (old_mode == VOIDmode)
5695 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5696 val = convert_modes (mode, old_mode, val, 1);
5698 return expand_sync_lock_test_and_set (mem, val, target);
5701 /* Expand the __sync_synchronize intrinsic. */
5704 expand_builtin_synchronize (void)
5707 VEC (tree, gc) *v_clobbers;
5709 #ifdef HAVE_memory_barrier
5710 if (HAVE_memory_barrier)
5712 emit_insn (gen_memory_barrier ());
5717 if (synchronize_libfunc != NULL_RTX)
5719 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5723 /* If no explicit memory barrier instruction is available, create an
5724 empty asm stmt with a memory clobber. */
5725 v_clobbers = VEC_alloc (tree, gc, 1);
5726 VEC_quick_push (tree, v_clobbers,
5727 tree_cons (NULL, build_string (6, "memory"), NULL));
5728 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5729 gimple_asm_set_volatile (x, true);
5730 expand_asm_stmt (x);
5733 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5736 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5738 enum insn_code icode;
5740 rtx val = const0_rtx;
5742 /* Expand the operands. */
5743 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5745 /* If there is an explicit operation in the md file, use it. */
5746 icode = direct_optab_handler (sync_lock_release_optab, mode);
5747 if (icode != CODE_FOR_nothing)
5749 if (!insn_data[icode].operand[1].predicate (val, mode))
5750 val = force_reg (mode, val);
5752 insn = GEN_FCN (icode) (mem, val);
5760 /* Otherwise we can implement this operation by emitting a barrier
5761 followed by a store of zero. */
5762 expand_builtin_synchronize ();
5763 emit_move_insn (mem, val);
5766 /* Expand an expression EXP that calls a built-in function,
5767 with result going to TARGET if that's convenient
5768 (and in mode MODE if that's convenient).
5769 SUBTARGET may be used as the target for computing one of EXP's operands.
5770 IGNORE is nonzero if the value is to be ignored. */
5773 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5776 tree fndecl = get_callee_fndecl (exp);
5777 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5778 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5781 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5782 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5784 /* When not optimizing, generate calls to library functions for a certain
5787 && !called_as_built_in (fndecl)
5788 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5789 && fcode != BUILT_IN_ALLOCA
5790 && fcode != BUILT_IN_FREE)
5791 return expand_call (exp, target, ignore);
5793 /* The built-in function expanders test for target == const0_rtx
5794 to determine whether the function's result will be ignored. */
5796 target = const0_rtx;
5798 /* If the result of a pure or const built-in function is ignored, and
5799 none of its arguments are volatile, we can avoid expanding the
5800 built-in call and just evaluate the arguments for side-effects. */
5801 if (target == const0_rtx
5802 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5803 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5805 bool volatilep = false;
5807 call_expr_arg_iterator iter;
5809 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5810 if (TREE_THIS_VOLATILE (arg))
5818 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5819 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5826 CASE_FLT_FN (BUILT_IN_FABS):
5827 target = expand_builtin_fabs (exp, target, subtarget);
5832 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5833 target = expand_builtin_copysign (exp, target, subtarget);
5838 /* Just do a normal library call if we were unable to fold
5840 CASE_FLT_FN (BUILT_IN_CABS):
5843 CASE_FLT_FN (BUILT_IN_EXP):
5844 CASE_FLT_FN (BUILT_IN_EXP10):
5845 CASE_FLT_FN (BUILT_IN_POW10):
5846 CASE_FLT_FN (BUILT_IN_EXP2):
5847 CASE_FLT_FN (BUILT_IN_EXPM1):
5848 CASE_FLT_FN (BUILT_IN_LOGB):
5849 CASE_FLT_FN (BUILT_IN_LOG):
5850 CASE_FLT_FN (BUILT_IN_LOG10):
5851 CASE_FLT_FN (BUILT_IN_LOG2):
5852 CASE_FLT_FN (BUILT_IN_LOG1P):
5853 CASE_FLT_FN (BUILT_IN_TAN):
5854 CASE_FLT_FN (BUILT_IN_ASIN):
5855 CASE_FLT_FN (BUILT_IN_ACOS):
5856 CASE_FLT_FN (BUILT_IN_ATAN):
5857 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5858 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5859 because of possible accuracy problems. */
5860 if (! flag_unsafe_math_optimizations)
5862 CASE_FLT_FN (BUILT_IN_SQRT):
5863 CASE_FLT_FN (BUILT_IN_FLOOR):
5864 CASE_FLT_FN (BUILT_IN_CEIL):
5865 CASE_FLT_FN (BUILT_IN_TRUNC):
5866 CASE_FLT_FN (BUILT_IN_ROUND):
5867 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5868 CASE_FLT_FN (BUILT_IN_RINT):
5869 target = expand_builtin_mathfn (exp, target, subtarget);
5874 CASE_FLT_FN (BUILT_IN_FMA):
5875 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5880 CASE_FLT_FN (BUILT_IN_ILOGB):
5881 if (! flag_unsafe_math_optimizations)
5883 CASE_FLT_FN (BUILT_IN_ISINF):
5884 CASE_FLT_FN (BUILT_IN_FINITE):
5885 case BUILT_IN_ISFINITE:
5886 case BUILT_IN_ISNORMAL:
5887 target = expand_builtin_interclass_mathfn (exp, target);
5892 CASE_FLT_FN (BUILT_IN_LCEIL):
5893 CASE_FLT_FN (BUILT_IN_LLCEIL):
5894 CASE_FLT_FN (BUILT_IN_LFLOOR):
5895 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5896 target = expand_builtin_int_roundingfn (exp, target);
5901 CASE_FLT_FN (BUILT_IN_LRINT):
5902 CASE_FLT_FN (BUILT_IN_LLRINT):
5903 CASE_FLT_FN (BUILT_IN_LROUND):
5904 CASE_FLT_FN (BUILT_IN_LLROUND):
5905 target = expand_builtin_int_roundingfn_2 (exp, target);
5910 CASE_FLT_FN (BUILT_IN_POW):
5911 target = expand_builtin_pow (exp, target, subtarget);
5916 CASE_FLT_FN (BUILT_IN_POWI):
5917 target = expand_builtin_powi (exp, target);
5922 CASE_FLT_FN (BUILT_IN_ATAN2):
5923 CASE_FLT_FN (BUILT_IN_LDEXP):
5924 CASE_FLT_FN (BUILT_IN_SCALB):
5925 CASE_FLT_FN (BUILT_IN_SCALBN):
5926 CASE_FLT_FN (BUILT_IN_SCALBLN):
5927 if (! flag_unsafe_math_optimizations)
5930 CASE_FLT_FN (BUILT_IN_FMOD):
5931 CASE_FLT_FN (BUILT_IN_REMAINDER):
5932 CASE_FLT_FN (BUILT_IN_DREM):
5933 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5938 CASE_FLT_FN (BUILT_IN_CEXPI):
5939 target = expand_builtin_cexpi (exp, target);
5940 gcc_assert (target);
5943 CASE_FLT_FN (BUILT_IN_SIN):
5944 CASE_FLT_FN (BUILT_IN_COS):
5945 if (! flag_unsafe_math_optimizations)
5947 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5952 CASE_FLT_FN (BUILT_IN_SINCOS):
5953 if (! flag_unsafe_math_optimizations)
5955 target = expand_builtin_sincos (exp);
5960 case BUILT_IN_APPLY_ARGS:
5961 return expand_builtin_apply_args ();
5963 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5964 FUNCTION with a copy of the parameters described by
5965 ARGUMENTS, and ARGSIZE. It returns a block of memory
5966 allocated on the stack into which is stored all the registers
5967 that might possibly be used for returning the result of a
5968 function. ARGUMENTS is the value returned by
5969 __builtin_apply_args. ARGSIZE is the number of bytes of
5970 arguments that must be copied. ??? How should this value be
5971 computed? We'll also need a safe worst case value for varargs
5973 case BUILT_IN_APPLY:
5974 if (!validate_arglist (exp, POINTER_TYPE,
5975 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5976 && !validate_arglist (exp, REFERENCE_TYPE,
5977 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5983 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5984 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5985 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5987 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5990 /* __builtin_return (RESULT) causes the function to return the
5991 value described by RESULT. RESULT is address of the block of
5992 memory returned by __builtin_apply. */
5993 case BUILT_IN_RETURN:
5994 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5995 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5998 case BUILT_IN_SAVEREGS:
5999 return expand_builtin_saveregs ();
6001 case BUILT_IN_VA_ARG_PACK:
6002 /* All valid uses of __builtin_va_arg_pack () are removed during
6004 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6007 case BUILT_IN_VA_ARG_PACK_LEN:
6008 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6010 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6013 /* Return the address of the first anonymous stack arg. */
6014 case BUILT_IN_NEXT_ARG:
6015 if (fold_builtin_next_arg (exp, false))
6017 return expand_builtin_next_arg ();
6019 case BUILT_IN_CLEAR_CACHE:
6020 target = expand_builtin___clear_cache (exp);
6025 case BUILT_IN_CLASSIFY_TYPE:
6026 return expand_builtin_classify_type (exp);
6028 case BUILT_IN_CONSTANT_P:
6031 case BUILT_IN_FRAME_ADDRESS:
6032 case BUILT_IN_RETURN_ADDRESS:
6033 return expand_builtin_frame_address (fndecl, exp);
6035 /* Returns the address of the area where the structure is returned.
6037 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6038 if (call_expr_nargs (exp) != 0
6039 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6040 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6043 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6045 case BUILT_IN_ALLOCA:
6046 /* If the allocation stems from the declaration of a variable-sized
6047 object, it cannot accumulate. */
6048 target = expand_builtin_alloca (exp, ALLOCA_FOR_VAR_P (exp));
6053 case BUILT_IN_STACK_SAVE:
6054 return expand_stack_save ();
6056 case BUILT_IN_STACK_RESTORE:
6057 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6060 case BUILT_IN_BSWAP32:
6061 case BUILT_IN_BSWAP64:
6062 target = expand_builtin_bswap (exp, target, subtarget);
6068 CASE_INT_FN (BUILT_IN_FFS):
6069 case BUILT_IN_FFSIMAX:
6070 target = expand_builtin_unop (target_mode, exp, target,
6071 subtarget, ffs_optab);
6076 CASE_INT_FN (BUILT_IN_CLZ):
6077 case BUILT_IN_CLZIMAX:
6078 target = expand_builtin_unop (target_mode, exp, target,
6079 subtarget, clz_optab);
6084 CASE_INT_FN (BUILT_IN_CTZ):
6085 case BUILT_IN_CTZIMAX:
6086 target = expand_builtin_unop (target_mode, exp, target,
6087 subtarget, ctz_optab);
6092 CASE_INT_FN (BUILT_IN_POPCOUNT):
6093 case BUILT_IN_POPCOUNTIMAX:
6094 target = expand_builtin_unop (target_mode, exp, target,
6095 subtarget, popcount_optab);
6100 CASE_INT_FN (BUILT_IN_PARITY):
6101 case BUILT_IN_PARITYIMAX:
6102 target = expand_builtin_unop (target_mode, exp, target,
6103 subtarget, parity_optab);
6108 case BUILT_IN_STRLEN:
6109 target = expand_builtin_strlen (exp, target, target_mode);
6114 case BUILT_IN_STRCPY:
6115 target = expand_builtin_strcpy (exp, target);
6120 case BUILT_IN_STRNCPY:
6121 target = expand_builtin_strncpy (exp, target);
6126 case BUILT_IN_STPCPY:
6127 target = expand_builtin_stpcpy (exp, target, mode);
6132 case BUILT_IN_MEMCPY:
6133 target = expand_builtin_memcpy (exp, target);
6138 case BUILT_IN_MEMPCPY:
6139 target = expand_builtin_mempcpy (exp, target, mode);
6144 case BUILT_IN_MEMSET:
6145 target = expand_builtin_memset (exp, target, mode);
6150 case BUILT_IN_BZERO:
6151 target = expand_builtin_bzero (exp);
6156 case BUILT_IN_STRCMP:
6157 target = expand_builtin_strcmp (exp, target);
6162 case BUILT_IN_STRNCMP:
6163 target = expand_builtin_strncmp (exp, target, mode);
6169 case BUILT_IN_MEMCMP:
6170 target = expand_builtin_memcmp (exp, target, mode);
6175 case BUILT_IN_SETJMP:
6176 /* This should have been lowered to the builtins below. */
6179 case BUILT_IN_SETJMP_SETUP:
6180 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6181 and the receiver label. */
6182 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6184 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6185 VOIDmode, EXPAND_NORMAL);
6186 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6187 rtx label_r = label_rtx (label);
6189 /* This is copied from the handling of non-local gotos. */
6190 expand_builtin_setjmp_setup (buf_addr, label_r);
6191 nonlocal_goto_handler_labels
6192 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6193 nonlocal_goto_handler_labels);
6194 /* ??? Do not let expand_label treat us as such since we would
6195 not want to be both on the list of non-local labels and on
6196 the list of forced labels. */
6197 FORCED_LABEL (label) = 0;
6202 case BUILT_IN_SETJMP_DISPATCHER:
6203 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6204 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6206 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6207 rtx label_r = label_rtx (label);
6209 /* Remove the dispatcher label from the list of non-local labels
6210 since the receiver labels have been added to it above. */
6211 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6216 case BUILT_IN_SETJMP_RECEIVER:
6217 /* __builtin_setjmp_receiver is passed the receiver label. */
6218 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6220 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6221 rtx label_r = label_rtx (label);
6223 expand_builtin_setjmp_receiver (label_r);
6228 /* __builtin_longjmp is passed a pointer to an array of five words.
6229 It's similar to the C library longjmp function but works with
6230 __builtin_setjmp above. */
6231 case BUILT_IN_LONGJMP:
6232 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6234 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6235 VOIDmode, EXPAND_NORMAL);
6236 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6238 if (value != const1_rtx)
6240 error ("%<__builtin_longjmp%> second argument must be 1");
6244 expand_builtin_longjmp (buf_addr, value);
6249 case BUILT_IN_NONLOCAL_GOTO:
6250 target = expand_builtin_nonlocal_goto (exp);
6255 /* This updates the setjmp buffer that is its argument with the value
6256 of the current stack pointer. */
6257 case BUILT_IN_UPDATE_SETJMP_BUF:
6258 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6261 = expand_normal (CALL_EXPR_ARG (exp, 0));
6263 expand_builtin_update_setjmp_buf (buf_addr);
6269 expand_builtin_trap ();
6272 case BUILT_IN_UNREACHABLE:
6273 expand_builtin_unreachable ();
6276 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6277 case BUILT_IN_SIGNBITD32:
6278 case BUILT_IN_SIGNBITD64:
6279 case BUILT_IN_SIGNBITD128:
6280 target = expand_builtin_signbit (exp, target);
6285 /* Various hooks for the DWARF 2 __throw routine. */
6286 case BUILT_IN_UNWIND_INIT:
6287 expand_builtin_unwind_init ();
6289 case BUILT_IN_DWARF_CFA:
6290 return virtual_cfa_rtx;
6291 #ifdef DWARF2_UNWIND_INFO
6292 case BUILT_IN_DWARF_SP_COLUMN:
6293 return expand_builtin_dwarf_sp_column ();
6294 case BUILT_IN_INIT_DWARF_REG_SIZES:
6295 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6298 case BUILT_IN_FROB_RETURN_ADDR:
6299 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6300 case BUILT_IN_EXTRACT_RETURN_ADDR:
6301 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6302 case BUILT_IN_EH_RETURN:
6303 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6304 CALL_EXPR_ARG (exp, 1));
6306 #ifdef EH_RETURN_DATA_REGNO
6307 case BUILT_IN_EH_RETURN_DATA_REGNO:
6308 return expand_builtin_eh_return_data_regno (exp);
6310 case BUILT_IN_EXTEND_POINTER:
6311 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6312 case BUILT_IN_EH_POINTER:
6313 return expand_builtin_eh_pointer (exp);
6314 case BUILT_IN_EH_FILTER:
6315 return expand_builtin_eh_filter (exp);
6316 case BUILT_IN_EH_COPY_VALUES:
6317 return expand_builtin_eh_copy_values (exp);
6319 case BUILT_IN_VA_START:
6320 return expand_builtin_va_start (exp);
6321 case BUILT_IN_VA_END:
6322 return expand_builtin_va_end (exp);
6323 case BUILT_IN_VA_COPY:
6324 return expand_builtin_va_copy (exp);
6325 case BUILT_IN_EXPECT:
6326 return expand_builtin_expect (exp, target);
6327 case BUILT_IN_PREFETCH:
6328 expand_builtin_prefetch (exp);
6331 case BUILT_IN_INIT_TRAMPOLINE:
6332 return expand_builtin_init_trampoline (exp);
6333 case BUILT_IN_ADJUST_TRAMPOLINE:
6334 return expand_builtin_adjust_trampoline (exp);
6337 case BUILT_IN_EXECL:
6338 case BUILT_IN_EXECV:
6339 case BUILT_IN_EXECLP:
6340 case BUILT_IN_EXECLE:
6341 case BUILT_IN_EXECVP:
6342 case BUILT_IN_EXECVE:
6343 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6348 case BUILT_IN_FETCH_AND_ADD_1:
6349 case BUILT_IN_FETCH_AND_ADD_2:
6350 case BUILT_IN_FETCH_AND_ADD_4:
6351 case BUILT_IN_FETCH_AND_ADD_8:
6352 case BUILT_IN_FETCH_AND_ADD_16:
6353 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6354 target = expand_builtin_sync_operation (mode, exp, PLUS,
6355 false, target, ignore);
6360 case BUILT_IN_FETCH_AND_SUB_1:
6361 case BUILT_IN_FETCH_AND_SUB_2:
6362 case BUILT_IN_FETCH_AND_SUB_4:
6363 case BUILT_IN_FETCH_AND_SUB_8:
6364 case BUILT_IN_FETCH_AND_SUB_16:
6365 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6366 target = expand_builtin_sync_operation (mode, exp, MINUS,
6367 false, target, ignore);
6372 case BUILT_IN_FETCH_AND_OR_1:
6373 case BUILT_IN_FETCH_AND_OR_2:
6374 case BUILT_IN_FETCH_AND_OR_4:
6375 case BUILT_IN_FETCH_AND_OR_8:
6376 case BUILT_IN_FETCH_AND_OR_16:
6377 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6378 target = expand_builtin_sync_operation (mode, exp, IOR,
6379 false, target, ignore);
6384 case BUILT_IN_FETCH_AND_AND_1:
6385 case BUILT_IN_FETCH_AND_AND_2:
6386 case BUILT_IN_FETCH_AND_AND_4:
6387 case BUILT_IN_FETCH_AND_AND_8:
6388 case BUILT_IN_FETCH_AND_AND_16:
6389 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6390 target = expand_builtin_sync_operation (mode, exp, AND,
6391 false, target, ignore);
6396 case BUILT_IN_FETCH_AND_XOR_1:
6397 case BUILT_IN_FETCH_AND_XOR_2:
6398 case BUILT_IN_FETCH_AND_XOR_4:
6399 case BUILT_IN_FETCH_AND_XOR_8:
6400 case BUILT_IN_FETCH_AND_XOR_16:
6401 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6402 target = expand_builtin_sync_operation (mode, exp, XOR,
6403 false, target, ignore);
6408 case BUILT_IN_FETCH_AND_NAND_1:
6409 case BUILT_IN_FETCH_AND_NAND_2:
6410 case BUILT_IN_FETCH_AND_NAND_4:
6411 case BUILT_IN_FETCH_AND_NAND_8:
6412 case BUILT_IN_FETCH_AND_NAND_16:
6413 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6414 target = expand_builtin_sync_operation (mode, exp, NOT,
6415 false, target, ignore);
6420 case BUILT_IN_ADD_AND_FETCH_1:
6421 case BUILT_IN_ADD_AND_FETCH_2:
6422 case BUILT_IN_ADD_AND_FETCH_4:
6423 case BUILT_IN_ADD_AND_FETCH_8:
6424 case BUILT_IN_ADD_AND_FETCH_16:
6425 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6426 target = expand_builtin_sync_operation (mode, exp, PLUS,
6427 true, target, ignore);
6432 case BUILT_IN_SUB_AND_FETCH_1:
6433 case BUILT_IN_SUB_AND_FETCH_2:
6434 case BUILT_IN_SUB_AND_FETCH_4:
6435 case BUILT_IN_SUB_AND_FETCH_8:
6436 case BUILT_IN_SUB_AND_FETCH_16:
6437 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6438 target = expand_builtin_sync_operation (mode, exp, MINUS,
6439 true, target, ignore);
6444 case BUILT_IN_OR_AND_FETCH_1:
6445 case BUILT_IN_OR_AND_FETCH_2:
6446 case BUILT_IN_OR_AND_FETCH_4:
6447 case BUILT_IN_OR_AND_FETCH_8:
6448 case BUILT_IN_OR_AND_FETCH_16:
6449 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6450 target = expand_builtin_sync_operation (mode, exp, IOR,
6451 true, target, ignore);
6456 case BUILT_IN_AND_AND_FETCH_1:
6457 case BUILT_IN_AND_AND_FETCH_2:
6458 case BUILT_IN_AND_AND_FETCH_4:
6459 case BUILT_IN_AND_AND_FETCH_8:
6460 case BUILT_IN_AND_AND_FETCH_16:
6461 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6462 target = expand_builtin_sync_operation (mode, exp, AND,
6463 true, target, ignore);
6468 case BUILT_IN_XOR_AND_FETCH_1:
6469 case BUILT_IN_XOR_AND_FETCH_2:
6470 case BUILT_IN_XOR_AND_FETCH_4:
6471 case BUILT_IN_XOR_AND_FETCH_8:
6472 case BUILT_IN_XOR_AND_FETCH_16:
6473 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6474 target = expand_builtin_sync_operation (mode, exp, XOR,
6475 true, target, ignore);
6480 case BUILT_IN_NAND_AND_FETCH_1:
6481 case BUILT_IN_NAND_AND_FETCH_2:
6482 case BUILT_IN_NAND_AND_FETCH_4:
6483 case BUILT_IN_NAND_AND_FETCH_8:
6484 case BUILT_IN_NAND_AND_FETCH_16:
6485 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6486 target = expand_builtin_sync_operation (mode, exp, NOT,
6487 true, target, ignore);
6492 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6493 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6494 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6495 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6496 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6497 if (mode == VOIDmode)
6498 mode = TYPE_MODE (boolean_type_node);
6499 if (!target || !register_operand (target, mode))
6500 target = gen_reg_rtx (mode);
6502 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6503 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6508 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6509 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6510 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6511 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6512 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6513 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6514 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6519 case BUILT_IN_LOCK_TEST_AND_SET_1:
6520 case BUILT_IN_LOCK_TEST_AND_SET_2:
6521 case BUILT_IN_LOCK_TEST_AND_SET_4:
6522 case BUILT_IN_LOCK_TEST_AND_SET_8:
6523 case BUILT_IN_LOCK_TEST_AND_SET_16:
6524 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6525 target = expand_builtin_lock_test_and_set (mode, exp, target);
6530 case BUILT_IN_LOCK_RELEASE_1:
6531 case BUILT_IN_LOCK_RELEASE_2:
6532 case BUILT_IN_LOCK_RELEASE_4:
6533 case BUILT_IN_LOCK_RELEASE_8:
6534 case BUILT_IN_LOCK_RELEASE_16:
6535 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6536 expand_builtin_lock_release (mode, exp);
6539 case BUILT_IN_SYNCHRONIZE:
6540 expand_builtin_synchronize ();
6543 case BUILT_IN_OBJECT_SIZE:
6544 return expand_builtin_object_size (exp);
6546 case BUILT_IN_MEMCPY_CHK:
6547 case BUILT_IN_MEMPCPY_CHK:
6548 case BUILT_IN_MEMMOVE_CHK:
6549 case BUILT_IN_MEMSET_CHK:
6550 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6555 case BUILT_IN_STRCPY_CHK:
6556 case BUILT_IN_STPCPY_CHK:
6557 case BUILT_IN_STRNCPY_CHK:
6558 case BUILT_IN_STRCAT_CHK:
6559 case BUILT_IN_STRNCAT_CHK:
6560 case BUILT_IN_SNPRINTF_CHK:
6561 case BUILT_IN_VSNPRINTF_CHK:
6562 maybe_emit_chk_warning (exp, fcode);
6565 case BUILT_IN_SPRINTF_CHK:
6566 case BUILT_IN_VSPRINTF_CHK:
6567 maybe_emit_sprintf_chk_warning (exp, fcode);
6571 maybe_emit_free_warning (exp);
6574 default: /* just do library call, if unknown builtin */
6578 /* The switch statement above can drop through to cause the function
6579 to be called normally. */
6580 return expand_call (exp, target, ignore);
6583 /* Determine whether a tree node represents a call to a built-in
6584 function. If the tree T is a call to a built-in function with
6585 the right number of arguments of the appropriate types, return
6586 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6587 Otherwise the return value is END_BUILTINS. */
6589 enum built_in_function
6590 builtin_mathfn_code (const_tree t)
6592 const_tree fndecl, arg, parmlist;
6593 const_tree argtype, parmtype;
6594 const_call_expr_arg_iterator iter;
6596 if (TREE_CODE (t) != CALL_EXPR
6597 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6598 return END_BUILTINS;
6600 fndecl = get_callee_fndecl (t);
6601 if (fndecl == NULL_TREE
6602 || TREE_CODE (fndecl) != FUNCTION_DECL
6603 || ! DECL_BUILT_IN (fndecl)
6604 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6605 return END_BUILTINS;
6607 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6608 init_const_call_expr_arg_iterator (t, &iter);
6609 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6611 /* If a function doesn't take a variable number of arguments,
6612 the last element in the list will have type `void'. */
6613 parmtype = TREE_VALUE (parmlist);
6614 if (VOID_TYPE_P (parmtype))
6616 if (more_const_call_expr_args_p (&iter))
6617 return END_BUILTINS;
6618 return DECL_FUNCTION_CODE (fndecl);
6621 if (! more_const_call_expr_args_p (&iter))
6622 return END_BUILTINS;
6624 arg = next_const_call_expr_arg (&iter);
6625 argtype = TREE_TYPE (arg);
6627 if (SCALAR_FLOAT_TYPE_P (parmtype))
6629 if (! SCALAR_FLOAT_TYPE_P (argtype))
6630 return END_BUILTINS;
6632 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6634 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6635 return END_BUILTINS;
6637 else if (POINTER_TYPE_P (parmtype))
6639 if (! POINTER_TYPE_P (argtype))
6640 return END_BUILTINS;
6642 else if (INTEGRAL_TYPE_P (parmtype))
6644 if (! INTEGRAL_TYPE_P (argtype))
6645 return END_BUILTINS;
6648 return END_BUILTINS;
6651 /* Variable-length argument list. */
6652 return DECL_FUNCTION_CODE (fndecl);
6655 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6656 evaluate to a constant. */
6659 fold_builtin_constant_p (tree arg)
6661 /* We return 1 for a numeric type that's known to be a constant
6662 value at compile-time or for an aggregate type that's a
6663 literal constant. */
6666 /* If we know this is a constant, emit the constant of one. */
6667 if (CONSTANT_CLASS_P (arg)
6668 || (TREE_CODE (arg) == CONSTRUCTOR
6669 && TREE_CONSTANT (arg)))
6670 return integer_one_node;
6671 if (TREE_CODE (arg) == ADDR_EXPR)
6673 tree op = TREE_OPERAND (arg, 0);
6674 if (TREE_CODE (op) == STRING_CST
6675 || (TREE_CODE (op) == ARRAY_REF
6676 && integer_zerop (TREE_OPERAND (op, 1))
6677 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6678 return integer_one_node;
6681 /* If this expression has side effects, show we don't know it to be a
6682 constant. Likewise if it's a pointer or aggregate type since in
6683 those case we only want literals, since those are only optimized
6684 when generating RTL, not later.
6685 And finally, if we are compiling an initializer, not code, we
6686 need to return a definite result now; there's not going to be any
6687 more optimization done. */
6688 if (TREE_SIDE_EFFECTS (arg)
6689 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6690 || POINTER_TYPE_P (TREE_TYPE (arg))
6692 || folding_initializer)
6693 return integer_zero_node;
6698 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6699 return it as a truthvalue. */
6702 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6704 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6706 fn = built_in_decls[BUILT_IN_EXPECT];
6707 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6708 ret_type = TREE_TYPE (TREE_TYPE (fn));
6709 pred_type = TREE_VALUE (arg_types);
6710 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6712 pred = fold_convert_loc (loc, pred_type, pred);
6713 expected = fold_convert_loc (loc, expected_type, expected);
6714 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6716 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6717 build_int_cst (ret_type, 0));
6720 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6721 NULL_TREE if no simplification is possible. */
6724 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6727 enum tree_code code;
6729 /* If this is a builtin_expect within a builtin_expect keep the
6730 inner one. See through a comparison against a constant. It
6731 might have been added to create a thruthvalue. */
6733 if (COMPARISON_CLASS_P (inner)
6734 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6735 inner = TREE_OPERAND (inner, 0);
6737 if (TREE_CODE (inner) == CALL_EXPR
6738 && (fndecl = get_callee_fndecl (inner))
6739 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6740 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6743 /* Distribute the expected value over short-circuiting operators.
6744 See through the cast from truthvalue_type_node to long. */
6746 while (TREE_CODE (inner) == NOP_EXPR
6747 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6748 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6749 inner = TREE_OPERAND (inner, 0);
6751 code = TREE_CODE (inner);
6752 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6754 tree op0 = TREE_OPERAND (inner, 0);
6755 tree op1 = TREE_OPERAND (inner, 1);
6757 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6758 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6759 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6761 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6764 /* If the argument isn't invariant then there's nothing else we can do. */
6765 if (!TREE_CONSTANT (arg0))
6768 /* If we expect that a comparison against the argument will fold to
6769 a constant return the constant. In practice, this means a true
6770 constant or the address of a non-weak symbol. */
6773 if (TREE_CODE (inner) == ADDR_EXPR)
6777 inner = TREE_OPERAND (inner, 0);
6779 while (TREE_CODE (inner) == COMPONENT_REF
6780 || TREE_CODE (inner) == ARRAY_REF);
6781 if ((TREE_CODE (inner) == VAR_DECL
6782 || TREE_CODE (inner) == FUNCTION_DECL)
6783 && DECL_WEAK (inner))
6787 /* Otherwise, ARG0 already has the proper type for the return value. */
6791 /* Fold a call to __builtin_classify_type with argument ARG. */
6794 fold_builtin_classify_type (tree arg)
6797 return build_int_cst (NULL_TREE, no_type_class);
6799 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6802 /* Fold a call to __builtin_strlen with argument ARG. */
6805 fold_builtin_strlen (location_t loc, tree type, tree arg)
6807 if (!validate_arg (arg, POINTER_TYPE))
6811 tree len = c_strlen (arg, 0);
6814 return fold_convert_loc (loc, type, len);
6820 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6823 fold_builtin_inf (location_t loc, tree type, int warn)
6825 REAL_VALUE_TYPE real;
6827 /* __builtin_inff is intended to be usable to define INFINITY on all
6828 targets. If an infinity is not available, INFINITY expands "to a
6829 positive constant of type float that overflows at translation
6830 time", footnote "In this case, using INFINITY will violate the
6831 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6832 Thus we pedwarn to ensure this constraint violation is
6834 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6835 pedwarn (loc, 0, "target format does not support infinity");
6838 return build_real (type, real);
6841 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6844 fold_builtin_nan (tree arg, tree type, int quiet)
6846 REAL_VALUE_TYPE real;
6849 if (!validate_arg (arg, POINTER_TYPE))
6851 str = c_getstr (arg);
6855 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6858 return build_real (type, real);
6861 /* Return true if the floating point expression T has an integer value.
6862 We also allow +Inf, -Inf and NaN to be considered integer values. */
6865 integer_valued_real_p (tree t)
6867 switch (TREE_CODE (t))
6874 return integer_valued_real_p (TREE_OPERAND (t, 0));
6879 return integer_valued_real_p (TREE_OPERAND (t, 1));
6886 return integer_valued_real_p (TREE_OPERAND (t, 0))
6887 && integer_valued_real_p (TREE_OPERAND (t, 1));
6890 return integer_valued_real_p (TREE_OPERAND (t, 1))
6891 && integer_valued_real_p (TREE_OPERAND (t, 2));
6894 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6898 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6899 if (TREE_CODE (type) == INTEGER_TYPE)
6901 if (TREE_CODE (type) == REAL_TYPE)
6902 return integer_valued_real_p (TREE_OPERAND (t, 0));
6907 switch (builtin_mathfn_code (t))
6909 CASE_FLT_FN (BUILT_IN_CEIL):
6910 CASE_FLT_FN (BUILT_IN_FLOOR):
6911 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6912 CASE_FLT_FN (BUILT_IN_RINT):
6913 CASE_FLT_FN (BUILT_IN_ROUND):
6914 CASE_FLT_FN (BUILT_IN_TRUNC):
6917 CASE_FLT_FN (BUILT_IN_FMIN):
6918 CASE_FLT_FN (BUILT_IN_FMAX):
6919 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6920 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6933 /* FNDECL is assumed to be a builtin where truncation can be propagated
6934 across (for instance floor((double)f) == (double)floorf (f).
6935 Do the transformation for a call with argument ARG. */
6938 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6940 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6942 if (!validate_arg (arg, REAL_TYPE))
6945 /* Integer rounding functions are idempotent. */
6946 if (fcode == builtin_mathfn_code (arg))
6949 /* If argument is already integer valued, and we don't need to worry
6950 about setting errno, there's no need to perform rounding. */
6951 if (! flag_errno_math && integer_valued_real_p (arg))
6956 tree arg0 = strip_float_extensions (arg);
6957 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6958 tree newtype = TREE_TYPE (arg0);
6961 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6962 && (decl = mathfn_built_in (newtype, fcode)))
6963 return fold_convert_loc (loc, ftype,
6964 build_call_expr_loc (loc, decl, 1,
6965 fold_convert_loc (loc,
6972 /* FNDECL is assumed to be builtin which can narrow the FP type of
6973 the argument, for instance lround((double)f) -> lroundf (f).
6974 Do the transformation for a call with argument ARG. */
6977 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6979 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6981 if (!validate_arg (arg, REAL_TYPE))
6984 /* If argument is already integer valued, and we don't need to worry
6985 about setting errno, there's no need to perform rounding. */
6986 if (! flag_errno_math && integer_valued_real_p (arg))
6987 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6988 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6992 tree ftype = TREE_TYPE (arg);
6993 tree arg0 = strip_float_extensions (arg);
6994 tree newtype = TREE_TYPE (arg0);
6997 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6998 && (decl = mathfn_built_in (newtype, fcode)))
6999 return build_call_expr_loc (loc, decl, 1,
7000 fold_convert_loc (loc, newtype, arg0));
7003 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7004 sizeof (long long) == sizeof (long). */
7005 if (TYPE_PRECISION (long_long_integer_type_node)
7006 == TYPE_PRECISION (long_integer_type_node))
7008 tree newfn = NULL_TREE;
7011 CASE_FLT_FN (BUILT_IN_LLCEIL):
7012 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7015 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7016 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7019 CASE_FLT_FN (BUILT_IN_LLROUND):
7020 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7023 CASE_FLT_FN (BUILT_IN_LLRINT):
7024 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7033 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7034 return fold_convert_loc (loc,
7035 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7042 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7043 return type. Return NULL_TREE if no simplification can be made. */
7046 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7050 if (!validate_arg (arg, COMPLEX_TYPE)
7051 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7054 /* Calculate the result when the argument is a constant. */
7055 if (TREE_CODE (arg) == COMPLEX_CST
7056 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7060 if (TREE_CODE (arg) == COMPLEX_EXPR)
7062 tree real = TREE_OPERAND (arg, 0);
7063 tree imag = TREE_OPERAND (arg, 1);
7065 /* If either part is zero, cabs is fabs of the other. */
7066 if (real_zerop (real))
7067 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7068 if (real_zerop (imag))
7069 return fold_build1_loc (loc, ABS_EXPR, type, real);
7071 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7072 if (flag_unsafe_math_optimizations
7073 && operand_equal_p (real, imag, OEP_PURE_SAME))
7075 const REAL_VALUE_TYPE sqrt2_trunc
7076 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7078 return fold_build2_loc (loc, MULT_EXPR, type,
7079 fold_build1_loc (loc, ABS_EXPR, type, real),
7080 build_real (type, sqrt2_trunc));
7084 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7085 if (TREE_CODE (arg) == NEGATE_EXPR
7086 || TREE_CODE (arg) == CONJ_EXPR)
7087 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7089 /* Don't do this when optimizing for size. */
7090 if (flag_unsafe_math_optimizations
7091 && optimize && optimize_function_for_speed_p (cfun))
7093 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7095 if (sqrtfn != NULL_TREE)
7097 tree rpart, ipart, result;
7099 arg = builtin_save_expr (arg);
7101 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7102 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7104 rpart = builtin_save_expr (rpart);
7105 ipart = builtin_save_expr (ipart);
7107 result = fold_build2_loc (loc, PLUS_EXPR, type,
7108 fold_build2_loc (loc, MULT_EXPR, type,
7110 fold_build2_loc (loc, MULT_EXPR, type,
7113 return build_call_expr_loc (loc, sqrtfn, 1, result);
7120 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7121 complex tree type of the result. If NEG is true, the imaginary
7122 zero is negative. */
7125 build_complex_cproj (tree type, bool neg)
7127 REAL_VALUE_TYPE rinf, rzero = dconst0;
7131 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7132 build_real (TREE_TYPE (type), rzero));
7135 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7136 return type. Return NULL_TREE if no simplification can be made. */
7139 fold_builtin_cproj (location_t loc, tree arg, tree type)
7141 if (!validate_arg (arg, COMPLEX_TYPE)
7142 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7145 /* If there are no infinities, return arg. */
7146 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7147 return non_lvalue_loc (loc, arg);
7149 /* Calculate the result when the argument is a constant. */
7150 if (TREE_CODE (arg) == COMPLEX_CST)
7152 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7153 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7155 if (real_isinf (real) || real_isinf (imag))
7156 return build_complex_cproj (type, imag->sign);
7160 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7162 tree real = TREE_OPERAND (arg, 0);
7163 tree imag = TREE_OPERAND (arg, 1);
7168 /* If the real part is inf and the imag part is known to be
7169 nonnegative, return (inf + 0i). Remember side-effects are
7170 possible in the imag part. */
7171 if (TREE_CODE (real) == REAL_CST
7172 && real_isinf (TREE_REAL_CST_PTR (real))
7173 && tree_expr_nonnegative_p (imag))
7174 return omit_one_operand_loc (loc, type,
7175 build_complex_cproj (type, false),
7178 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7179 Remember side-effects are possible in the real part. */
7180 if (TREE_CODE (imag) == REAL_CST
7181 && real_isinf (TREE_REAL_CST_PTR (imag)))
7183 omit_one_operand_loc (loc, type,
7184 build_complex_cproj (type, TREE_REAL_CST_PTR
7185 (imag)->sign), arg);
7191 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7192 Return NULL_TREE if no simplification can be made. */
7195 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7198 enum built_in_function fcode;
7201 if (!validate_arg (arg, REAL_TYPE))
7204 /* Calculate the result when the argument is a constant. */
7205 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7208 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7209 fcode = builtin_mathfn_code (arg);
7210 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7212 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7213 arg = fold_build2_loc (loc, MULT_EXPR, type,
7214 CALL_EXPR_ARG (arg, 0),
7215 build_real (type, dconsthalf));
7216 return build_call_expr_loc (loc, expfn, 1, arg);
7219 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7220 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7222 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7226 tree arg0 = CALL_EXPR_ARG (arg, 0);
7228 /* The inner root was either sqrt or cbrt. */
7229 /* This was a conditional expression but it triggered a bug
7231 REAL_VALUE_TYPE dconstroot;
7232 if (BUILTIN_SQRT_P (fcode))
7233 dconstroot = dconsthalf;
7235 dconstroot = dconst_third ();
7237 /* Adjust for the outer root. */
7238 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7239 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7240 tree_root = build_real (type, dconstroot);
7241 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7245 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7246 if (flag_unsafe_math_optimizations
7247 && (fcode == BUILT_IN_POW
7248 || fcode == BUILT_IN_POWF
7249 || fcode == BUILT_IN_POWL))
7251 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7252 tree arg0 = CALL_EXPR_ARG (arg, 0);
7253 tree arg1 = CALL_EXPR_ARG (arg, 1);
7255 if (!tree_expr_nonnegative_p (arg0))
7256 arg0 = build1 (ABS_EXPR, type, arg0);
7257 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7258 build_real (type, dconsthalf));
7259 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7265 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7266 Return NULL_TREE if no simplification can be made. */
7269 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7271 const enum built_in_function fcode = builtin_mathfn_code (arg);
7274 if (!validate_arg (arg, REAL_TYPE))
7277 /* Calculate the result when the argument is a constant. */
7278 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7281 if (flag_unsafe_math_optimizations)
7283 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7284 if (BUILTIN_EXPONENT_P (fcode))
7286 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7287 const REAL_VALUE_TYPE third_trunc =
7288 real_value_truncate (TYPE_MODE (type), dconst_third ());
7289 arg = fold_build2_loc (loc, MULT_EXPR, type,
7290 CALL_EXPR_ARG (arg, 0),
7291 build_real (type, third_trunc));
7292 return build_call_expr_loc (loc, expfn, 1, arg);
7295 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7296 if (BUILTIN_SQRT_P (fcode))
7298 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7302 tree arg0 = CALL_EXPR_ARG (arg, 0);
7304 REAL_VALUE_TYPE dconstroot = dconst_third ();
7306 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7307 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7308 tree_root = build_real (type, dconstroot);
7309 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7313 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7314 if (BUILTIN_CBRT_P (fcode))
7316 tree arg0 = CALL_EXPR_ARG (arg, 0);
7317 if (tree_expr_nonnegative_p (arg0))
7319 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7324 REAL_VALUE_TYPE dconstroot;
7326 real_arithmetic (&dconstroot, MULT_EXPR,
7327 dconst_third_ptr (), dconst_third_ptr ());
7328 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7329 tree_root = build_real (type, dconstroot);
7330 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7335 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7336 if (fcode == BUILT_IN_POW
7337 || fcode == BUILT_IN_POWF
7338 || fcode == BUILT_IN_POWL)
7340 tree arg00 = CALL_EXPR_ARG (arg, 0);
7341 tree arg01 = CALL_EXPR_ARG (arg, 1);
7342 if (tree_expr_nonnegative_p (arg00))
7344 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7345 const REAL_VALUE_TYPE dconstroot
7346 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7347 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7348 build_real (type, dconstroot));
7349 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7356 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7357 TYPE is the type of the return value. Return NULL_TREE if no
7358 simplification can be made. */
7361 fold_builtin_cos (location_t loc,
7362 tree arg, tree type, tree fndecl)
7366 if (!validate_arg (arg, REAL_TYPE))
7369 /* Calculate the result when the argument is a constant. */
7370 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7373 /* Optimize cos(-x) into cos (x). */
7374 if ((narg = fold_strip_sign_ops (arg)))
7375 return build_call_expr_loc (loc, fndecl, 1, narg);
7380 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7381 Return NULL_TREE if no simplification can be made. */
7384 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7386 if (validate_arg (arg, REAL_TYPE))
7390 /* Calculate the result when the argument is a constant. */
7391 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7394 /* Optimize cosh(-x) into cosh (x). */
7395 if ((narg = fold_strip_sign_ops (arg)))
7396 return build_call_expr_loc (loc, fndecl, 1, narg);
7402 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7403 argument ARG. TYPE is the type of the return value. Return
7404 NULL_TREE if no simplification can be made. */
7407 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7410 if (validate_arg (arg, COMPLEX_TYPE)
7411 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7415 /* Calculate the result when the argument is a constant. */
7416 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7419 /* Optimize fn(-x) into fn(x). */
7420 if ((tmp = fold_strip_sign_ops (arg)))
7421 return build_call_expr_loc (loc, fndecl, 1, tmp);
7427 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7428 Return NULL_TREE if no simplification can be made. */
7431 fold_builtin_tan (tree arg, tree type)
7433 enum built_in_function fcode;
7436 if (!validate_arg (arg, REAL_TYPE))
7439 /* Calculate the result when the argument is a constant. */
7440 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7443 /* Optimize tan(atan(x)) = x. */
7444 fcode = builtin_mathfn_code (arg);
7445 if (flag_unsafe_math_optimizations
7446 && (fcode == BUILT_IN_ATAN
7447 || fcode == BUILT_IN_ATANF
7448 || fcode == BUILT_IN_ATANL))
7449 return CALL_EXPR_ARG (arg, 0);
7454 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7455 NULL_TREE if no simplification can be made. */
7458 fold_builtin_sincos (location_t loc,
7459 tree arg0, tree arg1, tree arg2)
7464 if (!validate_arg (arg0, REAL_TYPE)
7465 || !validate_arg (arg1, POINTER_TYPE)
7466 || !validate_arg (arg2, POINTER_TYPE))
7469 type = TREE_TYPE (arg0);
7471 /* Calculate the result when the argument is a constant. */
7472 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7475 /* Canonicalize sincos to cexpi. */
7476 if (!TARGET_C99_FUNCTIONS)
7478 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7482 call = build_call_expr_loc (loc, fn, 1, arg0);
7483 call = builtin_save_expr (call);
7485 return build2 (COMPOUND_EXPR, void_type_node,
7486 build2 (MODIFY_EXPR, void_type_node,
7487 build_fold_indirect_ref_loc (loc, arg1),
7488 build1 (IMAGPART_EXPR, type, call)),
7489 build2 (MODIFY_EXPR, void_type_node,
7490 build_fold_indirect_ref_loc (loc, arg2),
7491 build1 (REALPART_EXPR, type, call)));
7494 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7495 NULL_TREE if no simplification can be made. */
7498 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7501 tree realp, imagp, ifn;
7504 if (!validate_arg (arg0, COMPLEX_TYPE)
7505 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7508 /* Calculate the result when the argument is a constant. */
7509 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7512 rtype = TREE_TYPE (TREE_TYPE (arg0));
7514 /* In case we can figure out the real part of arg0 and it is constant zero
7516 if (!TARGET_C99_FUNCTIONS)
7518 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7522 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7523 && real_zerop (realp))
7525 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7526 return build_call_expr_loc (loc, ifn, 1, narg);
7529 /* In case we can easily decompose real and imaginary parts split cexp
7530 to exp (r) * cexpi (i). */
7531 if (flag_unsafe_math_optimizations
7534 tree rfn, rcall, icall;
7536 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7540 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7544 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7545 icall = builtin_save_expr (icall);
7546 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7547 rcall = builtin_save_expr (rcall);
7548 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7549 fold_build2_loc (loc, MULT_EXPR, rtype,
7551 fold_build1_loc (loc, REALPART_EXPR,
7553 fold_build2_loc (loc, MULT_EXPR, rtype,
7555 fold_build1_loc (loc, IMAGPART_EXPR,
7562 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7563 Return NULL_TREE if no simplification can be made. */
7566 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7568 if (!validate_arg (arg, REAL_TYPE))
7571 /* Optimize trunc of constant value. */
7572 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7574 REAL_VALUE_TYPE r, x;
7575 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7577 x = TREE_REAL_CST (arg);
7578 real_trunc (&r, TYPE_MODE (type), &x);
7579 return build_real (type, r);
7582 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7585 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7586 Return NULL_TREE if no simplification can be made. */
7589 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7591 if (!validate_arg (arg, REAL_TYPE))
7594 /* Optimize floor of constant value. */
7595 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7599 x = TREE_REAL_CST (arg);
7600 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7602 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7605 real_floor (&r, TYPE_MODE (type), &x);
7606 return build_real (type, r);
7610 /* Fold floor (x) where x is nonnegative to trunc (x). */
7611 if (tree_expr_nonnegative_p (arg))
7613 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7615 return build_call_expr_loc (loc, truncfn, 1, arg);
7618 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7621 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7622 Return NULL_TREE if no simplification can be made. */
7625 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7627 if (!validate_arg (arg, REAL_TYPE))
7630 /* Optimize ceil of constant value. */
7631 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7635 x = TREE_REAL_CST (arg);
7636 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7638 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7641 real_ceil (&r, TYPE_MODE (type), &x);
7642 return build_real (type, r);
7646 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7649 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7650 Return NULL_TREE if no simplification can be made. */
7653 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7655 if (!validate_arg (arg, REAL_TYPE))
7658 /* Optimize round of constant value. */
7659 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7663 x = TREE_REAL_CST (arg);
7664 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7666 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7669 real_round (&r, TYPE_MODE (type), &x);
7670 return build_real (type, r);
7674 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7677 /* Fold function call to builtin lround, lroundf or lroundl (or the
7678 corresponding long long versions) and other rounding functions. ARG
7679 is the argument to the call. Return NULL_TREE if no simplification
7683 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7685 if (!validate_arg (arg, REAL_TYPE))
7688 /* Optimize lround of constant value. */
7689 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7691 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7693 if (real_isfinite (&x))
7695 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7696 tree ftype = TREE_TYPE (arg);
7700 switch (DECL_FUNCTION_CODE (fndecl))
7702 CASE_FLT_FN (BUILT_IN_LFLOOR):
7703 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7704 real_floor (&r, TYPE_MODE (ftype), &x);
7707 CASE_FLT_FN (BUILT_IN_LCEIL):
7708 CASE_FLT_FN (BUILT_IN_LLCEIL):
7709 real_ceil (&r, TYPE_MODE (ftype), &x);
7712 CASE_FLT_FN (BUILT_IN_LROUND):
7713 CASE_FLT_FN (BUILT_IN_LLROUND):
7714 real_round (&r, TYPE_MODE (ftype), &x);
7721 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7722 if (double_int_fits_to_tree_p (itype, val))
7723 return double_int_to_tree (itype, val);
7727 switch (DECL_FUNCTION_CODE (fndecl))
7729 CASE_FLT_FN (BUILT_IN_LFLOOR):
7730 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7731 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7732 if (tree_expr_nonnegative_p (arg))
7733 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7734 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7739 return fold_fixed_mathfn (loc, fndecl, arg);
7742 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7743 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7744 the argument to the call. Return NULL_TREE if no simplification can
7748 fold_builtin_bitop (tree fndecl, tree arg)
7750 if (!validate_arg (arg, INTEGER_TYPE))
7753 /* Optimize for constant argument. */
7754 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7756 HOST_WIDE_INT hi, width, result;
7757 unsigned HOST_WIDE_INT lo;
7760 type = TREE_TYPE (arg);
7761 width = TYPE_PRECISION (type);
7762 lo = TREE_INT_CST_LOW (arg);
7764 /* Clear all the bits that are beyond the type's precision. */
7765 if (width > HOST_BITS_PER_WIDE_INT)
7767 hi = TREE_INT_CST_HIGH (arg);
7768 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7769 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7774 if (width < HOST_BITS_PER_WIDE_INT)
7775 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7778 switch (DECL_FUNCTION_CODE (fndecl))
7780 CASE_INT_FN (BUILT_IN_FFS):
7782 result = ffs_hwi (lo);
7784 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7789 CASE_INT_FN (BUILT_IN_CLZ):
7791 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7793 result = width - floor_log2 (lo) - 1;
7794 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7798 CASE_INT_FN (BUILT_IN_CTZ):
7800 result = ctz_hwi (lo);
7802 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7803 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7807 CASE_INT_FN (BUILT_IN_POPCOUNT):
7810 result++, lo &= lo - 1;
7812 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7815 CASE_INT_FN (BUILT_IN_PARITY):
7818 result++, lo &= lo - 1;
7820 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7828 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7834 /* Fold function call to builtin_bswap and the long and long long
7835 variants. Return NULL_TREE if no simplification can be made. */
7837 fold_builtin_bswap (tree fndecl, tree arg)
7839 if (! validate_arg (arg, INTEGER_TYPE))
7842 /* Optimize constant value. */
7843 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7845 HOST_WIDE_INT hi, width, r_hi = 0;
7846 unsigned HOST_WIDE_INT lo, r_lo = 0;
7849 type = TREE_TYPE (arg);
7850 width = TYPE_PRECISION (type);
7851 lo = TREE_INT_CST_LOW (arg);
7852 hi = TREE_INT_CST_HIGH (arg);
7854 switch (DECL_FUNCTION_CODE (fndecl))
7856 case BUILT_IN_BSWAP32:
7857 case BUILT_IN_BSWAP64:
7861 for (s = 0; s < width; s += 8)
7863 int d = width - s - 8;
7864 unsigned HOST_WIDE_INT byte;
7866 if (s < HOST_BITS_PER_WIDE_INT)
7867 byte = (lo >> s) & 0xff;
7869 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7871 if (d < HOST_BITS_PER_WIDE_INT)
7874 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7884 if (width < HOST_BITS_PER_WIDE_INT)
7885 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7887 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7893 /* A subroutine of fold_builtin to fold the various logarithmic
7894 functions. Return NULL_TREE if no simplification can me made.
7895 FUNC is the corresponding MPFR logarithm function. */
7898 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7899 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7901 if (validate_arg (arg, REAL_TYPE))
7903 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7905 const enum built_in_function fcode = builtin_mathfn_code (arg);
7907 /* Calculate the result when the argument is a constant. */
7908 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7911 /* Special case, optimize logN(expN(x)) = x. */
7912 if (flag_unsafe_math_optimizations
7913 && ((func == mpfr_log
7914 && (fcode == BUILT_IN_EXP
7915 || fcode == BUILT_IN_EXPF
7916 || fcode == BUILT_IN_EXPL))
7917 || (func == mpfr_log2
7918 && (fcode == BUILT_IN_EXP2
7919 || fcode == BUILT_IN_EXP2F
7920 || fcode == BUILT_IN_EXP2L))
7921 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7922 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7924 /* Optimize logN(func()) for various exponential functions. We
7925 want to determine the value "x" and the power "exponent" in
7926 order to transform logN(x**exponent) into exponent*logN(x). */
7927 if (flag_unsafe_math_optimizations)
7929 tree exponent = 0, x = 0;
7933 CASE_FLT_FN (BUILT_IN_EXP):
7934 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7935 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7937 exponent = CALL_EXPR_ARG (arg, 0);
7939 CASE_FLT_FN (BUILT_IN_EXP2):
7940 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7941 x = build_real (type, dconst2);
7942 exponent = CALL_EXPR_ARG (arg, 0);
7944 CASE_FLT_FN (BUILT_IN_EXP10):
7945 CASE_FLT_FN (BUILT_IN_POW10):
7946 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7948 REAL_VALUE_TYPE dconst10;
7949 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7950 x = build_real (type, dconst10);
7952 exponent = CALL_EXPR_ARG (arg, 0);
7954 CASE_FLT_FN (BUILT_IN_SQRT):
7955 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7956 x = CALL_EXPR_ARG (arg, 0);
7957 exponent = build_real (type, dconsthalf);
7959 CASE_FLT_FN (BUILT_IN_CBRT):
7960 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7961 x = CALL_EXPR_ARG (arg, 0);
7962 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7965 CASE_FLT_FN (BUILT_IN_POW):
7966 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7967 x = CALL_EXPR_ARG (arg, 0);
7968 exponent = CALL_EXPR_ARG (arg, 1);
7974 /* Now perform the optimization. */
7977 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7978 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7986 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7987 NULL_TREE if no simplification can be made. */
7990 fold_builtin_hypot (location_t loc, tree fndecl,
7991 tree arg0, tree arg1, tree type)
7993 tree res, narg0, narg1;
7995 if (!validate_arg (arg0, REAL_TYPE)
7996 || !validate_arg (arg1, REAL_TYPE))
7999 /* Calculate the result when the argument is a constant. */
8000 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8003 /* If either argument to hypot has a negate or abs, strip that off.
8004 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8005 narg0 = fold_strip_sign_ops (arg0);
8006 narg1 = fold_strip_sign_ops (arg1);
8009 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8010 narg1 ? narg1 : arg1);
8013 /* If either argument is zero, hypot is fabs of the other. */
8014 if (real_zerop (arg0))
8015 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8016 else if (real_zerop (arg1))
8017 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8019 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8020 if (flag_unsafe_math_optimizations
8021 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8023 const REAL_VALUE_TYPE sqrt2_trunc
8024 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8025 return fold_build2_loc (loc, MULT_EXPR, type,
8026 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8027 build_real (type, sqrt2_trunc));
8034 /* Fold a builtin function call to pow, powf, or powl. Return
8035 NULL_TREE if no simplification can be made. */
8037 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8041 if (!validate_arg (arg0, REAL_TYPE)
8042 || !validate_arg (arg1, REAL_TYPE))
8045 /* Calculate the result when the argument is a constant. */
8046 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8049 /* Optimize pow(1.0,y) = 1.0. */
8050 if (real_onep (arg0))
8051 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8053 if (TREE_CODE (arg1) == REAL_CST
8054 && !TREE_OVERFLOW (arg1))
8056 REAL_VALUE_TYPE cint;
8060 c = TREE_REAL_CST (arg1);
8062 /* Optimize pow(x,0.0) = 1.0. */
8063 if (REAL_VALUES_EQUAL (c, dconst0))
8064 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8067 /* Optimize pow(x,1.0) = x. */
8068 if (REAL_VALUES_EQUAL (c, dconst1))
8071 /* Optimize pow(x,-1.0) = 1.0/x. */
8072 if (REAL_VALUES_EQUAL (c, dconstm1))
8073 return fold_build2_loc (loc, RDIV_EXPR, type,
8074 build_real (type, dconst1), arg0);
8076 /* Optimize pow(x,0.5) = sqrt(x). */
8077 if (flag_unsafe_math_optimizations
8078 && REAL_VALUES_EQUAL (c, dconsthalf))
8080 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8082 if (sqrtfn != NULL_TREE)
8083 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8086 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8087 if (flag_unsafe_math_optimizations)
8089 const REAL_VALUE_TYPE dconstroot
8090 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8092 if (REAL_VALUES_EQUAL (c, dconstroot))
8094 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8095 if (cbrtfn != NULL_TREE)
8096 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8100 /* Check for an integer exponent. */
8101 n = real_to_integer (&c);
8102 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8103 if (real_identical (&c, &cint))
8105 /* Attempt to evaluate pow at compile-time, unless this should
8106 raise an exception. */
8107 if (TREE_CODE (arg0) == REAL_CST
8108 && !TREE_OVERFLOW (arg0)
8110 || (!flag_trapping_math && !flag_errno_math)
8111 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8116 x = TREE_REAL_CST (arg0);
8117 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8118 if (flag_unsafe_math_optimizations || !inexact)
8119 return build_real (type, x);
8122 /* Strip sign ops from even integer powers. */
8123 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8125 tree narg0 = fold_strip_sign_ops (arg0);
8127 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8132 if (flag_unsafe_math_optimizations)
8134 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8136 /* Optimize pow(expN(x),y) = expN(x*y). */
8137 if (BUILTIN_EXPONENT_P (fcode))
8139 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8140 tree arg = CALL_EXPR_ARG (arg0, 0);
8141 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8142 return build_call_expr_loc (loc, expfn, 1, arg);
8145 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8146 if (BUILTIN_SQRT_P (fcode))
8148 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8149 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8150 build_real (type, dconsthalf));
8151 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8154 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8155 if (BUILTIN_CBRT_P (fcode))
8157 tree arg = CALL_EXPR_ARG (arg0, 0);
8158 if (tree_expr_nonnegative_p (arg))
8160 const REAL_VALUE_TYPE dconstroot
8161 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8162 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8163 build_real (type, dconstroot));
8164 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8168 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8169 if (fcode == BUILT_IN_POW
8170 || fcode == BUILT_IN_POWF
8171 || fcode == BUILT_IN_POWL)
8173 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8174 if (tree_expr_nonnegative_p (arg00))
8176 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8177 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8178 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8186 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8187 Return NULL_TREE if no simplification can be made. */
8189 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8190 tree arg0, tree arg1, tree type)
8192 if (!validate_arg (arg0, REAL_TYPE)
8193 || !validate_arg (arg1, INTEGER_TYPE))
8196 /* Optimize pow(1.0,y) = 1.0. */
8197 if (real_onep (arg0))
8198 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8200 if (host_integerp (arg1, 0))
8202 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8204 /* Evaluate powi at compile-time. */
8205 if (TREE_CODE (arg0) == REAL_CST
8206 && !TREE_OVERFLOW (arg0))
8209 x = TREE_REAL_CST (arg0);
8210 real_powi (&x, TYPE_MODE (type), &x, c);
8211 return build_real (type, x);
8214 /* Optimize pow(x,0) = 1.0. */
8216 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8219 /* Optimize pow(x,1) = x. */
8223 /* Optimize pow(x,-1) = 1.0/x. */
8225 return fold_build2_loc (loc, RDIV_EXPR, type,
8226 build_real (type, dconst1), arg0);
8232 /* A subroutine of fold_builtin to fold the various exponent
8233 functions. Return NULL_TREE if no simplification can be made.
8234 FUNC is the corresponding MPFR exponent function. */
8237 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8238 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8240 if (validate_arg (arg, REAL_TYPE))
8242 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8245 /* Calculate the result when the argument is a constant. */
8246 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8249 /* Optimize expN(logN(x)) = x. */
8250 if (flag_unsafe_math_optimizations)
8252 const enum built_in_function fcode = builtin_mathfn_code (arg);
8254 if ((func == mpfr_exp
8255 && (fcode == BUILT_IN_LOG
8256 || fcode == BUILT_IN_LOGF
8257 || fcode == BUILT_IN_LOGL))
8258 || (func == mpfr_exp2
8259 && (fcode == BUILT_IN_LOG2
8260 || fcode == BUILT_IN_LOG2F
8261 || fcode == BUILT_IN_LOG2L))
8262 || (func == mpfr_exp10
8263 && (fcode == BUILT_IN_LOG10
8264 || fcode == BUILT_IN_LOG10F
8265 || fcode == BUILT_IN_LOG10L)))
8266 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8273 /* Return true if VAR is a VAR_DECL or a component thereof. */
8276 var_decl_component_p (tree var)
8279 while (handled_component_p (inner))
8280 inner = TREE_OPERAND (inner, 0);
8281 return SSA_VAR_P (inner);
8284 /* Fold function call to builtin memset. Return
8285 NULL_TREE if no simplification can be made. */
8288 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8289 tree type, bool ignore)
8291 tree var, ret, etype;
8292 unsigned HOST_WIDE_INT length, cval;
8294 if (! validate_arg (dest, POINTER_TYPE)
8295 || ! validate_arg (c, INTEGER_TYPE)
8296 || ! validate_arg (len, INTEGER_TYPE))
8299 if (! host_integerp (len, 1))
8302 /* If the LEN parameter is zero, return DEST. */
8303 if (integer_zerop (len))
8304 return omit_one_operand_loc (loc, type, dest, c);
8306 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8311 if (TREE_CODE (var) != ADDR_EXPR)
8314 var = TREE_OPERAND (var, 0);
8315 if (TREE_THIS_VOLATILE (var))
8318 etype = TREE_TYPE (var);
8319 if (TREE_CODE (etype) == ARRAY_TYPE)
8320 etype = TREE_TYPE (etype);
8322 if (!INTEGRAL_TYPE_P (etype)
8323 && !POINTER_TYPE_P (etype))
8326 if (! var_decl_component_p (var))
8329 length = tree_low_cst (len, 1);
8330 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8331 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8335 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8338 if (integer_zerop (c))
8342 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8345 cval = TREE_INT_CST_LOW (c);
8349 cval |= (cval << 31) << 1;
8352 ret = build_int_cst_type (etype, cval);
8353 var = build_fold_indirect_ref_loc (loc,
8354 fold_convert_loc (loc,
8355 build_pointer_type (etype),
8357 ret = build2 (MODIFY_EXPR, etype, var, ret);
8361 return omit_one_operand_loc (loc, type, dest, ret);
8364 /* Fold function call to builtin memset. Return
8365 NULL_TREE if no simplification can be made. */
8368 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8370 if (! validate_arg (dest, POINTER_TYPE)
8371 || ! validate_arg (size, INTEGER_TYPE))
8377 /* New argument list transforming bzero(ptr x, int y) to
8378 memset(ptr x, int 0, size_t y). This is done this way
8379 so that if it isn't expanded inline, we fallback to
8380 calling bzero instead of memset. */
8382 return fold_builtin_memset (loc, dest, integer_zero_node,
8383 fold_convert_loc (loc, sizetype, size),
8384 void_type_node, ignore);
8387 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8388 NULL_TREE if no simplification can be made.
8389 If ENDP is 0, return DEST (like memcpy).
8390 If ENDP is 1, return DEST+LEN (like mempcpy).
8391 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8392 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8396 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8397 tree len, tree type, bool ignore, int endp)
8399 tree destvar, srcvar, expr;
8401 if (! validate_arg (dest, POINTER_TYPE)
8402 || ! validate_arg (src, POINTER_TYPE)
8403 || ! validate_arg (len, INTEGER_TYPE))
8406 /* If the LEN parameter is zero, return DEST. */
8407 if (integer_zerop (len))
8408 return omit_one_operand_loc (loc, type, dest, src);
8410 /* If SRC and DEST are the same (and not volatile), return
8411 DEST{,+LEN,+LEN-1}. */
8412 if (operand_equal_p (src, dest, 0))
8416 tree srctype, desttype;
8417 unsigned int src_align, dest_align;
8422 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8423 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8425 /* Both DEST and SRC must be pointer types.
8426 ??? This is what old code did. Is the testing for pointer types
8429 If either SRC is readonly or length is 1, we can use memcpy. */
8430 if (!dest_align || !src_align)
8432 if (readonly_data_expr (src)
8433 || (host_integerp (len, 1)
8434 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8435 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8437 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8440 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8443 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8444 if (TREE_CODE (src) == ADDR_EXPR
8445 && TREE_CODE (dest) == ADDR_EXPR)
8447 tree src_base, dest_base, fn;
8448 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8449 HOST_WIDE_INT size = -1;
8450 HOST_WIDE_INT maxsize = -1;
8452 srcvar = TREE_OPERAND (src, 0);
8453 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8455 destvar = TREE_OPERAND (dest, 0);
8456 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8458 if (host_integerp (len, 1))
8459 maxsize = tree_low_cst (len, 1);
8462 src_offset /= BITS_PER_UNIT;
8463 dest_offset /= BITS_PER_UNIT;
8464 if (SSA_VAR_P (src_base)
8465 && SSA_VAR_P (dest_base))
8467 if (operand_equal_p (src_base, dest_base, 0)
8468 && ranges_overlap_p (src_offset, maxsize,
8469 dest_offset, maxsize))
8472 else if (TREE_CODE (src_base) == MEM_REF
8473 && TREE_CODE (dest_base) == MEM_REF)
8476 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8477 TREE_OPERAND (dest_base, 0), 0))
8479 off = double_int_add (mem_ref_offset (src_base),
8480 shwi_to_double_int (src_offset));
8481 if (!double_int_fits_in_shwi_p (off))
8483 src_offset = off.low;
8484 off = double_int_add (mem_ref_offset (dest_base),
8485 shwi_to_double_int (dest_offset));
8486 if (!double_int_fits_in_shwi_p (off))
8488 dest_offset = off.low;
8489 if (ranges_overlap_p (src_offset, maxsize,
8490 dest_offset, maxsize))
8496 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8499 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8502 /* If the destination and source do not alias optimize into
8504 if ((is_gimple_min_invariant (dest)
8505 || TREE_CODE (dest) == SSA_NAME)
8506 && (is_gimple_min_invariant (src)
8507 || TREE_CODE (src) == SSA_NAME))
8510 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8511 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8512 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8515 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8518 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8525 if (!host_integerp (len, 0))
8528 This logic lose for arguments like (type *)malloc (sizeof (type)),
8529 since we strip the casts of up to VOID return value from malloc.
8530 Perhaps we ought to inherit type from non-VOID argument here? */
8533 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8534 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8536 tree tem = TREE_OPERAND (src, 0);
8538 if (tem != TREE_OPERAND (src, 0))
8539 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8541 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8543 tree tem = TREE_OPERAND (dest, 0);
8545 if (tem != TREE_OPERAND (dest, 0))
8546 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8548 srctype = TREE_TYPE (TREE_TYPE (src));
8550 && TREE_CODE (srctype) == ARRAY_TYPE
8551 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8553 srctype = TREE_TYPE (srctype);
8555 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8557 desttype = TREE_TYPE (TREE_TYPE (dest));
8559 && TREE_CODE (desttype) == ARRAY_TYPE
8560 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8562 desttype = TREE_TYPE (desttype);
8564 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8566 if (!srctype || !desttype
8567 || TREE_ADDRESSABLE (srctype)
8568 || TREE_ADDRESSABLE (desttype)
8569 || !TYPE_SIZE_UNIT (srctype)
8570 || !TYPE_SIZE_UNIT (desttype)
8571 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8572 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8575 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8576 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8577 if (dest_align < TYPE_ALIGN (desttype)
8578 || src_align < TYPE_ALIGN (srctype))
8582 dest = builtin_save_expr (dest);
8584 /* Build accesses at offset zero with a ref-all character type. */
8585 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8586 ptr_mode, true), 0);
8589 STRIP_NOPS (destvar);
8590 if (TREE_CODE (destvar) == ADDR_EXPR
8591 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8592 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8593 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8595 destvar = NULL_TREE;
8598 STRIP_NOPS (srcvar);
8599 if (TREE_CODE (srcvar) == ADDR_EXPR
8600 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8601 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8604 || src_align >= TYPE_ALIGN (desttype))
8605 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8607 else if (!STRICT_ALIGNMENT)
8609 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8611 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8619 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8622 if (srcvar == NULL_TREE)
8625 if (src_align >= TYPE_ALIGN (desttype))
8626 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8629 if (STRICT_ALIGNMENT)
8631 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8633 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8636 else if (destvar == NULL_TREE)
8639 if (dest_align >= TYPE_ALIGN (srctype))
8640 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8643 if (STRICT_ALIGNMENT)
8645 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8647 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8651 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8657 if (endp == 0 || endp == 3)
8658 return omit_one_operand_loc (loc, type, dest, expr);
8664 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8667 len = fold_convert_loc (loc, sizetype, len);
8668 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8669 dest = fold_convert_loc (loc, type, dest);
8671 dest = omit_one_operand_loc (loc, type, dest, expr);
8675 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8676 If LEN is not NULL, it represents the length of the string to be
8677 copied. Return NULL_TREE if no simplification can be made. */
8680 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8684 if (!validate_arg (dest, POINTER_TYPE)
8685 || !validate_arg (src, POINTER_TYPE))
8688 /* If SRC and DEST are the same (and not volatile), return DEST. */
8689 if (operand_equal_p (src, dest, 0))
8690 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8692 if (optimize_function_for_size_p (cfun))
8695 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8701 len = c_strlen (src, 1);
8702 if (! len || TREE_SIDE_EFFECTS (len))
8706 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8707 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8708 build_call_expr_loc (loc, fn, 3, dest, src, len));
8711 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8712 Return NULL_TREE if no simplification can be made. */
8715 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8717 tree fn, len, lenp1, call, type;
8719 if (!validate_arg (dest, POINTER_TYPE)
8720 || !validate_arg (src, POINTER_TYPE))
8723 len = c_strlen (src, 1);
8725 || TREE_CODE (len) != INTEGER_CST)
8728 if (optimize_function_for_size_p (cfun)
8729 /* If length is zero it's small enough. */
8730 && !integer_zerop (len))
8733 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8737 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8738 /* We use dest twice in building our expression. Save it from
8739 multiple expansions. */
8740 dest = builtin_save_expr (dest);
8741 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8743 type = TREE_TYPE (TREE_TYPE (fndecl));
8744 len = fold_convert_loc (loc, sizetype, len);
8745 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8746 dest = fold_convert_loc (loc, type, dest);
8747 dest = omit_one_operand_loc (loc, type, dest, call);
8751 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8752 If SLEN is not NULL, it represents the length of the source string.
8753 Return NULL_TREE if no simplification can be made. */
8756 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8757 tree src, tree len, tree slen)
8761 if (!validate_arg (dest, POINTER_TYPE)
8762 || !validate_arg (src, POINTER_TYPE)
8763 || !validate_arg (len, INTEGER_TYPE))
8766 /* If the LEN parameter is zero, return DEST. */
8767 if (integer_zerop (len))
8768 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8770 /* We can't compare slen with len as constants below if len is not a
8772 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8776 slen = c_strlen (src, 1);
8778 /* Now, we must be passed a constant src ptr parameter. */
8779 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8782 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8784 /* We do not support simplification of this case, though we do
8785 support it when expanding trees into RTL. */
8786 /* FIXME: generate a call to __builtin_memset. */
8787 if (tree_int_cst_lt (slen, len))
8790 /* OK transform into builtin memcpy. */
8791 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8794 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8795 build_call_expr_loc (loc, fn, 3, dest, src, len));
8798 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8799 arguments to the call, and TYPE is its return type.
8800 Return NULL_TREE if no simplification can be made. */
8803 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8805 if (!validate_arg (arg1, POINTER_TYPE)
8806 || !validate_arg (arg2, INTEGER_TYPE)
8807 || !validate_arg (len, INTEGER_TYPE))
8813 if (TREE_CODE (arg2) != INTEGER_CST
8814 || !host_integerp (len, 1))
8817 p1 = c_getstr (arg1);
8818 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8824 if (target_char_cast (arg2, &c))
8827 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8830 return build_int_cst (TREE_TYPE (arg1), 0);
8832 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8834 return fold_convert_loc (loc, type, tem);
8840 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8841 Return NULL_TREE if no simplification can be made. */
8844 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8846 const char *p1, *p2;
8848 if (!validate_arg (arg1, POINTER_TYPE)
8849 || !validate_arg (arg2, POINTER_TYPE)
8850 || !validate_arg (len, INTEGER_TYPE))
8853 /* If the LEN parameter is zero, return zero. */
8854 if (integer_zerop (len))
8855 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8858 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8859 if (operand_equal_p (arg1, arg2, 0))
8860 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8862 p1 = c_getstr (arg1);
8863 p2 = c_getstr (arg2);
8865 /* If all arguments are constant, and the value of len is not greater
8866 than the lengths of arg1 and arg2, evaluate at compile-time. */
8867 if (host_integerp (len, 1) && p1 && p2
8868 && compare_tree_int (len, strlen (p1) + 1) <= 0
8869 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8871 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8874 return integer_one_node;
8876 return integer_minus_one_node;
8878 return integer_zero_node;
8881 /* If len parameter is one, return an expression corresponding to
8882 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8883 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8885 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8886 tree cst_uchar_ptr_node
8887 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8890 = fold_convert_loc (loc, integer_type_node,
8891 build1 (INDIRECT_REF, cst_uchar_node,
8892 fold_convert_loc (loc,
8896 = fold_convert_loc (loc, integer_type_node,
8897 build1 (INDIRECT_REF, cst_uchar_node,
8898 fold_convert_loc (loc,
8901 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8907 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8908 Return NULL_TREE if no simplification can be made. */
8911 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8913 const char *p1, *p2;
8915 if (!validate_arg (arg1, POINTER_TYPE)
8916 || !validate_arg (arg2, POINTER_TYPE))
8919 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8920 if (operand_equal_p (arg1, arg2, 0))
8921 return integer_zero_node;
8923 p1 = c_getstr (arg1);
8924 p2 = c_getstr (arg2);
8928 const int i = strcmp (p1, p2);
8930 return integer_minus_one_node;
8932 return integer_one_node;
8934 return integer_zero_node;
8937 /* If the second arg is "", return *(const unsigned char*)arg1. */
8938 if (p2 && *p2 == '\0')
8940 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8941 tree cst_uchar_ptr_node
8942 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8944 return fold_convert_loc (loc, integer_type_node,
8945 build1 (INDIRECT_REF, cst_uchar_node,
8946 fold_convert_loc (loc,
8951 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8952 if (p1 && *p1 == '\0')
8954 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8955 tree cst_uchar_ptr_node
8956 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8959 = fold_convert_loc (loc, integer_type_node,
8960 build1 (INDIRECT_REF, cst_uchar_node,
8961 fold_convert_loc (loc,
8964 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8970 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8971 Return NULL_TREE if no simplification can be made. */
8974 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8976 const char *p1, *p2;
8978 if (!validate_arg (arg1, POINTER_TYPE)
8979 || !validate_arg (arg2, POINTER_TYPE)
8980 || !validate_arg (len, INTEGER_TYPE))
8983 /* If the LEN parameter is zero, return zero. */
8984 if (integer_zerop (len))
8985 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8988 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8989 if (operand_equal_p (arg1, arg2, 0))
8990 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8992 p1 = c_getstr (arg1);
8993 p2 = c_getstr (arg2);
8995 if (host_integerp (len, 1) && p1 && p2)
8997 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8999 return integer_one_node;
9001 return integer_minus_one_node;
9003 return integer_zero_node;
9006 /* If the second arg is "", and the length is greater than zero,
9007 return *(const unsigned char*)arg1. */
9008 if (p2 && *p2 == '\0'
9009 && TREE_CODE (len) == INTEGER_CST
9010 && tree_int_cst_sgn (len) == 1)
9012 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9013 tree cst_uchar_ptr_node
9014 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9016 return fold_convert_loc (loc, integer_type_node,
9017 build1 (INDIRECT_REF, cst_uchar_node,
9018 fold_convert_loc (loc,
9023 /* If the first arg is "", and the length is greater than zero,
9024 return -*(const unsigned char*)arg2. */
9025 if (p1 && *p1 == '\0'
9026 && TREE_CODE (len) == INTEGER_CST
9027 && tree_int_cst_sgn (len) == 1)
9029 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9030 tree cst_uchar_ptr_node
9031 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9033 tree temp = fold_convert_loc (loc, integer_type_node,
9034 build1 (INDIRECT_REF, cst_uchar_node,
9035 fold_convert_loc (loc,
9038 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9041 /* If len parameter is one, return an expression corresponding to
9042 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9043 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9045 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9046 tree cst_uchar_ptr_node
9047 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9049 tree ind1 = fold_convert_loc (loc, integer_type_node,
9050 build1 (INDIRECT_REF, cst_uchar_node,
9051 fold_convert_loc (loc,
9054 tree ind2 = fold_convert_loc (loc, integer_type_node,
9055 build1 (INDIRECT_REF, cst_uchar_node,
9056 fold_convert_loc (loc,
9059 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9065 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9066 ARG. Return NULL_TREE if no simplification can be made. */
9069 fold_builtin_signbit (location_t loc, tree arg, tree type)
9071 if (!validate_arg (arg, REAL_TYPE))
9074 /* If ARG is a compile-time constant, determine the result. */
9075 if (TREE_CODE (arg) == REAL_CST
9076 && !TREE_OVERFLOW (arg))
9080 c = TREE_REAL_CST (arg);
9081 return (REAL_VALUE_NEGATIVE (c)
9082 ? build_one_cst (type)
9083 : build_zero_cst (type));
9086 /* If ARG is non-negative, the result is always zero. */
9087 if (tree_expr_nonnegative_p (arg))
9088 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9090 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9091 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9092 return fold_build2_loc (loc, LT_EXPR, type, arg,
9093 build_real (TREE_TYPE (arg), dconst0));
9098 /* Fold function call to builtin copysign, copysignf or copysignl with
9099 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9103 fold_builtin_copysign (location_t loc, tree fndecl,
9104 tree arg1, tree arg2, tree type)
9108 if (!validate_arg (arg1, REAL_TYPE)
9109 || !validate_arg (arg2, REAL_TYPE))
9112 /* copysign(X,X) is X. */
9113 if (operand_equal_p (arg1, arg2, 0))
9114 return fold_convert_loc (loc, type, arg1);
9116 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9117 if (TREE_CODE (arg1) == REAL_CST
9118 && TREE_CODE (arg2) == REAL_CST
9119 && !TREE_OVERFLOW (arg1)
9120 && !TREE_OVERFLOW (arg2))
9122 REAL_VALUE_TYPE c1, c2;
9124 c1 = TREE_REAL_CST (arg1);
9125 c2 = TREE_REAL_CST (arg2);
9126 /* c1.sign := c2.sign. */
9127 real_copysign (&c1, &c2);
9128 return build_real (type, c1);
9131 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9132 Remember to evaluate Y for side-effects. */
9133 if (tree_expr_nonnegative_p (arg2))
9134 return omit_one_operand_loc (loc, type,
9135 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9138 /* Strip sign changing operations for the first argument. */
9139 tem = fold_strip_sign_ops (arg1);
9141 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9146 /* Fold a call to builtin isascii with argument ARG. */
9149 fold_builtin_isascii (location_t loc, tree arg)
9151 if (!validate_arg (arg, INTEGER_TYPE))
9155 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9156 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9157 build_int_cst (NULL_TREE,
9158 ~ (unsigned HOST_WIDE_INT) 0x7f));
9159 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9160 arg, integer_zero_node);
9164 /* Fold a call to builtin toascii with argument ARG. */
9167 fold_builtin_toascii (location_t loc, tree arg)
9169 if (!validate_arg (arg, INTEGER_TYPE))
9172 /* Transform toascii(c) -> (c & 0x7f). */
9173 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9174 build_int_cst (NULL_TREE, 0x7f));
9177 /* Fold a call to builtin isdigit with argument ARG. */
9180 fold_builtin_isdigit (location_t loc, tree arg)
9182 if (!validate_arg (arg, INTEGER_TYPE))
9186 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9187 /* According to the C standard, isdigit is unaffected by locale.
9188 However, it definitely is affected by the target character set. */
9189 unsigned HOST_WIDE_INT target_digit0
9190 = lang_hooks.to_target_charset ('0');
9192 if (target_digit0 == 0)
9195 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9196 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9197 build_int_cst (unsigned_type_node, target_digit0));
9198 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9199 build_int_cst (unsigned_type_node, 9));
9203 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9206 fold_builtin_fabs (location_t loc, tree arg, tree type)
9208 if (!validate_arg (arg, REAL_TYPE))
9211 arg = fold_convert_loc (loc, type, arg);
9212 if (TREE_CODE (arg) == REAL_CST)
9213 return fold_abs_const (arg, type);
9214 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9217 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9220 fold_builtin_abs (location_t loc, tree arg, tree type)
9222 if (!validate_arg (arg, INTEGER_TYPE))
9225 arg = fold_convert_loc (loc, type, arg);
9226 if (TREE_CODE (arg) == INTEGER_CST)
9227 return fold_abs_const (arg, type);
9228 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9231 /* Fold a fma operation with arguments ARG[012]. */
9234 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9235 tree type, tree arg0, tree arg1, tree arg2)
9237 if (TREE_CODE (arg0) == REAL_CST
9238 && TREE_CODE (arg1) == REAL_CST
9239 && TREE_CODE (arg2) == REAL_CST)
9240 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9245 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9248 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9250 if (validate_arg (arg0, REAL_TYPE)
9251 && validate_arg(arg1, REAL_TYPE)
9252 && validate_arg(arg2, REAL_TYPE))
9254 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9258 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9259 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9260 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9265 /* Fold a call to builtin fmin or fmax. */
9268 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9269 tree type, bool max)
9271 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9273 /* Calculate the result when the argument is a constant. */
9274 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9279 /* If either argument is NaN, return the other one. Avoid the
9280 transformation if we get (and honor) a signalling NaN. Using
9281 omit_one_operand() ensures we create a non-lvalue. */
9282 if (TREE_CODE (arg0) == REAL_CST
9283 && real_isnan (&TREE_REAL_CST (arg0))
9284 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9285 || ! TREE_REAL_CST (arg0).signalling))
9286 return omit_one_operand_loc (loc, type, arg1, arg0);
9287 if (TREE_CODE (arg1) == REAL_CST
9288 && real_isnan (&TREE_REAL_CST (arg1))
9289 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9290 || ! TREE_REAL_CST (arg1).signalling))
9291 return omit_one_operand_loc (loc, type, arg0, arg1);
9293 /* Transform fmin/fmax(x,x) -> x. */
9294 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9295 return omit_one_operand_loc (loc, type, arg0, arg1);
9297 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9298 functions to return the numeric arg if the other one is NaN.
9299 These tree codes don't honor that, so only transform if
9300 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9301 handled, so we don't have to worry about it either. */
9302 if (flag_finite_math_only)
9303 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9304 fold_convert_loc (loc, type, arg0),
9305 fold_convert_loc (loc, type, arg1));
9310 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9313 fold_builtin_carg (location_t loc, tree arg, tree type)
9315 if (validate_arg (arg, COMPLEX_TYPE)
9316 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9318 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9322 tree new_arg = builtin_save_expr (arg);
9323 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9324 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9325 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9332 /* Fold a call to builtin logb/ilogb. */
9335 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9337 if (! validate_arg (arg, REAL_TYPE))
9342 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9344 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9350 /* If arg is Inf or NaN and we're logb, return it. */
9351 if (TREE_CODE (rettype) == REAL_TYPE)
9352 return fold_convert_loc (loc, rettype, arg);
9353 /* Fall through... */
9355 /* Zero may set errno and/or raise an exception for logb, also
9356 for ilogb we don't know FP_ILOGB0. */
9359 /* For normal numbers, proceed iff radix == 2. In GCC,
9360 normalized significands are in the range [0.5, 1.0). We
9361 want the exponent as if they were [1.0, 2.0) so get the
9362 exponent and subtract 1. */
9363 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9364 return fold_convert_loc (loc, rettype,
9365 build_int_cst (NULL_TREE,
9366 REAL_EXP (value)-1));
9374 /* Fold a call to builtin significand, if radix == 2. */
9377 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9379 if (! validate_arg (arg, REAL_TYPE))
9384 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9386 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9393 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9394 return fold_convert_loc (loc, rettype, arg);
9396 /* For normal numbers, proceed iff radix == 2. */
9397 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9399 REAL_VALUE_TYPE result = *value;
9400 /* In GCC, normalized significands are in the range [0.5,
9401 1.0). We want them to be [1.0, 2.0) so set the
9403 SET_REAL_EXP (&result, 1);
9404 return build_real (rettype, result);
9413 /* Fold a call to builtin frexp, we can assume the base is 2. */
9416 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9418 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9423 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9426 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9428 /* Proceed if a valid pointer type was passed in. */
9429 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9431 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9437 /* For +-0, return (*exp = 0, +-0). */
9438 exp = integer_zero_node;
9443 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9444 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9447 /* Since the frexp function always expects base 2, and in
9448 GCC normalized significands are already in the range
9449 [0.5, 1.0), we have exactly what frexp wants. */
9450 REAL_VALUE_TYPE frac_rvt = *value;
9451 SET_REAL_EXP (&frac_rvt, 0);
9452 frac = build_real (rettype, frac_rvt);
9453 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9460 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9461 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9462 TREE_SIDE_EFFECTS (arg1) = 1;
9463 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9469 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9470 then we can assume the base is two. If it's false, then we have to
9471 check the mode of the TYPE parameter in certain cases. */
9474 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9475 tree type, bool ldexp)
9477 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9482 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9483 if (real_zerop (arg0) || integer_zerop (arg1)
9484 || (TREE_CODE (arg0) == REAL_CST
9485 && !real_isfinite (&TREE_REAL_CST (arg0))))
9486 return omit_one_operand_loc (loc, type, arg0, arg1);
9488 /* If both arguments are constant, then try to evaluate it. */
9489 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9490 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9491 && host_integerp (arg1, 0))
9493 /* Bound the maximum adjustment to twice the range of the
9494 mode's valid exponents. Use abs to ensure the range is
9495 positive as a sanity check. */
9496 const long max_exp_adj = 2 *
9497 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9498 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9500 /* Get the user-requested adjustment. */
9501 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9503 /* The requested adjustment must be inside this range. This
9504 is a preliminary cap to avoid things like overflow, we
9505 may still fail to compute the result for other reasons. */
9506 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9508 REAL_VALUE_TYPE initial_result;
9510 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9512 /* Ensure we didn't overflow. */
9513 if (! real_isinf (&initial_result))
9515 const REAL_VALUE_TYPE trunc_result
9516 = real_value_truncate (TYPE_MODE (type), initial_result);
9518 /* Only proceed if the target mode can hold the
9520 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9521 return build_real (type, trunc_result);
9530 /* Fold a call to builtin modf. */
9533 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9535 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9540 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9543 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9545 /* Proceed if a valid pointer type was passed in. */
9546 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9548 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9549 REAL_VALUE_TYPE trunc, frac;
9555 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9556 trunc = frac = *value;
9559 /* For +-Inf, return (*arg1 = arg0, +-0). */
9561 frac.sign = value->sign;
9565 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9566 real_trunc (&trunc, VOIDmode, value);
9567 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9568 /* If the original number was negative and already
9569 integral, then the fractional part is -0.0. */
9570 if (value->sign && frac.cl == rvc_zero)
9571 frac.sign = value->sign;
9575 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9576 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9577 build_real (rettype, trunc));
9578 TREE_SIDE_EFFECTS (arg1) = 1;
9579 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9580 build_real (rettype, frac));
9586 /* Given a location LOC, an interclass builtin function decl FNDECL
9587 and its single argument ARG, return an folded expression computing
9588 the same, or NULL_TREE if we either couldn't or didn't want to fold
9589 (the latter happen if there's an RTL instruction available). */
9592 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9594 enum machine_mode mode;
9596 if (!validate_arg (arg, REAL_TYPE))
9599 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9602 mode = TYPE_MODE (TREE_TYPE (arg));
9604 /* If there is no optab, try generic code. */
9605 switch (DECL_FUNCTION_CODE (fndecl))
9609 CASE_FLT_FN (BUILT_IN_ISINF):
9611 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9612 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9613 tree const type = TREE_TYPE (arg);
9617 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9618 real_from_string (&r, buf);
9619 result = build_call_expr (isgr_fn, 2,
9620 fold_build1_loc (loc, ABS_EXPR, type, arg),
9621 build_real (type, r));
9624 CASE_FLT_FN (BUILT_IN_FINITE):
9625 case BUILT_IN_ISFINITE:
9627 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9628 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9629 tree const type = TREE_TYPE (arg);
9633 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9634 real_from_string (&r, buf);
9635 result = build_call_expr (isle_fn, 2,
9636 fold_build1_loc (loc, ABS_EXPR, type, arg),
9637 build_real (type, r));
9638 /*result = fold_build2_loc (loc, UNGT_EXPR,
9639 TREE_TYPE (TREE_TYPE (fndecl)),
9640 fold_build1_loc (loc, ABS_EXPR, type, arg),
9641 build_real (type, r));
9642 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9643 TREE_TYPE (TREE_TYPE (fndecl)),
9647 case BUILT_IN_ISNORMAL:
9649 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9650 islessequal(fabs(x),DBL_MAX). */
9651 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9652 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9653 tree const type = TREE_TYPE (arg);
9654 REAL_VALUE_TYPE rmax, rmin;
9657 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9658 real_from_string (&rmax, buf);
9659 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9660 real_from_string (&rmin, buf);
9661 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9662 result = build_call_expr (isle_fn, 2, arg,
9663 build_real (type, rmax));
9664 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9665 build_call_expr (isge_fn, 2, arg,
9666 build_real (type, rmin)));
9676 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9677 ARG is the argument for the call. */
9680 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9682 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9685 if (!validate_arg (arg, REAL_TYPE))
9688 switch (builtin_index)
9690 case BUILT_IN_ISINF:
9691 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9692 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9694 if (TREE_CODE (arg) == REAL_CST)
9696 r = TREE_REAL_CST (arg);
9697 if (real_isinf (&r))
9698 return real_compare (GT_EXPR, &r, &dconst0)
9699 ? integer_one_node : integer_minus_one_node;
9701 return integer_zero_node;
9706 case BUILT_IN_ISINF_SIGN:
9708 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9709 /* In a boolean context, GCC will fold the inner COND_EXPR to
9710 1. So e.g. "if (isinf_sign(x))" would be folded to just
9711 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9712 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9713 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9714 tree tmp = NULL_TREE;
9716 arg = builtin_save_expr (arg);
9718 if (signbit_fn && isinf_fn)
9720 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9721 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9723 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9724 signbit_call, integer_zero_node);
9725 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9726 isinf_call, integer_zero_node);
9728 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9729 integer_minus_one_node, integer_one_node);
9730 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9738 case BUILT_IN_ISFINITE:
9739 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9740 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9741 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9743 if (TREE_CODE (arg) == REAL_CST)
9745 r = TREE_REAL_CST (arg);
9746 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9751 case BUILT_IN_ISNAN:
9752 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9753 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9755 if (TREE_CODE (arg) == REAL_CST)
9757 r = TREE_REAL_CST (arg);
9758 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9761 arg = builtin_save_expr (arg);
9762 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9769 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9770 This builtin will generate code to return the appropriate floating
9771 point classification depending on the value of the floating point
9772 number passed in. The possible return values must be supplied as
9773 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9774 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9775 one floating point argument which is "type generic". */
9778 fold_builtin_fpclassify (location_t loc, tree exp)
9780 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9781 arg, type, res, tmp;
9782 enum machine_mode mode;
9786 /* Verify the required arguments in the original call. */
9787 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9788 INTEGER_TYPE, INTEGER_TYPE,
9789 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9792 fp_nan = CALL_EXPR_ARG (exp, 0);
9793 fp_infinite = CALL_EXPR_ARG (exp, 1);
9794 fp_normal = CALL_EXPR_ARG (exp, 2);
9795 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9796 fp_zero = CALL_EXPR_ARG (exp, 4);
9797 arg = CALL_EXPR_ARG (exp, 5);
9798 type = TREE_TYPE (arg);
9799 mode = TYPE_MODE (type);
9800 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9804 (fabs(x) == Inf ? FP_INFINITE :
9805 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9806 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9808 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9809 build_real (type, dconst0));
9810 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9811 tmp, fp_zero, fp_subnormal);
9813 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9814 real_from_string (&r, buf);
9815 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9816 arg, build_real (type, r));
9817 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9819 if (HONOR_INFINITIES (mode))
9822 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9823 build_real (type, r));
9824 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9828 if (HONOR_NANS (mode))
9830 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9831 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9837 /* Fold a call to an unordered comparison function such as
9838 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9839 being called and ARG0 and ARG1 are the arguments for the call.
9840 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9841 the opposite of the desired result. UNORDERED_CODE is used
9842 for modes that can hold NaNs and ORDERED_CODE is used for
9846 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9847 enum tree_code unordered_code,
9848 enum tree_code ordered_code)
9850 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9851 enum tree_code code;
9853 enum tree_code code0, code1;
9854 tree cmp_type = NULL_TREE;
9856 type0 = TREE_TYPE (arg0);
9857 type1 = TREE_TYPE (arg1);
9859 code0 = TREE_CODE (type0);
9860 code1 = TREE_CODE (type1);
9862 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9863 /* Choose the wider of two real types. */
9864 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9866 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9868 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9871 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9872 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9874 if (unordered_code == UNORDERED_EXPR)
9876 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9877 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9878 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9881 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9883 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9884 fold_build2_loc (loc, code, type, arg0, arg1));
9887 /* Fold a call to built-in function FNDECL with 0 arguments.
9888 IGNORE is true if the result of the function call is ignored. This
9889 function returns NULL_TREE if no simplification was possible. */
9892 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9895 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9898 CASE_FLT_FN (BUILT_IN_INF):
9899 case BUILT_IN_INFD32:
9900 case BUILT_IN_INFD64:
9901 case BUILT_IN_INFD128:
9902 return fold_builtin_inf (loc, type, true);
9904 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9905 return fold_builtin_inf (loc, type, false);
9907 case BUILT_IN_CLASSIFY_TYPE:
9908 return fold_builtin_classify_type (NULL_TREE);
9916 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9917 IGNORE is true if the result of the function call is ignored. This
9918 function returns NULL_TREE if no simplification was possible. */
9921 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9923 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9924 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9927 case BUILT_IN_CONSTANT_P:
9929 tree val = fold_builtin_constant_p (arg0);
9931 /* Gimplification will pull the CALL_EXPR for the builtin out of
9932 an if condition. When not optimizing, we'll not CSE it back.
9933 To avoid link error types of regressions, return false now. */
9934 if (!val && !optimize)
9935 val = integer_zero_node;
9940 case BUILT_IN_CLASSIFY_TYPE:
9941 return fold_builtin_classify_type (arg0);
9943 case BUILT_IN_STRLEN:
9944 return fold_builtin_strlen (loc, type, arg0);
9946 CASE_FLT_FN (BUILT_IN_FABS):
9947 return fold_builtin_fabs (loc, arg0, type);
9951 case BUILT_IN_LLABS:
9952 case BUILT_IN_IMAXABS:
9953 return fold_builtin_abs (loc, arg0, type);
9955 CASE_FLT_FN (BUILT_IN_CONJ):
9956 if (validate_arg (arg0, COMPLEX_TYPE)
9957 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9958 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9961 CASE_FLT_FN (BUILT_IN_CREAL):
9962 if (validate_arg (arg0, COMPLEX_TYPE)
9963 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9964 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9967 CASE_FLT_FN (BUILT_IN_CIMAG):
9968 if (validate_arg (arg0, COMPLEX_TYPE)
9969 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9970 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9973 CASE_FLT_FN (BUILT_IN_CCOS):
9974 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9976 CASE_FLT_FN (BUILT_IN_CCOSH):
9977 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9979 CASE_FLT_FN (BUILT_IN_CPROJ):
9980 return fold_builtin_cproj(loc, arg0, type);
9982 CASE_FLT_FN (BUILT_IN_CSIN):
9983 if (validate_arg (arg0, COMPLEX_TYPE)
9984 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9985 return do_mpc_arg1 (arg0, type, mpc_sin);
9988 CASE_FLT_FN (BUILT_IN_CSINH):
9989 if (validate_arg (arg0, COMPLEX_TYPE)
9990 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9991 return do_mpc_arg1 (arg0, type, mpc_sinh);
9994 CASE_FLT_FN (BUILT_IN_CTAN):
9995 if (validate_arg (arg0, COMPLEX_TYPE)
9996 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9997 return do_mpc_arg1 (arg0, type, mpc_tan);
10000 CASE_FLT_FN (BUILT_IN_CTANH):
10001 if (validate_arg (arg0, COMPLEX_TYPE)
10002 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10003 return do_mpc_arg1 (arg0, type, mpc_tanh);
10006 CASE_FLT_FN (BUILT_IN_CLOG):
10007 if (validate_arg (arg0, COMPLEX_TYPE)
10008 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10009 return do_mpc_arg1 (arg0, type, mpc_log);
10012 CASE_FLT_FN (BUILT_IN_CSQRT):
10013 if (validate_arg (arg0, COMPLEX_TYPE)
10014 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10015 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10018 CASE_FLT_FN (BUILT_IN_CASIN):
10019 if (validate_arg (arg0, COMPLEX_TYPE)
10020 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10021 return do_mpc_arg1 (arg0, type, mpc_asin);
10024 CASE_FLT_FN (BUILT_IN_CACOS):
10025 if (validate_arg (arg0, COMPLEX_TYPE)
10026 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10027 return do_mpc_arg1 (arg0, type, mpc_acos);
10030 CASE_FLT_FN (BUILT_IN_CATAN):
10031 if (validate_arg (arg0, COMPLEX_TYPE)
10032 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10033 return do_mpc_arg1 (arg0, type, mpc_atan);
10036 CASE_FLT_FN (BUILT_IN_CASINH):
10037 if (validate_arg (arg0, COMPLEX_TYPE)
10038 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10039 return do_mpc_arg1 (arg0, type, mpc_asinh);
10042 CASE_FLT_FN (BUILT_IN_CACOSH):
10043 if (validate_arg (arg0, COMPLEX_TYPE)
10044 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10045 return do_mpc_arg1 (arg0, type, mpc_acosh);
10048 CASE_FLT_FN (BUILT_IN_CATANH):
10049 if (validate_arg (arg0, COMPLEX_TYPE)
10050 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10051 return do_mpc_arg1 (arg0, type, mpc_atanh);
10054 CASE_FLT_FN (BUILT_IN_CABS):
10055 return fold_builtin_cabs (loc, arg0, type, fndecl);
10057 CASE_FLT_FN (BUILT_IN_CARG):
10058 return fold_builtin_carg (loc, arg0, type);
10060 CASE_FLT_FN (BUILT_IN_SQRT):
10061 return fold_builtin_sqrt (loc, arg0, type);
10063 CASE_FLT_FN (BUILT_IN_CBRT):
10064 return fold_builtin_cbrt (loc, arg0, type);
10066 CASE_FLT_FN (BUILT_IN_ASIN):
10067 if (validate_arg (arg0, REAL_TYPE))
10068 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10069 &dconstm1, &dconst1, true);
10072 CASE_FLT_FN (BUILT_IN_ACOS):
10073 if (validate_arg (arg0, REAL_TYPE))
10074 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10075 &dconstm1, &dconst1, true);
10078 CASE_FLT_FN (BUILT_IN_ATAN):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10083 CASE_FLT_FN (BUILT_IN_ASINH):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10088 CASE_FLT_FN (BUILT_IN_ACOSH):
10089 if (validate_arg (arg0, REAL_TYPE))
10090 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10091 &dconst1, NULL, true);
10094 CASE_FLT_FN (BUILT_IN_ATANH):
10095 if (validate_arg (arg0, REAL_TYPE))
10096 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10097 &dconstm1, &dconst1, false);
10100 CASE_FLT_FN (BUILT_IN_SIN):
10101 if (validate_arg (arg0, REAL_TYPE))
10102 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10105 CASE_FLT_FN (BUILT_IN_COS):
10106 return fold_builtin_cos (loc, arg0, type, fndecl);
10108 CASE_FLT_FN (BUILT_IN_TAN):
10109 return fold_builtin_tan (arg0, type);
10111 CASE_FLT_FN (BUILT_IN_CEXP):
10112 return fold_builtin_cexp (loc, arg0, type);
10114 CASE_FLT_FN (BUILT_IN_CEXPI):
10115 if (validate_arg (arg0, REAL_TYPE))
10116 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10119 CASE_FLT_FN (BUILT_IN_SINH):
10120 if (validate_arg (arg0, REAL_TYPE))
10121 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10124 CASE_FLT_FN (BUILT_IN_COSH):
10125 return fold_builtin_cosh (loc, arg0, type, fndecl);
10127 CASE_FLT_FN (BUILT_IN_TANH):
10128 if (validate_arg (arg0, REAL_TYPE))
10129 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10132 CASE_FLT_FN (BUILT_IN_ERF):
10133 if (validate_arg (arg0, REAL_TYPE))
10134 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10137 CASE_FLT_FN (BUILT_IN_ERFC):
10138 if (validate_arg (arg0, REAL_TYPE))
10139 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10142 CASE_FLT_FN (BUILT_IN_TGAMMA):
10143 if (validate_arg (arg0, REAL_TYPE))
10144 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10147 CASE_FLT_FN (BUILT_IN_EXP):
10148 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10150 CASE_FLT_FN (BUILT_IN_EXP2):
10151 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10153 CASE_FLT_FN (BUILT_IN_EXP10):
10154 CASE_FLT_FN (BUILT_IN_POW10):
10155 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10157 CASE_FLT_FN (BUILT_IN_EXPM1):
10158 if (validate_arg (arg0, REAL_TYPE))
10159 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10162 CASE_FLT_FN (BUILT_IN_LOG):
10163 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10165 CASE_FLT_FN (BUILT_IN_LOG2):
10166 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10168 CASE_FLT_FN (BUILT_IN_LOG10):
10169 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10171 CASE_FLT_FN (BUILT_IN_LOG1P):
10172 if (validate_arg (arg0, REAL_TYPE))
10173 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10174 &dconstm1, NULL, false);
10177 CASE_FLT_FN (BUILT_IN_J0):
10178 if (validate_arg (arg0, REAL_TYPE))
10179 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10183 CASE_FLT_FN (BUILT_IN_J1):
10184 if (validate_arg (arg0, REAL_TYPE))
10185 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10189 CASE_FLT_FN (BUILT_IN_Y0):
10190 if (validate_arg (arg0, REAL_TYPE))
10191 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10192 &dconst0, NULL, false);
10195 CASE_FLT_FN (BUILT_IN_Y1):
10196 if (validate_arg (arg0, REAL_TYPE))
10197 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10198 &dconst0, NULL, false);
10201 CASE_FLT_FN (BUILT_IN_NAN):
10202 case BUILT_IN_NAND32:
10203 case BUILT_IN_NAND64:
10204 case BUILT_IN_NAND128:
10205 return fold_builtin_nan (arg0, type, true);
10207 CASE_FLT_FN (BUILT_IN_NANS):
10208 return fold_builtin_nan (arg0, type, false);
10210 CASE_FLT_FN (BUILT_IN_FLOOR):
10211 return fold_builtin_floor (loc, fndecl, arg0);
10213 CASE_FLT_FN (BUILT_IN_CEIL):
10214 return fold_builtin_ceil (loc, fndecl, arg0);
10216 CASE_FLT_FN (BUILT_IN_TRUNC):
10217 return fold_builtin_trunc (loc, fndecl, arg0);
10219 CASE_FLT_FN (BUILT_IN_ROUND):
10220 return fold_builtin_round (loc, fndecl, arg0);
10222 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10223 CASE_FLT_FN (BUILT_IN_RINT):
10224 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10226 CASE_FLT_FN (BUILT_IN_LCEIL):
10227 CASE_FLT_FN (BUILT_IN_LLCEIL):
10228 CASE_FLT_FN (BUILT_IN_LFLOOR):
10229 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10230 CASE_FLT_FN (BUILT_IN_LROUND):
10231 CASE_FLT_FN (BUILT_IN_LLROUND):
10232 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10234 CASE_FLT_FN (BUILT_IN_LRINT):
10235 CASE_FLT_FN (BUILT_IN_LLRINT):
10236 return fold_fixed_mathfn (loc, fndecl, arg0);
10238 case BUILT_IN_BSWAP32:
10239 case BUILT_IN_BSWAP64:
10240 return fold_builtin_bswap (fndecl, arg0);
10242 CASE_INT_FN (BUILT_IN_FFS):
10243 CASE_INT_FN (BUILT_IN_CLZ):
10244 CASE_INT_FN (BUILT_IN_CTZ):
10245 CASE_INT_FN (BUILT_IN_POPCOUNT):
10246 CASE_INT_FN (BUILT_IN_PARITY):
10247 return fold_builtin_bitop (fndecl, arg0);
10249 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10250 return fold_builtin_signbit (loc, arg0, type);
10252 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10253 return fold_builtin_significand (loc, arg0, type);
10255 CASE_FLT_FN (BUILT_IN_ILOGB):
10256 CASE_FLT_FN (BUILT_IN_LOGB):
10257 return fold_builtin_logb (loc, arg0, type);
10259 case BUILT_IN_ISASCII:
10260 return fold_builtin_isascii (loc, arg0);
10262 case BUILT_IN_TOASCII:
10263 return fold_builtin_toascii (loc, arg0);
10265 case BUILT_IN_ISDIGIT:
10266 return fold_builtin_isdigit (loc, arg0);
10268 CASE_FLT_FN (BUILT_IN_FINITE):
10269 case BUILT_IN_FINITED32:
10270 case BUILT_IN_FINITED64:
10271 case BUILT_IN_FINITED128:
10272 case BUILT_IN_ISFINITE:
10274 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10277 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10280 CASE_FLT_FN (BUILT_IN_ISINF):
10281 case BUILT_IN_ISINFD32:
10282 case BUILT_IN_ISINFD64:
10283 case BUILT_IN_ISINFD128:
10285 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10288 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10291 case BUILT_IN_ISNORMAL:
10292 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10294 case BUILT_IN_ISINF_SIGN:
10295 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10297 CASE_FLT_FN (BUILT_IN_ISNAN):
10298 case BUILT_IN_ISNAND32:
10299 case BUILT_IN_ISNAND64:
10300 case BUILT_IN_ISNAND128:
10301 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10303 case BUILT_IN_PRINTF:
10304 case BUILT_IN_PRINTF_UNLOCKED:
10305 case BUILT_IN_VPRINTF:
10306 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10308 case BUILT_IN_FREE:
10309 if (integer_zerop (arg0))
10310 return build_empty_stmt (loc);
10321 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10322 IGNORE is true if the result of the function call is ignored. This
10323 function returns NULL_TREE if no simplification was possible. */
10326 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10328 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10329 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10333 CASE_FLT_FN (BUILT_IN_JN):
10334 if (validate_arg (arg0, INTEGER_TYPE)
10335 && validate_arg (arg1, REAL_TYPE))
10336 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10339 CASE_FLT_FN (BUILT_IN_YN):
10340 if (validate_arg (arg0, INTEGER_TYPE)
10341 && validate_arg (arg1, REAL_TYPE))
10342 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10346 CASE_FLT_FN (BUILT_IN_DREM):
10347 CASE_FLT_FN (BUILT_IN_REMAINDER):
10348 if (validate_arg (arg0, REAL_TYPE)
10349 && validate_arg(arg1, REAL_TYPE))
10350 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10353 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10354 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10355 if (validate_arg (arg0, REAL_TYPE)
10356 && validate_arg(arg1, POINTER_TYPE))
10357 return do_mpfr_lgamma_r (arg0, arg1, type);
10360 CASE_FLT_FN (BUILT_IN_ATAN2):
10361 if (validate_arg (arg0, REAL_TYPE)
10362 && validate_arg(arg1, REAL_TYPE))
10363 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10366 CASE_FLT_FN (BUILT_IN_FDIM):
10367 if (validate_arg (arg0, REAL_TYPE)
10368 && validate_arg(arg1, REAL_TYPE))
10369 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10372 CASE_FLT_FN (BUILT_IN_HYPOT):
10373 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10375 CASE_FLT_FN (BUILT_IN_CPOW):
10376 if (validate_arg (arg0, COMPLEX_TYPE)
10377 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10378 && validate_arg (arg1, COMPLEX_TYPE)
10379 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10380 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10383 CASE_FLT_FN (BUILT_IN_LDEXP):
10384 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10385 CASE_FLT_FN (BUILT_IN_SCALBN):
10386 CASE_FLT_FN (BUILT_IN_SCALBLN):
10387 return fold_builtin_load_exponent (loc, arg0, arg1,
10388 type, /*ldexp=*/false);
10390 CASE_FLT_FN (BUILT_IN_FREXP):
10391 return fold_builtin_frexp (loc, arg0, arg1, type);
10393 CASE_FLT_FN (BUILT_IN_MODF):
10394 return fold_builtin_modf (loc, arg0, arg1, type);
10396 case BUILT_IN_BZERO:
10397 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10399 case BUILT_IN_FPUTS:
10400 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10402 case BUILT_IN_FPUTS_UNLOCKED:
10403 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10405 case BUILT_IN_STRSTR:
10406 return fold_builtin_strstr (loc, arg0, arg1, type);
10408 case BUILT_IN_STRCAT:
10409 return fold_builtin_strcat (loc, arg0, arg1);
10411 case BUILT_IN_STRSPN:
10412 return fold_builtin_strspn (loc, arg0, arg1);
10414 case BUILT_IN_STRCSPN:
10415 return fold_builtin_strcspn (loc, arg0, arg1);
10417 case BUILT_IN_STRCHR:
10418 case BUILT_IN_INDEX:
10419 return fold_builtin_strchr (loc, arg0, arg1, type);
10421 case BUILT_IN_STRRCHR:
10422 case BUILT_IN_RINDEX:
10423 return fold_builtin_strrchr (loc, arg0, arg1, type);
10425 case BUILT_IN_STRCPY:
10426 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10428 case BUILT_IN_STPCPY:
10431 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10435 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10438 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10441 case BUILT_IN_STRCMP:
10442 return fold_builtin_strcmp (loc, arg0, arg1);
10444 case BUILT_IN_STRPBRK:
10445 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10447 case BUILT_IN_EXPECT:
10448 return fold_builtin_expect (loc, arg0, arg1);
10450 CASE_FLT_FN (BUILT_IN_POW):
10451 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10453 CASE_FLT_FN (BUILT_IN_POWI):
10454 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10456 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10457 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10459 CASE_FLT_FN (BUILT_IN_FMIN):
10460 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10462 CASE_FLT_FN (BUILT_IN_FMAX):
10463 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10465 case BUILT_IN_ISGREATER:
10466 return fold_builtin_unordered_cmp (loc, fndecl,
10467 arg0, arg1, UNLE_EXPR, LE_EXPR);
10468 case BUILT_IN_ISGREATEREQUAL:
10469 return fold_builtin_unordered_cmp (loc, fndecl,
10470 arg0, arg1, UNLT_EXPR, LT_EXPR);
10471 case BUILT_IN_ISLESS:
10472 return fold_builtin_unordered_cmp (loc, fndecl,
10473 arg0, arg1, UNGE_EXPR, GE_EXPR);
10474 case BUILT_IN_ISLESSEQUAL:
10475 return fold_builtin_unordered_cmp (loc, fndecl,
10476 arg0, arg1, UNGT_EXPR, GT_EXPR);
10477 case BUILT_IN_ISLESSGREATER:
10478 return fold_builtin_unordered_cmp (loc, fndecl,
10479 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10480 case BUILT_IN_ISUNORDERED:
10481 return fold_builtin_unordered_cmp (loc, fndecl,
10482 arg0, arg1, UNORDERED_EXPR,
10485 /* We do the folding for va_start in the expander. */
10486 case BUILT_IN_VA_START:
10489 case BUILT_IN_SPRINTF:
10490 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10492 case BUILT_IN_OBJECT_SIZE:
10493 return fold_builtin_object_size (arg0, arg1);
10495 case BUILT_IN_PRINTF:
10496 case BUILT_IN_PRINTF_UNLOCKED:
10497 case BUILT_IN_VPRINTF:
10498 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10500 case BUILT_IN_PRINTF_CHK:
10501 case BUILT_IN_VPRINTF_CHK:
10502 if (!validate_arg (arg0, INTEGER_TYPE)
10503 || TREE_SIDE_EFFECTS (arg0))
10506 return fold_builtin_printf (loc, fndecl,
10507 arg1, NULL_TREE, ignore, fcode);
10510 case BUILT_IN_FPRINTF:
10511 case BUILT_IN_FPRINTF_UNLOCKED:
10512 case BUILT_IN_VFPRINTF:
10513 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10522 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10523 and ARG2. IGNORE is true if the result of the function call is ignored.
10524 This function returns NULL_TREE if no simplification was possible. */
10527 fold_builtin_3 (location_t loc, tree fndecl,
10528 tree arg0, tree arg1, tree arg2, bool ignore)
10530 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10531 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10535 CASE_FLT_FN (BUILT_IN_SINCOS):
10536 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10538 CASE_FLT_FN (BUILT_IN_FMA):
10539 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10542 CASE_FLT_FN (BUILT_IN_REMQUO):
10543 if (validate_arg (arg0, REAL_TYPE)
10544 && validate_arg(arg1, REAL_TYPE)
10545 && validate_arg(arg2, POINTER_TYPE))
10546 return do_mpfr_remquo (arg0, arg1, arg2);
10549 case BUILT_IN_MEMSET:
10550 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10552 case BUILT_IN_BCOPY:
10553 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10554 void_type_node, true, /*endp=*/3);
10556 case BUILT_IN_MEMCPY:
10557 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10558 type, ignore, /*endp=*/0);
10560 case BUILT_IN_MEMPCPY:
10561 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10562 type, ignore, /*endp=*/1);
10564 case BUILT_IN_MEMMOVE:
10565 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10566 type, ignore, /*endp=*/3);
10568 case BUILT_IN_STRNCAT:
10569 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10571 case BUILT_IN_STRNCPY:
10572 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10574 case BUILT_IN_STRNCMP:
10575 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10577 case BUILT_IN_MEMCHR:
10578 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10580 case BUILT_IN_BCMP:
10581 case BUILT_IN_MEMCMP:
10582 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10584 case BUILT_IN_SPRINTF:
10585 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10587 case BUILT_IN_STRCPY_CHK:
10588 case BUILT_IN_STPCPY_CHK:
10589 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10592 case BUILT_IN_STRCAT_CHK:
10593 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10595 case BUILT_IN_PRINTF_CHK:
10596 case BUILT_IN_VPRINTF_CHK:
10597 if (!validate_arg (arg0, INTEGER_TYPE)
10598 || TREE_SIDE_EFFECTS (arg0))
10601 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10604 case BUILT_IN_FPRINTF:
10605 case BUILT_IN_FPRINTF_UNLOCKED:
10606 case BUILT_IN_VFPRINTF:
10607 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10610 case BUILT_IN_FPRINTF_CHK:
10611 case BUILT_IN_VFPRINTF_CHK:
10612 if (!validate_arg (arg1, INTEGER_TYPE)
10613 || TREE_SIDE_EFFECTS (arg1))
10616 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10625 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10626 ARG2, and ARG3. IGNORE is true if the result of the function call is
10627 ignored. This function returns NULL_TREE if no simplification was
10631 fold_builtin_4 (location_t loc, tree fndecl,
10632 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10634 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10638 case BUILT_IN_MEMCPY_CHK:
10639 case BUILT_IN_MEMPCPY_CHK:
10640 case BUILT_IN_MEMMOVE_CHK:
10641 case BUILT_IN_MEMSET_CHK:
10642 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10644 DECL_FUNCTION_CODE (fndecl));
10646 case BUILT_IN_STRNCPY_CHK:
10647 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10649 case BUILT_IN_STRNCAT_CHK:
10650 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10652 case BUILT_IN_FPRINTF_CHK:
10653 case BUILT_IN_VFPRINTF_CHK:
10654 if (!validate_arg (arg1, INTEGER_TYPE)
10655 || TREE_SIDE_EFFECTS (arg1))
10658 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10668 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10669 arguments, where NARGS <= 4. IGNORE is true if the result of the
10670 function call is ignored. This function returns NULL_TREE if no
10671 simplification was possible. Note that this only folds builtins with
10672 fixed argument patterns. Foldings that do varargs-to-varargs
10673 transformations, or that match calls with more than 4 arguments,
10674 need to be handled with fold_builtin_varargs instead. */
10676 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10679 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10681 tree ret = NULL_TREE;
10686 ret = fold_builtin_0 (loc, fndecl, ignore);
10689 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10692 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10695 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10698 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10706 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10707 SET_EXPR_LOCATION (ret, loc);
10708 TREE_NO_WARNING (ret) = 1;
10714 /* Builtins with folding operations that operate on "..." arguments
10715 need special handling; we need to store the arguments in a convenient
10716 data structure before attempting any folding. Fortunately there are
10717 only a few builtins that fall into this category. FNDECL is the
10718 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10719 result of the function call is ignored. */
10722 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10723 bool ignore ATTRIBUTE_UNUSED)
10725 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10726 tree ret = NULL_TREE;
10730 case BUILT_IN_SPRINTF_CHK:
10731 case BUILT_IN_VSPRINTF_CHK:
10732 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10735 case BUILT_IN_SNPRINTF_CHK:
10736 case BUILT_IN_VSNPRINTF_CHK:
10737 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10740 case BUILT_IN_FPCLASSIFY:
10741 ret = fold_builtin_fpclassify (loc, exp);
10749 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10750 SET_EXPR_LOCATION (ret, loc);
10751 TREE_NO_WARNING (ret) = 1;
10757 /* Return true if FNDECL shouldn't be folded right now.
10758 If a built-in function has an inline attribute always_inline
10759 wrapper, defer folding it after always_inline functions have
10760 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10761 might not be performed. */
10764 avoid_folding_inline_builtin (tree fndecl)
10766 return (DECL_DECLARED_INLINE_P (fndecl)
10767 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10769 && !cfun->always_inline_functions_inlined
10770 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10773 /* A wrapper function for builtin folding that prevents warnings for
10774 "statement without effect" and the like, caused by removing the
10775 call node earlier than the warning is generated. */
10778 fold_call_expr (location_t loc, tree exp, bool ignore)
10780 tree ret = NULL_TREE;
10781 tree fndecl = get_callee_fndecl (exp);
10783 && TREE_CODE (fndecl) == FUNCTION_DECL
10784 && DECL_BUILT_IN (fndecl)
10785 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10786 yet. Defer folding until we see all the arguments
10787 (after inlining). */
10788 && !CALL_EXPR_VA_ARG_PACK (exp))
10790 int nargs = call_expr_nargs (exp);
10792 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10793 instead last argument is __builtin_va_arg_pack (). Defer folding
10794 even in that case, until arguments are finalized. */
10795 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10797 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10799 && TREE_CODE (fndecl2) == FUNCTION_DECL
10800 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10801 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10805 if (avoid_folding_inline_builtin (fndecl))
10808 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10809 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10810 CALL_EXPR_ARGP (exp), ignore);
10813 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10815 tree *args = CALL_EXPR_ARGP (exp);
10816 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10819 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10827 /* Conveniently construct a function call expression. FNDECL names the
10828 function to be called and N arguments are passed in the array
10832 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10834 tree fntype = TREE_TYPE (fndecl);
10835 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10837 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10840 /* Conveniently construct a function call expression. FNDECL names the
10841 function to be called and the arguments are passed in the vector
10845 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10847 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10848 VEC_address (tree, vec));
10852 /* Conveniently construct a function call expression. FNDECL names the
10853 function to be called, N is the number of arguments, and the "..."
10854 parameters are the argument expressions. */
10857 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10860 tree *argarray = XALLOCAVEC (tree, n);
10864 for (i = 0; i < n; i++)
10865 argarray[i] = va_arg (ap, tree);
10867 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10870 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10871 varargs macros aren't supported by all bootstrap compilers. */
10874 build_call_expr (tree fndecl, int n, ...)
10877 tree *argarray = XALLOCAVEC (tree, n);
10881 for (i = 0; i < n; i++)
10882 argarray[i] = va_arg (ap, tree);
10884 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10887 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10888 N arguments are passed in the array ARGARRAY. */
10891 fold_builtin_call_array (location_t loc, tree type,
10896 tree ret = NULL_TREE;
10899 if (TREE_CODE (fn) == ADDR_EXPR)
10901 tree fndecl = TREE_OPERAND (fn, 0);
10902 if (TREE_CODE (fndecl) == FUNCTION_DECL
10903 && DECL_BUILT_IN (fndecl))
10905 /* If last argument is __builtin_va_arg_pack (), arguments to this
10906 function are not finalized yet. Defer folding until they are. */
10907 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10909 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10911 && TREE_CODE (fndecl2) == FUNCTION_DECL
10912 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10913 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10914 return build_call_array_loc (loc, type, fn, n, argarray);
10916 if (avoid_folding_inline_builtin (fndecl))
10917 return build_call_array_loc (loc, type, fn, n, argarray);
10918 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10920 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10924 return build_call_array_loc (loc, type, fn, n, argarray);
10926 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10928 /* First try the transformations that don't require consing up
10930 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10935 /* If we got this far, we need to build an exp. */
10936 exp = build_call_array_loc (loc, type, fn, n, argarray);
10937 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10938 return ret ? ret : exp;
10942 return build_call_array_loc (loc, type, fn, n, argarray);
10945 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10946 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10947 of arguments in ARGS to be omitted. OLDNARGS is the number of
10948 elements in ARGS. */
10951 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10952 int skip, tree fndecl, int n, va_list newargs)
10954 int nargs = oldnargs - skip + n;
10961 buffer = XALLOCAVEC (tree, nargs);
10962 for (i = 0; i < n; i++)
10963 buffer[i] = va_arg (newargs, tree);
10964 for (j = skip; j < oldnargs; j++, i++)
10965 buffer[i] = args[j];
10968 buffer = args + skip;
10970 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10973 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10974 list ARGS along with N new arguments specified as the "..."
10975 parameters. SKIP is the number of arguments in ARGS to be omitted.
10976 OLDNARGS is the number of elements in ARGS. */
10979 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10980 int skip, tree fndecl, int n, ...)
10986 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10992 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10993 along with N new arguments specified as the "..." parameters. SKIP
10994 is the number of arguments in EXP to be omitted. This function is used
10995 to do varargs-to-varargs transformations. */
10998 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11004 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11005 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11011 /* Validate a single argument ARG against a tree code CODE representing
11015 validate_arg (const_tree arg, enum tree_code code)
11019 else if (code == POINTER_TYPE)
11020 return POINTER_TYPE_P (TREE_TYPE (arg));
11021 else if (code == INTEGER_TYPE)
11022 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11023 return code == TREE_CODE (TREE_TYPE (arg));
11026 /* This function validates the types of a function call argument list
11027 against a specified list of tree_codes. If the last specifier is a 0,
11028 that represents an ellipses, otherwise the last specifier must be a
11031 This is the GIMPLE version of validate_arglist. Eventually we want to
11032 completely convert builtins.c to work from GIMPLEs and the tree based
11033 validate_arglist will then be removed. */
11036 validate_gimple_arglist (const_gimple call, ...)
11038 enum tree_code code;
11044 va_start (ap, call);
11049 code = (enum tree_code) va_arg (ap, int);
11053 /* This signifies an ellipses, any further arguments are all ok. */
11057 /* This signifies an endlink, if no arguments remain, return
11058 true, otherwise return false. */
11059 res = (i == gimple_call_num_args (call));
11062 /* If no parameters remain or the parameter's code does not
11063 match the specified code, return false. Otherwise continue
11064 checking any remaining arguments. */
11065 arg = gimple_call_arg (call, i++);
11066 if (!validate_arg (arg, code))
11073 /* We need gotos here since we can only have one VA_CLOSE in a
11081 /* This function validates the types of a function call argument list
11082 against a specified list of tree_codes. If the last specifier is a 0,
11083 that represents an ellipses, otherwise the last specifier must be a
11087 validate_arglist (const_tree callexpr, ...)
11089 enum tree_code code;
11092 const_call_expr_arg_iterator iter;
11095 va_start (ap, callexpr);
11096 init_const_call_expr_arg_iterator (callexpr, &iter);
11100 code = (enum tree_code) va_arg (ap, int);
11104 /* This signifies an ellipses, any further arguments are all ok. */
11108 /* This signifies an endlink, if no arguments remain, return
11109 true, otherwise return false. */
11110 res = !more_const_call_expr_args_p (&iter);
11113 /* If no parameters remain or the parameter's code does not
11114 match the specified code, return false. Otherwise continue
11115 checking any remaining arguments. */
11116 arg = next_const_call_expr_arg (&iter);
11117 if (!validate_arg (arg, code))
11124 /* We need gotos here since we can only have one VA_CLOSE in a
11132 /* Default target-specific builtin expander that does nothing. */
11135 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11136 rtx target ATTRIBUTE_UNUSED,
11137 rtx subtarget ATTRIBUTE_UNUSED,
11138 enum machine_mode mode ATTRIBUTE_UNUSED,
11139 int ignore ATTRIBUTE_UNUSED)
11144 /* Returns true is EXP represents data that would potentially reside
11145 in a readonly section. */
11148 readonly_data_expr (tree exp)
11152 if (TREE_CODE (exp) != ADDR_EXPR)
11155 exp = get_base_address (TREE_OPERAND (exp, 0));
11159 /* Make sure we call decl_readonly_section only for trees it
11160 can handle (since it returns true for everything it doesn't
11162 if (TREE_CODE (exp) == STRING_CST
11163 || TREE_CODE (exp) == CONSTRUCTOR
11164 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11165 return decl_readonly_section (exp, 0);
11170 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11171 to the call, and TYPE is its return type.
11173 Return NULL_TREE if no simplification was possible, otherwise return the
11174 simplified form of the call as a tree.
11176 The simplified form may be a constant or other expression which
11177 computes the same value, but in a more efficient manner (including
11178 calls to other builtin functions).
11180 The call may contain arguments which need to be evaluated, but
11181 which are not useful to determine the result of the call. In
11182 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11183 COMPOUND_EXPR will be an argument which must be evaluated.
11184 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11185 COMPOUND_EXPR in the chain will contain the tree for the simplified
11186 form of the builtin function call. */
11189 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11191 if (!validate_arg (s1, POINTER_TYPE)
11192 || !validate_arg (s2, POINTER_TYPE))
11197 const char *p1, *p2;
11199 p2 = c_getstr (s2);
11203 p1 = c_getstr (s1);
11206 const char *r = strstr (p1, p2);
11210 return build_int_cst (TREE_TYPE (s1), 0);
11212 /* Return an offset into the constant string argument. */
11213 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11214 s1, size_int (r - p1));
11215 return fold_convert_loc (loc, type, tem);
11218 /* The argument is const char *, and the result is char *, so we need
11219 a type conversion here to avoid a warning. */
11221 return fold_convert_loc (loc, type, s1);
11226 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11230 /* New argument list transforming strstr(s1, s2) to
11231 strchr(s1, s2[0]). */
11232 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11236 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11237 the call, and TYPE is its return type.
11239 Return NULL_TREE if no simplification was possible, otherwise return the
11240 simplified form of the call as a tree.
11242 The simplified form may be a constant or other expression which
11243 computes the same value, but in a more efficient manner (including
11244 calls to other builtin functions).
11246 The call may contain arguments which need to be evaluated, but
11247 which are not useful to determine the result of the call. In
11248 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11249 COMPOUND_EXPR will be an argument which must be evaluated.
11250 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11251 COMPOUND_EXPR in the chain will contain the tree for the simplified
11252 form of the builtin function call. */
11255 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11257 if (!validate_arg (s1, POINTER_TYPE)
11258 || !validate_arg (s2, INTEGER_TYPE))
11264 if (TREE_CODE (s2) != INTEGER_CST)
11267 p1 = c_getstr (s1);
11274 if (target_char_cast (s2, &c))
11277 r = strchr (p1, c);
11280 return build_int_cst (TREE_TYPE (s1), 0);
11282 /* Return an offset into the constant string argument. */
11283 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11284 s1, size_int (r - p1));
11285 return fold_convert_loc (loc, type, tem);
11291 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11292 the call, and TYPE is its return type.
11294 Return NULL_TREE if no simplification was possible, otherwise return the
11295 simplified form of the call as a tree.
11297 The simplified form may be a constant or other expression which
11298 computes the same value, but in a more efficient manner (including
11299 calls to other builtin functions).
11301 The call may contain arguments which need to be evaluated, but
11302 which are not useful to determine the result of the call. In
11303 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11304 COMPOUND_EXPR will be an argument which must be evaluated.
11305 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11306 COMPOUND_EXPR in the chain will contain the tree for the simplified
11307 form of the builtin function call. */
11310 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11312 if (!validate_arg (s1, POINTER_TYPE)
11313 || !validate_arg (s2, INTEGER_TYPE))
11320 if (TREE_CODE (s2) != INTEGER_CST)
11323 p1 = c_getstr (s1);
11330 if (target_char_cast (s2, &c))
11333 r = strrchr (p1, c);
11336 return build_int_cst (TREE_TYPE (s1), 0);
11338 /* Return an offset into the constant string argument. */
11339 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11340 s1, size_int (r - p1));
11341 return fold_convert_loc (loc, type, tem);
11344 if (! integer_zerop (s2))
11347 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11351 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11352 return build_call_expr_loc (loc, fn, 2, s1, s2);
11356 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11357 to the call, and TYPE is its return type.
11359 Return NULL_TREE if no simplification was possible, otherwise return the
11360 simplified form of the call as a tree.
11362 The simplified form may be a constant or other expression which
11363 computes the same value, but in a more efficient manner (including
11364 calls to other builtin functions).
11366 The call may contain arguments which need to be evaluated, but
11367 which are not useful to determine the result of the call. In
11368 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11369 COMPOUND_EXPR will be an argument which must be evaluated.
11370 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11371 COMPOUND_EXPR in the chain will contain the tree for the simplified
11372 form of the builtin function call. */
11375 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11377 if (!validate_arg (s1, POINTER_TYPE)
11378 || !validate_arg (s2, POINTER_TYPE))
11383 const char *p1, *p2;
11385 p2 = c_getstr (s2);
11389 p1 = c_getstr (s1);
11392 const char *r = strpbrk (p1, p2);
11396 return build_int_cst (TREE_TYPE (s1), 0);
11398 /* Return an offset into the constant string argument. */
11399 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11400 s1, size_int (r - p1));
11401 return fold_convert_loc (loc, type, tem);
11405 /* strpbrk(x, "") == NULL.
11406 Evaluate and ignore s1 in case it had side-effects. */
11407 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11410 return NULL_TREE; /* Really call strpbrk. */
11412 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11416 /* New argument list transforming strpbrk(s1, s2) to
11417 strchr(s1, s2[0]). */
11418 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11422 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11425 Return NULL_TREE if no simplification was possible, otherwise return the
11426 simplified form of the call as a tree.
11428 The simplified form may be a constant or other expression which
11429 computes the same value, but in a more efficient manner (including
11430 calls to other builtin functions).
11432 The call may contain arguments which need to be evaluated, but
11433 which are not useful to determine the result of the call. In
11434 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11435 COMPOUND_EXPR will be an argument which must be evaluated.
11436 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11437 COMPOUND_EXPR in the chain will contain the tree for the simplified
11438 form of the builtin function call. */
11441 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11443 if (!validate_arg (dst, POINTER_TYPE)
11444 || !validate_arg (src, POINTER_TYPE))
11448 const char *p = c_getstr (src);
11450 /* If the string length is zero, return the dst parameter. */
11451 if (p && *p == '\0')
11454 if (optimize_insn_for_speed_p ())
11456 /* See if we can store by pieces into (dst + strlen(dst)). */
11458 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11459 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11461 if (!strlen_fn || !strcpy_fn)
11464 /* If we don't have a movstr we don't want to emit an strcpy
11465 call. We have to do that if the length of the source string
11466 isn't computable (in that case we can use memcpy probably
11467 later expanding to a sequence of mov instructions). If we
11468 have movstr instructions we can emit strcpy calls. */
11471 tree len = c_strlen (src, 1);
11472 if (! len || TREE_SIDE_EFFECTS (len))
11476 /* Stabilize the argument list. */
11477 dst = builtin_save_expr (dst);
11479 /* Create strlen (dst). */
11480 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11481 /* Create (dst p+ strlen (dst)). */
11483 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11484 TREE_TYPE (dst), dst, newdst);
11485 newdst = builtin_save_expr (newdst);
11487 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11488 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11494 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11495 arguments to the call.
11497 Return NULL_TREE if no simplification was possible, otherwise return the
11498 simplified form of the call as a tree.
11500 The simplified form may be a constant or other expression which
11501 computes the same value, but in a more efficient manner (including
11502 calls to other builtin functions).
11504 The call may contain arguments which need to be evaluated, but
11505 which are not useful to determine the result of the call. In
11506 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11507 COMPOUND_EXPR will be an argument which must be evaluated.
11508 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11509 COMPOUND_EXPR in the chain will contain the tree for the simplified
11510 form of the builtin function call. */
11513 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11515 if (!validate_arg (dst, POINTER_TYPE)
11516 || !validate_arg (src, POINTER_TYPE)
11517 || !validate_arg (len, INTEGER_TYPE))
11521 const char *p = c_getstr (src);
11523 /* If the requested length is zero, or the src parameter string
11524 length is zero, return the dst parameter. */
11525 if (integer_zerop (len) || (p && *p == '\0'))
11526 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11528 /* If the requested len is greater than or equal to the string
11529 length, call strcat. */
11530 if (TREE_CODE (len) == INTEGER_CST && p
11531 && compare_tree_int (len, strlen (p)) >= 0)
11533 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11535 /* If the replacement _DECL isn't initialized, don't do the
11540 return build_call_expr_loc (loc, fn, 2, dst, src);
11546 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11549 Return NULL_TREE if no simplification was possible, otherwise return the
11550 simplified form of the call as a tree.
11552 The simplified form may be a constant or other expression which
11553 computes the same value, but in a more efficient manner (including
11554 calls to other builtin functions).
11556 The call may contain arguments which need to be evaluated, but
11557 which are not useful to determine the result of the call. In
11558 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11559 COMPOUND_EXPR will be an argument which must be evaluated.
11560 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11561 COMPOUND_EXPR in the chain will contain the tree for the simplified
11562 form of the builtin function call. */
11565 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11567 if (!validate_arg (s1, POINTER_TYPE)
11568 || !validate_arg (s2, POINTER_TYPE))
11572 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11574 /* If both arguments are constants, evaluate at compile-time. */
11577 const size_t r = strspn (p1, p2);
11578 return size_int (r);
11581 /* If either argument is "", return NULL_TREE. */
11582 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11583 /* Evaluate and ignore both arguments in case either one has
11585 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11591 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11594 Return NULL_TREE if no simplification was possible, otherwise return the
11595 simplified form of the call as a tree.
11597 The simplified form may be a constant or other expression which
11598 computes the same value, but in a more efficient manner (including
11599 calls to other builtin functions).
11601 The call may contain arguments which need to be evaluated, but
11602 which are not useful to determine the result of the call. In
11603 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11604 COMPOUND_EXPR will be an argument which must be evaluated.
11605 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11606 COMPOUND_EXPR in the chain will contain the tree for the simplified
11607 form of the builtin function call. */
11610 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11612 if (!validate_arg (s1, POINTER_TYPE)
11613 || !validate_arg (s2, POINTER_TYPE))
11617 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11619 /* If both arguments are constants, evaluate at compile-time. */
11622 const size_t r = strcspn (p1, p2);
11623 return size_int (r);
11626 /* If the first argument is "", return NULL_TREE. */
11627 if (p1 && *p1 == '\0')
11629 /* Evaluate and ignore argument s2 in case it has
11631 return omit_one_operand_loc (loc, size_type_node,
11632 size_zero_node, s2);
11635 /* If the second argument is "", return __builtin_strlen(s1). */
11636 if (p2 && *p2 == '\0')
11638 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11640 /* If the replacement _DECL isn't initialized, don't do the
11645 return build_call_expr_loc (loc, fn, 1, s1);
11651 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11652 to the call. IGNORE is true if the value returned
11653 by the builtin will be ignored. UNLOCKED is true is true if this
11654 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11655 the known length of the string. Return NULL_TREE if no simplification
11659 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11660 bool ignore, bool unlocked, tree len)
11662 /* If we're using an unlocked function, assume the other unlocked
11663 functions exist explicitly. */
11664 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11665 : implicit_built_in_decls[BUILT_IN_FPUTC];
11666 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11667 : implicit_built_in_decls[BUILT_IN_FWRITE];
11669 /* If the return value is used, don't do the transformation. */
11673 /* Verify the arguments in the original call. */
11674 if (!validate_arg (arg0, POINTER_TYPE)
11675 || !validate_arg (arg1, POINTER_TYPE))
11679 len = c_strlen (arg0, 0);
11681 /* Get the length of the string passed to fputs. If the length
11682 can't be determined, punt. */
11684 || TREE_CODE (len) != INTEGER_CST)
11687 switch (compare_tree_int (len, 1))
11689 case -1: /* length is 0, delete the call entirely . */
11690 return omit_one_operand_loc (loc, integer_type_node,
11691 integer_zero_node, arg1);;
11693 case 0: /* length is 1, call fputc. */
11695 const char *p = c_getstr (arg0);
11700 return build_call_expr_loc (loc, fn_fputc, 2,
11701 build_int_cst (NULL_TREE, p[0]), arg1);
11707 case 1: /* length is greater than 1, call fwrite. */
11709 /* If optimizing for size keep fputs. */
11710 if (optimize_function_for_size_p (cfun))
11712 /* New argument list transforming fputs(string, stream) to
11713 fwrite(string, 1, len, stream). */
11715 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11716 size_one_node, len, arg1);
11721 gcc_unreachable ();
11726 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11727 produced. False otherwise. This is done so that we don't output the error
11728 or warning twice or three times. */
11731 fold_builtin_next_arg (tree exp, bool va_start_p)
11733 tree fntype = TREE_TYPE (current_function_decl);
11734 int nargs = call_expr_nargs (exp);
11737 if (!stdarg_p (fntype))
11739 error ("%<va_start%> used in function with fixed args");
11745 if (va_start_p && (nargs != 2))
11747 error ("wrong number of arguments to function %<va_start%>");
11750 arg = CALL_EXPR_ARG (exp, 1);
11752 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11753 when we checked the arguments and if needed issued a warning. */
11758 /* Evidently an out of date version of <stdarg.h>; can't validate
11759 va_start's second argument, but can still work as intended. */
11760 warning (0, "%<__builtin_next_arg%> called without an argument");
11763 else if (nargs > 1)
11765 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11768 arg = CALL_EXPR_ARG (exp, 0);
11771 if (TREE_CODE (arg) == SSA_NAME)
11772 arg = SSA_NAME_VAR (arg);
11774 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11775 or __builtin_next_arg (0) the first time we see it, after checking
11776 the arguments and if needed issuing a warning. */
11777 if (!integer_zerop (arg))
11779 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11781 /* Strip off all nops for the sake of the comparison. This
11782 is not quite the same as STRIP_NOPS. It does more.
11783 We must also strip off INDIRECT_EXPR for C++ reference
11785 while (CONVERT_EXPR_P (arg)
11786 || TREE_CODE (arg) == INDIRECT_REF)
11787 arg = TREE_OPERAND (arg, 0);
11788 if (arg != last_parm)
11790 /* FIXME: Sometimes with the tree optimizers we can get the
11791 not the last argument even though the user used the last
11792 argument. We just warn and set the arg to be the last
11793 argument so that we will get wrong-code because of
11795 warning (0, "second parameter of %<va_start%> not last named argument");
11798 /* Undefined by C99 7.15.1.4p4 (va_start):
11799 "If the parameter parmN is declared with the register storage
11800 class, with a function or array type, or with a type that is
11801 not compatible with the type that results after application of
11802 the default argument promotions, the behavior is undefined."
11804 else if (DECL_REGISTER (arg))
11805 warning (0, "undefined behaviour when second parameter of "
11806 "%<va_start%> is declared with %<register%> storage");
11808 /* We want to verify the second parameter just once before the tree
11809 optimizers are run and then avoid keeping it in the tree,
11810 as otherwise we could warn even for correct code like:
11811 void foo (int i, ...)
11812 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11814 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11816 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11822 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11823 ORIG may be null if this is a 2-argument call. We don't attempt to
11824 simplify calls with more than 3 arguments.
11826 Return NULL_TREE if no simplification was possible, otherwise return the
11827 simplified form of the call as a tree. If IGNORED is true, it means that
11828 the caller does not use the returned value of the function. */
11831 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11832 tree orig, int ignored)
11835 const char *fmt_str = NULL;
11837 /* Verify the required arguments in the original call. We deal with two
11838 types of sprintf() calls: 'sprintf (str, fmt)' and
11839 'sprintf (dest, "%s", orig)'. */
11840 if (!validate_arg (dest, POINTER_TYPE)
11841 || !validate_arg (fmt, POINTER_TYPE))
11843 if (orig && !validate_arg (orig, POINTER_TYPE))
11846 /* Check whether the format is a literal string constant. */
11847 fmt_str = c_getstr (fmt);
11848 if (fmt_str == NULL)
11852 retval = NULL_TREE;
11854 if (!init_target_chars ())
11857 /* If the format doesn't contain % args or %%, use strcpy. */
11858 if (strchr (fmt_str, target_percent) == NULL)
11860 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11865 /* Don't optimize sprintf (buf, "abc", ptr++). */
11869 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11870 'format' is known to contain no % formats. */
11871 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11873 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11876 /* If the format is "%s", use strcpy if the result isn't used. */
11877 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11880 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11885 /* Don't crash on sprintf (str1, "%s"). */
11889 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11892 retval = c_strlen (orig, 1);
11893 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11896 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11899 if (call && retval)
11901 retval = fold_convert_loc
11902 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11904 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11910 /* Expand a call EXP to __builtin_object_size. */
11913 expand_builtin_object_size (tree exp)
11916 int object_size_type;
11917 tree fndecl = get_callee_fndecl (exp);
11919 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11921 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11923 expand_builtin_trap ();
11927 ost = CALL_EXPR_ARG (exp, 1);
11930 if (TREE_CODE (ost) != INTEGER_CST
11931 || tree_int_cst_sgn (ost) < 0
11932 || compare_tree_int (ost, 3) > 0)
11934 error ("%Klast argument of %D is not integer constant between 0 and 3",
11936 expand_builtin_trap ();
11940 object_size_type = tree_low_cst (ost, 0);
11942 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11945 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11946 FCODE is the BUILT_IN_* to use.
11947 Return NULL_RTX if we failed; the caller should emit a normal call,
11948 otherwise try to get the result in TARGET, if convenient (and in
11949 mode MODE if that's convenient). */
11952 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11953 enum built_in_function fcode)
11955 tree dest, src, len, size;
11957 if (!validate_arglist (exp,
11959 fcode == BUILT_IN_MEMSET_CHK
11960 ? INTEGER_TYPE : POINTER_TYPE,
11961 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11964 dest = CALL_EXPR_ARG (exp, 0);
11965 src = CALL_EXPR_ARG (exp, 1);
11966 len = CALL_EXPR_ARG (exp, 2);
11967 size = CALL_EXPR_ARG (exp, 3);
11969 if (! host_integerp (size, 1))
11972 if (host_integerp (len, 1) || integer_all_onesp (size))
11976 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11978 warning_at (tree_nonartificial_location (exp),
11979 0, "%Kcall to %D will always overflow destination buffer",
11980 exp, get_callee_fndecl (exp));
11985 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11986 mem{cpy,pcpy,move,set} is available. */
11989 case BUILT_IN_MEMCPY_CHK:
11990 fn = built_in_decls[BUILT_IN_MEMCPY];
11992 case BUILT_IN_MEMPCPY_CHK:
11993 fn = built_in_decls[BUILT_IN_MEMPCPY];
11995 case BUILT_IN_MEMMOVE_CHK:
11996 fn = built_in_decls[BUILT_IN_MEMMOVE];
11998 case BUILT_IN_MEMSET_CHK:
11999 fn = built_in_decls[BUILT_IN_MEMSET];
12008 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12009 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12010 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12011 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12013 else if (fcode == BUILT_IN_MEMSET_CHK)
12017 unsigned int dest_align
12018 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12020 /* If DEST is not a pointer type, call the normal function. */
12021 if (dest_align == 0)
12024 /* If SRC and DEST are the same (and not volatile), do nothing. */
12025 if (operand_equal_p (src, dest, 0))
12029 if (fcode != BUILT_IN_MEMPCPY_CHK)
12031 /* Evaluate and ignore LEN in case it has side-effects. */
12032 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12033 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12036 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12037 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12040 /* __memmove_chk special case. */
12041 if (fcode == BUILT_IN_MEMMOVE_CHK)
12043 unsigned int src_align
12044 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12046 if (src_align == 0)
12049 /* If src is categorized for a readonly section we can use
12050 normal __memcpy_chk. */
12051 if (readonly_data_expr (src))
12053 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12056 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12057 dest, src, len, size);
12058 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12059 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12060 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12067 /* Emit warning if a buffer overflow is detected at compile time. */
12070 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12074 location_t loc = tree_nonartificial_location (exp);
12078 case BUILT_IN_STRCPY_CHK:
12079 case BUILT_IN_STPCPY_CHK:
12080 /* For __strcat_chk the warning will be emitted only if overflowing
12081 by at least strlen (dest) + 1 bytes. */
12082 case BUILT_IN_STRCAT_CHK:
12083 len = CALL_EXPR_ARG (exp, 1);
12084 size = CALL_EXPR_ARG (exp, 2);
12087 case BUILT_IN_STRNCAT_CHK:
12088 case BUILT_IN_STRNCPY_CHK:
12089 len = CALL_EXPR_ARG (exp, 2);
12090 size = CALL_EXPR_ARG (exp, 3);
12092 case BUILT_IN_SNPRINTF_CHK:
12093 case BUILT_IN_VSNPRINTF_CHK:
12094 len = CALL_EXPR_ARG (exp, 1);
12095 size = CALL_EXPR_ARG (exp, 3);
12098 gcc_unreachable ();
12104 if (! host_integerp (size, 1) || integer_all_onesp (size))
12109 len = c_strlen (len, 1);
12110 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12113 else if (fcode == BUILT_IN_STRNCAT_CHK)
12115 tree src = CALL_EXPR_ARG (exp, 1);
12116 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12118 src = c_strlen (src, 1);
12119 if (! src || ! host_integerp (src, 1))
12121 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12122 exp, get_callee_fndecl (exp));
12125 else if (tree_int_cst_lt (src, size))
12128 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12131 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12132 exp, get_callee_fndecl (exp));
12135 /* Emit warning if a buffer overflow is detected at compile time
12136 in __sprintf_chk/__vsprintf_chk calls. */
12139 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12141 tree size, len, fmt;
12142 const char *fmt_str;
12143 int nargs = call_expr_nargs (exp);
12145 /* Verify the required arguments in the original call. */
12149 size = CALL_EXPR_ARG (exp, 2);
12150 fmt = CALL_EXPR_ARG (exp, 3);
12152 if (! host_integerp (size, 1) || integer_all_onesp (size))
12155 /* Check whether the format is a literal string constant. */
12156 fmt_str = c_getstr (fmt);
12157 if (fmt_str == NULL)
12160 if (!init_target_chars ())
12163 /* If the format doesn't contain % args or %%, we know its size. */
12164 if (strchr (fmt_str, target_percent) == 0)
12165 len = build_int_cstu (size_type_node, strlen (fmt_str));
12166 /* If the format is "%s" and first ... argument is a string literal,
12168 else if (fcode == BUILT_IN_SPRINTF_CHK
12169 && strcmp (fmt_str, target_percent_s) == 0)
12175 arg = CALL_EXPR_ARG (exp, 4);
12176 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12179 len = c_strlen (arg, 1);
12180 if (!len || ! host_integerp (len, 1))
12186 if (! tree_int_cst_lt (len, size))
12187 warning_at (tree_nonartificial_location (exp),
12188 0, "%Kcall to %D will always overflow destination buffer",
12189 exp, get_callee_fndecl (exp));
12192 /* Emit warning if a free is called with address of a variable. */
12195 maybe_emit_free_warning (tree exp)
12197 tree arg = CALL_EXPR_ARG (exp, 0);
12200 if (TREE_CODE (arg) != ADDR_EXPR)
12203 arg = get_base_address (TREE_OPERAND (arg, 0));
12204 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12207 if (SSA_VAR_P (arg))
12208 warning_at (tree_nonartificial_location (exp),
12209 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12211 warning_at (tree_nonartificial_location (exp),
12212 0, "%Kattempt to free a non-heap object", exp);
12215 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12219 fold_builtin_object_size (tree ptr, tree ost)
12221 unsigned HOST_WIDE_INT bytes;
12222 int object_size_type;
12224 if (!validate_arg (ptr, POINTER_TYPE)
12225 || !validate_arg (ost, INTEGER_TYPE))
12230 if (TREE_CODE (ost) != INTEGER_CST
12231 || tree_int_cst_sgn (ost) < 0
12232 || compare_tree_int (ost, 3) > 0)
12235 object_size_type = tree_low_cst (ost, 0);
12237 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12238 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12239 and (size_t) 0 for types 2 and 3. */
12240 if (TREE_SIDE_EFFECTS (ptr))
12241 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12243 if (TREE_CODE (ptr) == ADDR_EXPR)
12245 bytes = compute_builtin_object_size (ptr, object_size_type);
12246 if (double_int_fits_to_tree_p (size_type_node,
12247 uhwi_to_double_int (bytes)))
12248 return build_int_cstu (size_type_node, bytes);
12250 else if (TREE_CODE (ptr) == SSA_NAME)
12252 /* If object size is not known yet, delay folding until
12253 later. Maybe subsequent passes will help determining
12255 bytes = compute_builtin_object_size (ptr, object_size_type);
12256 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12257 && double_int_fits_to_tree_p (size_type_node,
12258 uhwi_to_double_int (bytes)))
12259 return build_int_cstu (size_type_node, bytes);
12265 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12266 DEST, SRC, LEN, and SIZE are the arguments to the call.
12267 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12268 code of the builtin. If MAXLEN is not NULL, it is maximum length
12269 passed as third argument. */
12272 fold_builtin_memory_chk (location_t loc, tree fndecl,
12273 tree dest, tree src, tree len, tree size,
12274 tree maxlen, bool ignore,
12275 enum built_in_function fcode)
12279 if (!validate_arg (dest, POINTER_TYPE)
12280 || !validate_arg (src,
12281 (fcode == BUILT_IN_MEMSET_CHK
12282 ? INTEGER_TYPE : POINTER_TYPE))
12283 || !validate_arg (len, INTEGER_TYPE)
12284 || !validate_arg (size, INTEGER_TYPE))
12287 /* If SRC and DEST are the same (and not volatile), return DEST
12288 (resp. DEST+LEN for __mempcpy_chk). */
12289 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12291 if (fcode != BUILT_IN_MEMPCPY_CHK)
12292 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12296 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12298 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12302 if (! host_integerp (size, 1))
12305 if (! integer_all_onesp (size))
12307 if (! host_integerp (len, 1))
12309 /* If LEN is not constant, try MAXLEN too.
12310 For MAXLEN only allow optimizing into non-_ocs function
12311 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12312 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12314 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12316 /* (void) __mempcpy_chk () can be optimized into
12317 (void) __memcpy_chk (). */
12318 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12322 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12330 if (tree_int_cst_lt (size, maxlen))
12335 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12336 mem{cpy,pcpy,move,set} is available. */
12339 case BUILT_IN_MEMCPY_CHK:
12340 fn = built_in_decls[BUILT_IN_MEMCPY];
12342 case BUILT_IN_MEMPCPY_CHK:
12343 fn = built_in_decls[BUILT_IN_MEMPCPY];
12345 case BUILT_IN_MEMMOVE_CHK:
12346 fn = built_in_decls[BUILT_IN_MEMMOVE];
12348 case BUILT_IN_MEMSET_CHK:
12349 fn = built_in_decls[BUILT_IN_MEMSET];
12358 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12361 /* Fold a call to the __st[rp]cpy_chk builtin.
12362 DEST, SRC, and SIZE are the arguments to the call.
12363 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12364 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12365 strings passed as second argument. */
12368 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12369 tree src, tree size,
12370 tree maxlen, bool ignore,
12371 enum built_in_function fcode)
12375 if (!validate_arg (dest, POINTER_TYPE)
12376 || !validate_arg (src, POINTER_TYPE)
12377 || !validate_arg (size, INTEGER_TYPE))
12380 /* If SRC and DEST are the same (and not volatile), return DEST. */
12381 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12382 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12384 if (! host_integerp (size, 1))
12387 if (! integer_all_onesp (size))
12389 len = c_strlen (src, 1);
12390 if (! len || ! host_integerp (len, 1))
12392 /* If LEN is not constant, try MAXLEN too.
12393 For MAXLEN only allow optimizing into non-_ocs function
12394 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12395 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12397 if (fcode == BUILT_IN_STPCPY_CHK)
12402 /* If return value of __stpcpy_chk is ignored,
12403 optimize into __strcpy_chk. */
12404 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12408 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12411 if (! len || TREE_SIDE_EFFECTS (len))
12414 /* If c_strlen returned something, but not a constant,
12415 transform __strcpy_chk into __memcpy_chk. */
12416 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12420 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12421 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12422 build_call_expr_loc (loc, fn, 4,
12423 dest, src, len, size));
12429 if (! tree_int_cst_lt (maxlen, size))
12433 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12434 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12435 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12439 return build_call_expr_loc (loc, fn, 2, dest, src);
12442 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12443 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12444 length passed as third argument. */
12447 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12448 tree len, tree size, tree maxlen)
12452 if (!validate_arg (dest, POINTER_TYPE)
12453 || !validate_arg (src, POINTER_TYPE)
12454 || !validate_arg (len, INTEGER_TYPE)
12455 || !validate_arg (size, INTEGER_TYPE))
12458 if (! host_integerp (size, 1))
12461 if (! integer_all_onesp (size))
12463 if (! host_integerp (len, 1))
12465 /* If LEN is not constant, try MAXLEN too.
12466 For MAXLEN only allow optimizing into non-_ocs function
12467 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12468 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12474 if (tree_int_cst_lt (size, maxlen))
12478 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12479 fn = built_in_decls[BUILT_IN_STRNCPY];
12483 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12486 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12487 are the arguments to the call. */
12490 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12491 tree src, tree size)
12496 if (!validate_arg (dest, POINTER_TYPE)
12497 || !validate_arg (src, POINTER_TYPE)
12498 || !validate_arg (size, INTEGER_TYPE))
12501 p = c_getstr (src);
12502 /* If the SRC parameter is "", return DEST. */
12503 if (p && *p == '\0')
12504 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12506 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12509 /* If __builtin_strcat_chk is used, assume strcat is available. */
12510 fn = built_in_decls[BUILT_IN_STRCAT];
12514 return build_call_expr_loc (loc, fn, 2, dest, src);
12517 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12521 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12522 tree dest, tree src, tree len, tree size)
12527 if (!validate_arg (dest, POINTER_TYPE)
12528 || !validate_arg (src, POINTER_TYPE)
12529 || !validate_arg (size, INTEGER_TYPE)
12530 || !validate_arg (size, INTEGER_TYPE))
12533 p = c_getstr (src);
12534 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12535 if (p && *p == '\0')
12536 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12537 else if (integer_zerop (len))
12538 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12540 if (! host_integerp (size, 1))
12543 if (! integer_all_onesp (size))
12545 tree src_len = c_strlen (src, 1);
12547 && host_integerp (src_len, 1)
12548 && host_integerp (len, 1)
12549 && ! tree_int_cst_lt (len, src_len))
12551 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12552 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12556 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12561 /* If __builtin_strncat_chk is used, assume strncat is available. */
12562 fn = built_in_decls[BUILT_IN_STRNCAT];
12566 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12569 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12570 Return NULL_TREE if a normal call should be emitted rather than
12571 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12572 or BUILT_IN_VSPRINTF_CHK. */
12575 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12576 enum built_in_function fcode)
12578 tree dest, size, len, fn, fmt, flag;
12579 const char *fmt_str;
12581 /* Verify the required arguments in the original call. */
12585 if (!validate_arg (dest, POINTER_TYPE))
12588 if (!validate_arg (flag, INTEGER_TYPE))
12591 if (!validate_arg (size, INTEGER_TYPE))
12594 if (!validate_arg (fmt, POINTER_TYPE))
12597 if (! host_integerp (size, 1))
12602 if (!init_target_chars ())
12605 /* Check whether the format is a literal string constant. */
12606 fmt_str = c_getstr (fmt);
12607 if (fmt_str != NULL)
12609 /* If the format doesn't contain % args or %%, we know the size. */
12610 if (strchr (fmt_str, target_percent) == 0)
12612 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12613 len = build_int_cstu (size_type_node, strlen (fmt_str));
12615 /* If the format is "%s" and first ... argument is a string literal,
12616 we know the size too. */
12617 else if (fcode == BUILT_IN_SPRINTF_CHK
12618 && strcmp (fmt_str, target_percent_s) == 0)
12625 if (validate_arg (arg, POINTER_TYPE))
12627 len = c_strlen (arg, 1);
12628 if (! len || ! host_integerp (len, 1))
12635 if (! integer_all_onesp (size))
12637 if (! len || ! tree_int_cst_lt (len, size))
12641 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12642 or if format doesn't contain % chars or is "%s". */
12643 if (! integer_zerop (flag))
12645 if (fmt_str == NULL)
12647 if (strchr (fmt_str, target_percent) != NULL
12648 && strcmp (fmt_str, target_percent_s))
12652 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12653 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12654 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12658 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12661 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12662 a normal call should be emitted rather than expanding the function
12663 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12666 fold_builtin_sprintf_chk (location_t loc, tree exp,
12667 enum built_in_function fcode)
12669 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12670 CALL_EXPR_ARGP (exp), fcode);
12673 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12674 NULL_TREE if a normal call should be emitted rather than expanding
12675 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12676 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12677 passed as second argument. */
12680 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12681 tree maxlen, enum built_in_function fcode)
12683 tree dest, size, len, fn, fmt, flag;
12684 const char *fmt_str;
12686 /* Verify the required arguments in the original call. */
12690 if (!validate_arg (dest, POINTER_TYPE))
12693 if (!validate_arg (len, INTEGER_TYPE))
12696 if (!validate_arg (flag, INTEGER_TYPE))
12699 if (!validate_arg (size, INTEGER_TYPE))
12702 if (!validate_arg (fmt, POINTER_TYPE))
12705 if (! host_integerp (size, 1))
12708 if (! integer_all_onesp (size))
12710 if (! host_integerp (len, 1))
12712 /* If LEN is not constant, try MAXLEN too.
12713 For MAXLEN only allow optimizing into non-_ocs function
12714 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12715 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12721 if (tree_int_cst_lt (size, maxlen))
12725 if (!init_target_chars ())
12728 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12729 or if format doesn't contain % chars or is "%s". */
12730 if (! integer_zerop (flag))
12732 fmt_str = c_getstr (fmt);
12733 if (fmt_str == NULL)
12735 if (strchr (fmt_str, target_percent) != NULL
12736 && strcmp (fmt_str, target_percent_s))
12740 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12742 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12743 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12747 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12750 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12751 a normal call should be emitted rather than expanding the function
12752 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12753 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12754 passed as second argument. */
12757 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12758 enum built_in_function fcode)
12760 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12761 CALL_EXPR_ARGP (exp), maxlen, fcode);
12764 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12765 FMT and ARG are the arguments to the call; we don't fold cases with
12766 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12768 Return NULL_TREE if no simplification was possible, otherwise return the
12769 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12770 code of the function to be simplified. */
12773 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12774 tree arg, bool ignore,
12775 enum built_in_function fcode)
12777 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12778 const char *fmt_str = NULL;
12780 /* If the return value is used, don't do the transformation. */
12784 /* Verify the required arguments in the original call. */
12785 if (!validate_arg (fmt, POINTER_TYPE))
12788 /* Check whether the format is a literal string constant. */
12789 fmt_str = c_getstr (fmt);
12790 if (fmt_str == NULL)
12793 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12795 /* If we're using an unlocked function, assume the other
12796 unlocked functions exist explicitly. */
12797 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12798 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12802 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12803 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12806 if (!init_target_chars ())
12809 if (strcmp (fmt_str, target_percent_s) == 0
12810 || strchr (fmt_str, target_percent) == NULL)
12814 if (strcmp (fmt_str, target_percent_s) == 0)
12816 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12819 if (!arg || !validate_arg (arg, POINTER_TYPE))
12822 str = c_getstr (arg);
12828 /* The format specifier doesn't contain any '%' characters. */
12829 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12835 /* If the string was "", printf does nothing. */
12836 if (str[0] == '\0')
12837 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12839 /* If the string has length of 1, call putchar. */
12840 if (str[1] == '\0')
12842 /* Given printf("c"), (where c is any one character,)
12843 convert "c"[0] to an int and pass that to the replacement
12845 newarg = build_int_cst (NULL_TREE, str[0]);
12847 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12851 /* If the string was "string\n", call puts("string"). */
12852 size_t len = strlen (str);
12853 if ((unsigned char)str[len - 1] == target_newline
12854 && (size_t) (int) len == len
12858 tree offset_node, string_cst;
12860 /* Create a NUL-terminated string that's one char shorter
12861 than the original, stripping off the trailing '\n'. */
12862 newarg = build_string_literal (len, str);
12863 string_cst = string_constant (newarg, &offset_node);
12864 gcc_checking_assert (string_cst
12865 && (TREE_STRING_LENGTH (string_cst)
12867 && integer_zerop (offset_node)
12869 TREE_STRING_POINTER (string_cst)[len - 1]
12870 == target_newline);
12871 /* build_string_literal creates a new STRING_CST,
12872 modify it in place to avoid double copying. */
12873 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12874 newstr[len - 1] = '\0';
12876 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12879 /* We'd like to arrange to call fputs(string,stdout) here,
12880 but we need stdout and don't have a way to get it yet. */
12885 /* The other optimizations can be done only on the non-va_list variants. */
12886 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12889 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12890 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12892 if (!arg || !validate_arg (arg, POINTER_TYPE))
12895 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12898 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12899 else if (strcmp (fmt_str, target_percent_c) == 0)
12901 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12904 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12910 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12913 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12914 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12915 more than 3 arguments, and ARG may be null in the 2-argument case.
12917 Return NULL_TREE if no simplification was possible, otherwise return the
12918 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12919 code of the function to be simplified. */
12922 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12923 tree fmt, tree arg, bool ignore,
12924 enum built_in_function fcode)
12926 tree fn_fputc, fn_fputs, call = NULL_TREE;
12927 const char *fmt_str = NULL;
12929 /* If the return value is used, don't do the transformation. */
12933 /* Verify the required arguments in the original call. */
12934 if (!validate_arg (fp, POINTER_TYPE))
12936 if (!validate_arg (fmt, POINTER_TYPE))
12939 /* Check whether the format is a literal string constant. */
12940 fmt_str = c_getstr (fmt);
12941 if (fmt_str == NULL)
12944 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12946 /* If we're using an unlocked function, assume the other
12947 unlocked functions exist explicitly. */
12948 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12949 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12953 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12954 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12957 if (!init_target_chars ())
12960 /* If the format doesn't contain % args or %%, use strcpy. */
12961 if (strchr (fmt_str, target_percent) == NULL)
12963 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12967 /* If the format specifier was "", fprintf does nothing. */
12968 if (fmt_str[0] == '\0')
12970 /* If FP has side-effects, just wait until gimplification is
12972 if (TREE_SIDE_EFFECTS (fp))
12975 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12978 /* When "string" doesn't contain %, replace all cases of
12979 fprintf (fp, string) with fputs (string, fp). The fputs
12980 builtin will take care of special cases like length == 1. */
12982 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12985 /* The other optimizations can be done only on the non-va_list variants. */
12986 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12989 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12990 else if (strcmp (fmt_str, target_percent_s) == 0)
12992 if (!arg || !validate_arg (arg, POINTER_TYPE))
12995 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12998 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12999 else if (strcmp (fmt_str, target_percent_c) == 0)
13001 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13004 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13009 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13012 /* Initialize format string characters in the target charset. */
13015 init_target_chars (void)
13020 target_newline = lang_hooks.to_target_charset ('\n');
13021 target_percent = lang_hooks.to_target_charset ('%');
13022 target_c = lang_hooks.to_target_charset ('c');
13023 target_s = lang_hooks.to_target_charset ('s');
13024 if (target_newline == 0 || target_percent == 0 || target_c == 0
13028 target_percent_c[0] = target_percent;
13029 target_percent_c[1] = target_c;
13030 target_percent_c[2] = '\0';
13032 target_percent_s[0] = target_percent;
13033 target_percent_s[1] = target_s;
13034 target_percent_s[2] = '\0';
13036 target_percent_s_newline[0] = target_percent;
13037 target_percent_s_newline[1] = target_s;
13038 target_percent_s_newline[2] = target_newline;
13039 target_percent_s_newline[3] = '\0';
13046 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13047 and no overflow/underflow occurred. INEXACT is true if M was not
13048 exactly calculated. TYPE is the tree type for the result. This
13049 function assumes that you cleared the MPFR flags and then
13050 calculated M to see if anything subsequently set a flag prior to
13051 entering this function. Return NULL_TREE if any checks fail. */
13054 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13056 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13057 overflow/underflow occurred. If -frounding-math, proceed iff the
13058 result of calling FUNC was exact. */
13059 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13060 && (!flag_rounding_math || !inexact))
13062 REAL_VALUE_TYPE rr;
13064 real_from_mpfr (&rr, m, type, GMP_RNDN);
13065 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13066 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13067 but the mpft_t is not, then we underflowed in the
13069 if (real_isfinite (&rr)
13070 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13072 REAL_VALUE_TYPE rmode;
13074 real_convert (&rmode, TYPE_MODE (type), &rr);
13075 /* Proceed iff the specified mode can hold the value. */
13076 if (real_identical (&rmode, &rr))
13077 return build_real (type, rmode);
13083 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13084 number and no overflow/underflow occurred. INEXACT is true if M
13085 was not exactly calculated. TYPE is the tree type for the result.
13086 This function assumes that you cleared the MPFR flags and then
13087 calculated M to see if anything subsequently set a flag prior to
13088 entering this function. Return NULL_TREE if any checks fail, if
13089 FORCE_CONVERT is true, then bypass the checks. */
13092 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13094 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13095 overflow/underflow occurred. If -frounding-math, proceed iff the
13096 result of calling FUNC was exact. */
13098 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13099 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13100 && (!flag_rounding_math || !inexact)))
13102 REAL_VALUE_TYPE re, im;
13104 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13105 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13106 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13107 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13108 but the mpft_t is not, then we underflowed in the
13111 || (real_isfinite (&re) && real_isfinite (&im)
13112 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13113 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13115 REAL_VALUE_TYPE re_mode, im_mode;
13117 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13118 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13119 /* Proceed iff the specified mode can hold the value. */
13121 || (real_identical (&re_mode, &re)
13122 && real_identical (&im_mode, &im)))
13123 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13124 build_real (TREE_TYPE (type), im_mode));
13130 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13131 FUNC on it and return the resulting value as a tree with type TYPE.
13132 If MIN and/or MAX are not NULL, then the supplied ARG must be
13133 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13134 acceptable values, otherwise they are not. The mpfr precision is
13135 set to the precision of TYPE. We assume that function FUNC returns
13136 zero if the result could be calculated exactly within the requested
13140 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13141 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13144 tree result = NULL_TREE;
13148 /* To proceed, MPFR must exactly represent the target floating point
13149 format, which only happens when the target base equals two. */
13150 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13151 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13153 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13155 if (real_isfinite (ra)
13156 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13157 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13159 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13160 const int prec = fmt->p;
13161 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13165 mpfr_init2 (m, prec);
13166 mpfr_from_real (m, ra, GMP_RNDN);
13167 mpfr_clear_flags ();
13168 inexact = func (m, m, rnd);
13169 result = do_mpfr_ckconv (m, type, inexact);
13177 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13178 FUNC on it and return the resulting value as a tree with type TYPE.
13179 The mpfr precision is set to the precision of TYPE. We assume that
13180 function FUNC returns zero if the result could be calculated
13181 exactly within the requested precision. */
13184 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13185 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13187 tree result = NULL_TREE;
13192 /* To proceed, MPFR must exactly represent the target floating point
13193 format, which only happens when the target base equals two. */
13194 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13195 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13196 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13198 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13199 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13201 if (real_isfinite (ra1) && real_isfinite (ra2))
13203 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13204 const int prec = fmt->p;
13205 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13209 mpfr_inits2 (prec, m1, m2, NULL);
13210 mpfr_from_real (m1, ra1, GMP_RNDN);
13211 mpfr_from_real (m2, ra2, GMP_RNDN);
13212 mpfr_clear_flags ();
13213 inexact = func (m1, m1, m2, rnd);
13214 result = do_mpfr_ckconv (m1, type, inexact);
13215 mpfr_clears (m1, m2, NULL);
13222 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13223 FUNC on it and return the resulting value as a tree with type TYPE.
13224 The mpfr precision is set to the precision of TYPE. We assume that
13225 function FUNC returns zero if the result could be calculated
13226 exactly within the requested precision. */
13229 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13230 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13232 tree result = NULL_TREE;
13238 /* To proceed, MPFR must exactly represent the target floating point
13239 format, which only happens when the target base equals two. */
13240 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13241 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13242 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13243 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13245 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13246 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13247 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13249 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13251 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13252 const int prec = fmt->p;
13253 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13257 mpfr_inits2 (prec, m1, m2, m3, NULL);
13258 mpfr_from_real (m1, ra1, GMP_RNDN);
13259 mpfr_from_real (m2, ra2, GMP_RNDN);
13260 mpfr_from_real (m3, ra3, GMP_RNDN);
13261 mpfr_clear_flags ();
13262 inexact = func (m1, m1, m2, m3, rnd);
13263 result = do_mpfr_ckconv (m1, type, inexact);
13264 mpfr_clears (m1, m2, m3, NULL);
13271 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13272 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13273 If ARG_SINP and ARG_COSP are NULL then the result is returned
13274 as a complex value.
13275 The type is taken from the type of ARG and is used for setting the
13276 precision of the calculation and results. */
13279 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13281 tree const type = TREE_TYPE (arg);
13282 tree result = NULL_TREE;
13286 /* To proceed, MPFR must exactly represent the target floating point
13287 format, which only happens when the target base equals two. */
13288 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13289 && TREE_CODE (arg) == REAL_CST
13290 && !TREE_OVERFLOW (arg))
13292 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13294 if (real_isfinite (ra))
13296 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13297 const int prec = fmt->p;
13298 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13299 tree result_s, result_c;
13303 mpfr_inits2 (prec, m, ms, mc, NULL);
13304 mpfr_from_real (m, ra, GMP_RNDN);
13305 mpfr_clear_flags ();
13306 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13307 result_s = do_mpfr_ckconv (ms, type, inexact);
13308 result_c = do_mpfr_ckconv (mc, type, inexact);
13309 mpfr_clears (m, ms, mc, NULL);
13310 if (result_s && result_c)
13312 /* If we are to return in a complex value do so. */
13313 if (!arg_sinp && !arg_cosp)
13314 return build_complex (build_complex_type (type),
13315 result_c, result_s);
13317 /* Dereference the sin/cos pointer arguments. */
13318 arg_sinp = build_fold_indirect_ref (arg_sinp);
13319 arg_cosp = build_fold_indirect_ref (arg_cosp);
13320 /* Proceed if valid pointer type were passed in. */
13321 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13322 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13324 /* Set the values. */
13325 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13327 TREE_SIDE_EFFECTS (result_s) = 1;
13328 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13330 TREE_SIDE_EFFECTS (result_c) = 1;
13331 /* Combine the assignments into a compound expr. */
13332 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13333 result_s, result_c));
13341 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13342 two-argument mpfr order N Bessel function FUNC on them and return
13343 the resulting value as a tree with type TYPE. The mpfr precision
13344 is set to the precision of TYPE. We assume that function FUNC
13345 returns zero if the result could be calculated exactly within the
13346 requested precision. */
13348 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13349 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13350 const REAL_VALUE_TYPE *min, bool inclusive)
13352 tree result = NULL_TREE;
13357 /* To proceed, MPFR must exactly represent the target floating point
13358 format, which only happens when the target base equals two. */
13359 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13360 && host_integerp (arg1, 0)
13361 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13363 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13364 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13367 && real_isfinite (ra)
13368 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13370 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13371 const int prec = fmt->p;
13372 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13376 mpfr_init2 (m, prec);
13377 mpfr_from_real (m, ra, GMP_RNDN);
13378 mpfr_clear_flags ();
13379 inexact = func (m, n, m, rnd);
13380 result = do_mpfr_ckconv (m, type, inexact);
13388 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13389 the pointer *(ARG_QUO) and return the result. The type is taken
13390 from the type of ARG0 and is used for setting the precision of the
13391 calculation and results. */
13394 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13396 tree const type = TREE_TYPE (arg0);
13397 tree result = NULL_TREE;
13402 /* To proceed, MPFR must exactly represent the target floating point
13403 format, which only happens when the target base equals two. */
13404 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13405 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13406 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13408 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13409 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13411 if (real_isfinite (ra0) && real_isfinite (ra1))
13413 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13414 const int prec = fmt->p;
13415 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13420 mpfr_inits2 (prec, m0, m1, NULL);
13421 mpfr_from_real (m0, ra0, GMP_RNDN);
13422 mpfr_from_real (m1, ra1, GMP_RNDN);
13423 mpfr_clear_flags ();
13424 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13425 /* Remquo is independent of the rounding mode, so pass
13426 inexact=0 to do_mpfr_ckconv(). */
13427 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13428 mpfr_clears (m0, m1, NULL);
13431 /* MPFR calculates quo in the host's long so it may
13432 return more bits in quo than the target int can hold
13433 if sizeof(host long) > sizeof(target int). This can
13434 happen even for native compilers in LP64 mode. In
13435 these cases, modulo the quo value with the largest
13436 number that the target int can hold while leaving one
13437 bit for the sign. */
13438 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13439 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13441 /* Dereference the quo pointer argument. */
13442 arg_quo = build_fold_indirect_ref (arg_quo);
13443 /* Proceed iff a valid pointer type was passed in. */
13444 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13446 /* Set the value. */
13447 tree result_quo = fold_build2 (MODIFY_EXPR,
13448 TREE_TYPE (arg_quo), arg_quo,
13449 build_int_cst (NULL, integer_quo));
13450 TREE_SIDE_EFFECTS (result_quo) = 1;
13451 /* Combine the quo assignment with the rem. */
13452 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13453 result_quo, result_rem));
13461 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13462 resulting value as a tree with type TYPE. The mpfr precision is
13463 set to the precision of TYPE. We assume that this mpfr function
13464 returns zero if the result could be calculated exactly within the
13465 requested precision. In addition, the integer pointer represented
13466 by ARG_SG will be dereferenced and set to the appropriate signgam
13470 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13472 tree result = NULL_TREE;
13476 /* To proceed, MPFR must exactly represent the target floating point
13477 format, which only happens when the target base equals two. Also
13478 verify ARG is a constant and that ARG_SG is an int pointer. */
13479 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13480 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13481 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13482 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13484 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13486 /* In addition to NaN and Inf, the argument cannot be zero or a
13487 negative integer. */
13488 if (real_isfinite (ra)
13489 && ra->cl != rvc_zero
13490 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13492 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13493 const int prec = fmt->p;
13494 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13499 mpfr_init2 (m, prec);
13500 mpfr_from_real (m, ra, GMP_RNDN);
13501 mpfr_clear_flags ();
13502 inexact = mpfr_lgamma (m, &sg, m, rnd);
13503 result_lg = do_mpfr_ckconv (m, type, inexact);
13509 /* Dereference the arg_sg pointer argument. */
13510 arg_sg = build_fold_indirect_ref (arg_sg);
13511 /* Assign the signgam value into *arg_sg. */
13512 result_sg = fold_build2 (MODIFY_EXPR,
13513 TREE_TYPE (arg_sg), arg_sg,
13514 build_int_cst (NULL, sg));
13515 TREE_SIDE_EFFECTS (result_sg) = 1;
13516 /* Combine the signgam assignment with the lgamma result. */
13517 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13518 result_sg, result_lg));
13526 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13527 function FUNC on it and return the resulting value as a tree with
13528 type TYPE. The mpfr precision is set to the precision of TYPE. We
13529 assume that function FUNC returns zero if the result could be
13530 calculated exactly within the requested precision. */
13533 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13535 tree result = NULL_TREE;
13539 /* To proceed, MPFR must exactly represent the target floating point
13540 format, which only happens when the target base equals two. */
13541 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13542 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13543 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13545 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13546 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13548 if (real_isfinite (re) && real_isfinite (im))
13550 const struct real_format *const fmt =
13551 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13552 const int prec = fmt->p;
13553 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13554 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13558 mpc_init2 (m, prec);
13559 mpfr_from_real (mpc_realref(m), re, rnd);
13560 mpfr_from_real (mpc_imagref(m), im, rnd);
13561 mpfr_clear_flags ();
13562 inexact = func (m, m, crnd);
13563 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13571 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13572 mpc function FUNC on it and return the resulting value as a tree
13573 with type TYPE. The mpfr precision is set to the precision of
13574 TYPE. We assume that function FUNC returns zero if the result
13575 could be calculated exactly within the requested precision. If
13576 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13577 in the arguments and/or results. */
13580 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13581 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13583 tree result = NULL_TREE;
13588 /* To proceed, MPFR must exactly represent the target floating point
13589 format, which only happens when the target base equals two. */
13590 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13591 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13592 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13593 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13594 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13596 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13597 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13598 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13599 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13602 || (real_isfinite (re0) && real_isfinite (im0)
13603 && real_isfinite (re1) && real_isfinite (im1)))
13605 const struct real_format *const fmt =
13606 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13607 const int prec = fmt->p;
13608 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13609 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13613 mpc_init2 (m0, prec);
13614 mpc_init2 (m1, prec);
13615 mpfr_from_real (mpc_realref(m0), re0, rnd);
13616 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13617 mpfr_from_real (mpc_realref(m1), re1, rnd);
13618 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13619 mpfr_clear_flags ();
13620 inexact = func (m0, m0, m1, crnd);
13621 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13630 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13631 a normal call should be emitted rather than expanding the function
13632 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13635 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13637 int nargs = gimple_call_num_args (stmt);
13639 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13641 ? gimple_call_arg_ptr (stmt, 0)
13642 : &error_mark_node), fcode);
13645 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13646 a normal call should be emitted rather than expanding the function
13647 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13648 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13649 passed as second argument. */
13652 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13653 enum built_in_function fcode)
13655 int nargs = gimple_call_num_args (stmt);
13657 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13659 ? gimple_call_arg_ptr (stmt, 0)
13660 : &error_mark_node), maxlen, fcode);
13663 /* Builtins with folding operations that operate on "..." arguments
13664 need special handling; we need to store the arguments in a convenient
13665 data structure before attempting any folding. Fortunately there are
13666 only a few builtins that fall into this category. FNDECL is the
13667 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13668 result of the function call is ignored. */
13671 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13672 bool ignore ATTRIBUTE_UNUSED)
13674 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13675 tree ret = NULL_TREE;
13679 case BUILT_IN_SPRINTF_CHK:
13680 case BUILT_IN_VSPRINTF_CHK:
13681 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13684 case BUILT_IN_SNPRINTF_CHK:
13685 case BUILT_IN_VSNPRINTF_CHK:
13686 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13693 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13694 TREE_NO_WARNING (ret) = 1;
13700 /* A wrapper function for builtin folding that prevents warnings for
13701 "statement without effect" and the like, caused by removing the
13702 call node earlier than the warning is generated. */
13705 fold_call_stmt (gimple stmt, bool ignore)
13707 tree ret = NULL_TREE;
13708 tree fndecl = gimple_call_fndecl (stmt);
13709 location_t loc = gimple_location (stmt);
13711 && TREE_CODE (fndecl) == FUNCTION_DECL
13712 && DECL_BUILT_IN (fndecl)
13713 && !gimple_call_va_arg_pack_p (stmt))
13715 int nargs = gimple_call_num_args (stmt);
13716 tree *args = (nargs > 0
13717 ? gimple_call_arg_ptr (stmt, 0)
13718 : &error_mark_node);
13720 if (avoid_folding_inline_builtin (fndecl))
13722 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13724 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13728 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13729 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13731 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13734 /* Propagate location information from original call to
13735 expansion of builtin. Otherwise things like
13736 maybe_emit_chk_warning, that operate on the expansion
13737 of a builtin, will use the wrong location information. */
13738 if (gimple_has_location (stmt))
13740 tree realret = ret;
13741 if (TREE_CODE (ret) == NOP_EXPR)
13742 realret = TREE_OPERAND (ret, 0);
13743 if (CAN_HAVE_LOCATION_P (realret)
13744 && !EXPR_HAS_LOCATION (realret))
13745 SET_EXPR_LOCATION (realret, loc);
13755 /* Look up the function in built_in_decls that corresponds to DECL
13756 and set ASMSPEC as its user assembler name. DECL must be a
13757 function decl that declares a builtin. */
13760 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13763 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13764 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13767 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13768 set_user_assembler_name (builtin, asmspec);
13769 switch (DECL_FUNCTION_CODE (decl))
13771 case BUILT_IN_MEMCPY:
13772 init_block_move_fn (asmspec);
13773 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13775 case BUILT_IN_MEMSET:
13776 init_block_clear_fn (asmspec);
13777 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13779 case BUILT_IN_MEMMOVE:
13780 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13782 case BUILT_IN_MEMCMP:
13783 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13785 case BUILT_IN_ABORT:
13786 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13789 if (INT_TYPE_SIZE < BITS_PER_WORD)
13791 set_user_assembler_libfunc ("ffs", asmspec);
13792 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13793 MODE_INT, 0), "ffs");
13801 /* Return true if DECL is a builtin that expands to a constant or similarly
13804 is_simple_builtin (tree decl)
13806 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13807 switch (DECL_FUNCTION_CODE (decl))
13809 /* Builtins that expand to constants. */
13810 case BUILT_IN_CONSTANT_P:
13811 case BUILT_IN_EXPECT:
13812 case BUILT_IN_OBJECT_SIZE:
13813 case BUILT_IN_UNREACHABLE:
13814 /* Simple register moves or loads from stack. */
13815 case BUILT_IN_RETURN_ADDRESS:
13816 case BUILT_IN_EXTRACT_RETURN_ADDR:
13817 case BUILT_IN_FROB_RETURN_ADDR:
13818 case BUILT_IN_RETURN:
13819 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13820 case BUILT_IN_FRAME_ADDRESS:
13821 case BUILT_IN_VA_END:
13822 case BUILT_IN_STACK_SAVE:
13823 case BUILT_IN_STACK_RESTORE:
13824 /* Exception state returns or moves registers around. */
13825 case BUILT_IN_EH_FILTER:
13826 case BUILT_IN_EH_POINTER:
13827 case BUILT_IN_EH_COPY_VALUES:
13837 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13838 most probably expanded inline into reasonably simple code. This is a
13839 superset of is_simple_builtin. */
13841 is_inexpensive_builtin (tree decl)
13845 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13847 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13848 switch (DECL_FUNCTION_CODE (decl))
13851 case BUILT_IN_ALLOCA:
13852 case BUILT_IN_BSWAP32:
13853 case BUILT_IN_BSWAP64:
13855 case BUILT_IN_CLZIMAX:
13856 case BUILT_IN_CLZL:
13857 case BUILT_IN_CLZLL:
13859 case BUILT_IN_CTZIMAX:
13860 case BUILT_IN_CTZL:
13861 case BUILT_IN_CTZLL:
13863 case BUILT_IN_FFSIMAX:
13864 case BUILT_IN_FFSL:
13865 case BUILT_IN_FFSLL:
13866 case BUILT_IN_IMAXABS:
13867 case BUILT_IN_FINITE:
13868 case BUILT_IN_FINITEF:
13869 case BUILT_IN_FINITEL:
13870 case BUILT_IN_FINITED32:
13871 case BUILT_IN_FINITED64:
13872 case BUILT_IN_FINITED128:
13873 case BUILT_IN_FPCLASSIFY:
13874 case BUILT_IN_ISFINITE:
13875 case BUILT_IN_ISINF_SIGN:
13876 case BUILT_IN_ISINF:
13877 case BUILT_IN_ISINFF:
13878 case BUILT_IN_ISINFL:
13879 case BUILT_IN_ISINFD32:
13880 case BUILT_IN_ISINFD64:
13881 case BUILT_IN_ISINFD128:
13882 case BUILT_IN_ISNAN:
13883 case BUILT_IN_ISNANF:
13884 case BUILT_IN_ISNANL:
13885 case BUILT_IN_ISNAND32:
13886 case BUILT_IN_ISNAND64:
13887 case BUILT_IN_ISNAND128:
13888 case BUILT_IN_ISNORMAL:
13889 case BUILT_IN_ISGREATER:
13890 case BUILT_IN_ISGREATEREQUAL:
13891 case BUILT_IN_ISLESS:
13892 case BUILT_IN_ISLESSEQUAL:
13893 case BUILT_IN_ISLESSGREATER:
13894 case BUILT_IN_ISUNORDERED:
13895 case BUILT_IN_VA_ARG_PACK:
13896 case BUILT_IN_VA_ARG_PACK_LEN:
13897 case BUILT_IN_VA_COPY:
13898 case BUILT_IN_TRAP:
13899 case BUILT_IN_SAVEREGS:
13900 case BUILT_IN_POPCOUNTL:
13901 case BUILT_IN_POPCOUNTLL:
13902 case BUILT_IN_POPCOUNTIMAX:
13903 case BUILT_IN_POPCOUNT:
13904 case BUILT_IN_PARITYL:
13905 case BUILT_IN_PARITYLL:
13906 case BUILT_IN_PARITYIMAX:
13907 case BUILT_IN_PARITY:
13908 case BUILT_IN_LABS:
13909 case BUILT_IN_LLABS:
13910 case BUILT_IN_PREFETCH:
13914 return is_simple_builtin (decl);