1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
62 struct target_builtins *this_target_builtins = &default_target_builtins;
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
194 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
196 static rtx expand_builtin_object_size (tree);
197 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
198 enum built_in_function);
199 static void maybe_emit_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_free_warning (tree);
202 static tree fold_builtin_object_size (tree, tree);
203 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
204 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
205 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
209 static bool init_target_chars (void);
211 static unsigned HOST_WIDE_INT target_newline;
212 static unsigned HOST_WIDE_INT target_percent;
213 static unsigned HOST_WIDE_INT target_c;
214 static unsigned HOST_WIDE_INT target_s;
215 static char target_percent_c[3];
216 static char target_percent_s[3];
217 static char target_percent_s_newline[4];
218 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
219 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
220 static tree do_mpfr_arg2 (tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_arg3 (tree, tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_sincos (tree, tree, tree);
225 static tree do_mpfr_bessel_n (tree, tree, tree,
226 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_remquo (tree, tree, tree);
229 static tree do_mpfr_lgamma_r (tree, tree, tree);
231 /* Return true if NAME starts with __builtin_ or __sync_. */
234 is_builtin_name (const char *name)
236 if (strncmp (name, "__builtin_", 10) == 0)
238 if (strncmp (name, "__sync_", 7) == 0)
244 /* Return true if DECL is a function symbol representing a built-in. */
247 is_builtin_fn (tree decl)
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
258 called_as_built_in (tree node)
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
267 /* Compute values M and N such that M divides (address of EXP - N) and
268 such that N < M. Store N in *BITPOSP and return M.
270 Note that the address (and thus the alignment) computed here is based
271 on the address to which a symbol resolves, whereas DECL_ALIGN is based
272 on the address at which an object is actually located. These two
273 addresses are not always the same. For example, on ARM targets,
274 the address &foo of a Thumb function foo() has the lowest bit set,
275 whereas foo() itself starts on an even address. */
278 get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
280 HOST_WIDE_INT bitsize, bitpos;
282 enum machine_mode mode;
283 int unsignedp, volatilep;
284 unsigned int align, inner;
286 /* Get the innermost object and the constant (bitpos) and possibly
287 variable (offset) offset of the access. */
288 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
289 &mode, &unsignedp, &volatilep, true);
291 /* Extract alignment information from the innermost object and
292 possibly adjust bitpos and offset. */
293 if (TREE_CODE (exp) == CONST_DECL)
294 exp = DECL_INITIAL (exp);
296 && TREE_CODE (exp) != LABEL_DECL)
298 if (TREE_CODE (exp) == FUNCTION_DECL)
300 /* Function addresses can encode extra information besides their
301 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
302 allows the low bit to be used as a virtual bit, we know
303 that the address itself must be 2-byte aligned. */
304 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
305 align = 2 * BITS_PER_UNIT;
307 align = BITS_PER_UNIT;
310 align = DECL_ALIGN (exp);
312 else if (CONSTANT_CLASS_P (exp))
314 align = TYPE_ALIGN (TREE_TYPE (exp));
315 #ifdef CONSTANT_ALIGNMENT
316 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
319 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
320 align = TYPE_ALIGN (TREE_TYPE (exp));
321 else if (TREE_CODE (exp) == INDIRECT_REF)
322 align = TYPE_ALIGN (TREE_TYPE (exp));
323 else if (TREE_CODE (exp) == MEM_REF)
325 tree addr = TREE_OPERAND (exp, 0);
326 struct ptr_info_def *pi;
327 if (TREE_CODE (addr) == BIT_AND_EXPR
328 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
330 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
331 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
332 align *= BITS_PER_UNIT;
333 addr = TREE_OPERAND (addr, 0);
336 align = BITS_PER_UNIT;
337 if (TREE_CODE (addr) == SSA_NAME
338 && (pi = SSA_NAME_PTR_INFO (addr)))
340 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
341 align = MAX (pi->align * BITS_PER_UNIT, align);
343 else if (TREE_CODE (addr) == ADDR_EXPR)
344 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
345 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
347 else if (TREE_CODE (exp) == TARGET_MEM_REF)
349 struct ptr_info_def *pi;
350 tree addr = TMR_BASE (exp);
351 if (TREE_CODE (addr) == BIT_AND_EXPR
352 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
354 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
355 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
356 align *= BITS_PER_UNIT;
357 addr = TREE_OPERAND (addr, 0);
360 align = BITS_PER_UNIT;
361 if (TREE_CODE (addr) == SSA_NAME
362 && (pi = SSA_NAME_PTR_INFO (addr)))
364 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
365 align = MAX (pi->align * BITS_PER_UNIT, align);
367 else if (TREE_CODE (addr) == ADDR_EXPR)
368 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
369 if (TMR_OFFSET (exp))
370 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
371 if (TMR_INDEX (exp) && TMR_STEP (exp))
373 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
374 align = MIN (align, (step & -step) * BITS_PER_UNIT);
376 else if (TMR_INDEX (exp))
377 align = BITS_PER_UNIT;
378 if (TMR_INDEX2 (exp))
379 align = BITS_PER_UNIT;
382 align = BITS_PER_UNIT;
384 /* If there is a non-constant offset part extract the maximum
385 alignment that can prevail. */
391 if (TREE_CODE (offset) == PLUS_EXPR)
393 next_offset = TREE_OPERAND (offset, 0);
394 offset = TREE_OPERAND (offset, 1);
398 if (host_integerp (offset, 1))
400 /* Any overflow in calculating offset_bits won't change
403 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
406 inner = MIN (inner, (offset_bits & -offset_bits));
408 else if (TREE_CODE (offset) == MULT_EXPR
409 && host_integerp (TREE_OPERAND (offset, 1), 1))
411 /* Any overflow in calculating offset_factor won't change
413 unsigned offset_factor
414 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
418 inner = MIN (inner, (offset_factor & -offset_factor));
422 inner = MIN (inner, BITS_PER_UNIT);
425 offset = next_offset;
428 /* Alignment is innermost object alignment adjusted by the constant
429 and non-constant offset parts. */
430 align = MIN (align, inner);
431 bitpos = bitpos & (align - 1);
437 /* Return the alignment in bits of EXP, an object.
438 Don't return more than MAX_ALIGN no matter what. */
441 get_object_alignment (tree exp, unsigned int max_align)
443 unsigned HOST_WIDE_INT bitpos = 0;
446 align = get_object_alignment_1 (exp, &bitpos);
448 /* align and bitpos now specify known low bits of the pointer.
449 ptr & (align - 1) == bitpos. */
452 align = (bitpos & -bitpos);
454 return MIN (align, max_align);
457 /* Returns true iff we can trust that alignment information has been
458 calculated properly. */
461 can_trust_pointer_alignment (void)
463 /* We rely on TER to compute accurate alignment information. */
464 return (optimize && flag_tree_ter);
467 /* Return the alignment in bits of EXP, a pointer valued expression.
468 But don't return more than MAX_ALIGN no matter what.
469 The alignment returned is, by default, the alignment of the thing that
470 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
472 Otherwise, look at the expression to see if we can do better, i.e., if the
473 expression is actually pointing at an object whose alignment is tighter. */
476 get_pointer_alignment (tree exp, unsigned int max_align)
480 if (TREE_CODE (exp) == ADDR_EXPR)
481 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
482 else if (TREE_CODE (exp) == SSA_NAME
483 && POINTER_TYPE_P (TREE_TYPE (exp)))
485 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
488 return BITS_PER_UNIT;
489 if (pi->misalign != 0)
490 align = (pi->misalign & -pi->misalign);
493 return MIN (max_align, align * BITS_PER_UNIT);
496 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
499 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
500 way, because it could contain a zero byte in the middle.
501 TREE_STRING_LENGTH is the size of the character array, not the string.
503 ONLY_VALUE should be nonzero if the result is not going to be emitted
504 into the instruction stream and zero if it is going to be expanded.
505 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
506 is returned, otherwise NULL, since
507 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
508 evaluate the side-effects.
510 The value returned is of type `ssizetype'.
512 Unfortunately, string_constant can't access the values of const char
513 arrays with initializers, so neither can we do so here. */
516 c_strlen (tree src, int only_value)
519 HOST_WIDE_INT offset;
525 if (TREE_CODE (src) == COND_EXPR
526 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
530 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
531 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
532 if (tree_int_cst_equal (len1, len2))
536 if (TREE_CODE (src) == COMPOUND_EXPR
537 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
538 return c_strlen (TREE_OPERAND (src, 1), only_value);
540 loc = EXPR_LOC_OR_HERE (src);
542 src = string_constant (src, &offset_node);
546 max = TREE_STRING_LENGTH (src) - 1;
547 ptr = TREE_STRING_POINTER (src);
549 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
551 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
552 compute the offset to the following null if we don't know where to
553 start searching for it. */
556 for (i = 0; i < max; i++)
560 /* We don't know the starting offset, but we do know that the string
561 has no internal zero bytes. We can assume that the offset falls
562 within the bounds of the string; otherwise, the programmer deserves
563 what he gets. Subtract the offset from the length of the string,
564 and return that. This would perhaps not be valid if we were dealing
565 with named arrays in addition to literal string constants. */
567 return size_diffop_loc (loc, size_int (max), offset_node);
570 /* We have a known offset into the string. Start searching there for
571 a null character if we can represent it as a single HOST_WIDE_INT. */
572 if (offset_node == 0)
574 else if (! host_integerp (offset_node, 0))
577 offset = tree_low_cst (offset_node, 0);
579 /* If the offset is known to be out of bounds, warn, and call strlen at
581 if (offset < 0 || offset > max)
583 /* Suppress multiple warnings for propagated constant strings. */
584 if (! TREE_NO_WARNING (src))
586 warning_at (loc, 0, "offset outside bounds of constant string");
587 TREE_NO_WARNING (src) = 1;
592 /* Use strlen to search for the first zero byte. Since any strings
593 constructed with build_string will have nulls appended, we win even
594 if we get handed something like (char[4])"abcd".
596 Since OFFSET is our starting index into the string, no further
597 calculation is needed. */
598 return ssize_int (strlen (ptr + offset));
601 /* Return a char pointer for a C string if it is a string constant
602 or sum of string constant and integer constant. */
609 src = string_constant (src, &offset_node);
613 if (offset_node == 0)
614 return TREE_STRING_POINTER (src);
615 else if (!host_integerp (offset_node, 1)
616 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
619 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
622 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
623 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
626 c_readstr (const char *str, enum machine_mode mode)
632 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
637 for (i = 0; i < GET_MODE_SIZE (mode); i++)
640 if (WORDS_BIG_ENDIAN)
641 j = GET_MODE_SIZE (mode) - i - 1;
642 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
643 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
644 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
646 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
649 ch = (unsigned char) str[i];
650 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
652 return immed_double_const (c[0], c[1], mode);
655 /* Cast a target constant CST to target CHAR and if that value fits into
656 host char type, return zero and put that value into variable pointed to by
660 target_char_cast (tree cst, char *p)
662 unsigned HOST_WIDE_INT val, hostval;
664 if (TREE_CODE (cst) != INTEGER_CST
665 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
668 val = TREE_INT_CST_LOW (cst);
669 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
670 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
673 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
674 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
683 /* Similar to save_expr, but assumes that arbitrary code is not executed
684 in between the multiple evaluations. In particular, we assume that a
685 non-addressable local variable will not be modified. */
688 builtin_save_expr (tree exp)
690 if (TREE_CODE (exp) == SSA_NAME
691 || (TREE_ADDRESSABLE (exp) == 0
692 && (TREE_CODE (exp) == PARM_DECL
693 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
696 return save_expr (exp);
699 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
700 times to get the address of either a higher stack frame, or a return
701 address located within it (depending on FNDECL_CODE). */
704 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
708 #ifdef INITIAL_FRAME_ADDRESS_RTX
709 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
713 /* For a zero count with __builtin_return_address, we don't care what
714 frame address we return, because target-specific definitions will
715 override us. Therefore frame pointer elimination is OK, and using
716 the soft frame pointer is OK.
718 For a nonzero count, or a zero count with __builtin_frame_address,
719 we require a stable offset from the current frame pointer to the
720 previous one, so we must use the hard frame pointer, and
721 we must disable frame pointer elimination. */
722 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
723 tem = frame_pointer_rtx;
726 tem = hard_frame_pointer_rtx;
728 /* Tell reload not to eliminate the frame pointer. */
729 crtl->accesses_prior_frames = 1;
733 /* Some machines need special handling before we can access
734 arbitrary frames. For example, on the SPARC, we must first flush
735 all register windows to the stack. */
736 #ifdef SETUP_FRAME_ADDRESSES
738 SETUP_FRAME_ADDRESSES ();
741 /* On the SPARC, the return address is not in the frame, it is in a
742 register. There is no way to access it off of the current frame
743 pointer, but it can be accessed off the previous frame pointer by
744 reading the value from the register window save area. */
745 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
746 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
750 /* Scan back COUNT frames to the specified frame. */
751 for (i = 0; i < count; i++)
753 /* Assume the dynamic chain pointer is in the word that the
754 frame address points to, unless otherwise specified. */
755 #ifdef DYNAMIC_CHAIN_ADDRESS
756 tem = DYNAMIC_CHAIN_ADDRESS (tem);
758 tem = memory_address (Pmode, tem);
759 tem = gen_frame_mem (Pmode, tem);
760 tem = copy_to_reg (tem);
763 /* For __builtin_frame_address, return what we've got. But, on
764 the SPARC for example, we may have to add a bias. */
765 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
766 #ifdef FRAME_ADDR_RTX
767 return FRAME_ADDR_RTX (tem);
772 /* For __builtin_return_address, get the return address from that frame. */
773 #ifdef RETURN_ADDR_RTX
774 tem = RETURN_ADDR_RTX (count, tem);
776 tem = memory_address (Pmode,
777 plus_constant (tem, GET_MODE_SIZE (Pmode)));
778 tem = gen_frame_mem (Pmode, tem);
783 /* Alias set used for setjmp buffer. */
784 static alias_set_type setjmp_alias_set = -1;
786 /* Construct the leading half of a __builtin_setjmp call. Control will
787 return to RECEIVER_LABEL. This is also called directly by the SJLJ
788 exception handling code. */
791 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
793 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
797 if (setjmp_alias_set == -1)
798 setjmp_alias_set = new_alias_set ();
800 buf_addr = convert_memory_address (Pmode, buf_addr);
802 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
804 /* We store the frame pointer and the address of receiver_label in
805 the buffer and use the rest of it for the stack save area, which
806 is machine-dependent. */
808 mem = gen_rtx_MEM (Pmode, buf_addr);
809 set_mem_alias_set (mem, setjmp_alias_set);
810 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
812 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
813 set_mem_alias_set (mem, setjmp_alias_set);
815 emit_move_insn (validize_mem (mem),
816 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
818 stack_save = gen_rtx_MEM (sa_mode,
819 plus_constant (buf_addr,
820 2 * GET_MODE_SIZE (Pmode)));
821 set_mem_alias_set (stack_save, setjmp_alias_set);
822 emit_stack_save (SAVE_NONLOCAL, &stack_save);
824 /* If there is further processing to do, do it. */
825 #ifdef HAVE_builtin_setjmp_setup
826 if (HAVE_builtin_setjmp_setup)
827 emit_insn (gen_builtin_setjmp_setup (buf_addr));
830 /* We have a nonlocal label. */
831 cfun->has_nonlocal_label = 1;
834 /* Construct the trailing part of a __builtin_setjmp call. This is
835 also called directly by the SJLJ exception handling code. */
838 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
842 /* Clobber the FP when we get here, so we have to make sure it's
843 marked as used by this function. */
844 emit_use (hard_frame_pointer_rtx);
846 /* Mark the static chain as clobbered here so life information
847 doesn't get messed up for it. */
848 chain = targetm.calls.static_chain (current_function_decl, true);
849 if (chain && REG_P (chain))
850 emit_clobber (chain);
852 /* Now put in the code to restore the frame pointer, and argument
853 pointer, if needed. */
854 #ifdef HAVE_nonlocal_goto
855 if (! HAVE_nonlocal_goto)
858 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
859 /* This might change the hard frame pointer in ways that aren't
860 apparent to early optimization passes, so force a clobber. */
861 emit_clobber (hard_frame_pointer_rtx);
864 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
865 if (fixed_regs[ARG_POINTER_REGNUM])
867 #ifdef ELIMINABLE_REGS
869 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
871 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
872 if (elim_regs[i].from == ARG_POINTER_REGNUM
873 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
876 if (i == ARRAY_SIZE (elim_regs))
879 /* Now restore our arg pointer from the address at which it
880 was saved in our stack frame. */
881 emit_move_insn (crtl->args.internal_arg_pointer,
882 copy_to_reg (get_arg_pointer_save_area ()));
887 #ifdef HAVE_builtin_setjmp_receiver
888 if (HAVE_builtin_setjmp_receiver)
889 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
892 #ifdef HAVE_nonlocal_goto_receiver
893 if (HAVE_nonlocal_goto_receiver)
894 emit_insn (gen_nonlocal_goto_receiver ());
899 /* We must not allow the code we just generated to be reordered by
900 scheduling. Specifically, the update of the frame pointer must
901 happen immediately, not later. */
902 emit_insn (gen_blockage ());
905 /* __builtin_longjmp is passed a pointer to an array of five words (not
906 all will be used on all machines). It operates similarly to the C
907 library function of the same name, but is more efficient. Much of
908 the code below is copied from the handling of non-local gotos. */
911 expand_builtin_longjmp (rtx buf_addr, rtx value)
913 rtx fp, lab, stack, insn, last;
914 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
916 /* DRAP is needed for stack realign if longjmp is expanded to current
918 if (SUPPORTS_STACK_ALIGNMENT)
919 crtl->need_drap = true;
921 if (setjmp_alias_set == -1)
922 setjmp_alias_set = new_alias_set ();
924 buf_addr = convert_memory_address (Pmode, buf_addr);
926 buf_addr = force_reg (Pmode, buf_addr);
928 /* We require that the user must pass a second argument of 1, because
929 that is what builtin_setjmp will return. */
930 gcc_assert (value == const1_rtx);
932 last = get_last_insn ();
933 #ifdef HAVE_builtin_longjmp
934 if (HAVE_builtin_longjmp)
935 emit_insn (gen_builtin_longjmp (buf_addr));
939 fp = gen_rtx_MEM (Pmode, buf_addr);
940 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
941 GET_MODE_SIZE (Pmode)));
943 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
944 2 * GET_MODE_SIZE (Pmode)));
945 set_mem_alias_set (fp, setjmp_alias_set);
946 set_mem_alias_set (lab, setjmp_alias_set);
947 set_mem_alias_set (stack, setjmp_alias_set);
949 /* Pick up FP, label, and SP from the block and jump. This code is
950 from expand_goto in stmt.c; see there for detailed comments. */
951 #ifdef HAVE_nonlocal_goto
952 if (HAVE_nonlocal_goto)
953 /* We have to pass a value to the nonlocal_goto pattern that will
954 get copied into the static_chain pointer, but it does not matter
955 what that value is, because builtin_setjmp does not use it. */
956 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
960 lab = copy_to_reg (lab);
962 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
963 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
965 emit_move_insn (hard_frame_pointer_rtx, fp);
966 emit_stack_restore (SAVE_NONLOCAL, stack);
968 emit_use (hard_frame_pointer_rtx);
969 emit_use (stack_pointer_rtx);
970 emit_indirect_jump (lab);
974 /* Search backwards and mark the jump insn as a non-local goto.
975 Note that this precludes the use of __builtin_longjmp to a
976 __builtin_setjmp target in the same function. However, we've
977 already cautioned the user that these functions are for
978 internal exception handling use only. */
979 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
981 gcc_assert (insn != last);
985 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
988 else if (CALL_P (insn))
993 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
994 and the address of the save area. */
997 expand_builtin_nonlocal_goto (tree exp)
999 tree t_label, t_save_area;
1000 rtx r_label, r_save_area, r_fp, r_sp, insn;
1002 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1005 t_label = CALL_EXPR_ARG (exp, 0);
1006 t_save_area = CALL_EXPR_ARG (exp, 1);
1008 r_label = expand_normal (t_label);
1009 r_label = convert_memory_address (Pmode, r_label);
1010 r_save_area = expand_normal (t_save_area);
1011 r_save_area = convert_memory_address (Pmode, r_save_area);
1012 /* Copy the address of the save location to a register just in case it was
1013 based on the frame pointer. */
1014 r_save_area = copy_to_reg (r_save_area);
1015 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1016 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1017 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
1019 crtl->has_nonlocal_goto = 1;
1021 #ifdef HAVE_nonlocal_goto
1022 /* ??? We no longer need to pass the static chain value, afaik. */
1023 if (HAVE_nonlocal_goto)
1024 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1028 r_label = copy_to_reg (r_label);
1030 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1031 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1033 /* Restore frame pointer for containing function. */
1034 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1035 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1037 /* USE of hard_frame_pointer_rtx added for consistency;
1038 not clear if really needed. */
1039 emit_use (hard_frame_pointer_rtx);
1040 emit_use (stack_pointer_rtx);
1042 /* If the architecture is using a GP register, we must
1043 conservatively assume that the target function makes use of it.
1044 The prologue of functions with nonlocal gotos must therefore
1045 initialize the GP register to the appropriate value, and we
1046 must then make sure that this value is live at the point
1047 of the jump. (Note that this doesn't necessarily apply
1048 to targets with a nonlocal_goto pattern; they are free
1049 to implement it in their own way. Note also that this is
1050 a no-op if the GP register is a global invariant.) */
1051 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1052 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1053 emit_use (pic_offset_table_rtx);
1055 emit_indirect_jump (r_label);
1058 /* Search backwards to the jump insn and mark it as a
1060 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1064 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1067 else if (CALL_P (insn))
1074 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1075 (not all will be used on all machines) that was passed to __builtin_setjmp.
1076 It updates the stack pointer in that block to correspond to the current
1080 expand_builtin_update_setjmp_buf (rtx buf_addr)
1082 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1084 = gen_rtx_MEM (sa_mode,
1087 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1089 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1092 /* Expand a call to __builtin_prefetch. For a target that does not support
1093 data prefetch, evaluate the memory address argument in case it has side
1097 expand_builtin_prefetch (tree exp)
1099 tree arg0, arg1, arg2;
1103 if (!validate_arglist (exp, POINTER_TYPE, 0))
1106 arg0 = CALL_EXPR_ARG (exp, 0);
1108 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1109 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1111 nargs = call_expr_nargs (exp);
1113 arg1 = CALL_EXPR_ARG (exp, 1);
1115 arg1 = integer_zero_node;
1117 arg2 = CALL_EXPR_ARG (exp, 2);
1119 arg2 = integer_three_node;
1121 /* Argument 0 is an address. */
1122 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1124 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1125 if (TREE_CODE (arg1) != INTEGER_CST)
1127 error ("second argument to %<__builtin_prefetch%> must be a constant");
1128 arg1 = integer_zero_node;
1130 op1 = expand_normal (arg1);
1131 /* Argument 1 must be either zero or one. */
1132 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1134 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1139 /* Argument 2 (locality) must be a compile-time constant int. */
1140 if (TREE_CODE (arg2) != INTEGER_CST)
1142 error ("third argument to %<__builtin_prefetch%> must be a constant");
1143 arg2 = integer_zero_node;
1145 op2 = expand_normal (arg2);
1146 /* Argument 2 must be 0, 1, 2, or 3. */
1147 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1149 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1153 #ifdef HAVE_prefetch
1156 struct expand_operand ops[3];
1158 create_address_operand (&ops[0], op0);
1159 create_integer_operand (&ops[1], INTVAL (op1));
1160 create_integer_operand (&ops[2], INTVAL (op2));
1161 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1166 /* Don't do anything with direct references to volatile memory, but
1167 generate code to handle other side effects. */
1168 if (!MEM_P (op0) && side_effects_p (op0))
1172 /* Get a MEM rtx for expression EXP which is the address of an operand
1173 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1174 the maximum length of the block of memory that might be accessed or
1178 get_memory_rtx (tree exp, tree len)
1180 tree orig_exp = exp;
1184 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1185 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1186 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1187 exp = TREE_OPERAND (exp, 0);
1189 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1190 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1192 /* Get an expression we can use to find the attributes to assign to MEM.
1193 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1194 we can. First remove any nops. */
1195 while (CONVERT_EXPR_P (exp)
1196 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1197 exp = TREE_OPERAND (exp, 0);
1200 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1201 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1202 && host_integerp (TREE_OPERAND (exp, 1), 0)
1203 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1204 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1205 else if (TREE_CODE (exp) == ADDR_EXPR)
1206 exp = TREE_OPERAND (exp, 0);
1207 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1208 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1212 /* Honor attributes derived from exp, except for the alias set
1213 (as builtin stringops may alias with anything) and the size
1214 (as stringops may access multiple array elements). */
1217 set_mem_attributes (mem, exp, 0);
1220 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1222 /* Allow the string and memory builtins to overflow from one
1223 field into another, see http://gcc.gnu.org/PR23561.
1224 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1225 memory accessed by the string or memory builtin will fit
1226 within the field. */
1227 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1229 tree mem_expr = MEM_EXPR (mem);
1230 HOST_WIDE_INT offset = -1, length = -1;
1233 while (TREE_CODE (inner) == ARRAY_REF
1234 || CONVERT_EXPR_P (inner)
1235 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1236 || TREE_CODE (inner) == SAVE_EXPR)
1237 inner = TREE_OPERAND (inner, 0);
1239 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1241 if (MEM_OFFSET_KNOWN_P (mem))
1242 offset = MEM_OFFSET (mem);
1244 if (offset >= 0 && len && host_integerp (len, 0))
1245 length = tree_low_cst (len, 0);
1247 while (TREE_CODE (inner) == COMPONENT_REF)
1249 tree field = TREE_OPERAND (inner, 1);
1250 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1251 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1253 /* Bitfields are generally not byte-addressable. */
1254 gcc_assert (!DECL_BIT_FIELD (field)
1255 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1256 % BITS_PER_UNIT) == 0
1257 && host_integerp (DECL_SIZE (field), 0)
1258 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1259 % BITS_PER_UNIT) == 0));
1261 /* If we can prove that the memory starting at XEXP (mem, 0) and
1262 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1263 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1264 fields without DECL_SIZE_UNIT like flexible array members. */
1266 && DECL_SIZE_UNIT (field)
1267 && host_integerp (DECL_SIZE_UNIT (field), 0))
1270 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1273 && offset + length <= size)
1278 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1279 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1280 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1288 mem_expr = TREE_OPERAND (mem_expr, 0);
1289 inner = TREE_OPERAND (inner, 0);
1292 if (mem_expr == NULL)
1294 if (mem_expr != MEM_EXPR (mem))
1296 set_mem_expr (mem, mem_expr);
1298 set_mem_offset (mem, offset);
1300 clear_mem_offset (mem);
1303 set_mem_alias_set (mem, 0);
1304 clear_mem_size (mem);
1310 /* Built-in functions to perform an untyped call and return. */
1312 #define apply_args_mode \
1313 (this_target_builtins->x_apply_args_mode)
1314 #define apply_result_mode \
1315 (this_target_builtins->x_apply_result_mode)
1317 /* Return the size required for the block returned by __builtin_apply_args,
1318 and initialize apply_args_mode. */
1321 apply_args_size (void)
1323 static int size = -1;
1326 enum machine_mode mode;
1328 /* The values computed by this function never change. */
1331 /* The first value is the incoming arg-pointer. */
1332 size = GET_MODE_SIZE (Pmode);
1334 /* The second value is the structure value address unless this is
1335 passed as an "invisible" first argument. */
1336 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1337 size += GET_MODE_SIZE (Pmode);
1339 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1340 if (FUNCTION_ARG_REGNO_P (regno))
1342 mode = targetm.calls.get_raw_arg_mode (regno);
1344 gcc_assert (mode != VOIDmode);
1346 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1347 if (size % align != 0)
1348 size = CEIL (size, align) * align;
1349 size += GET_MODE_SIZE (mode);
1350 apply_args_mode[regno] = mode;
1354 apply_args_mode[regno] = VOIDmode;
1360 /* Return the size required for the block returned by __builtin_apply,
1361 and initialize apply_result_mode. */
1364 apply_result_size (void)
1366 static int size = -1;
1368 enum machine_mode mode;
1370 /* The values computed by this function never change. */
1375 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1376 if (targetm.calls.function_value_regno_p (regno))
1378 mode = targetm.calls.get_raw_result_mode (regno);
1380 gcc_assert (mode != VOIDmode);
1382 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1383 if (size % align != 0)
1384 size = CEIL (size, align) * align;
1385 size += GET_MODE_SIZE (mode);
1386 apply_result_mode[regno] = mode;
1389 apply_result_mode[regno] = VOIDmode;
1391 /* Allow targets that use untyped_call and untyped_return to override
1392 the size so that machine-specific information can be stored here. */
1393 #ifdef APPLY_RESULT_SIZE
1394 size = APPLY_RESULT_SIZE;
1400 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1401 /* Create a vector describing the result block RESULT. If SAVEP is true,
1402 the result block is used to save the values; otherwise it is used to
1403 restore the values. */
1406 result_vector (int savep, rtx result)
1408 int regno, size, align, nelts;
1409 enum machine_mode mode;
1411 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1414 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1415 if ((mode = apply_result_mode[regno]) != VOIDmode)
1417 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1418 if (size % align != 0)
1419 size = CEIL (size, align) * align;
1420 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1421 mem = adjust_address (result, mode, size);
1422 savevec[nelts++] = (savep
1423 ? gen_rtx_SET (VOIDmode, mem, reg)
1424 : gen_rtx_SET (VOIDmode, reg, mem));
1425 size += GET_MODE_SIZE (mode);
1427 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1429 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1431 /* Save the state required to perform an untyped call with the same
1432 arguments as were passed to the current function. */
1435 expand_builtin_apply_args_1 (void)
1438 int size, align, regno;
1439 enum machine_mode mode;
1440 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1442 /* Create a block where the arg-pointer, structure value address,
1443 and argument registers can be saved. */
1444 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1446 /* Walk past the arg-pointer and structure value address. */
1447 size = GET_MODE_SIZE (Pmode);
1448 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1449 size += GET_MODE_SIZE (Pmode);
1451 /* Save each register used in calling a function to the block. */
1452 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1453 if ((mode = apply_args_mode[regno]) != VOIDmode)
1455 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1456 if (size % align != 0)
1457 size = CEIL (size, align) * align;
1459 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1461 emit_move_insn (adjust_address (registers, mode, size), tem);
1462 size += GET_MODE_SIZE (mode);
1465 /* Save the arg pointer to the block. */
1466 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1467 #ifdef STACK_GROWS_DOWNWARD
1468 /* We need the pointer as the caller actually passed them to us, not
1469 as we might have pretended they were passed. Make sure it's a valid
1470 operand, as emit_move_insn isn't expected to handle a PLUS. */
1472 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1475 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1477 size = GET_MODE_SIZE (Pmode);
1479 /* Save the structure value address unless this is passed as an
1480 "invisible" first argument. */
1481 if (struct_incoming_value)
1483 emit_move_insn (adjust_address (registers, Pmode, size),
1484 copy_to_reg (struct_incoming_value));
1485 size += GET_MODE_SIZE (Pmode);
1488 /* Return the address of the block. */
1489 return copy_addr_to_reg (XEXP (registers, 0));
1492 /* __builtin_apply_args returns block of memory allocated on
1493 the stack into which is stored the arg pointer, structure
1494 value address, static chain, and all the registers that might
1495 possibly be used in performing a function call. The code is
1496 moved to the start of the function so the incoming values are
1500 expand_builtin_apply_args (void)
1502 /* Don't do __builtin_apply_args more than once in a function.
1503 Save the result of the first call and reuse it. */
1504 if (apply_args_value != 0)
1505 return apply_args_value;
1507 /* When this function is called, it means that registers must be
1508 saved on entry to this function. So we migrate the
1509 call to the first insn of this function. */
1514 temp = expand_builtin_apply_args_1 ();
1518 apply_args_value = temp;
1520 /* Put the insns after the NOTE that starts the function.
1521 If this is inside a start_sequence, make the outer-level insn
1522 chain current, so the code is placed at the start of the
1523 function. If internal_arg_pointer is a non-virtual pseudo,
1524 it needs to be placed after the function that initializes
1526 push_topmost_sequence ();
1527 if (REG_P (crtl->args.internal_arg_pointer)
1528 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1529 emit_insn_before (seq, parm_birth_insn);
1531 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1532 pop_topmost_sequence ();
1537 /* Perform an untyped call and save the state required to perform an
1538 untyped return of whatever value was returned by the given function. */
1541 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1543 int size, align, regno;
1544 enum machine_mode mode;
1545 rtx incoming_args, result, reg, dest, src, call_insn;
1546 rtx old_stack_level = 0;
1547 rtx call_fusage = 0;
1548 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1550 arguments = convert_memory_address (Pmode, arguments);
1552 /* Create a block where the return registers can be saved. */
1553 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1555 /* Fetch the arg pointer from the ARGUMENTS block. */
1556 incoming_args = gen_reg_rtx (Pmode);
1557 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1558 #ifndef STACK_GROWS_DOWNWARD
1559 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1560 incoming_args, 0, OPTAB_LIB_WIDEN);
1563 /* Push a new argument block and copy the arguments. Do not allow
1564 the (potential) memcpy call below to interfere with our stack
1566 do_pending_stack_adjust ();
1569 /* Save the stack with nonlocal if available. */
1570 #ifdef HAVE_save_stack_nonlocal
1571 if (HAVE_save_stack_nonlocal)
1572 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1575 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1577 /* Allocate a block of memory onto the stack and copy the memory
1578 arguments to the outgoing arguments address. We can pass TRUE
1579 as the 4th argument because we just saved the stack pointer
1580 and will restore it right after the call. */
1581 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1583 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1584 may have already set current_function_calls_alloca to true.
1585 current_function_calls_alloca won't be set if argsize is zero,
1586 so we have to guarantee need_drap is true here. */
1587 if (SUPPORTS_STACK_ALIGNMENT)
1588 crtl->need_drap = true;
1590 dest = virtual_outgoing_args_rtx;
1591 #ifndef STACK_GROWS_DOWNWARD
1592 if (CONST_INT_P (argsize))
1593 dest = plus_constant (dest, -INTVAL (argsize));
1595 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1597 dest = gen_rtx_MEM (BLKmode, dest);
1598 set_mem_align (dest, PARM_BOUNDARY);
1599 src = gen_rtx_MEM (BLKmode, incoming_args);
1600 set_mem_align (src, PARM_BOUNDARY);
1601 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1603 /* Refer to the argument block. */
1605 arguments = gen_rtx_MEM (BLKmode, arguments);
1606 set_mem_align (arguments, PARM_BOUNDARY);
1608 /* Walk past the arg-pointer and structure value address. */
1609 size = GET_MODE_SIZE (Pmode);
1611 size += GET_MODE_SIZE (Pmode);
1613 /* Restore each of the registers previously saved. Make USE insns
1614 for each of these registers for use in making the call. */
1615 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1616 if ((mode = apply_args_mode[regno]) != VOIDmode)
1618 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1619 if (size % align != 0)
1620 size = CEIL (size, align) * align;
1621 reg = gen_rtx_REG (mode, regno);
1622 emit_move_insn (reg, adjust_address (arguments, mode, size));
1623 use_reg (&call_fusage, reg);
1624 size += GET_MODE_SIZE (mode);
1627 /* Restore the structure value address unless this is passed as an
1628 "invisible" first argument. */
1629 size = GET_MODE_SIZE (Pmode);
1632 rtx value = gen_reg_rtx (Pmode);
1633 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1634 emit_move_insn (struct_value, value);
1635 if (REG_P (struct_value))
1636 use_reg (&call_fusage, struct_value);
1637 size += GET_MODE_SIZE (Pmode);
1640 /* All arguments and registers used for the call are set up by now! */
1641 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1643 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1644 and we don't want to load it into a register as an optimization,
1645 because prepare_call_address already did it if it should be done. */
1646 if (GET_CODE (function) != SYMBOL_REF)
1647 function = memory_address (FUNCTION_MODE, function);
1649 /* Generate the actual call instruction and save the return value. */
1650 #ifdef HAVE_untyped_call
1651 if (HAVE_untyped_call)
1652 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1653 result, result_vector (1, result)));
1656 #ifdef HAVE_call_value
1657 if (HAVE_call_value)
1661 /* Locate the unique return register. It is not possible to
1662 express a call that sets more than one return register using
1663 call_value; use untyped_call for that. In fact, untyped_call
1664 only needs to save the return registers in the given block. */
1665 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1666 if ((mode = apply_result_mode[regno]) != VOIDmode)
1668 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1670 valreg = gen_rtx_REG (mode, regno);
1673 emit_call_insn (GEN_CALL_VALUE (valreg,
1674 gen_rtx_MEM (FUNCTION_MODE, function),
1675 const0_rtx, NULL_RTX, const0_rtx));
1677 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1683 /* Find the CALL insn we just emitted, and attach the register usage
1685 call_insn = last_call_insn ();
1686 add_function_usage_to (call_insn, call_fusage);
1688 /* Restore the stack. */
1689 #ifdef HAVE_save_stack_nonlocal
1690 if (HAVE_save_stack_nonlocal)
1691 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1694 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1698 /* Return the address of the result block. */
1699 result = copy_addr_to_reg (XEXP (result, 0));
1700 return convert_memory_address (ptr_mode, result);
1703 /* Perform an untyped return. */
1706 expand_builtin_return (rtx result)
1708 int size, align, regno;
1709 enum machine_mode mode;
1711 rtx call_fusage = 0;
1713 result = convert_memory_address (Pmode, result);
1715 apply_result_size ();
1716 result = gen_rtx_MEM (BLKmode, result);
1718 #ifdef HAVE_untyped_return
1719 if (HAVE_untyped_return)
1721 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1727 /* Restore the return value and note that each value is used. */
1729 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1730 if ((mode = apply_result_mode[regno]) != VOIDmode)
1732 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1733 if (size % align != 0)
1734 size = CEIL (size, align) * align;
1735 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1736 emit_move_insn (reg, adjust_address (result, mode, size));
1738 push_to_sequence (call_fusage);
1740 call_fusage = get_insns ();
1742 size += GET_MODE_SIZE (mode);
1745 /* Put the USE insns before the return. */
1746 emit_insn (call_fusage);
1748 /* Return whatever values was restored by jumping directly to the end
1750 expand_naked_return ();
1753 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1755 static enum type_class
1756 type_to_class (tree type)
1758 switch (TREE_CODE (type))
1760 case VOID_TYPE: return void_type_class;
1761 case INTEGER_TYPE: return integer_type_class;
1762 case ENUMERAL_TYPE: return enumeral_type_class;
1763 case BOOLEAN_TYPE: return boolean_type_class;
1764 case POINTER_TYPE: return pointer_type_class;
1765 case REFERENCE_TYPE: return reference_type_class;
1766 case OFFSET_TYPE: return offset_type_class;
1767 case REAL_TYPE: return real_type_class;
1768 case COMPLEX_TYPE: return complex_type_class;
1769 case FUNCTION_TYPE: return function_type_class;
1770 case METHOD_TYPE: return method_type_class;
1771 case RECORD_TYPE: return record_type_class;
1773 case QUAL_UNION_TYPE: return union_type_class;
1774 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1775 ? string_type_class : array_type_class);
1776 case LANG_TYPE: return lang_type_class;
1777 default: return no_type_class;
1781 /* Expand a call EXP to __builtin_classify_type. */
1784 expand_builtin_classify_type (tree exp)
1786 if (call_expr_nargs (exp))
1787 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1788 return GEN_INT (no_type_class);
1791 /* This helper macro, meant to be used in mathfn_built_in below,
1792 determines which among a set of three builtin math functions is
1793 appropriate for a given type mode. The `F' and `L' cases are
1794 automatically generated from the `double' case. */
1795 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1796 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1797 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1798 fcodel = BUILT_IN_MATHFN##L ; break;
1799 /* Similar to above, but appends _R after any F/L suffix. */
1800 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1801 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1802 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1803 fcodel = BUILT_IN_MATHFN##L_R ; break;
1805 /* Return mathematic function equivalent to FN but operating directly
1806 on TYPE, if available. If IMPLICIT is true find the function in
1807 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1808 can't do the conversion, return zero. */
1811 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1813 tree const *const fn_arr
1814 = implicit ? implicit_built_in_decls : built_in_decls;
1815 enum built_in_function fcode, fcodef, fcodel;
1819 CASE_MATHFN (BUILT_IN_ACOS)
1820 CASE_MATHFN (BUILT_IN_ACOSH)
1821 CASE_MATHFN (BUILT_IN_ASIN)
1822 CASE_MATHFN (BUILT_IN_ASINH)
1823 CASE_MATHFN (BUILT_IN_ATAN)
1824 CASE_MATHFN (BUILT_IN_ATAN2)
1825 CASE_MATHFN (BUILT_IN_ATANH)
1826 CASE_MATHFN (BUILT_IN_CBRT)
1827 CASE_MATHFN (BUILT_IN_CEIL)
1828 CASE_MATHFN (BUILT_IN_CEXPI)
1829 CASE_MATHFN (BUILT_IN_COPYSIGN)
1830 CASE_MATHFN (BUILT_IN_COS)
1831 CASE_MATHFN (BUILT_IN_COSH)
1832 CASE_MATHFN (BUILT_IN_DREM)
1833 CASE_MATHFN (BUILT_IN_ERF)
1834 CASE_MATHFN (BUILT_IN_ERFC)
1835 CASE_MATHFN (BUILT_IN_EXP)
1836 CASE_MATHFN (BUILT_IN_EXP10)
1837 CASE_MATHFN (BUILT_IN_EXP2)
1838 CASE_MATHFN (BUILT_IN_EXPM1)
1839 CASE_MATHFN (BUILT_IN_FABS)
1840 CASE_MATHFN (BUILT_IN_FDIM)
1841 CASE_MATHFN (BUILT_IN_FLOOR)
1842 CASE_MATHFN (BUILT_IN_FMA)
1843 CASE_MATHFN (BUILT_IN_FMAX)
1844 CASE_MATHFN (BUILT_IN_FMIN)
1845 CASE_MATHFN (BUILT_IN_FMOD)
1846 CASE_MATHFN (BUILT_IN_FREXP)
1847 CASE_MATHFN (BUILT_IN_GAMMA)
1848 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1849 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1850 CASE_MATHFN (BUILT_IN_HYPOT)
1851 CASE_MATHFN (BUILT_IN_ILOGB)
1852 CASE_MATHFN (BUILT_IN_INF)
1853 CASE_MATHFN (BUILT_IN_ISINF)
1854 CASE_MATHFN (BUILT_IN_J0)
1855 CASE_MATHFN (BUILT_IN_J1)
1856 CASE_MATHFN (BUILT_IN_JN)
1857 CASE_MATHFN (BUILT_IN_LCEIL)
1858 CASE_MATHFN (BUILT_IN_LDEXP)
1859 CASE_MATHFN (BUILT_IN_LFLOOR)
1860 CASE_MATHFN (BUILT_IN_LGAMMA)
1861 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1862 CASE_MATHFN (BUILT_IN_LLCEIL)
1863 CASE_MATHFN (BUILT_IN_LLFLOOR)
1864 CASE_MATHFN (BUILT_IN_LLRINT)
1865 CASE_MATHFN (BUILT_IN_LLROUND)
1866 CASE_MATHFN (BUILT_IN_LOG)
1867 CASE_MATHFN (BUILT_IN_LOG10)
1868 CASE_MATHFN (BUILT_IN_LOG1P)
1869 CASE_MATHFN (BUILT_IN_LOG2)
1870 CASE_MATHFN (BUILT_IN_LOGB)
1871 CASE_MATHFN (BUILT_IN_LRINT)
1872 CASE_MATHFN (BUILT_IN_LROUND)
1873 CASE_MATHFN (BUILT_IN_MODF)
1874 CASE_MATHFN (BUILT_IN_NAN)
1875 CASE_MATHFN (BUILT_IN_NANS)
1876 CASE_MATHFN (BUILT_IN_NEARBYINT)
1877 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1878 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1879 CASE_MATHFN (BUILT_IN_POW)
1880 CASE_MATHFN (BUILT_IN_POWI)
1881 CASE_MATHFN (BUILT_IN_POW10)
1882 CASE_MATHFN (BUILT_IN_REMAINDER)
1883 CASE_MATHFN (BUILT_IN_REMQUO)
1884 CASE_MATHFN (BUILT_IN_RINT)
1885 CASE_MATHFN (BUILT_IN_ROUND)
1886 CASE_MATHFN (BUILT_IN_SCALB)
1887 CASE_MATHFN (BUILT_IN_SCALBLN)
1888 CASE_MATHFN (BUILT_IN_SCALBN)
1889 CASE_MATHFN (BUILT_IN_SIGNBIT)
1890 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1891 CASE_MATHFN (BUILT_IN_SIN)
1892 CASE_MATHFN (BUILT_IN_SINCOS)
1893 CASE_MATHFN (BUILT_IN_SINH)
1894 CASE_MATHFN (BUILT_IN_SQRT)
1895 CASE_MATHFN (BUILT_IN_TAN)
1896 CASE_MATHFN (BUILT_IN_TANH)
1897 CASE_MATHFN (BUILT_IN_TGAMMA)
1898 CASE_MATHFN (BUILT_IN_TRUNC)
1899 CASE_MATHFN (BUILT_IN_Y0)
1900 CASE_MATHFN (BUILT_IN_Y1)
1901 CASE_MATHFN (BUILT_IN_YN)
1907 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1908 return fn_arr[fcode];
1909 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1910 return fn_arr[fcodef];
1911 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1912 return fn_arr[fcodel];
1917 /* Like mathfn_built_in_1(), but always use the implicit array. */
1920 mathfn_built_in (tree type, enum built_in_function fn)
1922 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1925 /* If errno must be maintained, expand the RTL to check if the result,
1926 TARGET, of a built-in function call, EXP, is NaN, and if so set
1930 expand_errno_check (tree exp, rtx target)
1932 rtx lab = gen_label_rtx ();
1934 /* Test the result; if it is NaN, set errno=EDOM because
1935 the argument was not in the domain. */
1936 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1937 NULL_RTX, NULL_RTX, lab,
1938 /* The jump is very likely. */
1939 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1942 /* If this built-in doesn't throw an exception, set errno directly. */
1943 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1945 #ifdef GEN_ERRNO_RTX
1946 rtx errno_rtx = GEN_ERRNO_RTX;
1949 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1951 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1957 /* Make sure the library call isn't expanded as a tail call. */
1958 CALL_EXPR_TAILCALL (exp) = 0;
1960 /* We can't set errno=EDOM directly; let the library call do it.
1961 Pop the arguments right away in case the call gets deleted. */
1963 expand_call (exp, target, 0);
1968 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1969 Return NULL_RTX if a normal call should be emitted rather than expanding
1970 the function in-line. EXP is the expression that is a call to the builtin
1971 function; if convenient, the result should be placed in TARGET.
1972 SUBTARGET may be used as the target for computing one of EXP's operands. */
1975 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1977 optab builtin_optab;
1979 tree fndecl = get_callee_fndecl (exp);
1980 enum machine_mode mode;
1981 bool errno_set = false;
1984 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1987 arg = CALL_EXPR_ARG (exp, 0);
1989 switch (DECL_FUNCTION_CODE (fndecl))
1991 CASE_FLT_FN (BUILT_IN_SQRT):
1992 errno_set = ! tree_expr_nonnegative_p (arg);
1993 builtin_optab = sqrt_optab;
1995 CASE_FLT_FN (BUILT_IN_EXP):
1996 errno_set = true; builtin_optab = exp_optab; break;
1997 CASE_FLT_FN (BUILT_IN_EXP10):
1998 CASE_FLT_FN (BUILT_IN_POW10):
1999 errno_set = true; builtin_optab = exp10_optab; break;
2000 CASE_FLT_FN (BUILT_IN_EXP2):
2001 errno_set = true; builtin_optab = exp2_optab; break;
2002 CASE_FLT_FN (BUILT_IN_EXPM1):
2003 errno_set = true; builtin_optab = expm1_optab; break;
2004 CASE_FLT_FN (BUILT_IN_LOGB):
2005 errno_set = true; builtin_optab = logb_optab; break;
2006 CASE_FLT_FN (BUILT_IN_LOG):
2007 errno_set = true; builtin_optab = log_optab; break;
2008 CASE_FLT_FN (BUILT_IN_LOG10):
2009 errno_set = true; builtin_optab = log10_optab; break;
2010 CASE_FLT_FN (BUILT_IN_LOG2):
2011 errno_set = true; builtin_optab = log2_optab; break;
2012 CASE_FLT_FN (BUILT_IN_LOG1P):
2013 errno_set = true; builtin_optab = log1p_optab; break;
2014 CASE_FLT_FN (BUILT_IN_ASIN):
2015 builtin_optab = asin_optab; break;
2016 CASE_FLT_FN (BUILT_IN_ACOS):
2017 builtin_optab = acos_optab; break;
2018 CASE_FLT_FN (BUILT_IN_TAN):
2019 builtin_optab = tan_optab; break;
2020 CASE_FLT_FN (BUILT_IN_ATAN):
2021 builtin_optab = atan_optab; break;
2022 CASE_FLT_FN (BUILT_IN_FLOOR):
2023 builtin_optab = floor_optab; break;
2024 CASE_FLT_FN (BUILT_IN_CEIL):
2025 builtin_optab = ceil_optab; break;
2026 CASE_FLT_FN (BUILT_IN_TRUNC):
2027 builtin_optab = btrunc_optab; break;
2028 CASE_FLT_FN (BUILT_IN_ROUND):
2029 builtin_optab = round_optab; break;
2030 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2031 builtin_optab = nearbyint_optab;
2032 if (flag_trapping_math)
2034 /* Else fallthrough and expand as rint. */
2035 CASE_FLT_FN (BUILT_IN_RINT):
2036 builtin_optab = rint_optab; break;
2037 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2038 builtin_optab = significand_optab; break;
2043 /* Make a suitable register to place result in. */
2044 mode = TYPE_MODE (TREE_TYPE (exp));
2046 if (! flag_errno_math || ! HONOR_NANS (mode))
2049 /* Before working hard, check whether the instruction is available. */
2050 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2051 && (!errno_set || !optimize_insn_for_size_p ()))
2053 target = gen_reg_rtx (mode);
2055 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2056 need to expand the argument again. This way, we will not perform
2057 side-effects more the once. */
2058 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2060 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2064 /* Compute into TARGET.
2065 Set TARGET to wherever the result comes back. */
2066 target = expand_unop (mode, builtin_optab, op0, target, 0);
2071 expand_errno_check (exp, target);
2073 /* Output the entire sequence. */
2074 insns = get_insns ();
2080 /* If we were unable to expand via the builtin, stop the sequence
2081 (without outputting the insns) and call to the library function
2082 with the stabilized argument list. */
2086 return expand_call (exp, target, target == const0_rtx);
2089 /* Expand a call to the builtin binary math functions (pow and atan2).
2090 Return NULL_RTX if a normal call should be emitted rather than expanding the
2091 function in-line. EXP is the expression that is a call to the builtin
2092 function; if convenient, the result should be placed in TARGET.
2093 SUBTARGET may be used as the target for computing one of EXP's
2097 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2099 optab builtin_optab;
2100 rtx op0, op1, insns;
2101 int op1_type = REAL_TYPE;
2102 tree fndecl = get_callee_fndecl (exp);
2104 enum machine_mode mode;
2105 bool errno_set = true;
2107 switch (DECL_FUNCTION_CODE (fndecl))
2109 CASE_FLT_FN (BUILT_IN_SCALBN):
2110 CASE_FLT_FN (BUILT_IN_SCALBLN):
2111 CASE_FLT_FN (BUILT_IN_LDEXP):
2112 op1_type = INTEGER_TYPE;
2117 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2120 arg0 = CALL_EXPR_ARG (exp, 0);
2121 arg1 = CALL_EXPR_ARG (exp, 1);
2123 switch (DECL_FUNCTION_CODE (fndecl))
2125 CASE_FLT_FN (BUILT_IN_POW):
2126 builtin_optab = pow_optab; break;
2127 CASE_FLT_FN (BUILT_IN_ATAN2):
2128 builtin_optab = atan2_optab; break;
2129 CASE_FLT_FN (BUILT_IN_SCALB):
2130 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2132 builtin_optab = scalb_optab; break;
2133 CASE_FLT_FN (BUILT_IN_SCALBN):
2134 CASE_FLT_FN (BUILT_IN_SCALBLN):
2135 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2137 /* Fall through... */
2138 CASE_FLT_FN (BUILT_IN_LDEXP):
2139 builtin_optab = ldexp_optab; break;
2140 CASE_FLT_FN (BUILT_IN_FMOD):
2141 builtin_optab = fmod_optab; break;
2142 CASE_FLT_FN (BUILT_IN_REMAINDER):
2143 CASE_FLT_FN (BUILT_IN_DREM):
2144 builtin_optab = remainder_optab; break;
2149 /* Make a suitable register to place result in. */
2150 mode = TYPE_MODE (TREE_TYPE (exp));
2152 /* Before working hard, check whether the instruction is available. */
2153 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2156 target = gen_reg_rtx (mode);
2158 if (! flag_errno_math || ! HONOR_NANS (mode))
2161 if (errno_set && optimize_insn_for_size_p ())
2164 /* Always stabilize the argument list. */
2165 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2166 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2168 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2169 op1 = expand_normal (arg1);
2173 /* Compute into TARGET.
2174 Set TARGET to wherever the result comes back. */
2175 target = expand_binop (mode, builtin_optab, op0, op1,
2176 target, 0, OPTAB_DIRECT);
2178 /* If we were unable to expand via the builtin, stop the sequence
2179 (without outputting the insns) and call to the library function
2180 with the stabilized argument list. */
2184 return expand_call (exp, target, target == const0_rtx);
2188 expand_errno_check (exp, target);
2190 /* Output the entire sequence. */
2191 insns = get_insns ();
2198 /* Expand a call to the builtin trinary math functions (fma).
2199 Return NULL_RTX if a normal call should be emitted rather than expanding the
2200 function in-line. EXP is the expression that is a call to the builtin
2201 function; if convenient, the result should be placed in TARGET.
2202 SUBTARGET may be used as the target for computing one of EXP's
2206 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2208 optab builtin_optab;
2209 rtx op0, op1, op2, insns;
2210 tree fndecl = get_callee_fndecl (exp);
2211 tree arg0, arg1, arg2;
2212 enum machine_mode mode;
2214 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2217 arg0 = CALL_EXPR_ARG (exp, 0);
2218 arg1 = CALL_EXPR_ARG (exp, 1);
2219 arg2 = CALL_EXPR_ARG (exp, 2);
2221 switch (DECL_FUNCTION_CODE (fndecl))
2223 CASE_FLT_FN (BUILT_IN_FMA):
2224 builtin_optab = fma_optab; break;
2229 /* Make a suitable register to place result in. */
2230 mode = TYPE_MODE (TREE_TYPE (exp));
2232 /* Before working hard, check whether the instruction is available. */
2233 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2236 target = gen_reg_rtx (mode);
2238 /* Always stabilize the argument list. */
2239 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2240 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2241 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2243 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2244 op1 = expand_normal (arg1);
2245 op2 = expand_normal (arg2);
2249 /* Compute into TARGET.
2250 Set TARGET to wherever the result comes back. */
2251 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2254 /* If we were unable to expand via the builtin, stop the sequence
2255 (without outputting the insns) and call to the library function
2256 with the stabilized argument list. */
2260 return expand_call (exp, target, target == const0_rtx);
2263 /* Output the entire sequence. */
2264 insns = get_insns ();
2271 /* Expand a call to the builtin sin and cos math functions.
2272 Return NULL_RTX if a normal call should be emitted rather than expanding the
2273 function in-line. EXP is the expression that is a call to the builtin
2274 function; if convenient, the result should be placed in TARGET.
2275 SUBTARGET may be used as the target for computing one of EXP's
2279 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2281 optab builtin_optab;
2283 tree fndecl = get_callee_fndecl (exp);
2284 enum machine_mode mode;
2287 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2290 arg = CALL_EXPR_ARG (exp, 0);
2292 switch (DECL_FUNCTION_CODE (fndecl))
2294 CASE_FLT_FN (BUILT_IN_SIN):
2295 CASE_FLT_FN (BUILT_IN_COS):
2296 builtin_optab = sincos_optab; break;
2301 /* Make a suitable register to place result in. */
2302 mode = TYPE_MODE (TREE_TYPE (exp));
2304 /* Check if sincos insn is available, otherwise fallback
2305 to sin or cos insn. */
2306 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2307 switch (DECL_FUNCTION_CODE (fndecl))
2309 CASE_FLT_FN (BUILT_IN_SIN):
2310 builtin_optab = sin_optab; break;
2311 CASE_FLT_FN (BUILT_IN_COS):
2312 builtin_optab = cos_optab; break;
2317 /* Before working hard, check whether the instruction is available. */
2318 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2320 target = gen_reg_rtx (mode);
2322 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2323 need to expand the argument again. This way, we will not perform
2324 side-effects more the once. */
2325 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2327 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2331 /* Compute into TARGET.
2332 Set TARGET to wherever the result comes back. */
2333 if (builtin_optab == sincos_optab)
2337 switch (DECL_FUNCTION_CODE (fndecl))
2339 CASE_FLT_FN (BUILT_IN_SIN):
2340 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2342 CASE_FLT_FN (BUILT_IN_COS):
2343 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2348 gcc_assert (result);
2352 target = expand_unop (mode, builtin_optab, op0, target, 0);
2357 /* Output the entire sequence. */
2358 insns = get_insns ();
2364 /* If we were unable to expand via the builtin, stop the sequence
2365 (without outputting the insns) and call to the library function
2366 with the stabilized argument list. */
2370 target = expand_call (exp, target, target == const0_rtx);
2375 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2376 return an RTL instruction code that implements the functionality.
2377 If that isn't possible or available return CODE_FOR_nothing. */
2379 static enum insn_code
2380 interclass_mathfn_icode (tree arg, tree fndecl)
2382 bool errno_set = false;
2383 optab builtin_optab = 0;
2384 enum machine_mode mode;
2386 switch (DECL_FUNCTION_CODE (fndecl))
2388 CASE_FLT_FN (BUILT_IN_ILOGB):
2389 errno_set = true; builtin_optab = ilogb_optab; break;
2390 CASE_FLT_FN (BUILT_IN_ISINF):
2391 builtin_optab = isinf_optab; break;
2392 case BUILT_IN_ISNORMAL:
2393 case BUILT_IN_ISFINITE:
2394 CASE_FLT_FN (BUILT_IN_FINITE):
2395 case BUILT_IN_FINITED32:
2396 case BUILT_IN_FINITED64:
2397 case BUILT_IN_FINITED128:
2398 case BUILT_IN_ISINFD32:
2399 case BUILT_IN_ISINFD64:
2400 case BUILT_IN_ISINFD128:
2401 /* These builtins have no optabs (yet). */
2407 /* There's no easy way to detect the case we need to set EDOM. */
2408 if (flag_errno_math && errno_set)
2409 return CODE_FOR_nothing;
2411 /* Optab mode depends on the mode of the input argument. */
2412 mode = TYPE_MODE (TREE_TYPE (arg));
2415 return optab_handler (builtin_optab, mode);
2416 return CODE_FOR_nothing;
2419 /* Expand a call to one of the builtin math functions that operate on
2420 floating point argument and output an integer result (ilogb, isinf,
2422 Return 0 if a normal call should be emitted rather than expanding the
2423 function in-line. EXP is the expression that is a call to the builtin
2424 function; if convenient, the result should be placed in TARGET. */
2427 expand_builtin_interclass_mathfn (tree exp, rtx target)
2429 enum insn_code icode = CODE_FOR_nothing;
2431 tree fndecl = get_callee_fndecl (exp);
2432 enum machine_mode mode;
2435 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2438 arg = CALL_EXPR_ARG (exp, 0);
2439 icode = interclass_mathfn_icode (arg, fndecl);
2440 mode = TYPE_MODE (TREE_TYPE (arg));
2442 if (icode != CODE_FOR_nothing)
2444 struct expand_operand ops[1];
2445 rtx last = get_last_insn ();
2446 tree orig_arg = arg;
2448 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2449 need to expand the argument again. This way, we will not perform
2450 side-effects more the once. */
2451 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2453 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2455 if (mode != GET_MODE (op0))
2456 op0 = convert_to_mode (mode, op0, 0);
2458 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2459 if (maybe_legitimize_operands (icode, 0, 1, ops)
2460 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2461 return ops[0].value;
2463 delete_insns_since (last);
2464 CALL_EXPR_ARG (exp, 0) = orig_arg;
2470 /* Expand a call to the builtin sincos math function.
2471 Return NULL_RTX if a normal call should be emitted rather than expanding the
2472 function in-line. EXP is the expression that is a call to the builtin
2476 expand_builtin_sincos (tree exp)
2478 rtx op0, op1, op2, target1, target2;
2479 enum machine_mode mode;
2480 tree arg, sinp, cosp;
2482 location_t loc = EXPR_LOCATION (exp);
2483 tree alias_type, alias_off;
2485 if (!validate_arglist (exp, REAL_TYPE,
2486 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2489 arg = CALL_EXPR_ARG (exp, 0);
2490 sinp = CALL_EXPR_ARG (exp, 1);
2491 cosp = CALL_EXPR_ARG (exp, 2);
2493 /* Make a suitable register to place result in. */
2494 mode = TYPE_MODE (TREE_TYPE (arg));
2496 /* Check if sincos insn is available, otherwise emit the call. */
2497 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2500 target1 = gen_reg_rtx (mode);
2501 target2 = gen_reg_rtx (mode);
2503 op0 = expand_normal (arg);
2504 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2505 alias_off = build_int_cst (alias_type, 0);
2506 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2508 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2511 /* Compute into target1 and target2.
2512 Set TARGET to wherever the result comes back. */
2513 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2514 gcc_assert (result);
2516 /* Move target1 and target2 to the memory locations indicated
2518 emit_move_insn (op1, target1);
2519 emit_move_insn (op2, target2);
2524 /* Expand a call to the internal cexpi builtin to the sincos math function.
2525 EXP is the expression that is a call to the builtin function; if convenient,
2526 the result should be placed in TARGET. */
2529 expand_builtin_cexpi (tree exp, rtx target)
2531 tree fndecl = get_callee_fndecl (exp);
2533 enum machine_mode mode;
2535 location_t loc = EXPR_LOCATION (exp);
2537 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2540 arg = CALL_EXPR_ARG (exp, 0);
2541 type = TREE_TYPE (arg);
2542 mode = TYPE_MODE (TREE_TYPE (arg));
2544 /* Try expanding via a sincos optab, fall back to emitting a libcall
2545 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2546 is only generated from sincos, cexp or if we have either of them. */
2547 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2549 op1 = gen_reg_rtx (mode);
2550 op2 = gen_reg_rtx (mode);
2552 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2554 /* Compute into op1 and op2. */
2555 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2557 else if (TARGET_HAS_SINCOS)
2559 tree call, fn = NULL_TREE;
2563 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2564 fn = built_in_decls[BUILT_IN_SINCOSF];
2565 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2566 fn = built_in_decls[BUILT_IN_SINCOS];
2567 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2568 fn = built_in_decls[BUILT_IN_SINCOSL];
2572 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2573 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2574 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2575 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2576 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2577 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2579 /* Make sure not to fold the sincos call again. */
2580 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2581 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2582 call, 3, arg, top1, top2));
2586 tree call, fn = NULL_TREE, narg;
2587 tree ctype = build_complex_type (type);
2589 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2590 fn = built_in_decls[BUILT_IN_CEXPF];
2591 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2592 fn = built_in_decls[BUILT_IN_CEXP];
2593 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2594 fn = built_in_decls[BUILT_IN_CEXPL];
2598 /* If we don't have a decl for cexp create one. This is the
2599 friendliest fallback if the user calls __builtin_cexpi
2600 without full target C99 function support. */
2601 if (fn == NULL_TREE)
2604 const char *name = NULL;
2606 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2608 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2613 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2614 fn = build_fn_decl (name, fntype);
2617 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2618 build_real (type, dconst0), arg);
2620 /* Make sure not to fold the cexp call again. */
2621 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2622 return expand_expr (build_call_nary (ctype, call, 1, narg),
2623 target, VOIDmode, EXPAND_NORMAL);
2626 /* Now build the proper return type. */
2627 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2628 make_tree (TREE_TYPE (arg), op2),
2629 make_tree (TREE_TYPE (arg), op1)),
2630 target, VOIDmode, EXPAND_NORMAL);
2633 /* Conveniently construct a function call expression. FNDECL names the
2634 function to be called, N is the number of arguments, and the "..."
2635 parameters are the argument expressions. Unlike build_call_exr
2636 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2639 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2642 tree fntype = TREE_TYPE (fndecl);
2643 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2646 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2648 SET_EXPR_LOCATION (fn, loc);
2652 /* Expand a call to one of the builtin rounding functions gcc defines
2653 as an extension (lfloor and lceil). As these are gcc extensions we
2654 do not need to worry about setting errno to EDOM.
2655 If expanding via optab fails, lower expression to (int)(floor(x)).
2656 EXP is the expression that is a call to the builtin function;
2657 if convenient, the result should be placed in TARGET. */
2660 expand_builtin_int_roundingfn (tree exp, rtx target)
2662 convert_optab builtin_optab;
2663 rtx op0, insns, tmp;
2664 tree fndecl = get_callee_fndecl (exp);
2665 enum built_in_function fallback_fn;
2666 tree fallback_fndecl;
2667 enum machine_mode mode;
2670 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2673 arg = CALL_EXPR_ARG (exp, 0);
2675 switch (DECL_FUNCTION_CODE (fndecl))
2677 CASE_FLT_FN (BUILT_IN_LCEIL):
2678 CASE_FLT_FN (BUILT_IN_LLCEIL):
2679 builtin_optab = lceil_optab;
2680 fallback_fn = BUILT_IN_CEIL;
2683 CASE_FLT_FN (BUILT_IN_LFLOOR):
2684 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2685 builtin_optab = lfloor_optab;
2686 fallback_fn = BUILT_IN_FLOOR;
2693 /* Make a suitable register to place result in. */
2694 mode = TYPE_MODE (TREE_TYPE (exp));
2696 target = gen_reg_rtx (mode);
2698 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2699 need to expand the argument again. This way, we will not perform
2700 side-effects more the once. */
2701 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2703 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2707 /* Compute into TARGET. */
2708 if (expand_sfix_optab (target, op0, builtin_optab))
2710 /* Output the entire sequence. */
2711 insns = get_insns ();
2717 /* If we were unable to expand via the builtin, stop the sequence
2718 (without outputting the insns). */
2721 /* Fall back to floating point rounding optab. */
2722 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2724 /* For non-C99 targets we may end up without a fallback fndecl here
2725 if the user called __builtin_lfloor directly. In this case emit
2726 a call to the floor/ceil variants nevertheless. This should result
2727 in the best user experience for not full C99 targets. */
2728 if (fallback_fndecl == NULL_TREE)
2731 const char *name = NULL;
2733 switch (DECL_FUNCTION_CODE (fndecl))
2735 case BUILT_IN_LCEIL:
2736 case BUILT_IN_LLCEIL:
2739 case BUILT_IN_LCEILF:
2740 case BUILT_IN_LLCEILF:
2743 case BUILT_IN_LCEILL:
2744 case BUILT_IN_LLCEILL:
2747 case BUILT_IN_LFLOOR:
2748 case BUILT_IN_LLFLOOR:
2751 case BUILT_IN_LFLOORF:
2752 case BUILT_IN_LLFLOORF:
2755 case BUILT_IN_LFLOORL:
2756 case BUILT_IN_LLFLOORL:
2763 fntype = build_function_type_list (TREE_TYPE (arg),
2764 TREE_TYPE (arg), NULL_TREE);
2765 fallback_fndecl = build_fn_decl (name, fntype);
2768 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2770 tmp = expand_normal (exp);
2772 /* Truncate the result of floating point optab to integer
2773 via expand_fix (). */
2774 target = gen_reg_rtx (mode);
2775 expand_fix (target, tmp, 0);
2780 /* Expand a call to one of the builtin math functions doing integer
2782 Return 0 if a normal call should be emitted rather than expanding the
2783 function in-line. EXP is the expression that is a call to the builtin
2784 function; if convenient, the result should be placed in TARGET. */
2787 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2789 convert_optab builtin_optab;
2791 tree fndecl = get_callee_fndecl (exp);
2793 enum machine_mode mode;
2795 /* There's no easy way to detect the case we need to set EDOM. */
2796 if (flag_errno_math)
2799 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2802 arg = CALL_EXPR_ARG (exp, 0);
2804 switch (DECL_FUNCTION_CODE (fndecl))
2806 CASE_FLT_FN (BUILT_IN_LRINT):
2807 CASE_FLT_FN (BUILT_IN_LLRINT):
2808 builtin_optab = lrint_optab; break;
2809 CASE_FLT_FN (BUILT_IN_LROUND):
2810 CASE_FLT_FN (BUILT_IN_LLROUND):
2811 builtin_optab = lround_optab; break;
2816 /* Make a suitable register to place result in. */
2817 mode = TYPE_MODE (TREE_TYPE (exp));
2819 target = gen_reg_rtx (mode);
2821 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2822 need to expand the argument again. This way, we will not perform
2823 side-effects more the once. */
2824 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2826 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2830 if (expand_sfix_optab (target, op0, builtin_optab))
2832 /* Output the entire sequence. */
2833 insns = get_insns ();
2839 /* If we were unable to expand via the builtin, stop the sequence
2840 (without outputting the insns) and call to the library function
2841 with the stabilized argument list. */
2844 target = expand_call (exp, target, target == const0_rtx);
2849 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2850 a normal call should be emitted rather than expanding the function
2851 in-line. EXP is the expression that is a call to the builtin
2852 function; if convenient, the result should be placed in TARGET. */
2855 expand_builtin_powi (tree exp, rtx target)
2859 enum machine_mode mode;
2860 enum machine_mode mode2;
2862 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2865 arg0 = CALL_EXPR_ARG (exp, 0);
2866 arg1 = CALL_EXPR_ARG (exp, 1);
2867 mode = TYPE_MODE (TREE_TYPE (exp));
2869 /* Emit a libcall to libgcc. */
2871 /* Mode of the 2nd argument must match that of an int. */
2872 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2874 if (target == NULL_RTX)
2875 target = gen_reg_rtx (mode);
2877 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2878 if (GET_MODE (op0) != mode)
2879 op0 = convert_to_mode (mode, op0, 0);
2880 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2881 if (GET_MODE (op1) != mode2)
2882 op1 = convert_to_mode (mode2, op1, 0);
2884 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2885 target, LCT_CONST, mode, 2,
2886 op0, mode, op1, mode2);
2891 /* Expand expression EXP which is a call to the strlen builtin. Return
2892 NULL_RTX if we failed the caller should emit a normal call, otherwise
2893 try to get the result in TARGET, if convenient. */
2896 expand_builtin_strlen (tree exp, rtx target,
2897 enum machine_mode target_mode)
2899 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2903 struct expand_operand ops[4];
2906 tree src = CALL_EXPR_ARG (exp, 0);
2907 rtx src_reg, before_strlen;
2908 enum machine_mode insn_mode = target_mode;
2909 enum insn_code icode = CODE_FOR_nothing;
2912 /* If the length can be computed at compile-time, return it. */
2913 len = c_strlen (src, 0);
2915 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2917 /* If the length can be computed at compile-time and is constant
2918 integer, but there are side-effects in src, evaluate
2919 src for side-effects, then return len.
2920 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2921 can be optimized into: i++; x = 3; */
2922 len = c_strlen (src, 1);
2923 if (len && TREE_CODE (len) == INTEGER_CST)
2925 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2926 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2929 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2931 /* If SRC is not a pointer type, don't do this operation inline. */
2935 /* Bail out if we can't compute strlen in the right mode. */
2936 while (insn_mode != VOIDmode)
2938 icode = optab_handler (strlen_optab, insn_mode);
2939 if (icode != CODE_FOR_nothing)
2942 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2944 if (insn_mode == VOIDmode)
2947 /* Make a place to hold the source address. We will not expand
2948 the actual source until we are sure that the expansion will
2949 not fail -- there are trees that cannot be expanded twice. */
2950 src_reg = gen_reg_rtx (Pmode);
2952 /* Mark the beginning of the strlen sequence so we can emit the
2953 source operand later. */
2954 before_strlen = get_last_insn ();
2956 create_output_operand (&ops[0], target, insn_mode);
2957 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2958 create_integer_operand (&ops[2], 0);
2959 create_integer_operand (&ops[3], align);
2960 if (!maybe_expand_insn (icode, 4, ops))
2963 /* Now that we are assured of success, expand the source. */
2965 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2968 #ifdef POINTERS_EXTEND_UNSIGNED
2969 if (GET_MODE (pat) != Pmode)
2970 pat = convert_to_mode (Pmode, pat,
2971 POINTERS_EXTEND_UNSIGNED);
2973 emit_move_insn (src_reg, pat);
2979 emit_insn_after (pat, before_strlen);
2981 emit_insn_before (pat, get_insns ());
2983 /* Return the value in the proper mode for this function. */
2984 if (GET_MODE (ops[0].value) == target_mode)
2985 target = ops[0].value;
2986 else if (target != 0)
2987 convert_move (target, ops[0].value, 0);
2989 target = convert_to_mode (target_mode, ops[0].value, 0);
2995 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2996 bytes from constant string DATA + OFFSET and return it as target
3000 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3001 enum machine_mode mode)
3003 const char *str = (const char *) data;
3005 gcc_assert (offset >= 0
3006 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3007 <= strlen (str) + 1));
3009 return c_readstr (str + offset, mode);
3012 /* Expand a call EXP to the memcpy builtin.
3013 Return NULL_RTX if we failed, the caller should emit a normal call,
3014 otherwise try to get the result in TARGET, if convenient (and in
3015 mode MODE if that's convenient). */
3018 expand_builtin_memcpy (tree exp, rtx target)
3020 if (!validate_arglist (exp,
3021 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3025 tree dest = CALL_EXPR_ARG (exp, 0);
3026 tree src = CALL_EXPR_ARG (exp, 1);
3027 tree len = CALL_EXPR_ARG (exp, 2);
3028 const char *src_str;
3029 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3030 unsigned int dest_align
3031 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3032 rtx dest_mem, src_mem, dest_addr, len_rtx;
3033 HOST_WIDE_INT expected_size = -1;
3034 unsigned int expected_align = 0;
3036 /* If DEST is not a pointer type, call the normal function. */
3037 if (dest_align == 0)
3040 /* If either SRC is not a pointer type, don't do this
3041 operation in-line. */
3045 if (currently_expanding_gimple_stmt)
3046 stringop_block_profile (currently_expanding_gimple_stmt,
3047 &expected_align, &expected_size);
3049 if (expected_align < dest_align)
3050 expected_align = dest_align;
3051 dest_mem = get_memory_rtx (dest, len);
3052 set_mem_align (dest_mem, dest_align);
3053 len_rtx = expand_normal (len);
3054 src_str = c_getstr (src);
3056 /* If SRC is a string constant and block move would be done
3057 by pieces, we can avoid loading the string from memory
3058 and only stored the computed constants. */
3060 && CONST_INT_P (len_rtx)
3061 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3062 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3063 CONST_CAST (char *, src_str),
3066 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3067 builtin_memcpy_read_str,
3068 CONST_CAST (char *, src_str),
3069 dest_align, false, 0);
3070 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3071 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3075 src_mem = get_memory_rtx (src, len);
3076 set_mem_align (src_mem, src_align);
3078 /* Copy word part most expediently. */
3079 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3080 CALL_EXPR_TAILCALL (exp)
3081 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3082 expected_align, expected_size);
3086 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3087 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3093 /* Expand a call EXP to the mempcpy builtin.
3094 Return NULL_RTX if we failed; the caller should emit a normal call,
3095 otherwise try to get the result in TARGET, if convenient (and in
3096 mode MODE if that's convenient). If ENDP is 0 return the
3097 destination pointer, if ENDP is 1 return the end pointer ala
3098 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3102 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3104 if (!validate_arglist (exp,
3105 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3109 tree dest = CALL_EXPR_ARG (exp, 0);
3110 tree src = CALL_EXPR_ARG (exp, 1);
3111 tree len = CALL_EXPR_ARG (exp, 2);
3112 return expand_builtin_mempcpy_args (dest, src, len,
3113 target, mode, /*endp=*/ 1);
3117 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3118 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3119 so that this can also be called without constructing an actual CALL_EXPR.
3120 The other arguments and return value are the same as for
3121 expand_builtin_mempcpy. */
3124 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3125 rtx target, enum machine_mode mode, int endp)
3127 /* If return value is ignored, transform mempcpy into memcpy. */
3128 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3130 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3131 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3133 return expand_expr (result, target, mode, EXPAND_NORMAL);
3137 const char *src_str;
3138 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3139 unsigned int dest_align
3140 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3141 rtx dest_mem, src_mem, len_rtx;
3143 /* If either SRC or DEST is not a pointer type, don't do this
3144 operation in-line. */
3145 if (dest_align == 0 || src_align == 0)
3148 /* If LEN is not constant, call the normal function. */
3149 if (! host_integerp (len, 1))
3152 len_rtx = expand_normal (len);
3153 src_str = c_getstr (src);
3155 /* If SRC is a string constant and block move would be done
3156 by pieces, we can avoid loading the string from memory
3157 and only stored the computed constants. */
3159 && CONST_INT_P (len_rtx)
3160 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3161 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3162 CONST_CAST (char *, src_str),
3165 dest_mem = get_memory_rtx (dest, len);
3166 set_mem_align (dest_mem, dest_align);
3167 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3168 builtin_memcpy_read_str,
3169 CONST_CAST (char *, src_str),
3170 dest_align, false, endp);
3171 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3172 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3176 if (CONST_INT_P (len_rtx)
3177 && can_move_by_pieces (INTVAL (len_rtx),
3178 MIN (dest_align, src_align)))
3180 dest_mem = get_memory_rtx (dest, len);
3181 set_mem_align (dest_mem, dest_align);
3182 src_mem = get_memory_rtx (src, len);
3183 set_mem_align (src_mem, src_align);
3184 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3185 MIN (dest_align, src_align), endp);
3186 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3187 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3196 # define HAVE_movstr 0
3197 # define CODE_FOR_movstr CODE_FOR_nothing
3200 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3201 we failed, the caller should emit a normal call, otherwise try to
3202 get the result in TARGET, if convenient. If ENDP is 0 return the
3203 destination pointer, if ENDP is 1 return the end pointer ala
3204 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3208 expand_movstr (tree dest, tree src, rtx target, int endp)
3210 struct expand_operand ops[3];
3217 dest_mem = get_memory_rtx (dest, NULL);
3218 src_mem = get_memory_rtx (src, NULL);
3221 target = force_reg (Pmode, XEXP (dest_mem, 0));
3222 dest_mem = replace_equiv_address (dest_mem, target);
3225 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3226 create_fixed_operand (&ops[1], dest_mem);
3227 create_fixed_operand (&ops[2], src_mem);
3228 expand_insn (CODE_FOR_movstr, 3, ops);
3230 if (endp && target != const0_rtx)
3232 target = ops[0].value;
3233 /* movstr is supposed to set end to the address of the NUL
3234 terminator. If the caller requested a mempcpy-like return value,
3238 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3239 emit_move_insn (target, force_operand (tem, NULL_RTX));
3245 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3246 NULL_RTX if we failed the caller should emit a normal call, otherwise
3247 try to get the result in TARGET, if convenient (and in mode MODE if that's
3251 expand_builtin_strcpy (tree exp, rtx target)
3253 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3255 tree dest = CALL_EXPR_ARG (exp, 0);
3256 tree src = CALL_EXPR_ARG (exp, 1);
3257 return expand_builtin_strcpy_args (dest, src, target);
3262 /* Helper function to do the actual work for expand_builtin_strcpy. The
3263 arguments to the builtin_strcpy call DEST and SRC are broken out
3264 so that this can also be called without constructing an actual CALL_EXPR.
3265 The other arguments and return value are the same as for
3266 expand_builtin_strcpy. */
3269 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3271 return expand_movstr (dest, src, target, /*endp=*/0);
3274 /* Expand a call EXP to the stpcpy builtin.
3275 Return NULL_RTX if we failed the caller should emit a normal call,
3276 otherwise try to get the result in TARGET, if convenient (and in
3277 mode MODE if that's convenient). */
3280 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3283 location_t loc = EXPR_LOCATION (exp);
3285 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3288 dst = CALL_EXPR_ARG (exp, 0);
3289 src = CALL_EXPR_ARG (exp, 1);
3291 /* If return value is ignored, transform stpcpy into strcpy. */
3292 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3294 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3295 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3296 return expand_expr (result, target, mode, EXPAND_NORMAL);
3303 /* Ensure we get an actual string whose length can be evaluated at
3304 compile-time, not an expression containing a string. This is
3305 because the latter will potentially produce pessimized code
3306 when used to produce the return value. */
3307 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3308 return expand_movstr (dst, src, target, /*endp=*/2);
3310 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3311 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3312 target, mode, /*endp=*/2);
3317 if (TREE_CODE (len) == INTEGER_CST)
3319 rtx len_rtx = expand_normal (len);
3321 if (CONST_INT_P (len_rtx))
3323 ret = expand_builtin_strcpy_args (dst, src, target);
3329 if (mode != VOIDmode)
3330 target = gen_reg_rtx (mode);
3332 target = gen_reg_rtx (GET_MODE (ret));
3334 if (GET_MODE (target) != GET_MODE (ret))
3335 ret = gen_lowpart (GET_MODE (target), ret);
3337 ret = plus_constant (ret, INTVAL (len_rtx));
3338 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3346 return expand_movstr (dst, src, target, /*endp=*/2);
3350 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3351 bytes from constant string DATA + OFFSET and return it as target
3355 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3356 enum machine_mode mode)
3358 const char *str = (const char *) data;
3360 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3363 return c_readstr (str + offset, mode);
3366 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3367 NULL_RTX if we failed the caller should emit a normal call. */
3370 expand_builtin_strncpy (tree exp, rtx target)
3372 location_t loc = EXPR_LOCATION (exp);
3374 if (validate_arglist (exp,
3375 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3377 tree dest = CALL_EXPR_ARG (exp, 0);
3378 tree src = CALL_EXPR_ARG (exp, 1);
3379 tree len = CALL_EXPR_ARG (exp, 2);
3380 tree slen = c_strlen (src, 1);
3382 /* We must be passed a constant len and src parameter. */
3383 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3386 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3388 /* We're required to pad with trailing zeros if the requested
3389 len is greater than strlen(s2)+1. In that case try to
3390 use store_by_pieces, if it fails, punt. */
3391 if (tree_int_cst_lt (slen, len))
3393 unsigned int dest_align
3394 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3395 const char *p = c_getstr (src);
3398 if (!p || dest_align == 0 || !host_integerp (len, 1)
3399 || !can_store_by_pieces (tree_low_cst (len, 1),
3400 builtin_strncpy_read_str,
3401 CONST_CAST (char *, p),
3405 dest_mem = get_memory_rtx (dest, len);
3406 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3407 builtin_strncpy_read_str,
3408 CONST_CAST (char *, p), dest_align, false, 0);
3409 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3410 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3417 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3418 bytes from constant string DATA + OFFSET and return it as target
3422 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3423 enum machine_mode mode)
3425 const char *c = (const char *) data;
3426 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3428 memset (p, *c, GET_MODE_SIZE (mode));
3430 return c_readstr (p, mode);
3433 /* Callback routine for store_by_pieces. Return the RTL of a register
3434 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3435 char value given in the RTL register data. For example, if mode is
3436 4 bytes wide, return the RTL for 0x01010101*data. */
3439 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3440 enum machine_mode mode)
3446 size = GET_MODE_SIZE (mode);
3450 p = XALLOCAVEC (char, size);
3451 memset (p, 1, size);
3452 coeff = c_readstr (p, mode);
3454 target = convert_to_mode (mode, (rtx) data, 1);
3455 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3456 return force_reg (mode, target);
3459 /* Expand expression EXP, which is a call to the memset builtin. Return
3460 NULL_RTX if we failed the caller should emit a normal call, otherwise
3461 try to get the result in TARGET, if convenient (and in mode MODE if that's
3465 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3467 if (!validate_arglist (exp,
3468 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3472 tree dest = CALL_EXPR_ARG (exp, 0);
3473 tree val = CALL_EXPR_ARG (exp, 1);
3474 tree len = CALL_EXPR_ARG (exp, 2);
3475 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3479 /* Helper function to do the actual work for expand_builtin_memset. The
3480 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3481 so that this can also be called without constructing an actual CALL_EXPR.
3482 The other arguments and return value are the same as for
3483 expand_builtin_memset. */
3486 expand_builtin_memset_args (tree dest, tree val, tree len,
3487 rtx target, enum machine_mode mode, tree orig_exp)
3490 enum built_in_function fcode;
3491 enum machine_mode val_mode;
3493 unsigned int dest_align;
3494 rtx dest_mem, dest_addr, len_rtx;
3495 HOST_WIDE_INT expected_size = -1;
3496 unsigned int expected_align = 0;
3498 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3500 /* If DEST is not a pointer type, don't do this operation in-line. */
3501 if (dest_align == 0)
3504 if (currently_expanding_gimple_stmt)
3505 stringop_block_profile (currently_expanding_gimple_stmt,
3506 &expected_align, &expected_size);
3508 if (expected_align < dest_align)
3509 expected_align = dest_align;
3511 /* If the LEN parameter is zero, return DEST. */
3512 if (integer_zerop (len))
3514 /* Evaluate and ignore VAL in case it has side-effects. */
3515 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3516 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3519 /* Stabilize the arguments in case we fail. */
3520 dest = builtin_save_expr (dest);
3521 val = builtin_save_expr (val);
3522 len = builtin_save_expr (len);
3524 len_rtx = expand_normal (len);
3525 dest_mem = get_memory_rtx (dest, len);
3526 val_mode = TYPE_MODE (unsigned_char_type_node);
3528 if (TREE_CODE (val) != INTEGER_CST)
3532 val_rtx = expand_normal (val);
3533 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3535 /* Assume that we can memset by pieces if we can store
3536 * the coefficients by pieces (in the required modes).
3537 * We can't pass builtin_memset_gen_str as that emits RTL. */
3539 if (host_integerp (len, 1)
3540 && can_store_by_pieces (tree_low_cst (len, 1),
3541 builtin_memset_read_str, &c, dest_align,
3544 val_rtx = force_reg (val_mode, val_rtx);
3545 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3546 builtin_memset_gen_str, val_rtx, dest_align,
3549 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3550 dest_align, expected_align,
3554 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3555 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3559 if (target_char_cast (val, &c))
3564 if (host_integerp (len, 1)
3565 && can_store_by_pieces (tree_low_cst (len, 1),
3566 builtin_memset_read_str, &c, dest_align,
3568 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3569 builtin_memset_read_str, &c, dest_align, true, 0);
3570 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3571 gen_int_mode (c, val_mode),
3572 dest_align, expected_align,
3576 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3577 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3581 set_mem_align (dest_mem, dest_align);
3582 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3583 CALL_EXPR_TAILCALL (orig_exp)
3584 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3585 expected_align, expected_size);
3589 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3590 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3596 fndecl = get_callee_fndecl (orig_exp);
3597 fcode = DECL_FUNCTION_CODE (fndecl);
3598 if (fcode == BUILT_IN_MEMSET)
3599 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3601 else if (fcode == BUILT_IN_BZERO)
3602 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3606 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3607 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3608 return expand_call (fn, target, target == const0_rtx);
3611 /* Expand expression EXP, which is a call to the bzero builtin. Return
3612 NULL_RTX if we failed the caller should emit a normal call. */
3615 expand_builtin_bzero (tree exp)
3618 location_t loc = EXPR_LOCATION (exp);
3620 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3623 dest = CALL_EXPR_ARG (exp, 0);
3624 size = CALL_EXPR_ARG (exp, 1);
3626 /* New argument list transforming bzero(ptr x, int y) to
3627 memset(ptr x, int 0, size_t y). This is done this way
3628 so that if it isn't expanded inline, we fallback to
3629 calling bzero instead of memset. */
3631 return expand_builtin_memset_args (dest, integer_zero_node,
3632 fold_convert_loc (loc, sizetype, size),
3633 const0_rtx, VOIDmode, exp);
3636 /* Expand expression EXP, which is a call to the memcmp built-in function.
3637 Return NULL_RTX if we failed and the
3638 caller should emit a normal call, otherwise try to get the result in
3639 TARGET, if convenient (and in mode MODE, if that's convenient). */
3642 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3643 ATTRIBUTE_UNUSED enum machine_mode mode)
3645 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3647 if (!validate_arglist (exp,
3648 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3651 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3653 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3656 tree arg1 = CALL_EXPR_ARG (exp, 0);
3657 tree arg2 = CALL_EXPR_ARG (exp, 1);
3658 tree len = CALL_EXPR_ARG (exp, 2);
3660 unsigned int arg1_align
3661 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3662 unsigned int arg2_align
3663 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3664 enum machine_mode insn_mode;
3666 #ifdef HAVE_cmpmemsi
3668 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3671 #ifdef HAVE_cmpstrnsi
3673 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3678 /* If we don't have POINTER_TYPE, call the function. */
3679 if (arg1_align == 0 || arg2_align == 0)
3682 /* Make a place to write the result of the instruction. */
3685 && REG_P (result) && GET_MODE (result) == insn_mode
3686 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3687 result = gen_reg_rtx (insn_mode);
3689 arg1_rtx = get_memory_rtx (arg1, len);
3690 arg2_rtx = get_memory_rtx (arg2, len);
3691 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3693 /* Set MEM_SIZE as appropriate. */
3694 if (CONST_INT_P (arg3_rtx))
3696 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3697 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3700 #ifdef HAVE_cmpmemsi
3702 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3703 GEN_INT (MIN (arg1_align, arg2_align)));
3706 #ifdef HAVE_cmpstrnsi
3708 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3709 GEN_INT (MIN (arg1_align, arg2_align)));
3717 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3718 TYPE_MODE (integer_type_node), 3,
3719 XEXP (arg1_rtx, 0), Pmode,
3720 XEXP (arg2_rtx, 0), Pmode,
3721 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3722 TYPE_UNSIGNED (sizetype)),
3723 TYPE_MODE (sizetype));
3725 /* Return the value in the proper mode for this function. */
3726 mode = TYPE_MODE (TREE_TYPE (exp));
3727 if (GET_MODE (result) == mode)
3729 else if (target != 0)
3731 convert_move (target, result, 0);
3735 return convert_to_mode (mode, result, 0);
3742 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3743 if we failed the caller should emit a normal call, otherwise try to get
3744 the result in TARGET, if convenient. */
3747 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3749 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3752 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3753 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3754 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3756 rtx arg1_rtx, arg2_rtx;
3757 rtx result, insn = NULL_RTX;
3759 tree arg1 = CALL_EXPR_ARG (exp, 0);
3760 tree arg2 = CALL_EXPR_ARG (exp, 1);
3762 unsigned int arg1_align
3763 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3764 unsigned int arg2_align
3765 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3767 /* If we don't have POINTER_TYPE, call the function. */
3768 if (arg1_align == 0 || arg2_align == 0)
3771 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3772 arg1 = builtin_save_expr (arg1);
3773 arg2 = builtin_save_expr (arg2);
3775 arg1_rtx = get_memory_rtx (arg1, NULL);
3776 arg2_rtx = get_memory_rtx (arg2, NULL);
3778 #ifdef HAVE_cmpstrsi
3779 /* Try to call cmpstrsi. */
3782 enum machine_mode insn_mode
3783 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3785 /* Make a place to write the result of the instruction. */
3788 && REG_P (result) && GET_MODE (result) == insn_mode
3789 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3790 result = gen_reg_rtx (insn_mode);
3792 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3793 GEN_INT (MIN (arg1_align, arg2_align)));
3796 #ifdef HAVE_cmpstrnsi
3797 /* Try to determine at least one length and call cmpstrnsi. */
3798 if (!insn && HAVE_cmpstrnsi)
3803 enum machine_mode insn_mode
3804 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3805 tree len1 = c_strlen (arg1, 1);
3806 tree len2 = c_strlen (arg2, 1);
3809 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3811 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3813 /* If we don't have a constant length for the first, use the length
3814 of the second, if we know it. We don't require a constant for
3815 this case; some cost analysis could be done if both are available
3816 but neither is constant. For now, assume they're equally cheap,
3817 unless one has side effects. If both strings have constant lengths,
3824 else if (TREE_SIDE_EFFECTS (len1))
3826 else if (TREE_SIDE_EFFECTS (len2))
3828 else if (TREE_CODE (len1) != INTEGER_CST)
3830 else if (TREE_CODE (len2) != INTEGER_CST)
3832 else if (tree_int_cst_lt (len1, len2))
3837 /* If both arguments have side effects, we cannot optimize. */
3838 if (!len || TREE_SIDE_EFFECTS (len))
3841 arg3_rtx = expand_normal (len);
3843 /* Make a place to write the result of the instruction. */
3846 && REG_P (result) && GET_MODE (result) == insn_mode
3847 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3848 result = gen_reg_rtx (insn_mode);
3850 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3851 GEN_INT (MIN (arg1_align, arg2_align)));
3857 enum machine_mode mode;
3860 /* Return the value in the proper mode for this function. */
3861 mode = TYPE_MODE (TREE_TYPE (exp));
3862 if (GET_MODE (result) == mode)
3865 return convert_to_mode (mode, result, 0);
3866 convert_move (target, result, 0);
3870 /* Expand the library call ourselves using a stabilized argument
3871 list to avoid re-evaluating the function's arguments twice. */
3872 #ifdef HAVE_cmpstrnsi
3875 fndecl = get_callee_fndecl (exp);
3876 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3877 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3878 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3879 return expand_call (fn, target, target == const0_rtx);
3885 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3886 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3887 the result in TARGET, if convenient. */
3890 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3891 ATTRIBUTE_UNUSED enum machine_mode mode)
3893 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3895 if (!validate_arglist (exp,
3896 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3899 /* If c_strlen can determine an expression for one of the string
3900 lengths, and it doesn't have side effects, then emit cmpstrnsi
3901 using length MIN(strlen(string)+1, arg3). */
3902 #ifdef HAVE_cmpstrnsi
3905 tree len, len1, len2;
3906 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3909 tree arg1 = CALL_EXPR_ARG (exp, 0);
3910 tree arg2 = CALL_EXPR_ARG (exp, 1);
3911 tree arg3 = CALL_EXPR_ARG (exp, 2);
3913 unsigned int arg1_align
3914 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3915 unsigned int arg2_align
3916 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3917 enum machine_mode insn_mode
3918 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3920 len1 = c_strlen (arg1, 1);
3921 len2 = c_strlen (arg2, 1);
3924 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3926 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3928 /* If we don't have a constant length for the first, use the length
3929 of the second, if we know it. We don't require a constant for
3930 this case; some cost analysis could be done if both are available
3931 but neither is constant. For now, assume they're equally cheap,
3932 unless one has side effects. If both strings have constant lengths,
3939 else if (TREE_SIDE_EFFECTS (len1))
3941 else if (TREE_SIDE_EFFECTS (len2))
3943 else if (TREE_CODE (len1) != INTEGER_CST)
3945 else if (TREE_CODE (len2) != INTEGER_CST)
3947 else if (tree_int_cst_lt (len1, len2))
3952 /* If both arguments have side effects, we cannot optimize. */
3953 if (!len || TREE_SIDE_EFFECTS (len))
3956 /* The actual new length parameter is MIN(len,arg3). */
3957 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3958 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3960 /* If we don't have POINTER_TYPE, call the function. */
3961 if (arg1_align == 0 || arg2_align == 0)
3964 /* Make a place to write the result of the instruction. */
3967 && REG_P (result) && GET_MODE (result) == insn_mode
3968 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3969 result = gen_reg_rtx (insn_mode);
3971 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3972 arg1 = builtin_save_expr (arg1);
3973 arg2 = builtin_save_expr (arg2);
3974 len = builtin_save_expr (len);
3976 arg1_rtx = get_memory_rtx (arg1, len);
3977 arg2_rtx = get_memory_rtx (arg2, len);
3978 arg3_rtx = expand_normal (len);
3979 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3980 GEN_INT (MIN (arg1_align, arg2_align)));
3985 /* Return the value in the proper mode for this function. */
3986 mode = TYPE_MODE (TREE_TYPE (exp));
3987 if (GET_MODE (result) == mode)
3990 return convert_to_mode (mode, result, 0);
3991 convert_move (target, result, 0);
3995 /* Expand the library call ourselves using a stabilized argument
3996 list to avoid re-evaluating the function's arguments twice. */
3997 fndecl = get_callee_fndecl (exp);
3998 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4000 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4001 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4002 return expand_call (fn, target, target == const0_rtx);
4008 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4009 if that's convenient. */
4012 expand_builtin_saveregs (void)
4016 /* Don't do __builtin_saveregs more than once in a function.
4017 Save the result of the first call and reuse it. */
4018 if (saveregs_value != 0)
4019 return saveregs_value;
4021 /* When this function is called, it means that registers must be
4022 saved on entry to this function. So we migrate the call to the
4023 first insn of this function. */
4027 /* Do whatever the machine needs done in this case. */
4028 val = targetm.calls.expand_builtin_saveregs ();
4033 saveregs_value = val;
4035 /* Put the insns after the NOTE that starts the function. If this
4036 is inside a start_sequence, make the outer-level insn chain current, so
4037 the code is placed at the start of the function. */
4038 push_topmost_sequence ();
4039 emit_insn_after (seq, entry_of_function ());
4040 pop_topmost_sequence ();
4045 /* Expand a call to __builtin_next_arg. */
4048 expand_builtin_next_arg (void)
4050 /* Checking arguments is already done in fold_builtin_next_arg
4051 that must be called before this function. */
4052 return expand_binop (ptr_mode, add_optab,
4053 crtl->args.internal_arg_pointer,
4054 crtl->args.arg_offset_rtx,
4055 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4058 /* Make it easier for the backends by protecting the valist argument
4059 from multiple evaluations. */
4062 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4064 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4066 /* The current way of determining the type of valist is completely
4067 bogus. We should have the information on the va builtin instead. */
4069 vatype = targetm.fn_abi_va_list (cfun->decl);
4071 if (TREE_CODE (vatype) == ARRAY_TYPE)
4073 if (TREE_SIDE_EFFECTS (valist))
4074 valist = save_expr (valist);
4076 /* For this case, the backends will be expecting a pointer to
4077 vatype, but it's possible we've actually been given an array
4078 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4080 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4082 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4083 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4088 tree pt = build_pointer_type (vatype);
4092 if (! TREE_SIDE_EFFECTS (valist))
4095 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4096 TREE_SIDE_EFFECTS (valist) = 1;
4099 if (TREE_SIDE_EFFECTS (valist))
4100 valist = save_expr (valist);
4101 valist = fold_build2_loc (loc, MEM_REF,
4102 vatype, valist, build_int_cst (pt, 0));
4108 /* The "standard" definition of va_list is void*. */
4111 std_build_builtin_va_list (void)
4113 return ptr_type_node;
4116 /* The "standard" abi va_list is va_list_type_node. */
4119 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4121 return va_list_type_node;
4124 /* The "standard" type of va_list is va_list_type_node. */
4127 std_canonical_va_list_type (tree type)
4131 if (INDIRECT_REF_P (type))
4132 type = TREE_TYPE (type);
4133 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4134 type = TREE_TYPE (type);
4135 wtype = va_list_type_node;
4137 /* Treat structure va_list types. */
4138 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4139 htype = TREE_TYPE (htype);
4140 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4142 /* If va_list is an array type, the argument may have decayed
4143 to a pointer type, e.g. by being passed to another function.
4144 In that case, unwrap both types so that we can compare the
4145 underlying records. */
4146 if (TREE_CODE (htype) == ARRAY_TYPE
4147 || POINTER_TYPE_P (htype))
4149 wtype = TREE_TYPE (wtype);
4150 htype = TREE_TYPE (htype);
4153 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4154 return va_list_type_node;
4159 /* The "standard" implementation of va_start: just assign `nextarg' to
4163 std_expand_builtin_va_start (tree valist, rtx nextarg)
4165 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4166 convert_move (va_r, nextarg, 0);
4169 /* Expand EXP, a call to __builtin_va_start. */
4172 expand_builtin_va_start (tree exp)
4176 location_t loc = EXPR_LOCATION (exp);
4178 if (call_expr_nargs (exp) < 2)
4180 error_at (loc, "too few arguments to function %<va_start%>");
4184 if (fold_builtin_next_arg (exp, true))
4187 nextarg = expand_builtin_next_arg ();
4188 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4190 if (targetm.expand_builtin_va_start)
4191 targetm.expand_builtin_va_start (valist, nextarg);
4193 std_expand_builtin_va_start (valist, nextarg);
4198 /* The "standard" implementation of va_arg: read the value from the
4199 current (padded) address and increment by the (padded) size. */
4202 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4205 tree addr, t, type_size, rounded_size, valist_tmp;
4206 unsigned HOST_WIDE_INT align, boundary;
4209 #ifdef ARGS_GROW_DOWNWARD
4210 /* All of the alignment and movement below is for args-grow-up machines.
4211 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4212 implement their own specialized gimplify_va_arg_expr routines. */
4216 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4218 type = build_pointer_type (type);
4220 align = PARM_BOUNDARY / BITS_PER_UNIT;
4221 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4223 /* When we align parameter on stack for caller, if the parameter
4224 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4225 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4226 here with caller. */
4227 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4228 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4230 boundary /= BITS_PER_UNIT;
4232 /* Hoist the valist value into a temporary for the moment. */
4233 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4235 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4236 requires greater alignment, we must perform dynamic alignment. */
4237 if (boundary > align
4238 && !integer_zerop (TYPE_SIZE (type)))
4240 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4241 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4242 gimplify_and_add (t, pre_p);
4244 t = fold_convert (sizetype, valist_tmp);
4245 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4246 fold_convert (TREE_TYPE (valist),
4247 fold_build2 (BIT_AND_EXPR, sizetype, t,
4248 size_int (-boundary))));
4249 gimplify_and_add (t, pre_p);
4254 /* If the actual alignment is less than the alignment of the type,
4255 adjust the type accordingly so that we don't assume strict alignment
4256 when dereferencing the pointer. */
4257 boundary *= BITS_PER_UNIT;
4258 if (boundary < TYPE_ALIGN (type))
4260 type = build_variant_type_copy (type);
4261 TYPE_ALIGN (type) = boundary;
4264 /* Compute the rounded size of the type. */
4265 type_size = size_in_bytes (type);
4266 rounded_size = round_up (type_size, align);
4268 /* Reduce rounded_size so it's sharable with the postqueue. */
4269 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4273 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4275 /* Small args are padded downward. */
4276 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4277 rounded_size, size_int (align));
4278 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4279 size_binop (MINUS_EXPR, rounded_size, type_size));
4280 addr = fold_build_pointer_plus (addr, t);
4283 /* Compute new value for AP. */
4284 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4285 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4286 gimplify_and_add (t, pre_p);
4288 addr = fold_convert (build_pointer_type (type), addr);
4291 addr = build_va_arg_indirect_ref (addr);
4293 return build_va_arg_indirect_ref (addr);
4296 /* Build an indirect-ref expression over the given TREE, which represents a
4297 piece of a va_arg() expansion. */
4299 build_va_arg_indirect_ref (tree addr)
4301 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4303 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4309 /* Return a dummy expression of type TYPE in order to keep going after an
4313 dummy_object (tree type)
4315 tree t = build_int_cst (build_pointer_type (type), 0);
4316 return build2 (MEM_REF, type, t, t);
4319 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4320 builtin function, but a very special sort of operator. */
4322 enum gimplify_status
4323 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4325 tree promoted_type, have_va_type;
4326 tree valist = TREE_OPERAND (*expr_p, 0);
4327 tree type = TREE_TYPE (*expr_p);
4329 location_t loc = EXPR_LOCATION (*expr_p);
4331 /* Verify that valist is of the proper type. */
4332 have_va_type = TREE_TYPE (valist);
4333 if (have_va_type == error_mark_node)
4335 have_va_type = targetm.canonical_va_list_type (have_va_type);
4337 if (have_va_type == NULL_TREE)
4339 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4343 /* Generate a diagnostic for requesting data of a type that cannot
4344 be passed through `...' due to type promotion at the call site. */
4345 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4348 static bool gave_help;
4351 /* Unfortunately, this is merely undefined, rather than a constraint
4352 violation, so we cannot make this an error. If this call is never
4353 executed, the program is still strictly conforming. */
4354 warned = warning_at (loc, 0,
4355 "%qT is promoted to %qT when passed through %<...%>",
4356 type, promoted_type);
4357 if (!gave_help && warned)
4360 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4361 promoted_type, type);
4364 /* We can, however, treat "undefined" any way we please.
4365 Call abort to encourage the user to fix the program. */
4367 inform (loc, "if this code is reached, the program will abort");
4368 /* Before the abort, allow the evaluation of the va_list
4369 expression to exit or longjmp. */
4370 gimplify_and_add (valist, pre_p);
4371 t = build_call_expr_loc (loc,
4372 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4373 gimplify_and_add (t, pre_p);
4375 /* This is dead code, but go ahead and finish so that the
4376 mode of the result comes out right. */
4377 *expr_p = dummy_object (type);
4382 /* Make it easier for the backends by protecting the valist argument
4383 from multiple evaluations. */
4384 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4386 /* For this case, the backends will be expecting a pointer to
4387 TREE_TYPE (abi), but it's possible we've
4388 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4390 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4392 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4393 valist = fold_convert_loc (loc, p1,
4394 build_fold_addr_expr_loc (loc, valist));
4397 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4400 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4402 if (!targetm.gimplify_va_arg_expr)
4403 /* FIXME: Once most targets are converted we should merely
4404 assert this is non-null. */
4407 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4412 /* Expand EXP, a call to __builtin_va_end. */
4415 expand_builtin_va_end (tree exp)
4417 tree valist = CALL_EXPR_ARG (exp, 0);
4419 /* Evaluate for side effects, if needed. I hate macros that don't
4421 if (TREE_SIDE_EFFECTS (valist))
4422 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4427 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4428 builtin rather than just as an assignment in stdarg.h because of the
4429 nastiness of array-type va_list types. */
4432 expand_builtin_va_copy (tree exp)
4435 location_t loc = EXPR_LOCATION (exp);
4437 dst = CALL_EXPR_ARG (exp, 0);
4438 src = CALL_EXPR_ARG (exp, 1);
4440 dst = stabilize_va_list_loc (loc, dst, 1);
4441 src = stabilize_va_list_loc (loc, src, 0);
4443 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4445 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4447 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4448 TREE_SIDE_EFFECTS (t) = 1;
4449 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4453 rtx dstb, srcb, size;
4455 /* Evaluate to pointers. */
4456 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4457 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4458 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4459 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4461 dstb = convert_memory_address (Pmode, dstb);
4462 srcb = convert_memory_address (Pmode, srcb);
4464 /* "Dereference" to BLKmode memories. */
4465 dstb = gen_rtx_MEM (BLKmode, dstb);
4466 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4467 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4468 srcb = gen_rtx_MEM (BLKmode, srcb);
4469 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4470 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4473 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4479 /* Expand a call to one of the builtin functions __builtin_frame_address or
4480 __builtin_return_address. */
4483 expand_builtin_frame_address (tree fndecl, tree exp)
4485 /* The argument must be a nonnegative integer constant.
4486 It counts the number of frames to scan up the stack.
4487 The value is the return address saved in that frame. */
4488 if (call_expr_nargs (exp) == 0)
4489 /* Warning about missing arg was already issued. */
4491 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4493 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4494 error ("invalid argument to %<__builtin_frame_address%>");
4496 error ("invalid argument to %<__builtin_return_address%>");
4502 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4503 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4505 /* Some ports cannot access arbitrary stack frames. */
4508 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4509 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4511 warning (0, "unsupported argument to %<__builtin_return_address%>");
4515 /* For __builtin_frame_address, return what we've got. */
4516 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4520 && ! CONSTANT_P (tem))
4521 tem = copy_to_mode_reg (Pmode, tem);
4526 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4527 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4528 is the same as for allocate_dynamic_stack_space. */
4531 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4536 /* Emit normal call if marked not-inlineable. */
4537 if (CALL_CANNOT_INLINE_P (exp))
4540 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4543 /* Compute the argument. */
4544 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4546 /* Allocate the desired space. */
4547 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
4549 result = convert_memory_address (ptr_mode, result);
4554 /* Expand a call to a bswap builtin with argument ARG0. MODE
4555 is the mode to expand with. */
4558 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4560 enum machine_mode mode;
4564 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4567 arg = CALL_EXPR_ARG (exp, 0);
4568 mode = TYPE_MODE (TREE_TYPE (arg));
4569 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4571 target = expand_unop (mode, bswap_optab, op0, target, 1);
4573 gcc_assert (target);
4575 return convert_to_mode (mode, target, 0);
4578 /* Expand a call to a unary builtin in EXP.
4579 Return NULL_RTX if a normal call should be emitted rather than expanding the
4580 function in-line. If convenient, the result should be placed in TARGET.
4581 SUBTARGET may be used as the target for computing one of EXP's operands. */
4584 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4585 rtx subtarget, optab op_optab)
4589 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4592 /* Compute the argument. */
4593 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4595 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4596 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4597 VOIDmode, EXPAND_NORMAL);
4598 /* Compute op, into TARGET if possible.
4599 Set TARGET to wherever the result comes back. */
4600 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4601 op_optab, op0, target, op_optab != clrsb_optab);
4602 gcc_assert (target);
4604 return convert_to_mode (target_mode, target, 0);
4607 /* Expand a call to __builtin_expect. We just return our argument
4608 as the builtin_expect semantic should've been already executed by
4609 tree branch prediction pass. */
4612 expand_builtin_expect (tree exp, rtx target)
4616 if (call_expr_nargs (exp) < 2)
4618 arg = CALL_EXPR_ARG (exp, 0);
4620 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4621 /* When guessing was done, the hints should be already stripped away. */
4622 gcc_assert (!flag_guess_branch_prob
4623 || optimize == 0 || seen_error ());
4627 /* Expand a call to __builtin_assume_aligned. We just return our first
4628 argument as the builtin_assume_aligned semantic should've been already
4632 expand_builtin_assume_aligned (tree exp, rtx target)
4634 if (call_expr_nargs (exp) < 2)
4636 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4638 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4639 && (call_expr_nargs (exp) < 3
4640 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4645 expand_builtin_trap (void)
4649 emit_insn (gen_trap ());
4652 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4656 /* Expand a call to __builtin_unreachable. We do nothing except emit
4657 a barrier saying that control flow will not pass here.
4659 It is the responsibility of the program being compiled to ensure
4660 that control flow does never reach __builtin_unreachable. */
4662 expand_builtin_unreachable (void)
4667 /* Expand EXP, a call to fabs, fabsf or fabsl.
4668 Return NULL_RTX if a normal call should be emitted rather than expanding
4669 the function inline. If convenient, the result should be placed
4670 in TARGET. SUBTARGET may be used as the target for computing
4674 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4676 enum machine_mode mode;
4680 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4683 arg = CALL_EXPR_ARG (exp, 0);
4684 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4685 mode = TYPE_MODE (TREE_TYPE (arg));
4686 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4687 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4690 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4691 Return NULL is a normal call should be emitted rather than expanding the
4692 function inline. If convenient, the result should be placed in TARGET.
4693 SUBTARGET may be used as the target for computing the operand. */
4696 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4701 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4704 arg = CALL_EXPR_ARG (exp, 0);
4705 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4707 arg = CALL_EXPR_ARG (exp, 1);
4708 op1 = expand_normal (arg);
4710 return expand_copysign (op0, op1, target);
4713 /* Create a new constant string literal and return a char* pointer to it.
4714 The STRING_CST value is the LEN characters at STR. */
4716 build_string_literal (int len, const char *str)
4718 tree t, elem, index, type;
4720 t = build_string (len, str);
4721 elem = build_type_variant (char_type_node, 1, 0);
4722 index = build_index_type (size_int (len - 1));
4723 type = build_array_type (elem, index);
4724 TREE_TYPE (t) = type;
4725 TREE_CONSTANT (t) = 1;
4726 TREE_READONLY (t) = 1;
4727 TREE_STATIC (t) = 1;
4729 type = build_pointer_type (elem);
4730 t = build1 (ADDR_EXPR, type,
4731 build4 (ARRAY_REF, elem,
4732 t, integer_zero_node, NULL_TREE, NULL_TREE));
4736 /* Expand a call to __builtin___clear_cache. */
4739 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4741 #ifndef HAVE_clear_cache
4742 #ifdef CLEAR_INSN_CACHE
4743 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4744 does something. Just do the default expansion to a call to
4748 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4749 does nothing. There is no need to call it. Do nothing. */
4751 #endif /* CLEAR_INSN_CACHE */
4753 /* We have a "clear_cache" insn, and it will handle everything. */
4755 rtx begin_rtx, end_rtx;
4757 /* We must not expand to a library call. If we did, any
4758 fallback library function in libgcc that might contain a call to
4759 __builtin___clear_cache() would recurse infinitely. */
4760 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4762 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4766 if (HAVE_clear_cache)
4768 struct expand_operand ops[2];
4770 begin = CALL_EXPR_ARG (exp, 0);
4771 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4773 end = CALL_EXPR_ARG (exp, 1);
4774 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4776 create_address_operand (&ops[0], begin_rtx);
4777 create_address_operand (&ops[1], end_rtx);
4778 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4782 #endif /* HAVE_clear_cache */
4785 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4788 round_trampoline_addr (rtx tramp)
4790 rtx temp, addend, mask;
4792 /* If we don't need too much alignment, we'll have been guaranteed
4793 proper alignment by get_trampoline_type. */
4794 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4797 /* Round address up to desired boundary. */
4798 temp = gen_reg_rtx (Pmode);
4799 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4800 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4802 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4803 temp, 0, OPTAB_LIB_WIDEN);
4804 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4805 temp, 0, OPTAB_LIB_WIDEN);
4811 expand_builtin_init_trampoline (tree exp)
4813 tree t_tramp, t_func, t_chain;
4814 rtx m_tramp, r_tramp, r_chain, tmp;
4816 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4817 POINTER_TYPE, VOID_TYPE))
4820 t_tramp = CALL_EXPR_ARG (exp, 0);
4821 t_func = CALL_EXPR_ARG (exp, 1);
4822 t_chain = CALL_EXPR_ARG (exp, 2);
4824 r_tramp = expand_normal (t_tramp);
4825 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4826 MEM_NOTRAP_P (m_tramp) = 1;
4828 /* The TRAMP argument should be the address of a field within the
4829 local function's FRAME decl. Let's see if we can fill in the
4830 to fill in the MEM_ATTRs for this memory. */
4831 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4832 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4835 tmp = round_trampoline_addr (r_tramp);
4838 m_tramp = change_address (m_tramp, BLKmode, tmp);
4839 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4840 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4843 /* The FUNC argument should be the address of the nested function.
4844 Extract the actual function decl to pass to the hook. */
4845 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4846 t_func = TREE_OPERAND (t_func, 0);
4847 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4849 r_chain = expand_normal (t_chain);
4851 /* Generate insns to initialize the trampoline. */
4852 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4854 trampolines_created = 1;
4856 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4857 "trampoline generated for nested function %qD", t_func);
4863 expand_builtin_adjust_trampoline (tree exp)
4867 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4870 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4871 tramp = round_trampoline_addr (tramp);
4872 if (targetm.calls.trampoline_adjust_address)
4873 tramp = targetm.calls.trampoline_adjust_address (tramp);
4878 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4879 function. The function first checks whether the back end provides
4880 an insn to implement signbit for the respective mode. If not, it
4881 checks whether the floating point format of the value is such that
4882 the sign bit can be extracted. If that is not the case, the
4883 function returns NULL_RTX to indicate that a normal call should be
4884 emitted rather than expanding the function in-line. EXP is the
4885 expression that is a call to the builtin function; if convenient,
4886 the result should be placed in TARGET. */
4888 expand_builtin_signbit (tree exp, rtx target)
4890 const struct real_format *fmt;
4891 enum machine_mode fmode, imode, rmode;
4894 enum insn_code icode;
4896 location_t loc = EXPR_LOCATION (exp);
4898 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4901 arg = CALL_EXPR_ARG (exp, 0);
4902 fmode = TYPE_MODE (TREE_TYPE (arg));
4903 rmode = TYPE_MODE (TREE_TYPE (exp));
4904 fmt = REAL_MODE_FORMAT (fmode);
4906 arg = builtin_save_expr (arg);
4908 /* Expand the argument yielding a RTX expression. */
4909 temp = expand_normal (arg);
4911 /* Check if the back end provides an insn that handles signbit for the
4913 icode = optab_handler (signbit_optab, fmode);
4914 if (icode != CODE_FOR_nothing)
4916 rtx last = get_last_insn ();
4917 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4918 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4920 delete_insns_since (last);
4923 /* For floating point formats without a sign bit, implement signbit
4925 bitpos = fmt->signbit_ro;
4928 /* But we can't do this if the format supports signed zero. */
4929 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4932 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4933 build_real (TREE_TYPE (arg), dconst0));
4934 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4937 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4939 imode = int_mode_for_mode (fmode);
4940 if (imode == BLKmode)
4942 temp = gen_lowpart (imode, temp);
4947 /* Handle targets with different FP word orders. */
4948 if (FLOAT_WORDS_BIG_ENDIAN)
4949 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4951 word = bitpos / BITS_PER_WORD;
4952 temp = operand_subword_force (temp, word, fmode);
4953 bitpos = bitpos % BITS_PER_WORD;
4956 /* Force the intermediate word_mode (or narrower) result into a
4957 register. This avoids attempting to create paradoxical SUBREGs
4958 of floating point modes below. */
4959 temp = force_reg (imode, temp);
4961 /* If the bitpos is within the "result mode" lowpart, the operation
4962 can be implement with a single bitwise AND. Otherwise, we need
4963 a right shift and an AND. */
4965 if (bitpos < GET_MODE_BITSIZE (rmode))
4967 double_int mask = double_int_setbit (double_int_zero, bitpos);
4969 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4970 temp = gen_lowpart (rmode, temp);
4971 temp = expand_binop (rmode, and_optab, temp,
4972 immed_double_int_const (mask, rmode),
4973 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4977 /* Perform a logical right shift to place the signbit in the least
4978 significant bit, then truncate the result to the desired mode
4979 and mask just this bit. */
4980 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4981 temp = gen_lowpart (rmode, temp);
4982 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4983 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4989 /* Expand fork or exec calls. TARGET is the desired target of the
4990 call. EXP is the call. FN is the
4991 identificator of the actual function. IGNORE is nonzero if the
4992 value is to be ignored. */
4995 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5000 /* If we are not profiling, just call the function. */
5001 if (!profile_arc_flag)
5004 /* Otherwise call the wrapper. This should be equivalent for the rest of
5005 compiler, so the code does not diverge, and the wrapper may run the
5006 code necessary for keeping the profiling sane. */
5008 switch (DECL_FUNCTION_CODE (fn))
5011 id = get_identifier ("__gcov_fork");
5014 case BUILT_IN_EXECL:
5015 id = get_identifier ("__gcov_execl");
5018 case BUILT_IN_EXECV:
5019 id = get_identifier ("__gcov_execv");
5022 case BUILT_IN_EXECLP:
5023 id = get_identifier ("__gcov_execlp");
5026 case BUILT_IN_EXECLE:
5027 id = get_identifier ("__gcov_execle");
5030 case BUILT_IN_EXECVP:
5031 id = get_identifier ("__gcov_execvp");
5034 case BUILT_IN_EXECVE:
5035 id = get_identifier ("__gcov_execve");
5042 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5043 FUNCTION_DECL, id, TREE_TYPE (fn));
5044 DECL_EXTERNAL (decl) = 1;
5045 TREE_PUBLIC (decl) = 1;
5046 DECL_ARTIFICIAL (decl) = 1;
5047 TREE_NOTHROW (decl) = 1;
5048 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5049 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5050 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5051 return expand_call (call, target, ignore);
5056 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5057 the pointer in these functions is void*, the tree optimizers may remove
5058 casts. The mode computed in expand_builtin isn't reliable either, due
5059 to __sync_bool_compare_and_swap.
5061 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5062 group of builtins. This gives us log2 of the mode size. */
5064 static inline enum machine_mode
5065 get_builtin_sync_mode (int fcode_diff)
5067 /* The size is not negotiable, so ask not to get BLKmode in return
5068 if the target indicates that a smaller size would be better. */
5069 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5072 /* Expand the memory expression LOC and return the appropriate memory operand
5073 for the builtin_sync operations. */
5076 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5080 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5081 addr = convert_memory_address (Pmode, addr);
5083 /* Note that we explicitly do not want any alias information for this
5084 memory, so that we kill all other live memories. Otherwise we don't
5085 satisfy the full barrier semantics of the intrinsic. */
5086 mem = validize_mem (gen_rtx_MEM (mode, addr));
5088 /* The alignment needs to be at least according to that of the mode. */
5089 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5090 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5091 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5092 MEM_VOLATILE_P (mem) = 1;
5097 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5098 EXP is the CALL_EXPR. CODE is the rtx code
5099 that corresponds to the arithmetic or logical operation from the name;
5100 an exception here is that NOT actually means NAND. TARGET is an optional
5101 place for us to store the results; AFTER is true if this is the
5102 fetch_and_xxx form. IGNORE is true if we don't actually care about
5103 the result of the operation at all. */
5106 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5107 enum rtx_code code, bool after,
5108 rtx target, bool ignore)
5111 enum machine_mode old_mode;
5112 location_t loc = EXPR_LOCATION (exp);
5114 if (code == NOT && warn_sync_nand)
5116 tree fndecl = get_callee_fndecl (exp);
5117 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5119 static bool warned_f_a_n, warned_n_a_f;
5123 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5124 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5125 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5126 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5127 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5132 fndecl = implicit_built_in_decls[BUILT_IN_SYNC_FETCH_AND_NAND_N];
5133 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5134 warned_f_a_n = true;
5137 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5138 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5139 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5140 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5141 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5146 fndecl = implicit_built_in_decls[BUILT_IN_SYNC_NAND_AND_FETCH_N];
5147 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5148 warned_n_a_f = true;
5156 /* Expand the operands. */
5157 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5159 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5160 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5161 of CONST_INTs, where we know the old_mode only from the call argument. */
5162 old_mode = GET_MODE (val);
5163 if (old_mode == VOIDmode)
5164 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5165 val = convert_modes (mode, old_mode, val, 1);
5168 return expand_sync_operation (mem, val, code);
5170 return expand_sync_fetch_operation (mem, val, code, after, target);
5173 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5174 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5175 true if this is the boolean form. TARGET is a place for us to store the
5176 results; this is NOT optional if IS_BOOL is true. */
5179 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5180 bool is_bool, rtx target)
5182 rtx old_val, new_val, mem;
5183 enum machine_mode old_mode;
5185 /* Expand the operands. */
5186 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5189 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5190 mode, EXPAND_NORMAL);
5191 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5192 of CONST_INTs, where we know the old_mode only from the call argument. */
5193 old_mode = GET_MODE (old_val);
5194 if (old_mode == VOIDmode)
5195 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5196 old_val = convert_modes (mode, old_mode, old_val, 1);
5198 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5199 mode, EXPAND_NORMAL);
5200 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5201 of CONST_INTs, where we know the old_mode only from the call argument. */
5202 old_mode = GET_MODE (new_val);
5203 if (old_mode == VOIDmode)
5204 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5205 new_val = convert_modes (mode, old_mode, new_val, 1);
5208 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5210 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5213 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5214 general form is actually an atomic exchange, and some targets only
5215 support a reduced form with the second argument being a constant 1.
5216 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5220 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5224 enum machine_mode old_mode;
5226 /* Expand the operands. */
5227 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5228 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5229 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5230 of CONST_INTs, where we know the old_mode only from the call argument. */
5231 old_mode = GET_MODE (val);
5232 if (old_mode == VOIDmode)
5233 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5234 val = convert_modes (mode, old_mode, val, 1);
5236 return expand_sync_lock_test_and_set (mem, val, target);
5239 /* Expand the __sync_synchronize intrinsic. */
5242 expand_builtin_sync_synchronize (void)
5245 VEC (tree, gc) *v_clobbers;
5247 #ifdef HAVE_memory_barrier
5248 if (HAVE_memory_barrier)
5250 emit_insn (gen_memory_barrier ());
5255 if (synchronize_libfunc != NULL_RTX)
5257 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5261 /* If no explicit memory barrier instruction is available, create an
5262 empty asm stmt with a memory clobber. */
5263 v_clobbers = VEC_alloc (tree, gc, 1);
5264 VEC_quick_push (tree, v_clobbers,
5265 tree_cons (NULL, build_string (6, "memory"), NULL));
5266 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5267 gimple_asm_set_volatile (x, true);
5268 expand_asm_stmt (x);
5271 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5274 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5276 struct expand_operand ops[2];
5277 enum insn_code icode;
5280 /* Expand the operands. */
5281 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5283 /* If there is an explicit operation in the md file, use it. */
5284 icode = direct_optab_handler (sync_lock_release_optab, mode);
5285 if (icode != CODE_FOR_nothing)
5287 create_fixed_operand (&ops[0], mem);
5288 create_input_operand (&ops[1], const0_rtx, mode);
5289 if (maybe_expand_insn (icode, 2, ops))
5293 /* Otherwise we can implement this operation by emitting a barrier
5294 followed by a store of zero. */
5295 expand_builtin_sync_synchronize ();
5296 emit_move_insn (mem, const0_rtx);
5299 /* Expand an expression EXP that calls a built-in function,
5300 with result going to TARGET if that's convenient
5301 (and in mode MODE if that's convenient).
5302 SUBTARGET may be used as the target for computing one of EXP's operands.
5303 IGNORE is nonzero if the value is to be ignored. */
5306 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5309 tree fndecl = get_callee_fndecl (exp);
5310 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5311 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5314 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5315 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5317 /* When not optimizing, generate calls to library functions for a certain
5320 && !called_as_built_in (fndecl)
5321 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5322 && fcode != BUILT_IN_ALLOCA
5323 && fcode != BUILT_IN_FREE)
5324 return expand_call (exp, target, ignore);
5326 /* The built-in function expanders test for target == const0_rtx
5327 to determine whether the function's result will be ignored. */
5329 target = const0_rtx;
5331 /* If the result of a pure or const built-in function is ignored, and
5332 none of its arguments are volatile, we can avoid expanding the
5333 built-in call and just evaluate the arguments for side-effects. */
5334 if (target == const0_rtx
5335 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5336 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5338 bool volatilep = false;
5340 call_expr_arg_iterator iter;
5342 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5343 if (TREE_THIS_VOLATILE (arg))
5351 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5352 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5359 CASE_FLT_FN (BUILT_IN_FABS):
5360 target = expand_builtin_fabs (exp, target, subtarget);
5365 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5366 target = expand_builtin_copysign (exp, target, subtarget);
5371 /* Just do a normal library call if we were unable to fold
5373 CASE_FLT_FN (BUILT_IN_CABS):
5376 CASE_FLT_FN (BUILT_IN_EXP):
5377 CASE_FLT_FN (BUILT_IN_EXP10):
5378 CASE_FLT_FN (BUILT_IN_POW10):
5379 CASE_FLT_FN (BUILT_IN_EXP2):
5380 CASE_FLT_FN (BUILT_IN_EXPM1):
5381 CASE_FLT_FN (BUILT_IN_LOGB):
5382 CASE_FLT_FN (BUILT_IN_LOG):
5383 CASE_FLT_FN (BUILT_IN_LOG10):
5384 CASE_FLT_FN (BUILT_IN_LOG2):
5385 CASE_FLT_FN (BUILT_IN_LOG1P):
5386 CASE_FLT_FN (BUILT_IN_TAN):
5387 CASE_FLT_FN (BUILT_IN_ASIN):
5388 CASE_FLT_FN (BUILT_IN_ACOS):
5389 CASE_FLT_FN (BUILT_IN_ATAN):
5390 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5391 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5392 because of possible accuracy problems. */
5393 if (! flag_unsafe_math_optimizations)
5395 CASE_FLT_FN (BUILT_IN_SQRT):
5396 CASE_FLT_FN (BUILT_IN_FLOOR):
5397 CASE_FLT_FN (BUILT_IN_CEIL):
5398 CASE_FLT_FN (BUILT_IN_TRUNC):
5399 CASE_FLT_FN (BUILT_IN_ROUND):
5400 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5401 CASE_FLT_FN (BUILT_IN_RINT):
5402 target = expand_builtin_mathfn (exp, target, subtarget);
5407 CASE_FLT_FN (BUILT_IN_FMA):
5408 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5413 CASE_FLT_FN (BUILT_IN_ILOGB):
5414 if (! flag_unsafe_math_optimizations)
5416 CASE_FLT_FN (BUILT_IN_ISINF):
5417 CASE_FLT_FN (BUILT_IN_FINITE):
5418 case BUILT_IN_ISFINITE:
5419 case BUILT_IN_ISNORMAL:
5420 target = expand_builtin_interclass_mathfn (exp, target);
5425 CASE_FLT_FN (BUILT_IN_LCEIL):
5426 CASE_FLT_FN (BUILT_IN_LLCEIL):
5427 CASE_FLT_FN (BUILT_IN_LFLOOR):
5428 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5429 target = expand_builtin_int_roundingfn (exp, target);
5434 CASE_FLT_FN (BUILT_IN_LRINT):
5435 CASE_FLT_FN (BUILT_IN_LLRINT):
5436 CASE_FLT_FN (BUILT_IN_LROUND):
5437 CASE_FLT_FN (BUILT_IN_LLROUND):
5438 target = expand_builtin_int_roundingfn_2 (exp, target);
5443 CASE_FLT_FN (BUILT_IN_POWI):
5444 target = expand_builtin_powi (exp, target);
5449 CASE_FLT_FN (BUILT_IN_ATAN2):
5450 CASE_FLT_FN (BUILT_IN_LDEXP):
5451 CASE_FLT_FN (BUILT_IN_SCALB):
5452 CASE_FLT_FN (BUILT_IN_SCALBN):
5453 CASE_FLT_FN (BUILT_IN_SCALBLN):
5454 if (! flag_unsafe_math_optimizations)
5457 CASE_FLT_FN (BUILT_IN_FMOD):
5458 CASE_FLT_FN (BUILT_IN_REMAINDER):
5459 CASE_FLT_FN (BUILT_IN_DREM):
5460 CASE_FLT_FN (BUILT_IN_POW):
5461 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5466 CASE_FLT_FN (BUILT_IN_CEXPI):
5467 target = expand_builtin_cexpi (exp, target);
5468 gcc_assert (target);
5471 CASE_FLT_FN (BUILT_IN_SIN):
5472 CASE_FLT_FN (BUILT_IN_COS):
5473 if (! flag_unsafe_math_optimizations)
5475 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5480 CASE_FLT_FN (BUILT_IN_SINCOS):
5481 if (! flag_unsafe_math_optimizations)
5483 target = expand_builtin_sincos (exp);
5488 case BUILT_IN_APPLY_ARGS:
5489 return expand_builtin_apply_args ();
5491 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5492 FUNCTION with a copy of the parameters described by
5493 ARGUMENTS, and ARGSIZE. It returns a block of memory
5494 allocated on the stack into which is stored all the registers
5495 that might possibly be used for returning the result of a
5496 function. ARGUMENTS is the value returned by
5497 __builtin_apply_args. ARGSIZE is the number of bytes of
5498 arguments that must be copied. ??? How should this value be
5499 computed? We'll also need a safe worst case value for varargs
5501 case BUILT_IN_APPLY:
5502 if (!validate_arglist (exp, POINTER_TYPE,
5503 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5504 && !validate_arglist (exp, REFERENCE_TYPE,
5505 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5511 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5512 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5513 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5515 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5518 /* __builtin_return (RESULT) causes the function to return the
5519 value described by RESULT. RESULT is address of the block of
5520 memory returned by __builtin_apply. */
5521 case BUILT_IN_RETURN:
5522 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5523 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5526 case BUILT_IN_SAVEREGS:
5527 return expand_builtin_saveregs ();
5529 case BUILT_IN_VA_ARG_PACK:
5530 /* All valid uses of __builtin_va_arg_pack () are removed during
5532 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5535 case BUILT_IN_VA_ARG_PACK_LEN:
5536 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5538 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5541 /* Return the address of the first anonymous stack arg. */
5542 case BUILT_IN_NEXT_ARG:
5543 if (fold_builtin_next_arg (exp, false))
5545 return expand_builtin_next_arg ();
5547 case BUILT_IN_CLEAR_CACHE:
5548 target = expand_builtin___clear_cache (exp);
5553 case BUILT_IN_CLASSIFY_TYPE:
5554 return expand_builtin_classify_type (exp);
5556 case BUILT_IN_CONSTANT_P:
5559 case BUILT_IN_FRAME_ADDRESS:
5560 case BUILT_IN_RETURN_ADDRESS:
5561 return expand_builtin_frame_address (fndecl, exp);
5563 /* Returns the address of the area where the structure is returned.
5565 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5566 if (call_expr_nargs (exp) != 0
5567 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5568 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5571 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5573 case BUILT_IN_ALLOCA:
5574 /* If the allocation stems from the declaration of a variable-sized
5575 object, it cannot accumulate. */
5576 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5581 case BUILT_IN_STACK_SAVE:
5582 return expand_stack_save ();
5584 case BUILT_IN_STACK_RESTORE:
5585 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5588 case BUILT_IN_BSWAP32:
5589 case BUILT_IN_BSWAP64:
5590 target = expand_builtin_bswap (exp, target, subtarget);
5596 CASE_INT_FN (BUILT_IN_FFS):
5597 case BUILT_IN_FFSIMAX:
5598 target = expand_builtin_unop (target_mode, exp, target,
5599 subtarget, ffs_optab);
5604 CASE_INT_FN (BUILT_IN_CLZ):
5605 case BUILT_IN_CLZIMAX:
5606 target = expand_builtin_unop (target_mode, exp, target,
5607 subtarget, clz_optab);
5612 CASE_INT_FN (BUILT_IN_CTZ):
5613 case BUILT_IN_CTZIMAX:
5614 target = expand_builtin_unop (target_mode, exp, target,
5615 subtarget, ctz_optab);
5620 CASE_INT_FN (BUILT_IN_CLRSB):
5621 case BUILT_IN_CLRSBIMAX:
5622 target = expand_builtin_unop (target_mode, exp, target,
5623 subtarget, clrsb_optab);
5628 CASE_INT_FN (BUILT_IN_POPCOUNT):
5629 case BUILT_IN_POPCOUNTIMAX:
5630 target = expand_builtin_unop (target_mode, exp, target,
5631 subtarget, popcount_optab);
5636 CASE_INT_FN (BUILT_IN_PARITY):
5637 case BUILT_IN_PARITYIMAX:
5638 target = expand_builtin_unop (target_mode, exp, target,
5639 subtarget, parity_optab);
5644 case BUILT_IN_STRLEN:
5645 target = expand_builtin_strlen (exp, target, target_mode);
5650 case BUILT_IN_STRCPY:
5651 target = expand_builtin_strcpy (exp, target);
5656 case BUILT_IN_STRNCPY:
5657 target = expand_builtin_strncpy (exp, target);
5662 case BUILT_IN_STPCPY:
5663 target = expand_builtin_stpcpy (exp, target, mode);
5668 case BUILT_IN_MEMCPY:
5669 target = expand_builtin_memcpy (exp, target);
5674 case BUILT_IN_MEMPCPY:
5675 target = expand_builtin_mempcpy (exp, target, mode);
5680 case BUILT_IN_MEMSET:
5681 target = expand_builtin_memset (exp, target, mode);
5686 case BUILT_IN_BZERO:
5687 target = expand_builtin_bzero (exp);
5692 case BUILT_IN_STRCMP:
5693 target = expand_builtin_strcmp (exp, target);
5698 case BUILT_IN_STRNCMP:
5699 target = expand_builtin_strncmp (exp, target, mode);
5705 case BUILT_IN_MEMCMP:
5706 target = expand_builtin_memcmp (exp, target, mode);
5711 case BUILT_IN_SETJMP:
5712 /* This should have been lowered to the builtins below. */
5715 case BUILT_IN_SETJMP_SETUP:
5716 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5717 and the receiver label. */
5718 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5720 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5721 VOIDmode, EXPAND_NORMAL);
5722 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5723 rtx label_r = label_rtx (label);
5725 /* This is copied from the handling of non-local gotos. */
5726 expand_builtin_setjmp_setup (buf_addr, label_r);
5727 nonlocal_goto_handler_labels
5728 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
5729 nonlocal_goto_handler_labels);
5730 /* ??? Do not let expand_label treat us as such since we would
5731 not want to be both on the list of non-local labels and on
5732 the list of forced labels. */
5733 FORCED_LABEL (label) = 0;
5738 case BUILT_IN_SETJMP_DISPATCHER:
5739 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
5740 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5742 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5743 rtx label_r = label_rtx (label);
5745 /* Remove the dispatcher label from the list of non-local labels
5746 since the receiver labels have been added to it above. */
5747 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
5752 case BUILT_IN_SETJMP_RECEIVER:
5753 /* __builtin_setjmp_receiver is passed the receiver label. */
5754 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5756 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5757 rtx label_r = label_rtx (label);
5759 expand_builtin_setjmp_receiver (label_r);
5764 /* __builtin_longjmp is passed a pointer to an array of five words.
5765 It's similar to the C library longjmp function but works with
5766 __builtin_setjmp above. */
5767 case BUILT_IN_LONGJMP:
5768 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5770 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5771 VOIDmode, EXPAND_NORMAL);
5772 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
5774 if (value != const1_rtx)
5776 error ("%<__builtin_longjmp%> second argument must be 1");
5780 expand_builtin_longjmp (buf_addr, value);
5785 case BUILT_IN_NONLOCAL_GOTO:
5786 target = expand_builtin_nonlocal_goto (exp);
5791 /* This updates the setjmp buffer that is its argument with the value
5792 of the current stack pointer. */
5793 case BUILT_IN_UPDATE_SETJMP_BUF:
5794 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5797 = expand_normal (CALL_EXPR_ARG (exp, 0));
5799 expand_builtin_update_setjmp_buf (buf_addr);
5805 expand_builtin_trap ();
5808 case BUILT_IN_UNREACHABLE:
5809 expand_builtin_unreachable ();
5812 CASE_FLT_FN (BUILT_IN_SIGNBIT):
5813 case BUILT_IN_SIGNBITD32:
5814 case BUILT_IN_SIGNBITD64:
5815 case BUILT_IN_SIGNBITD128:
5816 target = expand_builtin_signbit (exp, target);
5821 /* Various hooks for the DWARF 2 __throw routine. */
5822 case BUILT_IN_UNWIND_INIT:
5823 expand_builtin_unwind_init ();
5825 case BUILT_IN_DWARF_CFA:
5826 return virtual_cfa_rtx;
5827 #ifdef DWARF2_UNWIND_INFO
5828 case BUILT_IN_DWARF_SP_COLUMN:
5829 return expand_builtin_dwarf_sp_column ();
5830 case BUILT_IN_INIT_DWARF_REG_SIZES:
5831 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
5834 case BUILT_IN_FROB_RETURN_ADDR:
5835 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
5836 case BUILT_IN_EXTRACT_RETURN_ADDR:
5837 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
5838 case BUILT_IN_EH_RETURN:
5839 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
5840 CALL_EXPR_ARG (exp, 1));
5842 #ifdef EH_RETURN_DATA_REGNO
5843 case BUILT_IN_EH_RETURN_DATA_REGNO:
5844 return expand_builtin_eh_return_data_regno (exp);
5846 case BUILT_IN_EXTEND_POINTER:
5847 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
5848 case BUILT_IN_EH_POINTER:
5849 return expand_builtin_eh_pointer (exp);
5850 case BUILT_IN_EH_FILTER:
5851 return expand_builtin_eh_filter (exp);
5852 case BUILT_IN_EH_COPY_VALUES:
5853 return expand_builtin_eh_copy_values (exp);
5855 case BUILT_IN_VA_START:
5856 return expand_builtin_va_start (exp);
5857 case BUILT_IN_VA_END:
5858 return expand_builtin_va_end (exp);
5859 case BUILT_IN_VA_COPY:
5860 return expand_builtin_va_copy (exp);
5861 case BUILT_IN_EXPECT:
5862 return expand_builtin_expect (exp, target);
5863 case BUILT_IN_ASSUME_ALIGNED:
5864 return expand_builtin_assume_aligned (exp, target);
5865 case BUILT_IN_PREFETCH:
5866 expand_builtin_prefetch (exp);
5869 case BUILT_IN_INIT_TRAMPOLINE:
5870 return expand_builtin_init_trampoline (exp);
5871 case BUILT_IN_ADJUST_TRAMPOLINE:
5872 return expand_builtin_adjust_trampoline (exp);
5875 case BUILT_IN_EXECL:
5876 case BUILT_IN_EXECV:
5877 case BUILT_IN_EXECLP:
5878 case BUILT_IN_EXECLE:
5879 case BUILT_IN_EXECVP:
5880 case BUILT_IN_EXECVE:
5881 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
5886 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
5887 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
5888 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
5889 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
5890 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
5891 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
5892 target = expand_builtin_sync_operation (mode, exp, PLUS,
5893 false, target, ignore);
5898 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
5899 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
5900 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
5901 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
5902 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
5903 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
5904 target = expand_builtin_sync_operation (mode, exp, MINUS,
5905 false, target, ignore);
5910 case BUILT_IN_SYNC_FETCH_AND_OR_1:
5911 case BUILT_IN_SYNC_FETCH_AND_OR_2:
5912 case BUILT_IN_SYNC_FETCH_AND_OR_4:
5913 case BUILT_IN_SYNC_FETCH_AND_OR_8:
5914 case BUILT_IN_SYNC_FETCH_AND_OR_16:
5915 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
5916 target = expand_builtin_sync_operation (mode, exp, IOR,
5917 false, target, ignore);
5922 case BUILT_IN_SYNC_FETCH_AND_AND_1:
5923 case BUILT_IN_SYNC_FETCH_AND_AND_2:
5924 case BUILT_IN_SYNC_FETCH_AND_AND_4:
5925 case BUILT_IN_SYNC_FETCH_AND_AND_8:
5926 case BUILT_IN_SYNC_FETCH_AND_AND_16:
5927 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
5928 target = expand_builtin_sync_operation (mode, exp, AND,
5929 false, target, ignore);
5934 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
5935 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
5936 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
5937 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
5938 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
5939 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
5940 target = expand_builtin_sync_operation (mode, exp, XOR,
5941 false, target, ignore);
5946 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5947 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5948 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5949 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5950 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5951 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
5952 target = expand_builtin_sync_operation (mode, exp, NOT,
5953 false, target, ignore);
5958 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
5959 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
5960 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
5961 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
5962 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
5963 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
5964 target = expand_builtin_sync_operation (mode, exp, PLUS,
5965 true, target, ignore);
5970 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
5971 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
5972 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
5973 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
5974 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
5975 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
5976 target = expand_builtin_sync_operation (mode, exp, MINUS,
5977 true, target, ignore);
5982 case BUILT_IN_SYNC_OR_AND_FETCH_1:
5983 case BUILT_IN_SYNC_OR_AND_FETCH_2:
5984 case BUILT_IN_SYNC_OR_AND_FETCH_4:
5985 case BUILT_IN_SYNC_OR_AND_FETCH_8:
5986 case BUILT_IN_SYNC_OR_AND_FETCH_16:
5987 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
5988 target = expand_builtin_sync_operation (mode, exp, IOR,
5989 true, target, ignore);
5994 case BUILT_IN_SYNC_AND_AND_FETCH_1:
5995 case BUILT_IN_SYNC_AND_AND_FETCH_2:
5996 case BUILT_IN_SYNC_AND_AND_FETCH_4:
5997 case BUILT_IN_SYNC_AND_AND_FETCH_8:
5998 case BUILT_IN_SYNC_AND_AND_FETCH_16:
5999 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6000 target = expand_builtin_sync_operation (mode, exp, AND,
6001 true, target, ignore);
6006 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6007 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6008 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6009 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6010 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6011 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6012 target = expand_builtin_sync_operation (mode, exp, XOR,
6013 true, target, ignore);
6018 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6019 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6020 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6021 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6022 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6023 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6024 target = expand_builtin_sync_operation (mode, exp, NOT,
6025 true, target, ignore);
6030 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6031 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6032 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6033 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6034 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6035 if (mode == VOIDmode)
6036 mode = TYPE_MODE (boolean_type_node);
6037 if (!target || !register_operand (target, mode))
6038 target = gen_reg_rtx (mode);
6040 mode = get_builtin_sync_mode
6041 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6042 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6047 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6048 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6049 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6050 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6051 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6052 mode = get_builtin_sync_mode
6053 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6054 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6059 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6060 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6061 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6062 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6063 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6064 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6065 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6070 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6071 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6072 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6073 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6074 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6075 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6076 expand_builtin_sync_lock_release (mode, exp);
6079 case BUILT_IN_SYNC_SYNCHRONIZE:
6080 expand_builtin_sync_synchronize ();
6083 case BUILT_IN_OBJECT_SIZE:
6084 return expand_builtin_object_size (exp);
6086 case BUILT_IN_MEMCPY_CHK:
6087 case BUILT_IN_MEMPCPY_CHK:
6088 case BUILT_IN_MEMMOVE_CHK:
6089 case BUILT_IN_MEMSET_CHK:
6090 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6095 case BUILT_IN_STRCPY_CHK:
6096 case BUILT_IN_STPCPY_CHK:
6097 case BUILT_IN_STRNCPY_CHK:
6098 case BUILT_IN_STRCAT_CHK:
6099 case BUILT_IN_STRNCAT_CHK:
6100 case BUILT_IN_SNPRINTF_CHK:
6101 case BUILT_IN_VSNPRINTF_CHK:
6102 maybe_emit_chk_warning (exp, fcode);
6105 case BUILT_IN_SPRINTF_CHK:
6106 case BUILT_IN_VSPRINTF_CHK:
6107 maybe_emit_sprintf_chk_warning (exp, fcode);
6111 maybe_emit_free_warning (exp);
6114 default: /* just do library call, if unknown builtin */
6118 /* The switch statement above can drop through to cause the function
6119 to be called normally. */
6120 return expand_call (exp, target, ignore);
6123 /* Determine whether a tree node represents a call to a built-in
6124 function. If the tree T is a call to a built-in function with
6125 the right number of arguments of the appropriate types, return
6126 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6127 Otherwise the return value is END_BUILTINS. */
6129 enum built_in_function
6130 builtin_mathfn_code (const_tree t)
6132 const_tree fndecl, arg, parmlist;
6133 const_tree argtype, parmtype;
6134 const_call_expr_arg_iterator iter;
6136 if (TREE_CODE (t) != CALL_EXPR
6137 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6138 return END_BUILTINS;
6140 fndecl = get_callee_fndecl (t);
6141 if (fndecl == NULL_TREE
6142 || TREE_CODE (fndecl) != FUNCTION_DECL
6143 || ! DECL_BUILT_IN (fndecl)
6144 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6145 return END_BUILTINS;
6147 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6148 init_const_call_expr_arg_iterator (t, &iter);
6149 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6151 /* If a function doesn't take a variable number of arguments,
6152 the last element in the list will have type `void'. */
6153 parmtype = TREE_VALUE (parmlist);
6154 if (VOID_TYPE_P (parmtype))
6156 if (more_const_call_expr_args_p (&iter))
6157 return END_BUILTINS;
6158 return DECL_FUNCTION_CODE (fndecl);
6161 if (! more_const_call_expr_args_p (&iter))
6162 return END_BUILTINS;
6164 arg = next_const_call_expr_arg (&iter);
6165 argtype = TREE_TYPE (arg);
6167 if (SCALAR_FLOAT_TYPE_P (parmtype))
6169 if (! SCALAR_FLOAT_TYPE_P (argtype))
6170 return END_BUILTINS;
6172 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6174 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6175 return END_BUILTINS;
6177 else if (POINTER_TYPE_P (parmtype))
6179 if (! POINTER_TYPE_P (argtype))
6180 return END_BUILTINS;
6182 else if (INTEGRAL_TYPE_P (parmtype))
6184 if (! INTEGRAL_TYPE_P (argtype))
6185 return END_BUILTINS;
6188 return END_BUILTINS;
6191 /* Variable-length argument list. */
6192 return DECL_FUNCTION_CODE (fndecl);
6195 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6196 evaluate to a constant. */
6199 fold_builtin_constant_p (tree arg)
6201 /* We return 1 for a numeric type that's known to be a constant
6202 value at compile-time or for an aggregate type that's a
6203 literal constant. */
6206 /* If we know this is a constant, emit the constant of one. */
6207 if (CONSTANT_CLASS_P (arg)
6208 || (TREE_CODE (arg) == CONSTRUCTOR
6209 && TREE_CONSTANT (arg)))
6210 return integer_one_node;
6211 if (TREE_CODE (arg) == ADDR_EXPR)
6213 tree op = TREE_OPERAND (arg, 0);
6214 if (TREE_CODE (op) == STRING_CST
6215 || (TREE_CODE (op) == ARRAY_REF
6216 && integer_zerop (TREE_OPERAND (op, 1))
6217 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6218 return integer_one_node;
6221 /* If this expression has side effects, show we don't know it to be a
6222 constant. Likewise if it's a pointer or aggregate type since in
6223 those case we only want literals, since those are only optimized
6224 when generating RTL, not later.
6225 And finally, if we are compiling an initializer, not code, we
6226 need to return a definite result now; there's not going to be any
6227 more optimization done. */
6228 if (TREE_SIDE_EFFECTS (arg)
6229 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6230 || POINTER_TYPE_P (TREE_TYPE (arg))
6232 || folding_initializer)
6233 return integer_zero_node;
6238 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6239 return it as a truthvalue. */
6242 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6244 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6246 fn = built_in_decls[BUILT_IN_EXPECT];
6247 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6248 ret_type = TREE_TYPE (TREE_TYPE (fn));
6249 pred_type = TREE_VALUE (arg_types);
6250 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6252 pred = fold_convert_loc (loc, pred_type, pred);
6253 expected = fold_convert_loc (loc, expected_type, expected);
6254 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6256 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6257 build_int_cst (ret_type, 0));
6260 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6261 NULL_TREE if no simplification is possible. */
6264 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6266 tree inner, fndecl, inner_arg0;
6267 enum tree_code code;
6269 /* Distribute the expected value over short-circuiting operators.
6270 See through the cast from truthvalue_type_node to long. */
6272 while (TREE_CODE (inner_arg0) == NOP_EXPR
6273 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6274 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6275 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6277 /* If this is a builtin_expect within a builtin_expect keep the
6278 inner one. See through a comparison against a constant. It
6279 might have been added to create a thruthvalue. */
6282 if (COMPARISON_CLASS_P (inner)
6283 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6284 inner = TREE_OPERAND (inner, 0);
6286 if (TREE_CODE (inner) == CALL_EXPR
6287 && (fndecl = get_callee_fndecl (inner))
6288 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6289 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6293 code = TREE_CODE (inner);
6294 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6296 tree op0 = TREE_OPERAND (inner, 0);
6297 tree op1 = TREE_OPERAND (inner, 1);
6299 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6300 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6301 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6303 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6306 /* If the argument isn't invariant then there's nothing else we can do. */
6307 if (!TREE_CONSTANT (inner_arg0))
6310 /* If we expect that a comparison against the argument will fold to
6311 a constant return the constant. In practice, this means a true
6312 constant or the address of a non-weak symbol. */
6315 if (TREE_CODE (inner) == ADDR_EXPR)
6319 inner = TREE_OPERAND (inner, 0);
6321 while (TREE_CODE (inner) == COMPONENT_REF
6322 || TREE_CODE (inner) == ARRAY_REF);
6323 if ((TREE_CODE (inner) == VAR_DECL
6324 || TREE_CODE (inner) == FUNCTION_DECL)
6325 && DECL_WEAK (inner))
6329 /* Otherwise, ARG0 already has the proper type for the return value. */
6333 /* Fold a call to __builtin_classify_type with argument ARG. */
6336 fold_builtin_classify_type (tree arg)
6339 return build_int_cst (integer_type_node, no_type_class);
6341 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6344 /* Fold a call to __builtin_strlen with argument ARG. */
6347 fold_builtin_strlen (location_t loc, tree type, tree arg)
6349 if (!validate_arg (arg, POINTER_TYPE))
6353 tree len = c_strlen (arg, 0);
6356 return fold_convert_loc (loc, type, len);
6362 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6365 fold_builtin_inf (location_t loc, tree type, int warn)
6367 REAL_VALUE_TYPE real;
6369 /* __builtin_inff is intended to be usable to define INFINITY on all
6370 targets. If an infinity is not available, INFINITY expands "to a
6371 positive constant of type float that overflows at translation
6372 time", footnote "In this case, using INFINITY will violate the
6373 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6374 Thus we pedwarn to ensure this constraint violation is
6376 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6377 pedwarn (loc, 0, "target format does not support infinity");
6380 return build_real (type, real);
6383 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6386 fold_builtin_nan (tree arg, tree type, int quiet)
6388 REAL_VALUE_TYPE real;
6391 if (!validate_arg (arg, POINTER_TYPE))
6393 str = c_getstr (arg);
6397 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6400 return build_real (type, real);
6403 /* Return true if the floating point expression T has an integer value.
6404 We also allow +Inf, -Inf and NaN to be considered integer values. */
6407 integer_valued_real_p (tree t)
6409 switch (TREE_CODE (t))
6416 return integer_valued_real_p (TREE_OPERAND (t, 0));
6421 return integer_valued_real_p (TREE_OPERAND (t, 1));
6428 return integer_valued_real_p (TREE_OPERAND (t, 0))
6429 && integer_valued_real_p (TREE_OPERAND (t, 1));
6432 return integer_valued_real_p (TREE_OPERAND (t, 1))
6433 && integer_valued_real_p (TREE_OPERAND (t, 2));
6436 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6440 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6441 if (TREE_CODE (type) == INTEGER_TYPE)
6443 if (TREE_CODE (type) == REAL_TYPE)
6444 return integer_valued_real_p (TREE_OPERAND (t, 0));
6449 switch (builtin_mathfn_code (t))
6451 CASE_FLT_FN (BUILT_IN_CEIL):
6452 CASE_FLT_FN (BUILT_IN_FLOOR):
6453 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6454 CASE_FLT_FN (BUILT_IN_RINT):
6455 CASE_FLT_FN (BUILT_IN_ROUND):
6456 CASE_FLT_FN (BUILT_IN_TRUNC):
6459 CASE_FLT_FN (BUILT_IN_FMIN):
6460 CASE_FLT_FN (BUILT_IN_FMAX):
6461 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6462 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6475 /* FNDECL is assumed to be a builtin where truncation can be propagated
6476 across (for instance floor((double)f) == (double)floorf (f).
6477 Do the transformation for a call with argument ARG. */
6480 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6482 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6484 if (!validate_arg (arg, REAL_TYPE))
6487 /* Integer rounding functions are idempotent. */
6488 if (fcode == builtin_mathfn_code (arg))
6491 /* If argument is already integer valued, and we don't need to worry
6492 about setting errno, there's no need to perform rounding. */
6493 if (! flag_errno_math && integer_valued_real_p (arg))
6498 tree arg0 = strip_float_extensions (arg);
6499 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6500 tree newtype = TREE_TYPE (arg0);
6503 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6504 && (decl = mathfn_built_in (newtype, fcode)))
6505 return fold_convert_loc (loc, ftype,
6506 build_call_expr_loc (loc, decl, 1,
6507 fold_convert_loc (loc,
6514 /* FNDECL is assumed to be builtin which can narrow the FP type of
6515 the argument, for instance lround((double)f) -> lroundf (f).
6516 Do the transformation for a call with argument ARG. */
6519 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6521 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6523 if (!validate_arg (arg, REAL_TYPE))
6526 /* If argument is already integer valued, and we don't need to worry
6527 about setting errno, there's no need to perform rounding. */
6528 if (! flag_errno_math && integer_valued_real_p (arg))
6529 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6530 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6534 tree ftype = TREE_TYPE (arg);
6535 tree arg0 = strip_float_extensions (arg);
6536 tree newtype = TREE_TYPE (arg0);
6539 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6540 && (decl = mathfn_built_in (newtype, fcode)))
6541 return build_call_expr_loc (loc, decl, 1,
6542 fold_convert_loc (loc, newtype, arg0));
6545 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6546 sizeof (long long) == sizeof (long). */
6547 if (TYPE_PRECISION (long_long_integer_type_node)
6548 == TYPE_PRECISION (long_integer_type_node))
6550 tree newfn = NULL_TREE;
6553 CASE_FLT_FN (BUILT_IN_LLCEIL):
6554 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6557 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6558 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6561 CASE_FLT_FN (BUILT_IN_LLROUND):
6562 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6565 CASE_FLT_FN (BUILT_IN_LLRINT):
6566 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6575 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6576 return fold_convert_loc (loc,
6577 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6584 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6585 return type. Return NULL_TREE if no simplification can be made. */
6588 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6592 if (!validate_arg (arg, COMPLEX_TYPE)
6593 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6596 /* Calculate the result when the argument is a constant. */
6597 if (TREE_CODE (arg) == COMPLEX_CST
6598 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6602 if (TREE_CODE (arg) == COMPLEX_EXPR)
6604 tree real = TREE_OPERAND (arg, 0);
6605 tree imag = TREE_OPERAND (arg, 1);
6607 /* If either part is zero, cabs is fabs of the other. */
6608 if (real_zerop (real))
6609 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6610 if (real_zerop (imag))
6611 return fold_build1_loc (loc, ABS_EXPR, type, real);
6613 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6614 if (flag_unsafe_math_optimizations
6615 && operand_equal_p (real, imag, OEP_PURE_SAME))
6617 const REAL_VALUE_TYPE sqrt2_trunc
6618 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6620 return fold_build2_loc (loc, MULT_EXPR, type,
6621 fold_build1_loc (loc, ABS_EXPR, type, real),
6622 build_real (type, sqrt2_trunc));
6626 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6627 if (TREE_CODE (arg) == NEGATE_EXPR
6628 || TREE_CODE (arg) == CONJ_EXPR)
6629 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6631 /* Don't do this when optimizing for size. */
6632 if (flag_unsafe_math_optimizations
6633 && optimize && optimize_function_for_speed_p (cfun))
6635 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6637 if (sqrtfn != NULL_TREE)
6639 tree rpart, ipart, result;
6641 arg = builtin_save_expr (arg);
6643 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6644 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6646 rpart = builtin_save_expr (rpart);
6647 ipart = builtin_save_expr (ipart);
6649 result = fold_build2_loc (loc, PLUS_EXPR, type,
6650 fold_build2_loc (loc, MULT_EXPR, type,
6652 fold_build2_loc (loc, MULT_EXPR, type,
6655 return build_call_expr_loc (loc, sqrtfn, 1, result);
6662 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
6663 complex tree type of the result. If NEG is true, the imaginary
6664 zero is negative. */
6667 build_complex_cproj (tree type, bool neg)
6669 REAL_VALUE_TYPE rinf, rzero = dconst0;
6673 return build_complex (type, build_real (TREE_TYPE (type), rinf),
6674 build_real (TREE_TYPE (type), rzero));
6677 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
6678 return type. Return NULL_TREE if no simplification can be made. */
6681 fold_builtin_cproj (location_t loc, tree arg, tree type)
6683 if (!validate_arg (arg, COMPLEX_TYPE)
6684 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6687 /* If there are no infinities, return arg. */
6688 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
6689 return non_lvalue_loc (loc, arg);
6691 /* Calculate the result when the argument is a constant. */
6692 if (TREE_CODE (arg) == COMPLEX_CST)
6694 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
6695 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
6697 if (real_isinf (real) || real_isinf (imag))
6698 return build_complex_cproj (type, imag->sign);
6702 else if (TREE_CODE (arg) == COMPLEX_EXPR)
6704 tree real = TREE_OPERAND (arg, 0);
6705 tree imag = TREE_OPERAND (arg, 1);
6710 /* If the real part is inf and the imag part is known to be
6711 nonnegative, return (inf + 0i). Remember side-effects are
6712 possible in the imag part. */
6713 if (TREE_CODE (real) == REAL_CST
6714 && real_isinf (TREE_REAL_CST_PTR (real))
6715 && tree_expr_nonnegative_p (imag))
6716 return omit_one_operand_loc (loc, type,
6717 build_complex_cproj (type, false),
6720 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
6721 Remember side-effects are possible in the real part. */
6722 if (TREE_CODE (imag) == REAL_CST
6723 && real_isinf (TREE_REAL_CST_PTR (imag)))
6725 omit_one_operand_loc (loc, type,
6726 build_complex_cproj (type, TREE_REAL_CST_PTR
6727 (imag)->sign), arg);
6733 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6734 Return NULL_TREE if no simplification can be made. */
6737 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6740 enum built_in_function fcode;
6743 if (!validate_arg (arg, REAL_TYPE))
6746 /* Calculate the result when the argument is a constant. */
6747 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6750 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6751 fcode = builtin_mathfn_code (arg);
6752 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6754 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6755 arg = fold_build2_loc (loc, MULT_EXPR, type,
6756 CALL_EXPR_ARG (arg, 0),
6757 build_real (type, dconsthalf));
6758 return build_call_expr_loc (loc, expfn, 1, arg);
6761 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6762 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6764 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6768 tree arg0 = CALL_EXPR_ARG (arg, 0);
6770 /* The inner root was either sqrt or cbrt. */
6771 /* This was a conditional expression but it triggered a bug
6773 REAL_VALUE_TYPE dconstroot;
6774 if (BUILTIN_SQRT_P (fcode))
6775 dconstroot = dconsthalf;
6777 dconstroot = dconst_third ();
6779 /* Adjust for the outer root. */
6780 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6781 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6782 tree_root = build_real (type, dconstroot);
6783 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6787 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6788 if (flag_unsafe_math_optimizations
6789 && (fcode == BUILT_IN_POW
6790 || fcode == BUILT_IN_POWF
6791 || fcode == BUILT_IN_POWL))
6793 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6794 tree arg0 = CALL_EXPR_ARG (arg, 0);
6795 tree arg1 = CALL_EXPR_ARG (arg, 1);
6797 if (!tree_expr_nonnegative_p (arg0))
6798 arg0 = build1 (ABS_EXPR, type, arg0);
6799 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
6800 build_real (type, dconsthalf));
6801 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
6807 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
6808 Return NULL_TREE if no simplification can be made. */
6811 fold_builtin_cbrt (location_t loc, tree arg, tree type)
6813 const enum built_in_function fcode = builtin_mathfn_code (arg);
6816 if (!validate_arg (arg, REAL_TYPE))
6819 /* Calculate the result when the argument is a constant. */
6820 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
6823 if (flag_unsafe_math_optimizations)
6825 /* Optimize cbrt(expN(x)) -> expN(x/3). */
6826 if (BUILTIN_EXPONENT_P (fcode))
6828 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6829 const REAL_VALUE_TYPE third_trunc =
6830 real_value_truncate (TYPE_MODE (type), dconst_third ());
6831 arg = fold_build2_loc (loc, MULT_EXPR, type,
6832 CALL_EXPR_ARG (arg, 0),
6833 build_real (type, third_trunc));
6834 return build_call_expr_loc (loc, expfn, 1, arg);
6837 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
6838 if (BUILTIN_SQRT_P (fcode))
6840 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6844 tree arg0 = CALL_EXPR_ARG (arg, 0);
6846 REAL_VALUE_TYPE dconstroot = dconst_third ();
6848 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6849 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6850 tree_root = build_real (type, dconstroot);
6851 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6855 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
6856 if (BUILTIN_CBRT_P (fcode))
6858 tree arg0 = CALL_EXPR_ARG (arg, 0);
6859 if (tree_expr_nonnegative_p (arg0))
6861 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6866 REAL_VALUE_TYPE dconstroot;
6868 real_arithmetic (&dconstroot, MULT_EXPR,
6869 dconst_third_ptr (), dconst_third_ptr ());
6870 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6871 tree_root = build_real (type, dconstroot);
6872 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6877 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
6878 if (fcode == BUILT_IN_POW
6879 || fcode == BUILT_IN_POWF
6880 || fcode == BUILT_IN_POWL)
6882 tree arg00 = CALL_EXPR_ARG (arg, 0);
6883 tree arg01 = CALL_EXPR_ARG (arg, 1);
6884 if (tree_expr_nonnegative_p (arg00))
6886 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6887 const REAL_VALUE_TYPE dconstroot
6888 = real_value_truncate (TYPE_MODE (type), dconst_third ());
6889 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
6890 build_real (type, dconstroot));
6891 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
6898 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
6899 TYPE is the type of the return value. Return NULL_TREE if no
6900 simplification can be made. */
6903 fold_builtin_cos (location_t loc,
6904 tree arg, tree type, tree fndecl)
6908 if (!validate_arg (arg, REAL_TYPE))
6911 /* Calculate the result when the argument is a constant. */
6912 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
6915 /* Optimize cos(-x) into cos (x). */
6916 if ((narg = fold_strip_sign_ops (arg)))
6917 return build_call_expr_loc (loc, fndecl, 1, narg);
6922 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
6923 Return NULL_TREE if no simplification can be made. */
6926 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
6928 if (validate_arg (arg, REAL_TYPE))
6932 /* Calculate the result when the argument is a constant. */
6933 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
6936 /* Optimize cosh(-x) into cosh (x). */
6937 if ((narg = fold_strip_sign_ops (arg)))
6938 return build_call_expr_loc (loc, fndecl, 1, narg);
6944 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
6945 argument ARG. TYPE is the type of the return value. Return
6946 NULL_TREE if no simplification can be made. */
6949 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
6952 if (validate_arg (arg, COMPLEX_TYPE)
6953 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
6957 /* Calculate the result when the argument is a constant. */
6958 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
6961 /* Optimize fn(-x) into fn(x). */
6962 if ((tmp = fold_strip_sign_ops (arg)))
6963 return build_call_expr_loc (loc, fndecl, 1, tmp);
6969 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
6970 Return NULL_TREE if no simplification can be made. */
6973 fold_builtin_tan (tree arg, tree type)
6975 enum built_in_function fcode;
6978 if (!validate_arg (arg, REAL_TYPE))
6981 /* Calculate the result when the argument is a constant. */
6982 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
6985 /* Optimize tan(atan(x)) = x. */
6986 fcode = builtin_mathfn_code (arg);
6987 if (flag_unsafe_math_optimizations
6988 && (fcode == BUILT_IN_ATAN
6989 || fcode == BUILT_IN_ATANF
6990 || fcode == BUILT_IN_ATANL))
6991 return CALL_EXPR_ARG (arg, 0);
6996 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
6997 NULL_TREE if no simplification can be made. */
7000 fold_builtin_sincos (location_t loc,
7001 tree arg0, tree arg1, tree arg2)
7006 if (!validate_arg (arg0, REAL_TYPE)
7007 || !validate_arg (arg1, POINTER_TYPE)
7008 || !validate_arg (arg2, POINTER_TYPE))
7011 type = TREE_TYPE (arg0);
7013 /* Calculate the result when the argument is a constant. */
7014 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7017 /* Canonicalize sincos to cexpi. */
7018 if (!TARGET_C99_FUNCTIONS)
7020 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7024 call = build_call_expr_loc (loc, fn, 1, arg0);
7025 call = builtin_save_expr (call);
7027 return build2 (COMPOUND_EXPR, void_type_node,
7028 build2 (MODIFY_EXPR, void_type_node,
7029 build_fold_indirect_ref_loc (loc, arg1),
7030 build1 (IMAGPART_EXPR, type, call)),
7031 build2 (MODIFY_EXPR, void_type_node,
7032 build_fold_indirect_ref_loc (loc, arg2),
7033 build1 (REALPART_EXPR, type, call)));
7036 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7037 NULL_TREE if no simplification can be made. */
7040 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7043 tree realp, imagp, ifn;
7046 if (!validate_arg (arg0, COMPLEX_TYPE)
7047 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7050 /* Calculate the result when the argument is a constant. */
7051 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7054 rtype = TREE_TYPE (TREE_TYPE (arg0));
7056 /* In case we can figure out the real part of arg0 and it is constant zero
7058 if (!TARGET_C99_FUNCTIONS)
7060 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7064 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7065 && real_zerop (realp))
7067 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7068 return build_call_expr_loc (loc, ifn, 1, narg);
7071 /* In case we can easily decompose real and imaginary parts split cexp
7072 to exp (r) * cexpi (i). */
7073 if (flag_unsafe_math_optimizations
7076 tree rfn, rcall, icall;
7078 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7082 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7086 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7087 icall = builtin_save_expr (icall);
7088 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7089 rcall = builtin_save_expr (rcall);
7090 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7091 fold_build2_loc (loc, MULT_EXPR, rtype,
7093 fold_build1_loc (loc, REALPART_EXPR,
7095 fold_build2_loc (loc, MULT_EXPR, rtype,
7097 fold_build1_loc (loc, IMAGPART_EXPR,
7104 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7105 Return NULL_TREE if no simplification can be made. */
7108 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7110 if (!validate_arg (arg, REAL_TYPE))
7113 /* Optimize trunc of constant value. */
7114 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7116 REAL_VALUE_TYPE r, x;
7117 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7119 x = TREE_REAL_CST (arg);
7120 real_trunc (&r, TYPE_MODE (type), &x);
7121 return build_real (type, r);
7124 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7127 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7128 Return NULL_TREE if no simplification can be made. */
7131 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7133 if (!validate_arg (arg, REAL_TYPE))
7136 /* Optimize floor of constant value. */
7137 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7141 x = TREE_REAL_CST (arg);
7142 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7144 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7147 real_floor (&r, TYPE_MODE (type), &x);
7148 return build_real (type, r);
7152 /* Fold floor (x) where x is nonnegative to trunc (x). */
7153 if (tree_expr_nonnegative_p (arg))
7155 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7157 return build_call_expr_loc (loc, truncfn, 1, arg);
7160 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7163 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7164 Return NULL_TREE if no simplification can be made. */
7167 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7169 if (!validate_arg (arg, REAL_TYPE))
7172 /* Optimize ceil of constant value. */
7173 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7177 x = TREE_REAL_CST (arg);
7178 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7180 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7183 real_ceil (&r, TYPE_MODE (type), &x);
7184 return build_real (type, r);
7188 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7191 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7192 Return NULL_TREE if no simplification can be made. */
7195 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7197 if (!validate_arg (arg, REAL_TYPE))
7200 /* Optimize round of constant value. */
7201 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7205 x = TREE_REAL_CST (arg);
7206 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7208 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7211 real_round (&r, TYPE_MODE (type), &x);
7212 return build_real (type, r);
7216 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7219 /* Fold function call to builtin lround, lroundf or lroundl (or the
7220 corresponding long long versions) and other rounding functions. ARG
7221 is the argument to the call. Return NULL_TREE if no simplification
7225 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7227 if (!validate_arg (arg, REAL_TYPE))
7230 /* Optimize lround of constant value. */
7231 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7233 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7235 if (real_isfinite (&x))
7237 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7238 tree ftype = TREE_TYPE (arg);
7242 switch (DECL_FUNCTION_CODE (fndecl))
7244 CASE_FLT_FN (BUILT_IN_LFLOOR):
7245 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7246 real_floor (&r, TYPE_MODE (ftype), &x);
7249 CASE_FLT_FN (BUILT_IN_LCEIL):
7250 CASE_FLT_FN (BUILT_IN_LLCEIL):
7251 real_ceil (&r, TYPE_MODE (ftype), &x);
7254 CASE_FLT_FN (BUILT_IN_LROUND):
7255 CASE_FLT_FN (BUILT_IN_LLROUND):
7256 real_round (&r, TYPE_MODE (ftype), &x);
7263 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7264 if (double_int_fits_to_tree_p (itype, val))
7265 return double_int_to_tree (itype, val);
7269 switch (DECL_FUNCTION_CODE (fndecl))
7271 CASE_FLT_FN (BUILT_IN_LFLOOR):
7272 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7273 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7274 if (tree_expr_nonnegative_p (arg))
7275 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7276 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7281 return fold_fixed_mathfn (loc, fndecl, arg);
7284 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7285 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7286 the argument to the call. Return NULL_TREE if no simplification can
7290 fold_builtin_bitop (tree fndecl, tree arg)
7292 if (!validate_arg (arg, INTEGER_TYPE))
7295 /* Optimize for constant argument. */
7296 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7298 HOST_WIDE_INT hi, width, result;
7299 unsigned HOST_WIDE_INT lo;
7302 type = TREE_TYPE (arg);
7303 width = TYPE_PRECISION (type);
7304 lo = TREE_INT_CST_LOW (arg);
7306 /* Clear all the bits that are beyond the type's precision. */
7307 if (width > HOST_BITS_PER_WIDE_INT)
7309 hi = TREE_INT_CST_HIGH (arg);
7310 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7311 hi &= ~((unsigned HOST_WIDE_INT) (-1)
7312 << (width - HOST_BITS_PER_WIDE_INT));
7317 if (width < HOST_BITS_PER_WIDE_INT)
7318 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7321 switch (DECL_FUNCTION_CODE (fndecl))
7323 CASE_INT_FN (BUILT_IN_FFS):
7325 result = ffs_hwi (lo);
7327 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7332 CASE_INT_FN (BUILT_IN_CLZ):
7334 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7336 result = width - floor_log2 (lo) - 1;
7337 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7341 CASE_INT_FN (BUILT_IN_CTZ):
7343 result = ctz_hwi (lo);
7345 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7346 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7350 CASE_INT_FN (BUILT_IN_CLRSB):
7351 if (width > HOST_BITS_PER_WIDE_INT
7352 && (hi & ((unsigned HOST_WIDE_INT) 1
7353 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
7355 hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
7356 << (width - HOST_BITS_PER_WIDE_INT - 1));
7359 else if (width <= HOST_BITS_PER_WIDE_INT
7360 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
7361 lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
7363 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
7365 result = width - floor_log2 (lo) - 2;
7370 CASE_INT_FN (BUILT_IN_POPCOUNT):
7373 result++, lo &= lo - 1;
7375 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7378 CASE_INT_FN (BUILT_IN_PARITY):
7381 result++, lo &= lo - 1;
7383 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7391 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7397 /* Fold function call to builtin_bswap and the long and long long
7398 variants. Return NULL_TREE if no simplification can be made. */
7400 fold_builtin_bswap (tree fndecl, tree arg)
7402 if (! validate_arg (arg, INTEGER_TYPE))
7405 /* Optimize constant value. */
7406 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7408 HOST_WIDE_INT hi, width, r_hi = 0;
7409 unsigned HOST_WIDE_INT lo, r_lo = 0;
7412 type = TREE_TYPE (arg);
7413 width = TYPE_PRECISION (type);
7414 lo = TREE_INT_CST_LOW (arg);
7415 hi = TREE_INT_CST_HIGH (arg);
7417 switch (DECL_FUNCTION_CODE (fndecl))
7419 case BUILT_IN_BSWAP32:
7420 case BUILT_IN_BSWAP64:
7424 for (s = 0; s < width; s += 8)
7426 int d = width - s - 8;
7427 unsigned HOST_WIDE_INT byte;
7429 if (s < HOST_BITS_PER_WIDE_INT)
7430 byte = (lo >> s) & 0xff;
7432 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7434 if (d < HOST_BITS_PER_WIDE_INT)
7437 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7447 if (width < HOST_BITS_PER_WIDE_INT)
7448 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7450 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7456 /* A subroutine of fold_builtin to fold the various logarithmic
7457 functions. Return NULL_TREE if no simplification can me made.
7458 FUNC is the corresponding MPFR logarithm function. */
7461 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7462 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7464 if (validate_arg (arg, REAL_TYPE))
7466 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7468 const enum built_in_function fcode = builtin_mathfn_code (arg);
7470 /* Calculate the result when the argument is a constant. */
7471 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7474 /* Special case, optimize logN(expN(x)) = x. */
7475 if (flag_unsafe_math_optimizations
7476 && ((func == mpfr_log
7477 && (fcode == BUILT_IN_EXP
7478 || fcode == BUILT_IN_EXPF
7479 || fcode == BUILT_IN_EXPL))
7480 || (func == mpfr_log2
7481 && (fcode == BUILT_IN_EXP2
7482 || fcode == BUILT_IN_EXP2F
7483 || fcode == BUILT_IN_EXP2L))
7484 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7485 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7487 /* Optimize logN(func()) for various exponential functions. We
7488 want to determine the value "x" and the power "exponent" in
7489 order to transform logN(x**exponent) into exponent*logN(x). */
7490 if (flag_unsafe_math_optimizations)
7492 tree exponent = 0, x = 0;
7496 CASE_FLT_FN (BUILT_IN_EXP):
7497 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7498 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7500 exponent = CALL_EXPR_ARG (arg, 0);
7502 CASE_FLT_FN (BUILT_IN_EXP2):
7503 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7504 x = build_real (type, dconst2);
7505 exponent = CALL_EXPR_ARG (arg, 0);
7507 CASE_FLT_FN (BUILT_IN_EXP10):
7508 CASE_FLT_FN (BUILT_IN_POW10):
7509 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7511 REAL_VALUE_TYPE dconst10;
7512 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7513 x = build_real (type, dconst10);
7515 exponent = CALL_EXPR_ARG (arg, 0);
7517 CASE_FLT_FN (BUILT_IN_SQRT):
7518 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7519 x = CALL_EXPR_ARG (arg, 0);
7520 exponent = build_real (type, dconsthalf);
7522 CASE_FLT_FN (BUILT_IN_CBRT):
7523 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7524 x = CALL_EXPR_ARG (arg, 0);
7525 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7528 CASE_FLT_FN (BUILT_IN_POW):
7529 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7530 x = CALL_EXPR_ARG (arg, 0);
7531 exponent = CALL_EXPR_ARG (arg, 1);
7537 /* Now perform the optimization. */
7540 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7541 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7549 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7550 NULL_TREE if no simplification can be made. */
7553 fold_builtin_hypot (location_t loc, tree fndecl,
7554 tree arg0, tree arg1, tree type)
7556 tree res, narg0, narg1;
7558 if (!validate_arg (arg0, REAL_TYPE)
7559 || !validate_arg (arg1, REAL_TYPE))
7562 /* Calculate the result when the argument is a constant. */
7563 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7566 /* If either argument to hypot has a negate or abs, strip that off.
7567 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7568 narg0 = fold_strip_sign_ops (arg0);
7569 narg1 = fold_strip_sign_ops (arg1);
7572 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7573 narg1 ? narg1 : arg1);
7576 /* If either argument is zero, hypot is fabs of the other. */
7577 if (real_zerop (arg0))
7578 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7579 else if (real_zerop (arg1))
7580 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7582 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7583 if (flag_unsafe_math_optimizations
7584 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7586 const REAL_VALUE_TYPE sqrt2_trunc
7587 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7588 return fold_build2_loc (loc, MULT_EXPR, type,
7589 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7590 build_real (type, sqrt2_trunc));
7597 /* Fold a builtin function call to pow, powf, or powl. Return
7598 NULL_TREE if no simplification can be made. */
7600 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7604 if (!validate_arg (arg0, REAL_TYPE)
7605 || !validate_arg (arg1, REAL_TYPE))
7608 /* Calculate the result when the argument is a constant. */
7609 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7612 /* Optimize pow(1.0,y) = 1.0. */
7613 if (real_onep (arg0))
7614 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7616 if (TREE_CODE (arg1) == REAL_CST
7617 && !TREE_OVERFLOW (arg1))
7619 REAL_VALUE_TYPE cint;
7623 c = TREE_REAL_CST (arg1);
7625 /* Optimize pow(x,0.0) = 1.0. */
7626 if (REAL_VALUES_EQUAL (c, dconst0))
7627 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7630 /* Optimize pow(x,1.0) = x. */
7631 if (REAL_VALUES_EQUAL (c, dconst1))
7634 /* Optimize pow(x,-1.0) = 1.0/x. */
7635 if (REAL_VALUES_EQUAL (c, dconstm1))
7636 return fold_build2_loc (loc, RDIV_EXPR, type,
7637 build_real (type, dconst1), arg0);
7639 /* Optimize pow(x,0.5) = sqrt(x). */
7640 if (flag_unsafe_math_optimizations
7641 && REAL_VALUES_EQUAL (c, dconsthalf))
7643 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7645 if (sqrtfn != NULL_TREE)
7646 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7649 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7650 if (flag_unsafe_math_optimizations)
7652 const REAL_VALUE_TYPE dconstroot
7653 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7655 if (REAL_VALUES_EQUAL (c, dconstroot))
7657 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7658 if (cbrtfn != NULL_TREE)
7659 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7663 /* Check for an integer exponent. */
7664 n = real_to_integer (&c);
7665 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7666 if (real_identical (&c, &cint))
7668 /* Attempt to evaluate pow at compile-time, unless this should
7669 raise an exception. */
7670 if (TREE_CODE (arg0) == REAL_CST
7671 && !TREE_OVERFLOW (arg0)
7673 || (!flag_trapping_math && !flag_errno_math)
7674 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7679 x = TREE_REAL_CST (arg0);
7680 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7681 if (flag_unsafe_math_optimizations || !inexact)
7682 return build_real (type, x);
7685 /* Strip sign ops from even integer powers. */
7686 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7688 tree narg0 = fold_strip_sign_ops (arg0);
7690 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7695 if (flag_unsafe_math_optimizations)
7697 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7699 /* Optimize pow(expN(x),y) = expN(x*y). */
7700 if (BUILTIN_EXPONENT_P (fcode))
7702 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7703 tree arg = CALL_EXPR_ARG (arg0, 0);
7704 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7705 return build_call_expr_loc (loc, expfn, 1, arg);
7708 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7709 if (BUILTIN_SQRT_P (fcode))
7711 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7712 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7713 build_real (type, dconsthalf));
7714 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7717 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7718 if (BUILTIN_CBRT_P (fcode))
7720 tree arg = CALL_EXPR_ARG (arg0, 0);
7721 if (tree_expr_nonnegative_p (arg))
7723 const REAL_VALUE_TYPE dconstroot
7724 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7725 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7726 build_real (type, dconstroot));
7727 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7731 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7732 if (fcode == BUILT_IN_POW
7733 || fcode == BUILT_IN_POWF
7734 || fcode == BUILT_IN_POWL)
7736 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7737 if (tree_expr_nonnegative_p (arg00))
7739 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7740 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7741 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7749 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7750 Return NULL_TREE if no simplification can be made. */
7752 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7753 tree arg0, tree arg1, tree type)
7755 if (!validate_arg (arg0, REAL_TYPE)
7756 || !validate_arg (arg1, INTEGER_TYPE))
7759 /* Optimize pow(1.0,y) = 1.0. */
7760 if (real_onep (arg0))
7761 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7763 if (host_integerp (arg1, 0))
7765 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7767 /* Evaluate powi at compile-time. */
7768 if (TREE_CODE (arg0) == REAL_CST
7769 && !TREE_OVERFLOW (arg0))
7772 x = TREE_REAL_CST (arg0);
7773 real_powi (&x, TYPE_MODE (type), &x, c);
7774 return build_real (type, x);
7777 /* Optimize pow(x,0) = 1.0. */
7779 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7782 /* Optimize pow(x,1) = x. */
7786 /* Optimize pow(x,-1) = 1.0/x. */
7788 return fold_build2_loc (loc, RDIV_EXPR, type,
7789 build_real (type, dconst1), arg0);
7795 /* A subroutine of fold_builtin to fold the various exponent
7796 functions. Return NULL_TREE if no simplification can be made.
7797 FUNC is the corresponding MPFR exponent function. */
7800 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7801 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7803 if (validate_arg (arg, REAL_TYPE))
7805 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7808 /* Calculate the result when the argument is a constant. */
7809 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7812 /* Optimize expN(logN(x)) = x. */
7813 if (flag_unsafe_math_optimizations)
7815 const enum built_in_function fcode = builtin_mathfn_code (arg);
7817 if ((func == mpfr_exp
7818 && (fcode == BUILT_IN_LOG
7819 || fcode == BUILT_IN_LOGF
7820 || fcode == BUILT_IN_LOGL))
7821 || (func == mpfr_exp2
7822 && (fcode == BUILT_IN_LOG2
7823 || fcode == BUILT_IN_LOG2F
7824 || fcode == BUILT_IN_LOG2L))
7825 || (func == mpfr_exp10
7826 && (fcode == BUILT_IN_LOG10
7827 || fcode == BUILT_IN_LOG10F
7828 || fcode == BUILT_IN_LOG10L)))
7829 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7836 /* Return true if VAR is a VAR_DECL or a component thereof. */
7839 var_decl_component_p (tree var)
7842 while (handled_component_p (inner))
7843 inner = TREE_OPERAND (inner, 0);
7844 return SSA_VAR_P (inner);
7847 /* Fold function call to builtin memset. Return
7848 NULL_TREE if no simplification can be made. */
7851 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
7852 tree type, bool ignore)
7854 tree var, ret, etype;
7855 unsigned HOST_WIDE_INT length, cval;
7857 if (! validate_arg (dest, POINTER_TYPE)
7858 || ! validate_arg (c, INTEGER_TYPE)
7859 || ! validate_arg (len, INTEGER_TYPE))
7862 if (! host_integerp (len, 1))
7865 /* If the LEN parameter is zero, return DEST. */
7866 if (integer_zerop (len))
7867 return omit_one_operand_loc (loc, type, dest, c);
7869 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
7874 if (TREE_CODE (var) != ADDR_EXPR)
7877 var = TREE_OPERAND (var, 0);
7878 if (TREE_THIS_VOLATILE (var))
7881 etype = TREE_TYPE (var);
7882 if (TREE_CODE (etype) == ARRAY_TYPE)
7883 etype = TREE_TYPE (etype);
7885 if (!INTEGRAL_TYPE_P (etype)
7886 && !POINTER_TYPE_P (etype))
7889 if (! var_decl_component_p (var))
7892 length = tree_low_cst (len, 1);
7893 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
7894 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
7898 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
7901 if (integer_zerop (c))
7905 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
7908 cval = TREE_INT_CST_LOW (c);
7912 cval |= (cval << 31) << 1;
7915 ret = build_int_cst_type (etype, cval);
7916 var = build_fold_indirect_ref_loc (loc,
7917 fold_convert_loc (loc,
7918 build_pointer_type (etype),
7920 ret = build2 (MODIFY_EXPR, etype, var, ret);
7924 return omit_one_operand_loc (loc, type, dest, ret);
7927 /* Fold function call to builtin memset. Return
7928 NULL_TREE if no simplification can be made. */
7931 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
7933 if (! validate_arg (dest, POINTER_TYPE)
7934 || ! validate_arg (size, INTEGER_TYPE))
7940 /* New argument list transforming bzero(ptr x, int y) to
7941 memset(ptr x, int 0, size_t y). This is done this way
7942 so that if it isn't expanded inline, we fallback to
7943 calling bzero instead of memset. */
7945 return fold_builtin_memset (loc, dest, integer_zero_node,
7946 fold_convert_loc (loc, sizetype, size),
7947 void_type_node, ignore);
7950 /* Fold function call to builtin mem{{,p}cpy,move}. Return
7951 NULL_TREE if no simplification can be made.
7952 If ENDP is 0, return DEST (like memcpy).
7953 If ENDP is 1, return DEST+LEN (like mempcpy).
7954 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
7955 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
7959 fold_builtin_memory_op (location_t loc, tree dest, tree src,
7960 tree len, tree type, bool ignore, int endp)
7962 tree destvar, srcvar, expr;
7964 if (! validate_arg (dest, POINTER_TYPE)
7965 || ! validate_arg (src, POINTER_TYPE)
7966 || ! validate_arg (len, INTEGER_TYPE))
7969 /* If the LEN parameter is zero, return DEST. */
7970 if (integer_zerop (len))
7971 return omit_one_operand_loc (loc, type, dest, src);
7973 /* If SRC and DEST are the same (and not volatile), return
7974 DEST{,+LEN,+LEN-1}. */
7975 if (operand_equal_p (src, dest, 0))
7979 tree srctype, desttype;
7980 unsigned int src_align, dest_align;
7985 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
7986 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
7988 /* Both DEST and SRC must be pointer types.
7989 ??? This is what old code did. Is the testing for pointer types
7992 If either SRC is readonly or length is 1, we can use memcpy. */
7993 if (!dest_align || !src_align)
7995 if (readonly_data_expr (src)
7996 || (host_integerp (len, 1)
7997 && (MIN (src_align, dest_align) / BITS_PER_UNIT
7998 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8000 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8003 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8006 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8007 if (TREE_CODE (src) == ADDR_EXPR
8008 && TREE_CODE (dest) == ADDR_EXPR)
8010 tree src_base, dest_base, fn;
8011 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8012 HOST_WIDE_INT size = -1;
8013 HOST_WIDE_INT maxsize = -1;
8015 srcvar = TREE_OPERAND (src, 0);
8016 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8018 destvar = TREE_OPERAND (dest, 0);
8019 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8021 if (host_integerp (len, 1))
8022 maxsize = tree_low_cst (len, 1);
8025 src_offset /= BITS_PER_UNIT;
8026 dest_offset /= BITS_PER_UNIT;
8027 if (SSA_VAR_P (src_base)
8028 && SSA_VAR_P (dest_base))
8030 if (operand_equal_p (src_base, dest_base, 0)
8031 && ranges_overlap_p (src_offset, maxsize,
8032 dest_offset, maxsize))
8035 else if (TREE_CODE (src_base) == MEM_REF
8036 && TREE_CODE (dest_base) == MEM_REF)
8039 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8040 TREE_OPERAND (dest_base, 0), 0))
8042 off = double_int_add (mem_ref_offset (src_base),
8043 shwi_to_double_int (src_offset));
8044 if (!double_int_fits_in_shwi_p (off))
8046 src_offset = off.low;
8047 off = double_int_add (mem_ref_offset (dest_base),
8048 shwi_to_double_int (dest_offset));
8049 if (!double_int_fits_in_shwi_p (off))
8051 dest_offset = off.low;
8052 if (ranges_overlap_p (src_offset, maxsize,
8053 dest_offset, maxsize))
8059 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8062 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8065 /* If the destination and source do not alias optimize into
8067 if ((is_gimple_min_invariant (dest)
8068 || TREE_CODE (dest) == SSA_NAME)
8069 && (is_gimple_min_invariant (src)
8070 || TREE_CODE (src) == SSA_NAME))
8073 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8074 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8075 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8078 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8081 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8088 if (!host_integerp (len, 0))
8091 This logic lose for arguments like (type *)malloc (sizeof (type)),
8092 since we strip the casts of up to VOID return value from malloc.
8093 Perhaps we ought to inherit type from non-VOID argument here? */
8096 if (!POINTER_TYPE_P (TREE_TYPE (src))
8097 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8099 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8100 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8102 tree tem = TREE_OPERAND (src, 0);
8104 if (tem != TREE_OPERAND (src, 0))
8105 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8107 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8109 tree tem = TREE_OPERAND (dest, 0);
8111 if (tem != TREE_OPERAND (dest, 0))
8112 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8114 srctype = TREE_TYPE (TREE_TYPE (src));
8115 if (TREE_CODE (srctype) == ARRAY_TYPE
8116 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8118 srctype = TREE_TYPE (srctype);
8120 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8122 desttype = TREE_TYPE (TREE_TYPE (dest));
8123 if (TREE_CODE (desttype) == ARRAY_TYPE
8124 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8126 desttype = TREE_TYPE (desttype);
8128 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8130 if (TREE_ADDRESSABLE (srctype)
8131 || TREE_ADDRESSABLE (desttype))
8134 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8135 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8136 if (dest_align < TYPE_ALIGN (desttype)
8137 || src_align < TYPE_ALIGN (srctype))
8141 dest = builtin_save_expr (dest);
8143 /* Build accesses at offset zero with a ref-all character type. */
8144 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8145 ptr_mode, true), 0);
8148 STRIP_NOPS (destvar);
8149 if (TREE_CODE (destvar) == ADDR_EXPR
8150 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8151 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8152 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8154 destvar = NULL_TREE;
8157 STRIP_NOPS (srcvar);
8158 if (TREE_CODE (srcvar) == ADDR_EXPR
8159 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8160 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8163 || src_align >= TYPE_ALIGN (desttype))
8164 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8166 else if (!STRICT_ALIGNMENT)
8168 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8170 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8178 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8181 if (srcvar == NULL_TREE)
8184 if (src_align >= TYPE_ALIGN (desttype))
8185 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8188 if (STRICT_ALIGNMENT)
8190 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8192 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8195 else if (destvar == NULL_TREE)
8198 if (dest_align >= TYPE_ALIGN (srctype))
8199 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8202 if (STRICT_ALIGNMENT)
8204 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8206 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8210 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8216 if (endp == 0 || endp == 3)
8217 return omit_one_operand_loc (loc, type, dest, expr);
8223 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8226 dest = fold_build_pointer_plus_loc (loc, dest, len);
8227 dest = fold_convert_loc (loc, type, dest);
8229 dest = omit_one_operand_loc (loc, type, dest, expr);
8233 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8234 If LEN is not NULL, it represents the length of the string to be
8235 copied. Return NULL_TREE if no simplification can be made. */
8238 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8242 if (!validate_arg (dest, POINTER_TYPE)
8243 || !validate_arg (src, POINTER_TYPE))
8246 /* If SRC and DEST are the same (and not volatile), return DEST. */
8247 if (operand_equal_p (src, dest, 0))
8248 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8250 if (optimize_function_for_size_p (cfun))
8253 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8259 len = c_strlen (src, 1);
8260 if (! len || TREE_SIDE_EFFECTS (len))
8264 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8265 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8266 build_call_expr_loc (loc, fn, 3, dest, src, len));
8269 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8270 Return NULL_TREE if no simplification can be made. */
8273 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8275 tree fn, len, lenp1, call, type;
8277 if (!validate_arg (dest, POINTER_TYPE)
8278 || !validate_arg (src, POINTER_TYPE))
8281 len = c_strlen (src, 1);
8283 || TREE_CODE (len) != INTEGER_CST)
8286 if (optimize_function_for_size_p (cfun)
8287 /* If length is zero it's small enough. */
8288 && !integer_zerop (len))
8291 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8295 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8296 /* We use dest twice in building our expression. Save it from
8297 multiple expansions. */
8298 dest = builtin_save_expr (dest);
8299 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8301 type = TREE_TYPE (TREE_TYPE (fndecl));
8302 dest = fold_build_pointer_plus_loc (loc, dest, len);
8303 dest = fold_convert_loc (loc, type, dest);
8304 dest = omit_one_operand_loc (loc, type, dest, call);
8308 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8309 If SLEN is not NULL, it represents the length of the source string.
8310 Return NULL_TREE if no simplification can be made. */
8313 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8314 tree src, tree len, tree slen)
8318 if (!validate_arg (dest, POINTER_TYPE)
8319 || !validate_arg (src, POINTER_TYPE)
8320 || !validate_arg (len, INTEGER_TYPE))
8323 /* If the LEN parameter is zero, return DEST. */
8324 if (integer_zerop (len))
8325 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8327 /* We can't compare slen with len as constants below if len is not a
8329 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8333 slen = c_strlen (src, 1);
8335 /* Now, we must be passed a constant src ptr parameter. */
8336 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8339 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8341 /* We do not support simplification of this case, though we do
8342 support it when expanding trees into RTL. */
8343 /* FIXME: generate a call to __builtin_memset. */
8344 if (tree_int_cst_lt (slen, len))
8347 /* OK transform into builtin memcpy. */
8348 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8351 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8352 build_call_expr_loc (loc, fn, 3, dest, src, len));
8355 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8356 arguments to the call, and TYPE is its return type.
8357 Return NULL_TREE if no simplification can be made. */
8360 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8362 if (!validate_arg (arg1, POINTER_TYPE)
8363 || !validate_arg (arg2, INTEGER_TYPE)
8364 || !validate_arg (len, INTEGER_TYPE))
8370 if (TREE_CODE (arg2) != INTEGER_CST
8371 || !host_integerp (len, 1))
8374 p1 = c_getstr (arg1);
8375 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8381 if (target_char_cast (arg2, &c))
8384 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8387 return build_int_cst (TREE_TYPE (arg1), 0);
8389 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8390 return fold_convert_loc (loc, type, tem);
8396 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8397 Return NULL_TREE if no simplification can be made. */
8400 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8402 const char *p1, *p2;
8404 if (!validate_arg (arg1, POINTER_TYPE)
8405 || !validate_arg (arg2, POINTER_TYPE)
8406 || !validate_arg (len, INTEGER_TYPE))
8409 /* If the LEN parameter is zero, return zero. */
8410 if (integer_zerop (len))
8411 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8414 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8415 if (operand_equal_p (arg1, arg2, 0))
8416 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8418 p1 = c_getstr (arg1);
8419 p2 = c_getstr (arg2);
8421 /* If all arguments are constant, and the value of len is not greater
8422 than the lengths of arg1 and arg2, evaluate at compile-time. */
8423 if (host_integerp (len, 1) && p1 && p2
8424 && compare_tree_int (len, strlen (p1) + 1) <= 0
8425 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8427 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8430 return integer_one_node;
8432 return integer_minus_one_node;
8434 return integer_zero_node;
8437 /* If len parameter is one, return an expression corresponding to
8438 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8439 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8441 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8442 tree cst_uchar_ptr_node
8443 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8446 = fold_convert_loc (loc, integer_type_node,
8447 build1 (INDIRECT_REF, cst_uchar_node,
8448 fold_convert_loc (loc,
8452 = fold_convert_loc (loc, integer_type_node,
8453 build1 (INDIRECT_REF, cst_uchar_node,
8454 fold_convert_loc (loc,
8457 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8463 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8464 Return NULL_TREE if no simplification can be made. */
8467 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8469 const char *p1, *p2;
8471 if (!validate_arg (arg1, POINTER_TYPE)
8472 || !validate_arg (arg2, POINTER_TYPE))
8475 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8476 if (operand_equal_p (arg1, arg2, 0))
8477 return integer_zero_node;
8479 p1 = c_getstr (arg1);
8480 p2 = c_getstr (arg2);
8484 const int i = strcmp (p1, p2);
8486 return integer_minus_one_node;
8488 return integer_one_node;
8490 return integer_zero_node;
8493 /* If the second arg is "", return *(const unsigned char*)arg1. */
8494 if (p2 && *p2 == '\0')
8496 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8497 tree cst_uchar_ptr_node
8498 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8500 return fold_convert_loc (loc, integer_type_node,
8501 build1 (INDIRECT_REF, cst_uchar_node,
8502 fold_convert_loc (loc,
8507 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8508 if (p1 && *p1 == '\0')
8510 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8511 tree cst_uchar_ptr_node
8512 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8515 = fold_convert_loc (loc, integer_type_node,
8516 build1 (INDIRECT_REF, cst_uchar_node,
8517 fold_convert_loc (loc,
8520 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8526 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8527 Return NULL_TREE if no simplification can be made. */
8530 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8532 const char *p1, *p2;
8534 if (!validate_arg (arg1, POINTER_TYPE)
8535 || !validate_arg (arg2, POINTER_TYPE)
8536 || !validate_arg (len, INTEGER_TYPE))
8539 /* If the LEN parameter is zero, return zero. */
8540 if (integer_zerop (len))
8541 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8544 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8545 if (operand_equal_p (arg1, arg2, 0))
8546 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8548 p1 = c_getstr (arg1);
8549 p2 = c_getstr (arg2);
8551 if (host_integerp (len, 1) && p1 && p2)
8553 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8555 return integer_one_node;
8557 return integer_minus_one_node;
8559 return integer_zero_node;
8562 /* If the second arg is "", and the length is greater than zero,
8563 return *(const unsigned char*)arg1. */
8564 if (p2 && *p2 == '\0'
8565 && TREE_CODE (len) == INTEGER_CST
8566 && tree_int_cst_sgn (len) == 1)
8568 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8569 tree cst_uchar_ptr_node
8570 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8572 return fold_convert_loc (loc, integer_type_node,
8573 build1 (INDIRECT_REF, cst_uchar_node,
8574 fold_convert_loc (loc,
8579 /* If the first arg is "", and the length is greater than zero,
8580 return -*(const unsigned char*)arg2. */
8581 if (p1 && *p1 == '\0'
8582 && TREE_CODE (len) == INTEGER_CST
8583 && tree_int_cst_sgn (len) == 1)
8585 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8586 tree cst_uchar_ptr_node
8587 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8589 tree temp = fold_convert_loc (loc, integer_type_node,
8590 build1 (INDIRECT_REF, cst_uchar_node,
8591 fold_convert_loc (loc,
8594 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8597 /* If len parameter is one, return an expression corresponding to
8598 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8599 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8601 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8602 tree cst_uchar_ptr_node
8603 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8605 tree ind1 = fold_convert_loc (loc, integer_type_node,
8606 build1 (INDIRECT_REF, cst_uchar_node,
8607 fold_convert_loc (loc,
8610 tree ind2 = fold_convert_loc (loc, integer_type_node,
8611 build1 (INDIRECT_REF, cst_uchar_node,
8612 fold_convert_loc (loc,
8615 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8621 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8622 ARG. Return NULL_TREE if no simplification can be made. */
8625 fold_builtin_signbit (location_t loc, tree arg, tree type)
8627 if (!validate_arg (arg, REAL_TYPE))
8630 /* If ARG is a compile-time constant, determine the result. */
8631 if (TREE_CODE (arg) == REAL_CST
8632 && !TREE_OVERFLOW (arg))
8636 c = TREE_REAL_CST (arg);
8637 return (REAL_VALUE_NEGATIVE (c)
8638 ? build_one_cst (type)
8639 : build_zero_cst (type));
8642 /* If ARG is non-negative, the result is always zero. */
8643 if (tree_expr_nonnegative_p (arg))
8644 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8646 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8647 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8648 return fold_convert (type,
8649 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8650 build_real (TREE_TYPE (arg), dconst0)));
8655 /* Fold function call to builtin copysign, copysignf or copysignl with
8656 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8660 fold_builtin_copysign (location_t loc, tree fndecl,
8661 tree arg1, tree arg2, tree type)
8665 if (!validate_arg (arg1, REAL_TYPE)
8666 || !validate_arg (arg2, REAL_TYPE))
8669 /* copysign(X,X) is X. */
8670 if (operand_equal_p (arg1, arg2, 0))
8671 return fold_convert_loc (loc, type, arg1);
8673 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8674 if (TREE_CODE (arg1) == REAL_CST
8675 && TREE_CODE (arg2) == REAL_CST
8676 && !TREE_OVERFLOW (arg1)
8677 && !TREE_OVERFLOW (arg2))
8679 REAL_VALUE_TYPE c1, c2;
8681 c1 = TREE_REAL_CST (arg1);
8682 c2 = TREE_REAL_CST (arg2);
8683 /* c1.sign := c2.sign. */
8684 real_copysign (&c1, &c2);
8685 return build_real (type, c1);
8688 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8689 Remember to evaluate Y for side-effects. */
8690 if (tree_expr_nonnegative_p (arg2))
8691 return omit_one_operand_loc (loc, type,
8692 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8695 /* Strip sign changing operations for the first argument. */
8696 tem = fold_strip_sign_ops (arg1);
8698 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8703 /* Fold a call to builtin isascii with argument ARG. */
8706 fold_builtin_isascii (location_t loc, tree arg)
8708 if (!validate_arg (arg, INTEGER_TYPE))
8712 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8713 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8714 build_int_cst (integer_type_node,
8715 ~ (unsigned HOST_WIDE_INT) 0x7f));
8716 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8717 arg, integer_zero_node);
8721 /* Fold a call to builtin toascii with argument ARG. */
8724 fold_builtin_toascii (location_t loc, tree arg)
8726 if (!validate_arg (arg, INTEGER_TYPE))
8729 /* Transform toascii(c) -> (c & 0x7f). */
8730 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8731 build_int_cst (integer_type_node, 0x7f));
8734 /* Fold a call to builtin isdigit with argument ARG. */
8737 fold_builtin_isdigit (location_t loc, tree arg)
8739 if (!validate_arg (arg, INTEGER_TYPE))
8743 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8744 /* According to the C standard, isdigit is unaffected by locale.
8745 However, it definitely is affected by the target character set. */
8746 unsigned HOST_WIDE_INT target_digit0
8747 = lang_hooks.to_target_charset ('0');
8749 if (target_digit0 == 0)
8752 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8753 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8754 build_int_cst (unsigned_type_node, target_digit0));
8755 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8756 build_int_cst (unsigned_type_node, 9));
8760 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8763 fold_builtin_fabs (location_t loc, tree arg, tree type)
8765 if (!validate_arg (arg, REAL_TYPE))
8768 arg = fold_convert_loc (loc, type, arg);
8769 if (TREE_CODE (arg) == REAL_CST)
8770 return fold_abs_const (arg, type);
8771 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8774 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8777 fold_builtin_abs (location_t loc, tree arg, tree type)
8779 if (!validate_arg (arg, INTEGER_TYPE))
8782 arg = fold_convert_loc (loc, type, arg);
8783 if (TREE_CODE (arg) == INTEGER_CST)
8784 return fold_abs_const (arg, type);
8785 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8788 /* Fold a fma operation with arguments ARG[012]. */
8791 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8792 tree type, tree arg0, tree arg1, tree arg2)
8794 if (TREE_CODE (arg0) == REAL_CST
8795 && TREE_CODE (arg1) == REAL_CST
8796 && TREE_CODE (arg2) == REAL_CST)
8797 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8802 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8805 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8807 if (validate_arg (arg0, REAL_TYPE)
8808 && validate_arg(arg1, REAL_TYPE)
8809 && validate_arg(arg2, REAL_TYPE))
8811 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8815 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8816 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8817 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8822 /* Fold a call to builtin fmin or fmax. */
8825 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8826 tree type, bool max)
8828 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8830 /* Calculate the result when the argument is a constant. */
8831 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8836 /* If either argument is NaN, return the other one. Avoid the
8837 transformation if we get (and honor) a signalling NaN. Using
8838 omit_one_operand() ensures we create a non-lvalue. */
8839 if (TREE_CODE (arg0) == REAL_CST
8840 && real_isnan (&TREE_REAL_CST (arg0))
8841 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8842 || ! TREE_REAL_CST (arg0).signalling))
8843 return omit_one_operand_loc (loc, type, arg1, arg0);
8844 if (TREE_CODE (arg1) == REAL_CST
8845 && real_isnan (&TREE_REAL_CST (arg1))
8846 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
8847 || ! TREE_REAL_CST (arg1).signalling))
8848 return omit_one_operand_loc (loc, type, arg0, arg1);
8850 /* Transform fmin/fmax(x,x) -> x. */
8851 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8852 return omit_one_operand_loc (loc, type, arg0, arg1);
8854 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8855 functions to return the numeric arg if the other one is NaN.
8856 These tree codes don't honor that, so only transform if
8857 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8858 handled, so we don't have to worry about it either. */
8859 if (flag_finite_math_only)
8860 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8861 fold_convert_loc (loc, type, arg0),
8862 fold_convert_loc (loc, type, arg1));
8867 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8870 fold_builtin_carg (location_t loc, tree arg, tree type)
8872 if (validate_arg (arg, COMPLEX_TYPE)
8873 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8875 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8879 tree new_arg = builtin_save_expr (arg);
8880 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8881 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8882 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8889 /* Fold a call to builtin logb/ilogb. */
8892 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8894 if (! validate_arg (arg, REAL_TYPE))
8899 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8901 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8907 /* If arg is Inf or NaN and we're logb, return it. */
8908 if (TREE_CODE (rettype) == REAL_TYPE)
8909 return fold_convert_loc (loc, rettype, arg);
8910 /* Fall through... */
8912 /* Zero may set errno and/or raise an exception for logb, also
8913 for ilogb we don't know FP_ILOGB0. */
8916 /* For normal numbers, proceed iff radix == 2. In GCC,
8917 normalized significands are in the range [0.5, 1.0). We
8918 want the exponent as if they were [1.0, 2.0) so get the
8919 exponent and subtract 1. */
8920 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8921 return fold_convert_loc (loc, rettype,
8922 build_int_cst (integer_type_node,
8923 REAL_EXP (value)-1));
8931 /* Fold a call to builtin significand, if radix == 2. */
8934 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8936 if (! validate_arg (arg, REAL_TYPE))
8941 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8943 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8950 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8951 return fold_convert_loc (loc, rettype, arg);
8953 /* For normal numbers, proceed iff radix == 2. */
8954 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8956 REAL_VALUE_TYPE result = *value;
8957 /* In GCC, normalized significands are in the range [0.5,
8958 1.0). We want them to be [1.0, 2.0) so set the
8960 SET_REAL_EXP (&result, 1);
8961 return build_real (rettype, result);
8970 /* Fold a call to builtin frexp, we can assume the base is 2. */
8973 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8975 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8980 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8983 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8985 /* Proceed if a valid pointer type was passed in. */
8986 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8988 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8994 /* For +-0, return (*exp = 0, +-0). */
8995 exp = integer_zero_node;
9000 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9001 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9004 /* Since the frexp function always expects base 2, and in
9005 GCC normalized significands are already in the range
9006 [0.5, 1.0), we have exactly what frexp wants. */
9007 REAL_VALUE_TYPE frac_rvt = *value;
9008 SET_REAL_EXP (&frac_rvt, 0);
9009 frac = build_real (rettype, frac_rvt);
9010 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9017 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9018 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9019 TREE_SIDE_EFFECTS (arg1) = 1;
9020 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9026 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9027 then we can assume the base is two. If it's false, then we have to
9028 check the mode of the TYPE parameter in certain cases. */
9031 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9032 tree type, bool ldexp)
9034 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9039 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9040 if (real_zerop (arg0) || integer_zerop (arg1)
9041 || (TREE_CODE (arg0) == REAL_CST
9042 && !real_isfinite (&TREE_REAL_CST (arg0))))
9043 return omit_one_operand_loc (loc, type, arg0, arg1);
9045 /* If both arguments are constant, then try to evaluate it. */
9046 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9047 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9048 && host_integerp (arg1, 0))
9050 /* Bound the maximum adjustment to twice the range of the
9051 mode's valid exponents. Use abs to ensure the range is
9052 positive as a sanity check. */
9053 const long max_exp_adj = 2 *
9054 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9055 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9057 /* Get the user-requested adjustment. */
9058 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9060 /* The requested adjustment must be inside this range. This
9061 is a preliminary cap to avoid things like overflow, we
9062 may still fail to compute the result for other reasons. */
9063 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9065 REAL_VALUE_TYPE initial_result;
9067 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9069 /* Ensure we didn't overflow. */
9070 if (! real_isinf (&initial_result))
9072 const REAL_VALUE_TYPE trunc_result
9073 = real_value_truncate (TYPE_MODE (type), initial_result);
9075 /* Only proceed if the target mode can hold the
9077 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9078 return build_real (type, trunc_result);
9087 /* Fold a call to builtin modf. */
9090 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9092 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9097 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9100 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9102 /* Proceed if a valid pointer type was passed in. */
9103 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9105 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9106 REAL_VALUE_TYPE trunc, frac;
9112 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9113 trunc = frac = *value;
9116 /* For +-Inf, return (*arg1 = arg0, +-0). */
9118 frac.sign = value->sign;
9122 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9123 real_trunc (&trunc, VOIDmode, value);
9124 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9125 /* If the original number was negative and already
9126 integral, then the fractional part is -0.0. */
9127 if (value->sign && frac.cl == rvc_zero)
9128 frac.sign = value->sign;
9132 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9133 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9134 build_real (rettype, trunc));
9135 TREE_SIDE_EFFECTS (arg1) = 1;
9136 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9137 build_real (rettype, frac));
9143 /* Given a location LOC, an interclass builtin function decl FNDECL
9144 and its single argument ARG, return an folded expression computing
9145 the same, or NULL_TREE if we either couldn't or didn't want to fold
9146 (the latter happen if there's an RTL instruction available). */
9149 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9151 enum machine_mode mode;
9153 if (!validate_arg (arg, REAL_TYPE))
9156 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9159 mode = TYPE_MODE (TREE_TYPE (arg));
9161 /* If there is no optab, try generic code. */
9162 switch (DECL_FUNCTION_CODE (fndecl))
9166 CASE_FLT_FN (BUILT_IN_ISINF):
9168 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9169 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9170 tree const type = TREE_TYPE (arg);
9174 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9175 real_from_string (&r, buf);
9176 result = build_call_expr (isgr_fn, 2,
9177 fold_build1_loc (loc, ABS_EXPR, type, arg),
9178 build_real (type, r));
9181 CASE_FLT_FN (BUILT_IN_FINITE):
9182 case BUILT_IN_ISFINITE:
9184 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9185 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9186 tree const type = TREE_TYPE (arg);
9190 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9191 real_from_string (&r, buf);
9192 result = build_call_expr (isle_fn, 2,
9193 fold_build1_loc (loc, ABS_EXPR, type, arg),
9194 build_real (type, r));
9195 /*result = fold_build2_loc (loc, UNGT_EXPR,
9196 TREE_TYPE (TREE_TYPE (fndecl)),
9197 fold_build1_loc (loc, ABS_EXPR, type, arg),
9198 build_real (type, r));
9199 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9200 TREE_TYPE (TREE_TYPE (fndecl)),
9204 case BUILT_IN_ISNORMAL:
9206 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9207 islessequal(fabs(x),DBL_MAX). */
9208 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9209 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9210 tree const type = TREE_TYPE (arg);
9211 REAL_VALUE_TYPE rmax, rmin;
9214 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9215 real_from_string (&rmax, buf);
9216 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9217 real_from_string (&rmin, buf);
9218 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9219 result = build_call_expr (isle_fn, 2, arg,
9220 build_real (type, rmax));
9221 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9222 build_call_expr (isge_fn, 2, arg,
9223 build_real (type, rmin)));
9233 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9234 ARG is the argument for the call. */
9237 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9239 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9242 if (!validate_arg (arg, REAL_TYPE))
9245 switch (builtin_index)
9247 case BUILT_IN_ISINF:
9248 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9249 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9251 if (TREE_CODE (arg) == REAL_CST)
9253 r = TREE_REAL_CST (arg);
9254 if (real_isinf (&r))
9255 return real_compare (GT_EXPR, &r, &dconst0)
9256 ? integer_one_node : integer_minus_one_node;
9258 return integer_zero_node;
9263 case BUILT_IN_ISINF_SIGN:
9265 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9266 /* In a boolean context, GCC will fold the inner COND_EXPR to
9267 1. So e.g. "if (isinf_sign(x))" would be folded to just
9268 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9269 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9270 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9271 tree tmp = NULL_TREE;
9273 arg = builtin_save_expr (arg);
9275 if (signbit_fn && isinf_fn)
9277 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9278 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9280 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9281 signbit_call, integer_zero_node);
9282 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9283 isinf_call, integer_zero_node);
9285 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9286 integer_minus_one_node, integer_one_node);
9287 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9295 case BUILT_IN_ISFINITE:
9296 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9297 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9298 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9300 if (TREE_CODE (arg) == REAL_CST)
9302 r = TREE_REAL_CST (arg);
9303 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9308 case BUILT_IN_ISNAN:
9309 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9310 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9312 if (TREE_CODE (arg) == REAL_CST)
9314 r = TREE_REAL_CST (arg);
9315 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9318 arg = builtin_save_expr (arg);
9319 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9326 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9327 This builtin will generate code to return the appropriate floating
9328 point classification depending on the value of the floating point
9329 number passed in. The possible return values must be supplied as
9330 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9331 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9332 one floating point argument which is "type generic". */
9335 fold_builtin_fpclassify (location_t loc, tree exp)
9337 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9338 arg, type, res, tmp;
9339 enum machine_mode mode;
9343 /* Verify the required arguments in the original call. */
9344 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9345 INTEGER_TYPE, INTEGER_TYPE,
9346 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9349 fp_nan = CALL_EXPR_ARG (exp, 0);
9350 fp_infinite = CALL_EXPR_ARG (exp, 1);
9351 fp_normal = CALL_EXPR_ARG (exp, 2);
9352 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9353 fp_zero = CALL_EXPR_ARG (exp, 4);
9354 arg = CALL_EXPR_ARG (exp, 5);
9355 type = TREE_TYPE (arg);
9356 mode = TYPE_MODE (type);
9357 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9361 (fabs(x) == Inf ? FP_INFINITE :
9362 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9363 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9365 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9366 build_real (type, dconst0));
9367 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9368 tmp, fp_zero, fp_subnormal);
9370 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9371 real_from_string (&r, buf);
9372 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9373 arg, build_real (type, r));
9374 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9376 if (HONOR_INFINITIES (mode))
9379 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9380 build_real (type, r));
9381 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9385 if (HONOR_NANS (mode))
9387 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9388 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9394 /* Fold a call to an unordered comparison function such as
9395 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9396 being called and ARG0 and ARG1 are the arguments for the call.
9397 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9398 the opposite of the desired result. UNORDERED_CODE is used
9399 for modes that can hold NaNs and ORDERED_CODE is used for
9403 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9404 enum tree_code unordered_code,
9405 enum tree_code ordered_code)
9407 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9408 enum tree_code code;
9410 enum tree_code code0, code1;
9411 tree cmp_type = NULL_TREE;
9413 type0 = TREE_TYPE (arg0);
9414 type1 = TREE_TYPE (arg1);
9416 code0 = TREE_CODE (type0);
9417 code1 = TREE_CODE (type1);
9419 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9420 /* Choose the wider of two real types. */
9421 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9423 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9425 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9428 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9429 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9431 if (unordered_code == UNORDERED_EXPR)
9433 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9434 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9435 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9438 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9440 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9441 fold_build2_loc (loc, code, type, arg0, arg1));
9444 /* Fold a call to built-in function FNDECL with 0 arguments.
9445 IGNORE is true if the result of the function call is ignored. This
9446 function returns NULL_TREE if no simplification was possible. */
9449 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9451 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9452 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9455 CASE_FLT_FN (BUILT_IN_INF):
9456 case BUILT_IN_INFD32:
9457 case BUILT_IN_INFD64:
9458 case BUILT_IN_INFD128:
9459 return fold_builtin_inf (loc, type, true);
9461 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9462 return fold_builtin_inf (loc, type, false);
9464 case BUILT_IN_CLASSIFY_TYPE:
9465 return fold_builtin_classify_type (NULL_TREE);
9473 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9474 IGNORE is true if the result of the function call is ignored. This
9475 function returns NULL_TREE if no simplification was possible. */
9478 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9480 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9481 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9484 case BUILT_IN_CONSTANT_P:
9486 tree val = fold_builtin_constant_p (arg0);
9488 /* Gimplification will pull the CALL_EXPR for the builtin out of
9489 an if condition. When not optimizing, we'll not CSE it back.
9490 To avoid link error types of regressions, return false now. */
9491 if (!val && !optimize)
9492 val = integer_zero_node;
9497 case BUILT_IN_CLASSIFY_TYPE:
9498 return fold_builtin_classify_type (arg0);
9500 case BUILT_IN_STRLEN:
9501 return fold_builtin_strlen (loc, type, arg0);
9503 CASE_FLT_FN (BUILT_IN_FABS):
9504 return fold_builtin_fabs (loc, arg0, type);
9508 case BUILT_IN_LLABS:
9509 case BUILT_IN_IMAXABS:
9510 return fold_builtin_abs (loc, arg0, type);
9512 CASE_FLT_FN (BUILT_IN_CONJ):
9513 if (validate_arg (arg0, COMPLEX_TYPE)
9514 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9515 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9518 CASE_FLT_FN (BUILT_IN_CREAL):
9519 if (validate_arg (arg0, COMPLEX_TYPE)
9520 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9521 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9524 CASE_FLT_FN (BUILT_IN_CIMAG):
9525 if (validate_arg (arg0, COMPLEX_TYPE)
9526 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9527 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9530 CASE_FLT_FN (BUILT_IN_CCOS):
9531 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9533 CASE_FLT_FN (BUILT_IN_CCOSH):
9534 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9536 CASE_FLT_FN (BUILT_IN_CPROJ):
9537 return fold_builtin_cproj(loc, arg0, type);
9539 CASE_FLT_FN (BUILT_IN_CSIN):
9540 if (validate_arg (arg0, COMPLEX_TYPE)
9541 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9542 return do_mpc_arg1 (arg0, type, mpc_sin);
9545 CASE_FLT_FN (BUILT_IN_CSINH):
9546 if (validate_arg (arg0, COMPLEX_TYPE)
9547 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9548 return do_mpc_arg1 (arg0, type, mpc_sinh);
9551 CASE_FLT_FN (BUILT_IN_CTAN):
9552 if (validate_arg (arg0, COMPLEX_TYPE)
9553 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9554 return do_mpc_arg1 (arg0, type, mpc_tan);
9557 CASE_FLT_FN (BUILT_IN_CTANH):
9558 if (validate_arg (arg0, COMPLEX_TYPE)
9559 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9560 return do_mpc_arg1 (arg0, type, mpc_tanh);
9563 CASE_FLT_FN (BUILT_IN_CLOG):
9564 if (validate_arg (arg0, COMPLEX_TYPE)
9565 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9566 return do_mpc_arg1 (arg0, type, mpc_log);
9569 CASE_FLT_FN (BUILT_IN_CSQRT):
9570 if (validate_arg (arg0, COMPLEX_TYPE)
9571 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9572 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9575 CASE_FLT_FN (BUILT_IN_CASIN):
9576 if (validate_arg (arg0, COMPLEX_TYPE)
9577 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9578 return do_mpc_arg1 (arg0, type, mpc_asin);
9581 CASE_FLT_FN (BUILT_IN_CACOS):
9582 if (validate_arg (arg0, COMPLEX_TYPE)
9583 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9584 return do_mpc_arg1 (arg0, type, mpc_acos);
9587 CASE_FLT_FN (BUILT_IN_CATAN):
9588 if (validate_arg (arg0, COMPLEX_TYPE)
9589 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9590 return do_mpc_arg1 (arg0, type, mpc_atan);
9593 CASE_FLT_FN (BUILT_IN_CASINH):
9594 if (validate_arg (arg0, COMPLEX_TYPE)
9595 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9596 return do_mpc_arg1 (arg0, type, mpc_asinh);
9599 CASE_FLT_FN (BUILT_IN_CACOSH):
9600 if (validate_arg (arg0, COMPLEX_TYPE)
9601 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9602 return do_mpc_arg1 (arg0, type, mpc_acosh);
9605 CASE_FLT_FN (BUILT_IN_CATANH):
9606 if (validate_arg (arg0, COMPLEX_TYPE)
9607 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9608 return do_mpc_arg1 (arg0, type, mpc_atanh);
9611 CASE_FLT_FN (BUILT_IN_CABS):
9612 return fold_builtin_cabs (loc, arg0, type, fndecl);
9614 CASE_FLT_FN (BUILT_IN_CARG):
9615 return fold_builtin_carg (loc, arg0, type);
9617 CASE_FLT_FN (BUILT_IN_SQRT):
9618 return fold_builtin_sqrt (loc, arg0, type);
9620 CASE_FLT_FN (BUILT_IN_CBRT):
9621 return fold_builtin_cbrt (loc, arg0, type);
9623 CASE_FLT_FN (BUILT_IN_ASIN):
9624 if (validate_arg (arg0, REAL_TYPE))
9625 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9626 &dconstm1, &dconst1, true);
9629 CASE_FLT_FN (BUILT_IN_ACOS):
9630 if (validate_arg (arg0, REAL_TYPE))
9631 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9632 &dconstm1, &dconst1, true);
9635 CASE_FLT_FN (BUILT_IN_ATAN):
9636 if (validate_arg (arg0, REAL_TYPE))
9637 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9640 CASE_FLT_FN (BUILT_IN_ASINH):
9641 if (validate_arg (arg0, REAL_TYPE))
9642 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9645 CASE_FLT_FN (BUILT_IN_ACOSH):
9646 if (validate_arg (arg0, REAL_TYPE))
9647 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9648 &dconst1, NULL, true);
9651 CASE_FLT_FN (BUILT_IN_ATANH):
9652 if (validate_arg (arg0, REAL_TYPE))
9653 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9654 &dconstm1, &dconst1, false);
9657 CASE_FLT_FN (BUILT_IN_SIN):
9658 if (validate_arg (arg0, REAL_TYPE))
9659 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9662 CASE_FLT_FN (BUILT_IN_COS):
9663 return fold_builtin_cos (loc, arg0, type, fndecl);
9665 CASE_FLT_FN (BUILT_IN_TAN):
9666 return fold_builtin_tan (arg0, type);
9668 CASE_FLT_FN (BUILT_IN_CEXP):
9669 return fold_builtin_cexp (loc, arg0, type);
9671 CASE_FLT_FN (BUILT_IN_CEXPI):
9672 if (validate_arg (arg0, REAL_TYPE))
9673 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9676 CASE_FLT_FN (BUILT_IN_SINH):
9677 if (validate_arg (arg0, REAL_TYPE))
9678 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9681 CASE_FLT_FN (BUILT_IN_COSH):
9682 return fold_builtin_cosh (loc, arg0, type, fndecl);
9684 CASE_FLT_FN (BUILT_IN_TANH):
9685 if (validate_arg (arg0, REAL_TYPE))
9686 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9689 CASE_FLT_FN (BUILT_IN_ERF):
9690 if (validate_arg (arg0, REAL_TYPE))
9691 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9694 CASE_FLT_FN (BUILT_IN_ERFC):
9695 if (validate_arg (arg0, REAL_TYPE))
9696 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9699 CASE_FLT_FN (BUILT_IN_TGAMMA):
9700 if (validate_arg (arg0, REAL_TYPE))
9701 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9704 CASE_FLT_FN (BUILT_IN_EXP):
9705 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9707 CASE_FLT_FN (BUILT_IN_EXP2):
9708 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9710 CASE_FLT_FN (BUILT_IN_EXP10):
9711 CASE_FLT_FN (BUILT_IN_POW10):
9712 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9714 CASE_FLT_FN (BUILT_IN_EXPM1):
9715 if (validate_arg (arg0, REAL_TYPE))
9716 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9719 CASE_FLT_FN (BUILT_IN_LOG):
9720 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9722 CASE_FLT_FN (BUILT_IN_LOG2):
9723 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9725 CASE_FLT_FN (BUILT_IN_LOG10):
9726 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9728 CASE_FLT_FN (BUILT_IN_LOG1P):
9729 if (validate_arg (arg0, REAL_TYPE))
9730 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9731 &dconstm1, NULL, false);
9734 CASE_FLT_FN (BUILT_IN_J0):
9735 if (validate_arg (arg0, REAL_TYPE))
9736 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9740 CASE_FLT_FN (BUILT_IN_J1):
9741 if (validate_arg (arg0, REAL_TYPE))
9742 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9746 CASE_FLT_FN (BUILT_IN_Y0):
9747 if (validate_arg (arg0, REAL_TYPE))
9748 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9749 &dconst0, NULL, false);
9752 CASE_FLT_FN (BUILT_IN_Y1):
9753 if (validate_arg (arg0, REAL_TYPE))
9754 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9755 &dconst0, NULL, false);
9758 CASE_FLT_FN (BUILT_IN_NAN):
9759 case BUILT_IN_NAND32:
9760 case BUILT_IN_NAND64:
9761 case BUILT_IN_NAND128:
9762 return fold_builtin_nan (arg0, type, true);
9764 CASE_FLT_FN (BUILT_IN_NANS):
9765 return fold_builtin_nan (arg0, type, false);
9767 CASE_FLT_FN (BUILT_IN_FLOOR):
9768 return fold_builtin_floor (loc, fndecl, arg0);
9770 CASE_FLT_FN (BUILT_IN_CEIL):
9771 return fold_builtin_ceil (loc, fndecl, arg0);
9773 CASE_FLT_FN (BUILT_IN_TRUNC):
9774 return fold_builtin_trunc (loc, fndecl, arg0);
9776 CASE_FLT_FN (BUILT_IN_ROUND):
9777 return fold_builtin_round (loc, fndecl, arg0);
9779 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9780 CASE_FLT_FN (BUILT_IN_RINT):
9781 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9783 CASE_FLT_FN (BUILT_IN_LCEIL):
9784 CASE_FLT_FN (BUILT_IN_LLCEIL):
9785 CASE_FLT_FN (BUILT_IN_LFLOOR):
9786 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9787 CASE_FLT_FN (BUILT_IN_LROUND):
9788 CASE_FLT_FN (BUILT_IN_LLROUND):
9789 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9791 CASE_FLT_FN (BUILT_IN_LRINT):
9792 CASE_FLT_FN (BUILT_IN_LLRINT):
9793 return fold_fixed_mathfn (loc, fndecl, arg0);
9795 case BUILT_IN_BSWAP32:
9796 case BUILT_IN_BSWAP64:
9797 return fold_builtin_bswap (fndecl, arg0);
9799 CASE_INT_FN (BUILT_IN_FFS):
9800 CASE_INT_FN (BUILT_IN_CLZ):
9801 CASE_INT_FN (BUILT_IN_CTZ):
9802 CASE_INT_FN (BUILT_IN_CLRSB):
9803 CASE_INT_FN (BUILT_IN_POPCOUNT):
9804 CASE_INT_FN (BUILT_IN_PARITY):
9805 return fold_builtin_bitop (fndecl, arg0);
9807 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9808 return fold_builtin_signbit (loc, arg0, type);
9810 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9811 return fold_builtin_significand (loc, arg0, type);
9813 CASE_FLT_FN (BUILT_IN_ILOGB):
9814 CASE_FLT_FN (BUILT_IN_LOGB):
9815 return fold_builtin_logb (loc, arg0, type);
9817 case BUILT_IN_ISASCII:
9818 return fold_builtin_isascii (loc, arg0);
9820 case BUILT_IN_TOASCII:
9821 return fold_builtin_toascii (loc, arg0);
9823 case BUILT_IN_ISDIGIT:
9824 return fold_builtin_isdigit (loc, arg0);
9826 CASE_FLT_FN (BUILT_IN_FINITE):
9827 case BUILT_IN_FINITED32:
9828 case BUILT_IN_FINITED64:
9829 case BUILT_IN_FINITED128:
9830 case BUILT_IN_ISFINITE:
9832 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9835 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9838 CASE_FLT_FN (BUILT_IN_ISINF):
9839 case BUILT_IN_ISINFD32:
9840 case BUILT_IN_ISINFD64:
9841 case BUILT_IN_ISINFD128:
9843 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9846 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9849 case BUILT_IN_ISNORMAL:
9850 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9852 case BUILT_IN_ISINF_SIGN:
9853 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9855 CASE_FLT_FN (BUILT_IN_ISNAN):
9856 case BUILT_IN_ISNAND32:
9857 case BUILT_IN_ISNAND64:
9858 case BUILT_IN_ISNAND128:
9859 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9861 case BUILT_IN_PRINTF:
9862 case BUILT_IN_PRINTF_UNLOCKED:
9863 case BUILT_IN_VPRINTF:
9864 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
9867 if (integer_zerop (arg0))
9868 return build_empty_stmt (loc);
9879 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9880 IGNORE is true if the result of the function call is ignored. This
9881 function returns NULL_TREE if no simplification was possible. */
9884 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
9886 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9887 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9891 CASE_FLT_FN (BUILT_IN_JN):
9892 if (validate_arg (arg0, INTEGER_TYPE)
9893 && validate_arg (arg1, REAL_TYPE))
9894 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9897 CASE_FLT_FN (BUILT_IN_YN):
9898 if (validate_arg (arg0, INTEGER_TYPE)
9899 && validate_arg (arg1, REAL_TYPE))
9900 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9904 CASE_FLT_FN (BUILT_IN_DREM):
9905 CASE_FLT_FN (BUILT_IN_REMAINDER):
9906 if (validate_arg (arg0, REAL_TYPE)
9907 && validate_arg(arg1, REAL_TYPE))
9908 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9911 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9912 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9913 if (validate_arg (arg0, REAL_TYPE)
9914 && validate_arg(arg1, POINTER_TYPE))
9915 return do_mpfr_lgamma_r (arg0, arg1, type);
9918 CASE_FLT_FN (BUILT_IN_ATAN2):
9919 if (validate_arg (arg0, REAL_TYPE)
9920 && validate_arg(arg1, REAL_TYPE))
9921 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9924 CASE_FLT_FN (BUILT_IN_FDIM):
9925 if (validate_arg (arg0, REAL_TYPE)
9926 && validate_arg(arg1, REAL_TYPE))
9927 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9930 CASE_FLT_FN (BUILT_IN_HYPOT):
9931 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
9933 CASE_FLT_FN (BUILT_IN_CPOW):
9934 if (validate_arg (arg0, COMPLEX_TYPE)
9935 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9936 && validate_arg (arg1, COMPLEX_TYPE)
9937 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9938 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9941 CASE_FLT_FN (BUILT_IN_LDEXP):
9942 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9943 CASE_FLT_FN (BUILT_IN_SCALBN):
9944 CASE_FLT_FN (BUILT_IN_SCALBLN):
9945 return fold_builtin_load_exponent (loc, arg0, arg1,
9946 type, /*ldexp=*/false);
9948 CASE_FLT_FN (BUILT_IN_FREXP):
9949 return fold_builtin_frexp (loc, arg0, arg1, type);
9951 CASE_FLT_FN (BUILT_IN_MODF):
9952 return fold_builtin_modf (loc, arg0, arg1, type);
9954 case BUILT_IN_BZERO:
9955 return fold_builtin_bzero (loc, arg0, arg1, ignore);
9957 case BUILT_IN_FPUTS:
9958 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
9960 case BUILT_IN_FPUTS_UNLOCKED:
9961 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
9963 case BUILT_IN_STRSTR:
9964 return fold_builtin_strstr (loc, arg0, arg1, type);
9966 case BUILT_IN_STRCAT:
9967 return fold_builtin_strcat (loc, arg0, arg1);
9969 case BUILT_IN_STRSPN:
9970 return fold_builtin_strspn (loc, arg0, arg1);
9972 case BUILT_IN_STRCSPN:
9973 return fold_builtin_strcspn (loc, arg0, arg1);
9975 case BUILT_IN_STRCHR:
9976 case BUILT_IN_INDEX:
9977 return fold_builtin_strchr (loc, arg0, arg1, type);
9979 case BUILT_IN_STRRCHR:
9980 case BUILT_IN_RINDEX:
9981 return fold_builtin_strrchr (loc, arg0, arg1, type);
9983 case BUILT_IN_STRCPY:
9984 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
9986 case BUILT_IN_STPCPY:
9989 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
9993 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
9996 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
9999 case BUILT_IN_STRCMP:
10000 return fold_builtin_strcmp (loc, arg0, arg1);
10002 case BUILT_IN_STRPBRK:
10003 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10005 case BUILT_IN_EXPECT:
10006 return fold_builtin_expect (loc, arg0, arg1);
10008 CASE_FLT_FN (BUILT_IN_POW):
10009 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10011 CASE_FLT_FN (BUILT_IN_POWI):
10012 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10014 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10015 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10017 CASE_FLT_FN (BUILT_IN_FMIN):
10018 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10020 CASE_FLT_FN (BUILT_IN_FMAX):
10021 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10023 case BUILT_IN_ISGREATER:
10024 return fold_builtin_unordered_cmp (loc, fndecl,
10025 arg0, arg1, UNLE_EXPR, LE_EXPR);
10026 case BUILT_IN_ISGREATEREQUAL:
10027 return fold_builtin_unordered_cmp (loc, fndecl,
10028 arg0, arg1, UNLT_EXPR, LT_EXPR);
10029 case BUILT_IN_ISLESS:
10030 return fold_builtin_unordered_cmp (loc, fndecl,
10031 arg0, arg1, UNGE_EXPR, GE_EXPR);
10032 case BUILT_IN_ISLESSEQUAL:
10033 return fold_builtin_unordered_cmp (loc, fndecl,
10034 arg0, arg1, UNGT_EXPR, GT_EXPR);
10035 case BUILT_IN_ISLESSGREATER:
10036 return fold_builtin_unordered_cmp (loc, fndecl,
10037 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10038 case BUILT_IN_ISUNORDERED:
10039 return fold_builtin_unordered_cmp (loc, fndecl,
10040 arg0, arg1, UNORDERED_EXPR,
10043 /* We do the folding for va_start in the expander. */
10044 case BUILT_IN_VA_START:
10047 case BUILT_IN_SPRINTF:
10048 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10050 case BUILT_IN_OBJECT_SIZE:
10051 return fold_builtin_object_size (arg0, arg1);
10053 case BUILT_IN_PRINTF:
10054 case BUILT_IN_PRINTF_UNLOCKED:
10055 case BUILT_IN_VPRINTF:
10056 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10058 case BUILT_IN_PRINTF_CHK:
10059 case BUILT_IN_VPRINTF_CHK:
10060 if (!validate_arg (arg0, INTEGER_TYPE)
10061 || TREE_SIDE_EFFECTS (arg0))
10064 return fold_builtin_printf (loc, fndecl,
10065 arg1, NULL_TREE, ignore, fcode);
10068 case BUILT_IN_FPRINTF:
10069 case BUILT_IN_FPRINTF_UNLOCKED:
10070 case BUILT_IN_VFPRINTF:
10071 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10080 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10081 and ARG2. IGNORE is true if the result of the function call is ignored.
10082 This function returns NULL_TREE if no simplification was possible. */
10085 fold_builtin_3 (location_t loc, tree fndecl,
10086 tree arg0, tree arg1, tree arg2, bool ignore)
10088 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10089 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10093 CASE_FLT_FN (BUILT_IN_SINCOS):
10094 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10096 CASE_FLT_FN (BUILT_IN_FMA):
10097 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10100 CASE_FLT_FN (BUILT_IN_REMQUO):
10101 if (validate_arg (arg0, REAL_TYPE)
10102 && validate_arg(arg1, REAL_TYPE)
10103 && validate_arg(arg2, POINTER_TYPE))
10104 return do_mpfr_remquo (arg0, arg1, arg2);
10107 case BUILT_IN_MEMSET:
10108 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10110 case BUILT_IN_BCOPY:
10111 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10112 void_type_node, true, /*endp=*/3);
10114 case BUILT_IN_MEMCPY:
10115 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10116 type, ignore, /*endp=*/0);
10118 case BUILT_IN_MEMPCPY:
10119 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10120 type, ignore, /*endp=*/1);
10122 case BUILT_IN_MEMMOVE:
10123 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10124 type, ignore, /*endp=*/3);
10126 case BUILT_IN_STRNCAT:
10127 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10129 case BUILT_IN_STRNCPY:
10130 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10132 case BUILT_IN_STRNCMP:
10133 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10135 case BUILT_IN_MEMCHR:
10136 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10138 case BUILT_IN_BCMP:
10139 case BUILT_IN_MEMCMP:
10140 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10142 case BUILT_IN_SPRINTF:
10143 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10145 case BUILT_IN_SNPRINTF:
10146 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10148 case BUILT_IN_STRCPY_CHK:
10149 case BUILT_IN_STPCPY_CHK:
10150 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10153 case BUILT_IN_STRCAT_CHK:
10154 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10156 case BUILT_IN_PRINTF_CHK:
10157 case BUILT_IN_VPRINTF_CHK:
10158 if (!validate_arg (arg0, INTEGER_TYPE)
10159 || TREE_SIDE_EFFECTS (arg0))
10162 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10165 case BUILT_IN_FPRINTF:
10166 case BUILT_IN_FPRINTF_UNLOCKED:
10167 case BUILT_IN_VFPRINTF:
10168 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10171 case BUILT_IN_FPRINTF_CHK:
10172 case BUILT_IN_VFPRINTF_CHK:
10173 if (!validate_arg (arg1, INTEGER_TYPE)
10174 || TREE_SIDE_EFFECTS (arg1))
10177 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10186 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10187 ARG2, and ARG3. IGNORE is true if the result of the function call is
10188 ignored. This function returns NULL_TREE if no simplification was
10192 fold_builtin_4 (location_t loc, tree fndecl,
10193 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10195 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10199 case BUILT_IN_MEMCPY_CHK:
10200 case BUILT_IN_MEMPCPY_CHK:
10201 case BUILT_IN_MEMMOVE_CHK:
10202 case BUILT_IN_MEMSET_CHK:
10203 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10205 DECL_FUNCTION_CODE (fndecl));
10207 case BUILT_IN_STRNCPY_CHK:
10208 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10210 case BUILT_IN_STRNCAT_CHK:
10211 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10213 case BUILT_IN_SNPRINTF:
10214 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10216 case BUILT_IN_FPRINTF_CHK:
10217 case BUILT_IN_VFPRINTF_CHK:
10218 if (!validate_arg (arg1, INTEGER_TYPE)
10219 || TREE_SIDE_EFFECTS (arg1))
10222 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10232 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10233 arguments, where NARGS <= 4. IGNORE is true if the result of the
10234 function call is ignored. This function returns NULL_TREE if no
10235 simplification was possible. Note that this only folds builtins with
10236 fixed argument patterns. Foldings that do varargs-to-varargs
10237 transformations, or that match calls with more than 4 arguments,
10238 need to be handled with fold_builtin_varargs instead. */
10240 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10243 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10245 tree ret = NULL_TREE;
10250 ret = fold_builtin_0 (loc, fndecl, ignore);
10253 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10256 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10259 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10262 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10270 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10271 SET_EXPR_LOCATION (ret, loc);
10272 TREE_NO_WARNING (ret) = 1;
10278 /* Builtins with folding operations that operate on "..." arguments
10279 need special handling; we need to store the arguments in a convenient
10280 data structure before attempting any folding. Fortunately there are
10281 only a few builtins that fall into this category. FNDECL is the
10282 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10283 result of the function call is ignored. */
10286 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10287 bool ignore ATTRIBUTE_UNUSED)
10289 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10290 tree ret = NULL_TREE;
10294 case BUILT_IN_SPRINTF_CHK:
10295 case BUILT_IN_VSPRINTF_CHK:
10296 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10299 case BUILT_IN_SNPRINTF_CHK:
10300 case BUILT_IN_VSNPRINTF_CHK:
10301 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10304 case BUILT_IN_FPCLASSIFY:
10305 ret = fold_builtin_fpclassify (loc, exp);
10313 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10314 SET_EXPR_LOCATION (ret, loc);
10315 TREE_NO_WARNING (ret) = 1;
10321 /* Return true if FNDECL shouldn't be folded right now.
10322 If a built-in function has an inline attribute always_inline
10323 wrapper, defer folding it after always_inline functions have
10324 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10325 might not be performed. */
10328 avoid_folding_inline_builtin (tree fndecl)
10330 return (DECL_DECLARED_INLINE_P (fndecl)
10331 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10333 && !cfun->always_inline_functions_inlined
10334 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10337 /* A wrapper function for builtin folding that prevents warnings for
10338 "statement without effect" and the like, caused by removing the
10339 call node earlier than the warning is generated. */
10342 fold_call_expr (location_t loc, tree exp, bool ignore)
10344 tree ret = NULL_TREE;
10345 tree fndecl = get_callee_fndecl (exp);
10347 && TREE_CODE (fndecl) == FUNCTION_DECL
10348 && DECL_BUILT_IN (fndecl)
10349 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10350 yet. Defer folding until we see all the arguments
10351 (after inlining). */
10352 && !CALL_EXPR_VA_ARG_PACK (exp))
10354 int nargs = call_expr_nargs (exp);
10356 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10357 instead last argument is __builtin_va_arg_pack (). Defer folding
10358 even in that case, until arguments are finalized. */
10359 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10361 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10363 && TREE_CODE (fndecl2) == FUNCTION_DECL
10364 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10365 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10369 if (avoid_folding_inline_builtin (fndecl))
10372 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10373 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10374 CALL_EXPR_ARGP (exp), ignore);
10377 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10379 tree *args = CALL_EXPR_ARGP (exp);
10380 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10383 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10391 /* Conveniently construct a function call expression. FNDECL names the
10392 function to be called and N arguments are passed in the array
10396 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10398 tree fntype = TREE_TYPE (fndecl);
10399 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10401 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10404 /* Conveniently construct a function call expression. FNDECL names the
10405 function to be called and the arguments are passed in the vector
10409 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10411 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10412 VEC_address (tree, vec));
10416 /* Conveniently construct a function call expression. FNDECL names the
10417 function to be called, N is the number of arguments, and the "..."
10418 parameters are the argument expressions. */
10421 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10424 tree *argarray = XALLOCAVEC (tree, n);
10428 for (i = 0; i < n; i++)
10429 argarray[i] = va_arg (ap, tree);
10431 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10434 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10435 varargs macros aren't supported by all bootstrap compilers. */
10438 build_call_expr (tree fndecl, int n, ...)
10441 tree *argarray = XALLOCAVEC (tree, n);
10445 for (i = 0; i < n; i++)
10446 argarray[i] = va_arg (ap, tree);
10448 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10451 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10452 N arguments are passed in the array ARGARRAY. */
10455 fold_builtin_call_array (location_t loc, tree type,
10460 tree ret = NULL_TREE;
10463 if (TREE_CODE (fn) == ADDR_EXPR)
10465 tree fndecl = TREE_OPERAND (fn, 0);
10466 if (TREE_CODE (fndecl) == FUNCTION_DECL
10467 && DECL_BUILT_IN (fndecl))
10469 /* If last argument is __builtin_va_arg_pack (), arguments to this
10470 function are not finalized yet. Defer folding until they are. */
10471 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10473 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10475 && TREE_CODE (fndecl2) == FUNCTION_DECL
10476 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10477 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10478 return build_call_array_loc (loc, type, fn, n, argarray);
10480 if (avoid_folding_inline_builtin (fndecl))
10481 return build_call_array_loc (loc, type, fn, n, argarray);
10482 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10484 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10488 return build_call_array_loc (loc, type, fn, n, argarray);
10490 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10492 /* First try the transformations that don't require consing up
10494 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10499 /* If we got this far, we need to build an exp. */
10500 exp = build_call_array_loc (loc, type, fn, n, argarray);
10501 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10502 return ret ? ret : exp;
10506 return build_call_array_loc (loc, type, fn, n, argarray);
10509 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10510 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10511 of arguments in ARGS to be omitted. OLDNARGS is the number of
10512 elements in ARGS. */
10515 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10516 int skip, tree fndecl, int n, va_list newargs)
10518 int nargs = oldnargs - skip + n;
10525 buffer = XALLOCAVEC (tree, nargs);
10526 for (i = 0; i < n; i++)
10527 buffer[i] = va_arg (newargs, tree);
10528 for (j = skip; j < oldnargs; j++, i++)
10529 buffer[i] = args[j];
10532 buffer = args + skip;
10534 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10537 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10538 list ARGS along with N new arguments specified as the "..."
10539 parameters. SKIP is the number of arguments in ARGS to be omitted.
10540 OLDNARGS is the number of elements in ARGS. */
10543 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10544 int skip, tree fndecl, int n, ...)
10550 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10556 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10557 along with N new arguments specified as the "..." parameters. SKIP
10558 is the number of arguments in EXP to be omitted. This function is used
10559 to do varargs-to-varargs transformations. */
10562 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10568 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10569 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10575 /* Validate a single argument ARG against a tree code CODE representing
10579 validate_arg (const_tree arg, enum tree_code code)
10583 else if (code == POINTER_TYPE)
10584 return POINTER_TYPE_P (TREE_TYPE (arg));
10585 else if (code == INTEGER_TYPE)
10586 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10587 return code == TREE_CODE (TREE_TYPE (arg));
10590 /* This function validates the types of a function call argument list
10591 against a specified list of tree_codes. If the last specifier is a 0,
10592 that represents an ellipses, otherwise the last specifier must be a
10595 This is the GIMPLE version of validate_arglist. Eventually we want to
10596 completely convert builtins.c to work from GIMPLEs and the tree based
10597 validate_arglist will then be removed. */
10600 validate_gimple_arglist (const_gimple call, ...)
10602 enum tree_code code;
10608 va_start (ap, call);
10613 code = (enum tree_code) va_arg (ap, int);
10617 /* This signifies an ellipses, any further arguments are all ok. */
10621 /* This signifies an endlink, if no arguments remain, return
10622 true, otherwise return false. */
10623 res = (i == gimple_call_num_args (call));
10626 /* If no parameters remain or the parameter's code does not
10627 match the specified code, return false. Otherwise continue
10628 checking any remaining arguments. */
10629 arg = gimple_call_arg (call, i++);
10630 if (!validate_arg (arg, code))
10637 /* We need gotos here since we can only have one VA_CLOSE in a
10645 /* This function validates the types of a function call argument list
10646 against a specified list of tree_codes. If the last specifier is a 0,
10647 that represents an ellipses, otherwise the last specifier must be a
10651 validate_arglist (const_tree callexpr, ...)
10653 enum tree_code code;
10656 const_call_expr_arg_iterator iter;
10659 va_start (ap, callexpr);
10660 init_const_call_expr_arg_iterator (callexpr, &iter);
10664 code = (enum tree_code) va_arg (ap, int);
10668 /* This signifies an ellipses, any further arguments are all ok. */
10672 /* This signifies an endlink, if no arguments remain, return
10673 true, otherwise return false. */
10674 res = !more_const_call_expr_args_p (&iter);
10677 /* If no parameters remain or the parameter's code does not
10678 match the specified code, return false. Otherwise continue
10679 checking any remaining arguments. */
10680 arg = next_const_call_expr_arg (&iter);
10681 if (!validate_arg (arg, code))
10688 /* We need gotos here since we can only have one VA_CLOSE in a
10696 /* Default target-specific builtin expander that does nothing. */
10699 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10700 rtx target ATTRIBUTE_UNUSED,
10701 rtx subtarget ATTRIBUTE_UNUSED,
10702 enum machine_mode mode ATTRIBUTE_UNUSED,
10703 int ignore ATTRIBUTE_UNUSED)
10708 /* Returns true is EXP represents data that would potentially reside
10709 in a readonly section. */
10712 readonly_data_expr (tree exp)
10716 if (TREE_CODE (exp) != ADDR_EXPR)
10719 exp = get_base_address (TREE_OPERAND (exp, 0));
10723 /* Make sure we call decl_readonly_section only for trees it
10724 can handle (since it returns true for everything it doesn't
10726 if (TREE_CODE (exp) == STRING_CST
10727 || TREE_CODE (exp) == CONSTRUCTOR
10728 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10729 return decl_readonly_section (exp, 0);
10734 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10735 to the call, and TYPE is its return type.
10737 Return NULL_TREE if no simplification was possible, otherwise return the
10738 simplified form of the call as a tree.
10740 The simplified form may be a constant or other expression which
10741 computes the same value, but in a more efficient manner (including
10742 calls to other builtin functions).
10744 The call may contain arguments which need to be evaluated, but
10745 which are not useful to determine the result of the call. In
10746 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10747 COMPOUND_EXPR will be an argument which must be evaluated.
10748 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10749 COMPOUND_EXPR in the chain will contain the tree for the simplified
10750 form of the builtin function call. */
10753 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10755 if (!validate_arg (s1, POINTER_TYPE)
10756 || !validate_arg (s2, POINTER_TYPE))
10761 const char *p1, *p2;
10763 p2 = c_getstr (s2);
10767 p1 = c_getstr (s1);
10770 const char *r = strstr (p1, p2);
10774 return build_int_cst (TREE_TYPE (s1), 0);
10776 /* Return an offset into the constant string argument. */
10777 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10778 return fold_convert_loc (loc, type, tem);
10781 /* The argument is const char *, and the result is char *, so we need
10782 a type conversion here to avoid a warning. */
10784 return fold_convert_loc (loc, type, s1);
10789 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10793 /* New argument list transforming strstr(s1, s2) to
10794 strchr(s1, s2[0]). */
10795 return build_call_expr_loc (loc, fn, 2, s1,
10796 build_int_cst (integer_type_node, p2[0]));
10800 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10801 the call, and TYPE is its return type.
10803 Return NULL_TREE if no simplification was possible, otherwise return the
10804 simplified form of the call as a tree.
10806 The simplified form may be a constant or other expression which
10807 computes the same value, but in a more efficient manner (including
10808 calls to other builtin functions).
10810 The call may contain arguments which need to be evaluated, but
10811 which are not useful to determine the result of the call. In
10812 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10813 COMPOUND_EXPR will be an argument which must be evaluated.
10814 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10815 COMPOUND_EXPR in the chain will contain the tree for the simplified
10816 form of the builtin function call. */
10819 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10821 if (!validate_arg (s1, POINTER_TYPE)
10822 || !validate_arg (s2, INTEGER_TYPE))
10828 if (TREE_CODE (s2) != INTEGER_CST)
10831 p1 = c_getstr (s1);
10838 if (target_char_cast (s2, &c))
10841 r = strchr (p1, c);
10844 return build_int_cst (TREE_TYPE (s1), 0);
10846 /* Return an offset into the constant string argument. */
10847 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10848 return fold_convert_loc (loc, type, tem);
10854 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10855 the call, and TYPE is its return type.
10857 Return NULL_TREE if no simplification was possible, otherwise return the
10858 simplified form of the call as a tree.
10860 The simplified form may be a constant or other expression which
10861 computes the same value, but in a more efficient manner (including
10862 calls to other builtin functions).
10864 The call may contain arguments which need to be evaluated, but
10865 which are not useful to determine the result of the call. In
10866 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10867 COMPOUND_EXPR will be an argument which must be evaluated.
10868 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10869 COMPOUND_EXPR in the chain will contain the tree for the simplified
10870 form of the builtin function call. */
10873 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10875 if (!validate_arg (s1, POINTER_TYPE)
10876 || !validate_arg (s2, INTEGER_TYPE))
10883 if (TREE_CODE (s2) != INTEGER_CST)
10886 p1 = c_getstr (s1);
10893 if (target_char_cast (s2, &c))
10896 r = strrchr (p1, c);
10899 return build_int_cst (TREE_TYPE (s1), 0);
10901 /* Return an offset into the constant string argument. */
10902 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10903 return fold_convert_loc (loc, type, tem);
10906 if (! integer_zerop (s2))
10909 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10913 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10914 return build_call_expr_loc (loc, fn, 2, s1, s2);
10918 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10919 to the call, and TYPE is its return type.
10921 Return NULL_TREE if no simplification was possible, otherwise return the
10922 simplified form of the call as a tree.
10924 The simplified form may be a constant or other expression which
10925 computes the same value, but in a more efficient manner (including
10926 calls to other builtin functions).
10928 The call may contain arguments which need to be evaluated, but
10929 which are not useful to determine the result of the call. In
10930 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10931 COMPOUND_EXPR will be an argument which must be evaluated.
10932 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10933 COMPOUND_EXPR in the chain will contain the tree for the simplified
10934 form of the builtin function call. */
10937 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10939 if (!validate_arg (s1, POINTER_TYPE)
10940 || !validate_arg (s2, POINTER_TYPE))
10945 const char *p1, *p2;
10947 p2 = c_getstr (s2);
10951 p1 = c_getstr (s1);
10954 const char *r = strpbrk (p1, p2);
10958 return build_int_cst (TREE_TYPE (s1), 0);
10960 /* Return an offset into the constant string argument. */
10961 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10962 return fold_convert_loc (loc, type, tem);
10966 /* strpbrk(x, "") == NULL.
10967 Evaluate and ignore s1 in case it had side-effects. */
10968 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10971 return NULL_TREE; /* Really call strpbrk. */
10973 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10977 /* New argument list transforming strpbrk(s1, s2) to
10978 strchr(s1, s2[0]). */
10979 return build_call_expr_loc (loc, fn, 2, s1,
10980 build_int_cst (integer_type_node, p2[0]));
10984 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10987 Return NULL_TREE if no simplification was possible, otherwise return the
10988 simplified form of the call as a tree.
10990 The simplified form may be a constant or other expression which
10991 computes the same value, but in a more efficient manner (including
10992 calls to other builtin functions).
10994 The call may contain arguments which need to be evaluated, but
10995 which are not useful to determine the result of the call. In
10996 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10997 COMPOUND_EXPR will be an argument which must be evaluated.
10998 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10999 COMPOUND_EXPR in the chain will contain the tree for the simplified
11000 form of the builtin function call. */
11003 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11005 if (!validate_arg (dst, POINTER_TYPE)
11006 || !validate_arg (src, POINTER_TYPE))
11010 const char *p = c_getstr (src);
11012 /* If the string length is zero, return the dst parameter. */
11013 if (p && *p == '\0')
11016 if (optimize_insn_for_speed_p ())
11018 /* See if we can store by pieces into (dst + strlen(dst)). */
11020 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11021 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11023 if (!strlen_fn || !strcpy_fn)
11026 /* If we don't have a movstr we don't want to emit an strcpy
11027 call. We have to do that if the length of the source string
11028 isn't computable (in that case we can use memcpy probably
11029 later expanding to a sequence of mov instructions). If we
11030 have movstr instructions we can emit strcpy calls. */
11033 tree len = c_strlen (src, 1);
11034 if (! len || TREE_SIDE_EFFECTS (len))
11038 /* Stabilize the argument list. */
11039 dst = builtin_save_expr (dst);
11041 /* Create strlen (dst). */
11042 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11043 /* Create (dst p+ strlen (dst)). */
11045 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11046 newdst = builtin_save_expr (newdst);
11048 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11049 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11055 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11056 arguments to the call.
11058 Return NULL_TREE if no simplification was possible, otherwise return the
11059 simplified form of the call as a tree.
11061 The simplified form may be a constant or other expression which
11062 computes the same value, but in a more efficient manner (including
11063 calls to other builtin functions).
11065 The call may contain arguments which need to be evaluated, but
11066 which are not useful to determine the result of the call. In
11067 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11068 COMPOUND_EXPR will be an argument which must be evaluated.
11069 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11070 COMPOUND_EXPR in the chain will contain the tree for the simplified
11071 form of the builtin function call. */
11074 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11076 if (!validate_arg (dst, POINTER_TYPE)
11077 || !validate_arg (src, POINTER_TYPE)
11078 || !validate_arg (len, INTEGER_TYPE))
11082 const char *p = c_getstr (src);
11084 /* If the requested length is zero, or the src parameter string
11085 length is zero, return the dst parameter. */
11086 if (integer_zerop (len) || (p && *p == '\0'))
11087 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11089 /* If the requested len is greater than or equal to the string
11090 length, call strcat. */
11091 if (TREE_CODE (len) == INTEGER_CST && p
11092 && compare_tree_int (len, strlen (p)) >= 0)
11094 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11096 /* If the replacement _DECL isn't initialized, don't do the
11101 return build_call_expr_loc (loc, fn, 2, dst, src);
11107 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11110 Return NULL_TREE if no simplification was possible, otherwise return the
11111 simplified form of the call as a tree.
11113 The simplified form may be a constant or other expression which
11114 computes the same value, but in a more efficient manner (including
11115 calls to other builtin functions).
11117 The call may contain arguments which need to be evaluated, but
11118 which are not useful to determine the result of the call. In
11119 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11120 COMPOUND_EXPR will be an argument which must be evaluated.
11121 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11122 COMPOUND_EXPR in the chain will contain the tree for the simplified
11123 form of the builtin function call. */
11126 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11128 if (!validate_arg (s1, POINTER_TYPE)
11129 || !validate_arg (s2, POINTER_TYPE))
11133 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11135 /* If both arguments are constants, evaluate at compile-time. */
11138 const size_t r = strspn (p1, p2);
11139 return size_int (r);
11142 /* If either argument is "", return NULL_TREE. */
11143 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11144 /* Evaluate and ignore both arguments in case either one has
11146 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11152 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11155 Return NULL_TREE if no simplification was possible, otherwise return the
11156 simplified form of the call as a tree.
11158 The simplified form may be a constant or other expression which
11159 computes the same value, but in a more efficient manner (including
11160 calls to other builtin functions).
11162 The call may contain arguments which need to be evaluated, but
11163 which are not useful to determine the result of the call. In
11164 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11165 COMPOUND_EXPR will be an argument which must be evaluated.
11166 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11167 COMPOUND_EXPR in the chain will contain the tree for the simplified
11168 form of the builtin function call. */
11171 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11173 if (!validate_arg (s1, POINTER_TYPE)
11174 || !validate_arg (s2, POINTER_TYPE))
11178 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11180 /* If both arguments are constants, evaluate at compile-time. */
11183 const size_t r = strcspn (p1, p2);
11184 return size_int (r);
11187 /* If the first argument is "", return NULL_TREE. */
11188 if (p1 && *p1 == '\0')
11190 /* Evaluate and ignore argument s2 in case it has
11192 return omit_one_operand_loc (loc, size_type_node,
11193 size_zero_node, s2);
11196 /* If the second argument is "", return __builtin_strlen(s1). */
11197 if (p2 && *p2 == '\0')
11199 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11201 /* If the replacement _DECL isn't initialized, don't do the
11206 return build_call_expr_loc (loc, fn, 1, s1);
11212 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11213 to the call. IGNORE is true if the value returned
11214 by the builtin will be ignored. UNLOCKED is true is true if this
11215 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11216 the known length of the string. Return NULL_TREE if no simplification
11220 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11221 bool ignore, bool unlocked, tree len)
11223 /* If we're using an unlocked function, assume the other unlocked
11224 functions exist explicitly. */
11225 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11226 : implicit_built_in_decls[BUILT_IN_FPUTC];
11227 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11228 : implicit_built_in_decls[BUILT_IN_FWRITE];
11230 /* If the return value is used, don't do the transformation. */
11234 /* Verify the arguments in the original call. */
11235 if (!validate_arg (arg0, POINTER_TYPE)
11236 || !validate_arg (arg1, POINTER_TYPE))
11240 len = c_strlen (arg0, 0);
11242 /* Get the length of the string passed to fputs. If the length
11243 can't be determined, punt. */
11245 || TREE_CODE (len) != INTEGER_CST)
11248 switch (compare_tree_int (len, 1))
11250 case -1: /* length is 0, delete the call entirely . */
11251 return omit_one_operand_loc (loc, integer_type_node,
11252 integer_zero_node, arg1);;
11254 case 0: /* length is 1, call fputc. */
11256 const char *p = c_getstr (arg0);
11261 return build_call_expr_loc (loc, fn_fputc, 2,
11263 (integer_type_node, p[0]), arg1);
11269 case 1: /* length is greater than 1, call fwrite. */
11271 /* If optimizing for size keep fputs. */
11272 if (optimize_function_for_size_p (cfun))
11274 /* New argument list transforming fputs(string, stream) to
11275 fwrite(string, 1, len, stream). */
11277 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11278 size_one_node, len, arg1);
11283 gcc_unreachable ();
11288 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11289 produced. False otherwise. This is done so that we don't output the error
11290 or warning twice or three times. */
11293 fold_builtin_next_arg (tree exp, bool va_start_p)
11295 tree fntype = TREE_TYPE (current_function_decl);
11296 int nargs = call_expr_nargs (exp);
11299 if (!stdarg_p (fntype))
11301 error ("%<va_start%> used in function with fixed args");
11307 if (va_start_p && (nargs != 2))
11309 error ("wrong number of arguments to function %<va_start%>");
11312 arg = CALL_EXPR_ARG (exp, 1);
11314 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11315 when we checked the arguments and if needed issued a warning. */
11320 /* Evidently an out of date version of <stdarg.h>; can't validate
11321 va_start's second argument, but can still work as intended. */
11322 warning (0, "%<__builtin_next_arg%> called without an argument");
11325 else if (nargs > 1)
11327 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11330 arg = CALL_EXPR_ARG (exp, 0);
11333 if (TREE_CODE (arg) == SSA_NAME)
11334 arg = SSA_NAME_VAR (arg);
11336 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11337 or __builtin_next_arg (0) the first time we see it, after checking
11338 the arguments and if needed issuing a warning. */
11339 if (!integer_zerop (arg))
11341 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11343 /* Strip off all nops for the sake of the comparison. This
11344 is not quite the same as STRIP_NOPS. It does more.
11345 We must also strip off INDIRECT_EXPR for C++ reference
11347 while (CONVERT_EXPR_P (arg)
11348 || TREE_CODE (arg) == INDIRECT_REF)
11349 arg = TREE_OPERAND (arg, 0);
11350 if (arg != last_parm)
11352 /* FIXME: Sometimes with the tree optimizers we can get the
11353 not the last argument even though the user used the last
11354 argument. We just warn and set the arg to be the last
11355 argument so that we will get wrong-code because of
11357 warning (0, "second parameter of %<va_start%> not last named argument");
11360 /* Undefined by C99 7.15.1.4p4 (va_start):
11361 "If the parameter parmN is declared with the register storage
11362 class, with a function or array type, or with a type that is
11363 not compatible with the type that results after application of
11364 the default argument promotions, the behavior is undefined."
11366 else if (DECL_REGISTER (arg))
11367 warning (0, "undefined behaviour when second parameter of "
11368 "%<va_start%> is declared with %<register%> storage");
11370 /* We want to verify the second parameter just once before the tree
11371 optimizers are run and then avoid keeping it in the tree,
11372 as otherwise we could warn even for correct code like:
11373 void foo (int i, ...)
11374 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11376 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11378 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11384 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11385 ORIG may be null if this is a 2-argument call. We don't attempt to
11386 simplify calls with more than 3 arguments.
11388 Return NULL_TREE if no simplification was possible, otherwise return the
11389 simplified form of the call as a tree. If IGNORED is true, it means that
11390 the caller does not use the returned value of the function. */
11393 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11394 tree orig, int ignored)
11397 const char *fmt_str = NULL;
11399 /* Verify the required arguments in the original call. We deal with two
11400 types of sprintf() calls: 'sprintf (str, fmt)' and
11401 'sprintf (dest, "%s", orig)'. */
11402 if (!validate_arg (dest, POINTER_TYPE)
11403 || !validate_arg (fmt, POINTER_TYPE))
11405 if (orig && !validate_arg (orig, POINTER_TYPE))
11408 /* Check whether the format is a literal string constant. */
11409 fmt_str = c_getstr (fmt);
11410 if (fmt_str == NULL)
11414 retval = NULL_TREE;
11416 if (!init_target_chars ())
11419 /* If the format doesn't contain % args or %%, use strcpy. */
11420 if (strchr (fmt_str, target_percent) == NULL)
11422 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11427 /* Don't optimize sprintf (buf, "abc", ptr++). */
11431 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11432 'format' is known to contain no % formats. */
11433 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11435 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11438 /* If the format is "%s", use strcpy if the result isn't used. */
11439 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11442 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11447 /* Don't crash on sprintf (str1, "%s"). */
11451 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11454 retval = c_strlen (orig, 1);
11455 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11458 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11461 if (call && retval)
11463 retval = fold_convert_loc
11464 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11466 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11472 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11473 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11474 attempt to simplify calls with more than 4 arguments.
11476 Return NULL_TREE if no simplification was possible, otherwise return the
11477 simplified form of the call as a tree. If IGNORED is true, it means that
11478 the caller does not use the returned value of the function. */
11481 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
11482 tree orig, int ignored)
11485 const char *fmt_str = NULL;
11486 unsigned HOST_WIDE_INT destlen;
11488 /* Verify the required arguments in the original call. We deal with two
11489 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
11490 'snprintf (dest, cst, "%s", orig)'. */
11491 if (!validate_arg (dest, POINTER_TYPE)
11492 || !validate_arg (destsize, INTEGER_TYPE)
11493 || !validate_arg (fmt, POINTER_TYPE))
11495 if (orig && !validate_arg (orig, POINTER_TYPE))
11498 if (!host_integerp (destsize, 1))
11501 /* Check whether the format is a literal string constant. */
11502 fmt_str = c_getstr (fmt);
11503 if (fmt_str == NULL)
11507 retval = NULL_TREE;
11509 if (!init_target_chars ())
11512 destlen = tree_low_cst (destsize, 1);
11514 /* If the format doesn't contain % args or %%, use strcpy. */
11515 if (strchr (fmt_str, target_percent) == NULL)
11517 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11518 size_t len = strlen (fmt_str);
11520 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
11524 /* We could expand this as
11525 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
11527 memcpy (str, fmt_with_nul_at_cstm1, cst);
11528 but in the former case that might increase code size
11529 and in the latter case grow .rodata section too much.
11530 So punt for now. */
11531 if (len >= destlen)
11537 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
11538 'format' is known to contain no % formats and
11539 strlen (fmt) < cst. */
11540 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11543 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11546 /* If the format is "%s", use strcpy if the result isn't used. */
11547 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11549 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11550 unsigned HOST_WIDE_INT origlen;
11552 /* Don't crash on snprintf (str1, cst, "%s"). */
11556 retval = c_strlen (orig, 1);
11557 if (!retval || !host_integerp (retval, 1))
11560 origlen = tree_low_cst (retval, 1);
11561 /* We could expand this as
11562 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
11564 memcpy (str1, str2_with_nul_at_cstm1, cst);
11565 but in the former case that might increase code size
11566 and in the latter case grow .rodata section too much.
11567 So punt for now. */
11568 if (origlen >= destlen)
11571 /* Convert snprintf (str1, cst, "%s", str2) into
11572 strcpy (str1, str2) if strlen (str2) < cst. */
11576 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11579 retval = NULL_TREE;
11582 if (call && retval)
11584 tree fn = built_in_decls[BUILT_IN_SNPRINTF];
11585 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
11586 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11592 /* Expand a call EXP to __builtin_object_size. */
11595 expand_builtin_object_size (tree exp)
11598 int object_size_type;
11599 tree fndecl = get_callee_fndecl (exp);
11601 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11603 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11605 expand_builtin_trap ();
11609 ost = CALL_EXPR_ARG (exp, 1);
11612 if (TREE_CODE (ost) != INTEGER_CST
11613 || tree_int_cst_sgn (ost) < 0
11614 || compare_tree_int (ost, 3) > 0)
11616 error ("%Klast argument of %D is not integer constant between 0 and 3",
11618 expand_builtin_trap ();
11622 object_size_type = tree_low_cst (ost, 0);
11624 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11627 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11628 FCODE is the BUILT_IN_* to use.
11629 Return NULL_RTX if we failed; the caller should emit a normal call,
11630 otherwise try to get the result in TARGET, if convenient (and in
11631 mode MODE if that's convenient). */
11634 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11635 enum built_in_function fcode)
11637 tree dest, src, len, size;
11639 if (!validate_arglist (exp,
11641 fcode == BUILT_IN_MEMSET_CHK
11642 ? INTEGER_TYPE : POINTER_TYPE,
11643 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11646 dest = CALL_EXPR_ARG (exp, 0);
11647 src = CALL_EXPR_ARG (exp, 1);
11648 len = CALL_EXPR_ARG (exp, 2);
11649 size = CALL_EXPR_ARG (exp, 3);
11651 if (! host_integerp (size, 1))
11654 if (host_integerp (len, 1) || integer_all_onesp (size))
11658 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11660 warning_at (tree_nonartificial_location (exp),
11661 0, "%Kcall to %D will always overflow destination buffer",
11662 exp, get_callee_fndecl (exp));
11667 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11668 mem{cpy,pcpy,move,set} is available. */
11671 case BUILT_IN_MEMCPY_CHK:
11672 fn = built_in_decls[BUILT_IN_MEMCPY];
11674 case BUILT_IN_MEMPCPY_CHK:
11675 fn = built_in_decls[BUILT_IN_MEMPCPY];
11677 case BUILT_IN_MEMMOVE_CHK:
11678 fn = built_in_decls[BUILT_IN_MEMMOVE];
11680 case BUILT_IN_MEMSET_CHK:
11681 fn = built_in_decls[BUILT_IN_MEMSET];
11690 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11691 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11692 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11693 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11695 else if (fcode == BUILT_IN_MEMSET_CHK)
11699 unsigned int dest_align
11700 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11702 /* If DEST is not a pointer type, call the normal function. */
11703 if (dest_align == 0)
11706 /* If SRC and DEST are the same (and not volatile), do nothing. */
11707 if (operand_equal_p (src, dest, 0))
11711 if (fcode != BUILT_IN_MEMPCPY_CHK)
11713 /* Evaluate and ignore LEN in case it has side-effects. */
11714 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11715 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11718 expr = fold_build_pointer_plus (dest, len);
11719 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11722 /* __memmove_chk special case. */
11723 if (fcode == BUILT_IN_MEMMOVE_CHK)
11725 unsigned int src_align
11726 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11728 if (src_align == 0)
11731 /* If src is categorized for a readonly section we can use
11732 normal __memcpy_chk. */
11733 if (readonly_data_expr (src))
11735 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11738 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11739 dest, src, len, size);
11740 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11741 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11742 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11749 /* Emit warning if a buffer overflow is detected at compile time. */
11752 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11756 location_t loc = tree_nonartificial_location (exp);
11760 case BUILT_IN_STRCPY_CHK:
11761 case BUILT_IN_STPCPY_CHK:
11762 /* For __strcat_chk the warning will be emitted only if overflowing
11763 by at least strlen (dest) + 1 bytes. */
11764 case BUILT_IN_STRCAT_CHK:
11765 len = CALL_EXPR_ARG (exp, 1);
11766 size = CALL_EXPR_ARG (exp, 2);
11769 case BUILT_IN_STRNCAT_CHK:
11770 case BUILT_IN_STRNCPY_CHK:
11771 len = CALL_EXPR_ARG (exp, 2);
11772 size = CALL_EXPR_ARG (exp, 3);
11774 case BUILT_IN_SNPRINTF_CHK:
11775 case BUILT_IN_VSNPRINTF_CHK:
11776 len = CALL_EXPR_ARG (exp, 1);
11777 size = CALL_EXPR_ARG (exp, 3);
11780 gcc_unreachable ();
11786 if (! host_integerp (size, 1) || integer_all_onesp (size))
11791 len = c_strlen (len, 1);
11792 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11795 else if (fcode == BUILT_IN_STRNCAT_CHK)
11797 tree src = CALL_EXPR_ARG (exp, 1);
11798 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11800 src = c_strlen (src, 1);
11801 if (! src || ! host_integerp (src, 1))
11803 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11804 exp, get_callee_fndecl (exp));
11807 else if (tree_int_cst_lt (src, size))
11810 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11813 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11814 exp, get_callee_fndecl (exp));
11817 /* Emit warning if a buffer overflow is detected at compile time
11818 in __sprintf_chk/__vsprintf_chk calls. */
11821 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11823 tree size, len, fmt;
11824 const char *fmt_str;
11825 int nargs = call_expr_nargs (exp);
11827 /* Verify the required arguments in the original call. */
11831 size = CALL_EXPR_ARG (exp, 2);
11832 fmt = CALL_EXPR_ARG (exp, 3);
11834 if (! host_integerp (size, 1) || integer_all_onesp (size))
11837 /* Check whether the format is a literal string constant. */
11838 fmt_str = c_getstr (fmt);
11839 if (fmt_str == NULL)
11842 if (!init_target_chars ())
11845 /* If the format doesn't contain % args or %%, we know its size. */
11846 if (strchr (fmt_str, target_percent) == 0)
11847 len = build_int_cstu (size_type_node, strlen (fmt_str));
11848 /* If the format is "%s" and first ... argument is a string literal,
11850 else if (fcode == BUILT_IN_SPRINTF_CHK
11851 && strcmp (fmt_str, target_percent_s) == 0)
11857 arg = CALL_EXPR_ARG (exp, 4);
11858 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11861 len = c_strlen (arg, 1);
11862 if (!len || ! host_integerp (len, 1))
11868 if (! tree_int_cst_lt (len, size))
11869 warning_at (tree_nonartificial_location (exp),
11870 0, "%Kcall to %D will always overflow destination buffer",
11871 exp, get_callee_fndecl (exp));
11874 /* Emit warning if a free is called with address of a variable. */
11877 maybe_emit_free_warning (tree exp)
11879 tree arg = CALL_EXPR_ARG (exp, 0);
11882 if (TREE_CODE (arg) != ADDR_EXPR)
11885 arg = get_base_address (TREE_OPERAND (arg, 0));
11886 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11889 if (SSA_VAR_P (arg))
11890 warning_at (tree_nonartificial_location (exp),
11891 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11893 warning_at (tree_nonartificial_location (exp),
11894 0, "%Kattempt to free a non-heap object", exp);
11897 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11901 fold_builtin_object_size (tree ptr, tree ost)
11903 unsigned HOST_WIDE_INT bytes;
11904 int object_size_type;
11906 if (!validate_arg (ptr, POINTER_TYPE)
11907 || !validate_arg (ost, INTEGER_TYPE))
11912 if (TREE_CODE (ost) != INTEGER_CST
11913 || tree_int_cst_sgn (ost) < 0
11914 || compare_tree_int (ost, 3) > 0)
11917 object_size_type = tree_low_cst (ost, 0);
11919 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11920 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11921 and (size_t) 0 for types 2 and 3. */
11922 if (TREE_SIDE_EFFECTS (ptr))
11923 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11925 if (TREE_CODE (ptr) == ADDR_EXPR)
11927 bytes = compute_builtin_object_size (ptr, object_size_type);
11928 if (double_int_fits_to_tree_p (size_type_node,
11929 uhwi_to_double_int (bytes)))
11930 return build_int_cstu (size_type_node, bytes);
11932 else if (TREE_CODE (ptr) == SSA_NAME)
11934 /* If object size is not known yet, delay folding until
11935 later. Maybe subsequent passes will help determining
11937 bytes = compute_builtin_object_size (ptr, object_size_type);
11938 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11939 && double_int_fits_to_tree_p (size_type_node,
11940 uhwi_to_double_int (bytes)))
11941 return build_int_cstu (size_type_node, bytes);
11947 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11948 DEST, SRC, LEN, and SIZE are the arguments to the call.
11949 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11950 code of the builtin. If MAXLEN is not NULL, it is maximum length
11951 passed as third argument. */
11954 fold_builtin_memory_chk (location_t loc, tree fndecl,
11955 tree dest, tree src, tree len, tree size,
11956 tree maxlen, bool ignore,
11957 enum built_in_function fcode)
11961 if (!validate_arg (dest, POINTER_TYPE)
11962 || !validate_arg (src,
11963 (fcode == BUILT_IN_MEMSET_CHK
11964 ? INTEGER_TYPE : POINTER_TYPE))
11965 || !validate_arg (len, INTEGER_TYPE)
11966 || !validate_arg (size, INTEGER_TYPE))
11969 /* If SRC and DEST are the same (and not volatile), return DEST
11970 (resp. DEST+LEN for __mempcpy_chk). */
11971 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11973 if (fcode != BUILT_IN_MEMPCPY_CHK)
11974 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11978 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
11979 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11983 if (! host_integerp (size, 1))
11986 if (! integer_all_onesp (size))
11988 if (! host_integerp (len, 1))
11990 /* If LEN is not constant, try MAXLEN too.
11991 For MAXLEN only allow optimizing into non-_ocs function
11992 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11993 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11995 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11997 /* (void) __mempcpy_chk () can be optimized into
11998 (void) __memcpy_chk (). */
11999 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12003 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12011 if (tree_int_cst_lt (size, maxlen))
12016 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12017 mem{cpy,pcpy,move,set} is available. */
12020 case BUILT_IN_MEMCPY_CHK:
12021 fn = built_in_decls[BUILT_IN_MEMCPY];
12023 case BUILT_IN_MEMPCPY_CHK:
12024 fn = built_in_decls[BUILT_IN_MEMPCPY];
12026 case BUILT_IN_MEMMOVE_CHK:
12027 fn = built_in_decls[BUILT_IN_MEMMOVE];
12029 case BUILT_IN_MEMSET_CHK:
12030 fn = built_in_decls[BUILT_IN_MEMSET];
12039 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12042 /* Fold a call to the __st[rp]cpy_chk builtin.
12043 DEST, SRC, and SIZE are the arguments to the call.
12044 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12045 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12046 strings passed as second argument. */
12049 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12050 tree src, tree size,
12051 tree maxlen, bool ignore,
12052 enum built_in_function fcode)
12056 if (!validate_arg (dest, POINTER_TYPE)
12057 || !validate_arg (src, POINTER_TYPE)
12058 || !validate_arg (size, INTEGER_TYPE))
12061 /* If SRC and DEST are the same (and not volatile), return DEST. */
12062 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12063 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12065 if (! host_integerp (size, 1))
12068 if (! integer_all_onesp (size))
12070 len = c_strlen (src, 1);
12071 if (! len || ! host_integerp (len, 1))
12073 /* If LEN is not constant, try MAXLEN too.
12074 For MAXLEN only allow optimizing into non-_ocs function
12075 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12076 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12078 if (fcode == BUILT_IN_STPCPY_CHK)
12083 /* If return value of __stpcpy_chk is ignored,
12084 optimize into __strcpy_chk. */
12085 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12089 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12092 if (! len || TREE_SIDE_EFFECTS (len))
12095 /* If c_strlen returned something, but not a constant,
12096 transform __strcpy_chk into __memcpy_chk. */
12097 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12101 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12102 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12103 build_call_expr_loc (loc, fn, 4,
12104 dest, src, len, size));
12110 if (! tree_int_cst_lt (maxlen, size))
12114 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12115 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12116 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12120 return build_call_expr_loc (loc, fn, 2, dest, src);
12123 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12124 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12125 length passed as third argument. */
12128 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12129 tree len, tree size, tree maxlen)
12133 if (!validate_arg (dest, POINTER_TYPE)
12134 || !validate_arg (src, POINTER_TYPE)
12135 || !validate_arg (len, INTEGER_TYPE)
12136 || !validate_arg (size, INTEGER_TYPE))
12139 if (! host_integerp (size, 1))
12142 if (! integer_all_onesp (size))
12144 if (! host_integerp (len, 1))
12146 /* If LEN is not constant, try MAXLEN too.
12147 For MAXLEN only allow optimizing into non-_ocs function
12148 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12149 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12155 if (tree_int_cst_lt (size, maxlen))
12159 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12160 fn = built_in_decls[BUILT_IN_STRNCPY];
12164 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12167 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12168 are the arguments to the call. */
12171 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12172 tree src, tree size)
12177 if (!validate_arg (dest, POINTER_TYPE)
12178 || !validate_arg (src, POINTER_TYPE)
12179 || !validate_arg (size, INTEGER_TYPE))
12182 p = c_getstr (src);
12183 /* If the SRC parameter is "", return DEST. */
12184 if (p && *p == '\0')
12185 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12187 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12190 /* If __builtin_strcat_chk is used, assume strcat is available. */
12191 fn = built_in_decls[BUILT_IN_STRCAT];
12195 return build_call_expr_loc (loc, fn, 2, dest, src);
12198 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12202 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12203 tree dest, tree src, tree len, tree size)
12208 if (!validate_arg (dest, POINTER_TYPE)
12209 || !validate_arg (src, POINTER_TYPE)
12210 || !validate_arg (size, INTEGER_TYPE)
12211 || !validate_arg (size, INTEGER_TYPE))
12214 p = c_getstr (src);
12215 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12216 if (p && *p == '\0')
12217 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12218 else if (integer_zerop (len))
12219 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12221 if (! host_integerp (size, 1))
12224 if (! integer_all_onesp (size))
12226 tree src_len = c_strlen (src, 1);
12228 && host_integerp (src_len, 1)
12229 && host_integerp (len, 1)
12230 && ! tree_int_cst_lt (len, src_len))
12232 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12233 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12237 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12242 /* If __builtin_strncat_chk is used, assume strncat is available. */
12243 fn = built_in_decls[BUILT_IN_STRNCAT];
12247 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12250 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12251 Return NULL_TREE if a normal call should be emitted rather than
12252 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12253 or BUILT_IN_VSPRINTF_CHK. */
12256 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12257 enum built_in_function fcode)
12259 tree dest, size, len, fn, fmt, flag;
12260 const char *fmt_str;
12262 /* Verify the required arguments in the original call. */
12266 if (!validate_arg (dest, POINTER_TYPE))
12269 if (!validate_arg (flag, INTEGER_TYPE))
12272 if (!validate_arg (size, INTEGER_TYPE))
12275 if (!validate_arg (fmt, POINTER_TYPE))
12278 if (! host_integerp (size, 1))
12283 if (!init_target_chars ())
12286 /* Check whether the format is a literal string constant. */
12287 fmt_str = c_getstr (fmt);
12288 if (fmt_str != NULL)
12290 /* If the format doesn't contain % args or %%, we know the size. */
12291 if (strchr (fmt_str, target_percent) == 0)
12293 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12294 len = build_int_cstu (size_type_node, strlen (fmt_str));
12296 /* If the format is "%s" and first ... argument is a string literal,
12297 we know the size too. */
12298 else if (fcode == BUILT_IN_SPRINTF_CHK
12299 && strcmp (fmt_str, target_percent_s) == 0)
12306 if (validate_arg (arg, POINTER_TYPE))
12308 len = c_strlen (arg, 1);
12309 if (! len || ! host_integerp (len, 1))
12316 if (! integer_all_onesp (size))
12318 if (! len || ! tree_int_cst_lt (len, size))
12322 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12323 or if format doesn't contain % chars or is "%s". */
12324 if (! integer_zerop (flag))
12326 if (fmt_str == NULL)
12328 if (strchr (fmt_str, target_percent) != NULL
12329 && strcmp (fmt_str, target_percent_s))
12333 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12334 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12335 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12339 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12342 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12343 a normal call should be emitted rather than expanding the function
12344 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12347 fold_builtin_sprintf_chk (location_t loc, tree exp,
12348 enum built_in_function fcode)
12350 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12351 CALL_EXPR_ARGP (exp), fcode);
12354 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12355 NULL_TREE if a normal call should be emitted rather than expanding
12356 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12357 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12358 passed as second argument. */
12361 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12362 tree maxlen, enum built_in_function fcode)
12364 tree dest, size, len, fn, fmt, flag;
12365 const char *fmt_str;
12367 /* Verify the required arguments in the original call. */
12371 if (!validate_arg (dest, POINTER_TYPE))
12374 if (!validate_arg (len, INTEGER_TYPE))
12377 if (!validate_arg (flag, INTEGER_TYPE))
12380 if (!validate_arg (size, INTEGER_TYPE))
12383 if (!validate_arg (fmt, POINTER_TYPE))
12386 if (! host_integerp (size, 1))
12389 if (! integer_all_onesp (size))
12391 if (! host_integerp (len, 1))
12393 /* If LEN is not constant, try MAXLEN too.
12394 For MAXLEN only allow optimizing into non-_ocs function
12395 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12396 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12402 if (tree_int_cst_lt (size, maxlen))
12406 if (!init_target_chars ())
12409 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12410 or if format doesn't contain % chars or is "%s". */
12411 if (! integer_zerop (flag))
12413 fmt_str = c_getstr (fmt);
12414 if (fmt_str == NULL)
12416 if (strchr (fmt_str, target_percent) != NULL
12417 && strcmp (fmt_str, target_percent_s))
12421 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12423 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12424 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12428 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12431 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12432 a normal call should be emitted rather than expanding the function
12433 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12434 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12435 passed as second argument. */
12438 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12439 enum built_in_function fcode)
12441 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12442 CALL_EXPR_ARGP (exp), maxlen, fcode);
12445 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12446 FMT and ARG are the arguments to the call; we don't fold cases with
12447 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12449 Return NULL_TREE if no simplification was possible, otherwise return the
12450 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12451 code of the function to be simplified. */
12454 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12455 tree arg, bool ignore,
12456 enum built_in_function fcode)
12458 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12459 const char *fmt_str = NULL;
12461 /* If the return value is used, don't do the transformation. */
12465 /* Verify the required arguments in the original call. */
12466 if (!validate_arg (fmt, POINTER_TYPE))
12469 /* Check whether the format is a literal string constant. */
12470 fmt_str = c_getstr (fmt);
12471 if (fmt_str == NULL)
12474 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12476 /* If we're using an unlocked function, assume the other
12477 unlocked functions exist explicitly. */
12478 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12479 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12483 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12484 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12487 if (!init_target_chars ())
12490 if (strcmp (fmt_str, target_percent_s) == 0
12491 || strchr (fmt_str, target_percent) == NULL)
12495 if (strcmp (fmt_str, target_percent_s) == 0)
12497 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12500 if (!arg || !validate_arg (arg, POINTER_TYPE))
12503 str = c_getstr (arg);
12509 /* The format specifier doesn't contain any '%' characters. */
12510 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12516 /* If the string was "", printf does nothing. */
12517 if (str[0] == '\0')
12518 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12520 /* If the string has length of 1, call putchar. */
12521 if (str[1] == '\0')
12523 /* Given printf("c"), (where c is any one character,)
12524 convert "c"[0] to an int and pass that to the replacement
12526 newarg = build_int_cst (integer_type_node, str[0]);
12528 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12532 /* If the string was "string\n", call puts("string"). */
12533 size_t len = strlen (str);
12534 if ((unsigned char)str[len - 1] == target_newline
12535 && (size_t) (int) len == len
12539 tree offset_node, string_cst;
12541 /* Create a NUL-terminated string that's one char shorter
12542 than the original, stripping off the trailing '\n'. */
12543 newarg = build_string_literal (len, str);
12544 string_cst = string_constant (newarg, &offset_node);
12545 gcc_checking_assert (string_cst
12546 && (TREE_STRING_LENGTH (string_cst)
12548 && integer_zerop (offset_node)
12550 TREE_STRING_POINTER (string_cst)[len - 1]
12551 == target_newline);
12552 /* build_string_literal creates a new STRING_CST,
12553 modify it in place to avoid double copying. */
12554 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12555 newstr[len - 1] = '\0';
12557 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12560 /* We'd like to arrange to call fputs(string,stdout) here,
12561 but we need stdout and don't have a way to get it yet. */
12566 /* The other optimizations can be done only on the non-va_list variants. */
12567 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12570 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12571 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12573 if (!arg || !validate_arg (arg, POINTER_TYPE))
12576 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12579 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12580 else if (strcmp (fmt_str, target_percent_c) == 0)
12582 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12585 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12591 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12594 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12595 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12596 more than 3 arguments, and ARG may be null in the 2-argument case.
12598 Return NULL_TREE if no simplification was possible, otherwise return the
12599 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12600 code of the function to be simplified. */
12603 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12604 tree fmt, tree arg, bool ignore,
12605 enum built_in_function fcode)
12607 tree fn_fputc, fn_fputs, call = NULL_TREE;
12608 const char *fmt_str = NULL;
12610 /* If the return value is used, don't do the transformation. */
12614 /* Verify the required arguments in the original call. */
12615 if (!validate_arg (fp, POINTER_TYPE))
12617 if (!validate_arg (fmt, POINTER_TYPE))
12620 /* Check whether the format is a literal string constant. */
12621 fmt_str = c_getstr (fmt);
12622 if (fmt_str == NULL)
12625 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12627 /* If we're using an unlocked function, assume the other
12628 unlocked functions exist explicitly. */
12629 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12630 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12634 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12635 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12638 if (!init_target_chars ())
12641 /* If the format doesn't contain % args or %%, use strcpy. */
12642 if (strchr (fmt_str, target_percent) == NULL)
12644 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12648 /* If the format specifier was "", fprintf does nothing. */
12649 if (fmt_str[0] == '\0')
12651 /* If FP has side-effects, just wait until gimplification is
12653 if (TREE_SIDE_EFFECTS (fp))
12656 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12659 /* When "string" doesn't contain %, replace all cases of
12660 fprintf (fp, string) with fputs (string, fp). The fputs
12661 builtin will take care of special cases like length == 1. */
12663 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12666 /* The other optimizations can be done only on the non-va_list variants. */
12667 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12670 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12671 else if (strcmp (fmt_str, target_percent_s) == 0)
12673 if (!arg || !validate_arg (arg, POINTER_TYPE))
12676 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12679 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12680 else if (strcmp (fmt_str, target_percent_c) == 0)
12682 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12685 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12690 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12693 /* Initialize format string characters in the target charset. */
12696 init_target_chars (void)
12701 target_newline = lang_hooks.to_target_charset ('\n');
12702 target_percent = lang_hooks.to_target_charset ('%');
12703 target_c = lang_hooks.to_target_charset ('c');
12704 target_s = lang_hooks.to_target_charset ('s');
12705 if (target_newline == 0 || target_percent == 0 || target_c == 0
12709 target_percent_c[0] = target_percent;
12710 target_percent_c[1] = target_c;
12711 target_percent_c[2] = '\0';
12713 target_percent_s[0] = target_percent;
12714 target_percent_s[1] = target_s;
12715 target_percent_s[2] = '\0';
12717 target_percent_s_newline[0] = target_percent;
12718 target_percent_s_newline[1] = target_s;
12719 target_percent_s_newline[2] = target_newline;
12720 target_percent_s_newline[3] = '\0';
12727 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12728 and no overflow/underflow occurred. INEXACT is true if M was not
12729 exactly calculated. TYPE is the tree type for the result. This
12730 function assumes that you cleared the MPFR flags and then
12731 calculated M to see if anything subsequently set a flag prior to
12732 entering this function. Return NULL_TREE if any checks fail. */
12735 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12737 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12738 overflow/underflow occurred. If -frounding-math, proceed iff the
12739 result of calling FUNC was exact. */
12740 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12741 && (!flag_rounding_math || !inexact))
12743 REAL_VALUE_TYPE rr;
12745 real_from_mpfr (&rr, m, type, GMP_RNDN);
12746 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12747 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12748 but the mpft_t is not, then we underflowed in the
12750 if (real_isfinite (&rr)
12751 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12753 REAL_VALUE_TYPE rmode;
12755 real_convert (&rmode, TYPE_MODE (type), &rr);
12756 /* Proceed iff the specified mode can hold the value. */
12757 if (real_identical (&rmode, &rr))
12758 return build_real (type, rmode);
12764 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12765 number and no overflow/underflow occurred. INEXACT is true if M
12766 was not exactly calculated. TYPE is the tree type for the result.
12767 This function assumes that you cleared the MPFR flags and then
12768 calculated M to see if anything subsequently set a flag prior to
12769 entering this function. Return NULL_TREE if any checks fail, if
12770 FORCE_CONVERT is true, then bypass the checks. */
12773 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12775 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12776 overflow/underflow occurred. If -frounding-math, proceed iff the
12777 result of calling FUNC was exact. */
12779 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12780 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12781 && (!flag_rounding_math || !inexact)))
12783 REAL_VALUE_TYPE re, im;
12785 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12786 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12787 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12788 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12789 but the mpft_t is not, then we underflowed in the
12792 || (real_isfinite (&re) && real_isfinite (&im)
12793 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12794 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12796 REAL_VALUE_TYPE re_mode, im_mode;
12798 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12799 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12800 /* Proceed iff the specified mode can hold the value. */
12802 || (real_identical (&re_mode, &re)
12803 && real_identical (&im_mode, &im)))
12804 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12805 build_real (TREE_TYPE (type), im_mode));
12811 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12812 FUNC on it and return the resulting value as a tree with type TYPE.
12813 If MIN and/or MAX are not NULL, then the supplied ARG must be
12814 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12815 acceptable values, otherwise they are not. The mpfr precision is
12816 set to the precision of TYPE. We assume that function FUNC returns
12817 zero if the result could be calculated exactly within the requested
12821 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12822 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12825 tree result = NULL_TREE;
12829 /* To proceed, MPFR must exactly represent the target floating point
12830 format, which only happens when the target base equals two. */
12831 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12832 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12834 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12836 if (real_isfinite (ra)
12837 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12838 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12840 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12841 const int prec = fmt->p;
12842 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12846 mpfr_init2 (m, prec);
12847 mpfr_from_real (m, ra, GMP_RNDN);
12848 mpfr_clear_flags ();
12849 inexact = func (m, m, rnd);
12850 result = do_mpfr_ckconv (m, type, inexact);
12858 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12859 FUNC on it and return the resulting value as a tree with type TYPE.
12860 The mpfr precision is set to the precision of TYPE. We assume that
12861 function FUNC returns zero if the result could be calculated
12862 exactly within the requested precision. */
12865 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12866 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12868 tree result = NULL_TREE;
12873 /* To proceed, MPFR must exactly represent the target floating point
12874 format, which only happens when the target base equals two. */
12875 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12876 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12877 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12879 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12880 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12882 if (real_isfinite (ra1) && real_isfinite (ra2))
12884 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12885 const int prec = fmt->p;
12886 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12890 mpfr_inits2 (prec, m1, m2, NULL);
12891 mpfr_from_real (m1, ra1, GMP_RNDN);
12892 mpfr_from_real (m2, ra2, GMP_RNDN);
12893 mpfr_clear_flags ();
12894 inexact = func (m1, m1, m2, rnd);
12895 result = do_mpfr_ckconv (m1, type, inexact);
12896 mpfr_clears (m1, m2, NULL);
12903 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12904 FUNC on it and return the resulting value as a tree with type TYPE.
12905 The mpfr precision is set to the precision of TYPE. We assume that
12906 function FUNC returns zero if the result could be calculated
12907 exactly within the requested precision. */
12910 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12911 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12913 tree result = NULL_TREE;
12919 /* To proceed, MPFR must exactly represent the target floating point
12920 format, which only happens when the target base equals two. */
12921 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12922 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12923 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12924 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12926 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12927 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12928 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12930 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12932 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12933 const int prec = fmt->p;
12934 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12938 mpfr_inits2 (prec, m1, m2, m3, NULL);
12939 mpfr_from_real (m1, ra1, GMP_RNDN);
12940 mpfr_from_real (m2, ra2, GMP_RNDN);
12941 mpfr_from_real (m3, ra3, GMP_RNDN);
12942 mpfr_clear_flags ();
12943 inexact = func (m1, m1, m2, m3, rnd);
12944 result = do_mpfr_ckconv (m1, type, inexact);
12945 mpfr_clears (m1, m2, m3, NULL);
12952 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12953 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12954 If ARG_SINP and ARG_COSP are NULL then the result is returned
12955 as a complex value.
12956 The type is taken from the type of ARG and is used for setting the
12957 precision of the calculation and results. */
12960 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12962 tree const type = TREE_TYPE (arg);
12963 tree result = NULL_TREE;
12967 /* To proceed, MPFR must exactly represent the target floating point
12968 format, which only happens when the target base equals two. */
12969 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12970 && TREE_CODE (arg) == REAL_CST
12971 && !TREE_OVERFLOW (arg))
12973 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12975 if (real_isfinite (ra))
12977 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12978 const int prec = fmt->p;
12979 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12980 tree result_s, result_c;
12984 mpfr_inits2 (prec, m, ms, mc, NULL);
12985 mpfr_from_real (m, ra, GMP_RNDN);
12986 mpfr_clear_flags ();
12987 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12988 result_s = do_mpfr_ckconv (ms, type, inexact);
12989 result_c = do_mpfr_ckconv (mc, type, inexact);
12990 mpfr_clears (m, ms, mc, NULL);
12991 if (result_s && result_c)
12993 /* If we are to return in a complex value do so. */
12994 if (!arg_sinp && !arg_cosp)
12995 return build_complex (build_complex_type (type),
12996 result_c, result_s);
12998 /* Dereference the sin/cos pointer arguments. */
12999 arg_sinp = build_fold_indirect_ref (arg_sinp);
13000 arg_cosp = build_fold_indirect_ref (arg_cosp);
13001 /* Proceed if valid pointer type were passed in. */
13002 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13003 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13005 /* Set the values. */
13006 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13008 TREE_SIDE_EFFECTS (result_s) = 1;
13009 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13011 TREE_SIDE_EFFECTS (result_c) = 1;
13012 /* Combine the assignments into a compound expr. */
13013 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13014 result_s, result_c));
13022 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13023 two-argument mpfr order N Bessel function FUNC on them and return
13024 the resulting value as a tree with type TYPE. The mpfr precision
13025 is set to the precision of TYPE. We assume that function FUNC
13026 returns zero if the result could be calculated exactly within the
13027 requested precision. */
13029 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13030 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13031 const REAL_VALUE_TYPE *min, bool inclusive)
13033 tree result = NULL_TREE;
13038 /* To proceed, MPFR must exactly represent the target floating point
13039 format, which only happens when the target base equals two. */
13040 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13041 && host_integerp (arg1, 0)
13042 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13044 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13045 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13048 && real_isfinite (ra)
13049 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13051 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13052 const int prec = fmt->p;
13053 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13057 mpfr_init2 (m, prec);
13058 mpfr_from_real (m, ra, GMP_RNDN);
13059 mpfr_clear_flags ();
13060 inexact = func (m, n, m, rnd);
13061 result = do_mpfr_ckconv (m, type, inexact);
13069 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13070 the pointer *(ARG_QUO) and return the result. The type is taken
13071 from the type of ARG0 and is used for setting the precision of the
13072 calculation and results. */
13075 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13077 tree const type = TREE_TYPE (arg0);
13078 tree result = NULL_TREE;
13083 /* To proceed, MPFR must exactly represent the target floating point
13084 format, which only happens when the target base equals two. */
13085 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13086 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13087 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13089 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13090 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13092 if (real_isfinite (ra0) && real_isfinite (ra1))
13094 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13095 const int prec = fmt->p;
13096 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13101 mpfr_inits2 (prec, m0, m1, NULL);
13102 mpfr_from_real (m0, ra0, GMP_RNDN);
13103 mpfr_from_real (m1, ra1, GMP_RNDN);
13104 mpfr_clear_flags ();
13105 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13106 /* Remquo is independent of the rounding mode, so pass
13107 inexact=0 to do_mpfr_ckconv(). */
13108 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13109 mpfr_clears (m0, m1, NULL);
13112 /* MPFR calculates quo in the host's long so it may
13113 return more bits in quo than the target int can hold
13114 if sizeof(host long) > sizeof(target int). This can
13115 happen even for native compilers in LP64 mode. In
13116 these cases, modulo the quo value with the largest
13117 number that the target int can hold while leaving one
13118 bit for the sign. */
13119 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13120 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13122 /* Dereference the quo pointer argument. */
13123 arg_quo = build_fold_indirect_ref (arg_quo);
13124 /* Proceed iff a valid pointer type was passed in. */
13125 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13127 /* Set the value. */
13129 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13130 build_int_cst (TREE_TYPE (arg_quo),
13132 TREE_SIDE_EFFECTS (result_quo) = 1;
13133 /* Combine the quo assignment with the rem. */
13134 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13135 result_quo, result_rem));
13143 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13144 resulting value as a tree with type TYPE. The mpfr precision is
13145 set to the precision of TYPE. We assume that this mpfr function
13146 returns zero if the result could be calculated exactly within the
13147 requested precision. In addition, the integer pointer represented
13148 by ARG_SG will be dereferenced and set to the appropriate signgam
13152 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13154 tree result = NULL_TREE;
13158 /* To proceed, MPFR must exactly represent the target floating point
13159 format, which only happens when the target base equals two. Also
13160 verify ARG is a constant and that ARG_SG is an int pointer. */
13161 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13162 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13163 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13164 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13166 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13168 /* In addition to NaN and Inf, the argument cannot be zero or a
13169 negative integer. */
13170 if (real_isfinite (ra)
13171 && ra->cl != rvc_zero
13172 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13174 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13175 const int prec = fmt->p;
13176 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13181 mpfr_init2 (m, prec);
13182 mpfr_from_real (m, ra, GMP_RNDN);
13183 mpfr_clear_flags ();
13184 inexact = mpfr_lgamma (m, &sg, m, rnd);
13185 result_lg = do_mpfr_ckconv (m, type, inexact);
13191 /* Dereference the arg_sg pointer argument. */
13192 arg_sg = build_fold_indirect_ref (arg_sg);
13193 /* Assign the signgam value into *arg_sg. */
13194 result_sg = fold_build2 (MODIFY_EXPR,
13195 TREE_TYPE (arg_sg), arg_sg,
13196 build_int_cst (TREE_TYPE (arg_sg), sg));
13197 TREE_SIDE_EFFECTS (result_sg) = 1;
13198 /* Combine the signgam assignment with the lgamma result. */
13199 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13200 result_sg, result_lg));
13208 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13209 function FUNC on it and return the resulting value as a tree with
13210 type TYPE. The mpfr precision is set to the precision of TYPE. We
13211 assume that function FUNC returns zero if the result could be
13212 calculated exactly within the requested precision. */
13215 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13217 tree result = NULL_TREE;
13221 /* To proceed, MPFR must exactly represent the target floating point
13222 format, which only happens when the target base equals two. */
13223 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13224 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13225 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13227 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13228 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13230 if (real_isfinite (re) && real_isfinite (im))
13232 const struct real_format *const fmt =
13233 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13234 const int prec = fmt->p;
13235 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13236 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13240 mpc_init2 (m, prec);
13241 mpfr_from_real (mpc_realref(m), re, rnd);
13242 mpfr_from_real (mpc_imagref(m), im, rnd);
13243 mpfr_clear_flags ();
13244 inexact = func (m, m, crnd);
13245 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13253 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13254 mpc function FUNC on it and return the resulting value as a tree
13255 with type TYPE. The mpfr precision is set to the precision of
13256 TYPE. We assume that function FUNC returns zero if the result
13257 could be calculated exactly within the requested precision. If
13258 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13259 in the arguments and/or results. */
13262 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13263 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13265 tree result = NULL_TREE;
13270 /* To proceed, MPFR must exactly represent the target floating point
13271 format, which only happens when the target base equals two. */
13272 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13273 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13274 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13275 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13276 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13278 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13279 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13280 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13281 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13284 || (real_isfinite (re0) && real_isfinite (im0)
13285 && real_isfinite (re1) && real_isfinite (im1)))
13287 const struct real_format *const fmt =
13288 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13289 const int prec = fmt->p;
13290 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13291 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13295 mpc_init2 (m0, prec);
13296 mpc_init2 (m1, prec);
13297 mpfr_from_real (mpc_realref(m0), re0, rnd);
13298 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13299 mpfr_from_real (mpc_realref(m1), re1, rnd);
13300 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13301 mpfr_clear_flags ();
13302 inexact = func (m0, m0, m1, crnd);
13303 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13312 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13313 a normal call should be emitted rather than expanding the function
13314 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13317 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13319 int nargs = gimple_call_num_args (stmt);
13321 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13323 ? gimple_call_arg_ptr (stmt, 0)
13324 : &error_mark_node), fcode);
13327 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13328 a normal call should be emitted rather than expanding the function
13329 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13330 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13331 passed as second argument. */
13334 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13335 enum built_in_function fcode)
13337 int nargs = gimple_call_num_args (stmt);
13339 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13341 ? gimple_call_arg_ptr (stmt, 0)
13342 : &error_mark_node), maxlen, fcode);
13345 /* Builtins with folding operations that operate on "..." arguments
13346 need special handling; we need to store the arguments in a convenient
13347 data structure before attempting any folding. Fortunately there are
13348 only a few builtins that fall into this category. FNDECL is the
13349 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13350 result of the function call is ignored. */
13353 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13354 bool ignore ATTRIBUTE_UNUSED)
13356 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13357 tree ret = NULL_TREE;
13361 case BUILT_IN_SPRINTF_CHK:
13362 case BUILT_IN_VSPRINTF_CHK:
13363 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13366 case BUILT_IN_SNPRINTF_CHK:
13367 case BUILT_IN_VSNPRINTF_CHK:
13368 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13375 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13376 TREE_NO_WARNING (ret) = 1;
13382 /* A wrapper function for builtin folding that prevents warnings for
13383 "statement without effect" and the like, caused by removing the
13384 call node earlier than the warning is generated. */
13387 fold_call_stmt (gimple stmt, bool ignore)
13389 tree ret = NULL_TREE;
13390 tree fndecl = gimple_call_fndecl (stmt);
13391 location_t loc = gimple_location (stmt);
13393 && TREE_CODE (fndecl) == FUNCTION_DECL
13394 && DECL_BUILT_IN (fndecl)
13395 && !gimple_call_va_arg_pack_p (stmt))
13397 int nargs = gimple_call_num_args (stmt);
13398 tree *args = (nargs > 0
13399 ? gimple_call_arg_ptr (stmt, 0)
13400 : &error_mark_node);
13402 if (avoid_folding_inline_builtin (fndecl))
13404 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13406 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13410 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13411 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13413 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13416 /* Propagate location information from original call to
13417 expansion of builtin. Otherwise things like
13418 maybe_emit_chk_warning, that operate on the expansion
13419 of a builtin, will use the wrong location information. */
13420 if (gimple_has_location (stmt))
13422 tree realret = ret;
13423 if (TREE_CODE (ret) == NOP_EXPR)
13424 realret = TREE_OPERAND (ret, 0);
13425 if (CAN_HAVE_LOCATION_P (realret)
13426 && !EXPR_HAS_LOCATION (realret))
13427 SET_EXPR_LOCATION (realret, loc);
13437 /* Look up the function in built_in_decls that corresponds to DECL
13438 and set ASMSPEC as its user assembler name. DECL must be a
13439 function decl that declares a builtin. */
13442 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13445 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13446 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13449 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13450 set_user_assembler_name (builtin, asmspec);
13451 switch (DECL_FUNCTION_CODE (decl))
13453 case BUILT_IN_MEMCPY:
13454 init_block_move_fn (asmspec);
13455 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13457 case BUILT_IN_MEMSET:
13458 init_block_clear_fn (asmspec);
13459 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13461 case BUILT_IN_MEMMOVE:
13462 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13464 case BUILT_IN_MEMCMP:
13465 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13467 case BUILT_IN_ABORT:
13468 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13471 if (INT_TYPE_SIZE < BITS_PER_WORD)
13473 set_user_assembler_libfunc ("ffs", asmspec);
13474 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13475 MODE_INT, 0), "ffs");
13483 /* Return true if DECL is a builtin that expands to a constant or similarly
13486 is_simple_builtin (tree decl)
13488 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13489 switch (DECL_FUNCTION_CODE (decl))
13491 /* Builtins that expand to constants. */
13492 case BUILT_IN_CONSTANT_P:
13493 case BUILT_IN_EXPECT:
13494 case BUILT_IN_OBJECT_SIZE:
13495 case BUILT_IN_UNREACHABLE:
13496 /* Simple register moves or loads from stack. */
13497 case BUILT_IN_ASSUME_ALIGNED:
13498 case BUILT_IN_RETURN_ADDRESS:
13499 case BUILT_IN_EXTRACT_RETURN_ADDR:
13500 case BUILT_IN_FROB_RETURN_ADDR:
13501 case BUILT_IN_RETURN:
13502 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13503 case BUILT_IN_FRAME_ADDRESS:
13504 case BUILT_IN_VA_END:
13505 case BUILT_IN_STACK_SAVE:
13506 case BUILT_IN_STACK_RESTORE:
13507 /* Exception state returns or moves registers around. */
13508 case BUILT_IN_EH_FILTER:
13509 case BUILT_IN_EH_POINTER:
13510 case BUILT_IN_EH_COPY_VALUES:
13520 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13521 most probably expanded inline into reasonably simple code. This is a
13522 superset of is_simple_builtin. */
13524 is_inexpensive_builtin (tree decl)
13528 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13530 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13531 switch (DECL_FUNCTION_CODE (decl))
13534 case BUILT_IN_ALLOCA:
13535 case BUILT_IN_BSWAP32:
13536 case BUILT_IN_BSWAP64:
13538 case BUILT_IN_CLZIMAX:
13539 case BUILT_IN_CLZL:
13540 case BUILT_IN_CLZLL:
13542 case BUILT_IN_CTZIMAX:
13543 case BUILT_IN_CTZL:
13544 case BUILT_IN_CTZLL:
13546 case BUILT_IN_FFSIMAX:
13547 case BUILT_IN_FFSL:
13548 case BUILT_IN_FFSLL:
13549 case BUILT_IN_IMAXABS:
13550 case BUILT_IN_FINITE:
13551 case BUILT_IN_FINITEF:
13552 case BUILT_IN_FINITEL:
13553 case BUILT_IN_FINITED32:
13554 case BUILT_IN_FINITED64:
13555 case BUILT_IN_FINITED128:
13556 case BUILT_IN_FPCLASSIFY:
13557 case BUILT_IN_ISFINITE:
13558 case BUILT_IN_ISINF_SIGN:
13559 case BUILT_IN_ISINF:
13560 case BUILT_IN_ISINFF:
13561 case BUILT_IN_ISINFL:
13562 case BUILT_IN_ISINFD32:
13563 case BUILT_IN_ISINFD64:
13564 case BUILT_IN_ISINFD128:
13565 case BUILT_IN_ISNAN:
13566 case BUILT_IN_ISNANF:
13567 case BUILT_IN_ISNANL:
13568 case BUILT_IN_ISNAND32:
13569 case BUILT_IN_ISNAND64:
13570 case BUILT_IN_ISNAND128:
13571 case BUILT_IN_ISNORMAL:
13572 case BUILT_IN_ISGREATER:
13573 case BUILT_IN_ISGREATEREQUAL:
13574 case BUILT_IN_ISLESS:
13575 case BUILT_IN_ISLESSEQUAL:
13576 case BUILT_IN_ISLESSGREATER:
13577 case BUILT_IN_ISUNORDERED:
13578 case BUILT_IN_VA_ARG_PACK:
13579 case BUILT_IN_VA_ARG_PACK_LEN:
13580 case BUILT_IN_VA_COPY:
13581 case BUILT_IN_TRAP:
13582 case BUILT_IN_SAVEREGS:
13583 case BUILT_IN_POPCOUNTL:
13584 case BUILT_IN_POPCOUNTLL:
13585 case BUILT_IN_POPCOUNTIMAX:
13586 case BUILT_IN_POPCOUNT:
13587 case BUILT_IN_PARITYL:
13588 case BUILT_IN_PARITYLL:
13589 case BUILT_IN_PARITYIMAX:
13590 case BUILT_IN_PARITY:
13591 case BUILT_IN_LABS:
13592 case BUILT_IN_LLABS:
13593 case BUILT_IN_PREFETCH:
13597 return is_simple_builtin (decl);