1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
55 #ifndef SLOW_UNALIGNED_ACCESS
56 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
64 struct target_builtins default_target_builtins;
66 struct target_builtins *this_target_builtins = &default_target_builtins;
69 /* Define the names of the builtin function types and codes. */
70 const char *const built_in_class_names[4]
71 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
73 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
74 const char * built_in_names[(int) END_BUILTINS] =
76 #include "builtins.def"
80 /* Setup an array of _DECL trees, make sure each element is
81 initialized to NULL_TREE. */
82 tree built_in_decls[(int) END_BUILTINS];
83 /* Declarations used when constructing the builtin implicitly in the compiler.
84 It may be NULL_TREE when this is invalid (for instance runtime is not
85 required to implement the function call in all cases). */
86 tree implicit_built_in_decls[(int) END_BUILTINS];
88 static const char *c_getstr (tree);
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
94 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
95 static rtx result_vector (int, rtx);
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
110 static rtx expand_builtin_interclass_mathfn (tree, rtx);
111 static rtx expand_builtin_sincos (tree);
112 static rtx expand_builtin_cexpi (tree, rtx);
113 static rtx expand_builtin_int_roundingfn (tree, rtx);
114 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
115 static rtx expand_builtin_next_arg (void);
116 static rtx expand_builtin_va_start (tree);
117 static rtx expand_builtin_va_end (tree);
118 static rtx expand_builtin_va_copy (tree);
119 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 enum machine_mode, int);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_alloca (tree, bool);
137 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
138 static rtx expand_builtin_frame_address (tree, tree);
139 static tree stabilize_va_list_loc (location_t, tree, int);
140 static rtx expand_builtin_expect (tree, rtx);
141 static tree fold_builtin_constant_p (tree);
142 static tree fold_builtin_expect (location_t, tree, tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree fold_builtin_nan (tree, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static bool integer_valued_real_p (tree);
150 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
151 static bool readonly_data_expr (tree);
152 static rtx expand_builtin_fabs (tree, rtx, rtx);
153 static rtx expand_builtin_signbit (tree, rtx);
154 static tree fold_builtin_sqrt (location_t, tree, tree);
155 static tree fold_builtin_cbrt (location_t, tree, tree);
156 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
158 static tree fold_builtin_cos (location_t, tree, tree, tree);
159 static tree fold_builtin_cosh (location_t, tree, tree, tree);
160 static tree fold_builtin_tan (tree, tree);
161 static tree fold_builtin_trunc (location_t, tree, tree);
162 static tree fold_builtin_floor (location_t, tree, tree);
163 static tree fold_builtin_ceil (location_t, tree, tree);
164 static tree fold_builtin_round (location_t, tree, tree);
165 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
166 static tree fold_builtin_bitop (tree, tree);
167 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
168 static tree fold_builtin_strchr (location_t, tree, tree, tree);
169 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
171 static tree fold_builtin_strcmp (location_t, tree, tree);
172 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
173 static tree fold_builtin_signbit (location_t, tree, tree);
174 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
175 static tree fold_builtin_isascii (location_t, tree);
176 static tree fold_builtin_toascii (location_t, tree);
177 static tree fold_builtin_isdigit (location_t, tree);
178 static tree fold_builtin_fabs (location_t, tree, tree);
179 static tree fold_builtin_abs (location_t, tree, tree);
180 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
182 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
183 static tree fold_builtin_0 (location_t, tree, bool);
184 static tree fold_builtin_1 (location_t, tree, tree, bool);
185 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
186 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
187 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
188 static tree fold_builtin_varargs (location_t, tree, tree, bool);
190 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
191 static tree fold_builtin_strstr (location_t, tree, tree, tree);
192 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
193 static tree fold_builtin_strcat (location_t, tree, tree);
194 static tree fold_builtin_strncat (location_t, tree, tree, tree);
195 static tree fold_builtin_strspn (location_t, tree, tree);
196 static tree fold_builtin_strcspn (location_t, tree, tree);
197 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
199 static rtx expand_builtin_object_size (tree);
200 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
201 enum built_in_function);
202 static void maybe_emit_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
204 static void maybe_emit_free_warning (tree);
205 static tree fold_builtin_object_size (tree, tree);
206 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
207 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
208 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
209 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
210 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
211 enum built_in_function);
212 static bool init_target_chars (void);
214 static unsigned HOST_WIDE_INT target_newline;
215 static unsigned HOST_WIDE_INT target_percent;
216 static unsigned HOST_WIDE_INT target_c;
217 static unsigned HOST_WIDE_INT target_s;
218 static char target_percent_c[3];
219 static char target_percent_s[3];
220 static char target_percent_s_newline[4];
221 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
223 static tree do_mpfr_arg2 (tree, tree, tree,
224 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
225 static tree do_mpfr_arg3 (tree, tree, tree, tree,
226 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
227 static tree do_mpfr_sincos (tree, tree, tree);
228 static tree do_mpfr_bessel_n (tree, tree, tree,
229 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
230 const REAL_VALUE_TYPE *, bool);
231 static tree do_mpfr_remquo (tree, tree, tree);
232 static tree do_mpfr_lgamma_r (tree, tree, tree);
234 /* Return true if NAME starts with __builtin_ or __sync_. */
237 is_builtin_name (const char *name)
239 if (strncmp (name, "__builtin_", 10) == 0)
241 if (strncmp (name, "__sync_", 7) == 0)
247 /* Return true if DECL is a function symbol representing a built-in. */
250 is_builtin_fn (tree decl)
252 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
256 /* Return true if NODE should be considered for inline expansion regardless
257 of the optimization level. This means whenever a function is invoked with
258 its "internal" name, which normally contains the prefix "__builtin". */
261 called_as_built_in (tree node)
263 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
264 we want the name used to call the function, not the name it
266 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
267 return is_builtin_name (name);
270 /* Return the alignment in bits of EXP, an object.
271 Don't return more than MAX_ALIGN no matter what. */
274 get_object_alignment (tree exp, unsigned int max_align)
276 HOST_WIDE_INT bitsize, bitpos;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
280 unsigned int align, inner;
282 /* Get the innermost object and the constant (bitpos) and possibly
283 variable (offset) offset of the access. */
284 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
285 &mode, &unsignedp, &volatilep, true);
287 /* Extract alignment information from the innermost object and
288 possibly adjust bitpos and offset. */
289 if (TREE_CODE (exp) == CONST_DECL)
290 exp = DECL_INITIAL (exp);
292 && TREE_CODE (exp) != LABEL_DECL)
293 align = DECL_ALIGN (exp);
294 else if (CONSTANT_CLASS_P (exp))
296 align = TYPE_ALIGN (TREE_TYPE (exp));
297 #ifdef CONSTANT_ALIGNMENT
298 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
301 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
302 align = TYPE_ALIGN (TREE_TYPE (exp));
303 else if (TREE_CODE (exp) == INDIRECT_REF)
304 align = TYPE_ALIGN (TREE_TYPE (exp));
305 else if (TREE_CODE (exp) == MEM_REF)
307 tree addr = TREE_OPERAND (exp, 0);
308 struct ptr_info_def *pi;
309 if (TREE_CODE (addr) == BIT_AND_EXPR
310 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
312 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
313 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
314 align *= BITS_PER_UNIT;
315 addr = TREE_OPERAND (addr, 0);
318 align = BITS_PER_UNIT;
319 if (TREE_CODE (addr) == SSA_NAME
320 && (pi = SSA_NAME_PTR_INFO (addr)))
322 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
323 align = MAX (pi->align * BITS_PER_UNIT, align);
325 else if (TREE_CODE (addr) == ADDR_EXPR)
326 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
328 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
330 else if (TREE_CODE (exp) == TARGET_MEM_REF)
332 struct ptr_info_def *pi;
333 tree addr = TMR_BASE (exp);
334 if (TREE_CODE (addr) == BIT_AND_EXPR
335 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
337 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
338 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
339 align *= BITS_PER_UNIT;
340 addr = TREE_OPERAND (addr, 0);
343 align = BITS_PER_UNIT;
344 if (TREE_CODE (addr) == SSA_NAME
345 && (pi = SSA_NAME_PTR_INFO (addr)))
347 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
348 align = MAX (pi->align * BITS_PER_UNIT, align);
350 else if (TREE_CODE (addr) == ADDR_EXPR)
351 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
353 if (TMR_OFFSET (exp))
354 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
355 if (TMR_INDEX (exp) && TMR_STEP (exp))
357 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
358 align = MIN (align, (step & -step) * BITS_PER_UNIT);
360 else if (TMR_INDEX (exp))
361 align = BITS_PER_UNIT;
362 if (TMR_INDEX2 (exp))
363 align = BITS_PER_UNIT;
366 align = BITS_PER_UNIT;
368 /* If there is a non-constant offset part extract the maximum
369 alignment that can prevail. */
375 if (TREE_CODE (offset) == PLUS_EXPR)
377 next_offset = TREE_OPERAND (offset, 0);
378 offset = TREE_OPERAND (offset, 1);
382 if (host_integerp (offset, 1))
384 /* Any overflow in calculating offset_bits won't change
387 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
390 inner = MIN (inner, (offset_bits & -offset_bits));
392 else if (TREE_CODE (offset) == MULT_EXPR
393 && host_integerp (TREE_OPERAND (offset, 1), 1))
395 /* Any overflow in calculating offset_factor won't change
397 unsigned offset_factor
398 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
402 inner = MIN (inner, (offset_factor & -offset_factor));
406 inner = MIN (inner, BITS_PER_UNIT);
409 offset = next_offset;
412 /* Alignment is innermost object alignment adjusted by the constant
413 and non-constant offset parts. */
414 align = MIN (align, inner);
415 bitpos = bitpos & (align - 1);
417 /* align and bitpos now specify known low bits of the pointer.
418 ptr & (align - 1) == bitpos. */
421 align = (bitpos & -bitpos);
423 return MIN (align, max_align);
426 /* Returns true iff we can trust that alignment information has been
427 calculated properly. */
430 can_trust_pointer_alignment (void)
432 /* We rely on TER to compute accurate alignment information. */
433 return (optimize && flag_tree_ter);
436 /* Return the alignment in bits of EXP, a pointer valued expression.
437 But don't return more than MAX_ALIGN no matter what.
438 The alignment returned is, by default, the alignment of the thing that
439 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
441 Otherwise, look at the expression to see if we can do better, i.e., if the
442 expression is actually pointing at an object whose alignment is tighter. */
445 get_pointer_alignment (tree exp, unsigned int max_align)
449 if (TREE_CODE (exp) == ADDR_EXPR)
450 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
454 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
457 return BITS_PER_UNIT;
458 if (pi->misalign != 0)
459 align = (pi->misalign & -pi->misalign);
462 return MIN (max_align, align * BITS_PER_UNIT);
465 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
468 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
469 way, because it could contain a zero byte in the middle.
470 TREE_STRING_LENGTH is the size of the character array, not the string.
472 ONLY_VALUE should be nonzero if the result is not going to be emitted
473 into the instruction stream and zero if it is going to be expanded.
474 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
475 is returned, otherwise NULL, since
476 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
477 evaluate the side-effects.
479 The value returned is of type `ssizetype'.
481 Unfortunately, string_constant can't access the values of const char
482 arrays with initializers, so neither can we do so here. */
485 c_strlen (tree src, int only_value)
488 HOST_WIDE_INT offset;
494 if (TREE_CODE (src) == COND_EXPR
495 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
499 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
500 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
501 if (tree_int_cst_equal (len1, len2))
505 if (TREE_CODE (src) == COMPOUND_EXPR
506 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
507 return c_strlen (TREE_OPERAND (src, 1), only_value);
509 loc = EXPR_LOC_OR_HERE (src);
511 src = string_constant (src, &offset_node);
515 max = TREE_STRING_LENGTH (src) - 1;
516 ptr = TREE_STRING_POINTER (src);
518 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
520 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
521 compute the offset to the following null if we don't know where to
522 start searching for it. */
525 for (i = 0; i < max; i++)
529 /* We don't know the starting offset, but we do know that the string
530 has no internal zero bytes. We can assume that the offset falls
531 within the bounds of the string; otherwise, the programmer deserves
532 what he gets. Subtract the offset from the length of the string,
533 and return that. This would perhaps not be valid if we were dealing
534 with named arrays in addition to literal string constants. */
536 return size_diffop_loc (loc, size_int (max), offset_node);
539 /* We have a known offset into the string. Start searching there for
540 a null character if we can represent it as a single HOST_WIDE_INT. */
541 if (offset_node == 0)
543 else if (! host_integerp (offset_node, 0))
546 offset = tree_low_cst (offset_node, 0);
548 /* If the offset is known to be out of bounds, warn, and call strlen at
550 if (offset < 0 || offset > max)
552 /* Suppress multiple warnings for propagated constant strings. */
553 if (! TREE_NO_WARNING (src))
555 warning_at (loc, 0, "offset outside bounds of constant string");
556 TREE_NO_WARNING (src) = 1;
561 /* Use strlen to search for the first zero byte. Since any strings
562 constructed with build_string will have nulls appended, we win even
563 if we get handed something like (char[4])"abcd".
565 Since OFFSET is our starting index into the string, no further
566 calculation is needed. */
567 return ssize_int (strlen (ptr + offset));
570 /* Return a char pointer for a C string if it is a string constant
571 or sum of string constant and integer constant. */
578 src = string_constant (src, &offset_node);
582 if (offset_node == 0)
583 return TREE_STRING_POINTER (src);
584 else if (!host_integerp (offset_node, 1)
585 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
588 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
591 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
592 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
595 c_readstr (const char *str, enum machine_mode mode)
601 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
606 for (i = 0; i < GET_MODE_SIZE (mode); i++)
609 if (WORDS_BIG_ENDIAN)
610 j = GET_MODE_SIZE (mode) - i - 1;
611 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
612 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
613 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
615 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
618 ch = (unsigned char) str[i];
619 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
621 return immed_double_const (c[0], c[1], mode);
624 /* Cast a target constant CST to target CHAR and if that value fits into
625 host char type, return zero and put that value into variable pointed to by
629 target_char_cast (tree cst, char *p)
631 unsigned HOST_WIDE_INT val, hostval;
633 if (!host_integerp (cst, 1)
634 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
637 val = tree_low_cst (cst, 1);
638 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
639 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
642 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
643 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
652 /* Similar to save_expr, but assumes that arbitrary code is not executed
653 in between the multiple evaluations. In particular, we assume that a
654 non-addressable local variable will not be modified. */
657 builtin_save_expr (tree exp)
659 if (TREE_ADDRESSABLE (exp) == 0
660 && (TREE_CODE (exp) == PARM_DECL
661 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
664 return save_expr (exp);
667 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
668 times to get the address of either a higher stack frame, or a return
669 address located within it (depending on FNDECL_CODE). */
672 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
676 #ifdef INITIAL_FRAME_ADDRESS_RTX
677 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
681 /* For a zero count with __builtin_return_address, we don't care what
682 frame address we return, because target-specific definitions will
683 override us. Therefore frame pointer elimination is OK, and using
684 the soft frame pointer is OK.
686 For a nonzero count, or a zero count with __builtin_frame_address,
687 we require a stable offset from the current frame pointer to the
688 previous one, so we must use the hard frame pointer, and
689 we must disable frame pointer elimination. */
690 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
691 tem = frame_pointer_rtx;
694 tem = hard_frame_pointer_rtx;
696 /* Tell reload not to eliminate the frame pointer. */
697 crtl->accesses_prior_frames = 1;
701 /* Some machines need special handling before we can access
702 arbitrary frames. For example, on the SPARC, we must first flush
703 all register windows to the stack. */
704 #ifdef SETUP_FRAME_ADDRESSES
706 SETUP_FRAME_ADDRESSES ();
709 /* On the SPARC, the return address is not in the frame, it is in a
710 register. There is no way to access it off of the current frame
711 pointer, but it can be accessed off the previous frame pointer by
712 reading the value from the register window save area. */
713 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
714 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
718 /* Scan back COUNT frames to the specified frame. */
719 for (i = 0; i < count; i++)
721 /* Assume the dynamic chain pointer is in the word that the
722 frame address points to, unless otherwise specified. */
723 #ifdef DYNAMIC_CHAIN_ADDRESS
724 tem = DYNAMIC_CHAIN_ADDRESS (tem);
726 tem = memory_address (Pmode, tem);
727 tem = gen_frame_mem (Pmode, tem);
728 tem = copy_to_reg (tem);
731 /* For __builtin_frame_address, return what we've got. But, on
732 the SPARC for example, we may have to add a bias. */
733 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
734 #ifdef FRAME_ADDR_RTX
735 return FRAME_ADDR_RTX (tem);
740 /* For __builtin_return_address, get the return address from that frame. */
741 #ifdef RETURN_ADDR_RTX
742 tem = RETURN_ADDR_RTX (count, tem);
744 tem = memory_address (Pmode,
745 plus_constant (tem, GET_MODE_SIZE (Pmode)));
746 tem = gen_frame_mem (Pmode, tem);
751 /* Alias set used for setjmp buffer. */
752 static alias_set_type setjmp_alias_set = -1;
754 /* Construct the leading half of a __builtin_setjmp call. Control will
755 return to RECEIVER_LABEL. This is also called directly by the SJLJ
756 exception handling code. */
759 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
761 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
765 if (setjmp_alias_set == -1)
766 setjmp_alias_set = new_alias_set ();
768 buf_addr = convert_memory_address (Pmode, buf_addr);
770 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
772 /* We store the frame pointer and the address of receiver_label in
773 the buffer and use the rest of it for the stack save area, which
774 is machine-dependent. */
776 mem = gen_rtx_MEM (Pmode, buf_addr);
777 set_mem_alias_set (mem, setjmp_alias_set);
778 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
780 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
781 set_mem_alias_set (mem, setjmp_alias_set);
783 emit_move_insn (validize_mem (mem),
784 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
786 stack_save = gen_rtx_MEM (sa_mode,
787 plus_constant (buf_addr,
788 2 * GET_MODE_SIZE (Pmode)));
789 set_mem_alias_set (stack_save, setjmp_alias_set);
790 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
792 /* If there is further processing to do, do it. */
793 #ifdef HAVE_builtin_setjmp_setup
794 if (HAVE_builtin_setjmp_setup)
795 emit_insn (gen_builtin_setjmp_setup (buf_addr));
798 /* Tell optimize_save_area_alloca that extra work is going to
799 need to go on during alloca. */
800 cfun->calls_setjmp = 1;
802 /* We have a nonlocal label. */
803 cfun->has_nonlocal_label = 1;
806 /* Construct the trailing part of a __builtin_setjmp call. This is
807 also called directly by the SJLJ exception handling code. */
810 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
814 /* Clobber the FP when we get here, so we have to make sure it's
815 marked as used by this function. */
816 emit_use (hard_frame_pointer_rtx);
818 /* Mark the static chain as clobbered here so life information
819 doesn't get messed up for it. */
820 chain = targetm.calls.static_chain (current_function_decl, true);
821 if (chain && REG_P (chain))
822 emit_clobber (chain);
824 /* Now put in the code to restore the frame pointer, and argument
825 pointer, if needed. */
826 #ifdef HAVE_nonlocal_goto
827 if (! HAVE_nonlocal_goto)
830 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
831 /* This might change the hard frame pointer in ways that aren't
832 apparent to early optimization passes, so force a clobber. */
833 emit_clobber (hard_frame_pointer_rtx);
836 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
837 if (fixed_regs[ARG_POINTER_REGNUM])
839 #ifdef ELIMINABLE_REGS
841 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
843 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
844 if (elim_regs[i].from == ARG_POINTER_REGNUM
845 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
848 if (i == ARRAY_SIZE (elim_regs))
851 /* Now restore our arg pointer from the address at which it
852 was saved in our stack frame. */
853 emit_move_insn (crtl->args.internal_arg_pointer,
854 copy_to_reg (get_arg_pointer_save_area ()));
859 #ifdef HAVE_builtin_setjmp_receiver
860 if (HAVE_builtin_setjmp_receiver)
861 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
864 #ifdef HAVE_nonlocal_goto_receiver
865 if (HAVE_nonlocal_goto_receiver)
866 emit_insn (gen_nonlocal_goto_receiver ());
871 /* We must not allow the code we just generated to be reordered by
872 scheduling. Specifically, the update of the frame pointer must
873 happen immediately, not later. */
874 emit_insn (gen_blockage ());
877 /* __builtin_longjmp is passed a pointer to an array of five words (not
878 all will be used on all machines). It operates similarly to the C
879 library function of the same name, but is more efficient. Much of
880 the code below is copied from the handling of non-local gotos. */
883 expand_builtin_longjmp (rtx buf_addr, rtx value)
885 rtx fp, lab, stack, insn, last;
886 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
888 /* DRAP is needed for stack realign if longjmp is expanded to current
890 if (SUPPORTS_STACK_ALIGNMENT)
891 crtl->need_drap = true;
893 if (setjmp_alias_set == -1)
894 setjmp_alias_set = new_alias_set ();
896 buf_addr = convert_memory_address (Pmode, buf_addr);
898 buf_addr = force_reg (Pmode, buf_addr);
900 /* We require that the user must pass a second argument of 1, because
901 that is what builtin_setjmp will return. */
902 gcc_assert (value == const1_rtx);
904 last = get_last_insn ();
905 #ifdef HAVE_builtin_longjmp
906 if (HAVE_builtin_longjmp)
907 emit_insn (gen_builtin_longjmp (buf_addr));
911 fp = gen_rtx_MEM (Pmode, buf_addr);
912 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
913 GET_MODE_SIZE (Pmode)));
915 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
916 2 * GET_MODE_SIZE (Pmode)));
917 set_mem_alias_set (fp, setjmp_alias_set);
918 set_mem_alias_set (lab, setjmp_alias_set);
919 set_mem_alias_set (stack, setjmp_alias_set);
921 /* Pick up FP, label, and SP from the block and jump. This code is
922 from expand_goto in stmt.c; see there for detailed comments. */
923 #ifdef HAVE_nonlocal_goto
924 if (HAVE_nonlocal_goto)
925 /* We have to pass a value to the nonlocal_goto pattern that will
926 get copied into the static_chain pointer, but it does not matter
927 what that value is, because builtin_setjmp does not use it. */
928 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
932 lab = copy_to_reg (lab);
934 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
935 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
937 emit_move_insn (hard_frame_pointer_rtx, fp);
938 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
940 emit_use (hard_frame_pointer_rtx);
941 emit_use (stack_pointer_rtx);
942 emit_indirect_jump (lab);
946 /* Search backwards and mark the jump insn as a non-local goto.
947 Note that this precludes the use of __builtin_longjmp to a
948 __builtin_setjmp target in the same function. However, we've
949 already cautioned the user that these functions are for
950 internal exception handling use only. */
951 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
953 gcc_assert (insn != last);
957 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
960 else if (CALL_P (insn))
965 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
966 and the address of the save area. */
969 expand_builtin_nonlocal_goto (tree exp)
971 tree t_label, t_save_area;
972 rtx r_label, r_save_area, r_fp, r_sp, insn;
974 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
977 t_label = CALL_EXPR_ARG (exp, 0);
978 t_save_area = CALL_EXPR_ARG (exp, 1);
980 r_label = expand_normal (t_label);
981 r_label = convert_memory_address (Pmode, r_label);
982 r_save_area = expand_normal (t_save_area);
983 r_save_area = convert_memory_address (Pmode, r_save_area);
984 /* Copy the address of the save location to a register just in case it was based
985 on the frame pointer. */
986 r_save_area = copy_to_reg (r_save_area);
987 r_fp = gen_rtx_MEM (Pmode, r_save_area);
988 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
989 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
991 crtl->has_nonlocal_goto = 1;
993 #ifdef HAVE_nonlocal_goto
994 /* ??? We no longer need to pass the static chain value, afaik. */
995 if (HAVE_nonlocal_goto)
996 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1000 r_label = copy_to_reg (r_label);
1002 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1003 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1005 /* Restore frame pointer for containing function.
1006 This sets the actual hard register used for the frame pointer
1007 to the location of the function's incoming static chain info.
1008 The non-local goto handler will then adjust it to contain the
1009 proper value and reload the argument pointer, if needed. */
1010 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1011 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
1013 /* USE of hard_frame_pointer_rtx added for consistency;
1014 not clear if really needed. */
1015 emit_use (hard_frame_pointer_rtx);
1016 emit_use (stack_pointer_rtx);
1018 /* If the architecture is using a GP register, we must
1019 conservatively assume that the target function makes use of it.
1020 The prologue of functions with nonlocal gotos must therefore
1021 initialize the GP register to the appropriate value, and we
1022 must then make sure that this value is live at the point
1023 of the jump. (Note that this doesn't necessarily apply
1024 to targets with a nonlocal_goto pattern; they are free
1025 to implement it in their own way. Note also that this is
1026 a no-op if the GP register is a global invariant.) */
1027 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1028 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1029 emit_use (pic_offset_table_rtx);
1031 emit_indirect_jump (r_label);
1034 /* Search backwards to the jump insn and mark it as a
1036 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1040 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1043 else if (CALL_P (insn))
1050 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1051 (not all will be used on all machines) that was passed to __builtin_setjmp.
1052 It updates the stack pointer in that block to correspond to the current
1056 expand_builtin_update_setjmp_buf (rtx buf_addr)
1058 enum machine_mode sa_mode = Pmode;
1062 #ifdef HAVE_save_stack_nonlocal
1063 if (HAVE_save_stack_nonlocal)
1064 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1066 #ifdef STACK_SAVEAREA_MODE
1067 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1071 = gen_rtx_MEM (sa_mode,
1074 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1078 emit_insn (gen_setjmp ());
1081 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1084 /* Expand a call to __builtin_prefetch. For a target that does not support
1085 data prefetch, evaluate the memory address argument in case it has side
1089 expand_builtin_prefetch (tree exp)
1091 tree arg0, arg1, arg2;
1095 if (!validate_arglist (exp, POINTER_TYPE, 0))
1098 arg0 = CALL_EXPR_ARG (exp, 0);
1100 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1101 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1103 nargs = call_expr_nargs (exp);
1105 arg1 = CALL_EXPR_ARG (exp, 1);
1107 arg1 = integer_zero_node;
1109 arg2 = CALL_EXPR_ARG (exp, 2);
1111 arg2 = integer_three_node;
1113 /* Argument 0 is an address. */
1114 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1116 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1117 if (TREE_CODE (arg1) != INTEGER_CST)
1119 error ("second argument to %<__builtin_prefetch%> must be a constant");
1120 arg1 = integer_zero_node;
1122 op1 = expand_normal (arg1);
1123 /* Argument 1 must be either zero or one. */
1124 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1126 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1131 /* Argument 2 (locality) must be a compile-time constant int. */
1132 if (TREE_CODE (arg2) != INTEGER_CST)
1134 error ("third argument to %<__builtin_prefetch%> must be a constant");
1135 arg2 = integer_zero_node;
1137 op2 = expand_normal (arg2);
1138 /* Argument 2 must be 0, 1, 2, or 3. */
1139 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1141 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1145 #ifdef HAVE_prefetch
1148 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1150 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1151 || (GET_MODE (op0) != Pmode))
1153 op0 = convert_memory_address (Pmode, op0);
1154 op0 = force_reg (Pmode, op0);
1156 emit_insn (gen_prefetch (op0, op1, op2));
1160 /* Don't do anything with direct references to volatile memory, but
1161 generate code to handle other side effects. */
1162 if (!MEM_P (op0) && side_effects_p (op0))
1166 /* Get a MEM rtx for expression EXP which is the address of an operand
1167 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1168 the maximum length of the block of memory that might be accessed or
1172 get_memory_rtx (tree exp, tree len)
1174 tree orig_exp = exp;
1178 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1179 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1180 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1181 exp = TREE_OPERAND (exp, 0);
1183 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1184 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1186 /* Get an expression we can use to find the attributes to assign to MEM.
1187 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1188 we can. First remove any nops. */
1189 while (CONVERT_EXPR_P (exp)
1190 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1191 exp = TREE_OPERAND (exp, 0);
1194 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1195 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1196 && host_integerp (TREE_OPERAND (exp, 1), 0)
1197 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1198 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1199 else if (TREE_CODE (exp) == ADDR_EXPR)
1200 exp = TREE_OPERAND (exp, 0);
1201 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1202 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1206 /* Honor attributes derived from exp, except for the alias set
1207 (as builtin stringops may alias with anything) and the size
1208 (as stringops may access multiple array elements). */
1211 set_mem_attributes (mem, exp, 0);
1214 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1216 /* Allow the string and memory builtins to overflow from one
1217 field into another, see http://gcc.gnu.org/PR23561.
1218 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1219 memory accessed by the string or memory builtin will fit
1220 within the field. */
1221 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1223 tree mem_expr = MEM_EXPR (mem);
1224 HOST_WIDE_INT offset = -1, length = -1;
1227 while (TREE_CODE (inner) == ARRAY_REF
1228 || CONVERT_EXPR_P (inner)
1229 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1230 || TREE_CODE (inner) == SAVE_EXPR)
1231 inner = TREE_OPERAND (inner, 0);
1233 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1235 if (MEM_OFFSET (mem)
1236 && CONST_INT_P (MEM_OFFSET (mem)))
1237 offset = INTVAL (MEM_OFFSET (mem));
1239 if (offset >= 0 && len && host_integerp (len, 0))
1240 length = tree_low_cst (len, 0);
1242 while (TREE_CODE (inner) == COMPONENT_REF)
1244 tree field = TREE_OPERAND (inner, 1);
1245 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1246 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1248 /* Bitfields are generally not byte-addressable. */
1249 gcc_assert (!DECL_BIT_FIELD (field)
1250 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1251 % BITS_PER_UNIT) == 0
1252 && host_integerp (DECL_SIZE (field), 0)
1253 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1254 % BITS_PER_UNIT) == 0));
1256 /* If we can prove that the memory starting at XEXP (mem, 0) and
1257 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1258 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1259 fields without DECL_SIZE_UNIT like flexible array members. */
1261 && DECL_SIZE_UNIT (field)
1262 && host_integerp (DECL_SIZE_UNIT (field), 0))
1265 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1268 && offset + length <= size)
1273 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1274 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1275 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1283 mem_expr = TREE_OPERAND (mem_expr, 0);
1284 inner = TREE_OPERAND (inner, 0);
1287 if (mem_expr == NULL)
1289 if (mem_expr != MEM_EXPR (mem))
1291 set_mem_expr (mem, mem_expr);
1292 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1295 set_mem_alias_set (mem, 0);
1296 set_mem_size (mem, NULL_RTX);
1302 /* Built-in functions to perform an untyped call and return. */
1304 #define apply_args_mode \
1305 (this_target_builtins->x_apply_args_mode)
1306 #define apply_result_mode \
1307 (this_target_builtins->x_apply_result_mode)
1309 /* Return the size required for the block returned by __builtin_apply_args,
1310 and initialize apply_args_mode. */
1313 apply_args_size (void)
1315 static int size = -1;
1318 enum machine_mode mode;
1320 /* The values computed by this function never change. */
1323 /* The first value is the incoming arg-pointer. */
1324 size = GET_MODE_SIZE (Pmode);
1326 /* The second value is the structure value address unless this is
1327 passed as an "invisible" first argument. */
1328 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1329 size += GET_MODE_SIZE (Pmode);
1331 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1332 if (FUNCTION_ARG_REGNO_P (regno))
1334 mode = targetm.calls.get_raw_arg_mode (regno);
1336 gcc_assert (mode != VOIDmode);
1338 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1339 if (size % align != 0)
1340 size = CEIL (size, align) * align;
1341 size += GET_MODE_SIZE (mode);
1342 apply_args_mode[regno] = mode;
1346 apply_args_mode[regno] = VOIDmode;
1352 /* Return the size required for the block returned by __builtin_apply,
1353 and initialize apply_result_mode. */
1356 apply_result_size (void)
1358 static int size = -1;
1360 enum machine_mode mode;
1362 /* The values computed by this function never change. */
1367 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1368 if (targetm.calls.function_value_regno_p (regno))
1370 mode = targetm.calls.get_raw_result_mode (regno);
1372 gcc_assert (mode != VOIDmode);
1374 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1375 if (size % align != 0)
1376 size = CEIL (size, align) * align;
1377 size += GET_MODE_SIZE (mode);
1378 apply_result_mode[regno] = mode;
1381 apply_result_mode[regno] = VOIDmode;
1383 /* Allow targets that use untyped_call and untyped_return to override
1384 the size so that machine-specific information can be stored here. */
1385 #ifdef APPLY_RESULT_SIZE
1386 size = APPLY_RESULT_SIZE;
1392 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1393 /* Create a vector describing the result block RESULT. If SAVEP is true,
1394 the result block is used to save the values; otherwise it is used to
1395 restore the values. */
1398 result_vector (int savep, rtx result)
1400 int regno, size, align, nelts;
1401 enum machine_mode mode;
1403 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1406 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1407 if ((mode = apply_result_mode[regno]) != VOIDmode)
1409 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1410 if (size % align != 0)
1411 size = CEIL (size, align) * align;
1412 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1413 mem = adjust_address (result, mode, size);
1414 savevec[nelts++] = (savep
1415 ? gen_rtx_SET (VOIDmode, mem, reg)
1416 : gen_rtx_SET (VOIDmode, reg, mem));
1417 size += GET_MODE_SIZE (mode);
1419 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1421 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1423 /* Save the state required to perform an untyped call with the same
1424 arguments as were passed to the current function. */
1427 expand_builtin_apply_args_1 (void)
1430 int size, align, regno;
1431 enum machine_mode mode;
1432 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1434 /* Create a block where the arg-pointer, structure value address,
1435 and argument registers can be saved. */
1436 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1438 /* Walk past the arg-pointer and structure value address. */
1439 size = GET_MODE_SIZE (Pmode);
1440 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1441 size += GET_MODE_SIZE (Pmode);
1443 /* Save each register used in calling a function to the block. */
1444 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1445 if ((mode = apply_args_mode[regno]) != VOIDmode)
1447 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1448 if (size % align != 0)
1449 size = CEIL (size, align) * align;
1451 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1453 emit_move_insn (adjust_address (registers, mode, size), tem);
1454 size += GET_MODE_SIZE (mode);
1457 /* Save the arg pointer to the block. */
1458 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1459 #ifdef STACK_GROWS_DOWNWARD
1460 /* We need the pointer as the caller actually passed them to us, not
1461 as we might have pretended they were passed. Make sure it's a valid
1462 operand, as emit_move_insn isn't expected to handle a PLUS. */
1464 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1467 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1469 size = GET_MODE_SIZE (Pmode);
1471 /* Save the structure value address unless this is passed as an
1472 "invisible" first argument. */
1473 if (struct_incoming_value)
1475 emit_move_insn (adjust_address (registers, Pmode, size),
1476 copy_to_reg (struct_incoming_value));
1477 size += GET_MODE_SIZE (Pmode);
1480 /* Return the address of the block. */
1481 return copy_addr_to_reg (XEXP (registers, 0));
1484 /* __builtin_apply_args returns block of memory allocated on
1485 the stack into which is stored the arg pointer, structure
1486 value address, static chain, and all the registers that might
1487 possibly be used in performing a function call. The code is
1488 moved to the start of the function so the incoming values are
1492 expand_builtin_apply_args (void)
1494 /* Don't do __builtin_apply_args more than once in a function.
1495 Save the result of the first call and reuse it. */
1496 if (apply_args_value != 0)
1497 return apply_args_value;
1499 /* When this function is called, it means that registers must be
1500 saved on entry to this function. So we migrate the
1501 call to the first insn of this function. */
1506 temp = expand_builtin_apply_args_1 ();
1510 apply_args_value = temp;
1512 /* Put the insns after the NOTE that starts the function.
1513 If this is inside a start_sequence, make the outer-level insn
1514 chain current, so the code is placed at the start of the
1515 function. If internal_arg_pointer is a non-virtual pseudo,
1516 it needs to be placed after the function that initializes
1518 push_topmost_sequence ();
1519 if (REG_P (crtl->args.internal_arg_pointer)
1520 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1521 emit_insn_before (seq, parm_birth_insn);
1523 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1524 pop_topmost_sequence ();
1529 /* Perform an untyped call and save the state required to perform an
1530 untyped return of whatever value was returned by the given function. */
1533 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1535 int size, align, regno;
1536 enum machine_mode mode;
1537 rtx incoming_args, result, reg, dest, src, call_insn;
1538 rtx old_stack_level = 0;
1539 rtx call_fusage = 0;
1540 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1542 arguments = convert_memory_address (Pmode, arguments);
1544 /* Create a block where the return registers can be saved. */
1545 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1547 /* Fetch the arg pointer from the ARGUMENTS block. */
1548 incoming_args = gen_reg_rtx (Pmode);
1549 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1550 #ifndef STACK_GROWS_DOWNWARD
1551 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1552 incoming_args, 0, OPTAB_LIB_WIDEN);
1555 /* Push a new argument block and copy the arguments. Do not allow
1556 the (potential) memcpy call below to interfere with our stack
1558 do_pending_stack_adjust ();
1561 /* Save the stack with nonlocal if available. */
1562 #ifdef HAVE_save_stack_nonlocal
1563 if (HAVE_save_stack_nonlocal)
1564 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1567 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1569 /* Allocate a block of memory onto the stack and copy the memory
1570 arguments to the outgoing arguments address. We can pass TRUE
1571 as the 4th argument because we just saved the stack pointer
1572 and will restore it right after the call. */
1573 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1575 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1576 may have already set current_function_calls_alloca to true.
1577 current_function_calls_alloca won't be set if argsize is zero,
1578 so we have to guarantee need_drap is true here. */
1579 if (SUPPORTS_STACK_ALIGNMENT)
1580 crtl->need_drap = true;
1582 dest = virtual_outgoing_args_rtx;
1583 #ifndef STACK_GROWS_DOWNWARD
1584 if (CONST_INT_P (argsize))
1585 dest = plus_constant (dest, -INTVAL (argsize));
1587 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1589 dest = gen_rtx_MEM (BLKmode, dest);
1590 set_mem_align (dest, PARM_BOUNDARY);
1591 src = gen_rtx_MEM (BLKmode, incoming_args);
1592 set_mem_align (src, PARM_BOUNDARY);
1593 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1595 /* Refer to the argument block. */
1597 arguments = gen_rtx_MEM (BLKmode, arguments);
1598 set_mem_align (arguments, PARM_BOUNDARY);
1600 /* Walk past the arg-pointer and structure value address. */
1601 size = GET_MODE_SIZE (Pmode);
1603 size += GET_MODE_SIZE (Pmode);
1605 /* Restore each of the registers previously saved. Make USE insns
1606 for each of these registers for use in making the call. */
1607 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1608 if ((mode = apply_args_mode[regno]) != VOIDmode)
1610 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1611 if (size % align != 0)
1612 size = CEIL (size, align) * align;
1613 reg = gen_rtx_REG (mode, regno);
1614 emit_move_insn (reg, adjust_address (arguments, mode, size));
1615 use_reg (&call_fusage, reg);
1616 size += GET_MODE_SIZE (mode);
1619 /* Restore the structure value address unless this is passed as an
1620 "invisible" first argument. */
1621 size = GET_MODE_SIZE (Pmode);
1624 rtx value = gen_reg_rtx (Pmode);
1625 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1626 emit_move_insn (struct_value, value);
1627 if (REG_P (struct_value))
1628 use_reg (&call_fusage, struct_value);
1629 size += GET_MODE_SIZE (Pmode);
1632 /* All arguments and registers used for the call are set up by now! */
1633 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1635 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1636 and we don't want to load it into a register as an optimization,
1637 because prepare_call_address already did it if it should be done. */
1638 if (GET_CODE (function) != SYMBOL_REF)
1639 function = memory_address (FUNCTION_MODE, function);
1641 /* Generate the actual call instruction and save the return value. */
1642 #ifdef HAVE_untyped_call
1643 if (HAVE_untyped_call)
1644 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1645 result, result_vector (1, result)));
1648 #ifdef HAVE_call_value
1649 if (HAVE_call_value)
1653 /* Locate the unique return register. It is not possible to
1654 express a call that sets more than one return register using
1655 call_value; use untyped_call for that. In fact, untyped_call
1656 only needs to save the return registers in the given block. */
1657 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1658 if ((mode = apply_result_mode[regno]) != VOIDmode)
1660 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1662 valreg = gen_rtx_REG (mode, regno);
1665 emit_call_insn (GEN_CALL_VALUE (valreg,
1666 gen_rtx_MEM (FUNCTION_MODE, function),
1667 const0_rtx, NULL_RTX, const0_rtx));
1669 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1675 /* Find the CALL insn we just emitted, and attach the register usage
1677 call_insn = last_call_insn ();
1678 add_function_usage_to (call_insn, call_fusage);
1680 /* Restore the stack. */
1681 #ifdef HAVE_save_stack_nonlocal
1682 if (HAVE_save_stack_nonlocal)
1683 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1686 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1690 /* Return the address of the result block. */
1691 result = copy_addr_to_reg (XEXP (result, 0));
1692 return convert_memory_address (ptr_mode, result);
1695 /* Perform an untyped return. */
1698 expand_builtin_return (rtx result)
1700 int size, align, regno;
1701 enum machine_mode mode;
1703 rtx call_fusage = 0;
1705 result = convert_memory_address (Pmode, result);
1707 apply_result_size ();
1708 result = gen_rtx_MEM (BLKmode, result);
1710 #ifdef HAVE_untyped_return
1711 if (HAVE_untyped_return)
1713 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1719 /* Restore the return value and note that each value is used. */
1721 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1722 if ((mode = apply_result_mode[regno]) != VOIDmode)
1724 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1725 if (size % align != 0)
1726 size = CEIL (size, align) * align;
1727 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1728 emit_move_insn (reg, adjust_address (result, mode, size));
1730 push_to_sequence (call_fusage);
1732 call_fusage = get_insns ();
1734 size += GET_MODE_SIZE (mode);
1737 /* Put the USE insns before the return. */
1738 emit_insn (call_fusage);
1740 /* Return whatever values was restored by jumping directly to the end
1742 expand_naked_return ();
1745 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1747 static enum type_class
1748 type_to_class (tree type)
1750 switch (TREE_CODE (type))
1752 case VOID_TYPE: return void_type_class;
1753 case INTEGER_TYPE: return integer_type_class;
1754 case ENUMERAL_TYPE: return enumeral_type_class;
1755 case BOOLEAN_TYPE: return boolean_type_class;
1756 case POINTER_TYPE: return pointer_type_class;
1757 case REFERENCE_TYPE: return reference_type_class;
1758 case OFFSET_TYPE: return offset_type_class;
1759 case REAL_TYPE: return real_type_class;
1760 case COMPLEX_TYPE: return complex_type_class;
1761 case FUNCTION_TYPE: return function_type_class;
1762 case METHOD_TYPE: return method_type_class;
1763 case RECORD_TYPE: return record_type_class;
1765 case QUAL_UNION_TYPE: return union_type_class;
1766 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1767 ? string_type_class : array_type_class);
1768 case LANG_TYPE: return lang_type_class;
1769 default: return no_type_class;
1773 /* Expand a call EXP to __builtin_classify_type. */
1776 expand_builtin_classify_type (tree exp)
1778 if (call_expr_nargs (exp))
1779 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1780 return GEN_INT (no_type_class);
1783 /* This helper macro, meant to be used in mathfn_built_in below,
1784 determines which among a set of three builtin math functions is
1785 appropriate for a given type mode. The `F' and `L' cases are
1786 automatically generated from the `double' case. */
1787 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1788 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1789 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1790 fcodel = BUILT_IN_MATHFN##L ; break;
1791 /* Similar to above, but appends _R after any F/L suffix. */
1792 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1793 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1794 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1795 fcodel = BUILT_IN_MATHFN##L_R ; break;
1797 /* Return mathematic function equivalent to FN but operating directly
1798 on TYPE, if available. If IMPLICIT is true find the function in
1799 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1800 can't do the conversion, return zero. */
1803 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1805 tree const *const fn_arr
1806 = implicit ? implicit_built_in_decls : built_in_decls;
1807 enum built_in_function fcode, fcodef, fcodel;
1811 CASE_MATHFN (BUILT_IN_ACOS)
1812 CASE_MATHFN (BUILT_IN_ACOSH)
1813 CASE_MATHFN (BUILT_IN_ASIN)
1814 CASE_MATHFN (BUILT_IN_ASINH)
1815 CASE_MATHFN (BUILT_IN_ATAN)
1816 CASE_MATHFN (BUILT_IN_ATAN2)
1817 CASE_MATHFN (BUILT_IN_ATANH)
1818 CASE_MATHFN (BUILT_IN_CBRT)
1819 CASE_MATHFN (BUILT_IN_CEIL)
1820 CASE_MATHFN (BUILT_IN_CEXPI)
1821 CASE_MATHFN (BUILT_IN_COPYSIGN)
1822 CASE_MATHFN (BUILT_IN_COS)
1823 CASE_MATHFN (BUILT_IN_COSH)
1824 CASE_MATHFN (BUILT_IN_DREM)
1825 CASE_MATHFN (BUILT_IN_ERF)
1826 CASE_MATHFN (BUILT_IN_ERFC)
1827 CASE_MATHFN (BUILT_IN_EXP)
1828 CASE_MATHFN (BUILT_IN_EXP10)
1829 CASE_MATHFN (BUILT_IN_EXP2)
1830 CASE_MATHFN (BUILT_IN_EXPM1)
1831 CASE_MATHFN (BUILT_IN_FABS)
1832 CASE_MATHFN (BUILT_IN_FDIM)
1833 CASE_MATHFN (BUILT_IN_FLOOR)
1834 CASE_MATHFN (BUILT_IN_FMA)
1835 CASE_MATHFN (BUILT_IN_FMAX)
1836 CASE_MATHFN (BUILT_IN_FMIN)
1837 CASE_MATHFN (BUILT_IN_FMOD)
1838 CASE_MATHFN (BUILT_IN_FREXP)
1839 CASE_MATHFN (BUILT_IN_GAMMA)
1840 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1841 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1842 CASE_MATHFN (BUILT_IN_HYPOT)
1843 CASE_MATHFN (BUILT_IN_ILOGB)
1844 CASE_MATHFN (BUILT_IN_INF)
1845 CASE_MATHFN (BUILT_IN_ISINF)
1846 CASE_MATHFN (BUILT_IN_J0)
1847 CASE_MATHFN (BUILT_IN_J1)
1848 CASE_MATHFN (BUILT_IN_JN)
1849 CASE_MATHFN (BUILT_IN_LCEIL)
1850 CASE_MATHFN (BUILT_IN_LDEXP)
1851 CASE_MATHFN (BUILT_IN_LFLOOR)
1852 CASE_MATHFN (BUILT_IN_LGAMMA)
1853 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1854 CASE_MATHFN (BUILT_IN_LLCEIL)
1855 CASE_MATHFN (BUILT_IN_LLFLOOR)
1856 CASE_MATHFN (BUILT_IN_LLRINT)
1857 CASE_MATHFN (BUILT_IN_LLROUND)
1858 CASE_MATHFN (BUILT_IN_LOG)
1859 CASE_MATHFN (BUILT_IN_LOG10)
1860 CASE_MATHFN (BUILT_IN_LOG1P)
1861 CASE_MATHFN (BUILT_IN_LOG2)
1862 CASE_MATHFN (BUILT_IN_LOGB)
1863 CASE_MATHFN (BUILT_IN_LRINT)
1864 CASE_MATHFN (BUILT_IN_LROUND)
1865 CASE_MATHFN (BUILT_IN_MODF)
1866 CASE_MATHFN (BUILT_IN_NAN)
1867 CASE_MATHFN (BUILT_IN_NANS)
1868 CASE_MATHFN (BUILT_IN_NEARBYINT)
1869 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1870 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1871 CASE_MATHFN (BUILT_IN_POW)
1872 CASE_MATHFN (BUILT_IN_POWI)
1873 CASE_MATHFN (BUILT_IN_POW10)
1874 CASE_MATHFN (BUILT_IN_REMAINDER)
1875 CASE_MATHFN (BUILT_IN_REMQUO)
1876 CASE_MATHFN (BUILT_IN_RINT)
1877 CASE_MATHFN (BUILT_IN_ROUND)
1878 CASE_MATHFN (BUILT_IN_SCALB)
1879 CASE_MATHFN (BUILT_IN_SCALBLN)
1880 CASE_MATHFN (BUILT_IN_SCALBN)
1881 CASE_MATHFN (BUILT_IN_SIGNBIT)
1882 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1883 CASE_MATHFN (BUILT_IN_SIN)
1884 CASE_MATHFN (BUILT_IN_SINCOS)
1885 CASE_MATHFN (BUILT_IN_SINH)
1886 CASE_MATHFN (BUILT_IN_SQRT)
1887 CASE_MATHFN (BUILT_IN_TAN)
1888 CASE_MATHFN (BUILT_IN_TANH)
1889 CASE_MATHFN (BUILT_IN_TGAMMA)
1890 CASE_MATHFN (BUILT_IN_TRUNC)
1891 CASE_MATHFN (BUILT_IN_Y0)
1892 CASE_MATHFN (BUILT_IN_Y1)
1893 CASE_MATHFN (BUILT_IN_YN)
1899 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1900 return fn_arr[fcode];
1901 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1902 return fn_arr[fcodef];
1903 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1904 return fn_arr[fcodel];
1909 /* Like mathfn_built_in_1(), but always use the implicit array. */
1912 mathfn_built_in (tree type, enum built_in_function fn)
1914 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1917 /* If errno must be maintained, expand the RTL to check if the result,
1918 TARGET, of a built-in function call, EXP, is NaN, and if so set
1922 expand_errno_check (tree exp, rtx target)
1924 rtx lab = gen_label_rtx ();
1926 /* Test the result; if it is NaN, set errno=EDOM because
1927 the argument was not in the domain. */
1928 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1929 NULL_RTX, NULL_RTX, lab,
1930 /* The jump is very likely. */
1931 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1934 /* If this built-in doesn't throw an exception, set errno directly. */
1935 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1937 #ifdef GEN_ERRNO_RTX
1938 rtx errno_rtx = GEN_ERRNO_RTX;
1941 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1943 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1949 /* Make sure the library call isn't expanded as a tail call. */
1950 CALL_EXPR_TAILCALL (exp) = 0;
1952 /* We can't set errno=EDOM directly; let the library call do it.
1953 Pop the arguments right away in case the call gets deleted. */
1955 expand_call (exp, target, 0);
1960 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1961 Return NULL_RTX if a normal call should be emitted rather than expanding
1962 the function in-line. EXP is the expression that is a call to the builtin
1963 function; if convenient, the result should be placed in TARGET.
1964 SUBTARGET may be used as the target for computing one of EXP's operands. */
1967 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1969 optab builtin_optab;
1971 tree fndecl = get_callee_fndecl (exp);
1972 enum machine_mode mode;
1973 bool errno_set = false;
1976 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1979 arg = CALL_EXPR_ARG (exp, 0);
1981 switch (DECL_FUNCTION_CODE (fndecl))
1983 CASE_FLT_FN (BUILT_IN_SQRT):
1984 errno_set = ! tree_expr_nonnegative_p (arg);
1985 builtin_optab = sqrt_optab;
1987 CASE_FLT_FN (BUILT_IN_EXP):
1988 errno_set = true; builtin_optab = exp_optab; break;
1989 CASE_FLT_FN (BUILT_IN_EXP10):
1990 CASE_FLT_FN (BUILT_IN_POW10):
1991 errno_set = true; builtin_optab = exp10_optab; break;
1992 CASE_FLT_FN (BUILT_IN_EXP2):
1993 errno_set = true; builtin_optab = exp2_optab; break;
1994 CASE_FLT_FN (BUILT_IN_EXPM1):
1995 errno_set = true; builtin_optab = expm1_optab; break;
1996 CASE_FLT_FN (BUILT_IN_LOGB):
1997 errno_set = true; builtin_optab = logb_optab; break;
1998 CASE_FLT_FN (BUILT_IN_LOG):
1999 errno_set = true; builtin_optab = log_optab; break;
2000 CASE_FLT_FN (BUILT_IN_LOG10):
2001 errno_set = true; builtin_optab = log10_optab; break;
2002 CASE_FLT_FN (BUILT_IN_LOG2):
2003 errno_set = true; builtin_optab = log2_optab; break;
2004 CASE_FLT_FN (BUILT_IN_LOG1P):
2005 errno_set = true; builtin_optab = log1p_optab; break;
2006 CASE_FLT_FN (BUILT_IN_ASIN):
2007 builtin_optab = asin_optab; break;
2008 CASE_FLT_FN (BUILT_IN_ACOS):
2009 builtin_optab = acos_optab; break;
2010 CASE_FLT_FN (BUILT_IN_TAN):
2011 builtin_optab = tan_optab; break;
2012 CASE_FLT_FN (BUILT_IN_ATAN):
2013 builtin_optab = atan_optab; break;
2014 CASE_FLT_FN (BUILT_IN_FLOOR):
2015 builtin_optab = floor_optab; break;
2016 CASE_FLT_FN (BUILT_IN_CEIL):
2017 builtin_optab = ceil_optab; break;
2018 CASE_FLT_FN (BUILT_IN_TRUNC):
2019 builtin_optab = btrunc_optab; break;
2020 CASE_FLT_FN (BUILT_IN_ROUND):
2021 builtin_optab = round_optab; break;
2022 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2023 builtin_optab = nearbyint_optab;
2024 if (flag_trapping_math)
2026 /* Else fallthrough and expand as rint. */
2027 CASE_FLT_FN (BUILT_IN_RINT):
2028 builtin_optab = rint_optab; break;
2029 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2030 builtin_optab = significand_optab; break;
2035 /* Make a suitable register to place result in. */
2036 mode = TYPE_MODE (TREE_TYPE (exp));
2038 if (! flag_errno_math || ! HONOR_NANS (mode))
2041 /* Before working hard, check whether the instruction is available. */
2042 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2044 target = gen_reg_rtx (mode);
2046 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2047 need to expand the argument again. This way, we will not perform
2048 side-effects more the once. */
2049 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2051 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2055 /* Compute into TARGET.
2056 Set TARGET to wherever the result comes back. */
2057 target = expand_unop (mode, builtin_optab, op0, target, 0);
2062 expand_errno_check (exp, target);
2064 /* Output the entire sequence. */
2065 insns = get_insns ();
2071 /* If we were unable to expand via the builtin, stop the sequence
2072 (without outputting the insns) and call to the library function
2073 with the stabilized argument list. */
2077 return expand_call (exp, target, target == const0_rtx);
2080 /* Expand a call to the builtin binary math functions (pow and atan2).
2081 Return NULL_RTX if a normal call should be emitted rather than expanding the
2082 function in-line. EXP is the expression that is a call to the builtin
2083 function; if convenient, the result should be placed in TARGET.
2084 SUBTARGET may be used as the target for computing one of EXP's
2088 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2090 optab builtin_optab;
2091 rtx op0, op1, insns;
2092 int op1_type = REAL_TYPE;
2093 tree fndecl = get_callee_fndecl (exp);
2095 enum machine_mode mode;
2096 bool errno_set = true;
2098 switch (DECL_FUNCTION_CODE (fndecl))
2100 CASE_FLT_FN (BUILT_IN_SCALBN):
2101 CASE_FLT_FN (BUILT_IN_SCALBLN):
2102 CASE_FLT_FN (BUILT_IN_LDEXP):
2103 op1_type = INTEGER_TYPE;
2108 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2111 arg0 = CALL_EXPR_ARG (exp, 0);
2112 arg1 = CALL_EXPR_ARG (exp, 1);
2114 switch (DECL_FUNCTION_CODE (fndecl))
2116 CASE_FLT_FN (BUILT_IN_POW):
2117 builtin_optab = pow_optab; break;
2118 CASE_FLT_FN (BUILT_IN_ATAN2):
2119 builtin_optab = atan2_optab; break;
2120 CASE_FLT_FN (BUILT_IN_SCALB):
2121 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2123 builtin_optab = scalb_optab; break;
2124 CASE_FLT_FN (BUILT_IN_SCALBN):
2125 CASE_FLT_FN (BUILT_IN_SCALBLN):
2126 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2128 /* Fall through... */
2129 CASE_FLT_FN (BUILT_IN_LDEXP):
2130 builtin_optab = ldexp_optab; break;
2131 CASE_FLT_FN (BUILT_IN_FMOD):
2132 builtin_optab = fmod_optab; break;
2133 CASE_FLT_FN (BUILT_IN_REMAINDER):
2134 CASE_FLT_FN (BUILT_IN_DREM):
2135 builtin_optab = remainder_optab; break;
2140 /* Make a suitable register to place result in. */
2141 mode = TYPE_MODE (TREE_TYPE (exp));
2143 /* Before working hard, check whether the instruction is available. */
2144 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2147 target = gen_reg_rtx (mode);
2149 if (! flag_errno_math || ! HONOR_NANS (mode))
2152 /* Always stabilize the argument list. */
2153 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2154 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2156 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2157 op1 = expand_normal (arg1);
2161 /* Compute into TARGET.
2162 Set TARGET to wherever the result comes back. */
2163 target = expand_binop (mode, builtin_optab, op0, op1,
2164 target, 0, OPTAB_DIRECT);
2166 /* If we were unable to expand via the builtin, stop the sequence
2167 (without outputting the insns) and call to the library function
2168 with the stabilized argument list. */
2172 return expand_call (exp, target, target == const0_rtx);
2176 expand_errno_check (exp, target);
2178 /* Output the entire sequence. */
2179 insns = get_insns ();
2186 /* Expand a call to the builtin trinary math functions (fma).
2187 Return NULL_RTX if a normal call should be emitted rather than expanding the
2188 function in-line. EXP is the expression that is a call to the builtin
2189 function; if convenient, the result should be placed in TARGET.
2190 SUBTARGET may be used as the target for computing one of EXP's
2194 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2196 optab builtin_optab;
2197 rtx op0, op1, op2, insns;
2198 tree fndecl = get_callee_fndecl (exp);
2199 tree arg0, arg1, arg2;
2200 enum machine_mode mode;
2202 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2205 arg0 = CALL_EXPR_ARG (exp, 0);
2206 arg1 = CALL_EXPR_ARG (exp, 1);
2207 arg2 = CALL_EXPR_ARG (exp, 2);
2209 switch (DECL_FUNCTION_CODE (fndecl))
2211 CASE_FLT_FN (BUILT_IN_FMA):
2212 builtin_optab = fma_optab; break;
2217 /* Make a suitable register to place result in. */
2218 mode = TYPE_MODE (TREE_TYPE (exp));
2220 /* Before working hard, check whether the instruction is available. */
2221 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2224 target = gen_reg_rtx (mode);
2226 /* Always stabilize the argument list. */
2227 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2228 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2229 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2231 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2232 op1 = expand_normal (arg1);
2233 op2 = expand_normal (arg2);
2237 /* Compute into TARGET.
2238 Set TARGET to wherever the result comes back. */
2239 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2242 /* If we were unable to expand via the builtin, stop the sequence
2243 (without outputting the insns) and call to the library function
2244 with the stabilized argument list. */
2248 return expand_call (exp, target, target == const0_rtx);
2251 /* Output the entire sequence. */
2252 insns = get_insns ();
2259 /* Expand a call to the builtin sin and cos math functions.
2260 Return NULL_RTX if a normal call should be emitted rather than expanding the
2261 function in-line. EXP is the expression that is a call to the builtin
2262 function; if convenient, the result should be placed in TARGET.
2263 SUBTARGET may be used as the target for computing one of EXP's
2267 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2269 optab builtin_optab;
2271 tree fndecl = get_callee_fndecl (exp);
2272 enum machine_mode mode;
2275 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2278 arg = CALL_EXPR_ARG (exp, 0);
2280 switch (DECL_FUNCTION_CODE (fndecl))
2282 CASE_FLT_FN (BUILT_IN_SIN):
2283 CASE_FLT_FN (BUILT_IN_COS):
2284 builtin_optab = sincos_optab; break;
2289 /* Make a suitable register to place result in. */
2290 mode = TYPE_MODE (TREE_TYPE (exp));
2292 /* Check if sincos insn is available, otherwise fallback
2293 to sin or cos insn. */
2294 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2295 switch (DECL_FUNCTION_CODE (fndecl))
2297 CASE_FLT_FN (BUILT_IN_SIN):
2298 builtin_optab = sin_optab; break;
2299 CASE_FLT_FN (BUILT_IN_COS):
2300 builtin_optab = cos_optab; break;
2305 /* Before working hard, check whether the instruction is available. */
2306 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2308 target = gen_reg_rtx (mode);
2310 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2311 need to expand the argument again. This way, we will not perform
2312 side-effects more the once. */
2313 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2315 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2319 /* Compute into TARGET.
2320 Set TARGET to wherever the result comes back. */
2321 if (builtin_optab == sincos_optab)
2325 switch (DECL_FUNCTION_CODE (fndecl))
2327 CASE_FLT_FN (BUILT_IN_SIN):
2328 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2330 CASE_FLT_FN (BUILT_IN_COS):
2331 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2336 gcc_assert (result);
2340 target = expand_unop (mode, builtin_optab, op0, target, 0);
2345 /* Output the entire sequence. */
2346 insns = get_insns ();
2352 /* If we were unable to expand via the builtin, stop the sequence
2353 (without outputting the insns) and call to the library function
2354 with the stabilized argument list. */
2358 target = expand_call (exp, target, target == const0_rtx);
2363 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2364 return an RTL instruction code that implements the functionality.
2365 If that isn't possible or available return CODE_FOR_nothing. */
2367 static enum insn_code
2368 interclass_mathfn_icode (tree arg, tree fndecl)
2370 bool errno_set = false;
2371 optab builtin_optab = 0;
2372 enum machine_mode mode;
2374 switch (DECL_FUNCTION_CODE (fndecl))
2376 CASE_FLT_FN (BUILT_IN_ILOGB):
2377 errno_set = true; builtin_optab = ilogb_optab; break;
2378 CASE_FLT_FN (BUILT_IN_ISINF):
2379 builtin_optab = isinf_optab; break;
2380 case BUILT_IN_ISNORMAL:
2381 case BUILT_IN_ISFINITE:
2382 CASE_FLT_FN (BUILT_IN_FINITE):
2383 case BUILT_IN_FINITED32:
2384 case BUILT_IN_FINITED64:
2385 case BUILT_IN_FINITED128:
2386 case BUILT_IN_ISINFD32:
2387 case BUILT_IN_ISINFD64:
2388 case BUILT_IN_ISINFD128:
2389 /* These builtins have no optabs (yet). */
2395 /* There's no easy way to detect the case we need to set EDOM. */
2396 if (flag_errno_math && errno_set)
2397 return CODE_FOR_nothing;
2399 /* Optab mode depends on the mode of the input argument. */
2400 mode = TYPE_MODE (TREE_TYPE (arg));
2403 return optab_handler (builtin_optab, mode);
2404 return CODE_FOR_nothing;
2407 /* Expand a call to one of the builtin math functions that operate on
2408 floating point argument and output an integer result (ilogb, isinf,
2410 Return 0 if a normal call should be emitted rather than expanding the
2411 function in-line. EXP is the expression that is a call to the builtin
2412 function; if convenient, the result should be placed in TARGET. */
2415 expand_builtin_interclass_mathfn (tree exp, rtx target)
2417 enum insn_code icode = CODE_FOR_nothing;
2419 tree fndecl = get_callee_fndecl (exp);
2420 enum machine_mode mode;
2423 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2426 arg = CALL_EXPR_ARG (exp, 0);
2427 icode = interclass_mathfn_icode (arg, fndecl);
2428 mode = TYPE_MODE (TREE_TYPE (arg));
2430 if (icode != CODE_FOR_nothing)
2432 rtx last = get_last_insn ();
2433 tree orig_arg = arg;
2434 /* Make a suitable register to place result in. */
2436 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2437 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2438 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2440 gcc_assert (insn_data[icode].operand[0].predicate
2441 (target, GET_MODE (target)));
2443 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2444 need to expand the argument again. This way, we will not perform
2445 side-effects more the once. */
2446 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2448 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2450 if (mode != GET_MODE (op0))
2451 op0 = convert_to_mode (mode, op0, 0);
2453 /* Compute into TARGET.
2454 Set TARGET to wherever the result comes back. */
2455 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2457 delete_insns_since (last);
2458 CALL_EXPR_ARG (exp, 0) = orig_arg;
2464 /* Expand a call to the builtin sincos math function.
2465 Return NULL_RTX if a normal call should be emitted rather than expanding the
2466 function in-line. EXP is the expression that is a call to the builtin
2470 expand_builtin_sincos (tree exp)
2472 rtx op0, op1, op2, target1, target2;
2473 enum machine_mode mode;
2474 tree arg, sinp, cosp;
2476 location_t loc = EXPR_LOCATION (exp);
2478 if (!validate_arglist (exp, REAL_TYPE,
2479 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2482 arg = CALL_EXPR_ARG (exp, 0);
2483 sinp = CALL_EXPR_ARG (exp, 1);
2484 cosp = CALL_EXPR_ARG (exp, 2);
2486 /* Make a suitable register to place result in. */
2487 mode = TYPE_MODE (TREE_TYPE (arg));
2489 /* Check if sincos insn is available, otherwise emit the call. */
2490 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2493 target1 = gen_reg_rtx (mode);
2494 target2 = gen_reg_rtx (mode);
2496 op0 = expand_normal (arg);
2497 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2498 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2500 /* Compute into target1 and target2.
2501 Set TARGET to wherever the result comes back. */
2502 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2503 gcc_assert (result);
2505 /* Move target1 and target2 to the memory locations indicated
2507 emit_move_insn (op1, target1);
2508 emit_move_insn (op2, target2);
2513 /* Expand a call to the internal cexpi builtin to the sincos math function.
2514 EXP is the expression that is a call to the builtin function; if convenient,
2515 the result should be placed in TARGET. */
2518 expand_builtin_cexpi (tree exp, rtx target)
2520 tree fndecl = get_callee_fndecl (exp);
2522 enum machine_mode mode;
2524 location_t loc = EXPR_LOCATION (exp);
2526 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2529 arg = CALL_EXPR_ARG (exp, 0);
2530 type = TREE_TYPE (arg);
2531 mode = TYPE_MODE (TREE_TYPE (arg));
2533 /* Try expanding via a sincos optab, fall back to emitting a libcall
2534 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2535 is only generated from sincos, cexp or if we have either of them. */
2536 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2538 op1 = gen_reg_rtx (mode);
2539 op2 = gen_reg_rtx (mode);
2541 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2543 /* Compute into op1 and op2. */
2544 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2546 else if (TARGET_HAS_SINCOS)
2548 tree call, fn = NULL_TREE;
2552 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2553 fn = built_in_decls[BUILT_IN_SINCOSF];
2554 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2555 fn = built_in_decls[BUILT_IN_SINCOS];
2556 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2557 fn = built_in_decls[BUILT_IN_SINCOSL];
2561 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2562 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2563 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2564 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2565 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2566 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2568 /* Make sure not to fold the sincos call again. */
2569 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2570 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2571 call, 3, arg, top1, top2));
2575 tree call, fn = NULL_TREE, narg;
2576 tree ctype = build_complex_type (type);
2578 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2579 fn = built_in_decls[BUILT_IN_CEXPF];
2580 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2581 fn = built_in_decls[BUILT_IN_CEXP];
2582 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2583 fn = built_in_decls[BUILT_IN_CEXPL];
2587 /* If we don't have a decl for cexp create one. This is the
2588 friendliest fallback if the user calls __builtin_cexpi
2589 without full target C99 function support. */
2590 if (fn == NULL_TREE)
2593 const char *name = NULL;
2595 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2597 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2599 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2602 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2603 fn = build_fn_decl (name, fntype);
2606 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2607 build_real (type, dconst0), arg);
2609 /* Make sure not to fold the cexp call again. */
2610 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2611 return expand_expr (build_call_nary (ctype, call, 1, narg),
2612 target, VOIDmode, EXPAND_NORMAL);
2615 /* Now build the proper return type. */
2616 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2617 make_tree (TREE_TYPE (arg), op2),
2618 make_tree (TREE_TYPE (arg), op1)),
2619 target, VOIDmode, EXPAND_NORMAL);
2622 /* Conveniently construct a function call expression. FNDECL names the
2623 function to be called, N is the number of arguments, and the "..."
2624 parameters are the argument expressions. Unlike build_call_exr
2625 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2628 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2631 tree fntype = TREE_TYPE (fndecl);
2632 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2635 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2637 SET_EXPR_LOCATION (fn, loc);
2641 /* Expand a call to one of the builtin rounding functions gcc defines
2642 as an extension (lfloor and lceil). As these are gcc extensions we
2643 do not need to worry about setting errno to EDOM.
2644 If expanding via optab fails, lower expression to (int)(floor(x)).
2645 EXP is the expression that is a call to the builtin function;
2646 if convenient, the result should be placed in TARGET. */
2649 expand_builtin_int_roundingfn (tree exp, rtx target)
2651 convert_optab builtin_optab;
2652 rtx op0, insns, tmp;
2653 tree fndecl = get_callee_fndecl (exp);
2654 enum built_in_function fallback_fn;
2655 tree fallback_fndecl;
2656 enum machine_mode mode;
2659 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2662 arg = CALL_EXPR_ARG (exp, 0);
2664 switch (DECL_FUNCTION_CODE (fndecl))
2666 CASE_FLT_FN (BUILT_IN_LCEIL):
2667 CASE_FLT_FN (BUILT_IN_LLCEIL):
2668 builtin_optab = lceil_optab;
2669 fallback_fn = BUILT_IN_CEIL;
2672 CASE_FLT_FN (BUILT_IN_LFLOOR):
2673 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2674 builtin_optab = lfloor_optab;
2675 fallback_fn = BUILT_IN_FLOOR;
2682 /* Make a suitable register to place result in. */
2683 mode = TYPE_MODE (TREE_TYPE (exp));
2685 target = gen_reg_rtx (mode);
2687 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2688 need to expand the argument again. This way, we will not perform
2689 side-effects more the once. */
2690 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2692 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2696 /* Compute into TARGET. */
2697 if (expand_sfix_optab (target, op0, builtin_optab))
2699 /* Output the entire sequence. */
2700 insns = get_insns ();
2706 /* If we were unable to expand via the builtin, stop the sequence
2707 (without outputting the insns). */
2710 /* Fall back to floating point rounding optab. */
2711 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2713 /* For non-C99 targets we may end up without a fallback fndecl here
2714 if the user called __builtin_lfloor directly. In this case emit
2715 a call to the floor/ceil variants nevertheless. This should result
2716 in the best user experience for not full C99 targets. */
2717 if (fallback_fndecl == NULL_TREE)
2720 const char *name = NULL;
2722 switch (DECL_FUNCTION_CODE (fndecl))
2724 case BUILT_IN_LCEIL:
2725 case BUILT_IN_LLCEIL:
2728 case BUILT_IN_LCEILF:
2729 case BUILT_IN_LLCEILF:
2732 case BUILT_IN_LCEILL:
2733 case BUILT_IN_LLCEILL:
2736 case BUILT_IN_LFLOOR:
2737 case BUILT_IN_LLFLOOR:
2740 case BUILT_IN_LFLOORF:
2741 case BUILT_IN_LLFLOORF:
2744 case BUILT_IN_LFLOORL:
2745 case BUILT_IN_LLFLOORL:
2752 fntype = build_function_type_list (TREE_TYPE (arg),
2753 TREE_TYPE (arg), NULL_TREE);
2754 fallback_fndecl = build_fn_decl (name, fntype);
2757 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2759 tmp = expand_normal (exp);
2761 /* Truncate the result of floating point optab to integer
2762 via expand_fix (). */
2763 target = gen_reg_rtx (mode);
2764 expand_fix (target, tmp, 0);
2769 /* Expand a call to one of the builtin math functions doing integer
2771 Return 0 if a normal call should be emitted rather than expanding the
2772 function in-line. EXP is the expression that is a call to the builtin
2773 function; if convenient, the result should be placed in TARGET. */
2776 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2778 convert_optab builtin_optab;
2780 tree fndecl = get_callee_fndecl (exp);
2782 enum machine_mode mode;
2784 /* There's no easy way to detect the case we need to set EDOM. */
2785 if (flag_errno_math)
2788 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2791 arg = CALL_EXPR_ARG (exp, 0);
2793 switch (DECL_FUNCTION_CODE (fndecl))
2795 CASE_FLT_FN (BUILT_IN_LRINT):
2796 CASE_FLT_FN (BUILT_IN_LLRINT):
2797 builtin_optab = lrint_optab; break;
2798 CASE_FLT_FN (BUILT_IN_LROUND):
2799 CASE_FLT_FN (BUILT_IN_LLROUND):
2800 builtin_optab = lround_optab; break;
2805 /* Make a suitable register to place result in. */
2806 mode = TYPE_MODE (TREE_TYPE (exp));
2808 target = gen_reg_rtx (mode);
2810 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2811 need to expand the argument again. This way, we will not perform
2812 side-effects more the once. */
2813 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2815 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2819 if (expand_sfix_optab (target, op0, builtin_optab))
2821 /* Output the entire sequence. */
2822 insns = get_insns ();
2828 /* If we were unable to expand via the builtin, stop the sequence
2829 (without outputting the insns) and call to the library function
2830 with the stabilized argument list. */
2833 target = expand_call (exp, target, target == const0_rtx);
2838 /* To evaluate powi(x,n), the floating point value x raised to the
2839 constant integer exponent n, we use a hybrid algorithm that
2840 combines the "window method" with look-up tables. For an
2841 introduction to exponentiation algorithms and "addition chains",
2842 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2843 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2844 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2845 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2847 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2848 multiplications to inline before calling the system library's pow
2849 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2850 so this default never requires calling pow, powf or powl. */
2852 #ifndef POWI_MAX_MULTS
2853 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2856 /* The size of the "optimal power tree" lookup table. All
2857 exponents less than this value are simply looked up in the
2858 powi_table below. This threshold is also used to size the
2859 cache of pseudo registers that hold intermediate results. */
2860 #define POWI_TABLE_SIZE 256
2862 /* The size, in bits of the window, used in the "window method"
2863 exponentiation algorithm. This is equivalent to a radix of
2864 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2865 #define POWI_WINDOW_SIZE 3
2867 /* The following table is an efficient representation of an
2868 "optimal power tree". For each value, i, the corresponding
2869 value, j, in the table states than an optimal evaluation
2870 sequence for calculating pow(x,i) can be found by evaluating
2871 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2872 100 integers is given in Knuth's "Seminumerical algorithms". */
2874 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2876 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2877 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2878 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2879 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2880 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2881 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2882 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2883 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2884 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2885 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2886 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2887 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2888 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2889 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2890 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2891 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2892 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2893 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2894 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2895 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2896 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2897 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2898 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2899 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2900 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2901 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2902 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2903 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2904 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2905 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2906 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2907 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2911 /* Return the number of multiplications required to calculate
2912 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2913 subroutine of powi_cost. CACHE is an array indicating
2914 which exponents have already been calculated. */
2917 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2919 /* If we've already calculated this exponent, then this evaluation
2920 doesn't require any additional multiplications. */
2925 return powi_lookup_cost (n - powi_table[n], cache)
2926 + powi_lookup_cost (powi_table[n], cache) + 1;
2929 /* Return the number of multiplications required to calculate
2930 powi(x,n) for an arbitrary x, given the exponent N. This
2931 function needs to be kept in sync with expand_powi below. */
2934 powi_cost (HOST_WIDE_INT n)
2936 bool cache[POWI_TABLE_SIZE];
2937 unsigned HOST_WIDE_INT digit;
2938 unsigned HOST_WIDE_INT val;
2944 /* Ignore the reciprocal when calculating the cost. */
2945 val = (n < 0) ? -n : n;
2947 /* Initialize the exponent cache. */
2948 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2953 while (val >= POWI_TABLE_SIZE)
2957 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2958 result += powi_lookup_cost (digit, cache)
2959 + POWI_WINDOW_SIZE + 1;
2960 val >>= POWI_WINDOW_SIZE;
2969 return result + powi_lookup_cost (val, cache);
2972 /* Recursive subroutine of expand_powi. This function takes the array,
2973 CACHE, of already calculated exponents and an exponent N and returns
2974 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2977 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2979 unsigned HOST_WIDE_INT digit;
2983 if (n < POWI_TABLE_SIZE)
2988 target = gen_reg_rtx (mode);
2991 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2992 op1 = expand_powi_1 (mode, powi_table[n], cache);
2996 target = gen_reg_rtx (mode);
2997 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2998 op0 = expand_powi_1 (mode, n - digit, cache);
2999 op1 = expand_powi_1 (mode, digit, cache);
3003 target = gen_reg_rtx (mode);
3004 op0 = expand_powi_1 (mode, n >> 1, cache);
3008 result = expand_mult (mode, op0, op1, target, 0);
3009 if (result != target)
3010 emit_move_insn (target, result);
3014 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
3015 floating point operand in mode MODE, and N is the exponent. This
3016 function needs to be kept in sync with powi_cost above. */
3019 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
3021 rtx cache[POWI_TABLE_SIZE];
3025 return CONST1_RTX (mode);
3027 memset (cache, 0, sizeof (cache));
3030 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
3032 /* If the original exponent was negative, reciprocate the result. */
3034 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3035 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3040 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
3041 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
3042 if we can simplify it. */
3044 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
3047 if (TREE_CODE (arg1) == REAL_CST
3048 && !TREE_OVERFLOW (arg1)
3049 && flag_unsafe_math_optimizations)
3051 enum machine_mode mode = TYPE_MODE (type);
3052 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3053 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3054 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3055 tree op = NULL_TREE;
3059 /* Optimize pow (x, 0.5) into sqrt. */
3060 if (REAL_VALUES_EQUAL (c, dconsthalf))
3061 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3065 REAL_VALUE_TYPE dconst1_4 = dconst1;
3066 REAL_VALUE_TYPE dconst3_4;
3067 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3069 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3070 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3072 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3073 machines that a builtin sqrt instruction is smaller than a
3074 call to pow with 0.25, so do this optimization even if
3076 if (REAL_VALUES_EQUAL (c, dconst1_4))
3078 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3079 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3082 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3083 are optimizing for space. */
3084 else if (optimize_insn_for_speed_p ()
3085 && !TREE_SIDE_EFFECTS (arg0)
3086 && REAL_VALUES_EQUAL (c, dconst3_4))
3088 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3089 tree sqrt2 = builtin_save_expr (sqrt1);
3090 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3091 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3096 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3097 cbrt/sqrts instead of pow (x, 1./6.). */
3099 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3101 /* First try 1/3. */
3102 REAL_VALUE_TYPE dconst1_3
3103 = real_value_truncate (mode, dconst_third ());
3105 if (REAL_VALUES_EQUAL (c, dconst1_3))
3106 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3109 else if (optimize_insn_for_speed_p ())
3111 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3112 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3114 if (REAL_VALUES_EQUAL (c, dconst1_6))
3116 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3117 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3123 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3129 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3130 a normal call should be emitted rather than expanding the function
3131 in-line. EXP is the expression that is a call to the builtin
3132 function; if convenient, the result should be placed in TARGET. */
3135 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3139 tree type = TREE_TYPE (exp);
3140 REAL_VALUE_TYPE cint, c, c2;
3143 enum machine_mode mode = TYPE_MODE (type);
3145 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3148 arg0 = CALL_EXPR_ARG (exp, 0);
3149 arg1 = CALL_EXPR_ARG (exp, 1);
3151 if (TREE_CODE (arg1) != REAL_CST
3152 || TREE_OVERFLOW (arg1))
3153 return expand_builtin_mathfn_2 (exp, target, subtarget);
3155 /* Handle constant exponents. */
3157 /* For integer valued exponents we can expand to an optimal multiplication
3158 sequence using expand_powi. */
3159 c = TREE_REAL_CST (arg1);
3160 n = real_to_integer (&c);
3161 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3162 if (real_identical (&c, &cint)
3163 && ((n >= -1 && n <= 2)
3164 || (flag_unsafe_math_optimizations
3165 && optimize_insn_for_speed_p ()
3166 && powi_cost (n) <= POWI_MAX_MULTS)))
3168 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3171 op = force_reg (mode, op);
3172 op = expand_powi (op, mode, n);
3177 narg0 = builtin_save_expr (arg0);
3179 /* If the exponent is not integer valued, check if it is half of an integer.
3180 In this case we can expand to sqrt (x) * x**(n/2). */
3181 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3182 if (fn != NULL_TREE)
3184 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3185 n = real_to_integer (&c2);
3186 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3187 if (real_identical (&c2, &cint)
3188 && ((flag_unsafe_math_optimizations
3189 && optimize_insn_for_speed_p ()
3190 && powi_cost (n/2) <= POWI_MAX_MULTS)
3191 /* Even the c == 0.5 case cannot be done unconditionally
3192 when we need to preserve signed zeros, as
3193 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3194 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3195 /* For c == 1.5 we can assume that x * sqrt (x) is always
3196 smaller than pow (x, 1.5) if sqrt will not be expanded
3199 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3201 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3203 /* Use expand_expr in case the newly built call expression
3204 was folded to a non-call. */
3205 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3208 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3209 op2 = force_reg (mode, op2);
3210 op2 = expand_powi (op2, mode, abs (n / 2));
3211 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3212 0, OPTAB_LIB_WIDEN);
3213 /* If the original exponent was negative, reciprocate the
3216 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3217 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3223 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3225 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3230 /* Try if the exponent is a third of an integer. In this case
3231 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3232 different from pow (x, 1./3.) due to rounding and behavior
3233 with negative x we need to constrain this transformation to
3234 unsafe math and positive x or finite math. */
3235 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3237 && flag_unsafe_math_optimizations
3238 && (tree_expr_nonnegative_p (arg0)
3239 || !HONOR_NANS (mode)))
3241 REAL_VALUE_TYPE dconst3;
3242 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3243 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3244 real_round (&c2, mode, &c2);
3245 n = real_to_integer (&c2);
3246 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3247 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3248 real_convert (&c2, mode, &c2);
3249 if (real_identical (&c2, &c)
3250 && ((optimize_insn_for_speed_p ()
3251 && powi_cost (n/3) <= POWI_MAX_MULTS)
3254 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3256 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3257 if (abs (n) % 3 == 2)
3258 op = expand_simple_binop (mode, MULT, op, op, op,
3259 0, OPTAB_LIB_WIDEN);
3262 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3263 op2 = force_reg (mode, op2);
3264 op2 = expand_powi (op2, mode, abs (n / 3));
3265 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3266 0, OPTAB_LIB_WIDEN);
3267 /* If the original exponent was negative, reciprocate the
3270 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3271 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3277 /* Fall back to optab expansion. */
3278 return expand_builtin_mathfn_2 (exp, target, subtarget);
3281 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3282 a normal call should be emitted rather than expanding the function
3283 in-line. EXP is the expression that is a call to the builtin
3284 function; if convenient, the result should be placed in TARGET. */
3287 expand_builtin_powi (tree exp, rtx target)
3291 enum machine_mode mode;
3292 enum machine_mode mode2;
3294 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3297 arg0 = CALL_EXPR_ARG (exp, 0);
3298 arg1 = CALL_EXPR_ARG (exp, 1);
3299 mode = TYPE_MODE (TREE_TYPE (exp));
3301 /* Handle constant power. */
3303 if (TREE_CODE (arg1) == INTEGER_CST
3304 && !TREE_OVERFLOW (arg1))
3306 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3308 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3309 Otherwise, check the number of multiplications required. */
3310 if ((TREE_INT_CST_HIGH (arg1) == 0
3311 || TREE_INT_CST_HIGH (arg1) == -1)
3312 && ((n >= -1 && n <= 2)
3313 || (optimize_insn_for_speed_p ()
3314 && powi_cost (n) <= POWI_MAX_MULTS)))
3316 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3317 op0 = force_reg (mode, op0);
3318 return expand_powi (op0, mode, n);
3322 /* Emit a libcall to libgcc. */
3324 /* Mode of the 2nd argument must match that of an int. */
3325 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3327 if (target == NULL_RTX)
3328 target = gen_reg_rtx (mode);
3330 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3331 if (GET_MODE (op0) != mode)
3332 op0 = convert_to_mode (mode, op0, 0);
3333 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3334 if (GET_MODE (op1) != mode2)
3335 op1 = convert_to_mode (mode2, op1, 0);
3337 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3338 target, LCT_CONST, mode, 2,
3339 op0, mode, op1, mode2);
3344 /* Expand expression EXP which is a call to the strlen builtin. Return
3345 NULL_RTX if we failed the caller should emit a normal call, otherwise
3346 try to get the result in TARGET, if convenient. */
3349 expand_builtin_strlen (tree exp, rtx target,
3350 enum machine_mode target_mode)
3352 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3358 tree src = CALL_EXPR_ARG (exp, 0);
3359 rtx result, src_reg, char_rtx, before_strlen;
3360 enum machine_mode insn_mode = target_mode, char_mode;
3361 enum insn_code icode = CODE_FOR_nothing;
3364 /* If the length can be computed at compile-time, return it. */
3365 len = c_strlen (src, 0);
3367 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3369 /* If the length can be computed at compile-time and is constant
3370 integer, but there are side-effects in src, evaluate
3371 src for side-effects, then return len.
3372 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3373 can be optimized into: i++; x = 3; */
3374 len = c_strlen (src, 1);
3375 if (len && TREE_CODE (len) == INTEGER_CST)
3377 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3378 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3381 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3383 /* If SRC is not a pointer type, don't do this operation inline. */
3387 /* Bail out if we can't compute strlen in the right mode. */
3388 while (insn_mode != VOIDmode)
3390 icode = optab_handler (strlen_optab, insn_mode);
3391 if (icode != CODE_FOR_nothing)
3394 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3396 if (insn_mode == VOIDmode)
3399 /* Make a place to write the result of the instruction. */
3403 && GET_MODE (result) == insn_mode
3404 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3405 result = gen_reg_rtx (insn_mode);
3407 /* Make a place to hold the source address. We will not expand
3408 the actual source until we are sure that the expansion will
3409 not fail -- there are trees that cannot be expanded twice. */
3410 src_reg = gen_reg_rtx (Pmode);
3412 /* Mark the beginning of the strlen sequence so we can emit the
3413 source operand later. */
3414 before_strlen = get_last_insn ();
3416 char_rtx = const0_rtx;
3417 char_mode = insn_data[(int) icode].operand[2].mode;
3418 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3420 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3422 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3423 char_rtx, GEN_INT (align));
3428 /* Now that we are assured of success, expand the source. */
3430 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3432 emit_move_insn (src_reg, pat);
3437 emit_insn_after (pat, before_strlen);
3439 emit_insn_before (pat, get_insns ());
3441 /* Return the value in the proper mode for this function. */
3442 if (GET_MODE (result) == target_mode)
3444 else if (target != 0)
3445 convert_move (target, result, 0);
3447 target = convert_to_mode (target_mode, result, 0);
3453 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3454 bytes from constant string DATA + OFFSET and return it as target
3458 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3459 enum machine_mode mode)
3461 const char *str = (const char *) data;
3463 gcc_assert (offset >= 0
3464 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3465 <= strlen (str) + 1));
3467 return c_readstr (str + offset, mode);
3470 /* Expand a call EXP to the memcpy builtin.
3471 Return NULL_RTX if we failed, the caller should emit a normal call,
3472 otherwise try to get the result in TARGET, if convenient (and in
3473 mode MODE if that's convenient). */
3476 expand_builtin_memcpy (tree exp, rtx target)
3478 if (!validate_arglist (exp,
3479 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3483 tree dest = CALL_EXPR_ARG (exp, 0);
3484 tree src = CALL_EXPR_ARG (exp, 1);
3485 tree len = CALL_EXPR_ARG (exp, 2);
3486 const char *src_str;
3487 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3488 unsigned int dest_align
3489 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3490 rtx dest_mem, src_mem, dest_addr, len_rtx;
3491 HOST_WIDE_INT expected_size = -1;
3492 unsigned int expected_align = 0;
3494 /* If DEST is not a pointer type, call the normal function. */
3495 if (dest_align == 0)
3498 /* If either SRC is not a pointer type, don't do this
3499 operation in-line. */
3503 if (currently_expanding_gimple_stmt)
3504 stringop_block_profile (currently_expanding_gimple_stmt,
3505 &expected_align, &expected_size);
3507 if (expected_align < dest_align)
3508 expected_align = dest_align;
3509 dest_mem = get_memory_rtx (dest, len);
3510 set_mem_align (dest_mem, dest_align);
3511 len_rtx = expand_normal (len);
3512 src_str = c_getstr (src);
3514 /* If SRC is a string constant and block move would be done
3515 by pieces, we can avoid loading the string from memory
3516 and only stored the computed constants. */
3518 && CONST_INT_P (len_rtx)
3519 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3520 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3521 CONST_CAST (char *, src_str),
3524 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3525 builtin_memcpy_read_str,
3526 CONST_CAST (char *, src_str),
3527 dest_align, false, 0);
3528 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3529 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3533 src_mem = get_memory_rtx (src, len);
3534 set_mem_align (src_mem, src_align);
3536 /* Copy word part most expediently. */
3537 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3538 CALL_EXPR_TAILCALL (exp)
3539 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3540 expected_align, expected_size);
3544 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3545 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3551 /* Expand a call EXP to the mempcpy builtin.
3552 Return NULL_RTX if we failed; the caller should emit a normal call,
3553 otherwise try to get the result in TARGET, if convenient (and in
3554 mode MODE if that's convenient). If ENDP is 0 return the
3555 destination pointer, if ENDP is 1 return the end pointer ala
3556 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3560 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3562 if (!validate_arglist (exp,
3563 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3567 tree dest = CALL_EXPR_ARG (exp, 0);
3568 tree src = CALL_EXPR_ARG (exp, 1);
3569 tree len = CALL_EXPR_ARG (exp, 2);
3570 return expand_builtin_mempcpy_args (dest, src, len,
3571 target, mode, /*endp=*/ 1);
3575 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3576 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3577 so that this can also be called without constructing an actual CALL_EXPR.
3578 The other arguments and return value are the same as for
3579 expand_builtin_mempcpy. */
3582 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3583 rtx target, enum machine_mode mode, int endp)
3585 /* If return value is ignored, transform mempcpy into memcpy. */
3586 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3588 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3589 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3591 return expand_expr (result, target, mode, EXPAND_NORMAL);
3595 const char *src_str;
3596 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3597 unsigned int dest_align
3598 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3599 rtx dest_mem, src_mem, len_rtx;
3601 /* If either SRC or DEST is not a pointer type, don't do this
3602 operation in-line. */
3603 if (dest_align == 0 || src_align == 0)
3606 /* If LEN is not constant, call the normal function. */
3607 if (! host_integerp (len, 1))
3610 len_rtx = expand_normal (len);
3611 src_str = c_getstr (src);
3613 /* If SRC is a string constant and block move would be done
3614 by pieces, we can avoid loading the string from memory
3615 and only stored the computed constants. */
3617 && CONST_INT_P (len_rtx)
3618 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3619 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3620 CONST_CAST (char *, src_str),
3623 dest_mem = get_memory_rtx (dest, len);
3624 set_mem_align (dest_mem, dest_align);
3625 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3626 builtin_memcpy_read_str,
3627 CONST_CAST (char *, src_str),
3628 dest_align, false, endp);
3629 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3630 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3634 if (CONST_INT_P (len_rtx)
3635 && can_move_by_pieces (INTVAL (len_rtx),
3636 MIN (dest_align, src_align)))
3638 dest_mem = get_memory_rtx (dest, len);
3639 set_mem_align (dest_mem, dest_align);
3640 src_mem = get_memory_rtx (src, len);
3641 set_mem_align (src_mem, src_align);
3642 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3643 MIN (dest_align, src_align), endp);
3644 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3645 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3654 # define HAVE_movstr 0
3655 # define CODE_FOR_movstr CODE_FOR_nothing
3658 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3659 we failed, the caller should emit a normal call, otherwise try to
3660 get the result in TARGET, if convenient. If ENDP is 0 return the
3661 destination pointer, if ENDP is 1 return the end pointer ala
3662 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3666 expand_movstr (tree dest, tree src, rtx target, int endp)
3672 const struct insn_data_d * data;
3677 dest_mem = get_memory_rtx (dest, NULL);
3678 src_mem = get_memory_rtx (src, NULL);
3679 data = insn_data + CODE_FOR_movstr;
3682 target = force_reg (Pmode, XEXP (dest_mem, 0));
3683 dest_mem = replace_equiv_address (dest_mem, target);
3684 end = gen_reg_rtx (Pmode);
3689 || target == const0_rtx
3690 || ! (*data->operand[0].predicate) (target, Pmode))
3692 end = gen_reg_rtx (Pmode);
3693 if (target != const0_rtx)
3700 if (data->operand[0].mode != VOIDmode)
3701 end = gen_lowpart (data->operand[0].mode, end);
3703 insn = data->genfun (end, dest_mem, src_mem);
3709 /* movstr is supposed to set end to the address of the NUL
3710 terminator. If the caller requested a mempcpy-like return value,
3712 if (endp == 1 && target != const0_rtx)
3714 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3715 emit_move_insn (target, force_operand (tem, NULL_RTX));
3721 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3722 NULL_RTX if we failed the caller should emit a normal call, otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3727 expand_builtin_strcpy (tree exp, rtx target)
3729 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3731 tree dest = CALL_EXPR_ARG (exp, 0);
3732 tree src = CALL_EXPR_ARG (exp, 1);
3733 return expand_builtin_strcpy_args (dest, src, target);
3738 /* Helper function to do the actual work for expand_builtin_strcpy. The
3739 arguments to the builtin_strcpy call DEST and SRC are broken out
3740 so that this can also be called without constructing an actual CALL_EXPR.
3741 The other arguments and return value are the same as for
3742 expand_builtin_strcpy. */
3745 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3747 return expand_movstr (dest, src, target, /*endp=*/0);
3750 /* Expand a call EXP to the stpcpy builtin.
3751 Return NULL_RTX if we failed the caller should emit a normal call,
3752 otherwise try to get the result in TARGET, if convenient (and in
3753 mode MODE if that's convenient). */
3756 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3759 location_t loc = EXPR_LOCATION (exp);
3761 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3764 dst = CALL_EXPR_ARG (exp, 0);
3765 src = CALL_EXPR_ARG (exp, 1);
3767 /* If return value is ignored, transform stpcpy into strcpy. */
3768 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3770 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3771 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3772 return expand_expr (result, target, mode, EXPAND_NORMAL);
3779 /* Ensure we get an actual string whose length can be evaluated at
3780 compile-time, not an expression containing a string. This is
3781 because the latter will potentially produce pessimized code
3782 when used to produce the return value. */
3783 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3784 return expand_movstr (dst, src, target, /*endp=*/2);
3786 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3787 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3788 target, mode, /*endp=*/2);
3793 if (TREE_CODE (len) == INTEGER_CST)
3795 rtx len_rtx = expand_normal (len);
3797 if (CONST_INT_P (len_rtx))
3799 ret = expand_builtin_strcpy_args (dst, src, target);
3805 if (mode != VOIDmode)
3806 target = gen_reg_rtx (mode);
3808 target = gen_reg_rtx (GET_MODE (ret));
3810 if (GET_MODE (target) != GET_MODE (ret))
3811 ret = gen_lowpart (GET_MODE (target), ret);
3813 ret = plus_constant (ret, INTVAL (len_rtx));
3814 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3822 return expand_movstr (dst, src, target, /*endp=*/2);
3826 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3827 bytes from constant string DATA + OFFSET and return it as target
3831 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3832 enum machine_mode mode)
3834 const char *str = (const char *) data;
3836 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3839 return c_readstr (str + offset, mode);
3842 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3843 NULL_RTX if we failed the caller should emit a normal call. */
3846 expand_builtin_strncpy (tree exp, rtx target)
3848 location_t loc = EXPR_LOCATION (exp);
3850 if (validate_arglist (exp,
3851 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3853 tree dest = CALL_EXPR_ARG (exp, 0);
3854 tree src = CALL_EXPR_ARG (exp, 1);
3855 tree len = CALL_EXPR_ARG (exp, 2);
3856 tree slen = c_strlen (src, 1);
3858 /* We must be passed a constant len and src parameter. */
3859 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3862 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3864 /* We're required to pad with trailing zeros if the requested
3865 len is greater than strlen(s2)+1. In that case try to
3866 use store_by_pieces, if it fails, punt. */
3867 if (tree_int_cst_lt (slen, len))
3869 unsigned int dest_align
3870 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3871 const char *p = c_getstr (src);
3874 if (!p || dest_align == 0 || !host_integerp (len, 1)
3875 || !can_store_by_pieces (tree_low_cst (len, 1),
3876 builtin_strncpy_read_str,
3877 CONST_CAST (char *, p),
3881 dest_mem = get_memory_rtx (dest, len);
3882 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3883 builtin_strncpy_read_str,
3884 CONST_CAST (char *, p), dest_align, false, 0);
3885 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3886 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3893 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3894 bytes from constant string DATA + OFFSET and return it as target
3898 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3899 enum machine_mode mode)
3901 const char *c = (const char *) data;
3902 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3904 memset (p, *c, GET_MODE_SIZE (mode));
3906 return c_readstr (p, mode);
3909 /* Callback routine for store_by_pieces. Return the RTL of a register
3910 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3911 char value given in the RTL register data. For example, if mode is
3912 4 bytes wide, return the RTL for 0x01010101*data. */
3915 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3916 enum machine_mode mode)
3922 size = GET_MODE_SIZE (mode);
3926 p = XALLOCAVEC (char, size);
3927 memset (p, 1, size);
3928 coeff = c_readstr (p, mode);
3930 target = convert_to_mode (mode, (rtx) data, 1);
3931 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3932 return force_reg (mode, target);
3935 /* Expand expression EXP, which is a call to the memset builtin. Return
3936 NULL_RTX if we failed the caller should emit a normal call, otherwise
3937 try to get the result in TARGET, if convenient (and in mode MODE if that's
3941 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3943 if (!validate_arglist (exp,
3944 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3948 tree dest = CALL_EXPR_ARG (exp, 0);
3949 tree val = CALL_EXPR_ARG (exp, 1);
3950 tree len = CALL_EXPR_ARG (exp, 2);
3951 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3955 /* Helper function to do the actual work for expand_builtin_memset. The
3956 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3957 so that this can also be called without constructing an actual CALL_EXPR.
3958 The other arguments and return value are the same as for
3959 expand_builtin_memset. */
3962 expand_builtin_memset_args (tree dest, tree val, tree len,
3963 rtx target, enum machine_mode mode, tree orig_exp)
3966 enum built_in_function fcode;
3968 unsigned int dest_align;
3969 rtx dest_mem, dest_addr, len_rtx;
3970 HOST_WIDE_INT expected_size = -1;
3971 unsigned int expected_align = 0;
3973 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3975 /* If DEST is not a pointer type, don't do this operation in-line. */
3976 if (dest_align == 0)
3979 if (currently_expanding_gimple_stmt)
3980 stringop_block_profile (currently_expanding_gimple_stmt,
3981 &expected_align, &expected_size);
3983 if (expected_align < dest_align)
3984 expected_align = dest_align;
3986 /* If the LEN parameter is zero, return DEST. */
3987 if (integer_zerop (len))
3989 /* Evaluate and ignore VAL in case it has side-effects. */
3990 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3991 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3994 /* Stabilize the arguments in case we fail. */
3995 dest = builtin_save_expr (dest);
3996 val = builtin_save_expr (val);
3997 len = builtin_save_expr (len);
3999 len_rtx = expand_normal (len);
4000 dest_mem = get_memory_rtx (dest, len);
4002 if (TREE_CODE (val) != INTEGER_CST)
4006 val_rtx = expand_normal (val);
4007 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4010 /* Assume that we can memset by pieces if we can store
4011 * the coefficients by pieces (in the required modes).
4012 * We can't pass builtin_memset_gen_str as that emits RTL. */
4014 if (host_integerp (len, 1)
4015 && can_store_by_pieces (tree_low_cst (len, 1),
4016 builtin_memset_read_str, &c, dest_align,
4019 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4021 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4022 builtin_memset_gen_str, val_rtx, dest_align,
4025 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4026 dest_align, expected_align,
4030 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4031 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4035 if (target_char_cast (val, &c))
4040 if (host_integerp (len, 1)
4041 && can_store_by_pieces (tree_low_cst (len, 1),
4042 builtin_memset_read_str, &c, dest_align,
4044 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4045 builtin_memset_read_str, &c, dest_align, true, 0);
4046 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4047 dest_align, expected_align,
4051 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4052 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4056 set_mem_align (dest_mem, dest_align);
4057 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4058 CALL_EXPR_TAILCALL (orig_exp)
4059 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4060 expected_align, expected_size);
4064 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4065 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4071 fndecl = get_callee_fndecl (orig_exp);
4072 fcode = DECL_FUNCTION_CODE (fndecl);
4073 if (fcode == BUILT_IN_MEMSET)
4074 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4076 else if (fcode == BUILT_IN_BZERO)
4077 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4081 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4082 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4083 return expand_call (fn, target, target == const0_rtx);
4086 /* Expand expression EXP, which is a call to the bzero builtin. Return
4087 NULL_RTX if we failed the caller should emit a normal call. */
4090 expand_builtin_bzero (tree exp)
4093 location_t loc = EXPR_LOCATION (exp);
4095 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4098 dest = CALL_EXPR_ARG (exp, 0);
4099 size = CALL_EXPR_ARG (exp, 1);
4101 /* New argument list transforming bzero(ptr x, int y) to
4102 memset(ptr x, int 0, size_t y). This is done this way
4103 so that if it isn't expanded inline, we fallback to
4104 calling bzero instead of memset. */
4106 return expand_builtin_memset_args (dest, integer_zero_node,
4107 fold_convert_loc (loc, sizetype, size),
4108 const0_rtx, VOIDmode, exp);
4111 /* Expand expression EXP, which is a call to the memcmp built-in function.
4112 Return NULL_RTX if we failed and the
4113 caller should emit a normal call, otherwise try to get the result in
4114 TARGET, if convenient (and in mode MODE, if that's convenient). */
4117 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4118 ATTRIBUTE_UNUSED enum machine_mode mode)
4120 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4122 if (!validate_arglist (exp,
4123 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4126 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4128 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4131 tree arg1 = CALL_EXPR_ARG (exp, 0);
4132 tree arg2 = CALL_EXPR_ARG (exp, 1);
4133 tree len = CALL_EXPR_ARG (exp, 2);
4135 unsigned int arg1_align
4136 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4137 unsigned int arg2_align
4138 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4139 enum machine_mode insn_mode;
4141 #ifdef HAVE_cmpmemsi
4143 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4146 #ifdef HAVE_cmpstrnsi
4148 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4153 /* If we don't have POINTER_TYPE, call the function. */
4154 if (arg1_align == 0 || arg2_align == 0)
4157 /* Make a place to write the result of the instruction. */
4160 && REG_P (result) && GET_MODE (result) == insn_mode
4161 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4162 result = gen_reg_rtx (insn_mode);
4164 arg1_rtx = get_memory_rtx (arg1, len);
4165 arg2_rtx = get_memory_rtx (arg2, len);
4166 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4168 /* Set MEM_SIZE as appropriate. */
4169 if (CONST_INT_P (arg3_rtx))
4171 set_mem_size (arg1_rtx, arg3_rtx);
4172 set_mem_size (arg2_rtx, arg3_rtx);
4175 #ifdef HAVE_cmpmemsi
4177 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4178 GEN_INT (MIN (arg1_align, arg2_align)));
4181 #ifdef HAVE_cmpstrnsi
4183 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4184 GEN_INT (MIN (arg1_align, arg2_align)));
4192 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4193 TYPE_MODE (integer_type_node), 3,
4194 XEXP (arg1_rtx, 0), Pmode,
4195 XEXP (arg2_rtx, 0), Pmode,
4196 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4197 TYPE_UNSIGNED (sizetype)),
4198 TYPE_MODE (sizetype));
4200 /* Return the value in the proper mode for this function. */
4201 mode = TYPE_MODE (TREE_TYPE (exp));
4202 if (GET_MODE (result) == mode)
4204 else if (target != 0)
4206 convert_move (target, result, 0);
4210 return convert_to_mode (mode, result, 0);
4217 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4218 if we failed the caller should emit a normal call, otherwise try to get
4219 the result in TARGET, if convenient. */
4222 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4224 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4227 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4228 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4229 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4231 rtx arg1_rtx, arg2_rtx;
4232 rtx result, insn = NULL_RTX;
4234 tree arg1 = CALL_EXPR_ARG (exp, 0);
4235 tree arg2 = CALL_EXPR_ARG (exp, 1);
4237 unsigned int arg1_align
4238 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4239 unsigned int arg2_align
4240 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4242 /* If we don't have POINTER_TYPE, call the function. */
4243 if (arg1_align == 0 || arg2_align == 0)
4246 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4247 arg1 = builtin_save_expr (arg1);
4248 arg2 = builtin_save_expr (arg2);
4250 arg1_rtx = get_memory_rtx (arg1, NULL);
4251 arg2_rtx = get_memory_rtx (arg2, NULL);
4253 #ifdef HAVE_cmpstrsi
4254 /* Try to call cmpstrsi. */
4257 enum machine_mode insn_mode
4258 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4260 /* Make a place to write the result of the instruction. */
4263 && REG_P (result) && GET_MODE (result) == insn_mode
4264 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4265 result = gen_reg_rtx (insn_mode);
4267 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4268 GEN_INT (MIN (arg1_align, arg2_align)));
4271 #ifdef HAVE_cmpstrnsi
4272 /* Try to determine at least one length and call cmpstrnsi. */
4273 if (!insn && HAVE_cmpstrnsi)
4278 enum machine_mode insn_mode
4279 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4280 tree len1 = c_strlen (arg1, 1);
4281 tree len2 = c_strlen (arg2, 1);
4284 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4286 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4288 /* If we don't have a constant length for the first, use the length
4289 of the second, if we know it. We don't require a constant for
4290 this case; some cost analysis could be done if both are available
4291 but neither is constant. For now, assume they're equally cheap,
4292 unless one has side effects. If both strings have constant lengths,
4299 else if (TREE_SIDE_EFFECTS (len1))
4301 else if (TREE_SIDE_EFFECTS (len2))
4303 else if (TREE_CODE (len1) != INTEGER_CST)
4305 else if (TREE_CODE (len2) != INTEGER_CST)
4307 else if (tree_int_cst_lt (len1, len2))
4312 /* If both arguments have side effects, we cannot optimize. */
4313 if (!len || TREE_SIDE_EFFECTS (len))
4316 arg3_rtx = expand_normal (len);
4318 /* Make a place to write the result of the instruction. */
4321 && REG_P (result) && GET_MODE (result) == insn_mode
4322 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4323 result = gen_reg_rtx (insn_mode);
4325 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4326 GEN_INT (MIN (arg1_align, arg2_align)));
4332 enum machine_mode mode;
4335 /* Return the value in the proper mode for this function. */
4336 mode = TYPE_MODE (TREE_TYPE (exp));
4337 if (GET_MODE (result) == mode)
4340 return convert_to_mode (mode, result, 0);
4341 convert_move (target, result, 0);
4345 /* Expand the library call ourselves using a stabilized argument
4346 list to avoid re-evaluating the function's arguments twice. */
4347 #ifdef HAVE_cmpstrnsi
4350 fndecl = get_callee_fndecl (exp);
4351 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4352 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4353 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4354 return expand_call (fn, target, target == const0_rtx);
4360 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4361 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4362 the result in TARGET, if convenient. */
4365 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4366 ATTRIBUTE_UNUSED enum machine_mode mode)
4368 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4370 if (!validate_arglist (exp,
4371 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4374 /* If c_strlen can determine an expression for one of the string
4375 lengths, and it doesn't have side effects, then emit cmpstrnsi
4376 using length MIN(strlen(string)+1, arg3). */
4377 #ifdef HAVE_cmpstrnsi
4380 tree len, len1, len2;
4381 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4384 tree arg1 = CALL_EXPR_ARG (exp, 0);
4385 tree arg2 = CALL_EXPR_ARG (exp, 1);
4386 tree arg3 = CALL_EXPR_ARG (exp, 2);
4388 unsigned int arg1_align
4389 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4390 unsigned int arg2_align
4391 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4392 enum machine_mode insn_mode
4393 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4395 len1 = c_strlen (arg1, 1);
4396 len2 = c_strlen (arg2, 1);
4399 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4401 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4403 /* If we don't have a constant length for the first, use the length
4404 of the second, if we know it. We don't require a constant for
4405 this case; some cost analysis could be done if both are available
4406 but neither is constant. For now, assume they're equally cheap,
4407 unless one has side effects. If both strings have constant lengths,
4414 else if (TREE_SIDE_EFFECTS (len1))
4416 else if (TREE_SIDE_EFFECTS (len2))
4418 else if (TREE_CODE (len1) != INTEGER_CST)
4420 else if (TREE_CODE (len2) != INTEGER_CST)
4422 else if (tree_int_cst_lt (len1, len2))
4427 /* If both arguments have side effects, we cannot optimize. */
4428 if (!len || TREE_SIDE_EFFECTS (len))
4431 /* The actual new length parameter is MIN(len,arg3). */
4432 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4433 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4435 /* If we don't have POINTER_TYPE, call the function. */
4436 if (arg1_align == 0 || arg2_align == 0)
4439 /* Make a place to write the result of the instruction. */
4442 && REG_P (result) && GET_MODE (result) == insn_mode
4443 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4444 result = gen_reg_rtx (insn_mode);
4446 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4447 arg1 = builtin_save_expr (arg1);
4448 arg2 = builtin_save_expr (arg2);
4449 len = builtin_save_expr (len);
4451 arg1_rtx = get_memory_rtx (arg1, len);
4452 arg2_rtx = get_memory_rtx (arg2, len);
4453 arg3_rtx = expand_normal (len);
4454 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4455 GEN_INT (MIN (arg1_align, arg2_align)));
4460 /* Return the value in the proper mode for this function. */
4461 mode = TYPE_MODE (TREE_TYPE (exp));
4462 if (GET_MODE (result) == mode)
4465 return convert_to_mode (mode, result, 0);
4466 convert_move (target, result, 0);
4470 /* Expand the library call ourselves using a stabilized argument
4471 list to avoid re-evaluating the function's arguments twice. */
4472 fndecl = get_callee_fndecl (exp);
4473 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4475 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4476 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4477 return expand_call (fn, target, target == const0_rtx);
4483 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4484 if that's convenient. */
4487 expand_builtin_saveregs (void)
4491 /* Don't do __builtin_saveregs more than once in a function.
4492 Save the result of the first call and reuse it. */
4493 if (saveregs_value != 0)
4494 return saveregs_value;
4496 /* When this function is called, it means that registers must be
4497 saved on entry to this function. So we migrate the call to the
4498 first insn of this function. */
4502 /* Do whatever the machine needs done in this case. */
4503 val = targetm.calls.expand_builtin_saveregs ();
4508 saveregs_value = val;
4510 /* Put the insns after the NOTE that starts the function. If this
4511 is inside a start_sequence, make the outer-level insn chain current, so
4512 the code is placed at the start of the function. */
4513 push_topmost_sequence ();
4514 emit_insn_after (seq, entry_of_function ());
4515 pop_topmost_sequence ();
4520 /* Expand a call to __builtin_next_arg. */
4523 expand_builtin_next_arg (void)
4525 /* Checking arguments is already done in fold_builtin_next_arg
4526 that must be called before this function. */
4527 return expand_binop (ptr_mode, add_optab,
4528 crtl->args.internal_arg_pointer,
4529 crtl->args.arg_offset_rtx,
4530 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4533 /* Make it easier for the backends by protecting the valist argument
4534 from multiple evaluations. */
4537 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4539 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4541 /* The current way of determining the type of valist is completely
4542 bogus. We should have the information on the va builtin instead. */
4544 vatype = targetm.fn_abi_va_list (cfun->decl);
4546 if (TREE_CODE (vatype) == ARRAY_TYPE)
4548 if (TREE_SIDE_EFFECTS (valist))
4549 valist = save_expr (valist);
4551 /* For this case, the backends will be expecting a pointer to
4552 vatype, but it's possible we've actually been given an array
4553 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4555 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4557 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4558 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4563 tree pt = build_pointer_type (vatype);
4567 if (! TREE_SIDE_EFFECTS (valist))
4570 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4571 TREE_SIDE_EFFECTS (valist) = 1;
4574 if (TREE_SIDE_EFFECTS (valist))
4575 valist = save_expr (valist);
4576 valist = fold_build2_loc (loc, MEM_REF,
4577 vatype, valist, build_int_cst (pt, 0));
4583 /* The "standard" definition of va_list is void*. */
4586 std_build_builtin_va_list (void)
4588 return ptr_type_node;
4591 /* The "standard" abi va_list is va_list_type_node. */
4594 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4596 return va_list_type_node;
4599 /* The "standard" type of va_list is va_list_type_node. */
4602 std_canonical_va_list_type (tree type)
4606 if (INDIRECT_REF_P (type))
4607 type = TREE_TYPE (type);
4608 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4609 type = TREE_TYPE (type);
4610 wtype = va_list_type_node;
4612 /* Treat structure va_list types. */
4613 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4614 htype = TREE_TYPE (htype);
4615 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4617 /* If va_list is an array type, the argument may have decayed
4618 to a pointer type, e.g. by being passed to another function.
4619 In that case, unwrap both types so that we can compare the
4620 underlying records. */
4621 if (TREE_CODE (htype) == ARRAY_TYPE
4622 || POINTER_TYPE_P (htype))
4624 wtype = TREE_TYPE (wtype);
4625 htype = TREE_TYPE (htype);
4628 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4629 return va_list_type_node;
4634 /* The "standard" implementation of va_start: just assign `nextarg' to
4638 std_expand_builtin_va_start (tree valist, rtx nextarg)
4640 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4641 convert_move (va_r, nextarg, 0);
4644 /* Expand EXP, a call to __builtin_va_start. */
4647 expand_builtin_va_start (tree exp)
4651 location_t loc = EXPR_LOCATION (exp);
4653 if (call_expr_nargs (exp) < 2)
4655 error_at (loc, "too few arguments to function %<va_start%>");
4659 if (fold_builtin_next_arg (exp, true))
4662 nextarg = expand_builtin_next_arg ();
4663 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4665 if (targetm.expand_builtin_va_start)
4666 targetm.expand_builtin_va_start (valist, nextarg);
4668 std_expand_builtin_va_start (valist, nextarg);
4673 /* The "standard" implementation of va_arg: read the value from the
4674 current (padded) address and increment by the (padded) size. */
4677 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4680 tree addr, t, type_size, rounded_size, valist_tmp;
4681 unsigned HOST_WIDE_INT align, boundary;
4684 #ifdef ARGS_GROW_DOWNWARD
4685 /* All of the alignment and movement below is for args-grow-up machines.
4686 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4687 implement their own specialized gimplify_va_arg_expr routines. */
4691 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4693 type = build_pointer_type (type);
4695 align = PARM_BOUNDARY / BITS_PER_UNIT;
4696 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4698 /* When we align parameter on stack for caller, if the parameter
4699 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4700 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4701 here with caller. */
4702 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4703 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4705 boundary /= BITS_PER_UNIT;
4707 /* Hoist the valist value into a temporary for the moment. */
4708 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4710 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4711 requires greater alignment, we must perform dynamic alignment. */
4712 if (boundary > align
4713 && !integer_zerop (TYPE_SIZE (type)))
4715 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4716 fold_build2 (POINTER_PLUS_EXPR,
4718 valist_tmp, size_int (boundary - 1)));
4719 gimplify_and_add (t, pre_p);
4721 t = fold_convert (sizetype, valist_tmp);
4722 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4723 fold_convert (TREE_TYPE (valist),
4724 fold_build2 (BIT_AND_EXPR, sizetype, t,
4725 size_int (-boundary))));
4726 gimplify_and_add (t, pre_p);
4731 /* If the actual alignment is less than the alignment of the type,
4732 adjust the type accordingly so that we don't assume strict alignment
4733 when dereferencing the pointer. */
4734 boundary *= BITS_PER_UNIT;
4735 if (boundary < TYPE_ALIGN (type))
4737 type = build_variant_type_copy (type);
4738 TYPE_ALIGN (type) = boundary;
4741 /* Compute the rounded size of the type. */
4742 type_size = size_in_bytes (type);
4743 rounded_size = round_up (type_size, align);
4745 /* Reduce rounded_size so it's sharable with the postqueue. */
4746 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4750 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4752 /* Small args are padded downward. */
4753 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4754 rounded_size, size_int (align));
4755 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4756 size_binop (MINUS_EXPR, rounded_size, type_size));
4757 addr = fold_build2 (POINTER_PLUS_EXPR,
4758 TREE_TYPE (addr), addr, t);
4761 /* Compute new value for AP. */
4762 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4763 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4764 gimplify_and_add (t, pre_p);
4766 addr = fold_convert (build_pointer_type (type), addr);
4769 addr = build_va_arg_indirect_ref (addr);
4771 return build_va_arg_indirect_ref (addr);
4774 /* Build an indirect-ref expression over the given TREE, which represents a
4775 piece of a va_arg() expansion. */
4777 build_va_arg_indirect_ref (tree addr)
4779 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4781 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4787 /* Return a dummy expression of type TYPE in order to keep going after an
4791 dummy_object (tree type)
4793 tree t = build_int_cst (build_pointer_type (type), 0);
4794 return build2 (MEM_REF, type, t, t);
4797 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4798 builtin function, but a very special sort of operator. */
4800 enum gimplify_status
4801 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4803 tree promoted_type, have_va_type;
4804 tree valist = TREE_OPERAND (*expr_p, 0);
4805 tree type = TREE_TYPE (*expr_p);
4807 location_t loc = EXPR_LOCATION (*expr_p);
4809 /* Verify that valist is of the proper type. */
4810 have_va_type = TREE_TYPE (valist);
4811 if (have_va_type == error_mark_node)
4813 have_va_type = targetm.canonical_va_list_type (have_va_type);
4815 if (have_va_type == NULL_TREE)
4817 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4821 /* Generate a diagnostic for requesting data of a type that cannot
4822 be passed through `...' due to type promotion at the call site. */
4823 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4826 static bool gave_help;
4829 /* Unfortunately, this is merely undefined, rather than a constraint
4830 violation, so we cannot make this an error. If this call is never
4831 executed, the program is still strictly conforming. */
4832 warned = warning_at (loc, 0,
4833 "%qT is promoted to %qT when passed through %<...%>",
4834 type, promoted_type);
4835 if (!gave_help && warned)
4838 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4839 promoted_type, type);
4842 /* We can, however, treat "undefined" any way we please.
4843 Call abort to encourage the user to fix the program. */
4845 inform (loc, "if this code is reached, the program will abort");
4846 /* Before the abort, allow the evaluation of the va_list
4847 expression to exit or longjmp. */
4848 gimplify_and_add (valist, pre_p);
4849 t = build_call_expr_loc (loc,
4850 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4851 gimplify_and_add (t, pre_p);
4853 /* This is dead code, but go ahead and finish so that the
4854 mode of the result comes out right. */
4855 *expr_p = dummy_object (type);
4860 /* Make it easier for the backends by protecting the valist argument
4861 from multiple evaluations. */
4862 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4864 /* For this case, the backends will be expecting a pointer to
4865 TREE_TYPE (abi), but it's possible we've
4866 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4868 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4870 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4871 valist = fold_convert_loc (loc, p1,
4872 build_fold_addr_expr_loc (loc, valist));
4875 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4878 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4880 if (!targetm.gimplify_va_arg_expr)
4881 /* FIXME: Once most targets are converted we should merely
4882 assert this is non-null. */
4885 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4890 /* Expand EXP, a call to __builtin_va_end. */
4893 expand_builtin_va_end (tree exp)
4895 tree valist = CALL_EXPR_ARG (exp, 0);
4897 /* Evaluate for side effects, if needed. I hate macros that don't
4899 if (TREE_SIDE_EFFECTS (valist))
4900 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4905 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4906 builtin rather than just as an assignment in stdarg.h because of the
4907 nastiness of array-type va_list types. */
4910 expand_builtin_va_copy (tree exp)
4913 location_t loc = EXPR_LOCATION (exp);
4915 dst = CALL_EXPR_ARG (exp, 0);
4916 src = CALL_EXPR_ARG (exp, 1);
4918 dst = stabilize_va_list_loc (loc, dst, 1);
4919 src = stabilize_va_list_loc (loc, src, 0);
4921 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4923 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4925 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4926 TREE_SIDE_EFFECTS (t) = 1;
4927 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4931 rtx dstb, srcb, size;
4933 /* Evaluate to pointers. */
4934 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4935 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4936 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4937 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4939 dstb = convert_memory_address (Pmode, dstb);
4940 srcb = convert_memory_address (Pmode, srcb);
4942 /* "Dereference" to BLKmode memories. */
4943 dstb = gen_rtx_MEM (BLKmode, dstb);
4944 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4945 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4946 srcb = gen_rtx_MEM (BLKmode, srcb);
4947 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4948 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4951 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4957 /* Expand a call to one of the builtin functions __builtin_frame_address or
4958 __builtin_return_address. */
4961 expand_builtin_frame_address (tree fndecl, tree exp)
4963 /* The argument must be a nonnegative integer constant.
4964 It counts the number of frames to scan up the stack.
4965 The value is the return address saved in that frame. */
4966 if (call_expr_nargs (exp) == 0)
4967 /* Warning about missing arg was already issued. */
4969 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4971 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4972 error ("invalid argument to %<__builtin_frame_address%>");
4974 error ("invalid argument to %<__builtin_return_address%>");
4980 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4981 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4983 /* Some ports cannot access arbitrary stack frames. */
4986 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4987 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4989 warning (0, "unsupported argument to %<__builtin_return_address%>");
4993 /* For __builtin_frame_address, return what we've got. */
4994 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4998 && ! CONSTANT_P (tem))
4999 tem = copy_to_mode_reg (Pmode, tem);
5004 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5005 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
5006 is the same as for allocate_dynamic_stack_space. */
5009 expand_builtin_alloca (tree exp, bool cannot_accumulate)
5014 /* Emit normal call if marked not-inlineable. */
5015 if (CALL_CANNOT_INLINE_P (exp))
5018 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5021 /* Compute the argument. */
5022 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5024 /* Allocate the desired space. */
5025 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
5027 result = convert_memory_address (ptr_mode, result);
5032 /* Expand a call to a bswap builtin with argument ARG0. MODE
5033 is the mode to expand with. */
5036 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5038 enum machine_mode mode;
5042 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5045 arg = CALL_EXPR_ARG (exp, 0);
5046 mode = TYPE_MODE (TREE_TYPE (arg));
5047 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5049 target = expand_unop (mode, bswap_optab, op0, target, 1);
5051 gcc_assert (target);
5053 return convert_to_mode (mode, target, 0);
5056 /* Expand a call to a unary builtin in EXP.
5057 Return NULL_RTX if a normal call should be emitted rather than expanding the
5058 function in-line. If convenient, the result should be placed in TARGET.
5059 SUBTARGET may be used as the target for computing one of EXP's operands. */
5062 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5063 rtx subtarget, optab op_optab)
5067 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5070 /* Compute the argument. */
5071 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5073 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5074 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5075 VOIDmode, EXPAND_NORMAL);
5076 /* Compute op, into TARGET if possible.
5077 Set TARGET to wherever the result comes back. */
5078 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5079 op_optab, op0, target, 1);
5080 gcc_assert (target);
5082 return convert_to_mode (target_mode, target, 0);
5085 /* Expand a call to __builtin_expect. We just return our argument
5086 as the builtin_expect semantic should've been already executed by
5087 tree branch prediction pass. */
5090 expand_builtin_expect (tree exp, rtx target)
5094 if (call_expr_nargs (exp) < 2)
5096 arg = CALL_EXPR_ARG (exp, 0);
5098 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5099 /* When guessing was done, the hints should be already stripped away. */
5100 gcc_assert (!flag_guess_branch_prob
5101 || optimize == 0 || seen_error ());
5106 expand_builtin_trap (void)
5110 emit_insn (gen_trap ());
5113 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5117 /* Expand a call to __builtin_unreachable. We do nothing except emit
5118 a barrier saying that control flow will not pass here.
5120 It is the responsibility of the program being compiled to ensure
5121 that control flow does never reach __builtin_unreachable. */
5123 expand_builtin_unreachable (void)
5128 /* Expand EXP, a call to fabs, fabsf or fabsl.
5129 Return NULL_RTX if a normal call should be emitted rather than expanding
5130 the function inline. If convenient, the result should be placed
5131 in TARGET. SUBTARGET may be used as the target for computing
5135 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5137 enum machine_mode mode;
5141 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5144 arg = CALL_EXPR_ARG (exp, 0);
5145 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5146 mode = TYPE_MODE (TREE_TYPE (arg));
5147 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5148 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5151 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5152 Return NULL is a normal call should be emitted rather than expanding the
5153 function inline. If convenient, the result should be placed in TARGET.
5154 SUBTARGET may be used as the target for computing the operand. */
5157 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5162 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5165 arg = CALL_EXPR_ARG (exp, 0);
5166 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5168 arg = CALL_EXPR_ARG (exp, 1);
5169 op1 = expand_normal (arg);
5171 return expand_copysign (op0, op1, target);
5174 /* Create a new constant string literal and return a char* pointer to it.
5175 The STRING_CST value is the LEN characters at STR. */
5177 build_string_literal (int len, const char *str)
5179 tree t, elem, index, type;
5181 t = build_string (len, str);
5182 elem = build_type_variant (char_type_node, 1, 0);
5183 index = build_index_type (size_int (len - 1));
5184 type = build_array_type (elem, index);
5185 TREE_TYPE (t) = type;
5186 TREE_CONSTANT (t) = 1;
5187 TREE_READONLY (t) = 1;
5188 TREE_STATIC (t) = 1;
5190 type = build_pointer_type (elem);
5191 t = build1 (ADDR_EXPR, type,
5192 build4 (ARRAY_REF, elem,
5193 t, integer_zero_node, NULL_TREE, NULL_TREE));
5197 /* Expand a call to either the entry or exit function profiler. */
5200 expand_builtin_profile_func (bool exitp)
5202 rtx this_rtx, which;
5204 this_rtx = DECL_RTL (current_function_decl);
5205 gcc_assert (MEM_P (this_rtx));
5206 this_rtx = XEXP (this_rtx, 0);
5209 which = profile_function_exit_libfunc;
5211 which = profile_function_entry_libfunc;
5213 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5214 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5221 /* Expand a call to __builtin___clear_cache. */
5224 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5226 #ifndef HAVE_clear_cache
5227 #ifdef CLEAR_INSN_CACHE
5228 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5229 does something. Just do the default expansion to a call to
5233 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5234 does nothing. There is no need to call it. Do nothing. */
5236 #endif /* CLEAR_INSN_CACHE */
5238 /* We have a "clear_cache" insn, and it will handle everything. */
5240 rtx begin_rtx, end_rtx;
5241 enum insn_code icode;
5243 /* We must not expand to a library call. If we did, any
5244 fallback library function in libgcc that might contain a call to
5245 __builtin___clear_cache() would recurse infinitely. */
5246 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5248 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5252 if (HAVE_clear_cache)
5254 icode = CODE_FOR_clear_cache;
5256 begin = CALL_EXPR_ARG (exp, 0);
5257 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5258 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5259 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5260 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5262 end = CALL_EXPR_ARG (exp, 1);
5263 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5264 end_rtx = convert_memory_address (Pmode, end_rtx);
5265 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5266 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5268 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5271 #endif /* HAVE_clear_cache */
5274 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5277 round_trampoline_addr (rtx tramp)
5279 rtx temp, addend, mask;
5281 /* If we don't need too much alignment, we'll have been guaranteed
5282 proper alignment by get_trampoline_type. */
5283 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5286 /* Round address up to desired boundary. */
5287 temp = gen_reg_rtx (Pmode);
5288 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5289 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5291 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5292 temp, 0, OPTAB_LIB_WIDEN);
5293 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5294 temp, 0, OPTAB_LIB_WIDEN);
5300 expand_builtin_init_trampoline (tree exp)
5302 tree t_tramp, t_func, t_chain;
5303 rtx m_tramp, r_tramp, r_chain, tmp;
5305 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5306 POINTER_TYPE, VOID_TYPE))
5309 t_tramp = CALL_EXPR_ARG (exp, 0);
5310 t_func = CALL_EXPR_ARG (exp, 1);
5311 t_chain = CALL_EXPR_ARG (exp, 2);
5313 r_tramp = expand_normal (t_tramp);
5314 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5315 MEM_NOTRAP_P (m_tramp) = 1;
5317 /* The TRAMP argument should be the address of a field within the
5318 local function's FRAME decl. Let's see if we can fill in the
5319 to fill in the MEM_ATTRs for this memory. */
5320 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5321 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5324 tmp = round_trampoline_addr (r_tramp);
5327 m_tramp = change_address (m_tramp, BLKmode, tmp);
5328 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5329 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5332 /* The FUNC argument should be the address of the nested function.
5333 Extract the actual function decl to pass to the hook. */
5334 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5335 t_func = TREE_OPERAND (t_func, 0);
5336 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5338 r_chain = expand_normal (t_chain);
5340 /* Generate insns to initialize the trampoline. */
5341 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5343 trampolines_created = 1;
5345 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5346 "trampoline generated for nested function %qD", t_func);
5352 expand_builtin_adjust_trampoline (tree exp)
5356 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5359 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5360 tramp = round_trampoline_addr (tramp);
5361 if (targetm.calls.trampoline_adjust_address)
5362 tramp = targetm.calls.trampoline_adjust_address (tramp);
5367 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5368 function. The function first checks whether the back end provides
5369 an insn to implement signbit for the respective mode. If not, it
5370 checks whether the floating point format of the value is such that
5371 the sign bit can be extracted. If that is not the case, the
5372 function returns NULL_RTX to indicate that a normal call should be
5373 emitted rather than expanding the function in-line. EXP is the
5374 expression that is a call to the builtin function; if convenient,
5375 the result should be placed in TARGET. */
5377 expand_builtin_signbit (tree exp, rtx target)
5379 const struct real_format *fmt;
5380 enum machine_mode fmode, imode, rmode;
5383 enum insn_code icode;
5385 location_t loc = EXPR_LOCATION (exp);
5387 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5390 arg = CALL_EXPR_ARG (exp, 0);
5391 fmode = TYPE_MODE (TREE_TYPE (arg));
5392 rmode = TYPE_MODE (TREE_TYPE (exp));
5393 fmt = REAL_MODE_FORMAT (fmode);
5395 arg = builtin_save_expr (arg);
5397 /* Expand the argument yielding a RTX expression. */
5398 temp = expand_normal (arg);
5400 /* Check if the back end provides an insn that handles signbit for the
5402 icode = optab_handler (signbit_optab, fmode);
5403 if (icode != CODE_FOR_nothing)
5405 rtx last = get_last_insn ();
5406 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5407 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5409 delete_insns_since (last);
5412 /* For floating point formats without a sign bit, implement signbit
5414 bitpos = fmt->signbit_ro;
5417 /* But we can't do this if the format supports signed zero. */
5418 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5421 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5422 build_real (TREE_TYPE (arg), dconst0));
5423 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5426 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5428 imode = int_mode_for_mode (fmode);
5429 if (imode == BLKmode)
5431 temp = gen_lowpart (imode, temp);
5436 /* Handle targets with different FP word orders. */
5437 if (FLOAT_WORDS_BIG_ENDIAN)
5438 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5440 word = bitpos / BITS_PER_WORD;
5441 temp = operand_subword_force (temp, word, fmode);
5442 bitpos = bitpos % BITS_PER_WORD;
5445 /* Force the intermediate word_mode (or narrower) result into a
5446 register. This avoids attempting to create paradoxical SUBREGs
5447 of floating point modes below. */
5448 temp = force_reg (imode, temp);
5450 /* If the bitpos is within the "result mode" lowpart, the operation
5451 can be implement with a single bitwise AND. Otherwise, we need
5452 a right shift and an AND. */
5454 if (bitpos < GET_MODE_BITSIZE (rmode))
5456 double_int mask = double_int_setbit (double_int_zero, bitpos);
5458 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5459 temp = gen_lowpart (rmode, temp);
5460 temp = expand_binop (rmode, and_optab, temp,
5461 immed_double_int_const (mask, rmode),
5462 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5466 /* Perform a logical right shift to place the signbit in the least
5467 significant bit, then truncate the result to the desired mode
5468 and mask just this bit. */
5469 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5470 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5471 temp = gen_lowpart (rmode, temp);
5472 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5473 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5479 /* Expand fork or exec calls. TARGET is the desired target of the
5480 call. EXP is the call. FN is the
5481 identificator of the actual function. IGNORE is nonzero if the
5482 value is to be ignored. */
5485 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5490 /* If we are not profiling, just call the function. */
5491 if (!profile_arc_flag)
5494 /* Otherwise call the wrapper. This should be equivalent for the rest of
5495 compiler, so the code does not diverge, and the wrapper may run the
5496 code necessary for keeping the profiling sane. */
5498 switch (DECL_FUNCTION_CODE (fn))
5501 id = get_identifier ("__gcov_fork");
5504 case BUILT_IN_EXECL:
5505 id = get_identifier ("__gcov_execl");
5508 case BUILT_IN_EXECV:
5509 id = get_identifier ("__gcov_execv");
5512 case BUILT_IN_EXECLP:
5513 id = get_identifier ("__gcov_execlp");
5516 case BUILT_IN_EXECLE:
5517 id = get_identifier ("__gcov_execle");
5520 case BUILT_IN_EXECVP:
5521 id = get_identifier ("__gcov_execvp");
5524 case BUILT_IN_EXECVE:
5525 id = get_identifier ("__gcov_execve");
5532 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5533 FUNCTION_DECL, id, TREE_TYPE (fn));
5534 DECL_EXTERNAL (decl) = 1;
5535 TREE_PUBLIC (decl) = 1;
5536 DECL_ARTIFICIAL (decl) = 1;
5537 TREE_NOTHROW (decl) = 1;
5538 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5539 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5540 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5541 return expand_call (call, target, ignore);
5546 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5547 the pointer in these functions is void*, the tree optimizers may remove
5548 casts. The mode computed in expand_builtin isn't reliable either, due
5549 to __sync_bool_compare_and_swap.
5551 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5552 group of builtins. This gives us log2 of the mode size. */
5554 static inline enum machine_mode
5555 get_builtin_sync_mode (int fcode_diff)
5557 /* The size is not negotiable, so ask not to get BLKmode in return
5558 if the target indicates that a smaller size would be better. */
5559 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5562 /* Expand the memory expression LOC and return the appropriate memory operand
5563 for the builtin_sync operations. */
5566 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5570 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5571 addr = convert_memory_address (Pmode, addr);
5573 /* Note that we explicitly do not want any alias information for this
5574 memory, so that we kill all other live memories. Otherwise we don't
5575 satisfy the full barrier semantics of the intrinsic. */
5576 mem = validize_mem (gen_rtx_MEM (mode, addr));
5578 /* The alignment needs to be at least according to that of the mode. */
5579 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5580 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5581 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5582 MEM_VOLATILE_P (mem) = 1;
5587 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5588 EXP is the CALL_EXPR. CODE is the rtx code
5589 that corresponds to the arithmetic or logical operation from the name;
5590 an exception here is that NOT actually means NAND. TARGET is an optional
5591 place for us to store the results; AFTER is true if this is the
5592 fetch_and_xxx form. IGNORE is true if we don't actually care about
5593 the result of the operation at all. */
5596 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5597 enum rtx_code code, bool after,
5598 rtx target, bool ignore)
5601 enum machine_mode old_mode;
5602 location_t loc = EXPR_LOCATION (exp);
5604 if (code == NOT && warn_sync_nand)
5606 tree fndecl = get_callee_fndecl (exp);
5607 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5609 static bool warned_f_a_n, warned_n_a_f;
5613 case BUILT_IN_FETCH_AND_NAND_1:
5614 case BUILT_IN_FETCH_AND_NAND_2:
5615 case BUILT_IN_FETCH_AND_NAND_4:
5616 case BUILT_IN_FETCH_AND_NAND_8:
5617 case BUILT_IN_FETCH_AND_NAND_16:
5622 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5623 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5624 warned_f_a_n = true;
5627 case BUILT_IN_NAND_AND_FETCH_1:
5628 case BUILT_IN_NAND_AND_FETCH_2:
5629 case BUILT_IN_NAND_AND_FETCH_4:
5630 case BUILT_IN_NAND_AND_FETCH_8:
5631 case BUILT_IN_NAND_AND_FETCH_16:
5636 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5637 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5638 warned_n_a_f = true;
5646 /* Expand the operands. */
5647 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5649 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5650 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5651 of CONST_INTs, where we know the old_mode only from the call argument. */
5652 old_mode = GET_MODE (val);
5653 if (old_mode == VOIDmode)
5654 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5655 val = convert_modes (mode, old_mode, val, 1);
5658 return expand_sync_operation (mem, val, code);
5660 return expand_sync_fetch_operation (mem, val, code, after, target);
5663 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5664 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5665 true if this is the boolean form. TARGET is a place for us to store the
5666 results; this is NOT optional if IS_BOOL is true. */
5669 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5670 bool is_bool, rtx target)
5672 rtx old_val, new_val, mem;
5673 enum machine_mode old_mode;
5675 /* Expand the operands. */
5676 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5679 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5680 mode, EXPAND_NORMAL);
5681 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5682 of CONST_INTs, where we know the old_mode only from the call argument. */
5683 old_mode = GET_MODE (old_val);
5684 if (old_mode == VOIDmode)
5685 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5686 old_val = convert_modes (mode, old_mode, old_val, 1);
5688 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5689 mode, EXPAND_NORMAL);
5690 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5691 of CONST_INTs, where we know the old_mode only from the call argument. */
5692 old_mode = GET_MODE (new_val);
5693 if (old_mode == VOIDmode)
5694 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5695 new_val = convert_modes (mode, old_mode, new_val, 1);
5698 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5700 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5703 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5704 general form is actually an atomic exchange, and some targets only
5705 support a reduced form with the second argument being a constant 1.
5706 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5710 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5714 enum machine_mode old_mode;
5716 /* Expand the operands. */
5717 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5718 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5719 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5720 of CONST_INTs, where we know the old_mode only from the call argument. */
5721 old_mode = GET_MODE (val);
5722 if (old_mode == VOIDmode)
5723 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5724 val = convert_modes (mode, old_mode, val, 1);
5726 return expand_sync_lock_test_and_set (mem, val, target);
5729 /* Expand the __sync_synchronize intrinsic. */
5732 expand_builtin_synchronize (void)
5735 VEC (tree, gc) *v_clobbers;
5737 #ifdef HAVE_memory_barrier
5738 if (HAVE_memory_barrier)
5740 emit_insn (gen_memory_barrier ());
5745 if (synchronize_libfunc != NULL_RTX)
5747 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5751 /* If no explicit memory barrier instruction is available, create an
5752 empty asm stmt with a memory clobber. */
5753 v_clobbers = VEC_alloc (tree, gc, 1);
5754 VEC_quick_push (tree, v_clobbers,
5755 tree_cons (NULL, build_string (6, "memory"), NULL));
5756 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5757 gimple_asm_set_volatile (x, true);
5758 expand_asm_stmt (x);
5761 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5764 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5766 enum insn_code icode;
5768 rtx val = const0_rtx;
5770 /* Expand the operands. */
5771 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5773 /* If there is an explicit operation in the md file, use it. */
5774 icode = direct_optab_handler (sync_lock_release_optab, mode);
5775 if (icode != CODE_FOR_nothing)
5777 if (!insn_data[icode].operand[1].predicate (val, mode))
5778 val = force_reg (mode, val);
5780 insn = GEN_FCN (icode) (mem, val);
5788 /* Otherwise we can implement this operation by emitting a barrier
5789 followed by a store of zero. */
5790 expand_builtin_synchronize ();
5791 emit_move_insn (mem, val);
5794 /* Expand an expression EXP that calls a built-in function,
5795 with result going to TARGET if that's convenient
5796 (and in mode MODE if that's convenient).
5797 SUBTARGET may be used as the target for computing one of EXP's operands.
5798 IGNORE is nonzero if the value is to be ignored. */
5801 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5804 tree fndecl = get_callee_fndecl (exp);
5805 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5806 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5809 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5810 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5812 /* When not optimizing, generate calls to library functions for a certain
5815 && !called_as_built_in (fndecl)
5816 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5817 && fcode != BUILT_IN_ALLOCA
5818 && fcode != BUILT_IN_FREE)
5819 return expand_call (exp, target, ignore);
5821 /* The built-in function expanders test for target == const0_rtx
5822 to determine whether the function's result will be ignored. */
5824 target = const0_rtx;
5826 /* If the result of a pure or const built-in function is ignored, and
5827 none of its arguments are volatile, we can avoid expanding the
5828 built-in call and just evaluate the arguments for side-effects. */
5829 if (target == const0_rtx
5830 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5831 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5833 bool volatilep = false;
5835 call_expr_arg_iterator iter;
5837 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5838 if (TREE_THIS_VOLATILE (arg))
5846 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5847 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5854 CASE_FLT_FN (BUILT_IN_FABS):
5855 target = expand_builtin_fabs (exp, target, subtarget);
5860 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5861 target = expand_builtin_copysign (exp, target, subtarget);
5866 /* Just do a normal library call if we were unable to fold
5868 CASE_FLT_FN (BUILT_IN_CABS):
5871 CASE_FLT_FN (BUILT_IN_EXP):
5872 CASE_FLT_FN (BUILT_IN_EXP10):
5873 CASE_FLT_FN (BUILT_IN_POW10):
5874 CASE_FLT_FN (BUILT_IN_EXP2):
5875 CASE_FLT_FN (BUILT_IN_EXPM1):
5876 CASE_FLT_FN (BUILT_IN_LOGB):
5877 CASE_FLT_FN (BUILT_IN_LOG):
5878 CASE_FLT_FN (BUILT_IN_LOG10):
5879 CASE_FLT_FN (BUILT_IN_LOG2):
5880 CASE_FLT_FN (BUILT_IN_LOG1P):
5881 CASE_FLT_FN (BUILT_IN_TAN):
5882 CASE_FLT_FN (BUILT_IN_ASIN):
5883 CASE_FLT_FN (BUILT_IN_ACOS):
5884 CASE_FLT_FN (BUILT_IN_ATAN):
5885 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5886 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5887 because of possible accuracy problems. */
5888 if (! flag_unsafe_math_optimizations)
5890 CASE_FLT_FN (BUILT_IN_SQRT):
5891 CASE_FLT_FN (BUILT_IN_FLOOR):
5892 CASE_FLT_FN (BUILT_IN_CEIL):
5893 CASE_FLT_FN (BUILT_IN_TRUNC):
5894 CASE_FLT_FN (BUILT_IN_ROUND):
5895 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5896 CASE_FLT_FN (BUILT_IN_RINT):
5897 target = expand_builtin_mathfn (exp, target, subtarget);
5902 CASE_FLT_FN (BUILT_IN_FMA):
5903 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5908 CASE_FLT_FN (BUILT_IN_ILOGB):
5909 if (! flag_unsafe_math_optimizations)
5911 CASE_FLT_FN (BUILT_IN_ISINF):
5912 CASE_FLT_FN (BUILT_IN_FINITE):
5913 case BUILT_IN_ISFINITE:
5914 case BUILT_IN_ISNORMAL:
5915 target = expand_builtin_interclass_mathfn (exp, target);
5920 CASE_FLT_FN (BUILT_IN_LCEIL):
5921 CASE_FLT_FN (BUILT_IN_LLCEIL):
5922 CASE_FLT_FN (BUILT_IN_LFLOOR):
5923 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5924 target = expand_builtin_int_roundingfn (exp, target);
5929 CASE_FLT_FN (BUILT_IN_LRINT):
5930 CASE_FLT_FN (BUILT_IN_LLRINT):
5931 CASE_FLT_FN (BUILT_IN_LROUND):
5932 CASE_FLT_FN (BUILT_IN_LLROUND):
5933 target = expand_builtin_int_roundingfn_2 (exp, target);
5938 CASE_FLT_FN (BUILT_IN_POW):
5939 target = expand_builtin_pow (exp, target, subtarget);
5944 CASE_FLT_FN (BUILT_IN_POWI):
5945 target = expand_builtin_powi (exp, target);
5950 CASE_FLT_FN (BUILT_IN_ATAN2):
5951 CASE_FLT_FN (BUILT_IN_LDEXP):
5952 CASE_FLT_FN (BUILT_IN_SCALB):
5953 CASE_FLT_FN (BUILT_IN_SCALBN):
5954 CASE_FLT_FN (BUILT_IN_SCALBLN):
5955 if (! flag_unsafe_math_optimizations)
5958 CASE_FLT_FN (BUILT_IN_FMOD):
5959 CASE_FLT_FN (BUILT_IN_REMAINDER):
5960 CASE_FLT_FN (BUILT_IN_DREM):
5961 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5966 CASE_FLT_FN (BUILT_IN_CEXPI):
5967 target = expand_builtin_cexpi (exp, target);
5968 gcc_assert (target);
5971 CASE_FLT_FN (BUILT_IN_SIN):
5972 CASE_FLT_FN (BUILT_IN_COS):
5973 if (! flag_unsafe_math_optimizations)
5975 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5980 CASE_FLT_FN (BUILT_IN_SINCOS):
5981 if (! flag_unsafe_math_optimizations)
5983 target = expand_builtin_sincos (exp);
5988 case BUILT_IN_APPLY_ARGS:
5989 return expand_builtin_apply_args ();
5991 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5992 FUNCTION with a copy of the parameters described by
5993 ARGUMENTS, and ARGSIZE. It returns a block of memory
5994 allocated on the stack into which is stored all the registers
5995 that might possibly be used for returning the result of a
5996 function. ARGUMENTS is the value returned by
5997 __builtin_apply_args. ARGSIZE is the number of bytes of
5998 arguments that must be copied. ??? How should this value be
5999 computed? We'll also need a safe worst case value for varargs
6001 case BUILT_IN_APPLY:
6002 if (!validate_arglist (exp, POINTER_TYPE,
6003 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6004 && !validate_arglist (exp, REFERENCE_TYPE,
6005 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6011 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6012 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6013 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6015 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6018 /* __builtin_return (RESULT) causes the function to return the
6019 value described by RESULT. RESULT is address of the block of
6020 memory returned by __builtin_apply. */
6021 case BUILT_IN_RETURN:
6022 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6023 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6026 case BUILT_IN_SAVEREGS:
6027 return expand_builtin_saveregs ();
6029 case BUILT_IN_VA_ARG_PACK:
6030 /* All valid uses of __builtin_va_arg_pack () are removed during
6032 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6035 case BUILT_IN_VA_ARG_PACK_LEN:
6036 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6038 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6041 /* Return the address of the first anonymous stack arg. */
6042 case BUILT_IN_NEXT_ARG:
6043 if (fold_builtin_next_arg (exp, false))
6045 return expand_builtin_next_arg ();
6047 case BUILT_IN_CLEAR_CACHE:
6048 target = expand_builtin___clear_cache (exp);
6053 case BUILT_IN_CLASSIFY_TYPE:
6054 return expand_builtin_classify_type (exp);
6056 case BUILT_IN_CONSTANT_P:
6059 case BUILT_IN_FRAME_ADDRESS:
6060 case BUILT_IN_RETURN_ADDRESS:
6061 return expand_builtin_frame_address (fndecl, exp);
6063 /* Returns the address of the area where the structure is returned.
6065 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6066 if (call_expr_nargs (exp) != 0
6067 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6068 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6071 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6073 case BUILT_IN_ALLOCA:
6074 /* If the allocation stems from the declaration of a variable-sized
6075 object, it cannot accumulate. */
6076 target = expand_builtin_alloca (exp, ALLOCA_FOR_VAR_P (exp));
6081 case BUILT_IN_STACK_SAVE:
6082 return expand_stack_save ();
6084 case BUILT_IN_STACK_RESTORE:
6085 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6088 case BUILT_IN_BSWAP32:
6089 case BUILT_IN_BSWAP64:
6090 target = expand_builtin_bswap (exp, target, subtarget);
6096 CASE_INT_FN (BUILT_IN_FFS):
6097 case BUILT_IN_FFSIMAX:
6098 target = expand_builtin_unop (target_mode, exp, target,
6099 subtarget, ffs_optab);
6104 CASE_INT_FN (BUILT_IN_CLZ):
6105 case BUILT_IN_CLZIMAX:
6106 target = expand_builtin_unop (target_mode, exp, target,
6107 subtarget, clz_optab);
6112 CASE_INT_FN (BUILT_IN_CTZ):
6113 case BUILT_IN_CTZIMAX:
6114 target = expand_builtin_unop (target_mode, exp, target,
6115 subtarget, ctz_optab);
6120 CASE_INT_FN (BUILT_IN_POPCOUNT):
6121 case BUILT_IN_POPCOUNTIMAX:
6122 target = expand_builtin_unop (target_mode, exp, target,
6123 subtarget, popcount_optab);
6128 CASE_INT_FN (BUILT_IN_PARITY):
6129 case BUILT_IN_PARITYIMAX:
6130 target = expand_builtin_unop (target_mode, exp, target,
6131 subtarget, parity_optab);
6136 case BUILT_IN_STRLEN:
6137 target = expand_builtin_strlen (exp, target, target_mode);
6142 case BUILT_IN_STRCPY:
6143 target = expand_builtin_strcpy (exp, target);
6148 case BUILT_IN_STRNCPY:
6149 target = expand_builtin_strncpy (exp, target);
6154 case BUILT_IN_STPCPY:
6155 target = expand_builtin_stpcpy (exp, target, mode);
6160 case BUILT_IN_MEMCPY:
6161 target = expand_builtin_memcpy (exp, target);
6166 case BUILT_IN_MEMPCPY:
6167 target = expand_builtin_mempcpy (exp, target, mode);
6172 case BUILT_IN_MEMSET:
6173 target = expand_builtin_memset (exp, target, mode);
6178 case BUILT_IN_BZERO:
6179 target = expand_builtin_bzero (exp);
6184 case BUILT_IN_STRCMP:
6185 target = expand_builtin_strcmp (exp, target);
6190 case BUILT_IN_STRNCMP:
6191 target = expand_builtin_strncmp (exp, target, mode);
6197 case BUILT_IN_MEMCMP:
6198 target = expand_builtin_memcmp (exp, target, mode);
6203 case BUILT_IN_SETJMP:
6204 /* This should have been lowered to the builtins below. */
6207 case BUILT_IN_SETJMP_SETUP:
6208 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6209 and the receiver label. */
6210 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6212 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6213 VOIDmode, EXPAND_NORMAL);
6214 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6215 rtx label_r = label_rtx (label);
6217 /* This is copied from the handling of non-local gotos. */
6218 expand_builtin_setjmp_setup (buf_addr, label_r);
6219 nonlocal_goto_handler_labels
6220 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6221 nonlocal_goto_handler_labels);
6222 /* ??? Do not let expand_label treat us as such since we would
6223 not want to be both on the list of non-local labels and on
6224 the list of forced labels. */
6225 FORCED_LABEL (label) = 0;
6230 case BUILT_IN_SETJMP_DISPATCHER:
6231 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6232 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6234 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6235 rtx label_r = label_rtx (label);
6237 /* Remove the dispatcher label from the list of non-local labels
6238 since the receiver labels have been added to it above. */
6239 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6244 case BUILT_IN_SETJMP_RECEIVER:
6245 /* __builtin_setjmp_receiver is passed the receiver label. */
6246 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6248 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6249 rtx label_r = label_rtx (label);
6251 expand_builtin_setjmp_receiver (label_r);
6256 /* __builtin_longjmp is passed a pointer to an array of five words.
6257 It's similar to the C library longjmp function but works with
6258 __builtin_setjmp above. */
6259 case BUILT_IN_LONGJMP:
6260 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6262 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6263 VOIDmode, EXPAND_NORMAL);
6264 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6266 if (value != const1_rtx)
6268 error ("%<__builtin_longjmp%> second argument must be 1");
6272 expand_builtin_longjmp (buf_addr, value);
6277 case BUILT_IN_NONLOCAL_GOTO:
6278 target = expand_builtin_nonlocal_goto (exp);
6283 /* This updates the setjmp buffer that is its argument with the value
6284 of the current stack pointer. */
6285 case BUILT_IN_UPDATE_SETJMP_BUF:
6286 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6289 = expand_normal (CALL_EXPR_ARG (exp, 0));
6291 expand_builtin_update_setjmp_buf (buf_addr);
6297 expand_builtin_trap ();
6300 case BUILT_IN_UNREACHABLE:
6301 expand_builtin_unreachable ();
6304 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6305 case BUILT_IN_SIGNBITD32:
6306 case BUILT_IN_SIGNBITD64:
6307 case BUILT_IN_SIGNBITD128:
6308 target = expand_builtin_signbit (exp, target);
6313 /* Various hooks for the DWARF 2 __throw routine. */
6314 case BUILT_IN_UNWIND_INIT:
6315 expand_builtin_unwind_init ();
6317 case BUILT_IN_DWARF_CFA:
6318 return virtual_cfa_rtx;
6319 #ifdef DWARF2_UNWIND_INFO
6320 case BUILT_IN_DWARF_SP_COLUMN:
6321 return expand_builtin_dwarf_sp_column ();
6322 case BUILT_IN_INIT_DWARF_REG_SIZES:
6323 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6326 case BUILT_IN_FROB_RETURN_ADDR:
6327 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6328 case BUILT_IN_EXTRACT_RETURN_ADDR:
6329 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6330 case BUILT_IN_EH_RETURN:
6331 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6332 CALL_EXPR_ARG (exp, 1));
6334 #ifdef EH_RETURN_DATA_REGNO
6335 case BUILT_IN_EH_RETURN_DATA_REGNO:
6336 return expand_builtin_eh_return_data_regno (exp);
6338 case BUILT_IN_EXTEND_POINTER:
6339 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6340 case BUILT_IN_EH_POINTER:
6341 return expand_builtin_eh_pointer (exp);
6342 case BUILT_IN_EH_FILTER:
6343 return expand_builtin_eh_filter (exp);
6344 case BUILT_IN_EH_COPY_VALUES:
6345 return expand_builtin_eh_copy_values (exp);
6347 case BUILT_IN_VA_START:
6348 return expand_builtin_va_start (exp);
6349 case BUILT_IN_VA_END:
6350 return expand_builtin_va_end (exp);
6351 case BUILT_IN_VA_COPY:
6352 return expand_builtin_va_copy (exp);
6353 case BUILT_IN_EXPECT:
6354 return expand_builtin_expect (exp, target);
6355 case BUILT_IN_PREFETCH:
6356 expand_builtin_prefetch (exp);
6359 case BUILT_IN_PROFILE_FUNC_ENTER:
6360 return expand_builtin_profile_func (false);
6361 case BUILT_IN_PROFILE_FUNC_EXIT:
6362 return expand_builtin_profile_func (true);
6364 case BUILT_IN_INIT_TRAMPOLINE:
6365 return expand_builtin_init_trampoline (exp);
6366 case BUILT_IN_ADJUST_TRAMPOLINE:
6367 return expand_builtin_adjust_trampoline (exp);
6370 case BUILT_IN_EXECL:
6371 case BUILT_IN_EXECV:
6372 case BUILT_IN_EXECLP:
6373 case BUILT_IN_EXECLE:
6374 case BUILT_IN_EXECVP:
6375 case BUILT_IN_EXECVE:
6376 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6381 case BUILT_IN_FETCH_AND_ADD_1:
6382 case BUILT_IN_FETCH_AND_ADD_2:
6383 case BUILT_IN_FETCH_AND_ADD_4:
6384 case BUILT_IN_FETCH_AND_ADD_8:
6385 case BUILT_IN_FETCH_AND_ADD_16:
6386 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6387 target = expand_builtin_sync_operation (mode, exp, PLUS,
6388 false, target, ignore);
6393 case BUILT_IN_FETCH_AND_SUB_1:
6394 case BUILT_IN_FETCH_AND_SUB_2:
6395 case BUILT_IN_FETCH_AND_SUB_4:
6396 case BUILT_IN_FETCH_AND_SUB_8:
6397 case BUILT_IN_FETCH_AND_SUB_16:
6398 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6399 target = expand_builtin_sync_operation (mode, exp, MINUS,
6400 false, target, ignore);
6405 case BUILT_IN_FETCH_AND_OR_1:
6406 case BUILT_IN_FETCH_AND_OR_2:
6407 case BUILT_IN_FETCH_AND_OR_4:
6408 case BUILT_IN_FETCH_AND_OR_8:
6409 case BUILT_IN_FETCH_AND_OR_16:
6410 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6411 target = expand_builtin_sync_operation (mode, exp, IOR,
6412 false, target, ignore);
6417 case BUILT_IN_FETCH_AND_AND_1:
6418 case BUILT_IN_FETCH_AND_AND_2:
6419 case BUILT_IN_FETCH_AND_AND_4:
6420 case BUILT_IN_FETCH_AND_AND_8:
6421 case BUILT_IN_FETCH_AND_AND_16:
6422 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6423 target = expand_builtin_sync_operation (mode, exp, AND,
6424 false, target, ignore);
6429 case BUILT_IN_FETCH_AND_XOR_1:
6430 case BUILT_IN_FETCH_AND_XOR_2:
6431 case BUILT_IN_FETCH_AND_XOR_4:
6432 case BUILT_IN_FETCH_AND_XOR_8:
6433 case BUILT_IN_FETCH_AND_XOR_16:
6434 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6435 target = expand_builtin_sync_operation (mode, exp, XOR,
6436 false, target, ignore);
6441 case BUILT_IN_FETCH_AND_NAND_1:
6442 case BUILT_IN_FETCH_AND_NAND_2:
6443 case BUILT_IN_FETCH_AND_NAND_4:
6444 case BUILT_IN_FETCH_AND_NAND_8:
6445 case BUILT_IN_FETCH_AND_NAND_16:
6446 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6447 target = expand_builtin_sync_operation (mode, exp, NOT,
6448 false, target, ignore);
6453 case BUILT_IN_ADD_AND_FETCH_1:
6454 case BUILT_IN_ADD_AND_FETCH_2:
6455 case BUILT_IN_ADD_AND_FETCH_4:
6456 case BUILT_IN_ADD_AND_FETCH_8:
6457 case BUILT_IN_ADD_AND_FETCH_16:
6458 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6459 target = expand_builtin_sync_operation (mode, exp, PLUS,
6460 true, target, ignore);
6465 case BUILT_IN_SUB_AND_FETCH_1:
6466 case BUILT_IN_SUB_AND_FETCH_2:
6467 case BUILT_IN_SUB_AND_FETCH_4:
6468 case BUILT_IN_SUB_AND_FETCH_8:
6469 case BUILT_IN_SUB_AND_FETCH_16:
6470 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6471 target = expand_builtin_sync_operation (mode, exp, MINUS,
6472 true, target, ignore);
6477 case BUILT_IN_OR_AND_FETCH_1:
6478 case BUILT_IN_OR_AND_FETCH_2:
6479 case BUILT_IN_OR_AND_FETCH_4:
6480 case BUILT_IN_OR_AND_FETCH_8:
6481 case BUILT_IN_OR_AND_FETCH_16:
6482 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6483 target = expand_builtin_sync_operation (mode, exp, IOR,
6484 true, target, ignore);
6489 case BUILT_IN_AND_AND_FETCH_1:
6490 case BUILT_IN_AND_AND_FETCH_2:
6491 case BUILT_IN_AND_AND_FETCH_4:
6492 case BUILT_IN_AND_AND_FETCH_8:
6493 case BUILT_IN_AND_AND_FETCH_16:
6494 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6495 target = expand_builtin_sync_operation (mode, exp, AND,
6496 true, target, ignore);
6501 case BUILT_IN_XOR_AND_FETCH_1:
6502 case BUILT_IN_XOR_AND_FETCH_2:
6503 case BUILT_IN_XOR_AND_FETCH_4:
6504 case BUILT_IN_XOR_AND_FETCH_8:
6505 case BUILT_IN_XOR_AND_FETCH_16:
6506 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6507 target = expand_builtin_sync_operation (mode, exp, XOR,
6508 true, target, ignore);
6513 case BUILT_IN_NAND_AND_FETCH_1:
6514 case BUILT_IN_NAND_AND_FETCH_2:
6515 case BUILT_IN_NAND_AND_FETCH_4:
6516 case BUILT_IN_NAND_AND_FETCH_8:
6517 case BUILT_IN_NAND_AND_FETCH_16:
6518 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6519 target = expand_builtin_sync_operation (mode, exp, NOT,
6520 true, target, ignore);
6525 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6526 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6527 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6528 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6529 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6530 if (mode == VOIDmode)
6531 mode = TYPE_MODE (boolean_type_node);
6532 if (!target || !register_operand (target, mode))
6533 target = gen_reg_rtx (mode);
6535 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6536 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6541 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6542 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6543 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6544 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6545 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6546 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6547 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6552 case BUILT_IN_LOCK_TEST_AND_SET_1:
6553 case BUILT_IN_LOCK_TEST_AND_SET_2:
6554 case BUILT_IN_LOCK_TEST_AND_SET_4:
6555 case BUILT_IN_LOCK_TEST_AND_SET_8:
6556 case BUILT_IN_LOCK_TEST_AND_SET_16:
6557 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6558 target = expand_builtin_lock_test_and_set (mode, exp, target);
6563 case BUILT_IN_LOCK_RELEASE_1:
6564 case BUILT_IN_LOCK_RELEASE_2:
6565 case BUILT_IN_LOCK_RELEASE_4:
6566 case BUILT_IN_LOCK_RELEASE_8:
6567 case BUILT_IN_LOCK_RELEASE_16:
6568 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6569 expand_builtin_lock_release (mode, exp);
6572 case BUILT_IN_SYNCHRONIZE:
6573 expand_builtin_synchronize ();
6576 case BUILT_IN_OBJECT_SIZE:
6577 return expand_builtin_object_size (exp);
6579 case BUILT_IN_MEMCPY_CHK:
6580 case BUILT_IN_MEMPCPY_CHK:
6581 case BUILT_IN_MEMMOVE_CHK:
6582 case BUILT_IN_MEMSET_CHK:
6583 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6588 case BUILT_IN_STRCPY_CHK:
6589 case BUILT_IN_STPCPY_CHK:
6590 case BUILT_IN_STRNCPY_CHK:
6591 case BUILT_IN_STRCAT_CHK:
6592 case BUILT_IN_STRNCAT_CHK:
6593 case BUILT_IN_SNPRINTF_CHK:
6594 case BUILT_IN_VSNPRINTF_CHK:
6595 maybe_emit_chk_warning (exp, fcode);
6598 case BUILT_IN_SPRINTF_CHK:
6599 case BUILT_IN_VSPRINTF_CHK:
6600 maybe_emit_sprintf_chk_warning (exp, fcode);
6604 maybe_emit_free_warning (exp);
6607 default: /* just do library call, if unknown builtin */
6611 /* The switch statement above can drop through to cause the function
6612 to be called normally. */
6613 return expand_call (exp, target, ignore);
6616 /* Determine whether a tree node represents a call to a built-in
6617 function. If the tree T is a call to a built-in function with
6618 the right number of arguments of the appropriate types, return
6619 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6620 Otherwise the return value is END_BUILTINS. */
6622 enum built_in_function
6623 builtin_mathfn_code (const_tree t)
6625 const_tree fndecl, arg, parmlist;
6626 const_tree argtype, parmtype;
6627 const_call_expr_arg_iterator iter;
6629 if (TREE_CODE (t) != CALL_EXPR
6630 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6631 return END_BUILTINS;
6633 fndecl = get_callee_fndecl (t);
6634 if (fndecl == NULL_TREE
6635 || TREE_CODE (fndecl) != FUNCTION_DECL
6636 || ! DECL_BUILT_IN (fndecl)
6637 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6638 return END_BUILTINS;
6640 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6641 init_const_call_expr_arg_iterator (t, &iter);
6642 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6644 /* If a function doesn't take a variable number of arguments,
6645 the last element in the list will have type `void'. */
6646 parmtype = TREE_VALUE (parmlist);
6647 if (VOID_TYPE_P (parmtype))
6649 if (more_const_call_expr_args_p (&iter))
6650 return END_BUILTINS;
6651 return DECL_FUNCTION_CODE (fndecl);
6654 if (! more_const_call_expr_args_p (&iter))
6655 return END_BUILTINS;
6657 arg = next_const_call_expr_arg (&iter);
6658 argtype = TREE_TYPE (arg);
6660 if (SCALAR_FLOAT_TYPE_P (parmtype))
6662 if (! SCALAR_FLOAT_TYPE_P (argtype))
6663 return END_BUILTINS;
6665 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6667 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6668 return END_BUILTINS;
6670 else if (POINTER_TYPE_P (parmtype))
6672 if (! POINTER_TYPE_P (argtype))
6673 return END_BUILTINS;
6675 else if (INTEGRAL_TYPE_P (parmtype))
6677 if (! INTEGRAL_TYPE_P (argtype))
6678 return END_BUILTINS;
6681 return END_BUILTINS;
6684 /* Variable-length argument list. */
6685 return DECL_FUNCTION_CODE (fndecl);
6688 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6689 evaluate to a constant. */
6692 fold_builtin_constant_p (tree arg)
6694 /* We return 1 for a numeric type that's known to be a constant
6695 value at compile-time or for an aggregate type that's a
6696 literal constant. */
6699 /* If we know this is a constant, emit the constant of one. */
6700 if (CONSTANT_CLASS_P (arg)
6701 || (TREE_CODE (arg) == CONSTRUCTOR
6702 && TREE_CONSTANT (arg)))
6703 return integer_one_node;
6704 if (TREE_CODE (arg) == ADDR_EXPR)
6706 tree op = TREE_OPERAND (arg, 0);
6707 if (TREE_CODE (op) == STRING_CST
6708 || (TREE_CODE (op) == ARRAY_REF
6709 && integer_zerop (TREE_OPERAND (op, 1))
6710 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6711 return integer_one_node;
6714 /* If this expression has side effects, show we don't know it to be a
6715 constant. Likewise if it's a pointer or aggregate type since in
6716 those case we only want literals, since those are only optimized
6717 when generating RTL, not later.
6718 And finally, if we are compiling an initializer, not code, we
6719 need to return a definite result now; there's not going to be any
6720 more optimization done. */
6721 if (TREE_SIDE_EFFECTS (arg)
6722 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6723 || POINTER_TYPE_P (TREE_TYPE (arg))
6725 || folding_initializer)
6726 return integer_zero_node;
6731 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6732 return it as a truthvalue. */
6735 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6737 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6739 fn = built_in_decls[BUILT_IN_EXPECT];
6740 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6741 ret_type = TREE_TYPE (TREE_TYPE (fn));
6742 pred_type = TREE_VALUE (arg_types);
6743 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6745 pred = fold_convert_loc (loc, pred_type, pred);
6746 expected = fold_convert_loc (loc, expected_type, expected);
6747 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6749 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6750 build_int_cst (ret_type, 0));
6753 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6754 NULL_TREE if no simplification is possible. */
6757 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6760 enum tree_code code;
6762 /* If this is a builtin_expect within a builtin_expect keep the
6763 inner one. See through a comparison against a constant. It
6764 might have been added to create a thruthvalue. */
6766 if (COMPARISON_CLASS_P (inner)
6767 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6768 inner = TREE_OPERAND (inner, 0);
6770 if (TREE_CODE (inner) == CALL_EXPR
6771 && (fndecl = get_callee_fndecl (inner))
6772 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6773 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6776 /* Distribute the expected value over short-circuiting operators.
6777 See through the cast from truthvalue_type_node to long. */
6779 while (TREE_CODE (inner) == NOP_EXPR
6780 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6781 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6782 inner = TREE_OPERAND (inner, 0);
6784 code = TREE_CODE (inner);
6785 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6787 tree op0 = TREE_OPERAND (inner, 0);
6788 tree op1 = TREE_OPERAND (inner, 1);
6790 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6791 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6792 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6794 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6797 /* If the argument isn't invariant then there's nothing else we can do. */
6798 if (!TREE_CONSTANT (arg0))
6801 /* If we expect that a comparison against the argument will fold to
6802 a constant return the constant. In practice, this means a true
6803 constant or the address of a non-weak symbol. */
6806 if (TREE_CODE (inner) == ADDR_EXPR)
6810 inner = TREE_OPERAND (inner, 0);
6812 while (TREE_CODE (inner) == COMPONENT_REF
6813 || TREE_CODE (inner) == ARRAY_REF);
6814 if ((TREE_CODE (inner) == VAR_DECL
6815 || TREE_CODE (inner) == FUNCTION_DECL)
6816 && DECL_WEAK (inner))
6820 /* Otherwise, ARG0 already has the proper type for the return value. */
6824 /* Fold a call to __builtin_classify_type with argument ARG. */
6827 fold_builtin_classify_type (tree arg)
6830 return build_int_cst (NULL_TREE, no_type_class);
6832 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6835 /* Fold a call to __builtin_strlen with argument ARG. */
6838 fold_builtin_strlen (location_t loc, tree type, tree arg)
6840 if (!validate_arg (arg, POINTER_TYPE))
6844 tree len = c_strlen (arg, 0);
6847 return fold_convert_loc (loc, type, len);
6853 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6856 fold_builtin_inf (location_t loc, tree type, int warn)
6858 REAL_VALUE_TYPE real;
6860 /* __builtin_inff is intended to be usable to define INFINITY on all
6861 targets. If an infinity is not available, INFINITY expands "to a
6862 positive constant of type float that overflows at translation
6863 time", footnote "In this case, using INFINITY will violate the
6864 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6865 Thus we pedwarn to ensure this constraint violation is
6867 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6868 pedwarn (loc, 0, "target format does not support infinity");
6871 return build_real (type, real);
6874 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6877 fold_builtin_nan (tree arg, tree type, int quiet)
6879 REAL_VALUE_TYPE real;
6882 if (!validate_arg (arg, POINTER_TYPE))
6884 str = c_getstr (arg);
6888 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6891 return build_real (type, real);
6894 /* Return true if the floating point expression T has an integer value.
6895 We also allow +Inf, -Inf and NaN to be considered integer values. */
6898 integer_valued_real_p (tree t)
6900 switch (TREE_CODE (t))
6907 return integer_valued_real_p (TREE_OPERAND (t, 0));
6912 return integer_valued_real_p (TREE_OPERAND (t, 1));
6919 return integer_valued_real_p (TREE_OPERAND (t, 0))
6920 && integer_valued_real_p (TREE_OPERAND (t, 1));
6923 return integer_valued_real_p (TREE_OPERAND (t, 1))
6924 && integer_valued_real_p (TREE_OPERAND (t, 2));
6927 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6931 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6932 if (TREE_CODE (type) == INTEGER_TYPE)
6934 if (TREE_CODE (type) == REAL_TYPE)
6935 return integer_valued_real_p (TREE_OPERAND (t, 0));
6940 switch (builtin_mathfn_code (t))
6942 CASE_FLT_FN (BUILT_IN_CEIL):
6943 CASE_FLT_FN (BUILT_IN_FLOOR):
6944 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6945 CASE_FLT_FN (BUILT_IN_RINT):
6946 CASE_FLT_FN (BUILT_IN_ROUND):
6947 CASE_FLT_FN (BUILT_IN_TRUNC):
6950 CASE_FLT_FN (BUILT_IN_FMIN):
6951 CASE_FLT_FN (BUILT_IN_FMAX):
6952 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6953 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6966 /* FNDECL is assumed to be a builtin where truncation can be propagated
6967 across (for instance floor((double)f) == (double)floorf (f).
6968 Do the transformation for a call with argument ARG. */
6971 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6973 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6975 if (!validate_arg (arg, REAL_TYPE))
6978 /* Integer rounding functions are idempotent. */
6979 if (fcode == builtin_mathfn_code (arg))
6982 /* If argument is already integer valued, and we don't need to worry
6983 about setting errno, there's no need to perform rounding. */
6984 if (! flag_errno_math && integer_valued_real_p (arg))
6989 tree arg0 = strip_float_extensions (arg);
6990 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6991 tree newtype = TREE_TYPE (arg0);
6994 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6995 && (decl = mathfn_built_in (newtype, fcode)))
6996 return fold_convert_loc (loc, ftype,
6997 build_call_expr_loc (loc, decl, 1,
6998 fold_convert_loc (loc,
7005 /* FNDECL is assumed to be builtin which can narrow the FP type of
7006 the argument, for instance lround((double)f) -> lroundf (f).
7007 Do the transformation for a call with argument ARG. */
7010 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7012 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7014 if (!validate_arg (arg, REAL_TYPE))
7017 /* If argument is already integer valued, and we don't need to worry
7018 about setting errno, there's no need to perform rounding. */
7019 if (! flag_errno_math && integer_valued_real_p (arg))
7020 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7021 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7025 tree ftype = TREE_TYPE (arg);
7026 tree arg0 = strip_float_extensions (arg);
7027 tree newtype = TREE_TYPE (arg0);
7030 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7031 && (decl = mathfn_built_in (newtype, fcode)))
7032 return build_call_expr_loc (loc, decl, 1,
7033 fold_convert_loc (loc, newtype, arg0));
7036 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7037 sizeof (long long) == sizeof (long). */
7038 if (TYPE_PRECISION (long_long_integer_type_node)
7039 == TYPE_PRECISION (long_integer_type_node))
7041 tree newfn = NULL_TREE;
7044 CASE_FLT_FN (BUILT_IN_LLCEIL):
7045 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7048 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7049 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7052 CASE_FLT_FN (BUILT_IN_LLROUND):
7053 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7056 CASE_FLT_FN (BUILT_IN_LLRINT):
7057 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7066 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7067 return fold_convert_loc (loc,
7068 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7075 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7076 return type. Return NULL_TREE if no simplification can be made. */
7079 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7083 if (!validate_arg (arg, COMPLEX_TYPE)
7084 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7087 /* Calculate the result when the argument is a constant. */
7088 if (TREE_CODE (arg) == COMPLEX_CST
7089 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7093 if (TREE_CODE (arg) == COMPLEX_EXPR)
7095 tree real = TREE_OPERAND (arg, 0);
7096 tree imag = TREE_OPERAND (arg, 1);
7098 /* If either part is zero, cabs is fabs of the other. */
7099 if (real_zerop (real))
7100 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7101 if (real_zerop (imag))
7102 return fold_build1_loc (loc, ABS_EXPR, type, real);
7104 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7105 if (flag_unsafe_math_optimizations
7106 && operand_equal_p (real, imag, OEP_PURE_SAME))
7108 const REAL_VALUE_TYPE sqrt2_trunc
7109 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7111 return fold_build2_loc (loc, MULT_EXPR, type,
7112 fold_build1_loc (loc, ABS_EXPR, type, real),
7113 build_real (type, sqrt2_trunc));
7117 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7118 if (TREE_CODE (arg) == NEGATE_EXPR
7119 || TREE_CODE (arg) == CONJ_EXPR)
7120 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7122 /* Don't do this when optimizing for size. */
7123 if (flag_unsafe_math_optimizations
7124 && optimize && optimize_function_for_speed_p (cfun))
7126 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7128 if (sqrtfn != NULL_TREE)
7130 tree rpart, ipart, result;
7132 arg = builtin_save_expr (arg);
7134 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7135 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7137 rpart = builtin_save_expr (rpart);
7138 ipart = builtin_save_expr (ipart);
7140 result = fold_build2_loc (loc, PLUS_EXPR, type,
7141 fold_build2_loc (loc, MULT_EXPR, type,
7143 fold_build2_loc (loc, MULT_EXPR, type,
7146 return build_call_expr_loc (loc, sqrtfn, 1, result);
7153 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7154 complex tree type of the result. If NEG is true, the imaginary
7155 zero is negative. */
7158 build_complex_cproj (tree type, bool neg)
7160 REAL_VALUE_TYPE rinf, rzero = dconst0;
7164 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7165 build_real (TREE_TYPE (type), rzero));
7168 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7169 return type. Return NULL_TREE if no simplification can be made. */
7172 fold_builtin_cproj (location_t loc, tree arg, tree type)
7174 if (!validate_arg (arg, COMPLEX_TYPE)
7175 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7178 /* If there are no infinities, return arg. */
7179 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7180 return non_lvalue_loc (loc, arg);
7182 /* Calculate the result when the argument is a constant. */
7183 if (TREE_CODE (arg) == COMPLEX_CST)
7185 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7186 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7188 if (real_isinf (real) || real_isinf (imag))
7189 return build_complex_cproj (type, imag->sign);
7193 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7195 tree real = TREE_OPERAND (arg, 0);
7196 tree imag = TREE_OPERAND (arg, 1);
7201 /* If the real part is inf and the imag part is known to be
7202 nonnegative, return (inf + 0i). Remember side-effects are
7203 possible in the imag part. */
7204 if (TREE_CODE (real) == REAL_CST
7205 && real_isinf (TREE_REAL_CST_PTR (real))
7206 && tree_expr_nonnegative_p (imag))
7207 return omit_one_operand_loc (loc, type,
7208 build_complex_cproj (type, false),
7211 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7212 Remember side-effects are possible in the real part. */
7213 if (TREE_CODE (imag) == REAL_CST
7214 && real_isinf (TREE_REAL_CST_PTR (imag)))
7216 omit_one_operand_loc (loc, type,
7217 build_complex_cproj (type, TREE_REAL_CST_PTR
7218 (imag)->sign), arg);
7224 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7225 Return NULL_TREE if no simplification can be made. */
7228 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7231 enum built_in_function fcode;
7234 if (!validate_arg (arg, REAL_TYPE))
7237 /* Calculate the result when the argument is a constant. */
7238 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7241 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7242 fcode = builtin_mathfn_code (arg);
7243 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7245 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7246 arg = fold_build2_loc (loc, MULT_EXPR, type,
7247 CALL_EXPR_ARG (arg, 0),
7248 build_real (type, dconsthalf));
7249 return build_call_expr_loc (loc, expfn, 1, arg);
7252 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7253 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7255 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7259 tree arg0 = CALL_EXPR_ARG (arg, 0);
7261 /* The inner root was either sqrt or cbrt. */
7262 /* This was a conditional expression but it triggered a bug
7264 REAL_VALUE_TYPE dconstroot;
7265 if (BUILTIN_SQRT_P (fcode))
7266 dconstroot = dconsthalf;
7268 dconstroot = dconst_third ();
7270 /* Adjust for the outer root. */
7271 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7272 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7273 tree_root = build_real (type, dconstroot);
7274 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7278 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7279 if (flag_unsafe_math_optimizations
7280 && (fcode == BUILT_IN_POW
7281 || fcode == BUILT_IN_POWF
7282 || fcode == BUILT_IN_POWL))
7284 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7285 tree arg0 = CALL_EXPR_ARG (arg, 0);
7286 tree arg1 = CALL_EXPR_ARG (arg, 1);
7288 if (!tree_expr_nonnegative_p (arg0))
7289 arg0 = build1 (ABS_EXPR, type, arg0);
7290 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7291 build_real (type, dconsthalf));
7292 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7298 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7299 Return NULL_TREE if no simplification can be made. */
7302 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7304 const enum built_in_function fcode = builtin_mathfn_code (arg);
7307 if (!validate_arg (arg, REAL_TYPE))
7310 /* Calculate the result when the argument is a constant. */
7311 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7314 if (flag_unsafe_math_optimizations)
7316 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7317 if (BUILTIN_EXPONENT_P (fcode))
7319 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7320 const REAL_VALUE_TYPE third_trunc =
7321 real_value_truncate (TYPE_MODE (type), dconst_third ());
7322 arg = fold_build2_loc (loc, MULT_EXPR, type,
7323 CALL_EXPR_ARG (arg, 0),
7324 build_real (type, third_trunc));
7325 return build_call_expr_loc (loc, expfn, 1, arg);
7328 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7329 if (BUILTIN_SQRT_P (fcode))
7331 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7335 tree arg0 = CALL_EXPR_ARG (arg, 0);
7337 REAL_VALUE_TYPE dconstroot = dconst_third ();
7339 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7340 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7341 tree_root = build_real (type, dconstroot);
7342 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7346 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7347 if (BUILTIN_CBRT_P (fcode))
7349 tree arg0 = CALL_EXPR_ARG (arg, 0);
7350 if (tree_expr_nonnegative_p (arg0))
7352 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7357 REAL_VALUE_TYPE dconstroot;
7359 real_arithmetic (&dconstroot, MULT_EXPR,
7360 dconst_third_ptr (), dconst_third_ptr ());
7361 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7362 tree_root = build_real (type, dconstroot);
7363 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7368 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7369 if (fcode == BUILT_IN_POW
7370 || fcode == BUILT_IN_POWF
7371 || fcode == BUILT_IN_POWL)
7373 tree arg00 = CALL_EXPR_ARG (arg, 0);
7374 tree arg01 = CALL_EXPR_ARG (arg, 1);
7375 if (tree_expr_nonnegative_p (arg00))
7377 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7378 const REAL_VALUE_TYPE dconstroot
7379 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7380 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7381 build_real (type, dconstroot));
7382 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7389 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7390 TYPE is the type of the return value. Return NULL_TREE if no
7391 simplification can be made. */
7394 fold_builtin_cos (location_t loc,
7395 tree arg, tree type, tree fndecl)
7399 if (!validate_arg (arg, REAL_TYPE))
7402 /* Calculate the result when the argument is a constant. */
7403 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7406 /* Optimize cos(-x) into cos (x). */
7407 if ((narg = fold_strip_sign_ops (arg)))
7408 return build_call_expr_loc (loc, fndecl, 1, narg);
7413 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7414 Return NULL_TREE if no simplification can be made. */
7417 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7419 if (validate_arg (arg, REAL_TYPE))
7423 /* Calculate the result when the argument is a constant. */
7424 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7427 /* Optimize cosh(-x) into cosh (x). */
7428 if ((narg = fold_strip_sign_ops (arg)))
7429 return build_call_expr_loc (loc, fndecl, 1, narg);
7435 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7436 argument ARG. TYPE is the type of the return value. Return
7437 NULL_TREE if no simplification can be made. */
7440 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7443 if (validate_arg (arg, COMPLEX_TYPE)
7444 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7448 /* Calculate the result when the argument is a constant. */
7449 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7452 /* Optimize fn(-x) into fn(x). */
7453 if ((tmp = fold_strip_sign_ops (arg)))
7454 return build_call_expr_loc (loc, fndecl, 1, tmp);
7460 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7461 Return NULL_TREE if no simplification can be made. */
7464 fold_builtin_tan (tree arg, tree type)
7466 enum built_in_function fcode;
7469 if (!validate_arg (arg, REAL_TYPE))
7472 /* Calculate the result when the argument is a constant. */
7473 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7476 /* Optimize tan(atan(x)) = x. */
7477 fcode = builtin_mathfn_code (arg);
7478 if (flag_unsafe_math_optimizations
7479 && (fcode == BUILT_IN_ATAN
7480 || fcode == BUILT_IN_ATANF
7481 || fcode == BUILT_IN_ATANL))
7482 return CALL_EXPR_ARG (arg, 0);
7487 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7488 NULL_TREE if no simplification can be made. */
7491 fold_builtin_sincos (location_t loc,
7492 tree arg0, tree arg1, tree arg2)
7497 if (!validate_arg (arg0, REAL_TYPE)
7498 || !validate_arg (arg1, POINTER_TYPE)
7499 || !validate_arg (arg2, POINTER_TYPE))
7502 type = TREE_TYPE (arg0);
7504 /* Calculate the result when the argument is a constant. */
7505 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7508 /* Canonicalize sincos to cexpi. */
7509 if (!TARGET_C99_FUNCTIONS)
7511 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7515 call = build_call_expr_loc (loc, fn, 1, arg0);
7516 call = builtin_save_expr (call);
7518 return build2 (COMPOUND_EXPR, void_type_node,
7519 build2 (MODIFY_EXPR, void_type_node,
7520 build_fold_indirect_ref_loc (loc, arg1),
7521 build1 (IMAGPART_EXPR, type, call)),
7522 build2 (MODIFY_EXPR, void_type_node,
7523 build_fold_indirect_ref_loc (loc, arg2),
7524 build1 (REALPART_EXPR, type, call)));
7527 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7528 NULL_TREE if no simplification can be made. */
7531 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7534 tree realp, imagp, ifn;
7537 if (!validate_arg (arg0, COMPLEX_TYPE)
7538 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7541 /* Calculate the result when the argument is a constant. */
7542 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7545 rtype = TREE_TYPE (TREE_TYPE (arg0));
7547 /* In case we can figure out the real part of arg0 and it is constant zero
7549 if (!TARGET_C99_FUNCTIONS)
7551 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7555 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7556 && real_zerop (realp))
7558 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7559 return build_call_expr_loc (loc, ifn, 1, narg);
7562 /* In case we can easily decompose real and imaginary parts split cexp
7563 to exp (r) * cexpi (i). */
7564 if (flag_unsafe_math_optimizations
7567 tree rfn, rcall, icall;
7569 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7573 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7577 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7578 icall = builtin_save_expr (icall);
7579 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7580 rcall = builtin_save_expr (rcall);
7581 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7582 fold_build2_loc (loc, MULT_EXPR, rtype,
7584 fold_build1_loc (loc, REALPART_EXPR,
7586 fold_build2_loc (loc, MULT_EXPR, rtype,
7588 fold_build1_loc (loc, IMAGPART_EXPR,
7595 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7596 Return NULL_TREE if no simplification can be made. */
7599 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7601 if (!validate_arg (arg, REAL_TYPE))
7604 /* Optimize trunc of constant value. */
7605 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7607 REAL_VALUE_TYPE r, x;
7608 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7610 x = TREE_REAL_CST (arg);
7611 real_trunc (&r, TYPE_MODE (type), &x);
7612 return build_real (type, r);
7615 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7618 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7619 Return NULL_TREE if no simplification can be made. */
7622 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7624 if (!validate_arg (arg, REAL_TYPE))
7627 /* Optimize floor of constant value. */
7628 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7632 x = TREE_REAL_CST (arg);
7633 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7635 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7638 real_floor (&r, TYPE_MODE (type), &x);
7639 return build_real (type, r);
7643 /* Fold floor (x) where x is nonnegative to trunc (x). */
7644 if (tree_expr_nonnegative_p (arg))
7646 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7648 return build_call_expr_loc (loc, truncfn, 1, arg);
7651 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7654 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7655 Return NULL_TREE if no simplification can be made. */
7658 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7660 if (!validate_arg (arg, REAL_TYPE))
7663 /* Optimize ceil of constant value. */
7664 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7668 x = TREE_REAL_CST (arg);
7669 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7671 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7674 real_ceil (&r, TYPE_MODE (type), &x);
7675 return build_real (type, r);
7679 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7682 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7683 Return NULL_TREE if no simplification can be made. */
7686 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7688 if (!validate_arg (arg, REAL_TYPE))
7691 /* Optimize round of constant value. */
7692 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7696 x = TREE_REAL_CST (arg);
7697 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7699 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7702 real_round (&r, TYPE_MODE (type), &x);
7703 return build_real (type, r);
7707 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7710 /* Fold function call to builtin lround, lroundf or lroundl (or the
7711 corresponding long long versions) and other rounding functions. ARG
7712 is the argument to the call. Return NULL_TREE if no simplification
7716 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7718 if (!validate_arg (arg, REAL_TYPE))
7721 /* Optimize lround of constant value. */
7722 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7724 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7726 if (real_isfinite (&x))
7728 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7729 tree ftype = TREE_TYPE (arg);
7733 switch (DECL_FUNCTION_CODE (fndecl))
7735 CASE_FLT_FN (BUILT_IN_LFLOOR):
7736 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7737 real_floor (&r, TYPE_MODE (ftype), &x);
7740 CASE_FLT_FN (BUILT_IN_LCEIL):
7741 CASE_FLT_FN (BUILT_IN_LLCEIL):
7742 real_ceil (&r, TYPE_MODE (ftype), &x);
7745 CASE_FLT_FN (BUILT_IN_LROUND):
7746 CASE_FLT_FN (BUILT_IN_LLROUND):
7747 real_round (&r, TYPE_MODE (ftype), &x);
7754 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7755 if (double_int_fits_to_tree_p (itype, val))
7756 return double_int_to_tree (itype, val);
7760 switch (DECL_FUNCTION_CODE (fndecl))
7762 CASE_FLT_FN (BUILT_IN_LFLOOR):
7763 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7764 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7765 if (tree_expr_nonnegative_p (arg))
7766 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7767 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7772 return fold_fixed_mathfn (loc, fndecl, arg);
7775 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7776 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7777 the argument to the call. Return NULL_TREE if no simplification can
7781 fold_builtin_bitop (tree fndecl, tree arg)
7783 if (!validate_arg (arg, INTEGER_TYPE))
7786 /* Optimize for constant argument. */
7787 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7789 HOST_WIDE_INT hi, width, result;
7790 unsigned HOST_WIDE_INT lo;
7793 type = TREE_TYPE (arg);
7794 width = TYPE_PRECISION (type);
7795 lo = TREE_INT_CST_LOW (arg);
7797 /* Clear all the bits that are beyond the type's precision. */
7798 if (width > HOST_BITS_PER_WIDE_INT)
7800 hi = TREE_INT_CST_HIGH (arg);
7801 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7802 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7807 if (width < HOST_BITS_PER_WIDE_INT)
7808 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7811 switch (DECL_FUNCTION_CODE (fndecl))
7813 CASE_INT_FN (BUILT_IN_FFS):
7815 result = ffs_hwi (lo);
7817 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7822 CASE_INT_FN (BUILT_IN_CLZ):
7824 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7826 result = width - floor_log2 (lo) - 1;
7827 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7831 CASE_INT_FN (BUILT_IN_CTZ):
7833 result = ctz_hwi (lo);
7835 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7836 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7840 CASE_INT_FN (BUILT_IN_POPCOUNT):
7843 result++, lo &= lo - 1;
7845 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7848 CASE_INT_FN (BUILT_IN_PARITY):
7851 result++, lo &= lo - 1;
7853 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7861 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7867 /* Fold function call to builtin_bswap and the long and long long
7868 variants. Return NULL_TREE if no simplification can be made. */
7870 fold_builtin_bswap (tree fndecl, tree arg)
7872 if (! validate_arg (arg, INTEGER_TYPE))
7875 /* Optimize constant value. */
7876 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7878 HOST_WIDE_INT hi, width, r_hi = 0;
7879 unsigned HOST_WIDE_INT lo, r_lo = 0;
7882 type = TREE_TYPE (arg);
7883 width = TYPE_PRECISION (type);
7884 lo = TREE_INT_CST_LOW (arg);
7885 hi = TREE_INT_CST_HIGH (arg);
7887 switch (DECL_FUNCTION_CODE (fndecl))
7889 case BUILT_IN_BSWAP32:
7890 case BUILT_IN_BSWAP64:
7894 for (s = 0; s < width; s += 8)
7896 int d = width - s - 8;
7897 unsigned HOST_WIDE_INT byte;
7899 if (s < HOST_BITS_PER_WIDE_INT)
7900 byte = (lo >> s) & 0xff;
7902 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7904 if (d < HOST_BITS_PER_WIDE_INT)
7907 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7917 if (width < HOST_BITS_PER_WIDE_INT)
7918 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7920 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7926 /* A subroutine of fold_builtin to fold the various logarithmic
7927 functions. Return NULL_TREE if no simplification can me made.
7928 FUNC is the corresponding MPFR logarithm function. */
7931 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7932 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7934 if (validate_arg (arg, REAL_TYPE))
7936 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7938 const enum built_in_function fcode = builtin_mathfn_code (arg);
7940 /* Calculate the result when the argument is a constant. */
7941 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7944 /* Special case, optimize logN(expN(x)) = x. */
7945 if (flag_unsafe_math_optimizations
7946 && ((func == mpfr_log
7947 && (fcode == BUILT_IN_EXP
7948 || fcode == BUILT_IN_EXPF
7949 || fcode == BUILT_IN_EXPL))
7950 || (func == mpfr_log2
7951 && (fcode == BUILT_IN_EXP2
7952 || fcode == BUILT_IN_EXP2F
7953 || fcode == BUILT_IN_EXP2L))
7954 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7955 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7957 /* Optimize logN(func()) for various exponential functions. We
7958 want to determine the value "x" and the power "exponent" in
7959 order to transform logN(x**exponent) into exponent*logN(x). */
7960 if (flag_unsafe_math_optimizations)
7962 tree exponent = 0, x = 0;
7966 CASE_FLT_FN (BUILT_IN_EXP):
7967 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7968 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7970 exponent = CALL_EXPR_ARG (arg, 0);
7972 CASE_FLT_FN (BUILT_IN_EXP2):
7973 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7974 x = build_real (type, dconst2);
7975 exponent = CALL_EXPR_ARG (arg, 0);
7977 CASE_FLT_FN (BUILT_IN_EXP10):
7978 CASE_FLT_FN (BUILT_IN_POW10):
7979 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7981 REAL_VALUE_TYPE dconst10;
7982 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7983 x = build_real (type, dconst10);
7985 exponent = CALL_EXPR_ARG (arg, 0);
7987 CASE_FLT_FN (BUILT_IN_SQRT):
7988 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7989 x = CALL_EXPR_ARG (arg, 0);
7990 exponent = build_real (type, dconsthalf);
7992 CASE_FLT_FN (BUILT_IN_CBRT):
7993 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7994 x = CALL_EXPR_ARG (arg, 0);
7995 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7998 CASE_FLT_FN (BUILT_IN_POW):
7999 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8000 x = CALL_EXPR_ARG (arg, 0);
8001 exponent = CALL_EXPR_ARG (arg, 1);
8007 /* Now perform the optimization. */
8010 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8011 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8019 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8020 NULL_TREE if no simplification can be made. */
8023 fold_builtin_hypot (location_t loc, tree fndecl,
8024 tree arg0, tree arg1, tree type)
8026 tree res, narg0, narg1;
8028 if (!validate_arg (arg0, REAL_TYPE)
8029 || !validate_arg (arg1, REAL_TYPE))
8032 /* Calculate the result when the argument is a constant. */
8033 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8036 /* If either argument to hypot has a negate or abs, strip that off.
8037 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8038 narg0 = fold_strip_sign_ops (arg0);
8039 narg1 = fold_strip_sign_ops (arg1);
8042 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8043 narg1 ? narg1 : arg1);
8046 /* If either argument is zero, hypot is fabs of the other. */
8047 if (real_zerop (arg0))
8048 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8049 else if (real_zerop (arg1))
8050 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8052 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8053 if (flag_unsafe_math_optimizations
8054 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8056 const REAL_VALUE_TYPE sqrt2_trunc
8057 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8058 return fold_build2_loc (loc, MULT_EXPR, type,
8059 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8060 build_real (type, sqrt2_trunc));
8067 /* Fold a builtin function call to pow, powf, or powl. Return
8068 NULL_TREE if no simplification can be made. */
8070 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8074 if (!validate_arg (arg0, REAL_TYPE)
8075 || !validate_arg (arg1, REAL_TYPE))
8078 /* Calculate the result when the argument is a constant. */
8079 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8082 /* Optimize pow(1.0,y) = 1.0. */
8083 if (real_onep (arg0))
8084 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8086 if (TREE_CODE (arg1) == REAL_CST
8087 && !TREE_OVERFLOW (arg1))
8089 REAL_VALUE_TYPE cint;
8093 c = TREE_REAL_CST (arg1);
8095 /* Optimize pow(x,0.0) = 1.0. */
8096 if (REAL_VALUES_EQUAL (c, dconst0))
8097 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8100 /* Optimize pow(x,1.0) = x. */
8101 if (REAL_VALUES_EQUAL (c, dconst1))
8104 /* Optimize pow(x,-1.0) = 1.0/x. */
8105 if (REAL_VALUES_EQUAL (c, dconstm1))
8106 return fold_build2_loc (loc, RDIV_EXPR, type,
8107 build_real (type, dconst1), arg0);
8109 /* Optimize pow(x,0.5) = sqrt(x). */
8110 if (flag_unsafe_math_optimizations
8111 && REAL_VALUES_EQUAL (c, dconsthalf))
8113 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8115 if (sqrtfn != NULL_TREE)
8116 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8119 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8120 if (flag_unsafe_math_optimizations)
8122 const REAL_VALUE_TYPE dconstroot
8123 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8125 if (REAL_VALUES_EQUAL (c, dconstroot))
8127 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8128 if (cbrtfn != NULL_TREE)
8129 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8133 /* Check for an integer exponent. */
8134 n = real_to_integer (&c);
8135 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8136 if (real_identical (&c, &cint))
8138 /* Attempt to evaluate pow at compile-time, unless this should
8139 raise an exception. */
8140 if (TREE_CODE (arg0) == REAL_CST
8141 && !TREE_OVERFLOW (arg0)
8143 || (!flag_trapping_math && !flag_errno_math)
8144 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8149 x = TREE_REAL_CST (arg0);
8150 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8151 if (flag_unsafe_math_optimizations || !inexact)
8152 return build_real (type, x);
8155 /* Strip sign ops from even integer powers. */
8156 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8158 tree narg0 = fold_strip_sign_ops (arg0);
8160 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8165 if (flag_unsafe_math_optimizations)
8167 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8169 /* Optimize pow(expN(x),y) = expN(x*y). */
8170 if (BUILTIN_EXPONENT_P (fcode))
8172 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8173 tree arg = CALL_EXPR_ARG (arg0, 0);
8174 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8175 return build_call_expr_loc (loc, expfn, 1, arg);
8178 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8179 if (BUILTIN_SQRT_P (fcode))
8181 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8182 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8183 build_real (type, dconsthalf));
8184 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8187 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8188 if (BUILTIN_CBRT_P (fcode))
8190 tree arg = CALL_EXPR_ARG (arg0, 0);
8191 if (tree_expr_nonnegative_p (arg))
8193 const REAL_VALUE_TYPE dconstroot
8194 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8195 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8196 build_real (type, dconstroot));
8197 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8201 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8202 if (fcode == BUILT_IN_POW
8203 || fcode == BUILT_IN_POWF
8204 || fcode == BUILT_IN_POWL)
8206 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8207 if (tree_expr_nonnegative_p (arg00))
8209 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8210 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8211 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8219 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8220 Return NULL_TREE if no simplification can be made. */
8222 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8223 tree arg0, tree arg1, tree type)
8225 if (!validate_arg (arg0, REAL_TYPE)
8226 || !validate_arg (arg1, INTEGER_TYPE))
8229 /* Optimize pow(1.0,y) = 1.0. */
8230 if (real_onep (arg0))
8231 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8233 if (host_integerp (arg1, 0))
8235 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8237 /* Evaluate powi at compile-time. */
8238 if (TREE_CODE (arg0) == REAL_CST
8239 && !TREE_OVERFLOW (arg0))
8242 x = TREE_REAL_CST (arg0);
8243 real_powi (&x, TYPE_MODE (type), &x, c);
8244 return build_real (type, x);
8247 /* Optimize pow(x,0) = 1.0. */
8249 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8252 /* Optimize pow(x,1) = x. */
8256 /* Optimize pow(x,-1) = 1.0/x. */
8258 return fold_build2_loc (loc, RDIV_EXPR, type,
8259 build_real (type, dconst1), arg0);
8265 /* A subroutine of fold_builtin to fold the various exponent
8266 functions. Return NULL_TREE if no simplification can be made.
8267 FUNC is the corresponding MPFR exponent function. */
8270 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8271 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8273 if (validate_arg (arg, REAL_TYPE))
8275 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8278 /* Calculate the result when the argument is a constant. */
8279 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8282 /* Optimize expN(logN(x)) = x. */
8283 if (flag_unsafe_math_optimizations)
8285 const enum built_in_function fcode = builtin_mathfn_code (arg);
8287 if ((func == mpfr_exp
8288 && (fcode == BUILT_IN_LOG
8289 || fcode == BUILT_IN_LOGF
8290 || fcode == BUILT_IN_LOGL))
8291 || (func == mpfr_exp2
8292 && (fcode == BUILT_IN_LOG2
8293 || fcode == BUILT_IN_LOG2F
8294 || fcode == BUILT_IN_LOG2L))
8295 || (func == mpfr_exp10
8296 && (fcode == BUILT_IN_LOG10
8297 || fcode == BUILT_IN_LOG10F
8298 || fcode == BUILT_IN_LOG10L)))
8299 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8306 /* Return true if VAR is a VAR_DECL or a component thereof. */
8309 var_decl_component_p (tree var)
8312 while (handled_component_p (inner))
8313 inner = TREE_OPERAND (inner, 0);
8314 return SSA_VAR_P (inner);
8317 /* Fold function call to builtin memset. Return
8318 NULL_TREE if no simplification can be made. */
8321 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8322 tree type, bool ignore)
8324 tree var, ret, etype;
8325 unsigned HOST_WIDE_INT length, cval;
8327 if (! validate_arg (dest, POINTER_TYPE)
8328 || ! validate_arg (c, INTEGER_TYPE)
8329 || ! validate_arg (len, INTEGER_TYPE))
8332 if (! host_integerp (len, 1))
8335 /* If the LEN parameter is zero, return DEST. */
8336 if (integer_zerop (len))
8337 return omit_one_operand_loc (loc, type, dest, c);
8339 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8344 if (TREE_CODE (var) != ADDR_EXPR)
8347 var = TREE_OPERAND (var, 0);
8348 if (TREE_THIS_VOLATILE (var))
8351 etype = TREE_TYPE (var);
8352 if (TREE_CODE (etype) == ARRAY_TYPE)
8353 etype = TREE_TYPE (etype);
8355 if (!INTEGRAL_TYPE_P (etype)
8356 && !POINTER_TYPE_P (etype))
8359 if (! var_decl_component_p (var))
8362 length = tree_low_cst (len, 1);
8363 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8364 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8368 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8371 if (integer_zerop (c))
8375 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8378 cval = tree_low_cst (c, 1);
8382 cval |= (cval << 31) << 1;
8385 ret = build_int_cst_type (etype, cval);
8386 var = build_fold_indirect_ref_loc (loc,
8387 fold_convert_loc (loc,
8388 build_pointer_type (etype),
8390 ret = build2 (MODIFY_EXPR, etype, var, ret);
8394 return omit_one_operand_loc (loc, type, dest, ret);
8397 /* Fold function call to builtin memset. Return
8398 NULL_TREE if no simplification can be made. */
8401 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8403 if (! validate_arg (dest, POINTER_TYPE)
8404 || ! validate_arg (size, INTEGER_TYPE))
8410 /* New argument list transforming bzero(ptr x, int y) to
8411 memset(ptr x, int 0, size_t y). This is done this way
8412 so that if it isn't expanded inline, we fallback to
8413 calling bzero instead of memset. */
8415 return fold_builtin_memset (loc, dest, integer_zero_node,
8416 fold_convert_loc (loc, sizetype, size),
8417 void_type_node, ignore);
8420 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8421 NULL_TREE if no simplification can be made.
8422 If ENDP is 0, return DEST (like memcpy).
8423 If ENDP is 1, return DEST+LEN (like mempcpy).
8424 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8425 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8429 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8430 tree len, tree type, bool ignore, int endp)
8432 tree destvar, srcvar, expr;
8434 if (! validate_arg (dest, POINTER_TYPE)
8435 || ! validate_arg (src, POINTER_TYPE)
8436 || ! validate_arg (len, INTEGER_TYPE))
8439 /* If the LEN parameter is zero, return DEST. */
8440 if (integer_zerop (len))
8441 return omit_one_operand_loc (loc, type, dest, src);
8443 /* If SRC and DEST are the same (and not volatile), return
8444 DEST{,+LEN,+LEN-1}. */
8445 if (operand_equal_p (src, dest, 0))
8449 tree srctype, desttype;
8450 unsigned int src_align, dest_align;
8455 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8456 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8458 /* Both DEST and SRC must be pointer types.
8459 ??? This is what old code did. Is the testing for pointer types
8462 If either SRC is readonly or length is 1, we can use memcpy. */
8463 if (!dest_align || !src_align)
8465 if (readonly_data_expr (src)
8466 || (host_integerp (len, 1)
8467 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8468 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8470 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8473 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8476 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8477 if (TREE_CODE (src) == ADDR_EXPR
8478 && TREE_CODE (dest) == ADDR_EXPR)
8480 tree src_base, dest_base, fn;
8481 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8482 HOST_WIDE_INT size = -1;
8483 HOST_WIDE_INT maxsize = -1;
8485 srcvar = TREE_OPERAND (src, 0);
8486 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8488 destvar = TREE_OPERAND (dest, 0);
8489 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8491 if (host_integerp (len, 1))
8492 maxsize = tree_low_cst (len, 1);
8495 src_offset /= BITS_PER_UNIT;
8496 dest_offset /= BITS_PER_UNIT;
8497 if (SSA_VAR_P (src_base)
8498 && SSA_VAR_P (dest_base))
8500 if (operand_equal_p (src_base, dest_base, 0)
8501 && ranges_overlap_p (src_offset, maxsize,
8502 dest_offset, maxsize))
8505 else if (TREE_CODE (src_base) == MEM_REF
8506 && TREE_CODE (dest_base) == MEM_REF)
8509 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8510 TREE_OPERAND (dest_base, 0), 0))
8512 off = double_int_add (mem_ref_offset (src_base),
8513 shwi_to_double_int (src_offset));
8514 if (!double_int_fits_in_shwi_p (off))
8516 src_offset = off.low;
8517 off = double_int_add (mem_ref_offset (dest_base),
8518 shwi_to_double_int (dest_offset));
8519 if (!double_int_fits_in_shwi_p (off))
8521 dest_offset = off.low;
8522 if (ranges_overlap_p (src_offset, maxsize,
8523 dest_offset, maxsize))
8529 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8532 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8535 /* If the destination and source do not alias optimize into
8537 if ((is_gimple_min_invariant (dest)
8538 || TREE_CODE (dest) == SSA_NAME)
8539 && (is_gimple_min_invariant (src)
8540 || TREE_CODE (src) == SSA_NAME))
8543 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8544 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8545 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8548 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8551 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8558 if (!host_integerp (len, 0))
8561 This logic lose for arguments like (type *)malloc (sizeof (type)),
8562 since we strip the casts of up to VOID return value from malloc.
8563 Perhaps we ought to inherit type from non-VOID argument here? */
8566 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8567 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8569 tree tem = TREE_OPERAND (src, 0);
8571 if (tem != TREE_OPERAND (src, 0))
8572 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8574 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8576 tree tem = TREE_OPERAND (dest, 0);
8578 if (tem != TREE_OPERAND (dest, 0))
8579 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8581 srctype = TREE_TYPE (TREE_TYPE (src));
8583 && TREE_CODE (srctype) == ARRAY_TYPE
8584 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8586 srctype = TREE_TYPE (srctype);
8588 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8590 desttype = TREE_TYPE (TREE_TYPE (dest));
8592 && TREE_CODE (desttype) == ARRAY_TYPE
8593 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8595 desttype = TREE_TYPE (desttype);
8597 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8599 if (!srctype || !desttype
8600 || TREE_ADDRESSABLE (srctype)
8601 || TREE_ADDRESSABLE (desttype)
8602 || !TYPE_SIZE_UNIT (srctype)
8603 || !TYPE_SIZE_UNIT (desttype)
8604 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8605 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8608 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8609 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8610 if (dest_align < TYPE_ALIGN (desttype)
8611 || src_align < TYPE_ALIGN (srctype))
8615 dest = builtin_save_expr (dest);
8617 /* Build accesses at offset zero with a ref-all character type. */
8618 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8619 ptr_mode, true), 0);
8622 STRIP_NOPS (destvar);
8623 if (TREE_CODE (destvar) == ADDR_EXPR
8624 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8625 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8626 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8628 destvar = NULL_TREE;
8631 STRIP_NOPS (srcvar);
8632 if (TREE_CODE (srcvar) == ADDR_EXPR
8633 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8634 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8637 || src_align >= TYPE_ALIGN (desttype))
8638 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8640 else if (!STRICT_ALIGNMENT)
8642 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8644 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8652 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8655 if (srcvar == NULL_TREE)
8658 if (src_align >= TYPE_ALIGN (desttype))
8659 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8662 if (STRICT_ALIGNMENT)
8664 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8666 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8669 else if (destvar == NULL_TREE)
8672 if (dest_align >= TYPE_ALIGN (srctype))
8673 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8676 if (STRICT_ALIGNMENT)
8678 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8680 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8684 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8690 if (endp == 0 || endp == 3)
8691 return omit_one_operand_loc (loc, type, dest, expr);
8697 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8700 len = fold_convert_loc (loc, sizetype, len);
8701 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8702 dest = fold_convert_loc (loc, type, dest);
8704 dest = omit_one_operand_loc (loc, type, dest, expr);
8708 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8709 If LEN is not NULL, it represents the length of the string to be
8710 copied. Return NULL_TREE if no simplification can be made. */
8713 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8717 if (!validate_arg (dest, POINTER_TYPE)
8718 || !validate_arg (src, POINTER_TYPE))
8721 /* If SRC and DEST are the same (and not volatile), return DEST. */
8722 if (operand_equal_p (src, dest, 0))
8723 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8725 if (optimize_function_for_size_p (cfun))
8728 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8734 len = c_strlen (src, 1);
8735 if (! len || TREE_SIDE_EFFECTS (len))
8739 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8740 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8741 build_call_expr_loc (loc, fn, 3, dest, src, len));
8744 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8745 Return NULL_TREE if no simplification can be made. */
8748 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8750 tree fn, len, lenp1, call, type;
8752 if (!validate_arg (dest, POINTER_TYPE)
8753 || !validate_arg (src, POINTER_TYPE))
8756 len = c_strlen (src, 1);
8758 || TREE_CODE (len) != INTEGER_CST)
8761 if (optimize_function_for_size_p (cfun)
8762 /* If length is zero it's small enough. */
8763 && !integer_zerop (len))
8766 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8770 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8771 /* We use dest twice in building our expression. Save it from
8772 multiple expansions. */
8773 dest = builtin_save_expr (dest);
8774 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8776 type = TREE_TYPE (TREE_TYPE (fndecl));
8777 len = fold_convert_loc (loc, sizetype, len);
8778 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8779 dest = fold_convert_loc (loc, type, dest);
8780 dest = omit_one_operand_loc (loc, type, dest, call);
8784 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8785 If SLEN is not NULL, it represents the length of the source string.
8786 Return NULL_TREE if no simplification can be made. */
8789 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8790 tree src, tree len, tree slen)
8794 if (!validate_arg (dest, POINTER_TYPE)
8795 || !validate_arg (src, POINTER_TYPE)
8796 || !validate_arg (len, INTEGER_TYPE))
8799 /* If the LEN parameter is zero, return DEST. */
8800 if (integer_zerop (len))
8801 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8803 /* We can't compare slen with len as constants below if len is not a
8805 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8809 slen = c_strlen (src, 1);
8811 /* Now, we must be passed a constant src ptr parameter. */
8812 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8815 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8817 /* We do not support simplification of this case, though we do
8818 support it when expanding trees into RTL. */
8819 /* FIXME: generate a call to __builtin_memset. */
8820 if (tree_int_cst_lt (slen, len))
8823 /* OK transform into builtin memcpy. */
8824 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8827 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8828 build_call_expr_loc (loc, fn, 3, dest, src, len));
8831 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8832 arguments to the call, and TYPE is its return type.
8833 Return NULL_TREE if no simplification can be made. */
8836 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8838 if (!validate_arg (arg1, POINTER_TYPE)
8839 || !validate_arg (arg2, INTEGER_TYPE)
8840 || !validate_arg (len, INTEGER_TYPE))
8846 if (TREE_CODE (arg2) != INTEGER_CST
8847 || !host_integerp (len, 1))
8850 p1 = c_getstr (arg1);
8851 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8857 if (target_char_cast (arg2, &c))
8860 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8863 return build_int_cst (TREE_TYPE (arg1), 0);
8865 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8867 return fold_convert_loc (loc, type, tem);
8873 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8874 Return NULL_TREE if no simplification can be made. */
8877 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8879 const char *p1, *p2;
8881 if (!validate_arg (arg1, POINTER_TYPE)
8882 || !validate_arg (arg2, POINTER_TYPE)
8883 || !validate_arg (len, INTEGER_TYPE))
8886 /* If the LEN parameter is zero, return zero. */
8887 if (integer_zerop (len))
8888 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8891 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8892 if (operand_equal_p (arg1, arg2, 0))
8893 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8895 p1 = c_getstr (arg1);
8896 p2 = c_getstr (arg2);
8898 /* If all arguments are constant, and the value of len is not greater
8899 than the lengths of arg1 and arg2, evaluate at compile-time. */
8900 if (host_integerp (len, 1) && p1 && p2
8901 && compare_tree_int (len, strlen (p1) + 1) <= 0
8902 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8904 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8907 return integer_one_node;
8909 return integer_minus_one_node;
8911 return integer_zero_node;
8914 /* If len parameter is one, return an expression corresponding to
8915 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8916 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8918 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8919 tree cst_uchar_ptr_node
8920 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8923 = fold_convert_loc (loc, integer_type_node,
8924 build1 (INDIRECT_REF, cst_uchar_node,
8925 fold_convert_loc (loc,
8929 = fold_convert_loc (loc, integer_type_node,
8930 build1 (INDIRECT_REF, cst_uchar_node,
8931 fold_convert_loc (loc,
8934 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8940 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8941 Return NULL_TREE if no simplification can be made. */
8944 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8946 const char *p1, *p2;
8948 if (!validate_arg (arg1, POINTER_TYPE)
8949 || !validate_arg (arg2, POINTER_TYPE))
8952 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8953 if (operand_equal_p (arg1, arg2, 0))
8954 return integer_zero_node;
8956 p1 = c_getstr (arg1);
8957 p2 = c_getstr (arg2);
8961 const int i = strcmp (p1, p2);
8963 return integer_minus_one_node;
8965 return integer_one_node;
8967 return integer_zero_node;
8970 /* If the second arg is "", return *(const unsigned char*)arg1. */
8971 if (p2 && *p2 == '\0')
8973 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8974 tree cst_uchar_ptr_node
8975 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8977 return fold_convert_loc (loc, integer_type_node,
8978 build1 (INDIRECT_REF, cst_uchar_node,
8979 fold_convert_loc (loc,
8984 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8985 if (p1 && *p1 == '\0')
8987 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8988 tree cst_uchar_ptr_node
8989 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8992 = fold_convert_loc (loc, integer_type_node,
8993 build1 (INDIRECT_REF, cst_uchar_node,
8994 fold_convert_loc (loc,
8997 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9003 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9004 Return NULL_TREE if no simplification can be made. */
9007 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9009 const char *p1, *p2;
9011 if (!validate_arg (arg1, POINTER_TYPE)
9012 || !validate_arg (arg2, POINTER_TYPE)
9013 || !validate_arg (len, INTEGER_TYPE))
9016 /* If the LEN parameter is zero, return zero. */
9017 if (integer_zerop (len))
9018 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9021 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9022 if (operand_equal_p (arg1, arg2, 0))
9023 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9025 p1 = c_getstr (arg1);
9026 p2 = c_getstr (arg2);
9028 if (host_integerp (len, 1) && p1 && p2)
9030 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9032 return integer_one_node;
9034 return integer_minus_one_node;
9036 return integer_zero_node;
9039 /* If the second arg is "", and the length is greater than zero,
9040 return *(const unsigned char*)arg1. */
9041 if (p2 && *p2 == '\0'
9042 && TREE_CODE (len) == INTEGER_CST
9043 && tree_int_cst_sgn (len) == 1)
9045 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9046 tree cst_uchar_ptr_node
9047 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9049 return fold_convert_loc (loc, integer_type_node,
9050 build1 (INDIRECT_REF, cst_uchar_node,
9051 fold_convert_loc (loc,
9056 /* If the first arg is "", and the length is greater than zero,
9057 return -*(const unsigned char*)arg2. */
9058 if (p1 && *p1 == '\0'
9059 && TREE_CODE (len) == INTEGER_CST
9060 && tree_int_cst_sgn (len) == 1)
9062 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9063 tree cst_uchar_ptr_node
9064 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9066 tree temp = fold_convert_loc (loc, integer_type_node,
9067 build1 (INDIRECT_REF, cst_uchar_node,
9068 fold_convert_loc (loc,
9071 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9074 /* If len parameter is one, return an expression corresponding to
9075 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9076 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9078 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9079 tree cst_uchar_ptr_node
9080 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9082 tree ind1 = fold_convert_loc (loc, integer_type_node,
9083 build1 (INDIRECT_REF, cst_uchar_node,
9084 fold_convert_loc (loc,
9087 tree ind2 = fold_convert_loc (loc, integer_type_node,
9088 build1 (INDIRECT_REF, cst_uchar_node,
9089 fold_convert_loc (loc,
9092 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9098 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9099 ARG. Return NULL_TREE if no simplification can be made. */
9102 fold_builtin_signbit (location_t loc, tree arg, tree type)
9106 if (!validate_arg (arg, REAL_TYPE))
9109 /* If ARG is a compile-time constant, determine the result. */
9110 if (TREE_CODE (arg) == REAL_CST
9111 && !TREE_OVERFLOW (arg))
9115 c = TREE_REAL_CST (arg);
9116 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9117 return fold_convert_loc (loc, type, temp);
9120 /* If ARG is non-negative, the result is always zero. */
9121 if (tree_expr_nonnegative_p (arg))
9122 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9124 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9125 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9126 return fold_build2_loc (loc, LT_EXPR, type, arg,
9127 build_real (TREE_TYPE (arg), dconst0));
9132 /* Fold function call to builtin copysign, copysignf or copysignl with
9133 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9137 fold_builtin_copysign (location_t loc, tree fndecl,
9138 tree arg1, tree arg2, tree type)
9142 if (!validate_arg (arg1, REAL_TYPE)
9143 || !validate_arg (arg2, REAL_TYPE))
9146 /* copysign(X,X) is X. */
9147 if (operand_equal_p (arg1, arg2, 0))
9148 return fold_convert_loc (loc, type, arg1);
9150 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9151 if (TREE_CODE (arg1) == REAL_CST
9152 && TREE_CODE (arg2) == REAL_CST
9153 && !TREE_OVERFLOW (arg1)
9154 && !TREE_OVERFLOW (arg2))
9156 REAL_VALUE_TYPE c1, c2;
9158 c1 = TREE_REAL_CST (arg1);
9159 c2 = TREE_REAL_CST (arg2);
9160 /* c1.sign := c2.sign. */
9161 real_copysign (&c1, &c2);
9162 return build_real (type, c1);
9165 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9166 Remember to evaluate Y for side-effects. */
9167 if (tree_expr_nonnegative_p (arg2))
9168 return omit_one_operand_loc (loc, type,
9169 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9172 /* Strip sign changing operations for the first argument. */
9173 tem = fold_strip_sign_ops (arg1);
9175 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9180 /* Fold a call to builtin isascii with argument ARG. */
9183 fold_builtin_isascii (location_t loc, tree arg)
9185 if (!validate_arg (arg, INTEGER_TYPE))
9189 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9190 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9191 build_int_cst (NULL_TREE,
9192 ~ (unsigned HOST_WIDE_INT) 0x7f));
9193 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9194 arg, integer_zero_node);
9198 /* Fold a call to builtin toascii with argument ARG. */
9201 fold_builtin_toascii (location_t loc, tree arg)
9203 if (!validate_arg (arg, INTEGER_TYPE))
9206 /* Transform toascii(c) -> (c & 0x7f). */
9207 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9208 build_int_cst (NULL_TREE, 0x7f));
9211 /* Fold a call to builtin isdigit with argument ARG. */
9214 fold_builtin_isdigit (location_t loc, tree arg)
9216 if (!validate_arg (arg, INTEGER_TYPE))
9220 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9221 /* According to the C standard, isdigit is unaffected by locale.
9222 However, it definitely is affected by the target character set. */
9223 unsigned HOST_WIDE_INT target_digit0
9224 = lang_hooks.to_target_charset ('0');
9226 if (target_digit0 == 0)
9229 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9230 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9231 build_int_cst (unsigned_type_node, target_digit0));
9232 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9233 build_int_cst (unsigned_type_node, 9));
9237 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9240 fold_builtin_fabs (location_t loc, tree arg, tree type)
9242 if (!validate_arg (arg, REAL_TYPE))
9245 arg = fold_convert_loc (loc, type, arg);
9246 if (TREE_CODE (arg) == REAL_CST)
9247 return fold_abs_const (arg, type);
9248 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9251 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9254 fold_builtin_abs (location_t loc, tree arg, tree type)
9256 if (!validate_arg (arg, INTEGER_TYPE))
9259 arg = fold_convert_loc (loc, type, arg);
9260 if (TREE_CODE (arg) == INTEGER_CST)
9261 return fold_abs_const (arg, type);
9262 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9265 /* Fold a call to builtin fmin or fmax. */
9268 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9269 tree type, bool max)
9271 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9273 /* Calculate the result when the argument is a constant. */
9274 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9279 /* If either argument is NaN, return the other one. Avoid the
9280 transformation if we get (and honor) a signalling NaN. Using
9281 omit_one_operand() ensures we create a non-lvalue. */
9282 if (TREE_CODE (arg0) == REAL_CST
9283 && real_isnan (&TREE_REAL_CST (arg0))
9284 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9285 || ! TREE_REAL_CST (arg0).signalling))
9286 return omit_one_operand_loc (loc, type, arg1, arg0);
9287 if (TREE_CODE (arg1) == REAL_CST
9288 && real_isnan (&TREE_REAL_CST (arg1))
9289 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9290 || ! TREE_REAL_CST (arg1).signalling))
9291 return omit_one_operand_loc (loc, type, arg0, arg1);
9293 /* Transform fmin/fmax(x,x) -> x. */
9294 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9295 return omit_one_operand_loc (loc, type, arg0, arg1);
9297 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9298 functions to return the numeric arg if the other one is NaN.
9299 These tree codes don't honor that, so only transform if
9300 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9301 handled, so we don't have to worry about it either. */
9302 if (flag_finite_math_only)
9303 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9304 fold_convert_loc (loc, type, arg0),
9305 fold_convert_loc (loc, type, arg1));
9310 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9313 fold_builtin_carg (location_t loc, tree arg, tree type)
9315 if (validate_arg (arg, COMPLEX_TYPE)
9316 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9318 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9322 tree new_arg = builtin_save_expr (arg);
9323 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9324 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9325 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9332 /* Fold a call to builtin logb/ilogb. */
9335 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9337 if (! validate_arg (arg, REAL_TYPE))
9342 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9344 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9350 /* If arg is Inf or NaN and we're logb, return it. */
9351 if (TREE_CODE (rettype) == REAL_TYPE)
9352 return fold_convert_loc (loc, rettype, arg);
9353 /* Fall through... */
9355 /* Zero may set errno and/or raise an exception for logb, also
9356 for ilogb we don't know FP_ILOGB0. */
9359 /* For normal numbers, proceed iff radix == 2. In GCC,
9360 normalized significands are in the range [0.5, 1.0). We
9361 want the exponent as if they were [1.0, 2.0) so get the
9362 exponent and subtract 1. */
9363 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9364 return fold_convert_loc (loc, rettype,
9365 build_int_cst (NULL_TREE,
9366 REAL_EXP (value)-1));
9374 /* Fold a call to builtin significand, if radix == 2. */
9377 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9379 if (! validate_arg (arg, REAL_TYPE))
9384 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9386 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9393 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9394 return fold_convert_loc (loc, rettype, arg);
9396 /* For normal numbers, proceed iff radix == 2. */
9397 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9399 REAL_VALUE_TYPE result = *value;
9400 /* In GCC, normalized significands are in the range [0.5,
9401 1.0). We want them to be [1.0, 2.0) so set the
9403 SET_REAL_EXP (&result, 1);
9404 return build_real (rettype, result);
9413 /* Fold a call to builtin frexp, we can assume the base is 2. */
9416 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9418 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9423 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9426 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9428 /* Proceed if a valid pointer type was passed in. */
9429 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9431 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9437 /* For +-0, return (*exp = 0, +-0). */
9438 exp = integer_zero_node;
9443 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9444 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9447 /* Since the frexp function always expects base 2, and in
9448 GCC normalized significands are already in the range
9449 [0.5, 1.0), we have exactly what frexp wants. */
9450 REAL_VALUE_TYPE frac_rvt = *value;
9451 SET_REAL_EXP (&frac_rvt, 0);
9452 frac = build_real (rettype, frac_rvt);
9453 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9460 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9461 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9462 TREE_SIDE_EFFECTS (arg1) = 1;
9463 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9469 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9470 then we can assume the base is two. If it's false, then we have to
9471 check the mode of the TYPE parameter in certain cases. */
9474 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9475 tree type, bool ldexp)
9477 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9482 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9483 if (real_zerop (arg0) || integer_zerop (arg1)
9484 || (TREE_CODE (arg0) == REAL_CST
9485 && !real_isfinite (&TREE_REAL_CST (arg0))))
9486 return omit_one_operand_loc (loc, type, arg0, arg1);
9488 /* If both arguments are constant, then try to evaluate it. */
9489 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9490 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9491 && host_integerp (arg1, 0))
9493 /* Bound the maximum adjustment to twice the range of the
9494 mode's valid exponents. Use abs to ensure the range is
9495 positive as a sanity check. */
9496 const long max_exp_adj = 2 *
9497 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9498 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9500 /* Get the user-requested adjustment. */
9501 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9503 /* The requested adjustment must be inside this range. This
9504 is a preliminary cap to avoid things like overflow, we
9505 may still fail to compute the result for other reasons. */
9506 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9508 REAL_VALUE_TYPE initial_result;
9510 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9512 /* Ensure we didn't overflow. */
9513 if (! real_isinf (&initial_result))
9515 const REAL_VALUE_TYPE trunc_result
9516 = real_value_truncate (TYPE_MODE (type), initial_result);
9518 /* Only proceed if the target mode can hold the
9520 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9521 return build_real (type, trunc_result);
9530 /* Fold a call to builtin modf. */
9533 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9535 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9540 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9543 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9545 /* Proceed if a valid pointer type was passed in. */
9546 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9548 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9549 REAL_VALUE_TYPE trunc, frac;
9555 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9556 trunc = frac = *value;
9559 /* For +-Inf, return (*arg1 = arg0, +-0). */
9561 frac.sign = value->sign;
9565 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9566 real_trunc (&trunc, VOIDmode, value);
9567 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9568 /* If the original number was negative and already
9569 integral, then the fractional part is -0.0. */
9570 if (value->sign && frac.cl == rvc_zero)
9571 frac.sign = value->sign;
9575 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9576 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9577 build_real (rettype, trunc));
9578 TREE_SIDE_EFFECTS (arg1) = 1;
9579 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9580 build_real (rettype, frac));
9586 /* Given a location LOC, an interclass builtin function decl FNDECL
9587 and its single argument ARG, return an folded expression computing
9588 the same, or NULL_TREE if we either couldn't or didn't want to fold
9589 (the latter happen if there's an RTL instruction available). */
9592 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9594 enum machine_mode mode;
9596 if (!validate_arg (arg, REAL_TYPE))
9599 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9602 mode = TYPE_MODE (TREE_TYPE (arg));
9604 /* If there is no optab, try generic code. */
9605 switch (DECL_FUNCTION_CODE (fndecl))
9609 CASE_FLT_FN (BUILT_IN_ISINF):
9611 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9612 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9613 tree const type = TREE_TYPE (arg);
9617 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9618 real_from_string (&r, buf);
9619 result = build_call_expr (isgr_fn, 2,
9620 fold_build1_loc (loc, ABS_EXPR, type, arg),
9621 build_real (type, r));
9624 CASE_FLT_FN (BUILT_IN_FINITE):
9625 case BUILT_IN_ISFINITE:
9627 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9628 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9629 tree const type = TREE_TYPE (arg);
9633 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9634 real_from_string (&r, buf);
9635 result = build_call_expr (isle_fn, 2,
9636 fold_build1_loc (loc, ABS_EXPR, type, arg),
9637 build_real (type, r));
9638 /*result = fold_build2_loc (loc, UNGT_EXPR,
9639 TREE_TYPE (TREE_TYPE (fndecl)),
9640 fold_build1_loc (loc, ABS_EXPR, type, arg),
9641 build_real (type, r));
9642 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9643 TREE_TYPE (TREE_TYPE (fndecl)),
9647 case BUILT_IN_ISNORMAL:
9649 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9650 islessequal(fabs(x),DBL_MAX). */
9651 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9652 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9653 tree const type = TREE_TYPE (arg);
9654 REAL_VALUE_TYPE rmax, rmin;
9657 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9658 real_from_string (&rmax, buf);
9659 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9660 real_from_string (&rmin, buf);
9661 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9662 result = build_call_expr (isle_fn, 2, arg,
9663 build_real (type, rmax));
9664 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9665 build_call_expr (isge_fn, 2, arg,
9666 build_real (type, rmin)));
9676 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9677 ARG is the argument for the call. */
9680 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9682 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9685 if (!validate_arg (arg, REAL_TYPE))
9688 switch (builtin_index)
9690 case BUILT_IN_ISINF:
9691 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9692 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9694 if (TREE_CODE (arg) == REAL_CST)
9696 r = TREE_REAL_CST (arg);
9697 if (real_isinf (&r))
9698 return real_compare (GT_EXPR, &r, &dconst0)
9699 ? integer_one_node : integer_minus_one_node;
9701 return integer_zero_node;
9706 case BUILT_IN_ISINF_SIGN:
9708 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9709 /* In a boolean context, GCC will fold the inner COND_EXPR to
9710 1. So e.g. "if (isinf_sign(x))" would be folded to just
9711 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9712 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9713 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9714 tree tmp = NULL_TREE;
9716 arg = builtin_save_expr (arg);
9718 if (signbit_fn && isinf_fn)
9720 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9721 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9723 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9724 signbit_call, integer_zero_node);
9725 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9726 isinf_call, integer_zero_node);
9728 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9729 integer_minus_one_node, integer_one_node);
9730 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9738 case BUILT_IN_ISFINITE:
9739 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9740 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9741 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9743 if (TREE_CODE (arg) == REAL_CST)
9745 r = TREE_REAL_CST (arg);
9746 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9751 case BUILT_IN_ISNAN:
9752 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9753 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9755 if (TREE_CODE (arg) == REAL_CST)
9757 r = TREE_REAL_CST (arg);
9758 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9761 arg = builtin_save_expr (arg);
9762 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9769 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9770 This builtin will generate code to return the appropriate floating
9771 point classification depending on the value of the floating point
9772 number passed in. The possible return values must be supplied as
9773 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9774 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9775 one floating point argument which is "type generic". */
9778 fold_builtin_fpclassify (location_t loc, tree exp)
9780 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9781 arg, type, res, tmp;
9782 enum machine_mode mode;
9786 /* Verify the required arguments in the original call. */
9787 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9788 INTEGER_TYPE, INTEGER_TYPE,
9789 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9792 fp_nan = CALL_EXPR_ARG (exp, 0);
9793 fp_infinite = CALL_EXPR_ARG (exp, 1);
9794 fp_normal = CALL_EXPR_ARG (exp, 2);
9795 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9796 fp_zero = CALL_EXPR_ARG (exp, 4);
9797 arg = CALL_EXPR_ARG (exp, 5);
9798 type = TREE_TYPE (arg);
9799 mode = TYPE_MODE (type);
9800 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9804 (fabs(x) == Inf ? FP_INFINITE :
9805 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9806 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9808 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9809 build_real (type, dconst0));
9810 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9811 tmp, fp_zero, fp_subnormal);
9813 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9814 real_from_string (&r, buf);
9815 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9816 arg, build_real (type, r));
9817 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9819 if (HONOR_INFINITIES (mode))
9822 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9823 build_real (type, r));
9824 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9828 if (HONOR_NANS (mode))
9830 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9831 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9837 /* Fold a call to an unordered comparison function such as
9838 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9839 being called and ARG0 and ARG1 are the arguments for the call.
9840 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9841 the opposite of the desired result. UNORDERED_CODE is used
9842 for modes that can hold NaNs and ORDERED_CODE is used for
9846 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9847 enum tree_code unordered_code,
9848 enum tree_code ordered_code)
9850 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9851 enum tree_code code;
9853 enum tree_code code0, code1;
9854 tree cmp_type = NULL_TREE;
9856 type0 = TREE_TYPE (arg0);
9857 type1 = TREE_TYPE (arg1);
9859 code0 = TREE_CODE (type0);
9860 code1 = TREE_CODE (type1);
9862 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9863 /* Choose the wider of two real types. */
9864 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9866 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9868 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9871 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9872 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9874 if (unordered_code == UNORDERED_EXPR)
9876 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9877 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9878 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9881 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9883 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9884 fold_build2_loc (loc, code, type, arg0, arg1));
9887 /* Fold a call to built-in function FNDECL with 0 arguments.
9888 IGNORE is true if the result of the function call is ignored. This
9889 function returns NULL_TREE if no simplification was possible. */
9892 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9895 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9898 CASE_FLT_FN (BUILT_IN_INF):
9899 case BUILT_IN_INFD32:
9900 case BUILT_IN_INFD64:
9901 case BUILT_IN_INFD128:
9902 return fold_builtin_inf (loc, type, true);
9904 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9905 return fold_builtin_inf (loc, type, false);
9907 case BUILT_IN_CLASSIFY_TYPE:
9908 return fold_builtin_classify_type (NULL_TREE);
9916 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9917 IGNORE is true if the result of the function call is ignored. This
9918 function returns NULL_TREE if no simplification was possible. */
9921 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9923 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9924 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9927 case BUILT_IN_CONSTANT_P:
9929 tree val = fold_builtin_constant_p (arg0);
9931 /* Gimplification will pull the CALL_EXPR for the builtin out of
9932 an if condition. When not optimizing, we'll not CSE it back.
9933 To avoid link error types of regressions, return false now. */
9934 if (!val && !optimize)
9935 val = integer_zero_node;
9940 case BUILT_IN_CLASSIFY_TYPE:
9941 return fold_builtin_classify_type (arg0);
9943 case BUILT_IN_STRLEN:
9944 return fold_builtin_strlen (loc, type, arg0);
9946 CASE_FLT_FN (BUILT_IN_FABS):
9947 return fold_builtin_fabs (loc, arg0, type);
9951 case BUILT_IN_LLABS:
9952 case BUILT_IN_IMAXABS:
9953 return fold_builtin_abs (loc, arg0, type);
9955 CASE_FLT_FN (BUILT_IN_CONJ):
9956 if (validate_arg (arg0, COMPLEX_TYPE)
9957 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9958 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9961 CASE_FLT_FN (BUILT_IN_CREAL):
9962 if (validate_arg (arg0, COMPLEX_TYPE)
9963 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9964 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9967 CASE_FLT_FN (BUILT_IN_CIMAG):
9968 if (validate_arg (arg0, COMPLEX_TYPE)
9969 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9970 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9973 CASE_FLT_FN (BUILT_IN_CCOS):
9974 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9976 CASE_FLT_FN (BUILT_IN_CCOSH):
9977 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9979 CASE_FLT_FN (BUILT_IN_CPROJ):
9980 return fold_builtin_cproj(loc, arg0, type);
9982 CASE_FLT_FN (BUILT_IN_CSIN):
9983 if (validate_arg (arg0, COMPLEX_TYPE)
9984 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9985 return do_mpc_arg1 (arg0, type, mpc_sin);
9988 CASE_FLT_FN (BUILT_IN_CSINH):
9989 if (validate_arg (arg0, COMPLEX_TYPE)
9990 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9991 return do_mpc_arg1 (arg0, type, mpc_sinh);
9994 CASE_FLT_FN (BUILT_IN_CTAN):
9995 if (validate_arg (arg0, COMPLEX_TYPE)
9996 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9997 return do_mpc_arg1 (arg0, type, mpc_tan);
10000 CASE_FLT_FN (BUILT_IN_CTANH):
10001 if (validate_arg (arg0, COMPLEX_TYPE)
10002 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10003 return do_mpc_arg1 (arg0, type, mpc_tanh);
10006 CASE_FLT_FN (BUILT_IN_CLOG):
10007 if (validate_arg (arg0, COMPLEX_TYPE)
10008 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10009 return do_mpc_arg1 (arg0, type, mpc_log);
10012 CASE_FLT_FN (BUILT_IN_CSQRT):
10013 if (validate_arg (arg0, COMPLEX_TYPE)
10014 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10015 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10018 CASE_FLT_FN (BUILT_IN_CASIN):
10019 if (validate_arg (arg0, COMPLEX_TYPE)
10020 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10021 return do_mpc_arg1 (arg0, type, mpc_asin);
10024 CASE_FLT_FN (BUILT_IN_CACOS):
10025 if (validate_arg (arg0, COMPLEX_TYPE)
10026 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10027 return do_mpc_arg1 (arg0, type, mpc_acos);
10030 CASE_FLT_FN (BUILT_IN_CATAN):
10031 if (validate_arg (arg0, COMPLEX_TYPE)
10032 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10033 return do_mpc_arg1 (arg0, type, mpc_atan);
10036 CASE_FLT_FN (BUILT_IN_CASINH):
10037 if (validate_arg (arg0, COMPLEX_TYPE)
10038 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10039 return do_mpc_arg1 (arg0, type, mpc_asinh);
10042 CASE_FLT_FN (BUILT_IN_CACOSH):
10043 if (validate_arg (arg0, COMPLEX_TYPE)
10044 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10045 return do_mpc_arg1 (arg0, type, mpc_acosh);
10048 CASE_FLT_FN (BUILT_IN_CATANH):
10049 if (validate_arg (arg0, COMPLEX_TYPE)
10050 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10051 return do_mpc_arg1 (arg0, type, mpc_atanh);
10054 CASE_FLT_FN (BUILT_IN_CABS):
10055 return fold_builtin_cabs (loc, arg0, type, fndecl);
10057 CASE_FLT_FN (BUILT_IN_CARG):
10058 return fold_builtin_carg (loc, arg0, type);
10060 CASE_FLT_FN (BUILT_IN_SQRT):
10061 return fold_builtin_sqrt (loc, arg0, type);
10063 CASE_FLT_FN (BUILT_IN_CBRT):
10064 return fold_builtin_cbrt (loc, arg0, type);
10066 CASE_FLT_FN (BUILT_IN_ASIN):
10067 if (validate_arg (arg0, REAL_TYPE))
10068 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10069 &dconstm1, &dconst1, true);
10072 CASE_FLT_FN (BUILT_IN_ACOS):
10073 if (validate_arg (arg0, REAL_TYPE))
10074 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10075 &dconstm1, &dconst1, true);
10078 CASE_FLT_FN (BUILT_IN_ATAN):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10083 CASE_FLT_FN (BUILT_IN_ASINH):
10084 if (validate_arg (arg0, REAL_TYPE))
10085 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10088 CASE_FLT_FN (BUILT_IN_ACOSH):
10089 if (validate_arg (arg0, REAL_TYPE))
10090 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10091 &dconst1, NULL, true);
10094 CASE_FLT_FN (BUILT_IN_ATANH):
10095 if (validate_arg (arg0, REAL_TYPE))
10096 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10097 &dconstm1, &dconst1, false);
10100 CASE_FLT_FN (BUILT_IN_SIN):
10101 if (validate_arg (arg0, REAL_TYPE))
10102 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10105 CASE_FLT_FN (BUILT_IN_COS):
10106 return fold_builtin_cos (loc, arg0, type, fndecl);
10108 CASE_FLT_FN (BUILT_IN_TAN):
10109 return fold_builtin_tan (arg0, type);
10111 CASE_FLT_FN (BUILT_IN_CEXP):
10112 return fold_builtin_cexp (loc, arg0, type);
10114 CASE_FLT_FN (BUILT_IN_CEXPI):
10115 if (validate_arg (arg0, REAL_TYPE))
10116 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10119 CASE_FLT_FN (BUILT_IN_SINH):
10120 if (validate_arg (arg0, REAL_TYPE))
10121 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10124 CASE_FLT_FN (BUILT_IN_COSH):
10125 return fold_builtin_cosh (loc, arg0, type, fndecl);
10127 CASE_FLT_FN (BUILT_IN_TANH):
10128 if (validate_arg (arg0, REAL_TYPE))
10129 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10132 CASE_FLT_FN (BUILT_IN_ERF):
10133 if (validate_arg (arg0, REAL_TYPE))
10134 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10137 CASE_FLT_FN (BUILT_IN_ERFC):
10138 if (validate_arg (arg0, REAL_TYPE))
10139 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10142 CASE_FLT_FN (BUILT_IN_TGAMMA):
10143 if (validate_arg (arg0, REAL_TYPE))
10144 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10147 CASE_FLT_FN (BUILT_IN_EXP):
10148 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10150 CASE_FLT_FN (BUILT_IN_EXP2):
10151 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10153 CASE_FLT_FN (BUILT_IN_EXP10):
10154 CASE_FLT_FN (BUILT_IN_POW10):
10155 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10157 CASE_FLT_FN (BUILT_IN_EXPM1):
10158 if (validate_arg (arg0, REAL_TYPE))
10159 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10162 CASE_FLT_FN (BUILT_IN_LOG):
10163 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10165 CASE_FLT_FN (BUILT_IN_LOG2):
10166 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10168 CASE_FLT_FN (BUILT_IN_LOG10):
10169 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10171 CASE_FLT_FN (BUILT_IN_LOG1P):
10172 if (validate_arg (arg0, REAL_TYPE))
10173 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10174 &dconstm1, NULL, false);
10177 CASE_FLT_FN (BUILT_IN_J0):
10178 if (validate_arg (arg0, REAL_TYPE))
10179 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10183 CASE_FLT_FN (BUILT_IN_J1):
10184 if (validate_arg (arg0, REAL_TYPE))
10185 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10189 CASE_FLT_FN (BUILT_IN_Y0):
10190 if (validate_arg (arg0, REAL_TYPE))
10191 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10192 &dconst0, NULL, false);
10195 CASE_FLT_FN (BUILT_IN_Y1):
10196 if (validate_arg (arg0, REAL_TYPE))
10197 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10198 &dconst0, NULL, false);
10201 CASE_FLT_FN (BUILT_IN_NAN):
10202 case BUILT_IN_NAND32:
10203 case BUILT_IN_NAND64:
10204 case BUILT_IN_NAND128:
10205 return fold_builtin_nan (arg0, type, true);
10207 CASE_FLT_FN (BUILT_IN_NANS):
10208 return fold_builtin_nan (arg0, type, false);
10210 CASE_FLT_FN (BUILT_IN_FLOOR):
10211 return fold_builtin_floor (loc, fndecl, arg0);
10213 CASE_FLT_FN (BUILT_IN_CEIL):
10214 return fold_builtin_ceil (loc, fndecl, arg0);
10216 CASE_FLT_FN (BUILT_IN_TRUNC):
10217 return fold_builtin_trunc (loc, fndecl, arg0);
10219 CASE_FLT_FN (BUILT_IN_ROUND):
10220 return fold_builtin_round (loc, fndecl, arg0);
10222 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10223 CASE_FLT_FN (BUILT_IN_RINT):
10224 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10226 CASE_FLT_FN (BUILT_IN_LCEIL):
10227 CASE_FLT_FN (BUILT_IN_LLCEIL):
10228 CASE_FLT_FN (BUILT_IN_LFLOOR):
10229 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10230 CASE_FLT_FN (BUILT_IN_LROUND):
10231 CASE_FLT_FN (BUILT_IN_LLROUND):
10232 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10234 CASE_FLT_FN (BUILT_IN_LRINT):
10235 CASE_FLT_FN (BUILT_IN_LLRINT):
10236 return fold_fixed_mathfn (loc, fndecl, arg0);
10238 case BUILT_IN_BSWAP32:
10239 case BUILT_IN_BSWAP64:
10240 return fold_builtin_bswap (fndecl, arg0);
10242 CASE_INT_FN (BUILT_IN_FFS):
10243 CASE_INT_FN (BUILT_IN_CLZ):
10244 CASE_INT_FN (BUILT_IN_CTZ):
10245 CASE_INT_FN (BUILT_IN_POPCOUNT):
10246 CASE_INT_FN (BUILT_IN_PARITY):
10247 return fold_builtin_bitop (fndecl, arg0);
10249 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10250 return fold_builtin_signbit (loc, arg0, type);
10252 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10253 return fold_builtin_significand (loc, arg0, type);
10255 CASE_FLT_FN (BUILT_IN_ILOGB):
10256 CASE_FLT_FN (BUILT_IN_LOGB):
10257 return fold_builtin_logb (loc, arg0, type);
10259 case BUILT_IN_ISASCII:
10260 return fold_builtin_isascii (loc, arg0);
10262 case BUILT_IN_TOASCII:
10263 return fold_builtin_toascii (loc, arg0);
10265 case BUILT_IN_ISDIGIT:
10266 return fold_builtin_isdigit (loc, arg0);
10268 CASE_FLT_FN (BUILT_IN_FINITE):
10269 case BUILT_IN_FINITED32:
10270 case BUILT_IN_FINITED64:
10271 case BUILT_IN_FINITED128:
10272 case BUILT_IN_ISFINITE:
10274 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10277 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10280 CASE_FLT_FN (BUILT_IN_ISINF):
10281 case BUILT_IN_ISINFD32:
10282 case BUILT_IN_ISINFD64:
10283 case BUILT_IN_ISINFD128:
10285 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10288 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10291 case BUILT_IN_ISNORMAL:
10292 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10294 case BUILT_IN_ISINF_SIGN:
10295 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10297 CASE_FLT_FN (BUILT_IN_ISNAN):
10298 case BUILT_IN_ISNAND32:
10299 case BUILT_IN_ISNAND64:
10300 case BUILT_IN_ISNAND128:
10301 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10303 case BUILT_IN_PRINTF:
10304 case BUILT_IN_PRINTF_UNLOCKED:
10305 case BUILT_IN_VPRINTF:
10306 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10308 case BUILT_IN_FREE:
10309 if (integer_zerop (arg0))
10310 return build_empty_stmt (loc);
10321 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10322 IGNORE is true if the result of the function call is ignored. This
10323 function returns NULL_TREE if no simplification was possible. */
10326 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10328 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10329 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10333 CASE_FLT_FN (BUILT_IN_JN):
10334 if (validate_arg (arg0, INTEGER_TYPE)
10335 && validate_arg (arg1, REAL_TYPE))
10336 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10339 CASE_FLT_FN (BUILT_IN_YN):
10340 if (validate_arg (arg0, INTEGER_TYPE)
10341 && validate_arg (arg1, REAL_TYPE))
10342 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10346 CASE_FLT_FN (BUILT_IN_DREM):
10347 CASE_FLT_FN (BUILT_IN_REMAINDER):
10348 if (validate_arg (arg0, REAL_TYPE)
10349 && validate_arg(arg1, REAL_TYPE))
10350 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10353 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10354 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10355 if (validate_arg (arg0, REAL_TYPE)
10356 && validate_arg(arg1, POINTER_TYPE))
10357 return do_mpfr_lgamma_r (arg0, arg1, type);
10360 CASE_FLT_FN (BUILT_IN_ATAN2):
10361 if (validate_arg (arg0, REAL_TYPE)
10362 && validate_arg(arg1, REAL_TYPE))
10363 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10366 CASE_FLT_FN (BUILT_IN_FDIM):
10367 if (validate_arg (arg0, REAL_TYPE)
10368 && validate_arg(arg1, REAL_TYPE))
10369 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10372 CASE_FLT_FN (BUILT_IN_HYPOT):
10373 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10375 CASE_FLT_FN (BUILT_IN_CPOW):
10376 if (validate_arg (arg0, COMPLEX_TYPE)
10377 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10378 && validate_arg (arg1, COMPLEX_TYPE)
10379 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10380 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10383 CASE_FLT_FN (BUILT_IN_LDEXP):
10384 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10385 CASE_FLT_FN (BUILT_IN_SCALBN):
10386 CASE_FLT_FN (BUILT_IN_SCALBLN):
10387 return fold_builtin_load_exponent (loc, arg0, arg1,
10388 type, /*ldexp=*/false);
10390 CASE_FLT_FN (BUILT_IN_FREXP):
10391 return fold_builtin_frexp (loc, arg0, arg1, type);
10393 CASE_FLT_FN (BUILT_IN_MODF):
10394 return fold_builtin_modf (loc, arg0, arg1, type);
10396 case BUILT_IN_BZERO:
10397 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10399 case BUILT_IN_FPUTS:
10400 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10402 case BUILT_IN_FPUTS_UNLOCKED:
10403 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10405 case BUILT_IN_STRSTR:
10406 return fold_builtin_strstr (loc, arg0, arg1, type);
10408 case BUILT_IN_STRCAT:
10409 return fold_builtin_strcat (loc, arg0, arg1);
10411 case BUILT_IN_STRSPN:
10412 return fold_builtin_strspn (loc, arg0, arg1);
10414 case BUILT_IN_STRCSPN:
10415 return fold_builtin_strcspn (loc, arg0, arg1);
10417 case BUILT_IN_STRCHR:
10418 case BUILT_IN_INDEX:
10419 return fold_builtin_strchr (loc, arg0, arg1, type);
10421 case BUILT_IN_STRRCHR:
10422 case BUILT_IN_RINDEX:
10423 return fold_builtin_strrchr (loc, arg0, arg1, type);
10425 case BUILT_IN_STRCPY:
10426 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10428 case BUILT_IN_STPCPY:
10431 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10435 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10438 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10441 case BUILT_IN_STRCMP:
10442 return fold_builtin_strcmp (loc, arg0, arg1);
10444 case BUILT_IN_STRPBRK:
10445 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10447 case BUILT_IN_EXPECT:
10448 return fold_builtin_expect (loc, arg0, arg1);
10450 CASE_FLT_FN (BUILT_IN_POW):
10451 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10453 CASE_FLT_FN (BUILT_IN_POWI):
10454 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10456 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10457 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10459 CASE_FLT_FN (BUILT_IN_FMIN):
10460 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10462 CASE_FLT_FN (BUILT_IN_FMAX):
10463 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10465 case BUILT_IN_ISGREATER:
10466 return fold_builtin_unordered_cmp (loc, fndecl,
10467 arg0, arg1, UNLE_EXPR, LE_EXPR);
10468 case BUILT_IN_ISGREATEREQUAL:
10469 return fold_builtin_unordered_cmp (loc, fndecl,
10470 arg0, arg1, UNLT_EXPR, LT_EXPR);
10471 case BUILT_IN_ISLESS:
10472 return fold_builtin_unordered_cmp (loc, fndecl,
10473 arg0, arg1, UNGE_EXPR, GE_EXPR);
10474 case BUILT_IN_ISLESSEQUAL:
10475 return fold_builtin_unordered_cmp (loc, fndecl,
10476 arg0, arg1, UNGT_EXPR, GT_EXPR);
10477 case BUILT_IN_ISLESSGREATER:
10478 return fold_builtin_unordered_cmp (loc, fndecl,
10479 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10480 case BUILT_IN_ISUNORDERED:
10481 return fold_builtin_unordered_cmp (loc, fndecl,
10482 arg0, arg1, UNORDERED_EXPR,
10485 /* We do the folding for va_start in the expander. */
10486 case BUILT_IN_VA_START:
10489 case BUILT_IN_SPRINTF:
10490 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10492 case BUILT_IN_OBJECT_SIZE:
10493 return fold_builtin_object_size (arg0, arg1);
10495 case BUILT_IN_PRINTF:
10496 case BUILT_IN_PRINTF_UNLOCKED:
10497 case BUILT_IN_VPRINTF:
10498 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10500 case BUILT_IN_PRINTF_CHK:
10501 case BUILT_IN_VPRINTF_CHK:
10502 if (!validate_arg (arg0, INTEGER_TYPE)
10503 || TREE_SIDE_EFFECTS (arg0))
10506 return fold_builtin_printf (loc, fndecl,
10507 arg1, NULL_TREE, ignore, fcode);
10510 case BUILT_IN_FPRINTF:
10511 case BUILT_IN_FPRINTF_UNLOCKED:
10512 case BUILT_IN_VFPRINTF:
10513 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10522 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10523 and ARG2. IGNORE is true if the result of the function call is ignored.
10524 This function returns NULL_TREE if no simplification was possible. */
10527 fold_builtin_3 (location_t loc, tree fndecl,
10528 tree arg0, tree arg1, tree arg2, bool ignore)
10530 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10531 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10535 CASE_FLT_FN (BUILT_IN_SINCOS):
10536 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10538 CASE_FLT_FN (BUILT_IN_FMA):
10539 if (validate_arg (arg0, REAL_TYPE)
10540 && validate_arg(arg1, REAL_TYPE)
10541 && validate_arg(arg2, REAL_TYPE))
10542 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10545 CASE_FLT_FN (BUILT_IN_REMQUO):
10546 if (validate_arg (arg0, REAL_TYPE)
10547 && validate_arg(arg1, REAL_TYPE)
10548 && validate_arg(arg2, POINTER_TYPE))
10549 return do_mpfr_remquo (arg0, arg1, arg2);
10552 case BUILT_IN_MEMSET:
10553 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10555 case BUILT_IN_BCOPY:
10556 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10557 void_type_node, true, /*endp=*/3);
10559 case BUILT_IN_MEMCPY:
10560 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10561 type, ignore, /*endp=*/0);
10563 case BUILT_IN_MEMPCPY:
10564 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10565 type, ignore, /*endp=*/1);
10567 case BUILT_IN_MEMMOVE:
10568 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10569 type, ignore, /*endp=*/3);
10571 case BUILT_IN_STRNCAT:
10572 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10574 case BUILT_IN_STRNCPY:
10575 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10577 case BUILT_IN_STRNCMP:
10578 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10580 case BUILT_IN_MEMCHR:
10581 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10583 case BUILT_IN_BCMP:
10584 case BUILT_IN_MEMCMP:
10585 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10587 case BUILT_IN_SPRINTF:
10588 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10590 case BUILT_IN_STRCPY_CHK:
10591 case BUILT_IN_STPCPY_CHK:
10592 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10595 case BUILT_IN_STRCAT_CHK:
10596 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10598 case BUILT_IN_PRINTF_CHK:
10599 case BUILT_IN_VPRINTF_CHK:
10600 if (!validate_arg (arg0, INTEGER_TYPE)
10601 || TREE_SIDE_EFFECTS (arg0))
10604 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10607 case BUILT_IN_FPRINTF:
10608 case BUILT_IN_FPRINTF_UNLOCKED:
10609 case BUILT_IN_VFPRINTF:
10610 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10613 case BUILT_IN_FPRINTF_CHK:
10614 case BUILT_IN_VFPRINTF_CHK:
10615 if (!validate_arg (arg1, INTEGER_TYPE)
10616 || TREE_SIDE_EFFECTS (arg1))
10619 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10628 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10629 ARG2, and ARG3. IGNORE is true if the result of the function call is
10630 ignored. This function returns NULL_TREE if no simplification was
10634 fold_builtin_4 (location_t loc, tree fndecl,
10635 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10637 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10641 case BUILT_IN_MEMCPY_CHK:
10642 case BUILT_IN_MEMPCPY_CHK:
10643 case BUILT_IN_MEMMOVE_CHK:
10644 case BUILT_IN_MEMSET_CHK:
10645 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10647 DECL_FUNCTION_CODE (fndecl));
10649 case BUILT_IN_STRNCPY_CHK:
10650 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10652 case BUILT_IN_STRNCAT_CHK:
10653 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10655 case BUILT_IN_FPRINTF_CHK:
10656 case BUILT_IN_VFPRINTF_CHK:
10657 if (!validate_arg (arg1, INTEGER_TYPE)
10658 || TREE_SIDE_EFFECTS (arg1))
10661 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10671 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10672 arguments, where NARGS <= 4. IGNORE is true if the result of the
10673 function call is ignored. This function returns NULL_TREE if no
10674 simplification was possible. Note that this only folds builtins with
10675 fixed argument patterns. Foldings that do varargs-to-varargs
10676 transformations, or that match calls with more than 4 arguments,
10677 need to be handled with fold_builtin_varargs instead. */
10679 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10682 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10684 tree ret = NULL_TREE;
10689 ret = fold_builtin_0 (loc, fndecl, ignore);
10692 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10695 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10698 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10701 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10709 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10710 SET_EXPR_LOCATION (ret, loc);
10711 TREE_NO_WARNING (ret) = 1;
10717 /* Builtins with folding operations that operate on "..." arguments
10718 need special handling; we need to store the arguments in a convenient
10719 data structure before attempting any folding. Fortunately there are
10720 only a few builtins that fall into this category. FNDECL is the
10721 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10722 result of the function call is ignored. */
10725 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10726 bool ignore ATTRIBUTE_UNUSED)
10728 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10729 tree ret = NULL_TREE;
10733 case BUILT_IN_SPRINTF_CHK:
10734 case BUILT_IN_VSPRINTF_CHK:
10735 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10738 case BUILT_IN_SNPRINTF_CHK:
10739 case BUILT_IN_VSNPRINTF_CHK:
10740 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10743 case BUILT_IN_FPCLASSIFY:
10744 ret = fold_builtin_fpclassify (loc, exp);
10752 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10753 SET_EXPR_LOCATION (ret, loc);
10754 TREE_NO_WARNING (ret) = 1;
10760 /* Return true if FNDECL shouldn't be folded right now.
10761 If a built-in function has an inline attribute always_inline
10762 wrapper, defer folding it after always_inline functions have
10763 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10764 might not be performed. */
10767 avoid_folding_inline_builtin (tree fndecl)
10769 return (DECL_DECLARED_INLINE_P (fndecl)
10770 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10772 && !cfun->always_inline_functions_inlined
10773 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10776 /* A wrapper function for builtin folding that prevents warnings for
10777 "statement without effect" and the like, caused by removing the
10778 call node earlier than the warning is generated. */
10781 fold_call_expr (location_t loc, tree exp, bool ignore)
10783 tree ret = NULL_TREE;
10784 tree fndecl = get_callee_fndecl (exp);
10786 && TREE_CODE (fndecl) == FUNCTION_DECL
10787 && DECL_BUILT_IN (fndecl)
10788 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10789 yet. Defer folding until we see all the arguments
10790 (after inlining). */
10791 && !CALL_EXPR_VA_ARG_PACK (exp))
10793 int nargs = call_expr_nargs (exp);
10795 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10796 instead last argument is __builtin_va_arg_pack (). Defer folding
10797 even in that case, until arguments are finalized. */
10798 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10800 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10802 && TREE_CODE (fndecl2) == FUNCTION_DECL
10803 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10804 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10808 if (avoid_folding_inline_builtin (fndecl))
10811 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10812 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10813 CALL_EXPR_ARGP (exp), ignore);
10816 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10818 tree *args = CALL_EXPR_ARGP (exp);
10819 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10822 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10830 /* Conveniently construct a function call expression. FNDECL names the
10831 function to be called and N arguments are passed in the array
10835 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10837 tree fntype = TREE_TYPE (fndecl);
10838 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10840 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10843 /* Conveniently construct a function call expression. FNDECL names the
10844 function to be called and the arguments are passed in the vector
10848 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10850 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10851 VEC_address (tree, vec));
10855 /* Conveniently construct a function call expression. FNDECL names the
10856 function to be called, N is the number of arguments, and the "..."
10857 parameters are the argument expressions. */
10860 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10863 tree *argarray = XALLOCAVEC (tree, n);
10867 for (i = 0; i < n; i++)
10868 argarray[i] = va_arg (ap, tree);
10870 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10873 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10874 varargs macros aren't supported by all bootstrap compilers. */
10877 build_call_expr (tree fndecl, int n, ...)
10880 tree *argarray = XALLOCAVEC (tree, n);
10884 for (i = 0; i < n; i++)
10885 argarray[i] = va_arg (ap, tree);
10887 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10890 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10891 N arguments are passed in the array ARGARRAY. */
10894 fold_builtin_call_array (location_t loc, tree type,
10899 tree ret = NULL_TREE;
10902 if (TREE_CODE (fn) == ADDR_EXPR)
10904 tree fndecl = TREE_OPERAND (fn, 0);
10905 if (TREE_CODE (fndecl) == FUNCTION_DECL
10906 && DECL_BUILT_IN (fndecl))
10908 /* If last argument is __builtin_va_arg_pack (), arguments to this
10909 function are not finalized yet. Defer folding until they are. */
10910 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10912 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10914 && TREE_CODE (fndecl2) == FUNCTION_DECL
10915 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10916 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10917 return build_call_array_loc (loc, type, fn, n, argarray);
10919 if (avoid_folding_inline_builtin (fndecl))
10920 return build_call_array_loc (loc, type, fn, n, argarray);
10921 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10923 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10927 return build_call_array_loc (loc, type, fn, n, argarray);
10929 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10931 /* First try the transformations that don't require consing up
10933 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10938 /* If we got this far, we need to build an exp. */
10939 exp = build_call_array_loc (loc, type, fn, n, argarray);
10940 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10941 return ret ? ret : exp;
10945 return build_call_array_loc (loc, type, fn, n, argarray);
10948 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10949 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10950 of arguments in ARGS to be omitted. OLDNARGS is the number of
10951 elements in ARGS. */
10954 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10955 int skip, tree fndecl, int n, va_list newargs)
10957 int nargs = oldnargs - skip + n;
10964 buffer = XALLOCAVEC (tree, nargs);
10965 for (i = 0; i < n; i++)
10966 buffer[i] = va_arg (newargs, tree);
10967 for (j = skip; j < oldnargs; j++, i++)
10968 buffer[i] = args[j];
10971 buffer = args + skip;
10973 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10976 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10977 list ARGS along with N new arguments specified as the "..."
10978 parameters. SKIP is the number of arguments in ARGS to be omitted.
10979 OLDNARGS is the number of elements in ARGS. */
10982 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10983 int skip, tree fndecl, int n, ...)
10989 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10995 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10996 along with N new arguments specified as the "..." parameters. SKIP
10997 is the number of arguments in EXP to be omitted. This function is used
10998 to do varargs-to-varargs transformations. */
11001 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11007 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11008 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11014 /* Validate a single argument ARG against a tree code CODE representing
11018 validate_arg (const_tree arg, enum tree_code code)
11022 else if (code == POINTER_TYPE)
11023 return POINTER_TYPE_P (TREE_TYPE (arg));
11024 else if (code == INTEGER_TYPE)
11025 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11026 return code == TREE_CODE (TREE_TYPE (arg));
11029 /* This function validates the types of a function call argument list
11030 against a specified list of tree_codes. If the last specifier is a 0,
11031 that represents an ellipses, otherwise the last specifier must be a
11034 This is the GIMPLE version of validate_arglist. Eventually we want to
11035 completely convert builtins.c to work from GIMPLEs and the tree based
11036 validate_arglist will then be removed. */
11039 validate_gimple_arglist (const_gimple call, ...)
11041 enum tree_code code;
11047 va_start (ap, call);
11052 code = (enum tree_code) va_arg (ap, int);
11056 /* This signifies an ellipses, any further arguments are all ok. */
11060 /* This signifies an endlink, if no arguments remain, return
11061 true, otherwise return false. */
11062 res = (i == gimple_call_num_args (call));
11065 /* If no parameters remain or the parameter's code does not
11066 match the specified code, return false. Otherwise continue
11067 checking any remaining arguments. */
11068 arg = gimple_call_arg (call, i++);
11069 if (!validate_arg (arg, code))
11076 /* We need gotos here since we can only have one VA_CLOSE in a
11084 /* This function validates the types of a function call argument list
11085 against a specified list of tree_codes. If the last specifier is a 0,
11086 that represents an ellipses, otherwise the last specifier must be a
11090 validate_arglist (const_tree callexpr, ...)
11092 enum tree_code code;
11095 const_call_expr_arg_iterator iter;
11098 va_start (ap, callexpr);
11099 init_const_call_expr_arg_iterator (callexpr, &iter);
11103 code = (enum tree_code) va_arg (ap, int);
11107 /* This signifies an ellipses, any further arguments are all ok. */
11111 /* This signifies an endlink, if no arguments remain, return
11112 true, otherwise return false. */
11113 res = !more_const_call_expr_args_p (&iter);
11116 /* If no parameters remain or the parameter's code does not
11117 match the specified code, return false. Otherwise continue
11118 checking any remaining arguments. */
11119 arg = next_const_call_expr_arg (&iter);
11120 if (!validate_arg (arg, code))
11127 /* We need gotos here since we can only have one VA_CLOSE in a
11135 /* Default target-specific builtin expander that does nothing. */
11138 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11139 rtx target ATTRIBUTE_UNUSED,
11140 rtx subtarget ATTRIBUTE_UNUSED,
11141 enum machine_mode mode ATTRIBUTE_UNUSED,
11142 int ignore ATTRIBUTE_UNUSED)
11147 /* Returns true is EXP represents data that would potentially reside
11148 in a readonly section. */
11151 readonly_data_expr (tree exp)
11155 if (TREE_CODE (exp) != ADDR_EXPR)
11158 exp = get_base_address (TREE_OPERAND (exp, 0));
11162 /* Make sure we call decl_readonly_section only for trees it
11163 can handle (since it returns true for everything it doesn't
11165 if (TREE_CODE (exp) == STRING_CST
11166 || TREE_CODE (exp) == CONSTRUCTOR
11167 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11168 return decl_readonly_section (exp, 0);
11173 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11174 to the call, and TYPE is its return type.
11176 Return NULL_TREE if no simplification was possible, otherwise return the
11177 simplified form of the call as a tree.
11179 The simplified form may be a constant or other expression which
11180 computes the same value, but in a more efficient manner (including
11181 calls to other builtin functions).
11183 The call may contain arguments which need to be evaluated, but
11184 which are not useful to determine the result of the call. In
11185 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11186 COMPOUND_EXPR will be an argument which must be evaluated.
11187 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11188 COMPOUND_EXPR in the chain will contain the tree for the simplified
11189 form of the builtin function call. */
11192 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11194 if (!validate_arg (s1, POINTER_TYPE)
11195 || !validate_arg (s2, POINTER_TYPE))
11200 const char *p1, *p2;
11202 p2 = c_getstr (s2);
11206 p1 = c_getstr (s1);
11209 const char *r = strstr (p1, p2);
11213 return build_int_cst (TREE_TYPE (s1), 0);
11215 /* Return an offset into the constant string argument. */
11216 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11217 s1, size_int (r - p1));
11218 return fold_convert_loc (loc, type, tem);
11221 /* The argument is const char *, and the result is char *, so we need
11222 a type conversion here to avoid a warning. */
11224 return fold_convert_loc (loc, type, s1);
11229 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11233 /* New argument list transforming strstr(s1, s2) to
11234 strchr(s1, s2[0]). */
11235 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11239 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11240 the call, and TYPE is its return type.
11242 Return NULL_TREE if no simplification was possible, otherwise return the
11243 simplified form of the call as a tree.
11245 The simplified form may be a constant or other expression which
11246 computes the same value, but in a more efficient manner (including
11247 calls to other builtin functions).
11249 The call may contain arguments which need to be evaluated, but
11250 which are not useful to determine the result of the call. In
11251 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11252 COMPOUND_EXPR will be an argument which must be evaluated.
11253 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11254 COMPOUND_EXPR in the chain will contain the tree for the simplified
11255 form of the builtin function call. */
11258 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11260 if (!validate_arg (s1, POINTER_TYPE)
11261 || !validate_arg (s2, INTEGER_TYPE))
11267 if (TREE_CODE (s2) != INTEGER_CST)
11270 p1 = c_getstr (s1);
11277 if (target_char_cast (s2, &c))
11280 r = strchr (p1, c);
11283 return build_int_cst (TREE_TYPE (s1), 0);
11285 /* Return an offset into the constant string argument. */
11286 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11287 s1, size_int (r - p1));
11288 return fold_convert_loc (loc, type, tem);
11294 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11295 the call, and TYPE is its return type.
11297 Return NULL_TREE if no simplification was possible, otherwise return the
11298 simplified form of the call as a tree.
11300 The simplified form may be a constant or other expression which
11301 computes the same value, but in a more efficient manner (including
11302 calls to other builtin functions).
11304 The call may contain arguments which need to be evaluated, but
11305 which are not useful to determine the result of the call. In
11306 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11307 COMPOUND_EXPR will be an argument which must be evaluated.
11308 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11309 COMPOUND_EXPR in the chain will contain the tree for the simplified
11310 form of the builtin function call. */
11313 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11315 if (!validate_arg (s1, POINTER_TYPE)
11316 || !validate_arg (s2, INTEGER_TYPE))
11323 if (TREE_CODE (s2) != INTEGER_CST)
11326 p1 = c_getstr (s1);
11333 if (target_char_cast (s2, &c))
11336 r = strrchr (p1, c);
11339 return build_int_cst (TREE_TYPE (s1), 0);
11341 /* Return an offset into the constant string argument. */
11342 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11343 s1, size_int (r - p1));
11344 return fold_convert_loc (loc, type, tem);
11347 if (! integer_zerop (s2))
11350 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11354 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11355 return build_call_expr_loc (loc, fn, 2, s1, s2);
11359 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11360 to the call, and TYPE is its return type.
11362 Return NULL_TREE if no simplification was possible, otherwise return the
11363 simplified form of the call as a tree.
11365 The simplified form may be a constant or other expression which
11366 computes the same value, but in a more efficient manner (including
11367 calls to other builtin functions).
11369 The call may contain arguments which need to be evaluated, but
11370 which are not useful to determine the result of the call. In
11371 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11372 COMPOUND_EXPR will be an argument which must be evaluated.
11373 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11374 COMPOUND_EXPR in the chain will contain the tree for the simplified
11375 form of the builtin function call. */
11378 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11380 if (!validate_arg (s1, POINTER_TYPE)
11381 || !validate_arg (s2, POINTER_TYPE))
11386 const char *p1, *p2;
11388 p2 = c_getstr (s2);
11392 p1 = c_getstr (s1);
11395 const char *r = strpbrk (p1, p2);
11399 return build_int_cst (TREE_TYPE (s1), 0);
11401 /* Return an offset into the constant string argument. */
11402 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11403 s1, size_int (r - p1));
11404 return fold_convert_loc (loc, type, tem);
11408 /* strpbrk(x, "") == NULL.
11409 Evaluate and ignore s1 in case it had side-effects. */
11410 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11413 return NULL_TREE; /* Really call strpbrk. */
11415 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11419 /* New argument list transforming strpbrk(s1, s2) to
11420 strchr(s1, s2[0]). */
11421 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11425 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11428 Return NULL_TREE if no simplification was possible, otherwise return the
11429 simplified form of the call as a tree.
11431 The simplified form may be a constant or other expression which
11432 computes the same value, but in a more efficient manner (including
11433 calls to other builtin functions).
11435 The call may contain arguments which need to be evaluated, but
11436 which are not useful to determine the result of the call. In
11437 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11438 COMPOUND_EXPR will be an argument which must be evaluated.
11439 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11440 COMPOUND_EXPR in the chain will contain the tree for the simplified
11441 form of the builtin function call. */
11444 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11446 if (!validate_arg (dst, POINTER_TYPE)
11447 || !validate_arg (src, POINTER_TYPE))
11451 const char *p = c_getstr (src);
11453 /* If the string length is zero, return the dst parameter. */
11454 if (p && *p == '\0')
11457 if (optimize_insn_for_speed_p ())
11459 /* See if we can store by pieces into (dst + strlen(dst)). */
11461 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11462 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11464 if (!strlen_fn || !strcpy_fn)
11467 /* If we don't have a movstr we don't want to emit an strcpy
11468 call. We have to do that if the length of the source string
11469 isn't computable (in that case we can use memcpy probably
11470 later expanding to a sequence of mov instructions). If we
11471 have movstr instructions we can emit strcpy calls. */
11474 tree len = c_strlen (src, 1);
11475 if (! len || TREE_SIDE_EFFECTS (len))
11479 /* Stabilize the argument list. */
11480 dst = builtin_save_expr (dst);
11482 /* Create strlen (dst). */
11483 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11484 /* Create (dst p+ strlen (dst)). */
11486 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11487 TREE_TYPE (dst), dst, newdst);
11488 newdst = builtin_save_expr (newdst);
11490 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11491 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11497 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11498 arguments to the call.
11500 Return NULL_TREE if no simplification was possible, otherwise return the
11501 simplified form of the call as a tree.
11503 The simplified form may be a constant or other expression which
11504 computes the same value, but in a more efficient manner (including
11505 calls to other builtin functions).
11507 The call may contain arguments which need to be evaluated, but
11508 which are not useful to determine the result of the call. In
11509 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11510 COMPOUND_EXPR will be an argument which must be evaluated.
11511 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11512 COMPOUND_EXPR in the chain will contain the tree for the simplified
11513 form of the builtin function call. */
11516 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11518 if (!validate_arg (dst, POINTER_TYPE)
11519 || !validate_arg (src, POINTER_TYPE)
11520 || !validate_arg (len, INTEGER_TYPE))
11524 const char *p = c_getstr (src);
11526 /* If the requested length is zero, or the src parameter string
11527 length is zero, return the dst parameter. */
11528 if (integer_zerop (len) || (p && *p == '\0'))
11529 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11531 /* If the requested len is greater than or equal to the string
11532 length, call strcat. */
11533 if (TREE_CODE (len) == INTEGER_CST && p
11534 && compare_tree_int (len, strlen (p)) >= 0)
11536 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11538 /* If the replacement _DECL isn't initialized, don't do the
11543 return build_call_expr_loc (loc, fn, 2, dst, src);
11549 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11552 Return NULL_TREE if no simplification was possible, otherwise return the
11553 simplified form of the call as a tree.
11555 The simplified form may be a constant or other expression which
11556 computes the same value, but in a more efficient manner (including
11557 calls to other builtin functions).
11559 The call may contain arguments which need to be evaluated, but
11560 which are not useful to determine the result of the call. In
11561 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11562 COMPOUND_EXPR will be an argument which must be evaluated.
11563 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11564 COMPOUND_EXPR in the chain will contain the tree for the simplified
11565 form of the builtin function call. */
11568 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11570 if (!validate_arg (s1, POINTER_TYPE)
11571 || !validate_arg (s2, POINTER_TYPE))
11575 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11577 /* If both arguments are constants, evaluate at compile-time. */
11580 const size_t r = strspn (p1, p2);
11581 return size_int (r);
11584 /* If either argument is "", return NULL_TREE. */
11585 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11586 /* Evaluate and ignore both arguments in case either one has
11588 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11594 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11597 Return NULL_TREE if no simplification was possible, otherwise return the
11598 simplified form of the call as a tree.
11600 The simplified form may be a constant or other expression which
11601 computes the same value, but in a more efficient manner (including
11602 calls to other builtin functions).
11604 The call may contain arguments which need to be evaluated, but
11605 which are not useful to determine the result of the call. In
11606 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11607 COMPOUND_EXPR will be an argument which must be evaluated.
11608 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11609 COMPOUND_EXPR in the chain will contain the tree for the simplified
11610 form of the builtin function call. */
11613 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11615 if (!validate_arg (s1, POINTER_TYPE)
11616 || !validate_arg (s2, POINTER_TYPE))
11620 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11622 /* If both arguments are constants, evaluate at compile-time. */
11625 const size_t r = strcspn (p1, p2);
11626 return size_int (r);
11629 /* If the first argument is "", return NULL_TREE. */
11630 if (p1 && *p1 == '\0')
11632 /* Evaluate and ignore argument s2 in case it has
11634 return omit_one_operand_loc (loc, size_type_node,
11635 size_zero_node, s2);
11638 /* If the second argument is "", return __builtin_strlen(s1). */
11639 if (p2 && *p2 == '\0')
11641 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11643 /* If the replacement _DECL isn't initialized, don't do the
11648 return build_call_expr_loc (loc, fn, 1, s1);
11654 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11655 to the call. IGNORE is true if the value returned
11656 by the builtin will be ignored. UNLOCKED is true is true if this
11657 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11658 the known length of the string. Return NULL_TREE if no simplification
11662 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11663 bool ignore, bool unlocked, tree len)
11665 /* If we're using an unlocked function, assume the other unlocked
11666 functions exist explicitly. */
11667 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11668 : implicit_built_in_decls[BUILT_IN_FPUTC];
11669 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11670 : implicit_built_in_decls[BUILT_IN_FWRITE];
11672 /* If the return value is used, don't do the transformation. */
11676 /* Verify the arguments in the original call. */
11677 if (!validate_arg (arg0, POINTER_TYPE)
11678 || !validate_arg (arg1, POINTER_TYPE))
11682 len = c_strlen (arg0, 0);
11684 /* Get the length of the string passed to fputs. If the length
11685 can't be determined, punt. */
11687 || TREE_CODE (len) != INTEGER_CST)
11690 switch (compare_tree_int (len, 1))
11692 case -1: /* length is 0, delete the call entirely . */
11693 return omit_one_operand_loc (loc, integer_type_node,
11694 integer_zero_node, arg1);;
11696 case 0: /* length is 1, call fputc. */
11698 const char *p = c_getstr (arg0);
11703 return build_call_expr_loc (loc, fn_fputc, 2,
11704 build_int_cst (NULL_TREE, p[0]), arg1);
11710 case 1: /* length is greater than 1, call fwrite. */
11712 /* If optimizing for size keep fputs. */
11713 if (optimize_function_for_size_p (cfun))
11715 /* New argument list transforming fputs(string, stream) to
11716 fwrite(string, 1, len, stream). */
11718 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11719 size_one_node, len, arg1);
11724 gcc_unreachable ();
11729 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11730 produced. False otherwise. This is done so that we don't output the error
11731 or warning twice or three times. */
11734 fold_builtin_next_arg (tree exp, bool va_start_p)
11736 tree fntype = TREE_TYPE (current_function_decl);
11737 int nargs = call_expr_nargs (exp);
11740 if (!stdarg_p (fntype))
11742 error ("%<va_start%> used in function with fixed args");
11748 if (va_start_p && (nargs != 2))
11750 error ("wrong number of arguments to function %<va_start%>");
11753 arg = CALL_EXPR_ARG (exp, 1);
11755 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11756 when we checked the arguments and if needed issued a warning. */
11761 /* Evidently an out of date version of <stdarg.h>; can't validate
11762 va_start's second argument, but can still work as intended. */
11763 warning (0, "%<__builtin_next_arg%> called without an argument");
11766 else if (nargs > 1)
11768 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11771 arg = CALL_EXPR_ARG (exp, 0);
11774 if (TREE_CODE (arg) == SSA_NAME)
11775 arg = SSA_NAME_VAR (arg);
11777 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11778 or __builtin_next_arg (0) the first time we see it, after checking
11779 the arguments and if needed issuing a warning. */
11780 if (!integer_zerop (arg))
11782 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11784 /* Strip off all nops for the sake of the comparison. This
11785 is not quite the same as STRIP_NOPS. It does more.
11786 We must also strip off INDIRECT_EXPR for C++ reference
11788 while (CONVERT_EXPR_P (arg)
11789 || TREE_CODE (arg) == INDIRECT_REF)
11790 arg = TREE_OPERAND (arg, 0);
11791 if (arg != last_parm)
11793 /* FIXME: Sometimes with the tree optimizers we can get the
11794 not the last argument even though the user used the last
11795 argument. We just warn and set the arg to be the last
11796 argument so that we will get wrong-code because of
11798 warning (0, "second parameter of %<va_start%> not last named argument");
11801 /* Undefined by C99 7.15.1.4p4 (va_start):
11802 "If the parameter parmN is declared with the register storage
11803 class, with a function or array type, or with a type that is
11804 not compatible with the type that results after application of
11805 the default argument promotions, the behavior is undefined."
11807 else if (DECL_REGISTER (arg))
11808 warning (0, "undefined behaviour when second parameter of "
11809 "%<va_start%> is declared with %<register%> storage");
11811 /* We want to verify the second parameter just once before the tree
11812 optimizers are run and then avoid keeping it in the tree,
11813 as otherwise we could warn even for correct code like:
11814 void foo (int i, ...)
11815 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11817 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11819 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11825 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11826 ORIG may be null if this is a 2-argument call. We don't attempt to
11827 simplify calls with more than 3 arguments.
11829 Return NULL_TREE if no simplification was possible, otherwise return the
11830 simplified form of the call as a tree. If IGNORED is true, it means that
11831 the caller does not use the returned value of the function. */
11834 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11835 tree orig, int ignored)
11838 const char *fmt_str = NULL;
11840 /* Verify the required arguments in the original call. We deal with two
11841 types of sprintf() calls: 'sprintf (str, fmt)' and
11842 'sprintf (dest, "%s", orig)'. */
11843 if (!validate_arg (dest, POINTER_TYPE)
11844 || !validate_arg (fmt, POINTER_TYPE))
11846 if (orig && !validate_arg (orig, POINTER_TYPE))
11849 /* Check whether the format is a literal string constant. */
11850 fmt_str = c_getstr (fmt);
11851 if (fmt_str == NULL)
11855 retval = NULL_TREE;
11857 if (!init_target_chars ())
11860 /* If the format doesn't contain % args or %%, use strcpy. */
11861 if (strchr (fmt_str, target_percent) == NULL)
11863 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11868 /* Don't optimize sprintf (buf, "abc", ptr++). */
11872 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11873 'format' is known to contain no % formats. */
11874 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11876 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11879 /* If the format is "%s", use strcpy if the result isn't used. */
11880 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11883 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11888 /* Don't crash on sprintf (str1, "%s"). */
11892 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11895 retval = c_strlen (orig, 1);
11896 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11899 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11902 if (call && retval)
11904 retval = fold_convert_loc
11905 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11907 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11913 /* Expand a call EXP to __builtin_object_size. */
11916 expand_builtin_object_size (tree exp)
11919 int object_size_type;
11920 tree fndecl = get_callee_fndecl (exp);
11922 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11924 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11926 expand_builtin_trap ();
11930 ost = CALL_EXPR_ARG (exp, 1);
11933 if (TREE_CODE (ost) != INTEGER_CST
11934 || tree_int_cst_sgn (ost) < 0
11935 || compare_tree_int (ost, 3) > 0)
11937 error ("%Klast argument of %D is not integer constant between 0 and 3",
11939 expand_builtin_trap ();
11943 object_size_type = tree_low_cst (ost, 0);
11945 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11948 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11949 FCODE is the BUILT_IN_* to use.
11950 Return NULL_RTX if we failed; the caller should emit a normal call,
11951 otherwise try to get the result in TARGET, if convenient (and in
11952 mode MODE if that's convenient). */
11955 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11956 enum built_in_function fcode)
11958 tree dest, src, len, size;
11960 if (!validate_arglist (exp,
11962 fcode == BUILT_IN_MEMSET_CHK
11963 ? INTEGER_TYPE : POINTER_TYPE,
11964 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11967 dest = CALL_EXPR_ARG (exp, 0);
11968 src = CALL_EXPR_ARG (exp, 1);
11969 len = CALL_EXPR_ARG (exp, 2);
11970 size = CALL_EXPR_ARG (exp, 3);
11972 if (! host_integerp (size, 1))
11975 if (host_integerp (len, 1) || integer_all_onesp (size))
11979 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11981 warning_at (tree_nonartificial_location (exp),
11982 0, "%Kcall to %D will always overflow destination buffer",
11983 exp, get_callee_fndecl (exp));
11988 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11989 mem{cpy,pcpy,move,set} is available. */
11992 case BUILT_IN_MEMCPY_CHK:
11993 fn = built_in_decls[BUILT_IN_MEMCPY];
11995 case BUILT_IN_MEMPCPY_CHK:
11996 fn = built_in_decls[BUILT_IN_MEMPCPY];
11998 case BUILT_IN_MEMMOVE_CHK:
11999 fn = built_in_decls[BUILT_IN_MEMMOVE];
12001 case BUILT_IN_MEMSET_CHK:
12002 fn = built_in_decls[BUILT_IN_MEMSET];
12011 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12012 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12013 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12014 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12016 else if (fcode == BUILT_IN_MEMSET_CHK)
12020 unsigned int dest_align
12021 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12023 /* If DEST is not a pointer type, call the normal function. */
12024 if (dest_align == 0)
12027 /* If SRC and DEST are the same (and not volatile), do nothing. */
12028 if (operand_equal_p (src, dest, 0))
12032 if (fcode != BUILT_IN_MEMPCPY_CHK)
12034 /* Evaluate and ignore LEN in case it has side-effects. */
12035 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12036 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12039 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12040 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12043 /* __memmove_chk special case. */
12044 if (fcode == BUILT_IN_MEMMOVE_CHK)
12046 unsigned int src_align
12047 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12049 if (src_align == 0)
12052 /* If src is categorized for a readonly section we can use
12053 normal __memcpy_chk. */
12054 if (readonly_data_expr (src))
12056 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12059 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12060 dest, src, len, size);
12061 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12062 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12063 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12070 /* Emit warning if a buffer overflow is detected at compile time. */
12073 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12077 location_t loc = tree_nonartificial_location (exp);
12081 case BUILT_IN_STRCPY_CHK:
12082 case BUILT_IN_STPCPY_CHK:
12083 /* For __strcat_chk the warning will be emitted only if overflowing
12084 by at least strlen (dest) + 1 bytes. */
12085 case BUILT_IN_STRCAT_CHK:
12086 len = CALL_EXPR_ARG (exp, 1);
12087 size = CALL_EXPR_ARG (exp, 2);
12090 case BUILT_IN_STRNCAT_CHK:
12091 case BUILT_IN_STRNCPY_CHK:
12092 len = CALL_EXPR_ARG (exp, 2);
12093 size = CALL_EXPR_ARG (exp, 3);
12095 case BUILT_IN_SNPRINTF_CHK:
12096 case BUILT_IN_VSNPRINTF_CHK:
12097 len = CALL_EXPR_ARG (exp, 1);
12098 size = CALL_EXPR_ARG (exp, 3);
12101 gcc_unreachable ();
12107 if (! host_integerp (size, 1) || integer_all_onesp (size))
12112 len = c_strlen (len, 1);
12113 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12116 else if (fcode == BUILT_IN_STRNCAT_CHK)
12118 tree src = CALL_EXPR_ARG (exp, 1);
12119 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12121 src = c_strlen (src, 1);
12122 if (! src || ! host_integerp (src, 1))
12124 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12125 exp, get_callee_fndecl (exp));
12128 else if (tree_int_cst_lt (src, size))
12131 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12134 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12135 exp, get_callee_fndecl (exp));
12138 /* Emit warning if a buffer overflow is detected at compile time
12139 in __sprintf_chk/__vsprintf_chk calls. */
12142 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12144 tree size, len, fmt;
12145 const char *fmt_str;
12146 int nargs = call_expr_nargs (exp);
12148 /* Verify the required arguments in the original call. */
12152 size = CALL_EXPR_ARG (exp, 2);
12153 fmt = CALL_EXPR_ARG (exp, 3);
12155 if (! host_integerp (size, 1) || integer_all_onesp (size))
12158 /* Check whether the format is a literal string constant. */
12159 fmt_str = c_getstr (fmt);
12160 if (fmt_str == NULL)
12163 if (!init_target_chars ())
12166 /* If the format doesn't contain % args or %%, we know its size. */
12167 if (strchr (fmt_str, target_percent) == 0)
12168 len = build_int_cstu (size_type_node, strlen (fmt_str));
12169 /* If the format is "%s" and first ... argument is a string literal,
12171 else if (fcode == BUILT_IN_SPRINTF_CHK
12172 && strcmp (fmt_str, target_percent_s) == 0)
12178 arg = CALL_EXPR_ARG (exp, 4);
12179 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12182 len = c_strlen (arg, 1);
12183 if (!len || ! host_integerp (len, 1))
12189 if (! tree_int_cst_lt (len, size))
12190 warning_at (tree_nonartificial_location (exp),
12191 0, "%Kcall to %D will always overflow destination buffer",
12192 exp, get_callee_fndecl (exp));
12195 /* Emit warning if a free is called with address of a variable. */
12198 maybe_emit_free_warning (tree exp)
12200 tree arg = CALL_EXPR_ARG (exp, 0);
12203 if (TREE_CODE (arg) != ADDR_EXPR)
12206 arg = get_base_address (TREE_OPERAND (arg, 0));
12207 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12210 if (SSA_VAR_P (arg))
12211 warning_at (tree_nonartificial_location (exp),
12212 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12214 warning_at (tree_nonartificial_location (exp),
12215 0, "%Kattempt to free a non-heap object", exp);
12218 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12222 fold_builtin_object_size (tree ptr, tree ost)
12224 unsigned HOST_WIDE_INT bytes;
12225 int object_size_type;
12227 if (!validate_arg (ptr, POINTER_TYPE)
12228 || !validate_arg (ost, INTEGER_TYPE))
12233 if (TREE_CODE (ost) != INTEGER_CST
12234 || tree_int_cst_sgn (ost) < 0
12235 || compare_tree_int (ost, 3) > 0)
12238 object_size_type = tree_low_cst (ost, 0);
12240 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12241 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12242 and (size_t) 0 for types 2 and 3. */
12243 if (TREE_SIDE_EFFECTS (ptr))
12244 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12246 if (TREE_CODE (ptr) == ADDR_EXPR)
12248 bytes = compute_builtin_object_size (ptr, object_size_type);
12249 if (double_int_fits_to_tree_p (size_type_node,
12250 uhwi_to_double_int (bytes)))
12251 return build_int_cstu (size_type_node, bytes);
12253 else if (TREE_CODE (ptr) == SSA_NAME)
12255 /* If object size is not known yet, delay folding until
12256 later. Maybe subsequent passes will help determining
12258 bytes = compute_builtin_object_size (ptr, object_size_type);
12259 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12260 && double_int_fits_to_tree_p (size_type_node,
12261 uhwi_to_double_int (bytes)))
12262 return build_int_cstu (size_type_node, bytes);
12268 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12269 DEST, SRC, LEN, and SIZE are the arguments to the call.
12270 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12271 code of the builtin. If MAXLEN is not NULL, it is maximum length
12272 passed as third argument. */
12275 fold_builtin_memory_chk (location_t loc, tree fndecl,
12276 tree dest, tree src, tree len, tree size,
12277 tree maxlen, bool ignore,
12278 enum built_in_function fcode)
12282 if (!validate_arg (dest, POINTER_TYPE)
12283 || !validate_arg (src,
12284 (fcode == BUILT_IN_MEMSET_CHK
12285 ? INTEGER_TYPE : POINTER_TYPE))
12286 || !validate_arg (len, INTEGER_TYPE)
12287 || !validate_arg (size, INTEGER_TYPE))
12290 /* If SRC and DEST are the same (and not volatile), return DEST
12291 (resp. DEST+LEN for __mempcpy_chk). */
12292 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12294 if (fcode != BUILT_IN_MEMPCPY_CHK)
12295 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12299 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12301 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12305 if (! host_integerp (size, 1))
12308 if (! integer_all_onesp (size))
12310 if (! host_integerp (len, 1))
12312 /* If LEN is not constant, try MAXLEN too.
12313 For MAXLEN only allow optimizing into non-_ocs function
12314 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12315 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12317 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12319 /* (void) __mempcpy_chk () can be optimized into
12320 (void) __memcpy_chk (). */
12321 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12325 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12333 if (tree_int_cst_lt (size, maxlen))
12338 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12339 mem{cpy,pcpy,move,set} is available. */
12342 case BUILT_IN_MEMCPY_CHK:
12343 fn = built_in_decls[BUILT_IN_MEMCPY];
12345 case BUILT_IN_MEMPCPY_CHK:
12346 fn = built_in_decls[BUILT_IN_MEMPCPY];
12348 case BUILT_IN_MEMMOVE_CHK:
12349 fn = built_in_decls[BUILT_IN_MEMMOVE];
12351 case BUILT_IN_MEMSET_CHK:
12352 fn = built_in_decls[BUILT_IN_MEMSET];
12361 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12364 /* Fold a call to the __st[rp]cpy_chk builtin.
12365 DEST, SRC, and SIZE are the arguments to the call.
12366 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12367 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12368 strings passed as second argument. */
12371 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12372 tree src, tree size,
12373 tree maxlen, bool ignore,
12374 enum built_in_function fcode)
12378 if (!validate_arg (dest, POINTER_TYPE)
12379 || !validate_arg (src, POINTER_TYPE)
12380 || !validate_arg (size, INTEGER_TYPE))
12383 /* If SRC and DEST are the same (and not volatile), return DEST. */
12384 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12385 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12387 if (! host_integerp (size, 1))
12390 if (! integer_all_onesp (size))
12392 len = c_strlen (src, 1);
12393 if (! len || ! host_integerp (len, 1))
12395 /* If LEN is not constant, try MAXLEN too.
12396 For MAXLEN only allow optimizing into non-_ocs function
12397 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12398 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12400 if (fcode == BUILT_IN_STPCPY_CHK)
12405 /* If return value of __stpcpy_chk is ignored,
12406 optimize into __strcpy_chk. */
12407 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12411 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12414 if (! len || TREE_SIDE_EFFECTS (len))
12417 /* If c_strlen returned something, but not a constant,
12418 transform __strcpy_chk into __memcpy_chk. */
12419 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12423 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12424 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12425 build_call_expr_loc (loc, fn, 4,
12426 dest, src, len, size));
12432 if (! tree_int_cst_lt (maxlen, size))
12436 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12437 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12438 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12442 return build_call_expr_loc (loc, fn, 2, dest, src);
12445 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12446 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12447 length passed as third argument. */
12450 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12451 tree len, tree size, tree maxlen)
12455 if (!validate_arg (dest, POINTER_TYPE)
12456 || !validate_arg (src, POINTER_TYPE)
12457 || !validate_arg (len, INTEGER_TYPE)
12458 || !validate_arg (size, INTEGER_TYPE))
12461 if (! host_integerp (size, 1))
12464 if (! integer_all_onesp (size))
12466 if (! host_integerp (len, 1))
12468 /* If LEN is not constant, try MAXLEN too.
12469 For MAXLEN only allow optimizing into non-_ocs function
12470 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12471 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12477 if (tree_int_cst_lt (size, maxlen))
12481 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12482 fn = built_in_decls[BUILT_IN_STRNCPY];
12486 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12489 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12490 are the arguments to the call. */
12493 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12494 tree src, tree size)
12499 if (!validate_arg (dest, POINTER_TYPE)
12500 || !validate_arg (src, POINTER_TYPE)
12501 || !validate_arg (size, INTEGER_TYPE))
12504 p = c_getstr (src);
12505 /* If the SRC parameter is "", return DEST. */
12506 if (p && *p == '\0')
12507 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12509 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12512 /* If __builtin_strcat_chk is used, assume strcat is available. */
12513 fn = built_in_decls[BUILT_IN_STRCAT];
12517 return build_call_expr_loc (loc, fn, 2, dest, src);
12520 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12524 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12525 tree dest, tree src, tree len, tree size)
12530 if (!validate_arg (dest, POINTER_TYPE)
12531 || !validate_arg (src, POINTER_TYPE)
12532 || !validate_arg (size, INTEGER_TYPE)
12533 || !validate_arg (size, INTEGER_TYPE))
12536 p = c_getstr (src);
12537 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12538 if (p && *p == '\0')
12539 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12540 else if (integer_zerop (len))
12541 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12543 if (! host_integerp (size, 1))
12546 if (! integer_all_onesp (size))
12548 tree src_len = c_strlen (src, 1);
12550 && host_integerp (src_len, 1)
12551 && host_integerp (len, 1)
12552 && ! tree_int_cst_lt (len, src_len))
12554 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12555 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12559 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12564 /* If __builtin_strncat_chk is used, assume strncat is available. */
12565 fn = built_in_decls[BUILT_IN_STRNCAT];
12569 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12572 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12573 Return NULL_TREE if a normal call should be emitted rather than
12574 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12575 or BUILT_IN_VSPRINTF_CHK. */
12578 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12579 enum built_in_function fcode)
12581 tree dest, size, len, fn, fmt, flag;
12582 const char *fmt_str;
12584 /* Verify the required arguments in the original call. */
12588 if (!validate_arg (dest, POINTER_TYPE))
12591 if (!validate_arg (flag, INTEGER_TYPE))
12594 if (!validate_arg (size, INTEGER_TYPE))
12597 if (!validate_arg (fmt, POINTER_TYPE))
12600 if (! host_integerp (size, 1))
12605 if (!init_target_chars ())
12608 /* Check whether the format is a literal string constant. */
12609 fmt_str = c_getstr (fmt);
12610 if (fmt_str != NULL)
12612 /* If the format doesn't contain % args or %%, we know the size. */
12613 if (strchr (fmt_str, target_percent) == 0)
12615 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12616 len = build_int_cstu (size_type_node, strlen (fmt_str));
12618 /* If the format is "%s" and first ... argument is a string literal,
12619 we know the size too. */
12620 else if (fcode == BUILT_IN_SPRINTF_CHK
12621 && strcmp (fmt_str, target_percent_s) == 0)
12628 if (validate_arg (arg, POINTER_TYPE))
12630 len = c_strlen (arg, 1);
12631 if (! len || ! host_integerp (len, 1))
12638 if (! integer_all_onesp (size))
12640 if (! len || ! tree_int_cst_lt (len, size))
12644 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12645 or if format doesn't contain % chars or is "%s". */
12646 if (! integer_zerop (flag))
12648 if (fmt_str == NULL)
12650 if (strchr (fmt_str, target_percent) != NULL
12651 && strcmp (fmt_str, target_percent_s))
12655 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12656 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12657 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12661 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12664 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12665 a normal call should be emitted rather than expanding the function
12666 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12669 fold_builtin_sprintf_chk (location_t loc, tree exp,
12670 enum built_in_function fcode)
12672 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12673 CALL_EXPR_ARGP (exp), fcode);
12676 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12677 NULL_TREE if a normal call should be emitted rather than expanding
12678 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12679 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12680 passed as second argument. */
12683 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12684 tree maxlen, enum built_in_function fcode)
12686 tree dest, size, len, fn, fmt, flag;
12687 const char *fmt_str;
12689 /* Verify the required arguments in the original call. */
12693 if (!validate_arg (dest, POINTER_TYPE))
12696 if (!validate_arg (len, INTEGER_TYPE))
12699 if (!validate_arg (flag, INTEGER_TYPE))
12702 if (!validate_arg (size, INTEGER_TYPE))
12705 if (!validate_arg (fmt, POINTER_TYPE))
12708 if (! host_integerp (size, 1))
12711 if (! integer_all_onesp (size))
12713 if (! host_integerp (len, 1))
12715 /* If LEN is not constant, try MAXLEN too.
12716 For MAXLEN only allow optimizing into non-_ocs function
12717 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12718 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12724 if (tree_int_cst_lt (size, maxlen))
12728 if (!init_target_chars ())
12731 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12732 or if format doesn't contain % chars or is "%s". */
12733 if (! integer_zerop (flag))
12735 fmt_str = c_getstr (fmt);
12736 if (fmt_str == NULL)
12738 if (strchr (fmt_str, target_percent) != NULL
12739 && strcmp (fmt_str, target_percent_s))
12743 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12745 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12746 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12750 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12753 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12754 a normal call should be emitted rather than expanding the function
12755 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12756 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12757 passed as second argument. */
12760 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12761 enum built_in_function fcode)
12763 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12764 CALL_EXPR_ARGP (exp), maxlen, fcode);
12767 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12768 FMT and ARG are the arguments to the call; we don't fold cases with
12769 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12771 Return NULL_TREE if no simplification was possible, otherwise return the
12772 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12773 code of the function to be simplified. */
12776 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12777 tree arg, bool ignore,
12778 enum built_in_function fcode)
12780 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12781 const char *fmt_str = NULL;
12783 /* If the return value is used, don't do the transformation. */
12787 /* Verify the required arguments in the original call. */
12788 if (!validate_arg (fmt, POINTER_TYPE))
12791 /* Check whether the format is a literal string constant. */
12792 fmt_str = c_getstr (fmt);
12793 if (fmt_str == NULL)
12796 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12798 /* If we're using an unlocked function, assume the other
12799 unlocked functions exist explicitly. */
12800 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12801 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12805 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12806 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12809 if (!init_target_chars ())
12812 if (strcmp (fmt_str, target_percent_s) == 0
12813 || strchr (fmt_str, target_percent) == NULL)
12817 if (strcmp (fmt_str, target_percent_s) == 0)
12819 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12822 if (!arg || !validate_arg (arg, POINTER_TYPE))
12825 str = c_getstr (arg);
12831 /* The format specifier doesn't contain any '%' characters. */
12832 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12838 /* If the string was "", printf does nothing. */
12839 if (str[0] == '\0')
12840 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12842 /* If the string has length of 1, call putchar. */
12843 if (str[1] == '\0')
12845 /* Given printf("c"), (where c is any one character,)
12846 convert "c"[0] to an int and pass that to the replacement
12848 newarg = build_int_cst (NULL_TREE, str[0]);
12850 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12854 /* If the string was "string\n", call puts("string"). */
12855 size_t len = strlen (str);
12856 if ((unsigned char)str[len - 1] == target_newline)
12858 /* Create a NUL-terminated string that's one char shorter
12859 than the original, stripping off the trailing '\n'. */
12860 char *newstr = XALLOCAVEC (char, len);
12861 memcpy (newstr, str, len - 1);
12862 newstr[len - 1] = 0;
12864 newarg = build_string_literal (len, newstr);
12866 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12869 /* We'd like to arrange to call fputs(string,stdout) here,
12870 but we need stdout and don't have a way to get it yet. */
12875 /* The other optimizations can be done only on the non-va_list variants. */
12876 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12879 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12880 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12882 if (!arg || !validate_arg (arg, POINTER_TYPE))
12885 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12888 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12889 else if (strcmp (fmt_str, target_percent_c) == 0)
12891 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12894 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12900 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12903 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12904 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12905 more than 3 arguments, and ARG may be null in the 2-argument case.
12907 Return NULL_TREE if no simplification was possible, otherwise return the
12908 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12909 code of the function to be simplified. */
12912 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12913 tree fmt, tree arg, bool ignore,
12914 enum built_in_function fcode)
12916 tree fn_fputc, fn_fputs, call = NULL_TREE;
12917 const char *fmt_str = NULL;
12919 /* If the return value is used, don't do the transformation. */
12923 /* Verify the required arguments in the original call. */
12924 if (!validate_arg (fp, POINTER_TYPE))
12926 if (!validate_arg (fmt, POINTER_TYPE))
12929 /* Check whether the format is a literal string constant. */
12930 fmt_str = c_getstr (fmt);
12931 if (fmt_str == NULL)
12934 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12936 /* If we're using an unlocked function, assume the other
12937 unlocked functions exist explicitly. */
12938 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12939 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12943 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12944 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12947 if (!init_target_chars ())
12950 /* If the format doesn't contain % args or %%, use strcpy. */
12951 if (strchr (fmt_str, target_percent) == NULL)
12953 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12957 /* If the format specifier was "", fprintf does nothing. */
12958 if (fmt_str[0] == '\0')
12960 /* If FP has side-effects, just wait until gimplification is
12962 if (TREE_SIDE_EFFECTS (fp))
12965 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12968 /* When "string" doesn't contain %, replace all cases of
12969 fprintf (fp, string) with fputs (string, fp). The fputs
12970 builtin will take care of special cases like length == 1. */
12972 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12975 /* The other optimizations can be done only on the non-va_list variants. */
12976 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12979 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12980 else if (strcmp (fmt_str, target_percent_s) == 0)
12982 if (!arg || !validate_arg (arg, POINTER_TYPE))
12985 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12988 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12989 else if (strcmp (fmt_str, target_percent_c) == 0)
12991 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12994 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12999 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13002 /* Initialize format string characters in the target charset. */
13005 init_target_chars (void)
13010 target_newline = lang_hooks.to_target_charset ('\n');
13011 target_percent = lang_hooks.to_target_charset ('%');
13012 target_c = lang_hooks.to_target_charset ('c');
13013 target_s = lang_hooks.to_target_charset ('s');
13014 if (target_newline == 0 || target_percent == 0 || target_c == 0
13018 target_percent_c[0] = target_percent;
13019 target_percent_c[1] = target_c;
13020 target_percent_c[2] = '\0';
13022 target_percent_s[0] = target_percent;
13023 target_percent_s[1] = target_s;
13024 target_percent_s[2] = '\0';
13026 target_percent_s_newline[0] = target_percent;
13027 target_percent_s_newline[1] = target_s;
13028 target_percent_s_newline[2] = target_newline;
13029 target_percent_s_newline[3] = '\0';
13036 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13037 and no overflow/underflow occurred. INEXACT is true if M was not
13038 exactly calculated. TYPE is the tree type for the result. This
13039 function assumes that you cleared the MPFR flags and then
13040 calculated M to see if anything subsequently set a flag prior to
13041 entering this function. Return NULL_TREE if any checks fail. */
13044 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13046 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13047 overflow/underflow occurred. If -frounding-math, proceed iff the
13048 result of calling FUNC was exact. */
13049 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13050 && (!flag_rounding_math || !inexact))
13052 REAL_VALUE_TYPE rr;
13054 real_from_mpfr (&rr, m, type, GMP_RNDN);
13055 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13056 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13057 but the mpft_t is not, then we underflowed in the
13059 if (real_isfinite (&rr)
13060 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13062 REAL_VALUE_TYPE rmode;
13064 real_convert (&rmode, TYPE_MODE (type), &rr);
13065 /* Proceed iff the specified mode can hold the value. */
13066 if (real_identical (&rmode, &rr))
13067 return build_real (type, rmode);
13073 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13074 number and no overflow/underflow occurred. INEXACT is true if M
13075 was not exactly calculated. TYPE is the tree type for the result.
13076 This function assumes that you cleared the MPFR flags and then
13077 calculated M to see if anything subsequently set a flag prior to
13078 entering this function. Return NULL_TREE if any checks fail, if
13079 FORCE_CONVERT is true, then bypass the checks. */
13082 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13084 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13085 overflow/underflow occurred. If -frounding-math, proceed iff the
13086 result of calling FUNC was exact. */
13088 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13089 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13090 && (!flag_rounding_math || !inexact)))
13092 REAL_VALUE_TYPE re, im;
13094 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13095 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13096 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13097 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13098 but the mpft_t is not, then we underflowed in the
13101 || (real_isfinite (&re) && real_isfinite (&im)
13102 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13103 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13105 REAL_VALUE_TYPE re_mode, im_mode;
13107 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13108 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13109 /* Proceed iff the specified mode can hold the value. */
13111 || (real_identical (&re_mode, &re)
13112 && real_identical (&im_mode, &im)))
13113 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13114 build_real (TREE_TYPE (type), im_mode));
13120 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13121 FUNC on it and return the resulting value as a tree with type TYPE.
13122 If MIN and/or MAX are not NULL, then the supplied ARG must be
13123 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13124 acceptable values, otherwise they are not. The mpfr precision is
13125 set to the precision of TYPE. We assume that function FUNC returns
13126 zero if the result could be calculated exactly within the requested
13130 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13131 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13134 tree result = NULL_TREE;
13138 /* To proceed, MPFR must exactly represent the target floating point
13139 format, which only happens when the target base equals two. */
13140 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13141 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13143 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13145 if (real_isfinite (ra)
13146 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13147 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13149 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13150 const int prec = fmt->p;
13151 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13155 mpfr_init2 (m, prec);
13156 mpfr_from_real (m, ra, GMP_RNDN);
13157 mpfr_clear_flags ();
13158 inexact = func (m, m, rnd);
13159 result = do_mpfr_ckconv (m, type, inexact);
13167 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13168 FUNC on it and return the resulting value as a tree with type TYPE.
13169 The mpfr precision is set to the precision of TYPE. We assume that
13170 function FUNC returns zero if the result could be calculated
13171 exactly within the requested precision. */
13174 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13175 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13177 tree result = NULL_TREE;
13182 /* To proceed, MPFR must exactly represent the target floating point
13183 format, which only happens when the target base equals two. */
13184 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13185 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13186 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13188 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13189 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13191 if (real_isfinite (ra1) && real_isfinite (ra2))
13193 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13194 const int prec = fmt->p;
13195 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13199 mpfr_inits2 (prec, m1, m2, NULL);
13200 mpfr_from_real (m1, ra1, GMP_RNDN);
13201 mpfr_from_real (m2, ra2, GMP_RNDN);
13202 mpfr_clear_flags ();
13203 inexact = func (m1, m1, m2, rnd);
13204 result = do_mpfr_ckconv (m1, type, inexact);
13205 mpfr_clears (m1, m2, NULL);
13212 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13213 FUNC on it and return the resulting value as a tree with type TYPE.
13214 The mpfr precision is set to the precision of TYPE. We assume that
13215 function FUNC returns zero if the result could be calculated
13216 exactly within the requested precision. */
13219 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13220 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13222 tree result = NULL_TREE;
13228 /* To proceed, MPFR must exactly represent the target floating point
13229 format, which only happens when the target base equals two. */
13230 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13231 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13232 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13233 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13235 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13236 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13237 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13239 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13241 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13242 const int prec = fmt->p;
13243 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13247 mpfr_inits2 (prec, m1, m2, m3, NULL);
13248 mpfr_from_real (m1, ra1, GMP_RNDN);
13249 mpfr_from_real (m2, ra2, GMP_RNDN);
13250 mpfr_from_real (m3, ra3, GMP_RNDN);
13251 mpfr_clear_flags ();
13252 inexact = func (m1, m1, m2, m3, rnd);
13253 result = do_mpfr_ckconv (m1, type, inexact);
13254 mpfr_clears (m1, m2, m3, NULL);
13261 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13262 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13263 If ARG_SINP and ARG_COSP are NULL then the result is returned
13264 as a complex value.
13265 The type is taken from the type of ARG and is used for setting the
13266 precision of the calculation and results. */
13269 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13271 tree const type = TREE_TYPE (arg);
13272 tree result = NULL_TREE;
13276 /* To proceed, MPFR must exactly represent the target floating point
13277 format, which only happens when the target base equals two. */
13278 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13279 && TREE_CODE (arg) == REAL_CST
13280 && !TREE_OVERFLOW (arg))
13282 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13284 if (real_isfinite (ra))
13286 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13287 const int prec = fmt->p;
13288 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13289 tree result_s, result_c;
13293 mpfr_inits2 (prec, m, ms, mc, NULL);
13294 mpfr_from_real (m, ra, GMP_RNDN);
13295 mpfr_clear_flags ();
13296 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13297 result_s = do_mpfr_ckconv (ms, type, inexact);
13298 result_c = do_mpfr_ckconv (mc, type, inexact);
13299 mpfr_clears (m, ms, mc, NULL);
13300 if (result_s && result_c)
13302 /* If we are to return in a complex value do so. */
13303 if (!arg_sinp && !arg_cosp)
13304 return build_complex (build_complex_type (type),
13305 result_c, result_s);
13307 /* Dereference the sin/cos pointer arguments. */
13308 arg_sinp = build_fold_indirect_ref (arg_sinp);
13309 arg_cosp = build_fold_indirect_ref (arg_cosp);
13310 /* Proceed if valid pointer type were passed in. */
13311 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13312 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13314 /* Set the values. */
13315 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13317 TREE_SIDE_EFFECTS (result_s) = 1;
13318 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13320 TREE_SIDE_EFFECTS (result_c) = 1;
13321 /* Combine the assignments into a compound expr. */
13322 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13323 result_s, result_c));
13331 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13332 two-argument mpfr order N Bessel function FUNC on them and return
13333 the resulting value as a tree with type TYPE. The mpfr precision
13334 is set to the precision of TYPE. We assume that function FUNC
13335 returns zero if the result could be calculated exactly within the
13336 requested precision. */
13338 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13339 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13340 const REAL_VALUE_TYPE *min, bool inclusive)
13342 tree result = NULL_TREE;
13347 /* To proceed, MPFR must exactly represent the target floating point
13348 format, which only happens when the target base equals two. */
13349 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13350 && host_integerp (arg1, 0)
13351 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13353 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13354 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13357 && real_isfinite (ra)
13358 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13360 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13361 const int prec = fmt->p;
13362 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13366 mpfr_init2 (m, prec);
13367 mpfr_from_real (m, ra, GMP_RNDN);
13368 mpfr_clear_flags ();
13369 inexact = func (m, n, m, rnd);
13370 result = do_mpfr_ckconv (m, type, inexact);
13378 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13379 the pointer *(ARG_QUO) and return the result. The type is taken
13380 from the type of ARG0 and is used for setting the precision of the
13381 calculation and results. */
13384 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13386 tree const type = TREE_TYPE (arg0);
13387 tree result = NULL_TREE;
13392 /* To proceed, MPFR must exactly represent the target floating point
13393 format, which only happens when the target base equals two. */
13394 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13395 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13396 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13398 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13399 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13401 if (real_isfinite (ra0) && real_isfinite (ra1))
13403 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13404 const int prec = fmt->p;
13405 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13410 mpfr_inits2 (prec, m0, m1, NULL);
13411 mpfr_from_real (m0, ra0, GMP_RNDN);
13412 mpfr_from_real (m1, ra1, GMP_RNDN);
13413 mpfr_clear_flags ();
13414 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13415 /* Remquo is independent of the rounding mode, so pass
13416 inexact=0 to do_mpfr_ckconv(). */
13417 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13418 mpfr_clears (m0, m1, NULL);
13421 /* MPFR calculates quo in the host's long so it may
13422 return more bits in quo than the target int can hold
13423 if sizeof(host long) > sizeof(target int). This can
13424 happen even for native compilers in LP64 mode. In
13425 these cases, modulo the quo value with the largest
13426 number that the target int can hold while leaving one
13427 bit for the sign. */
13428 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13429 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13431 /* Dereference the quo pointer argument. */
13432 arg_quo = build_fold_indirect_ref (arg_quo);
13433 /* Proceed iff a valid pointer type was passed in. */
13434 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13436 /* Set the value. */
13437 tree result_quo = fold_build2 (MODIFY_EXPR,
13438 TREE_TYPE (arg_quo), arg_quo,
13439 build_int_cst (NULL, integer_quo));
13440 TREE_SIDE_EFFECTS (result_quo) = 1;
13441 /* Combine the quo assignment with the rem. */
13442 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13443 result_quo, result_rem));
13451 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13452 resulting value as a tree with type TYPE. The mpfr precision is
13453 set to the precision of TYPE. We assume that this mpfr function
13454 returns zero if the result could be calculated exactly within the
13455 requested precision. In addition, the integer pointer represented
13456 by ARG_SG will be dereferenced and set to the appropriate signgam
13460 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13462 tree result = NULL_TREE;
13466 /* To proceed, MPFR must exactly represent the target floating point
13467 format, which only happens when the target base equals two. Also
13468 verify ARG is a constant and that ARG_SG is an int pointer. */
13469 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13470 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13471 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13472 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13474 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13476 /* In addition to NaN and Inf, the argument cannot be zero or a
13477 negative integer. */
13478 if (real_isfinite (ra)
13479 && ra->cl != rvc_zero
13480 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13482 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13483 const int prec = fmt->p;
13484 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13489 mpfr_init2 (m, prec);
13490 mpfr_from_real (m, ra, GMP_RNDN);
13491 mpfr_clear_flags ();
13492 inexact = mpfr_lgamma (m, &sg, m, rnd);
13493 result_lg = do_mpfr_ckconv (m, type, inexact);
13499 /* Dereference the arg_sg pointer argument. */
13500 arg_sg = build_fold_indirect_ref (arg_sg);
13501 /* Assign the signgam value into *arg_sg. */
13502 result_sg = fold_build2 (MODIFY_EXPR,
13503 TREE_TYPE (arg_sg), arg_sg,
13504 build_int_cst (NULL, sg));
13505 TREE_SIDE_EFFECTS (result_sg) = 1;
13506 /* Combine the signgam assignment with the lgamma result. */
13507 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13508 result_sg, result_lg));
13516 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13517 function FUNC on it and return the resulting value as a tree with
13518 type TYPE. The mpfr precision is set to the precision of TYPE. We
13519 assume that function FUNC returns zero if the result could be
13520 calculated exactly within the requested precision. */
13523 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13525 tree result = NULL_TREE;
13529 /* To proceed, MPFR must exactly represent the target floating point
13530 format, which only happens when the target base equals two. */
13531 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13532 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13533 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13535 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13536 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13538 if (real_isfinite (re) && real_isfinite (im))
13540 const struct real_format *const fmt =
13541 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13542 const int prec = fmt->p;
13543 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13544 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13548 mpc_init2 (m, prec);
13549 mpfr_from_real (mpc_realref(m), re, rnd);
13550 mpfr_from_real (mpc_imagref(m), im, rnd);
13551 mpfr_clear_flags ();
13552 inexact = func (m, m, crnd);
13553 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13561 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13562 mpc function FUNC on it and return the resulting value as a tree
13563 with type TYPE. The mpfr precision is set to the precision of
13564 TYPE. We assume that function FUNC returns zero if the result
13565 could be calculated exactly within the requested precision. If
13566 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13567 in the arguments and/or results. */
13570 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13571 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13573 tree result = NULL_TREE;
13578 /* To proceed, MPFR must exactly represent the target floating point
13579 format, which only happens when the target base equals two. */
13580 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13581 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13582 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13583 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13584 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13586 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13587 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13588 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13589 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13592 || (real_isfinite (re0) && real_isfinite (im0)
13593 && real_isfinite (re1) && real_isfinite (im1)))
13595 const struct real_format *const fmt =
13596 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13597 const int prec = fmt->p;
13598 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13599 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13603 mpc_init2 (m0, prec);
13604 mpc_init2 (m1, prec);
13605 mpfr_from_real (mpc_realref(m0), re0, rnd);
13606 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13607 mpfr_from_real (mpc_realref(m1), re1, rnd);
13608 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13609 mpfr_clear_flags ();
13610 inexact = func (m0, m0, m1, crnd);
13611 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13620 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13621 a normal call should be emitted rather than expanding the function
13622 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13625 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13627 int nargs = gimple_call_num_args (stmt);
13629 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13631 ? gimple_call_arg_ptr (stmt, 0)
13632 : &error_mark_node), fcode);
13635 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13636 a normal call should be emitted rather than expanding the function
13637 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13638 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13639 passed as second argument. */
13642 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13643 enum built_in_function fcode)
13645 int nargs = gimple_call_num_args (stmt);
13647 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13649 ? gimple_call_arg_ptr (stmt, 0)
13650 : &error_mark_node), maxlen, fcode);
13653 /* Builtins with folding operations that operate on "..." arguments
13654 need special handling; we need to store the arguments in a convenient
13655 data structure before attempting any folding. Fortunately there are
13656 only a few builtins that fall into this category. FNDECL is the
13657 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13658 result of the function call is ignored. */
13661 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13662 bool ignore ATTRIBUTE_UNUSED)
13664 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13665 tree ret = NULL_TREE;
13669 case BUILT_IN_SPRINTF_CHK:
13670 case BUILT_IN_VSPRINTF_CHK:
13671 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13674 case BUILT_IN_SNPRINTF_CHK:
13675 case BUILT_IN_VSNPRINTF_CHK:
13676 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13683 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13684 TREE_NO_WARNING (ret) = 1;
13690 /* A wrapper function for builtin folding that prevents warnings for
13691 "statement without effect" and the like, caused by removing the
13692 call node earlier than the warning is generated. */
13695 fold_call_stmt (gimple stmt, bool ignore)
13697 tree ret = NULL_TREE;
13698 tree fndecl = gimple_call_fndecl (stmt);
13699 location_t loc = gimple_location (stmt);
13701 && TREE_CODE (fndecl) == FUNCTION_DECL
13702 && DECL_BUILT_IN (fndecl)
13703 && !gimple_call_va_arg_pack_p (stmt))
13705 int nargs = gimple_call_num_args (stmt);
13706 tree *args = (nargs > 0
13707 ? gimple_call_arg_ptr (stmt, 0)
13708 : &error_mark_node);
13710 if (avoid_folding_inline_builtin (fndecl))
13712 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13714 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13718 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13719 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13721 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13724 /* Propagate location information from original call to
13725 expansion of builtin. Otherwise things like
13726 maybe_emit_chk_warning, that operate on the expansion
13727 of a builtin, will use the wrong location information. */
13728 if (gimple_has_location (stmt))
13730 tree realret = ret;
13731 if (TREE_CODE (ret) == NOP_EXPR)
13732 realret = TREE_OPERAND (ret, 0);
13733 if (CAN_HAVE_LOCATION_P (realret)
13734 && !EXPR_HAS_LOCATION (realret))
13735 SET_EXPR_LOCATION (realret, loc);
13745 /* Look up the function in built_in_decls that corresponds to DECL
13746 and set ASMSPEC as its user assembler name. DECL must be a
13747 function decl that declares a builtin. */
13750 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13753 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13754 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13757 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13758 set_user_assembler_name (builtin, asmspec);
13759 switch (DECL_FUNCTION_CODE (decl))
13761 case BUILT_IN_MEMCPY:
13762 init_block_move_fn (asmspec);
13763 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13765 case BUILT_IN_MEMSET:
13766 init_block_clear_fn (asmspec);
13767 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13769 case BUILT_IN_MEMMOVE:
13770 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13772 case BUILT_IN_MEMCMP:
13773 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13775 case BUILT_IN_ABORT:
13776 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13779 if (INT_TYPE_SIZE < BITS_PER_WORD)
13781 set_user_assembler_libfunc ("ffs", asmspec);
13782 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13783 MODE_INT, 0), "ffs");
13791 /* Return true if DECL is a builtin that expands to a constant or similarly
13794 is_simple_builtin (tree decl)
13796 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13797 switch (DECL_FUNCTION_CODE (decl))
13799 /* Builtins that expand to constants. */
13800 case BUILT_IN_CONSTANT_P:
13801 case BUILT_IN_EXPECT:
13802 case BUILT_IN_OBJECT_SIZE:
13803 case BUILT_IN_UNREACHABLE:
13804 /* Simple register moves or loads from stack. */
13805 case BUILT_IN_RETURN_ADDRESS:
13806 case BUILT_IN_EXTRACT_RETURN_ADDR:
13807 case BUILT_IN_FROB_RETURN_ADDR:
13808 case BUILT_IN_RETURN:
13809 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13810 case BUILT_IN_FRAME_ADDRESS:
13811 case BUILT_IN_VA_END:
13812 case BUILT_IN_STACK_SAVE:
13813 case BUILT_IN_STACK_RESTORE:
13814 /* Exception state returns or moves registers around. */
13815 case BUILT_IN_EH_FILTER:
13816 case BUILT_IN_EH_POINTER:
13817 case BUILT_IN_EH_COPY_VALUES:
13827 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13828 most probably expanded inline into reasonably simple code. This is a
13829 superset of is_simple_builtin. */
13831 is_inexpensive_builtin (tree decl)
13835 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13837 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13838 switch (DECL_FUNCTION_CODE (decl))
13841 case BUILT_IN_ALLOCA:
13842 case BUILT_IN_BSWAP32:
13843 case BUILT_IN_BSWAP64:
13845 case BUILT_IN_CLZIMAX:
13846 case BUILT_IN_CLZL:
13847 case BUILT_IN_CLZLL:
13849 case BUILT_IN_CTZIMAX:
13850 case BUILT_IN_CTZL:
13851 case BUILT_IN_CTZLL:
13853 case BUILT_IN_FFSIMAX:
13854 case BUILT_IN_FFSL:
13855 case BUILT_IN_FFSLL:
13856 case BUILT_IN_IMAXABS:
13857 case BUILT_IN_FINITE:
13858 case BUILT_IN_FINITEF:
13859 case BUILT_IN_FINITEL:
13860 case BUILT_IN_FINITED32:
13861 case BUILT_IN_FINITED64:
13862 case BUILT_IN_FINITED128:
13863 case BUILT_IN_FPCLASSIFY:
13864 case BUILT_IN_ISFINITE:
13865 case BUILT_IN_ISINF_SIGN:
13866 case BUILT_IN_ISINF:
13867 case BUILT_IN_ISINFF:
13868 case BUILT_IN_ISINFL:
13869 case BUILT_IN_ISINFD32:
13870 case BUILT_IN_ISINFD64:
13871 case BUILT_IN_ISINFD128:
13872 case BUILT_IN_ISNAN:
13873 case BUILT_IN_ISNANF:
13874 case BUILT_IN_ISNANL:
13875 case BUILT_IN_ISNAND32:
13876 case BUILT_IN_ISNAND64:
13877 case BUILT_IN_ISNAND128:
13878 case BUILT_IN_ISNORMAL:
13879 case BUILT_IN_ISGREATER:
13880 case BUILT_IN_ISGREATEREQUAL:
13881 case BUILT_IN_ISLESS:
13882 case BUILT_IN_ISLESSEQUAL:
13883 case BUILT_IN_ISLESSGREATER:
13884 case BUILT_IN_ISUNORDERED:
13885 case BUILT_IN_VA_ARG_PACK:
13886 case BUILT_IN_VA_ARG_PACK_LEN:
13887 case BUILT_IN_VA_COPY:
13888 case BUILT_IN_TRAP:
13889 case BUILT_IN_SAVEREGS:
13890 case BUILT_IN_POPCOUNTL:
13891 case BUILT_IN_POPCOUNTLL:
13892 case BUILT_IN_POPCOUNTIMAX:
13893 case BUILT_IN_POPCOUNT:
13894 case BUILT_IN_PARITYL:
13895 case BUILT_IN_PARITYLL:
13896 case BUILT_IN_PARITYIMAX:
13897 case BUILT_IN_PARITY:
13898 case BUILT_IN_LABS:
13899 case BUILT_IN_LLABS:
13900 case BUILT_IN_PREFETCH:
13904 return is_simple_builtin (decl);