1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic-core.h"
/* By default, unaligned accesses are considered slow only when the target
   requires strict alignment.  NOTE(review): the listing had dropped the
   closing #endif of each conditional and the embedded line numbers;
   restored to the conventional form.  */
#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* Varargs are padded downward by default exactly on big-endian targets.  */
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
/* Per-target state for this file; THIS_TARGET_BUILTINS points at the
   default set unless a target switch installs another instance.  */
64 struct target_builtins default_target_builtins;
66 struct target_builtins *this_target_builtins = &default_target_builtins;
69 /* Define the names of the builtin function types and codes. */
/* Indexed by enum built_in_class; order must match that enum's declaration.  */
70 const char *const built_in_class_names[4]
71 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
73 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
74 const char * built_in_names[(int) END_BUILTINS] =
76 #include "builtins.def"
80 /* Setup an array of _DECL trees, make sure each element is
81 initialized to NULL_TREE. */
/* Zero-initialized by C static-storage rules (NULL_TREE is a null pointer).  */
82 tree built_in_decls[(int) END_BUILTINS];
83 /* Declarations used when constructing the builtin implicitly in the compiler.
84 It may be NULL_TREE when this is invalid (for instance runtime is not
85 required to implement the function call in all cases). */
86 tree implicit_built_in_decls[(int) END_BUILTINS];
88 static const char *c_getstr (tree);
89 static rtx c_readstr (const char *, enum machine_mode);
90 static int target_char_cast (tree, char *);
91 static rtx get_memory_rtx (tree, tree);
92 static int apply_args_size (void);
93 static int apply_result_size (void);
/* result_vector is only needed when the target provides untyped call or
   untyped return patterns.  NOTE(review): the listing was missing the
   closing #endif; restored.  */
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
97 static void expand_builtin_update_setjmp_buf (rtx);
98 static void expand_builtin_prefetch (tree);
99 static rtx expand_builtin_apply_args (void);
100 static rtx expand_builtin_apply_args_1 (void);
101 static rtx expand_builtin_apply (rtx, rtx, rtx);
102 static void expand_builtin_return (rtx);
103 static enum type_class type_to_class (tree);
104 static rtx expand_builtin_classify_type (tree);
105 static void expand_errno_check (tree, rtx);
106 static rtx expand_builtin_mathfn (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strcmp (tree, rtx);
120 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
121 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memcpy (tree, rtx);
123 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
125 enum machine_mode, int);
126 static rtx expand_builtin_strcpy (tree, rtx);
127 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_alloca (tree, rtx);
136 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
137 static rtx expand_builtin_frame_address (tree, tree);
138 static tree stabilize_va_list_loc (location_t, tree, int);
139 static rtx expand_builtin_expect (tree, rtx);
140 static tree fold_builtin_constant_p (tree);
141 static tree fold_builtin_expect (location_t, tree, tree);
142 static tree fold_builtin_classify_type (tree);
143 static tree fold_builtin_strlen (location_t, tree, tree);
144 static tree fold_builtin_inf (location_t, tree, int);
145 static tree fold_builtin_nan (tree, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static bool integer_valued_real_p (tree);
149 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
150 static bool readonly_data_expr (tree);
151 static rtx expand_builtin_fabs (tree, rtx, rtx);
152 static rtx expand_builtin_signbit (tree, rtx);
153 static tree fold_builtin_sqrt (location_t, tree, tree);
154 static tree fold_builtin_cbrt (location_t, tree, tree);
155 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
156 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_cos (location_t, tree, tree, tree);
158 static tree fold_builtin_cosh (location_t, tree, tree, tree);
159 static tree fold_builtin_tan (tree, tree);
160 static tree fold_builtin_trunc (location_t, tree, tree);
161 static tree fold_builtin_floor (location_t, tree, tree);
162 static tree fold_builtin_ceil (location_t, tree, tree);
163 static tree fold_builtin_round (location_t, tree, tree);
164 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
165 static tree fold_builtin_bitop (tree, tree);
166 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
167 static tree fold_builtin_strchr (location_t, tree, tree, tree);
168 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
170 static tree fold_builtin_strcmp (location_t, tree, tree);
171 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
172 static tree fold_builtin_signbit (location_t, tree, tree);
173 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_isascii (location_t, tree);
175 static tree fold_builtin_toascii (location_t, tree);
176 static tree fold_builtin_isdigit (location_t, tree);
177 static tree fold_builtin_fabs (location_t, tree, tree);
178 static tree fold_builtin_abs (location_t, tree, tree);
179 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
181 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
182 static tree fold_builtin_0 (location_t, tree, bool);
183 static tree fold_builtin_1 (location_t, tree, tree, bool);
184 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
185 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
186 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
187 static tree fold_builtin_varargs (location_t, tree, tree, bool);
189 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
190 static tree fold_builtin_strstr (location_t, tree, tree, tree);
191 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
192 static tree fold_builtin_strcat (location_t, tree, tree);
193 static tree fold_builtin_strncat (location_t, tree, tree, tree);
194 static tree fold_builtin_strspn (location_t, tree, tree);
195 static tree fold_builtin_strcspn (location_t, tree, tree);
196 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
198 static rtx expand_builtin_object_size (tree);
199 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
200 enum built_in_function);
201 static void maybe_emit_chk_warning (tree, enum built_in_function);
202 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_free_warning (tree);
204 static tree fold_builtin_object_size (tree, tree);
205 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
206 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
207 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
208 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
209 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
210 enum built_in_function);
211 static bool init_target_chars (void);
213 static unsigned HOST_WIDE_INT target_newline;
214 static unsigned HOST_WIDE_INT target_percent;
215 static unsigned HOST_WIDE_INT target_c;
216 static unsigned HOST_WIDE_INT target_s;
217 static char target_percent_c[3];
218 static char target_percent_s[3];
219 static char target_percent_s_newline[4];
220 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
221 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
222 static tree do_mpfr_arg2 (tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_arg3 (tree, tree, tree, tree,
225 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
226 static tree do_mpfr_sincos (tree, tree, tree);
227 static tree do_mpfr_bessel_n (tree, tree, tree,
228 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_remquo (tree, tree, tree);
231 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_.
   NOTE(review): body braces and return statements were missing from the
   listing; restored to the standard two-prefix test.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
246 /* Return true if DECL is a function symbol representing a built-in. */
249 is_builtin_fn (tree decl)
251 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
255 /* Return true if NODE should be considered for inline expansion regardless
256 of the optimization level. This means whenever a function is invoked with
257 its "internal" name, which normally contains the prefix "__builtin". */
260 called_as_built_in (tree node)
262 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
263 we want the name used to call the function, not the name it
265 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
266 return is_builtin_name (name);
269 /* Return the alignment in bits of EXP, an object.
270 Don't return more than MAX_ALIGN no matter what. */
/* NOTE(review): this listing is gappy -- every line carries an embedded
   original line number and several structural lines (return type, braces,
   else-arms, declarations such as the `offset' tree) are missing, so the
   fragment below does not compile as shown.  The visible logic: strip to
   the innermost reference, derive ALIGN from the innermost object kind
   (decl, constant, *INDIRECT_REF, MEM_REF, TARGET_MEM_REF), then clamp by
   the low bits implied by any variable offset and by BITPOS.  Recover the
   full function from version control before editing.  */
273 get_object_alignment (tree exp, unsigned int max_align)
275 HOST_WIDE_INT bitsize, bitpos;
277 enum machine_mode mode;
278 int unsignedp, volatilep;
279 unsigned int align, inner;
281 /* Get the innermost object and the constant (bitpos) and possibly
282 variable (offset) offset of the access. */
283 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
284 &mode, &unsignedp, &volatilep, true);
286 /* Extract alignment information from the innermost object and
287 possibly adjust bitpos and offset. */
288 if (TREE_CODE (exp) == CONST_DECL)
289 exp = DECL_INITIAL (exp);
/* NOTE(review): the first half of the DECL_P/LABEL_DECL condition is
   missing from the listing.  */
291 && TREE_CODE (exp) != LABEL_DECL)
292 align = DECL_ALIGN (exp);
293 else if (CONSTANT_CLASS_P (exp))
295 align = TYPE_ALIGN (TREE_TYPE (exp));
296 #ifdef CONSTANT_ALIGNMENT
297 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
300 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
301 align = TYPE_ALIGN (TREE_TYPE (exp));
302 else if (TREE_CODE (exp) == INDIRECT_REF)
303 align = TYPE_ALIGN (TREE_TYPE (exp));
304 else if (TREE_CODE (exp) == MISALIGNED_INDIRECT_REF)
306 tree op1 = TREE_OPERAND (exp, 1);
307 align = integer_zerop (op1) ? BITS_PER_UNIT : TREE_INT_CST_LOW (op1);
309 else if (TREE_CODE (exp) == MEM_REF)
311 tree addr = TREE_OPERAND (exp, 0);
312 if (TREE_CODE (addr) == BIT_AND_EXPR
313 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
/* Low set bit of the mask bounds the alignment guaranteed by the AND.  */
315 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
316 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
317 align *= BITS_PER_UNIT;
318 addr = TREE_OPERAND (addr, 0);
321 align = BITS_PER_UNIT;
322 if (TREE_CODE (addr) == ADDR_EXPR)
323 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
325 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
327 else if (TREE_CODE (exp) == TARGET_MEM_REF
330 align = get_object_alignment (TMR_SYMBOL (exp), max_align);
331 if (TMR_OFFSET (exp))
332 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
333 if (TMR_INDEX (exp) && TMR_STEP (exp))
335 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
/* step & -step isolates the lowest set bit, i.e. the guaranteed stride.  */
336 align = MIN (align, (step & -step) * BITS_PER_UNIT);
338 else if (TMR_INDEX (exp))
339 align = BITS_PER_UNIT;
342 align = BITS_PER_UNIT;
344 /* If there is a non-constant offset part extract the maximum
345 alignment that can prevail. */
351 if (TREE_CODE (offset) == PLUS_EXPR)
353 next_offset = TREE_OPERAND (offset, 0);
354 offset = TREE_OPERAND (offset, 1);
358 if (host_integerp (offset, 1))
360 /* Any overflow in calculating offset_bits won't change
363 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
366 inner = MIN (inner, (offset_bits & -offset_bits));
368 else if (TREE_CODE (offset) == MULT_EXPR
369 && host_integerp (TREE_OPERAND (offset, 1), 1))
371 /* Any overflow in calculating offset_factor won't change
373 unsigned offset_factor
374 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
378 inner = MIN (inner, (offset_factor & -offset_factor));
382 inner = MIN (inner, BITS_PER_UNIT);
385 offset = next_offset;
388 /* Alignment is innermost object alignment adjusted by the constant
389 and non-constant offset parts. */
390 align = MIN (align, inner);
391 bitpos = bitpos & (align - 1);
393 /* align and bitpos now specify known low bits of the pointer.
394 ptr & (align - 1) == bitpos. */
397 align = (bitpos & -bitpos);
399 return MIN (align, max_align);
402 /* Returns true iff we can trust that alignment information has been
403 calculated properly. */
406 can_trust_pointer_alignment (void)
408 /* We rely on TER to compute accurate alignment information. */
409 return (optimize && flag_tree_ter);
412 /* Return the alignment in bits of EXP, a pointer valued expression.
413 But don't return more than MAX_ALIGN no matter what.
414 The alignment returned is, by default, the alignment of the thing that
415 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
417 Otherwise, look at the expression to see if we can do better, i.e., if the
418 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): listing is gappy -- return type, braces, several case
   labels and early returns are missing, so this fragment does not compile
   as shown.  Visible logic: start from the pointee's type alignment, then
   walk NOP/POINTER_PLUS/ADDR_EXPR wrappers to tighten or loosen the bound;
   recover the full function before editing.  */
421 get_pointer_alignment (tree exp, unsigned int max_align)
423 unsigned int align, inner;
425 if (!can_trust_pointer_alignment ())
428 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
431 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
432 align = MIN (align, max_align);
436 switch (TREE_CODE (exp))
439 exp = TREE_OPERAND (exp, 0);
440 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
443 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
444 align = MIN (inner, max_align);
447 case POINTER_PLUS_EXPR:
448 /* If sum of pointer + int, restrict our maximum alignment to that
449 imposed by the integer. If not, we can't do any better than
451 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
/* Halve MAX_ALIGN until the constant addend is a multiple of it.  */
454 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
455 & (max_align / BITS_PER_UNIT - 1))
459 exp = TREE_OPERAND (exp, 0);
463 /* See what we are pointing at and look at its alignment. */
464 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
472 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
473 way, because it could contain a zero byte in the middle.
474 TREE_STRING_LENGTH is the size of the character array, not the string.
476 ONLY_VALUE should be nonzero if the result is not going to be emitted
477 into the instruction stream and zero if it is going to be expanded.
478 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
479 is returned, otherwise NULL, since
480 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
481 evaluate the side-effects.
483 The value returned is of type `ssizetype'.
485 Unfortunately, string_constant can't access the values of const char
486 arrays with initializers, so neither can we do so here. */
/* NOTE(review): listing is gappy -- return type, braces, several
   declarations (offset_node, ptr, max, loc, len1/len2, i) and a number of
   early returns are missing, so this fragment does not compile as shown.
   Recover the full function from version control before editing.  */
489 c_strlen (tree src, int only_value)
492 HOST_WIDE_INT offset;
/* COND_EXPR: length is known only if both arms agree.  */
498 if (TREE_CODE (src) == COND_EXPR
499 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
503 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
504 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
505 if (tree_int_cst_equal (len1, len2))
509 if (TREE_CODE (src) == COMPOUND_EXPR
510 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
511 return c_strlen (TREE_OPERAND (src, 1), only_value);
513 if (EXPR_HAS_LOCATION (src))
514 loc = EXPR_LOCATION (src);
516 loc = input_location;
518 src = string_constant (src, &offset_node);
522 max = TREE_STRING_LENGTH (src) - 1;
523 ptr = TREE_STRING_POINTER (src);
525 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
527 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
528 compute the offset to the following null if we don't know where to
529 start searching for it. */
532 for (i = 0; i < max; i++)
536 /* We don't know the starting offset, but we do know that the string
537 has no internal zero bytes. We can assume that the offset falls
538 within the bounds of the string; otherwise, the programmer deserves
539 what he gets. Subtract the offset from the length of the string,
540 and return that. This would perhaps not be valid if we were dealing
541 with named arrays in addition to literal string constants. */
543 return size_diffop_loc (loc, size_int (max), offset_node);
546 /* We have a known offset into the string. Start searching there for
547 a null character if we can represent it as a single HOST_WIDE_INT. */
548 if (offset_node == 0)
550 else if (! host_integerp (offset_node, 0))
553 offset = tree_low_cst (offset_node, 0);
555 /* If the offset is known to be out of bounds, warn, and call strlen at
557 if (offset < 0 || offset > max)
559 /* Suppress multiple warnings for propagated constant strings. */
560 if (! TREE_NO_WARNING (src))
562 warning_at (loc, 0, "offset outside bounds of constant string");
563 TREE_NO_WARNING (src) = 1;
568 /* Use strlen to search for the first zero byte. Since any strings
569 constructed with build_string will have nulls appended, we win even
570 if we get handed something like (char[4])"abcd".
572 Since OFFSET is our starting index into the string, no further
573 calculation is needed. */
574 return ssize_int (strlen (ptr + offset));
577 /* Return a char pointer for a C string if it is a string constant
578 or sum of string constant and integer constant. */
585 src = string_constant (src, &offset_node);
589 if (offset_node == 0)
590 return TREE_STRING_POINTER (src);
591 else if (!host_integerp (offset_node, 1)
592 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
595 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
598 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
599 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
602 c_readstr (const char *str, enum machine_mode mode)
608 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
613 for (i = 0; i < GET_MODE_SIZE (mode); i++)
616 if (WORDS_BIG_ENDIAN)
617 j = GET_MODE_SIZE (mode) - i - 1;
618 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
619 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
620 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
622 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
625 ch = (unsigned char) str[i];
626 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
628 return immed_double_const (c[0], c[1], mode);
631 /* Cast a target constant CST to target CHAR and if that value fits into
632 host char type, return zero and put that value into variable pointed to by
636 target_char_cast (tree cst, char *p)
638 unsigned HOST_WIDE_INT val, hostval;
640 if (!host_integerp (cst, 1)
641 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
644 val = tree_low_cst (cst, 1);
645 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
646 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
649 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
650 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
659 /* Similar to save_expr, but assumes that arbitrary code is not executed
660 in between the multiple evaluations. In particular, we assume that a
661 non-addressable local variable will not be modified. */
664 builtin_save_expr (tree exp)
666 if (TREE_ADDRESSABLE (exp) == 0
667 && (TREE_CODE (exp) == PARM_DECL
668 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
671 return save_expr (exp);
674 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
675 times to get the address of either a higher stack frame, or a return
676 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): listing is gappy -- return type, braces, the #else/#endif
   lines of several target conditionals, the loop variable declaration and
   the final return are missing, so this fragment does not compile as
   shown.  Recover the full function before editing.  */
679 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
683 #ifdef INITIAL_FRAME_ADDRESS_RTX
684 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
688 /* For a zero count with __builtin_return_address, we don't care what
689 frame address we return, because target-specific definitions will
690 override us. Therefore frame pointer elimination is OK, and using
691 the soft frame pointer is OK.
693 For a nonzero count, or a zero count with __builtin_frame_address,
694 we require a stable offset from the current frame pointer to the
695 previous one, so we must use the hard frame pointer, and
696 we must disable frame pointer elimination. */
697 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
698 tem = frame_pointer_rtx;
701 tem = hard_frame_pointer_rtx;
703 /* Tell reload not to eliminate the frame pointer. */
704 crtl->accesses_prior_frames = 1;
708 /* Some machines need special handling before we can access
709 arbitrary frames. For example, on the SPARC, we must first flush
710 all register windows to the stack. */
711 #ifdef SETUP_FRAME_ADDRESSES
713 SETUP_FRAME_ADDRESSES ();
716 /* On the SPARC, the return address is not in the frame, it is in a
717 register. There is no way to access it off of the current frame
718 pointer, but it can be accessed off the previous frame pointer by
719 reading the value from the register window save area. */
720 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
721 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
725 /* Scan back COUNT frames to the specified frame. */
726 for (i = 0; i < count; i++)
728 /* Assume the dynamic chain pointer is in the word that the
729 frame address points to, unless otherwise specified. */
730 #ifdef DYNAMIC_CHAIN_ADDRESS
731 tem = DYNAMIC_CHAIN_ADDRESS (tem);
733 tem = memory_address (Pmode, tem);
734 tem = gen_frame_mem (Pmode, tem);
735 tem = copy_to_reg (tem);
738 /* For __builtin_frame_address, return what we've got. But, on
739 the SPARC for example, we may have to add a bias. */
740 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
741 #ifdef FRAME_ADDR_RTX
742 return FRAME_ADDR_RTX (tem);
747 /* For __builtin_return_address, get the return address from that frame. */
748 #ifdef RETURN_ADDR_RTX
749 tem = RETURN_ADDR_RTX (count, tem);
751 tem = memory_address (Pmode,
752 plus_constant (tem, GET_MODE_SIZE (Pmode)));
753 tem = gen_frame_mem (Pmode, tem);
758 /* Alias set used for setjmp buffer. */
/* -1 means "not yet allocated"; a real alias set is created lazily via
   new_alias_set () on first use (see the setjmp/longjmp expanders below).  */
759 static alias_set_type setjmp_alias_set = -1;
761 /* Construct the leading half of a __builtin_setjmp call. Control will
762 return to RECEIVER_LABEL. This is also called directly by the SJLJ
763 exception handling code. */
/* NOTE(review): listing is gappy -- return type, braces and the `mem' /
   `stack_save' declarations are missing; the fragment does not compile as
   shown.  Buffer layout established by the visible stores: word 0 = frame
   pointer, word 1 = receiver label, word 2.. = machine-dependent stack
   save area.  */
766 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
768 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
772 if (setjmp_alias_set == -1)
773 setjmp_alias_set = new_alias_set ();
775 buf_addr = convert_memory_address (Pmode, buf_addr);
777 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
779 /* We store the frame pointer and the address of receiver_label in
780 the buffer and use the rest of it for the stack save area, which
781 is machine-dependent. */
783 mem = gen_rtx_MEM (Pmode, buf_addr);
784 set_mem_alias_set (mem, setjmp_alias_set);
785 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* NOTE(review): the trailing comma below is the comma operator joining
   this statement to the next set_mem_alias_set call -- odd but harmless.  */
787 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
788 set_mem_alias_set (mem, setjmp_alias_set);
790 emit_move_insn (validize_mem (mem),
791 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
793 stack_save = gen_rtx_MEM (sa_mode,
794 plus_constant (buf_addr,
795 2 * GET_MODE_SIZE (Pmode)));
796 set_mem_alias_set (stack_save, setjmp_alias_set);
797 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
799 /* If there is further processing to do, do it. */
800 #ifdef HAVE_builtin_setjmp_setup
801 if (HAVE_builtin_setjmp_setup)
802 emit_insn (gen_builtin_setjmp_setup (buf_addr));
805 /* Tell optimize_save_area_alloca that extra work is going to
806 need to go on during alloca. */
807 cfun->calls_setjmp = 1;
809 /* We have a nonlocal label. */
810 cfun->has_nonlocal_label = 1;
813 /* Construct the trailing part of a __builtin_setjmp call. This is
814 also called directly by the SJLJ exception handling code. */
/* NOTE(review): listing is gappy -- return type, braces, the `chain' and
   `i' declarations and the matching #endif lines of the target
   conditionals are missing; the fragment does not compile as shown.  */
817 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
821 /* Clobber the FP when we get here, so we have to make sure it's
822 marked as used by this function. */
823 emit_use (hard_frame_pointer_rtx)
825 /* Mark the static chain as clobbered here so life information
826 doesn't get messed up for it. */
827 chain = targetm.calls.static_chain (current_function_decl, true);
828 if (chain && REG_P (chain))
829 emit_clobber (chain);
831 /* Now put in the code to restore the frame pointer, and argument
832 pointer, if needed. */
833 #ifdef HAVE_nonlocal_goto
834 if (! HAVE_nonlocal_goto)
837 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
838 /* This might change the hard frame pointer in ways that aren't
839 apparent to early optimization passes, so force a clobber. */
840 emit_clobber (hard_frame_pointer_rtx);
843 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
844 if (fixed_regs[ARG_POINTER_REGNUM])
846 #ifdef ELIMINABLE_REGS
/* Restore the arg pointer only when no elimination to the hard FP exists.  */
848 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
850 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
851 if (elim_regs[i].from == ARG_POINTER_REGNUM
852 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
855 if (i == ARRAY_SIZE (elim_regs))
858 /* Now restore our arg pointer from the address at which it
859 was saved in our stack frame. */
860 emit_move_insn (crtl->args.internal_arg_pointer,
861 copy_to_reg (get_arg_pointer_save_area ()));
866 #ifdef HAVE_builtin_setjmp_receiver
867 if (HAVE_builtin_setjmp_receiver)
868 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
871 #ifdef HAVE_nonlocal_goto_receiver
872 if (HAVE_nonlocal_goto_receiver)
873 emit_insn (gen_nonlocal_goto_receiver ());
878 /* We must not allow the code we just generated to be reordered by
879 scheduling. Specifically, the update of the frame pointer must
880 happen immediately, not later. */
881 emit_insn (gen_blockage ());
884 /* __builtin_longjmp is passed a pointer to an array of five words (not
885 all will be used on all machines). It operates similarly to the C
886 library function of the same name, but is more efficient. Much of
887 the code below is copied from the handling of non-local gotos. */
/* NOTE(review): listing is gappy -- return type, braces, #else/#endif
   lines and parts of the insn-scanning loop are missing; the fragment
   does not compile as shown.  Buffer layout read here mirrors what
   expand_builtin_setjmp_setup stores: FP, label, stack save area.  */
890 expand_builtin_longjmp (rtx buf_addr, rtx value)
892 rtx fp, lab, stack, insn, last;
893 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
895 /* DRAP is needed for stack realign if longjmp is expanded to current
897 if (SUPPORTS_STACK_ALIGNMENT)
898 crtl->need_drap = true;
900 if (setjmp_alias_set == -1)
901 setjmp_alias_set = new_alias_set ();
903 buf_addr = convert_memory_address (Pmode, buf_addr);
905 buf_addr = force_reg (Pmode, buf_addr);
907 /* We require that the user must pass a second argument of 1, because
908 that is what builtin_setjmp will return. */
909 gcc_assert (value == const1_rtx);
911 last = get_last_insn ();
912 #ifdef HAVE_builtin_longjmp
913 if (HAVE_builtin_longjmp)
914 emit_insn (gen_builtin_longjmp (buf_addr));
918 fp = gen_rtx_MEM (Pmode, buf_addr);
919 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
920 GET_MODE_SIZE (Pmode)));
922 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
923 2 * GET_MODE_SIZE (Pmode)));
924 set_mem_alias_set (fp, setjmp_alias_set);
925 set_mem_alias_set (lab, setjmp_alias_set);
926 set_mem_alias_set (stack, setjmp_alias_set);
928 /* Pick up FP, label, and SP from the block and jump. This code is
929 from expand_goto in stmt.c; see there for detailed comments. */
930 #ifdef HAVE_nonlocal_goto
931 if (HAVE_nonlocal_goto)
932 /* We have to pass a value to the nonlocal_goto pattern that will
933 get copied into the static_chain pointer, but it does not matter
934 what that value is, because builtin_setjmp does not use it. */
935 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
939 lab = copy_to_reg (lab);
941 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
942 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
944 emit_move_insn (hard_frame_pointer_rtx, fp);
945 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
947 emit_use (hard_frame_pointer_rtx);
948 emit_use (stack_pointer_rtx);
949 emit_indirect_jump (lab);
953 /* Search backwards and mark the jump insn as a non-local goto.
954 Note that this precludes the use of __builtin_longjmp to a
955 __builtin_setjmp target in the same function. However, we've
956 already cautioned the user that these functions are for
957 internal exception handling use only. */
958 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
960 gcc_assert (insn != last);
964 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
967 else if (CALL_P (insn))
972 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
973 and the address of the save area. */
/* NOTE(review): listing is gappy -- return type, braces, early returns,
   #else/#endif lines and the tail of the insn-scanning loop are missing;
   the fragment does not compile as shown.  Save-area layout read here:
   word 0 = frame pointer, word 1 = stack pointer.  */
976 expand_builtin_nonlocal_goto (tree exp)
978 tree t_label, t_save_area;
979 rtx r_label, r_save_area, r_fp, r_sp, insn;
981 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
984 t_label = CALL_EXPR_ARG (exp, 0);
985 t_save_area = CALL_EXPR_ARG (exp, 1);
987 r_label = expand_normal (t_label);
988 r_label = convert_memory_address (Pmode, r_label);
989 r_save_area = expand_normal (t_save_area);
990 r_save_area = convert_memory_address (Pmode, r_save_area);
991 /* Copy the address of the save location to a register just in case it was based
992 on the frame pointer. */
993 r_save_area = copy_to_reg (r_save_area);
994 r_fp = gen_rtx_MEM (Pmode, r_save_area);
995 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
996 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
998 crtl->has_nonlocal_goto = 1;
1000 #ifdef HAVE_nonlocal_goto
1001 /* ??? We no longer need to pass the static chain value, afaik. */
1002 if (HAVE_nonlocal_goto)
1003 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1007 r_label = copy_to_reg (r_label);
1009 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1010 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1012 /* Restore frame pointer for containing function.
1013 This sets the actual hard register used for the frame pointer
1014 to the location of the function's incoming static chain info.
1015 The non-local goto handler will then adjust it to contain the
1016 proper value and reload the argument pointer, if needed. */
1017 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1018 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
1020 /* USE of hard_frame_pointer_rtx added for consistency;
1021 not clear if really needed. */
1022 emit_use (hard_frame_pointer_rtx);
1023 emit_use (stack_pointer_rtx);
1025 /* If the architecture is using a GP register, we must
1026 conservatively assume that the target function makes use of it.
1027 The prologue of functions with nonlocal gotos must therefore
1028 initialize the GP register to the appropriate value, and we
1029 must then make sure that this value is live at the point
1030 of the jump. (Note that this doesn't necessarily apply
1031 to targets with a nonlocal_goto pattern; they are free
1032 to implement it in their own way. Note also that this is
1033 a no-op if the GP register is a global invariant.) */
1034 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1035 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1036 emit_use (pic_offset_table_rtx);
1038 emit_indirect_jump (r_label);
1041 /* Search backwards to the jump insn and mark it as a
1043 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1047 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1050 else if (CALL_P (insn))
1057 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1058 (not all will be used on all machines) that was passed to __builtin_setjmp.
1059 It updates the stack pointer in that block to correspond to the current
/* Refresh the stack-pointer slot (third word) of a __builtin_setjmp buffer
   at BUF_ADDR so it reflects the current stack pointer.  NOTE(review):
   lossy extract -- some original lines are missing between the numbered
   statements; comments only added, code untouched.  */
1063 expand_builtin_update_setjmp_buf (rtx buf_addr)
/* Pick the mode the target uses for nonlocal stack saves, preferring the
   save_stack_nonlocal insn's operand mode, then STACK_SAVEAREA_MODE.  */
1065 enum machine_mode sa_mode = Pmode;
1069 #ifdef HAVE_save_stack_nonlocal
1070 if (HAVE_save_stack_nonlocal)
1071 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1073 #ifdef STACK_SAVEAREA_MODE
1074 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack-save slot is the third word of the buffer (offset 2*Pmode).  */
1078 = gen_rtx_MEM (sa_mode,
1081 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1085 emit_insn (gen_setjmp ());
1088 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1091 /* Expand a call to __builtin_prefetch. For a target that does not support
1092 data prefetch, evaluate the memory address argument in case it has side
/* Expand a call to __builtin_prefetch (addr [, rw [, locality]]).
   Validates that rw and locality are compile-time constants in range,
   then emits the target's prefetch insn if available; otherwise just
   evaluates the address for its side effects.  NOTE(review): lossy
   extract -- several original lines are missing; comments only added.  */
1096 expand_builtin_prefetch (tree exp)
1098 tree arg0, arg1, arg2;
1102 if (!validate_arglist (exp, POINTER_TYPE, 0))
1105 arg0 = CALL_EXPR_ARG (exp, 0);
1107 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1108 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1110 nargs = call_expr_nargs (exp);
1112 arg1 = CALL_EXPR_ARG (exp, 1);
1114 arg1 = integer_zero_node;
1116 arg2 = CALL_EXPR_ARG (exp, 2);
1118 arg2 = integer_three_node;
1120 /* Argument 0 is an address. */
1121 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1123 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1124 if (TREE_CODE (arg1) != INTEGER_CST)
1126 error ("second argument to %<__builtin_prefetch%> must be a constant");
1127 arg1 = integer_zero_node;
1129 op1 = expand_normal (arg1);
1130 /* Argument 1 must be either zero or one. */
1131 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1133 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1138 /* Argument 2 (locality) must be a compile-time constant int. */
1139 if (TREE_CODE (arg2) != INTEGER_CST)
1141 error ("third argument to %<__builtin_prefetch%> must be a constant");
1142 arg2 = integer_zero_node;
1144 op2 = expand_normal (arg2);
1145 /* Argument 2 must be 0, 1, 2, or 3. */
1146 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1148 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1152 #ifdef HAVE_prefetch
/* Force the address into a Pmode register if the prefetch insn's operand
   predicate rejects it as-is.  */
1155 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1157 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1158 || (GET_MODE (op0) != Pmode))
1160 op0 = convert_memory_address (Pmode, op0);
1161 op0 = force_reg (Pmode, op0);
1163 emit_insn (gen_prefetch (op0, op1, op2));
1167 /* Don't do anything with direct references to volatile memory, but
1168 generate code to handle other side effects. */
1169 if (!MEM_P (op0) && side_effects_p (op0))
1173 /* Get a MEM rtx for expression EXP which is the address of an operand
1174 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1175 the maximum length of the block of memory that might be accessed or
/* Build a BLKmode MEM for the object addressed by EXP, for use as a string
   insn operand (cmpstrsi, movmemsi, ...).  LEN bounds the access, if known.
   Derives MEM attributes from EXP but drops the alias set and size, and
   strips COMPONENT_REFs that the access might overflow.  NOTE(review):
   lossy extract -- original lines are missing throughout; comments only
   added, code untouched.  */
1179 get_memory_rtx (tree exp, tree len)
1181 tree orig_exp = exp;
1185 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1186 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1187 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1188 exp = TREE_OPERAND (exp, 0);
1190 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1191 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1193 /* Get an expression we can use to find the attributes to assign to MEM.
1194 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1195 we can. First remove any nops. */
1196 while (CONVERT_EXPR_P (exp)
1197 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1198 exp = TREE_OPERAND (exp, 0);
/* &obj + positive-constant: remember the offset and use the object.  */
1201 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1202 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1203 && host_integerp (TREE_OPERAND (exp, 1), 0)
1204 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1205 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1206 else if (TREE_CODE (exp) == ADDR_EXPR)
1207 exp = TREE_OPERAND (exp, 0);
1208 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1209 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1213 /* Honor attributes derived from exp, except for the alias set
1214 (as builtin stringops may alias with anything) and the size
1215 (as stringops may access multiple array elements). */
1218 set_mem_attributes (mem, exp, 0);
1221 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1223 /* Allow the string and memory builtins to overflow from one
1224 field into another, see http://gcc.gnu.org/PR23561.
1225 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1226 memory accessed by the string or memory builtin will fit
1227 within the field. */
1228 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1230 tree mem_expr = MEM_EXPR (mem);
1231 HOST_WIDE_INT offset = -1, length = -1;
/* Walk down to the innermost COMPONENT_REF, skipping wrappers.  */
1234 while (TREE_CODE (inner) == ARRAY_REF
1235 || CONVERT_EXPR_P (inner)
1236 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1237 || TREE_CODE (inner) == SAVE_EXPR)
1238 inner = TREE_OPERAND (inner, 0);
1240 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1242 if (MEM_OFFSET (mem)
1243 && CONST_INT_P (MEM_OFFSET (mem)))
1244 offset = INTVAL (MEM_OFFSET (mem));
1246 if (offset >= 0 && len && host_integerp (len, 0))
1247 length = tree_low_cst (len, 0);
1249 while (TREE_CODE (inner) == COMPONENT_REF)
1251 tree field = TREE_OPERAND (inner, 1);
1252 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1253 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1255 /* Bitfields are generally not byte-addressable. */
1256 gcc_assert (!DECL_BIT_FIELD (field)
1257 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1258 % BITS_PER_UNIT) == 0
1259 && host_integerp (DECL_SIZE (field), 0)
1260 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1261 % BITS_PER_UNIT) == 0));
1263 /* If we can prove that the memory starting at XEXP (mem, 0) and
1264 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1265 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1266 fields without DECL_SIZE_UNIT like flexible array members. */
1268 && DECL_SIZE_UNIT (field)
1269 && host_integerp (DECL_SIZE_UNIT (field), 0))
1272 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1275 && offset + length <= size)
1280 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1281 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1282 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
/* Otherwise strip one COMPONENT_REF level and keep looking outward.  */
1290 mem_expr = TREE_OPERAND (mem_expr, 0);
1291 inner = TREE_OPERAND (inner, 0);
1294 if (mem_expr == NULL)
1296 if (mem_expr != MEM_EXPR (mem))
1298 set_mem_expr (mem, mem_expr);
1299 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and touch multiple elements: clear both.  */
1302 set_mem_alias_set (mem, 0);
1303 set_mem_size (mem, NULL_RTX);
1309 /* Built-in functions to perform an untyped call and return. */
1311 #define apply_args_mode \
1312 (this_target_builtins->x_apply_args_mode)
1313 #define apply_result_mode \
1314 (this_target_builtins->x_apply_result_mode)
1316 /* Return the size required for the block returned by __builtin_apply_args,
1317 and initialize apply_args_mode. */
/* Return the size in bytes of the block returned by __builtin_apply_args,
   and record in apply_args_mode[] the mode of each argument register (or
   VOIDmode).  The result is computed once and cached in a function-local
   static.  NOTE(review): lossy extract -- original lines are missing;
   comments only added, code untouched.  */
1320 apply_args_size (void)
1322 static int size = -1;
1325 enum machine_mode mode;
1327 /* The values computed by this function never change. */
1330 /* The first value is the incoming arg-pointer. */
1331 size = GET_MODE_SIZE (Pmode);
1333 /* The second value is the structure value address unless this is
1334 passed as an "invisible" first argument. */
1335 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1336 size += GET_MODE_SIZE (Pmode);
/* Reserve an aligned slot for every register that may carry arguments.  */
1338 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1339 if (FUNCTION_ARG_REGNO_P (regno))
1341 mode = reg_raw_mode[regno];
1343 gcc_assert (mode != VOIDmode);
1345 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1346 if (size % align != 0)
1347 size = CEIL (size, align) * align;
1348 size += GET_MODE_SIZE (mode);
1349 apply_args_mode[regno] = mode;
1353 apply_args_mode[regno] = VOIDmode;
1359 /* Return the size required for the block returned by __builtin_apply,
1360 and initialize apply_result_mode. */
/* Return the size in bytes of the result block used by __builtin_apply,
   and record in apply_result_mode[] the mode of each value-return register
   (or VOIDmode).  Cached in a function-local static, mirroring
   apply_args_size.  NOTE(review): lossy extract -- original lines are
   missing; comments only added, code untouched.  */
1363 apply_result_size (void)
1365 static int size = -1;
1367 enum machine_mode mode;
1369 /* The values computed by this function never change. */
/* Reserve an aligned slot for every register that can return a value.  */
1374 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1375 if (targetm.calls.function_value_regno_p (regno))
1377 mode = reg_raw_mode[regno];
1379 gcc_assert (mode != VOIDmode);
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 size += GET_MODE_SIZE (mode);
1385 apply_result_mode[regno] = mode;
1388 apply_result_mode[regno] = VOIDmode;
1390 /* Allow targets that use untyped_call and untyped_return to override
1391 the size so that machine-specific information can be stored here. */
1392 #ifdef APPLY_RESULT_SIZE
1393 size = APPLY_RESULT_SIZE;
1399 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1400 /* Create a vector describing the result block RESULT. If SAVEP is true,
1401 the result block is used to save the values; otherwise it is used to
1402 restore the values. */
/* Build a PARALLEL of SETs describing the result block RESULT: register
   to memory if SAVEP (save values), memory to register otherwise
   (restore values).  NOTE(review): lossy extract -- original lines are
   missing; comments only added, code untouched.  */
1405 result_vector (int savep, rtx result)
1407 int regno, size, align, nelts;
1408 enum machine_mode mode;
1410 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
/* One SET per value-return register, at its aligned offset in RESULT.  */
1413 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1414 if ((mode = apply_result_mode[regno]) != VOIDmode)
1416 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1417 if (size % align != 0)
1418 size = CEIL (size, align) * align;
1419 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1420 mem = adjust_address (result, mode, size);
1421 savevec[nelts++] = (savep
1422 ? gen_rtx_SET (VOIDmode, mem, reg)
1423 : gen_rtx_SET (VOIDmode, reg, mem));
1424 size += GET_MODE_SIZE (mode);
1426 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1428 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1430 /* Save the state required to perform an untyped call with the same
1431 arguments as were passed to the current function. */
/* Worker for __builtin_apply_args: allocate a stack block and store into
   it the incoming arg pointer, the structure-value address (if any), and
   every register that may carry incoming arguments; return the block's
   address in a register.  NOTE(review): lossy extract -- original lines
   are missing; comments only added, code untouched.  */
1434 expand_builtin_apply_args_1 (void)
1437 int size, align, regno;
1438 enum machine_mode mode;
1439 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1441 /* Create a block where the arg-pointer, structure value address,
1442 and argument registers can be saved. */
1443 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1445 /* Walk past the arg-pointer and structure value address. */
1446 size = GET_MODE_SIZE (Pmode);
1447 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1448 size += GET_MODE_SIZE (Pmode);
1450 /* Save each register used in calling a function to the block. */
1451 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1452 if ((mode = apply_args_mode[regno]) != VOIDmode)
1454 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1455 if (size % align != 0)
1456 size = CEIL (size, align) * align;
1458 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1460 emit_move_insn (adjust_address (registers, mode, size), tem);
1461 size += GET_MODE_SIZE (mode);
1464 /* Save the arg pointer to the block. */
1465 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1466 #ifdef STACK_GROWS_DOWNWARD
1467 /* We need the pointer as the caller actually passed them to us, not
1468 as we might have pretended they were passed. Make sure it's a valid
1469 operand, as emit_move_insn isn't expected to handle a PLUS. */
1471 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1474 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1476 size = GET_MODE_SIZE (Pmode);
1478 /* Save the structure value address unless this is passed as an
1479 "invisible" first argument. */
1480 if (struct_incoming_value)
1482 emit_move_insn (adjust_address (registers, Pmode, size),
1483 copy_to_reg (struct_incoming_value));
1484 size += GET_MODE_SIZE (Pmode);
1487 /* Return the address of the block. */
1488 return copy_addr_to_reg (XEXP (registers, 0));
1491 /* __builtin_apply_args returns block of memory allocated on
1492 the stack into which is stored the arg pointer, structure
1493 value address, static chain, and all the registers that might
1494 possibly be used in performing a function call. The code is
1495 moved to the start of the function so the incoming values are
/* Public entry for __builtin_apply_args.  Emits the register-saving code
   exactly once per function, hoisted to the function entry so the incoming
   register values are still live, and caches the resulting block address
   in apply_args_value.  NOTE(review): lossy extract -- original lines are
   missing; comments only added, code untouched.  */
1499 expand_builtin_apply_args (void)
1501 /* Don't do __builtin_apply_args more than once in a function.
1502 Save the result of the first call and reuse it. */
1503 if (apply_args_value != 0)
1504 return apply_args_value;
1506 /* When this function is called, it means that registers must be
1507 saved on entry to this function. So we migrate the
1508 call to the first insn of this function. */
1513 temp = expand_builtin_apply_args_1 ();
1517 apply_args_value = temp;
1519 /* Put the insns after the NOTE that starts the function.
1520 If this is inside a start_sequence, make the outer-level insn
1521 chain current, so the code is placed at the start of the
1522 function. If internal_arg_pointer is a non-virtual pseudo,
1523 it needs to be placed after the function that initializes
1525 push_topmost_sequence ();
1526 if (REG_P (crtl->args.internal_arg_pointer)
1527 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1528 emit_insn_before (seq, parm_birth_insn);
1530 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1531 pop_topmost_sequence ();
1536 /* Perform an untyped call and save the state required to perform an
1537 untyped return of whatever value was returned by the given function. */
/* Expand __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE): perform an
   untyped call.  Pushes a fresh argument block, copies ARGSIZE bytes of
   arguments from the ARGUMENTS block, reloads the saved argument
   registers, emits the call (untyped_call or call_value), saves the
   return registers into a result block, restores the stack, and returns
   the result block's address.  NOTE(review): lossy extract -- many
   original lines are missing (braces, #else/#endif, declarations);
   comments only added, code untouched.  */
1540 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1542 int size, align, regno;
1543 enum machine_mode mode;
1544 rtx incoming_args, result, reg, dest, src, call_insn;
1545 rtx old_stack_level = 0;
1546 rtx call_fusage = 0;
1547 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1549 arguments = convert_memory_address (Pmode, arguments);
1551 /* Create a block where the return registers can be saved. */
1552 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1554 /* Fetch the arg pointer from the ARGUMENTS block. */
1555 incoming_args = gen_reg_rtx (Pmode);
1556 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1557 #ifndef STACK_GROWS_DOWNWARD
1558 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1559 incoming_args, 0, OPTAB_LIB_WIDEN);
1562 /* Push a new argument block and copy the arguments. Do not allow
1563 the (potential) memcpy call below to interfere with our stack
1565 do_pending_stack_adjust ();
1568 /* Save the stack with nonlocal if available. */
1569 #ifdef HAVE_save_stack_nonlocal
1570 if (HAVE_save_stack_nonlocal)
1571 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1574 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1576 /* Allocate a block of memory onto the stack and copy the memory
1577 arguments to the outgoing arguments address. */
1578 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1580 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1581 may have already set current_function_calls_alloca to true.
1582 current_function_calls_alloca won't be set if argsize is zero,
1583 so we have to guarantee need_drap is true here. */
1584 if (SUPPORTS_STACK_ALIGNMENT)
1585 crtl->need_drap = true;
1587 dest = virtual_outgoing_args_rtx;
1588 #ifndef STACK_GROWS_DOWNWARD
1589 if (CONST_INT_P (argsize))
1590 dest = plus_constant (dest, -INTVAL (argsize));
1592 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1594 dest = gen_rtx_MEM (BLKmode, dest);
1595 set_mem_align (dest, PARM_BOUNDARY);
1596 src = gen_rtx_MEM (BLKmode, incoming_args);
1597 set_mem_align (src, PARM_BOUNDARY);
1598 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1600 /* Refer to the argument block. */
1602 arguments = gen_rtx_MEM (BLKmode, arguments);
1603 set_mem_align (arguments, PARM_BOUNDARY);
1605 /* Walk past the arg-pointer and structure value address. */
1606 size = GET_MODE_SIZE (Pmode);
1608 size += GET_MODE_SIZE (Pmode);
1610 /* Restore each of the registers previously saved. Make USE insns
1611 for each of these registers for use in making the call. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_args_mode[regno]) != VOIDmode)
1615 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1616 if (size % align != 0)
1617 size = CEIL (size, align) * align;
1618 reg = gen_rtx_REG (mode, regno);
1619 emit_move_insn (reg, adjust_address (arguments, mode, size));
1620 use_reg (&call_fusage, reg);
1621 size += GET_MODE_SIZE (mode);
1624 /* Restore the structure value address unless this is passed as an
1625 "invisible" first argument. */
1626 size = GET_MODE_SIZE (Pmode);
1629 rtx value = gen_reg_rtx (Pmode);
1630 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1631 emit_move_insn (struct_value, value);
1632 if (REG_P (struct_value))
1633 use_reg (&call_fusage, struct_value);
1634 size += GET_MODE_SIZE (Pmode);
1637 /* All arguments and registers used for the call are set up by now! */
1638 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1640 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1641 and we don't want to load it into a register as an optimization,
1642 because prepare_call_address already did it if it should be done. */
1643 if (GET_CODE (function) != SYMBOL_REF)
1644 function = memory_address (FUNCTION_MODE, function);
1646 /* Generate the actual call instruction and save the return value. */
1647 #ifdef HAVE_untyped_call
1648 if (HAVE_untyped_call)
1649 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1650 result, result_vector (1, result)));
1653 #ifdef HAVE_call_value
1654 if (HAVE_call_value)
1658 /* Locate the unique return register. It is not possible to
1659 express a call that sets more than one return register using
1660 call_value; use untyped_call for that. In fact, untyped_call
1661 only needs to save the return registers in the given block. */
1662 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1663 if ((mode = apply_result_mode[regno]) != VOIDmode)
1665 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1667 valreg = gen_rtx_REG (mode, regno);
1670 emit_call_insn (GEN_CALL_VALUE (valreg,
1671 gen_rtx_MEM (FUNCTION_MODE, function),
1672 const0_rtx, NULL_RTX, const0_rtx));
1674 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1680 /* Find the CALL insn we just emitted, and attach the register usage
1682 call_insn = last_call_insn ();
1683 add_function_usage_to (call_insn, call_fusage);
1685 /* Restore the stack. */
1686 #ifdef HAVE_save_stack_nonlocal
1687 if (HAVE_save_stack_nonlocal)
1688 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1691 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1695 /* Return the address of the result block. */
1696 result = copy_addr_to_reg (XEXP (result, 0));
1697 return convert_memory_address (ptr_mode, result);
1700 /* Perform an untyped return. */
/* Expand __builtin_return (RESULT): perform an untyped return of the
   values stored in the result block at RESULT, via the target's
   untyped_return pattern if available, otherwise by reloading each
   return register and emitting a naked return.  NOTE(review): lossy
   extract -- original lines are missing; comments only added.  */
1703 expand_builtin_return (rtx result)
1705 int size, align, regno;
1706 enum machine_mode mode;
1708 rtx call_fusage = 0;
1710 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode[].  */
1712 apply_result_size ();
1713 result = gen_rtx_MEM (BLKmode, result);
1715 #ifdef HAVE_untyped_return
1716 if (HAVE_untyped_return)
1718 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1724 /* Restore the return value and note that each value is used. */
1726 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1727 if ((mode = apply_result_mode[regno]) != VOIDmode)
1729 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1730 if (size % align != 0)
1731 size = CEIL (size, align) * align;
1732 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1733 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USEs of the reloaded registers in a side sequence.  */
1735 push_to_sequence (call_fusage);
1737 call_fusage = get_insns ();
1739 size += GET_MODE_SIZE (mode);
1742 /* Put the USE insns before the return. */
1743 emit_insn (call_fusage);
1745 /* Return whatever values was restored by jumping directly to the end
1747 expand_naked_return ();
1750 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree TYPE to the type_class enumeration used by
   __builtin_classify_type.  NOTE(review): lossy extract -- at least the
   UNION_TYPE case line appears to be missing before QUAL_UNION_TYPE;
   comments only added, code untouched.  */
1752 static enum type_class
1753 type_to_class (tree type)
1755 switch (TREE_CODE (type))
1757 case VOID_TYPE: return void_type_class;
1758 case INTEGER_TYPE: return integer_type_class;
1759 case ENUMERAL_TYPE: return enumeral_type_class;
1760 case BOOLEAN_TYPE: return boolean_type_class;
1761 case POINTER_TYPE: return pointer_type_class;
1762 case REFERENCE_TYPE: return reference_type_class;
1763 case OFFSET_TYPE: return offset_type_class;
1764 case REAL_TYPE: return real_type_class;
1765 case COMPLEX_TYPE: return complex_type_class;
1766 case FUNCTION_TYPE: return function_type_class;
1767 case METHOD_TYPE: return method_type_class;
1768 case RECORD_TYPE: return record_type_class;
1770 case QUAL_UNION_TYPE: return union_type_class;
1771 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1772 ? string_type_class : array_type_class);
1773 case LANG_TYPE: return lang_type_class;
1774 default: return no_type_class;
1778 /* Expand a call EXP to __builtin_classify_type. */
/* Expand __builtin_classify_type: classify the (static) type of the first
   argument, or return no_type_class when called with no arguments.  */
1781 expand_builtin_classify_type (tree exp)
1783 if (call_expr_nargs (exp))
1784 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1785 return GEN_INT (no_type_class);
1788 /* This helper macro, meant to be used in mathfn_built_in below,
1789 determines which among a set of three builtin math functions is
1790 appropriate for a given type mode. The `F' and `L' cases are
1791 automatically generated from the `double' case. */
1792 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1793 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1794 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1795 fcodel = BUILT_IN_MATHFN##L ; break;
1796 /* Similar to above, but appends _R after any F/L suffix. */
1797 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1798 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1799 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1800 fcodel = BUILT_IN_MATHFN##L_R ; break;
1802 /* Return mathematic function equivalent to FN but operating directly
1803 on TYPE, if available. If IMPLICIT is true find the function in
1804 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1805 can't do the conversion, return zero. */
/* Return the math builtin equivalent to FN but operating on TYPE
   (double/float/long double variant), looked up in the implicit or
   explicit builtin decl array per IMPLICIT.  The CASE_MATHFN macros
   (defined just above in the original file) set fcode/fcodef/fcodel for
   each function family.  NOTE(review): lossy extract -- the switch
   scaffolding and the final fallthrough return are missing; comments
   only added, code untouched.  */
1808 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1810 tree const *const fn_arr
1811 = implicit ? implicit_built_in_decls : built_in_decls;
1812 enum built_in_function fcode, fcodef, fcodel;
1816 CASE_MATHFN (BUILT_IN_ACOS)
1817 CASE_MATHFN (BUILT_IN_ACOSH)
1818 CASE_MATHFN (BUILT_IN_ASIN)
1819 CASE_MATHFN (BUILT_IN_ASINH)
1820 CASE_MATHFN (BUILT_IN_ATAN)
1821 CASE_MATHFN (BUILT_IN_ATAN2)
1822 CASE_MATHFN (BUILT_IN_ATANH)
1823 CASE_MATHFN (BUILT_IN_CBRT)
1824 CASE_MATHFN (BUILT_IN_CEIL)
1825 CASE_MATHFN (BUILT_IN_CEXPI)
1826 CASE_MATHFN (BUILT_IN_COPYSIGN)
1827 CASE_MATHFN (BUILT_IN_COS)
1828 CASE_MATHFN (BUILT_IN_COSH)
1829 CASE_MATHFN (BUILT_IN_DREM)
1830 CASE_MATHFN (BUILT_IN_ERF)
1831 CASE_MATHFN (BUILT_IN_ERFC)
1832 CASE_MATHFN (BUILT_IN_EXP)
1833 CASE_MATHFN (BUILT_IN_EXP10)
1834 CASE_MATHFN (BUILT_IN_EXP2)
1835 CASE_MATHFN (BUILT_IN_EXPM1)
1836 CASE_MATHFN (BUILT_IN_FABS)
1837 CASE_MATHFN (BUILT_IN_FDIM)
1838 CASE_MATHFN (BUILT_IN_FLOOR)
1839 CASE_MATHFN (BUILT_IN_FMA)
1840 CASE_MATHFN (BUILT_IN_FMAX)
1841 CASE_MATHFN (BUILT_IN_FMIN)
1842 CASE_MATHFN (BUILT_IN_FMOD)
1843 CASE_MATHFN (BUILT_IN_FREXP)
1844 CASE_MATHFN (BUILT_IN_GAMMA)
1845 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1846 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1847 CASE_MATHFN (BUILT_IN_HYPOT)
1848 CASE_MATHFN (BUILT_IN_ILOGB)
1849 CASE_MATHFN (BUILT_IN_INF)
1850 CASE_MATHFN (BUILT_IN_ISINF)
1851 CASE_MATHFN (BUILT_IN_J0)
1852 CASE_MATHFN (BUILT_IN_J1)
1853 CASE_MATHFN (BUILT_IN_JN)
1854 CASE_MATHFN (BUILT_IN_LCEIL)
1855 CASE_MATHFN (BUILT_IN_LDEXP)
1856 CASE_MATHFN (BUILT_IN_LFLOOR)
1857 CASE_MATHFN (BUILT_IN_LGAMMA)
1858 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1859 CASE_MATHFN (BUILT_IN_LLCEIL)
1860 CASE_MATHFN (BUILT_IN_LLFLOOR)
1861 CASE_MATHFN (BUILT_IN_LLRINT)
1862 CASE_MATHFN (BUILT_IN_LLROUND)
1863 CASE_MATHFN (BUILT_IN_LOG)
1864 CASE_MATHFN (BUILT_IN_LOG10)
1865 CASE_MATHFN (BUILT_IN_LOG1P)
1866 CASE_MATHFN (BUILT_IN_LOG2)
1867 CASE_MATHFN (BUILT_IN_LOGB)
1868 CASE_MATHFN (BUILT_IN_LRINT)
1869 CASE_MATHFN (BUILT_IN_LROUND)
1870 CASE_MATHFN (BUILT_IN_MODF)
1871 CASE_MATHFN (BUILT_IN_NAN)
1872 CASE_MATHFN (BUILT_IN_NANS)
1873 CASE_MATHFN (BUILT_IN_NEARBYINT)
1874 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1875 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1876 CASE_MATHFN (BUILT_IN_POW)
1877 CASE_MATHFN (BUILT_IN_POWI)
1878 CASE_MATHFN (BUILT_IN_POW10)
1879 CASE_MATHFN (BUILT_IN_REMAINDER)
1880 CASE_MATHFN (BUILT_IN_REMQUO)
1881 CASE_MATHFN (BUILT_IN_RINT)
1882 CASE_MATHFN (BUILT_IN_ROUND)
1883 CASE_MATHFN (BUILT_IN_SCALB)
1884 CASE_MATHFN (BUILT_IN_SCALBLN)
1885 CASE_MATHFN (BUILT_IN_SCALBN)
1886 CASE_MATHFN (BUILT_IN_SIGNBIT)
1887 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1888 CASE_MATHFN (BUILT_IN_SIN)
1889 CASE_MATHFN (BUILT_IN_SINCOS)
1890 CASE_MATHFN (BUILT_IN_SINH)
1891 CASE_MATHFN (BUILT_IN_SQRT)
1892 CASE_MATHFN (BUILT_IN_TAN)
1893 CASE_MATHFN (BUILT_IN_TANH)
1894 CASE_MATHFN (BUILT_IN_TGAMMA)
1895 CASE_MATHFN (BUILT_IN_TRUNC)
1896 CASE_MATHFN (BUILT_IN_Y0)
1897 CASE_MATHFN (BUILT_IN_Y1)
1898 CASE_MATHFN (BUILT_IN_YN)
/* Select the decl matching TYPE's main variant.  */
1904 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1905 return fn_arr[fcode];
1906 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1907 return fn_arr[fcodef];
1908 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1909 return fn_arr[fcodel];
1914 /* Like mathfn_built_in_1(), but always use the implicit array. */
/* Convenience wrapper: like mathfn_built_in_1, always using the implicit
   builtin decl array.  */
1917 mathfn_built_in (tree type, enum built_in_function fn)
1919 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1922 /* If errno must be maintained, expand the RTL to check if the result,
1923 TARGET, of a built-in function call, EXP, is NaN, and if so set
/* After expanding a math builtin EXP whose result is TARGET, emit RTL that
   checks for a NaN result (TARGET != TARGET) and, if so, sets errno=EDOM --
   either directly when the target provides GEN_ERRNO_RTX / the builtin
   cannot throw, or by re-issuing the library call.  NOTE(review): lossy
   extract -- original lines are missing; comments only added.  */
1927 expand_errno_check (tree exp, rtx target)
1929 rtx lab = gen_label_rtx ();
1931 /* Test the result; if it is NaN, set errno=EDOM because
1932 the argument was not in the domain. */
/* x == x is false only for NaN, so jump over the errno code when equal.  */
1933 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1934 NULL_RTX, NULL_RTX, lab,
1935 /* The jump is very likely. */
1936 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1939 /* If this built-in doesn't throw an exception, set errno directly. */
1940 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1942 #ifdef GEN_ERRNO_RTX
1943 rtx errno_rtx = GEN_ERRNO_RTX;
1946 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1948 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1954 /* Make sure the library call isn't expanded as a tail call. */
1955 CALL_EXPR_TAILCALL (exp) = 0;
1957 /* We can't set errno=EDOM directly; let the library call do it.
1958 Pop the arguments right away in case the call gets deleted. */
1960 expand_call (exp, target, 0);
1965 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1966 Return NULL_RTX if a normal call should be emitted rather than expanding
1967 the function in-line. EXP is the expression that is a call to the builtin
1968 function; if convenient, the result should be placed in TARGET.
1969 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* Expand a call EXP to a unary math builtin (sqrt, exp, log, trig,
   rounding, ...) in-line through the corresponding optab.  Returns the
   result rtx, or falls back to a library call when the optab has no
   handler for the mode.  Emits an errno check when flag_errno_math
   requires it.  NOTE(review): lossy extract -- original lines are missing
   (braces, breaks, fallback path); comments only added, code untouched.  */
1972 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1974 optab builtin_optab;
1976 tree fndecl = get_callee_fndecl (exp);
1977 enum machine_mode mode;
1978 bool errno_set = false;
1981 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1984 arg = CALL_EXPR_ARG (exp, 0);
/* Pick the optab for this builtin; errno_set marks functions that can
   set errno=EDOM on a domain error.  */
1986 switch (DECL_FUNCTION_CODE (fndecl))
1988 CASE_FLT_FN (BUILT_IN_SQRT):
1989 errno_set = ! tree_expr_nonnegative_p (arg);
1990 builtin_optab = sqrt_optab;
1992 CASE_FLT_FN (BUILT_IN_EXP):
1993 errno_set = true; builtin_optab = exp_optab; break;
1994 CASE_FLT_FN (BUILT_IN_EXP10):
1995 CASE_FLT_FN (BUILT_IN_POW10):
1996 errno_set = true; builtin_optab = exp10_optab; break;
1997 CASE_FLT_FN (BUILT_IN_EXP2):
1998 errno_set = true; builtin_optab = exp2_optab; break;
1999 CASE_FLT_FN (BUILT_IN_EXPM1):
2000 errno_set = true; builtin_optab = expm1_optab; break;
2001 CASE_FLT_FN (BUILT_IN_LOGB):
2002 errno_set = true; builtin_optab = logb_optab; break;
2003 CASE_FLT_FN (BUILT_IN_LOG):
2004 errno_set = true; builtin_optab = log_optab; break;
2005 CASE_FLT_FN (BUILT_IN_LOG10):
2006 errno_set = true; builtin_optab = log10_optab; break;
2007 CASE_FLT_FN (BUILT_IN_LOG2):
2008 errno_set = true; builtin_optab = log2_optab; break;
2009 CASE_FLT_FN (BUILT_IN_LOG1P):
2010 errno_set = true; builtin_optab = log1p_optab; break;
2011 CASE_FLT_FN (BUILT_IN_ASIN):
2012 builtin_optab = asin_optab; break;
2013 CASE_FLT_FN (BUILT_IN_ACOS):
2014 builtin_optab = acos_optab; break;
2015 CASE_FLT_FN (BUILT_IN_TAN):
2016 builtin_optab = tan_optab; break;
2017 CASE_FLT_FN (BUILT_IN_ATAN):
2018 builtin_optab = atan_optab; break;
2019 CASE_FLT_FN (BUILT_IN_FLOOR):
2020 builtin_optab = floor_optab; break;
2021 CASE_FLT_FN (BUILT_IN_CEIL):
2022 builtin_optab = ceil_optab; break;
2023 CASE_FLT_FN (BUILT_IN_TRUNC):
2024 builtin_optab = btrunc_optab; break;
2025 CASE_FLT_FN (BUILT_IN_ROUND):
2026 builtin_optab = round_optab; break;
2027 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2028 builtin_optab = nearbyint_optab;
2029 if (flag_trapping_math)
2031 /* Else fallthrough and expand as rint. */
2032 CASE_FLT_FN (BUILT_IN_RINT):
2033 builtin_optab = rint_optab; break;
2034 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2035 builtin_optab = significand_optab; break;
2040 /* Make a suitable register to place result in. */
2041 mode = TYPE_MODE (TREE_TYPE (exp));
/* No errno handling needed when -fno-math-errno or NaNs aren't honored.  */
2043 if (! flag_errno_math || ! HONOR_NANS (mode))
2046 /* Before working hard, check whether the instruction is available. */
2047 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2049 target = gen_reg_rtx (mode);
2051 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2052 need to expand the argument again. This way, we will not perform
2053 side-effects more the once. */
2054 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2056 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2060 /* Compute into TARGET.
2061 Set TARGET to wherever the result comes back. */
2062 target = expand_unop (mode, builtin_optab, op0, target, 0);
2067 expand_errno_check (exp, target);
2069 /* Output the entire sequence. */
2070 insns = get_insns ();
2076 /* If we were unable to expand via the builtin, stop the sequence
2077 (without outputting the insns) and call to the library function
2078 with the stabilized argument list. */
2082 return expand_call (exp, target, target == const0_rtx);
/* NOTE(review): this numbered listing omits some original lines (braces,
   default: cases, end_sequence/emit_insn calls) -- verify control flow
   against the complete source file before relying on it.  */
2085 /* Expand a call to the builtin binary math functions (pow and atan2).
2086 Return NULL_RTX if a normal call should be emitted rather than expanding the
2087 function in-line. EXP is the expression that is a call to the builtin
2088 function; if convenient, the result should be placed in TARGET.
2089 SUBTARGET may be used as the target for computing one of EXP's
2093 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2095 optab builtin_optab;
2096 rtx op0, op1, insns;
2097 int op1_type = REAL_TYPE;
2098 tree fndecl = get_callee_fndecl (exp);
2100 enum machine_mode mode;
2101 bool errno_set = true;
/* The ldexp family takes an integer second argument; everything else
   handled here (pow, atan2, fmod, remainder, ...) takes two reals.  */
2103 switch (DECL_FUNCTION_CODE (fndecl))
2105 CASE_FLT_FN (BUILT_IN_SCALBN):
2106 CASE_FLT_FN (BUILT_IN_SCALBLN):
2107 CASE_FLT_FN (BUILT_IN_LDEXP):
2108 op1_type = INTEGER_TYPE;
2113 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2116 arg0 = CALL_EXPR_ARG (exp, 0);
2117 arg1 = CALL_EXPR_ARG (exp, 1);
/* Map the builtin to its expansion optab.  scalb/scalbn/scalbln are only
   expanded when the float format's radix is 2, in which case scalbn and
   scalbln fall through to the ldexp optab.  */
2119 switch (DECL_FUNCTION_CODE (fndecl))
2121 CASE_FLT_FN (BUILT_IN_POW):
2122 builtin_optab = pow_optab; break;
2123 CASE_FLT_FN (BUILT_IN_ATAN2):
2124 builtin_optab = atan2_optab; break;
2125 CASE_FLT_FN (BUILT_IN_SCALB):
2126 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2128 builtin_optab = scalb_optab; break;
2129 CASE_FLT_FN (BUILT_IN_SCALBN):
2130 CASE_FLT_FN (BUILT_IN_SCALBLN):
2131 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2133 /* Fall through... */
2134 CASE_FLT_FN (BUILT_IN_LDEXP):
2135 builtin_optab = ldexp_optab; break;
2136 CASE_FLT_FN (BUILT_IN_FMOD):
2137 builtin_optab = fmod_optab; break;
2138 CASE_FLT_FN (BUILT_IN_REMAINDER):
2139 CASE_FLT_FN (BUILT_IN_DREM):
2140 builtin_optab = remainder_optab; break;
2145 /* Make a suitable register to place result in. */
2146 mode = TYPE_MODE (TREE_TYPE (exp));
2148 /* Before working hard, check whether the instruction is available. */
2149 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2152 target = gen_reg_rtx (mode);
/* With -fno-math-errno, or when the mode has no NaNs, the post-expansion
   errno check is unnecessary (presumably errno_set is cleared here --
   the assignment statement is among the omitted lines; confirm).  */
2154 if (! flag_errno_math || ! HONOR_NANS (mode))
2157 /* Always stabilize the argument list. */
2158 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2159 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2161 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2162 op1 = expand_normal (arg1);
2166 /* Compute into TARGET.
2167 Set TARGET to wherever the result comes back. */
2168 target = expand_binop (mode, builtin_optab, op0, op1,
2169 target, 0, OPTAB_DIRECT);
2171 /* If we were unable to expand via the builtin, stop the sequence
2172 (without outputting the insns) and call to the library function
2173 with the stabilized argument list. */
2177 return expand_call (exp, target, target == const0_rtx);
2181 expand_errno_check (exp, target);
2183 /* Output the entire sequence. */
2184 insns = get_insns ();
/* NOTE(review): numbered listing with omitted lines (braces, defaults,
   start/end_sequence) -- confirm against the complete source.  */
2191 /* Expand a call to the builtin sin and cos math functions.
2192 Return NULL_RTX if a normal call should be emitted rather than expanding the
2193 function in-line. EXP is the expression that is a call to the builtin
2194 function; if convenient, the result should be placed in TARGET.
2195 SUBTARGET may be used as the target for computing one of EXP's
2199 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2201 optab builtin_optab;
2203 tree fndecl = get_callee_fndecl (exp);
2204 enum machine_mode mode;
2207 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2210 arg = CALL_EXPR_ARG (exp, 0);
/* Both sin and cos first try the combined sincos optab.  */
2212 switch (DECL_FUNCTION_CODE (fndecl))
2214 CASE_FLT_FN (BUILT_IN_SIN):
2215 CASE_FLT_FN (BUILT_IN_COS):
2216 builtin_optab = sincos_optab; break;
2221 /* Make a suitable register to place result in. */
2222 mode = TYPE_MODE (TREE_TYPE (exp));
2224 /* Check if sincos insn is available, otherwise fallback
2225 to sin or cos insn. */
2226 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2227 switch (DECL_FUNCTION_CODE (fndecl))
2229 CASE_FLT_FN (BUILT_IN_SIN):
2230 builtin_optab = sin_optab; break;
2231 CASE_FLT_FN (BUILT_IN_COS):
2232 builtin_optab = cos_optab; break;
2237 /* Before working hard, check whether the instruction is available. */
2238 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2240 target = gen_reg_rtx (mode);
2242 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2243 need to expand the argument again. This way, we will not perform
2244 side-effects more than once. */
2245 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2247 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2251 /* Compute into TARGET.
2252 Set TARGET to wherever the result comes back. */
2253 if (builtin_optab == sincos_optab)
/* sincos produces two results; as these calls show, TARGET is passed as
   the second output slot for SIN and the first for COS, so only the
   requested value is kept.  */
2257 switch (DECL_FUNCTION_CODE (fndecl))
2259 CASE_FLT_FN (BUILT_IN_SIN):
2260 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2262 CASE_FLT_FN (BUILT_IN_COS):
2263 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2268 gcc_assert (result);
2272 target = expand_unop (mode, builtin_optab, op0, target, 0);
2277 /* Output the entire sequence. */
2278 insns = get_insns ();
2284 /* If we were unable to expand via the builtin, stop the sequence
2285 (without outputting the insns) and call to the library function
2286 with the stabilized argument list. */
2290 target = expand_call (exp, target, target == const0_rtx);
2295 /* Given an interclass math builtin decl FNDECL and its argument ARG
2296 return an RTL instruction code that implements the functionality.
2297 If that isn't possible or available return CODE_FOR_nothing. */
2299 static enum insn_code
2300 interclass_mathfn_icode (tree arg, tree fndecl)
2302 bool errno_set = false;
2303 optab builtin_optab = 0;
2304 enum machine_mode mode;
/* Pick an optab per builtin; the isfinite/isnormal/decimal-float cases
   deliberately leave builtin_optab at 0 (no optab exists yet).  */
2306 switch (DECL_FUNCTION_CODE (fndecl))
2308 CASE_FLT_FN (BUILT_IN_ILOGB):
2309 errno_set = true; builtin_optab = ilogb_optab; break;
2310 CASE_FLT_FN (BUILT_IN_ISINF):
2311 builtin_optab = isinf_optab; break;
2312 case BUILT_IN_ISNORMAL:
2313 case BUILT_IN_ISFINITE:
2314 CASE_FLT_FN (BUILT_IN_FINITE):
2315 case BUILT_IN_FINITED32:
2316 case BUILT_IN_FINITED64:
2317 case BUILT_IN_FINITED128:
2318 case BUILT_IN_ISINFD32:
2319 case BUILT_IN_ISINFD64:
2320 case BUILT_IN_ISINFD128:
2321 /* These builtins have no optabs (yet). */
2327 /* There's no easy way to detect the case we need to set EDOM. */
2328 if (flag_errno_math && errno_set)
2329 return CODE_FOR_nothing;
2331 /* Optab mode depends on the mode of the input argument. */
2332 mode = TYPE_MODE (TREE_TYPE (arg));
/* NOTE(review): the guard on builtin_optab being non-null (original line
   2334 of the numbering) is omitted from this listing; as written these
   two statements cannot both be reachable -- confirm in the full file.  */
2335 return optab_handler (builtin_optab, mode);
2336 return CODE_FOR_nothing;
2339 /* Expand a call to one of the builtin math functions that operate on
2340 floating point argument and output an integer result (ilogb, isinf,
2342 Return 0 if a normal call should be emitted rather than expanding the
2343 function in-line. EXP is the expression that is a call to the builtin
2344 function; if convenient, the result should be placed in TARGET.
2345 SUBTARGET may be used as the target for computing one of EXP's operands. */
2348 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2350 enum insn_code icode = CODE_FOR_nothing;
2352 tree fndecl = get_callee_fndecl (exp);
2353 enum machine_mode mode;
2356 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2359 arg = CALL_EXPR_ARG (exp, 0);
2360 icode = interclass_mathfn_icode (arg, fndecl);
/* MODE is the argument's mode; the result mode comes from TREE_TYPE (exp).  */
2361 mode = TYPE_MODE (TREE_TYPE (arg));
2363 if (icode != CODE_FOR_nothing)
/* Remember the insn stream position so a failed emit can be rolled back
   with delete_insns_since below.  */
2365 rtx last = get_last_insn ();
2366 tree orig_arg = arg;
2367 /* Make a suitable register to place result in. */
2369 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2370 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2371 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2373 gcc_assert (insn_data[icode].operand[0].predicate
2374 (target, GET_MODE (target)));
2376 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2377 need to expand the argument again. This way, we will not perform
2378 side-effects more than once. */
2379 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2381 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2383 if (mode != GET_MODE (op0))
2384 op0 = convert_to_mode (mode, op0, 0);
2386 /* Compute into TARGET.
2387 Set TARGET to wherever the result comes back. */
2388 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
/* Emission failed: discard the partial insns and restore the original
   (un-SAVE_EXPR'd) argument before falling back.  */
2390 delete_insns_since (last);
2391 CALL_EXPR_ARG (exp, 0) = orig_arg;
2397 /* Expand a call to the builtin sincos math function.
2398 Return NULL_RTX if a normal call should be emitted rather than expanding the
2399 function in-line. EXP is the expression that is a call to the builtin
2403 expand_builtin_sincos (tree exp)
2405 rtx op0, op1, op2, target1, target2;
2406 enum machine_mode mode;
2407 tree arg, sinp, cosp;
2409 location_t loc = EXPR_LOCATION (exp);
/* sincos (x, double *sinp, double *cosp): one real plus two pointers.  */
2411 if (!validate_arglist (exp, REAL_TYPE,
2412 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2415 arg = CALL_EXPR_ARG (exp, 0);
2416 sinp = CALL_EXPR_ARG (exp, 1);
2417 cosp = CALL_EXPR_ARG (exp, 2);
2419 /* Make a suitable register to place result in. */
2420 mode = TYPE_MODE (TREE_TYPE (arg));
2422 /* Check if sincos insn is available, otherwise emit the call. */
2423 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2426 target1 = gen_reg_rtx (mode);
2427 target2 = gen_reg_rtx (mode);
2429 op0 = expand_normal (arg);
/* op1/op2 are the dereferenced destinations *sinp and *cosp.  */
2430 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2431 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2433 /* Compute into target1 and target2.
2434 Set TARGET to wherever the result comes back. */
2435 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2436 gcc_assert (result);
2438 /* Move target1 and target2 to the memory locations indicated
2440 emit_move_insn (op1, target1);
2441 emit_move_insn (op2, target2);
2446 /* Expand a call to the internal cexpi builtin to the sincos math function.
2447 EXP is the expression that is a call to the builtin function; if convenient,
2448 the result should be placed in TARGET. SUBTARGET may be used as the target
2449 for computing one of EXP's operands. */
2452 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2454 tree fndecl = get_callee_fndecl (exp);
2456 enum machine_mode mode;
2458 location_t loc = EXPR_LOCATION (exp);
2460 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2463 arg = CALL_EXPR_ARG (exp, 0);
2464 type = TREE_TYPE (arg);
2465 mode = TYPE_MODE (TREE_TYPE (arg));
2467 /* Try expanding via a sincos optab, fall back to emitting a libcall
2468 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2469 is only generated from sincos, cexp or if we have either of them. */
2470 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
/* Strategy 1: direct sincos instruction.  op1 receives sin, op2 cos
   (see the COMPLEX_EXPR built from op2/op1 at the end).  */
2472 op1 = gen_reg_rtx (mode);
2473 op2 = gen_reg_rtx (mode);
2475 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2477 /* Compute into op1 and op2. */
2478 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2480 else if (TARGET_HAS_SINCOS)
/* Strategy 2: libcall to sincos{,f,l}, writing through two stack
   temporaries whose addresses are passed as tree operands.  */
2482 tree call, fn = NULL_TREE;
2486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2487 fn = built_in_decls[BUILT_IN_SINCOSF];
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2489 fn = built_in_decls[BUILT_IN_SINCOS];
2490 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2491 fn = built_in_decls[BUILT_IN_SINCOSL];
2495 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2496 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2497 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2498 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2499 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2500 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2502 /* Make sure not to fold the sincos call again. */
2503 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2504 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2505 call, 3, arg, top1, top2));
/* Strategy 3 (else branch): libcall to cexp{,f,l} on 0 + arg*i.  */
2509 tree call, fn = NULL_TREE, narg;
2510 tree ctype = build_complex_type (type);
2512 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2513 fn = built_in_decls[BUILT_IN_CEXPF];
2514 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2515 fn = built_in_decls[BUILT_IN_CEXP];
2516 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2517 fn = built_in_decls[BUILT_IN_CEXPL];
2521 /* If we don't have a decl for cexp create one. This is the
2522 friendliest fallback if the user calls __builtin_cexpi
2523 without full target C99 function support. */
2524 if (fn == NULL_TREE)
2527 const char *name = NULL;
/* NOTE(review): the string assignments to NAME (e.g. "cexpf"/"cexp"/
   "cexpl") are among the lines omitted from this listing.  */
2529 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2531 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2533 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2536 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2537 fn = build_fn_decl (name, fntype);
/* Build the pure-imaginary argument 0 + arg*i.  */
2540 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2541 build_real (type, dconst0), arg);
2543 /* Make sure not to fold the cexp call again. */
2544 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2545 return expand_expr (build_call_nary (ctype, call, 1, narg),
2546 target, VOIDmode, EXPAND_NORMAL);
2549 /* Now build the proper return type. */
2550 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2551 make_tree (TREE_TYPE (arg), op2),
2552 make_tree (TREE_TYPE (arg), op1)),
2553 target, VOIDmode, EXPAND_NORMAL);
2556 /* Conveniently construct a function call expression. FNDECL names the
2557 function to be called, N is the number of arguments, and the "..."
2558 parameters are the argument expressions. Unlike build_call_expr
2559 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2562 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2565 tree fntype = TREE_TYPE (fndecl);
2566 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* NOTE(review): the va_start/va_end lines around this call are omitted
   from this listing.  */
2569 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2571 SET_EXPR_LOCATION (fn, loc);
2575 /* Expand a call to one of the builtin rounding functions gcc defines
2576 as an extension (lfloor and lceil). As these are gcc extensions we
2577 do not need to worry about setting errno to EDOM.
2578 If expanding via optab fails, lower expression to (int)(floor(x)).
2579 EXP is the expression that is a call to the builtin function;
2580 if convenient, the result should be placed in TARGET. */
2583 expand_builtin_int_roundingfn (tree exp, rtx target)
2585 convert_optab builtin_optab;
2586 rtx op0, insns, tmp;
2587 tree fndecl = get_callee_fndecl (exp);
2588 enum built_in_function fallback_fn;
2589 tree fallback_fndecl;
2590 enum machine_mode mode;
2593 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2596 arg = CALL_EXPR_ARG (exp, 0);
/* Select the direct conversion optab and the floating-point builtin to
   fall back on if the optab expansion fails.  */
2598 switch (DECL_FUNCTION_CODE (fndecl))
2600 CASE_FLT_FN (BUILT_IN_LCEIL):
2601 CASE_FLT_FN (BUILT_IN_LLCEIL):
2602 builtin_optab = lceil_optab;
2603 fallback_fn = BUILT_IN_CEIL;
2606 CASE_FLT_FN (BUILT_IN_LFLOOR):
2607 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2608 builtin_optab = lfloor_optab;
2609 fallback_fn = BUILT_IN_FLOOR;
2616 /* Make a suitable register to place result in. */
2617 mode = TYPE_MODE (TREE_TYPE (exp));
2619 target = gen_reg_rtx (mode);
2621 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2622 need to expand the argument again. This way, we will not perform
2623 side-effects more than once. */
2624 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2626 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2630 /* Compute into TARGET. */
2631 if (expand_sfix_optab (target, op0, builtin_optab))
2633 /* Output the entire sequence. */
2634 insns = get_insns ();
2640 /* If we were unable to expand via the builtin, stop the sequence
2641 (without outputting the insns). */
2644 /* Fall back to floating point rounding optab. */
2645 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2647 /* For non-C99 targets we may end up without a fallback fndecl here
2648 if the user called __builtin_lfloor directly. In this case emit
2649 a call to the floor/ceil variants nevertheless. This should result
2650 in the best user experience for not full C99 targets. */
2651 if (fallback_fndecl == NULL_TREE)
2654 const char *name = NULL;
/* NOTE(review): the NAME string assignments (e.g. "ceil", "floorf")
   between these case labels are omitted from this listing.  */
2656 switch (DECL_FUNCTION_CODE (fndecl))
2658 case BUILT_IN_LCEIL:
2659 case BUILT_IN_LLCEIL:
2662 case BUILT_IN_LCEILF:
2663 case BUILT_IN_LLCEILF:
2666 case BUILT_IN_LCEILL:
2667 case BUILT_IN_LLCEILL:
2670 case BUILT_IN_LFLOOR:
2671 case BUILT_IN_LLFLOOR:
2674 case BUILT_IN_LFLOORF:
2675 case BUILT_IN_LLFLOORF:
2678 case BUILT_IN_LFLOORL:
2679 case BUILT_IN_LLFLOORL:
2686 fntype = build_function_type_list (TREE_TYPE (arg),
2687 TREE_TYPE (arg), NULL_TREE);
2688 fallback_fndecl = build_fn_decl (name, fntype);
/* Expand the floor/ceil call, then convert its FP result to the
   integer result mode.  */
2691 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2693 tmp = expand_normal (exp);
2695 /* Truncate the result of floating point optab to integer
2696 via expand_fix (). */
2697 target = gen_reg_rtx (mode);
2698 expand_fix (target, tmp, 0);
2703 /* Expand a call to one of the builtin math functions doing integer
2705 Return 0 if a normal call should be emitted rather than expanding the
2706 function in-line. EXP is the expression that is a call to the builtin
2707 function; if convenient, the result should be placed in TARGET. */
2710 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2712 convert_optab builtin_optab;
2714 tree fndecl = get_callee_fndecl (exp);
2716 enum machine_mode mode;
2718 /* There's no easy way to detect the case we need to set EDOM. */
2719 if (flag_errno_math)
2722 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2725 arg = CALL_EXPR_ARG (exp, 0);
/* lrint/llrint and lround/llround map to their conversion optabs.  */
2727 switch (DECL_FUNCTION_CODE (fndecl))
2729 CASE_FLT_FN (BUILT_IN_LRINT):
2730 CASE_FLT_FN (BUILT_IN_LLRINT):
2731 builtin_optab = lrint_optab; break;
2732 CASE_FLT_FN (BUILT_IN_LROUND):
2733 CASE_FLT_FN (BUILT_IN_LLROUND):
2734 builtin_optab = lround_optab; break;
2739 /* Make a suitable register to place result in. */
2740 mode = TYPE_MODE (TREE_TYPE (exp));
2742 target = gen_reg_rtx (mode);
2744 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2745 need to expand the argument again. This way, we will not perform
2746 side-effects more than once. */
2747 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2749 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2753 if (expand_sfix_optab (target, op0, builtin_optab))
2755 /* Output the entire sequence. */
2756 insns = get_insns ();
2762 /* If we were unable to expand via the builtin, stop the sequence
2763 (without outputting the insns) and call to the library function
2764 with the stabilized argument list. */
2767 target = expand_call (exp, target, target == const0_rtx);
2772 /* To evaluate powi(x,n), the floating point value x raised to the
2773 constant integer exponent n, we use a hybrid algorithm that
2774 combines the "window method" with look-up tables. For an
2775 introduction to exponentiation algorithms and "addition chains",
2776 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2777 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2778 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2779 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2781 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2782 multiplications to inline before calling the system library's pow
2783 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2784 so this default never requires calling pow, powf or powl. */
2786 #ifndef POWI_MAX_MULTS
2787 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2790 /* The size of the "optimal power tree" lookup table. All
2791 exponents less than this value are simply looked up in the
2792 powi_table below. This threshold is also used to size the
2793 cache of pseudo registers that hold intermediate results. */
2794 #define POWI_TABLE_SIZE 256
2796 /* The size, in bits of the window, used in the "window method"
2797 exponentiation algorithm. This is equivalent to a radix of
2798 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2799 #define POWI_WINDOW_SIZE 3
2801 /* The following table is an efficient representation of an
2802 "optimal power tree". For each value, i, the corresponding
2803 value, j, in the table states that an optimal evaluation
2804 sequence for calculating pow(x,i) can be found by evaluating
2805 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2806 100 integers is given in Knuth's "Seminumerical algorithms". */
2808 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2810 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2811 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2812 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2813 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2814 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2815 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2816 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2817 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2818 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2819 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2820 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2821 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2822 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2823 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2824 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2825 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2826 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2827 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2828 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2829 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2830 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2831 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2832 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2833 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2834 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2835 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2836 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2837 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2838 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2839 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2840 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2841 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2845 /* Return the number of multiplications required to calculate
2846 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2847 subroutine of powi_cost. CACHE is an array indicating
2848 which exponents have already been calculated. */
2851 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2853 /* If we've already calculated this exponent, then this evaluation
2854 doesn't require any additional multiplications. */
/* Recurse per the optimal-power-tree split: cost(n) =
   cost(n - powi_table[n]) + cost(powi_table[n]) + 1 multiply.  */
2859 return powi_lookup_cost (n - powi_table[n], cache)
2860 + powi_lookup_cost (powi_table[n], cache) + 1;
2863 /* Return the number of multiplications required to calculate
2864 powi(x,n) for an arbitrary x, given the exponent N. This
2865 function needs to be kept in sync with expand_powi below. */
2868 powi_cost (HOST_WIDE_INT n)
2870 bool cache[POWI_TABLE_SIZE];
2871 unsigned HOST_WIDE_INT digit;
2872 unsigned HOST_WIDE_INT val;
2878 /* Ignore the reciprocal when calculating the cost. */
2879 val = (n < 0) ? -n : n;
2881 /* Initialize the exponent cache. */
2882 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel off POWI_WINDOW_SIZE bits at a time until the
   remaining exponent fits in the lookup table.  */
2887 while (val >= POWI_TABLE_SIZE)
2891 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2892 result += powi_lookup_cost (digit, cache)
2893 + POWI_WINDOW_SIZE + 1;
2894 val >>= POWI_WINDOW_SIZE;
2903 return result + powi_lookup_cost (val, cache);
2906 /* Recursive subroutine of expand_powi. This function takes the array,
2907 CACHE, of already calculated exponents and an exponent N and returns
2908 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2911 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2913 unsigned HOST_WIDE_INT digit;
/* Small exponent: use the optimal-power-tree split from powi_table
   (cache hits are returned directly -- that check is among the lines
   omitted from this listing).  */
2917 if (n < POWI_TABLE_SIZE)
2922 target = gen_reg_rtx (mode);
2925 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2926 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Odd large exponent: split off the low POWI_WINDOW_SIZE bits.  */
2930 target = gen_reg_rtx (mode);
2931 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2932 op0 = expand_powi_1 (mode, n - digit, cache);
2933 op1 = expand_powi_1 (mode, digit, cache);
/* Even large exponent: square the half power (op1 = op0).  */
2937 target = gen_reg_rtx (mode);
2938 op0 = expand_powi_1 (mode, n >> 1, cache);
2942 result = expand_mult (mode, op0, op1, target, 0);
2943 if (result != target)
2944 emit_move_insn (target, result);
2948 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2949 floating point operand in mode MODE, and N is the exponent. This
2950 function needs to be kept in sync with powi_cost above. */
2953 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2955 rtx cache[POWI_TABLE_SIZE];
/* powi(x, 0) == 1 for any x.  */
2959 return CONST1_RTX (mode);
2961 memset (cache, 0, sizeof (cache));
/* Compute |n|-th power; negative exponents are handled by the
   reciprocal below.  */
2964 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2966 /* If the original exponent was negative, reciprocate the result. */
2968 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2969 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2974 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
2975 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
2976 if we can simplify it. */
2978 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
/* All of these rewrites require a constant, non-overflowing exponent and
   -funsafe-math-optimizations.  */
2981 if (TREE_CODE (arg1) == REAL_CST
2982 && !TREE_OVERFLOW (arg1)
2983 && flag_unsafe_math_optimizations)
2985 enum machine_mode mode = TYPE_MODE (type);
2986 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
2987 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
2988 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
2989 tree op = NULL_TREE;
2993 /* Optimize pow (x, 0.5) into sqrt. */
2994 if (REAL_VALUES_EQUAL (c, dconsthalf))
2995 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
/* Build the constants 0.25 (1 with exponent lowered by 2) and 0.75
   (3 with exponent lowered by 2).  */
2999 REAL_VALUE_TYPE dconst1_4 = dconst1;
3000 REAL_VALUE_TYPE dconst3_4;
3001 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3003 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3004 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3006 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3007 machines that a builtin sqrt instruction is smaller than a
3008 call to pow with 0.25, so do this optimization even if
3010 if (REAL_VALUES_EQUAL (c, dconst1_4))
3012 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3013 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3016 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3017 are optimizing for space. */
3018 else if (optimize_insn_for_speed_p ()
3019 && !TREE_SIDE_EFFECTS (arg0)
3020 && REAL_VALUES_EQUAL (c, dconst3_4))
3022 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3023 tree sqrt2 = builtin_save_expr (sqrt1);
3024 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3025 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3030 /* Check whether we can do cbrt instead of pow (x, 1./3.) and
3031 cbrt/sqrts instead of pow (x, 1./6.). */
/* cbrt differs from pow for negative x, hence the nonnegative/no-NaNs
   restriction.  */
3033 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3035 /* First try 1/3. */
3036 REAL_VALUE_TYPE dconst1_3
3037 = real_value_truncate (mode, dconst_third ());
3039 if (REAL_VALUES_EQUAL (c, dconst1_3))
3040 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3043 else if (optimize_insn_for_speed_p ())
/* 1/6 is 1/3 with the binary exponent lowered by one.  */
3045 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3046 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3048 if (REAL_VALUES_EQUAL (c, dconst1_6))
/* pow (x, 1/6) == cbrt (sqrt (x)).  */
3050 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3051 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3057 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
/* NOTE(review): numbered listing with omitted lines (returns, braces,
   parts of conditions) -- verify against the complete source.  */
3063 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3064 a normal call should be emitted rather than expanding the function
3065 in-line. EXP is the expression that is a call to the builtin
3066 function; if convenient, the result should be placed in TARGET. */
3069 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3073 tree type = TREE_TYPE (exp);
3074 REAL_VALUE_TYPE cint, c, c2;
3077 enum machine_mode mode = TYPE_MODE (type);
3079 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3082 arg0 = CALL_EXPR_ARG (exp, 0);
3083 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponent: defer to the generic binary-math expander.  */
3085 if (TREE_CODE (arg1) != REAL_CST
3086 || TREE_OVERFLOW (arg1))
3087 return expand_builtin_mathfn_2 (exp, target, subtarget);
3089 /* Handle constant exponents. */
3091 /* For integer valued exponents we can expand to an optimal multiplication
3092 sequence using expand_powi. */
3093 c = TREE_REAL_CST (arg1);
3094 n = real_to_integer (&c);
3095 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* Exponents -1, 0, 1, 2 are exact; larger ones need unsafe math and a
   multiplication count within POWI_MAX_MULTS.  */
3096 if (real_identical (&c, &cint)
3097 && ((n >= -1 && n <= 2)
3098 || (flag_unsafe_math_optimizations
3099 && optimize_insn_for_speed_p ()
3100 && powi_cost (n) <= POWI_MAX_MULTS)))
3102 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3105 op = force_reg (mode, op);
3106 op = expand_powi (op, mode, n);
/* Stabilize ARG0 -- it is expanded again below.  */
3111 narg0 = builtin_save_expr (arg0);
3113 /* If the exponent is not integer valued, check if it is half of an integer.
3114 In this case we can expand to sqrt (x) * x**(n/2). */
3115 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3116 if (fn != NULL_TREE)
/* n = 2*c; if that is an integer then c = n/2 is a half-integer.  */
3118 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3119 n = real_to_integer (&c2);
3120 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3121 if (real_identical (&c2, &cint)
3122 && ((flag_unsafe_math_optimizations
3123 && optimize_insn_for_speed_p ()
3124 && powi_cost (n/2) <= POWI_MAX_MULTS)
3125 /* Even the c == 0.5 case cannot be done unconditionally
3126 when we need to preserve signed zeros, as
3127 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3128 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3129 /* For c == 1.5 we can assume that x * sqrt (x) is always
3130 smaller than pow (x, 1.5) if sqrt will not be expanded
3133 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3135 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3137 /* Use expand_expr in case the newly built call expression
3138 was folded to a non-call. */
3139 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
/* Multiply sqrt(x) by x**(n/2).  */
3142 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3143 op2 = force_reg (mode, op2);
3144 op2 = expand_powi (op2, mode, abs (n / 2));
3145 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3146 0, OPTAB_LIB_WIDEN);
3147 /* If the original exponent was negative, reciprocate the
3150 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3151 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3157 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3159 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3164 /* Try if the exponent is a third of an integer. In this case
3165 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3166 different from pow (x, 1./3.) due to rounding and behavior
3167 with negative x we need to constrain this transformation to
3168 unsafe math and positive x or finite math. */
3169 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3171 && flag_unsafe_math_optimizations
3172 && (tree_expr_nonnegative_p (arg0)
3173 || !HONOR_NANS (mode)))
/* Round 3*c to the nearest integer n, then check that n/3 converts
   back to exactly c, i.e. c really is a third of an integer.  */
3175 REAL_VALUE_TYPE dconst3;
3176 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3177 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3178 real_round (&c2, mode, &c2);
3179 n = real_to_integer (&c2);
3180 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3181 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3182 real_convert (&c2, mode, &c2);
3183 if (real_identical (&c2, &c)
3184 && ((optimize_insn_for_speed_p ()
3185 && powi_cost (n/3) <= POWI_MAX_MULTS)
3188 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3190 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* cbrt(x)**2 when n mod 3 == 2.  */
3191 if (abs (n) % 3 == 2)
3192 op = expand_simple_binop (mode, MULT, op, op, op,
3193 0, OPTAB_LIB_WIDEN);
/* Multiply by x**(n/3).  */
3196 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3197 op2 = force_reg (mode, op2);
3198 op2 = expand_powi (op2, mode, abs (n / 3));
3199 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3200 0, OPTAB_LIB_WIDEN);
3201 /* If the original exponent was negative, reciprocate the
3204 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3205 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3211 /* Fall back to optab expansion. */
3212 return expand_builtin_mathfn_2 (exp, target, subtarget);
3215 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3216 a normal call should be emitted rather than expanding the function
3217 in-line. EXP is the expression that is a call to the builtin
3218 function; if convenient, the result should be placed in TARGET. */
/* Expander for __builtin_powi (real base, int exponent).  A constant
   exponent that is cheap enough is expanded to an inline multiply
   sequence via expand_powi; otherwise a powi libcall is emitted.
   NOTE(review): several original lines (guards/returns) were lost in
   extraction; the visible flow is documented below.  */
3221 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3225 enum machine_mode mode;
3226 enum machine_mode mode2;
/* Give up (normal call) unless the call is (REAL, INTEGER).  */
3228 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3231 arg0 = CALL_EXPR_ARG (exp, 0);
3232 arg1 = CALL_EXPR_ARG (exp, 1);
3233 mode = TYPE_MODE (TREE_TYPE (exp));
3235 /* Handle constant power. */
3237 if (TREE_CODE (arg1) == INTEGER_CST
3238 && !TREE_OVERFLOW (arg1))
3240 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3242 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3243 Otherwise, check the number of multiplications required. */
/* The HIGH-word test below ensures the constant is a proper sign
   extension, i.e. the exponent really fits in N.  */
3244 if ((TREE_INT_CST_HIGH (arg1) == 0
3245 || TREE_INT_CST_HIGH (arg1) == -1)
3246 && ((n >= -1 && n <= 2)
3247 || (optimize_insn_for_speed_p ()
3248 && powi_cost (n) <= POWI_MAX_MULTS)))
3250 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3251 op0 = force_reg (mode, op0);
3252 return expand_powi (op0, mode, n);
3256 /* Emit a libcall to libgcc. */
3258 /* Mode of the 2nd argument must match that of an int. */
3259 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3261 if (target == NULL_RTX)
3262 target = gen_reg_rtx (mode);
/* Force both operands into the modes the libcall expects.  */
3264 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3265 if (GET_MODE (op0) != mode)
3266 op0 = convert_to_mode (mode, op0, 0);
3267 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3268 if (GET_MODE (op1) != mode2)
3269 op1 = convert_to_mode (mode2, op1, 0);
/* LCT_CONST: the powi libcall depends only on its arguments.  */
3271 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3272 target, LCT_CONST, mode, 2,
3273 op0, mode, op1, mode2);
3278 /* Expand expression EXP which is a call to the strlen builtin. Return
3279 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3280 try to get the result in TARGET, if convenient. */
/* Expander for strlen (see block comment above): constant-fold when
   c_strlen can compute the length, otherwise try the target's strlen
   insn (strlen_optab) in TARGET_MODE or a wider mode.  */
3283 expand_builtin_strlen (tree exp, rtx target,
3284 enum machine_mode target_mode)
3286 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3292 tree src = CALL_EXPR_ARG (exp, 0);
3293 rtx result, src_reg, char_rtx, before_strlen;
3294 enum machine_mode insn_mode = target_mode, char_mode;
3295 enum insn_code icode = CODE_FOR_nothing;
3298 /* If the length can be computed at compile-time, return it. */
3299 len = c_strlen (src, 0);
3301 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3303 /* If the length can be computed at compile-time and is constant
3304 integer, but there are side-effects in src, evaluate
3305 src for side-effects, then return len.
3306 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3307 can be optimized into: i++; x = 3; */
3308 len = c_strlen (src, 1);
3309 if (len && TREE_CODE (len) == INTEGER_CST)
/* Expand SRC into a scratch (const0_rtx target) purely for its
   side effects; the value is the known constant LEN.  */
3311 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3312 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
/* Alignment in bytes, used as an operand of the strlen insn.  */
3315 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3317 /* If SRC is not a pointer type, don't do this operation inline. */
3321 /* Bail out if we can't compute strlen in the right mode. */
3322 while (insn_mode != VOIDmode)
3324 icode = optab_handler (strlen_optab, insn_mode);
3325 if (icode != CODE_FOR_nothing)
3328 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3330 if (insn_mode == VOIDmode)
3333 /* Make a place to write the result of the instruction. */
3337 && GET_MODE (result) == insn_mode
3338 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3339 result = gen_reg_rtx (insn_mode);
3341 /* Make a place to hold the source address. We will not expand
3342 the actual source until we are sure that the expansion will
3343 not fail -- there are trees that cannot be expanded twice. */
3344 src_reg = gen_reg_rtx (Pmode);
3346 /* Mark the beginning of the strlen sequence so we can emit the
3347 source operand later. */
3348 before_strlen = get_last_insn ();
3350 char_rtx = const0_rtx;
3351 char_mode = insn_data[(int) icode].operand[2].mode;
/* The NUL character operand must satisfy the insn's predicate.  */
3352 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3354 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3356 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3357 char_rtx, GEN_INT (align));
3362 /* Now that we are assured of success, expand the source. */
3364 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3366 emit_move_insn (src_reg, pat);
/* Splice the address computation in ahead of the strlen insn.  */
3371 emit_insn_after (pat, before_strlen);
3373 emit_insn_before (pat, get_insns ());
3375 /* Return the value in the proper mode for this function. */
3376 if (GET_MODE (result) == target_mode)
3378 else if (target != 0)
3379 convert_move (target, result, 0);
3381 target = convert_to_mode (target_mode, result, 0);
3387 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3388 bytes from constant string DATA + OFFSET and return it as target
/* store_by_pieces callback: DATA is the source string; return the
   GET_MODE_SIZE (MODE) bytes at DATA + OFFSET as a target constant.
   The assert guarantees the read stays within the string plus its
   NUL terminator.  */
3392 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3393 enum machine_mode mode)
3395 const char *str = (const char *) data;
3397 gcc_assert (offset >= 0
3398 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3399 <= strlen (str) + 1));
3401 return c_readstr (str + offset, mode);
3404 /* Expand a call EXP to the memcpy builtin.
3405 Return NULL_RTX if we failed, the caller should emit a normal call,
3406 otherwise try to get the result in TARGET, if convenient (and in
3407 mode MODE if that's convenient). */
/* Expander for memcpy (contract in block comment above): prefer a
   store-by-pieces of a constant source string, else a block move.  */
3410 expand_builtin_memcpy (tree exp, rtx target)
3412 if (!validate_arglist (exp,
3413 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3417 tree dest = CALL_EXPR_ARG (exp, 0);
3418 tree src = CALL_EXPR_ARG (exp, 1);
3419 tree len = CALL_EXPR_ARG (exp, 2);
3420 const char *src_str;
3421 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3422 unsigned int dest_align
3423 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3424 rtx dest_mem, src_mem, dest_addr, len_rtx;
3425 HOST_WIDE_INT expected_size = -1;
3426 unsigned int expected_align = 0;
3428 /* If DEST is not a pointer type, call the normal function. */
3429 if (dest_align == 0)
3432 /* If either SRC is not a pointer type, don't do this
3433 operation in-line. */
/* Profile feedback may supply better alignment/size estimates.  */
3437 if (currently_expanding_gimple_stmt)
3438 stringop_block_profile (currently_expanding_gimple_stmt,
3439 &expected_align, &expected_size);
3441 if (expected_align < dest_align)
3442 expected_align = dest_align;
3443 dest_mem = get_memory_rtx (dest, len);
3444 set_mem_align (dest_mem, dest_align);
3445 len_rtx = expand_normal (len);
3446 src_str = c_getstr (src);
3448 /* If SRC is a string constant and block move would be done
3449 by pieces, we can avoid loading the string from memory
3450 and only store the computed constants. */
3452 && CONST_INT_P (len_rtx)
3453 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3454 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3455 CONST_CAST (char *, src_str),
3458 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3459 builtin_memcpy_read_str,
3460 CONST_CAST (char *, src_str),
3461 dest_align, false, 0);
/* The return value is DEST, as a ptr_mode address.  */
3462 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3463 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3467 src_mem = get_memory_rtx (src, len);
3468 set_mem_align (src_mem, src_align);
3470 /* Copy word part most expediently. */
3471 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3472 CALL_EXPR_TAILCALL (exp)
3473 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3474 expected_align, expected_size);
3478 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3479 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3485 /* Expand a call EXP to the mempcpy builtin.
3486 Return NULL_RTX if we failed; the caller should emit a normal call,
3487 otherwise try to get the result in TARGET, if convenient (and in
3488 mode MODE if that's convenient). If ENDP is 0 return the
3489 destination pointer, if ENDP is 1 return the end pointer ala
3490 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* Thin wrapper: validate the (ptr, ptr, int) argument list, unpack
   the three operands, and delegate to expand_builtin_mempcpy_args
   with endp == 1 (mempcpy returns the end pointer).  */
3494 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3496 if (!validate_arglist (exp,
3497 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3501 tree dest = CALL_EXPR_ARG (exp, 0);
3502 tree src = CALL_EXPR_ARG (exp, 1);
3503 tree len = CALL_EXPR_ARG (exp, 2);
3504 return expand_builtin_mempcpy_args (dest, src, len,
3505 target, mode, /*endp=*/ 1);
3509 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3510 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3511 so that this can also be called without constructing an actual CALL_EXPR.
3512 The other arguments and return value are the same as for
3513 expand_builtin_mempcpy. */
/* Worker for expand_builtin_mempcpy (and stpcpy): DEST/SRC/LEN are
   pre-extracted so callers without a CALL_EXPR can use it.  Only
   handles constant LEN; otherwise the caller emits a library call.  */
3516 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3517 rtx target, enum machine_mode mode, int endp)
3519 /* If return value is ignored, transform mempcpy into memcpy. */
3520 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3522 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3523 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3525 return expand_expr (result, target, mode, EXPAND_NORMAL);
3529 const char *src_str;
3530 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3531 unsigned int dest_align
3532 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3533 rtx dest_mem, src_mem, len_rtx;
3535 /* If either SRC or DEST is not a pointer type, don't do this
3536 operation in-line. */
3537 if (dest_align == 0 || src_align == 0)
3540 /* If LEN is not constant, call the normal function. */
3541 if (! host_integerp (len, 1))
3544 len_rtx = expand_normal (len);
3545 src_str = c_getstr (src);
3547 /* If SRC is a string constant and block move would be done
3548 by pieces, we can avoid loading the string from memory
3549 and only store the computed constants. */
3551 && CONST_INT_P (len_rtx)
3552 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3553 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3554 CONST_CAST (char *, src_str),
3557 dest_mem = get_memory_rtx (dest, len);
3558 set_mem_align (dest_mem, dest_align);
/* ENDP is forwarded so store_by_pieces returns the start or end
   pointer as the caller requested.  */
3559 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3560 builtin_memcpy_read_str,
3561 CONST_CAST (char *, src_str),
3562 dest_align, false, endp);
3563 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3564 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Non-constant source: fall back to a piecewise register move when
   the constant length and alignments permit.  */
3568 if (CONST_INT_P (len_rtx)
3569 && can_move_by_pieces (INTVAL (len_rtx),
3570 MIN (dest_align, src_align)))
3572 dest_mem = get_memory_rtx (dest, len);
3573 set_mem_align (dest_mem, dest_align);
3574 src_mem = get_memory_rtx (src, len);
3575 set_mem_align (src_mem, src_align);
3576 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3577 MIN (dest_align, src_align), endp);
3578 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3579 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3588 # define HAVE_movstr 0
3589 # define CODE_FOR_movstr CODE_FOR_nothing
3592 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3593 we failed, the caller should emit a normal call, otherwise try to
3594 get the result in TARGET, if convenient. If ENDP is 0 return the
3595 destination pointer, if ENDP is 1 return the end pointer ala
3596 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* Expand a string copy through the target's movstr insn (contract in
   the block comment above).  NOTE(review): lines lost in extraction
   include the HAVE_movstr guard and endp == 2 handling.  */
3600 expand_movstr (tree dest, tree src, rtx target, int endp)
3606 const struct insn_data_d * data;
3611 dest_mem = get_memory_rtx (dest, NULL);
3612 src_mem = get_memory_rtx (src, NULL);
3613 data = insn_data + CODE_FOR_movstr;
/* endp == 0: return DEST itself; pin it in a register.  */
3616 target = force_reg (Pmode, XEXP (dest_mem, 0));
3617 dest_mem = replace_equiv_address (dest_mem, target);
3618 end = gen_reg_rtx (Pmode);
/* Otherwise END (set by movstr to the NUL's address) is the value;
   make sure it satisfies the insn's operand-0 predicate.  */
3623 || target == const0_rtx
3624 || ! (*data->operand[0].predicate) (target, Pmode))
3626 end = gen_reg_rtx (Pmode);
3627 if (target != const0_rtx)
3634 if (data->operand[0].mode != VOIDmode)
3635 end = gen_lowpart (data->operand[0].mode, end);
3637 insn = data->genfun (end, dest_mem, src_mem);
3643 /* movstr is supposed to set end to the address of the NUL
3644 terminator. If the caller requested a mempcpy-like return value,
3646 if (endp == 1 && target != const0_rtx)
3648 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3649 emit_move_insn (target, force_operand (tem, NULL_RTX));
3655 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3656 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3657 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* Thin wrapper: validate the (ptr, ptr) argument list, unpack the
   operands, and delegate to expand_builtin_strcpy_args.  */
3661 expand_builtin_strcpy (tree exp, rtx target)
3663 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3665 tree dest = CALL_EXPR_ARG (exp, 0);
3666 tree src = CALL_EXPR_ARG (exp, 1);
3667 return expand_builtin_strcpy_args (dest, src, target);
3672 /* Helper function to do the actual work for expand_builtin_strcpy. The
3673 arguments to the builtin_strcpy call DEST and SRC are broken out
3674 so that this can also be called without constructing an actual CALL_EXPR.
3675 The other arguments and return value are the same as for
3676 expand_builtin_strcpy. */
/* Worker for expand_builtin_strcpy: expand via the movstr insn;
   endp == 0 makes the result the DEST pointer (strcpy semantics).  */
3679 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3681 return expand_movstr (dest, src, target, /*endp=*/0);
3684 /* Expand a call EXP to the stpcpy builtin.
3685 Return NULL_RTX if we failed the caller should emit a normal call,
3686 otherwise try to get the result in TARGET, if convenient (and in
3687 mode MODE if that's convenient). */
/* Expander for stpcpy (returns a pointer to the copied NUL): prefer
   strcpy when the result is unused, mempcpy with endp == 2 when the
   source length is known, else the movstr insn.  */
3690 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3693 location_t loc = EXPR_LOCATION (exp);
3695 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3698 dst = CALL_EXPR_ARG (exp, 0);
3699 src = CALL_EXPR_ARG (exp, 1);
3701 /* If return value is ignored, transform stpcpy into strcpy. */
3702 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3704 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3705 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3706 return expand_expr (result, target, mode, EXPAND_NORMAL);
3713 /* Ensure we get an actual string whose length can be evaluated at
3714 compile-time, not an expression containing a string. This is
3715 because the latter will potentially produce pessimized code
3716 when used to produce the return value. */
3717 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3718 return expand_movstr (dst, src, target, /*endp=*/2);
/* Copy strlen (SRC) + 1 bytes; endp == 2 yields end minus one,
   i.e. the address of the NUL, per stpcpy's contract.  */
3720 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3721 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3722 target, mode, /*endp=*/2);
/* mempcpy expansion failed: with a constant length we can still do
   strcpy and compute the return value as DEST + LEN by hand.  */
3727 if (TREE_CODE (len) == INTEGER_CST)
3729 rtx len_rtx = expand_normal (len);
3731 if (CONST_INT_P (len_rtx))
3733 ret = expand_builtin_strcpy_args (dst, src, target);
3739 if (mode != VOIDmode)
3740 target = gen_reg_rtx (mode);
3742 target = gen_reg_rtx (GET_MODE (ret));
3744 if (GET_MODE (target) != GET_MODE (ret))
3745 ret = gen_lowpart (GET_MODE (target), ret);
3747 ret = plus_constant (ret, INTVAL (len_rtx));
3748 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3756 return expand_movstr (dst, src, target, /*endp=*/2);
3760 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3761 bytes from constant string DATA + OFFSET and return it as target
/* store_by_pieces callback for strncpy: like builtin_memcpy_read_str,
   but reads past the string's NUL return zero-padding (strncpy pads
   with trailing NULs).  NOTE(review): the "return const0_rtx;" branch
   body was lost in extraction.  */
3765 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3766 enum machine_mode mode)
3768 const char *str = (const char *) data;
3770 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3773 return c_readstr (str + offset, mode);
3776 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3777 NULL_RTX if we failed; the caller should emit a normal call. */
/* Expander for strncpy: only handled inline when both LEN and the
   source length are compile-time constants.  */
3780 expand_builtin_strncpy (tree exp, rtx target)
3782 location_t loc = EXPR_LOCATION (exp);
3784 if (validate_arglist (exp,
3785 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3787 tree dest = CALL_EXPR_ARG (exp, 0);
3788 tree src = CALL_EXPR_ARG (exp, 1);
3789 tree len = CALL_EXPR_ARG (exp, 2);
3790 tree slen = c_strlen (src, 1);
3792 /* We must be passed a constant len and src parameter. */
3793 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* SLEN becomes strlen (SRC) + 1, the bytes actually present.  */
3796 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3798 /* We're required to pad with trailing zeros if the requested
3799 len is greater than strlen(s2)+1. In that case try to
3800 use store_by_pieces, if it fails, punt. */
3801 if (tree_int_cst_lt (slen, len))
3803 unsigned int dest_align
3804 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3805 const char *p = c_getstr (src);
3808 if (!p || dest_align == 0 || !host_integerp (len, 1)
3809 || !can_store_by_pieces (tree_low_cst (len, 1),
3810 builtin_strncpy_read_str,
3811 CONST_CAST (char *, p),
3815 dest_mem = get_memory_rtx (dest, len);
/* builtin_strncpy_read_str supplies the zero padding beyond the
   string's NUL, satisfying strncpy's padding requirement.  */
3816 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3817 builtin_strncpy_read_str,
3818 CONST_CAST (char *, p), dest_align, false, 0);
3819 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3820 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3827 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3828 bytes from constant string DATA + OFFSET and return it as target
/* store_by_pieces callback for memset with a constant fill byte:
   DATA points to the single fill character; return a MODE-sized
   constant with every byte equal to it.  OFFSET is irrelevant since
   all bytes are the same.  */
3832 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3833 enum machine_mode mode)
3835 const char *c = (const char *) data;
3836 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3838 memset (p, *c, GET_MODE_SIZE (mode));
3840 return c_readstr (p, mode);
3843 /* Callback routine for store_by_pieces. Return the RTL of a register
3844 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3845 char value given in the RTL register data. For example, if mode is
3846 4 bytes wide, return the RTL for 0x01010101*data. */
/* store_by_pieces callback for memset with a non-constant fill byte:
   DATA is an rtx holding the byte value; replicate it across MODE by
   multiplying with the 0x0101...01 coefficient built via c_readstr.  */
3849 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3850 enum machine_mode mode)
3856 size = GET_MODE_SIZE (mode);
/* Build the constant with every byte equal to 1.  */
3860 p = XALLOCAVEC (char, size);
3861 memset (p, 1, size);
3862 coeff = c_readstr (p, mode);
3864 target = convert_to_mode (mode, (rtx) data, 1);
3865 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3866 return force_reg (mode, target);
3869 /* Expand expression EXP, which is a call to the memset builtin. Return
3870 NULL_RTX if we failed the caller should emit a normal call, otherwise
3871 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* Thin wrapper: validate the (ptr, int, int) argument list, unpack
   the operands, and delegate to expand_builtin_memset_args.  */
3875 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3877 if (!validate_arglist (exp,
3878 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3882 tree dest = CALL_EXPR_ARG (exp, 0);
3883 tree val = CALL_EXPR_ARG (exp, 1);
3884 tree len = CALL_EXPR_ARG (exp, 2);
3885 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3889 /* Helper function to do the actual work for expand_builtin_memset. The
3890 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3891 so that this can also be called without constructing an actual CALL_EXPR.
3892 The other arguments and return value are the same as for
3893 expand_builtin_memset. */
/* Worker for expand_builtin_memset (and bzero, via ORIG_EXP): try
   store-by-pieces, the target's setmem insn, or clear_storage; on
   total failure rebuild and expand the original library call.  */
3896 expand_builtin_memset_args (tree dest, tree val, tree len,
3897 rtx target, enum machine_mode mode, tree orig_exp)
3900 enum built_in_function fcode;
3902 unsigned int dest_align;
3903 rtx dest_mem, dest_addr, len_rtx;
3904 HOST_WIDE_INT expected_size = -1;
3905 unsigned int expected_align = 0;
3907 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3909 /* If DEST is not a pointer type, don't do this operation in-line. */
3910 if (dest_align == 0)
/* Profile feedback may supply better alignment/size estimates.  */
3913 if (currently_expanding_gimple_stmt)
3914 stringop_block_profile (currently_expanding_gimple_stmt,
3915 &expected_align, &expected_size);
3917 if (expected_align < dest_align)
3918 expected_align = dest_align;
3920 /* If the LEN parameter is zero, return DEST. */
3921 if (integer_zerop (len))
3923 /* Evaluate and ignore VAL in case it has side-effects. */
3924 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3925 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3928 /* Stabilize the arguments in case we fail. */
3929 dest = builtin_save_expr (dest);
3930 val = builtin_save_expr (val);
3931 len = builtin_save_expr (len);
3933 len_rtx = expand_normal (len);
3934 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: replicate it at run time.  */
3936 if (TREE_CODE (val) != INTEGER_CST)
3940 val_rtx = expand_normal (val);
3941 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3944 /* Assume that we can memset by pieces if we can store
3945 * the coefficients by pieces (in the required modes).
3946 * We can't pass builtin_memset_gen_str as that emits RTL. */
3948 if (host_integerp (len, 1)
3949 && can_store_by_pieces (tree_low_cst (len, 1),
3950 builtin_memset_read_str, &c, dest_align,
3953 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3955 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3956 builtin_memset_gen_str, val_rtx, dest_align,
3959 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3960 dest_align, expected_align,
3964 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3965 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value: reduce it to a host char C first.  */
3969 if (target_char_cast (val, &c))
3974 if (host_integerp (len, 1)
3975 && can_store_by_pieces (tree_low_cst (len, 1),
3976 builtin_memset_read_str, &c, dest_align,
3978 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3979 builtin_memset_read_str, &c, dest_align, true, 0);
3980 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3981 dest_align, expected_align,
3985 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3986 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Fill value is zero: clear_storage handles it directly.  */
3990 set_mem_align (dest_mem, dest_align);
3991 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3992 CALL_EXPR_TAILCALL (orig_exp)
3993 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3994 expected_align, expected_size);
3998 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3999 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* All inline strategies failed: re-emit the original memset/bzero
   call (on the stabilized arguments) as an ordinary call.  */
4005 fndecl = get_callee_fndecl (orig_exp);
4006 fcode = DECL_FUNCTION_CODE (fndecl);
4007 if (fcode == BUILT_IN_MEMSET)
4008 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4010 else if (fcode == BUILT_IN_BZERO)
4011 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4015 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4016 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4017 return expand_call (fn, target, target == const0_rtx);
4020 /* Expand expression EXP, which is a call to the bzero builtin. Return
4021 NULL_RTX if we failed; the caller should emit a normal call. */
/* Expander for bzero: rewritten as memset (x, 0, y) and delegated to
   expand_builtin_memset_args, passing EXP as ORIG_EXP so a failed
   inline expansion falls back to calling bzero, not memset.  */
4024 expand_builtin_bzero (tree exp)
4027 location_t loc = EXPR_LOCATION (exp);
4029 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4032 dest = CALL_EXPR_ARG (exp, 0);
4033 size = CALL_EXPR_ARG (exp, 1);
4035 /* New argument list transforming bzero(ptr x, int y) to
4036 memset(ptr x, int 0, size_t y). This is done this way
4037 so that if it isn't expanded inline, we fallback to
4038 calling bzero instead of memset. */
/* const0_rtx as TARGET: bzero's return value is void/ignored.  */
4040 return expand_builtin_memset_args (dest, integer_zero_node,
4041 fold_convert_loc (loc, sizetype, size),
4042 const0_rtx, VOIDmode, exp);
4045 /* Expand expression EXP, which is a call to the memcmp built-in function.
4046 Return NULL_RTX if we failed and the
4047 caller should emit a normal call, otherwise try to get the result in
4048 TARGET, if convenient (and in mode MODE, if that's convenient). */
/* Expander for memcmp: use the target's cmpmemsi (preferred) or
   cmpstrnsi insn when available, else a memcmp libcall.  Everything
   is conditional on at least one of those insns existing.  */
4051 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4052 ATTRIBUTE_UNUSED enum machine_mode mode)
4054 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4056 if (!validate_arglist (exp,
4057 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4060 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4062 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4065 tree arg1 = CALL_EXPR_ARG (exp, 0);
4066 tree arg2 = CALL_EXPR_ARG (exp, 1);
4067 tree len = CALL_EXPR_ARG (exp, 2);
/* Alignments in bytes; zero means "not a pointer" below.  */
4070 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4072 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4073 enum machine_mode insn_mode;
4075 #ifdef HAVE_cmpmemsi
4077 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4080 #ifdef HAVE_cmpstrnsi
4082 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4087 /* If we don't have POINTER_TYPE, call the function. */
4088 if (arg1_align == 0 || arg2_align == 0)
4091 /* Make a place to write the result of the instruction. */
4094 && REG_P (result) && GET_MODE (result) == insn_mode
4095 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4096 result = gen_reg_rtx (insn_mode);
4098 arg1_rtx = get_memory_rtx (arg1, len);
4099 arg2_rtx = get_memory_rtx (arg2, len);
4100 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4102 /* Set MEM_SIZE as appropriate. */
4103 if (CONST_INT_P (arg3_rtx))
4105 set_mem_size (arg1_rtx, arg3_rtx);
4106 set_mem_size (arg2_rtx, arg3_rtx);
4109 #ifdef HAVE_cmpmemsi
4111 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4112 GEN_INT (MIN (arg1_align, arg2_align)));
4115 #ifdef HAVE_cmpstrnsi
4117 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4118 GEN_INT (MIN (arg1_align, arg2_align)));
/* No usable insn: fall back to the memcmp libcall.  LCT_PURE —
   memcmp only reads memory.  */
4126 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4127 TYPE_MODE (integer_type_node), 3,
4128 XEXP (arg1_rtx, 0), Pmode,
4129 XEXP (arg2_rtx, 0), Pmode,
4130 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4131 TYPE_UNSIGNED (sizetype)),
4132 TYPE_MODE (sizetype));
4134 /* Return the value in the proper mode for this function. */
4135 mode = TYPE_MODE (TREE_TYPE (exp));
4136 if (GET_MODE (result) == mode)
4138 else if (target != 0)
4140 convert_move (target, result, 0);
4144 return convert_to_mode (mode, result, 0);
4151 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4152 if we failed the caller should emit a normal call, otherwise try to get
4153 the result in TARGET, if convenient. */
/* Expander for strcmp: try the target's cmpstrsi insn first, then
   cmpstrnsi with a length derived from a constant-length operand;
   on failure expand a plain call on the stabilized arguments.  */
4156 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4158 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4161 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4162 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4163 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4165 rtx arg1_rtx, arg2_rtx;
4166 rtx result, insn = NULL_RTX;
4168 tree arg1 = CALL_EXPR_ARG (exp, 0);
4169 tree arg2 = CALL_EXPR_ARG (exp, 1);
/* Alignments in bytes; zero means "not a pointer" below.  */
4172 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4174 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4176 /* If we don't have POINTER_TYPE, call the function. */
4177 if (arg1_align == 0 || arg2_align == 0)
4180 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4181 arg1 = builtin_save_expr (arg1);
4182 arg2 = builtin_save_expr (arg2);
4184 arg1_rtx = get_memory_rtx (arg1, NULL);
4185 arg2_rtx = get_memory_rtx (arg2, NULL);
4187 #ifdef HAVE_cmpstrsi
4188 /* Try to call cmpstrsi. */
4191 enum machine_mode insn_mode
4192 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4194 /* Make a place to write the result of the instruction. */
4197 && REG_P (result) && GET_MODE (result) == insn_mode
4198 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4199 result = gen_reg_rtx (insn_mode);
4201 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4202 GEN_INT (MIN (arg1_align, arg2_align)));
4205 #ifdef HAVE_cmpstrnsi
4206 /* Try to determine at least one length and call cmpstrnsi. */
4207 if (!insn && HAVE_cmpstrnsi)
4212 enum machine_mode insn_mode
4213 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4214 tree len1 = c_strlen (arg1, 1);
4215 tree len2 = c_strlen (arg2, 1);
/* Include each string's NUL terminator in its length.  */
4218 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4220 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4222 /* If we don't have a constant length for the first, use the length
4223 of the second, if we know it. We don't require a constant for
4224 this case; some cost analysis could be done if both are available
4225 but neither is constant. For now, assume they're equally cheap,
4226 unless one has side effects. If both strings have constant lengths,
4233 else if (TREE_SIDE_EFFECTS (len1))
4235 else if (TREE_SIDE_EFFECTS (len2))
4237 else if (TREE_CODE (len1) != INTEGER_CST)
4239 else if (TREE_CODE (len2) != INTEGER_CST)
4241 else if (tree_int_cst_lt (len1, len2))
4246 /* If both arguments have side effects, we cannot optimize. */
4247 if (!len || TREE_SIDE_EFFECTS (len))
4250 arg3_rtx = expand_normal (len);
4252 /* Make a place to write the result of the instruction. */
4255 && REG_P (result) && GET_MODE (result) == insn_mode
4256 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4257 result = gen_reg_rtx (insn_mode);
4259 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4260 GEN_INT (MIN (arg1_align, arg2_align)));
4266 enum machine_mode mode;
4269 /* Return the value in the proper mode for this function. */
4270 mode = TYPE_MODE (TREE_TYPE (exp));
4271 if (GET_MODE (result) == mode)
4274 return convert_to_mode (mode, result, 0);
4275 convert_move (target, result, 0);
4279 /* Expand the library call ourselves using a stabilized argument
4280 list to avoid re-evaluating the function's arguments twice. */
4281 #ifdef HAVE_cmpstrnsi
4284 fndecl = get_callee_fndecl (exp);
4285 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4286 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4287 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4288 return expand_call (fn, target, target == const0_rtx);
4294 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4295 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4296 the result in TARGET, if convenient. */
4299 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4300 ATTRIBUTE_UNUSED enum machine_mode mode)
4302 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4304 if (!validate_arglist (exp,
4305 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4308 /* If c_strlen can determine an expression for one of the string
4309 lengths, and it doesn't have side effects, then emit cmpstrnsi
4310 using length MIN(strlen(string)+1, arg3). */
4311 #ifdef HAVE_cmpstrnsi
4314 tree len, len1, len2;
4315 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4318 tree arg1 = CALL_EXPR_ARG (exp, 0);
4319 tree arg2 = CALL_EXPR_ARG (exp, 1);
4320 tree arg3 = CALL_EXPR_ARG (exp, 2);
4323 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4325 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4326 enum machine_mode insn_mode
4327 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4329 len1 = c_strlen (arg1, 1);
4330 len2 = c_strlen (arg2, 1);
4333 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4335 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4337 /* If we don't have a constant length for the first, use the length
4338 of the second, if we know it. We don't require a constant for
4339 this case; some cost analysis could be done if both are available
4340 but neither is constant. For now, assume they're equally cheap,
4341 unless one has side effects. If both strings have constant lengths,
4348 else if (TREE_SIDE_EFFECTS (len1))
4350 else if (TREE_SIDE_EFFECTS (len2))
4352 else if (TREE_CODE (len1) != INTEGER_CST)
4354 else if (TREE_CODE (len2) != INTEGER_CST)
4356 else if (tree_int_cst_lt (len1, len2))
4361 /* If both arguments have side effects, we cannot optimize. */
4362 if (!len || TREE_SIDE_EFFECTS (len))
4365 /* The actual new length parameter is MIN(len,arg3). */
4366 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4367 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4369 /* If we don't have POINTER_TYPE, call the function. */
4370 if (arg1_align == 0 || arg2_align == 0)
4373 /* Make a place to write the result of the instruction. */
4376 && REG_P (result) && GET_MODE (result) == insn_mode
4377 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4378 result = gen_reg_rtx (insn_mode);
4380 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4381 arg1 = builtin_save_expr (arg1);
4382 arg2 = builtin_save_expr (arg2);
4383 len = builtin_save_expr (len);
4385 arg1_rtx = get_memory_rtx (arg1, len);
4386 arg2_rtx = get_memory_rtx (arg2, len);
4387 arg3_rtx = expand_normal (len);
4388 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4389 GEN_INT (MIN (arg1_align, arg2_align)));
4394 /* Return the value in the proper mode for this function. */
4395 mode = TYPE_MODE (TREE_TYPE (exp));
4396 if (GET_MODE (result) == mode)
4399 return convert_to_mode (mode, result, 0);
4400 convert_move (target, result, 0);
4404 /* Expand the library call ourselves using a stabilized argument
4405 list to avoid re-evaluating the function's arguments twice. */
4406 fndecl = get_callee_fndecl (exp);
4407 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4409 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4410 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4411 return expand_call (fn, target, target == const0_rtx);
4417 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4418 if that's convenient. */
4421 expand_builtin_saveregs (void)
4425 /* Don't do __builtin_saveregs more than once in a function.
4426 Save the result of the first call and reuse it. */
4427 if (saveregs_value != 0)
4428 return saveregs_value;
4430 /* When this function is called, it means that registers must be
4431 saved on entry to this function. So we migrate the call to the
4432 first insn of this function. */
4436 /* Do whatever the machine needs done in this case. */
4437 val = targetm.calls.expand_builtin_saveregs ();
/* Cache the result so any later calls in this function reuse it.  */
4442 saveregs_value = val;
4444 /* Put the insns after the NOTE that starts the function. If this
4445 is inside a start_sequence, make the outer-level insn chain current, so
4446 the code is placed at the start of the function. */
4447 push_topmost_sequence ();
4448 emit_insn_after (seq, entry_of_function ());
4449 pop_topmost_sequence ();
4454 /* Expand a call to __builtin_next_arg. */
4457 expand_builtin_next_arg (void)
4459 /* Checking arguments is already done in fold_builtin_next_arg
4460 that must be called before this function. */
/* Address of the first anonymous argument: the internal argument
   pointer plus the offset past the last named argument.  */
4461 return expand_binop (ptr_mode, add_optab,
4462 crtl->args.internal_arg_pointer,
4463 crtl->args.arg_offset_rtx,
4464 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4467 /* Make it easier for the backends by protecting the valist argument
4468 from multiple evaluations. */
4471 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4473 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4475 /* The current way of determining the type of valist is completely
4476 bogus. We should have the information on the va builtin instead. */
4478 vatype = targetm.fn_abi_va_list (cfun->decl);
4480 if (TREE_CODE (vatype) == ARRAY_TYPE)
4482 if (TREE_SIDE_EFFECTS (valist))
4483 valist = save_expr (valist);
4485 /* For this case, the backends will be expecting a pointer to
4486 vatype, but it's possible we've actually been given an array
4487 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4489 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4491 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4492 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4497 tree pt = build_pointer_type (vatype);
/* A side-effect-free valist gets its address taken directly; the
   ADDR_EXPR is marked with side effects so it is not re-folded.  */
4501 if (! TREE_SIDE_EFFECTS (valist))
4504 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4505 TREE_SIDE_EFFECTS (valist) = 1;
4508 if (TREE_SIDE_EFFECTS (valist))
4509 valist = save_expr (valist);
/* Re-dereference so the result again denotes the va_list object.  */
4510 valist = fold_build2_loc (loc, MEM_REF,
4511 vatype, valist, build_int_cst (pt, 0));
4517 /* The "standard" definition of va_list is void*. */
4520 std_build_builtin_va_list (void)
4522 return ptr_type_node;
4525 /* The "standard" abi va_list is va_list_type_node. */
4528 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4530 return va_list_type_node;
4533 /* The "standard" type of va_list is va_list_type_node. */
4536 std_canonical_va_list_type (tree type)
/* Strip one level of indirection so TYPE refers to the va_list
   object itself rather than a pointer/reference to it.  */
4540 if (INDIRECT_REF_P (type))
4541 type = TREE_TYPE (type);
4542 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4543 type = TREE_TYPE (type);
4544 wtype = va_list_type_node;
/* NOTE(review): htype appears to be initialized from TYPE on a line
   not visible in this extract -- confirm against the full source.  */
4546 /* Treat structure va_list types. */
4547 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4548 htype = TREE_TYPE (htype);
4549 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4551 /* If va_list is an array type, the argument may have decayed
4552 to a pointer type, e.g. by being passed to another function.
4553 In that case, unwrap both types so that we can compare the
4554 underlying records. */
4555 if (TREE_CODE (htype) == ARRAY_TYPE
4556 || POINTER_TYPE_P (htype))
4558 wtype = TREE_TYPE (wtype);
4559 htype = TREE_TYPE (htype);
/* Match on main variants so qualifiers/typedefs don't matter.  */
4562 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4563 return va_list_type_node;
4568 /* The "standard" implementation of va_start: just assign `nextarg' to
4572 std_expand_builtin_va_start (tree valist, rtx nextarg)
/* Expand VALIST as an lvalue and store NEXTARG into it.  */
4574 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4575 convert_move (va_r, nextarg, 0);
4578 /* Expand EXP, a call to __builtin_va_start. */
4581 expand_builtin_va_start (tree exp)
4585 location_t loc = EXPR_LOCATION (exp);
4587 if (call_expr_nargs (exp) < 2)
4589 error_at (loc, "too few arguments to function %<va_start%>");
/* fold_builtin_next_arg diagnoses a bad second argument.  */
4593 if (fold_builtin_next_arg (exp, true))
4596 nextarg = expand_builtin_next_arg ();
4597 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
/* Prefer the target hook when the backend provides one.  */
4599 if (targetm.expand_builtin_va_start)
4600 targetm.expand_builtin_va_start (valist, nextarg);
4602 std_expand_builtin_va_start (valist, nextarg);
4607 /* The "standard" implementation of va_arg: read the value from the
4608 current (padded) address and increment by the (padded) size. */
4611 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4614 tree addr, t, type_size, rounded_size, valist_tmp;
4615 unsigned HOST_WIDE_INT align, boundary;
4618 #ifdef ARGS_GROW_DOWNWARD
4619 /* All of the alignment and movement below is for args-grow-up machines.
4620 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4621 implement their own specialized gimplify_va_arg_expr routines. */
/* Pass-by-reference arguments are fetched as a pointer and
   dereferenced at the end.  */
4625 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4627 type = build_pointer_type (type);
4629 align = PARM_BOUNDARY / BITS_PER_UNIT;
4630 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4632 /* When we align parameter on stack for caller, if the parameter
4633 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4634 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4635 here with caller. */
4636 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4637 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4639 boundary /= BITS_PER_UNIT;
4641 /* Hoist the valist value into a temporary for the moment. */
4642 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4644 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4645 requires greater alignment, we must perform dynamic alignment. */
4646 if (boundary > align
4647 && !integer_zerop (TYPE_SIZE (type)))
/* ap = (ap + boundary - 1) & -boundary, done in two statements.  */
4649 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4650 fold_build2 (POINTER_PLUS_EXPR,
4652 valist_tmp, size_int (boundary - 1)));
4653 gimplify_and_add (t, pre_p);
4655 t = fold_convert (sizetype, valist_tmp);
4656 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4657 fold_convert (TREE_TYPE (valist),
4658 fold_build2 (BIT_AND_EXPR, sizetype, t,
4659 size_int (-boundary))));
4660 gimplify_and_add (t, pre_p);
4665 /* If the actual alignment is less than the alignment of the type,
4666 adjust the type accordingly so that we don't assume strict alignment
4667 when dereferencing the pointer. */
4668 boundary *= BITS_PER_UNIT;
4669 if (boundary < TYPE_ALIGN (type))
4671 type = build_variant_type_copy (type);
4672 TYPE_ALIGN (type) = boundary;
4675 /* Compute the rounded size of the type. */
4676 type_size = size_in_bytes (type);
4677 rounded_size = round_up (type_size, align);
4679 /* Reduce rounded_size so it's sharable with the postqueue. */
4680 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4684 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4686 /* Small args are padded downward. */
4687 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4688 rounded_size, size_int (align));
4689 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4690 size_binop (MINUS_EXPR, rounded_size, type_size));
4691 addr = fold_build2 (POINTER_PLUS_EXPR,
4692 TREE_TYPE (addr), addr, t);
4695 /* Compute new value for AP. */
4696 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4697 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4698 gimplify_and_add (t, pre_p);
4700 addr = fold_convert (build_pointer_type (type), addr);
/* For pass-by-reference, an extra dereference fetches the object.  */
4703 addr = build_va_arg_indirect_ref (addr);
4705 return build_va_arg_indirect_ref (addr);
4708 /* Build an indirect-ref expression over the given TREE, which represents a
4709 piece of a va_arg() expansion. */
4711 build_va_arg_indirect_ref (tree addr)
4713 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4715 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4721 /* Return a dummy expression of type TYPE in order to keep going after an
4725 dummy_object (tree type)
/* Dereference a (TYPE *)0 literal; only used on error paths, so the
   null dereference is never actually executed.  */
4727 tree t = build_int_cst (build_pointer_type (type), 0);
4728 return build1 (INDIRECT_REF, type, t);
4731 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4732 builtin function, but a very special sort of operator. */
4734 enum gimplify_status
4735 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4737 tree promoted_type, have_va_type;
4738 tree valist = TREE_OPERAND (*expr_p, 0);
4739 tree type = TREE_TYPE (*expr_p);
4741 location_t loc = EXPR_LOCATION (*expr_p);
4743 /* Verify that valist is of the proper type. */
4744 have_va_type = TREE_TYPE (valist);
4745 if (have_va_type == error_mark_node)
4747 have_va_type = targetm.canonical_va_list_type (have_va_type);
4749 if (have_va_type == NULL_TREE)
4751 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4755 /* Generate a diagnostic for requesting data of a type that cannot
4756 be passed through `...' due to type promotion at the call site. */
4757 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* Only give the follow-up hint once per compilation.  */
4760 static bool gave_help;
4763 /* Unfortunately, this is merely undefined, rather than a constraint
4764 violation, so we cannot make this an error. If this call is never
4765 executed, the program is still strictly conforming. */
4766 warned = warning_at (loc, 0,
4767 "%qT is promoted to %qT when passed through %<...%>",
4768 type, promoted_type);
4769 if (!gave_help && warned)
4772 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4773 promoted_type, type);
4776 /* We can, however, treat "undefined" any way we please.
4777 Call abort to encourage the user to fix the program. */
4779 inform (loc, "if this code is reached, the program will abort");
4780 /* Before the abort, allow the evaluation of the va_list
4781 expression to exit or longjmp. */
4782 gimplify_and_add (valist, pre_p);
4783 t = build_call_expr_loc (loc,
4784 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4785 gimplify_and_add (t, pre_p);
4787 /* This is dead code, but go ahead and finish so that the
4788 mode of the result comes out right. */
4789 *expr_p = dummy_object (type);
4794 /* Make it easier for the backends by protecting the valist argument
4795 from multiple evaluations. */
4796 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4798 /* For this case, the backends will be expecting a pointer to
4799 TREE_TYPE (abi), but it's possible we've
4800 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4802 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4804 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4805 valist = fold_convert_loc (loc, p1,
4806 build_fold_addr_expr_loc (loc, valist));
4809 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
/* Non-array va_list: gimplify to an lvalue so the backend may
   update it in place.  */
4812 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4814 if (!targetm.gimplify_va_arg_expr)
4815 /* FIXME: Once most targets are converted we should merely
4816 assert this is non-null. */
4819 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4824 /* Expand EXP, a call to __builtin_va_end. */
4827 expand_builtin_va_end (tree exp)
4829 tree valist = CALL_EXPR_ARG (exp, 0);
4831 /* Evaluate for side effects, if needed. I hate macros that don't
do that. */
4833 if (TREE_SIDE_EFFECTS (valist))
4834 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4839 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4840 builtin rather than just as an assignment in stdarg.h because of the
4841 nastiness of array-type va_list types. */
4844 expand_builtin_va_copy (tree exp)
4847 location_t loc = EXPR_LOCATION (exp);
4849 dst = CALL_EXPR_ARG (exp, 0);
4850 src = CALL_EXPR_ARG (exp, 1);
/* DST is written (lvalue), SRC is only read.  */
4852 dst = stabilize_va_list_loc (loc, dst, 1);
4853 src = stabilize_va_list_loc (loc, src, 0);
4855 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Scalar va_list: a plain assignment suffices.  */
4857 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4859 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4860 TREE_SIDE_EFFECTS (t) = 1;
4861 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array va_list: copy the underlying storage with a block move.  */
4865 rtx dstb, srcb, size;
4867 /* Evaluate to pointers. */
4868 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4869 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4870 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4871 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4873 dstb = convert_memory_address (Pmode, dstb);
4874 srcb = convert_memory_address (Pmode, srcb);
4876 /* "Dereference" to BLKmode memories. */
4877 dstb = gen_rtx_MEM (BLKmode, dstb);
4878 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4879 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4880 srcb = gen_rtx_MEM (BLKmode, srcb);
4881 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4882 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4885 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4891 /* Expand a call to one of the builtin functions __builtin_frame_address or
4892 __builtin_return_address. */
4895 expand_builtin_frame_address (tree fndecl, tree exp)
4897 /* The argument must be a nonnegative integer constant.
4898 It counts the number of frames to scan up the stack.
4899 The value is the return address saved in that frame. */
4900 if (call_expr_nargs (exp) == 0)
4901 /* Warning about missing arg was already issued. */
4903 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
/* Diagnose under the name the user actually called.  */
4905 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4906 error ("invalid argument to %<__builtin_frame_address%>");
4908 error ("invalid argument to %<__builtin_return_address%>");
4914 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4915 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4917 /* Some ports cannot access arbitrary stack frames. */
4920 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4921 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4923 warning (0, "unsupported argument to %<__builtin_return_address%>");
4927 /* For __builtin_frame_address, return what we've got. */
4928 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Copy a non-constant result into a register before returning.  */
4932 && ! CONSTANT_P (tem))
4933 tem = copy_to_mode_reg (Pmode, tem);
4938 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4939 we failed and the caller should emit a normal call, otherwise try to get
4940 the result in TARGET, if convenient. */
4943 expand_builtin_alloca (tree exp, rtx target)
4948 /* Emit normal call if marked not-inlineable. */
4949 if (CALL_CANNOT_INLINE_P (exp))
4952 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4955 /* Compute the argument. */
4956 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4958 /* Allocate the desired space. */
4959 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
/* The stack pointer lives in Pmode; callers expect ptr_mode.  */
4960 result = convert_memory_address (ptr_mode, result);
4965 /* Expand a call to a bswap builtin with argument ARG0. MODE
4966 is the mode to expand with. */
4969 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4971 enum machine_mode mode;
4975 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4978 arg = CALL_EXPR_ARG (exp, 0);
4979 mode = TYPE_MODE (TREE_TYPE (arg));
4980 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* Byte-swap via the bswap optab; expand_unop may widen TARGET.  */
4982 target = expand_unop (mode, bswap_optab, op0, target, 1);
4984 gcc_assert (target);
4986 return convert_to_mode (mode, target, 0);
4989 /* Expand a call to a unary builtin in EXP.
4990 Return NULL_RTX if a normal call should be emitted rather than expanding the
4991 function in-line. If convenient, the result should be placed in TARGET.
4992 SUBTARGET may be used as the target for computing one of EXP's operands. */
4995 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4996 rtx subtarget, optab op_optab)
5000 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5003 /* Compute the argument. */
5004 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5005 VOIDmode, EXPAND_NORMAL);
5006 /* Compute op, into TARGET if possible.
5007 Set TARGET to wherever the result comes back. */
5008 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5009 op_optab, op0, target, 1);
5010 gcc_assert (target);
/* Convert from the argument's mode to the call's result mode.  */
5012 return convert_to_mode (target_mode, target, 0);
5015 /* Expand a call to __builtin_expect. We just return our argument
5016 as the builtin_expect semantic should've been already executed by
5017 tree branch prediction pass. */
5020 expand_builtin_expect (tree exp, rtx target)
5024 if (call_expr_nargs (exp) < 2)
5026 arg = CALL_EXPR_ARG (exp, 0)
5028 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5029 /* When guessing was done, the hints should be already stripped away. */
5030 gcc_assert (!flag_guess_branch_prob
5031 || optimize == 0 || seen_error ());
/* Emit a trap: use the target's trap insn if it has one, otherwise
   fall back to a call to abort.  */
5036 expand_builtin_trap (void)
5040 emit_insn (gen_trap ());
5043 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5047 /* Expand a call to __builtin_unreachable. We do nothing except emit
5048 a barrier saying that control flow will not pass here.
5050 It is the responsibility of the program being compiled to ensure
5051 that control flow does never reach __builtin_unreachable. */
5053 expand_builtin_unreachable (void)
5058 /* Expand EXP, a call to fabs, fabsf or fabsl.
5059 Return NULL_RTX if a normal call should be emitted rather than expanding
5060 the function inline. If convenient, the result should be placed
5061 in TARGET. SUBTARGET may be used as the target for computing
5065 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5067 enum machine_mode mode;
5071 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
/* Stabilize the argument; safe_from_p below re-examines it.  */
5074 arg = CALL_EXPR_ARG (exp, 0);
5075 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5076 mode = TYPE_MODE (TREE_TYPE (arg));
5077 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5078 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5081 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5082 Return NULL if a normal call should be emitted rather than expanding the
5083 function inline. If convenient, the result should be placed in TARGET.
5084 SUBTARGET may be used as the target for computing the operand. */
5087 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5092 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5095 arg = CALL_EXPR_ARG (exp, 0);
5096 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5098 arg = CALL_EXPR_ARG (exp, 1);
5099 op1 = expand_normal (arg);
5101 return expand_copysign (op0, op1, target);
5104 /* Create a new constant string literal and return a char* pointer to it.
5105 The STRING_CST value is the LEN characters at STR. */
5107 build_string_literal (int len, const char *str)
5109 tree t, elem, index, type;
5111 t = build_string (len, str);
/* Give the STRING_CST type `const char[len]'.  */
5112 elem = build_type_variant (char_type_node, 1, 0);
5113 index = build_index_type (size_int (len - 1));
5114 type = build_array_type (elem, index);
5115 TREE_TYPE (t) = type;
5116 TREE_CONSTANT (t) = 1;
5117 TREE_READONLY (t) = 1;
5118 TREE_STATIC (t) = 1;
/* Return &str[0], of type `const char *'.  */
5120 type = build_pointer_type (elem);
5121 t = build1 (ADDR_EXPR, type,
5122 build4 (ARRAY_REF, elem,
5123 t, integer_zero_node, NULL_TREE, NULL_TREE));
5127 /* Expand a call to either the entry or exit function profiler. */
5130 expand_builtin_profile_func (bool exitp)
5132 rtx this_rtx, which;
/* Address of the current function, passed to the profiling hook.  */
5134 this_rtx = DECL_RTL (current_function_decl);
5135 gcc_assert (MEM_P (this_rtx));
5136 this_rtx = XEXP (this_rtx, 0);
5139 which = profile_function_exit_libfunc;
5141 which = profile_function_entry_libfunc;
/* Call hook(this_fn, call_site_return_address).  */
5143 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5144 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5151 /* Expand a call to __builtin___clear_cache. */
5154 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5156 #ifndef HAVE_clear_cache
5157 #ifdef CLEAR_INSN_CACHE
5158 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5159 does something. Just do the default expansion to a call to
5163 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5164 does nothing. There is no need to call it. Do nothing. */
5166 #endif /* CLEAR_INSN_CACHE */
5168 /* We have a "clear_cache" insn, and it will handle everything. */
5170 rtx begin_rtx, end_rtx;
5171 enum insn_code icode;
5173 /* We must not expand to a library call. If we did, any
5174 fallback library function in libgcc that might contain a call to
5175 __builtin___clear_cache() would recurse infinitely. */
5176 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5178 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5182 if (HAVE_clear_cache)
5184 icode = CODE_FOR_clear_cache;
/* Legitimize each operand against the insn's predicate, copying
   into a register when the predicate rejects the raw address.  */
5186 begin = CALL_EXPR_ARG (exp, 0);
5187 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5188 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5189 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5190 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5192 end = CALL_EXPR_ARG (exp, 1);
5193 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5194 end_rtx = convert_memory_address (Pmode, end_rtx);
5195 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5196 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5198 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5201 #endif /* HAVE_clear_cache */
5204 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5207 round_trampoline_addr (rtx tramp)
5209 rtx temp, addend, mask;
5211 /* If we don't need too much alignment, we'll have been guaranteed
5212 proper alignment by get_trampoline_type. */
5213 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5216 /* Round address up to desired boundary. */
5217 temp = gen_reg_rtx (Pmode);
5218 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5219 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
/* tramp = (tramp + align-1) & -align.  */
5221 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5222 temp, 0, OPTAB_LIB_WIDEN);
5223 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5224 temp, 0, OPTAB_LIB_WIDEN);
/* Expand a call to __builtin_init_trampoline: fill in a trampoline
   object so it calls T_FUNC with static chain T_CHAIN.  */
5230 expand_builtin_init_trampoline (tree exp)
5232 tree t_tramp, t_func, t_chain;
5233 rtx m_tramp, r_tramp, r_chain, tmp;
5235 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5236 POINTER_TYPE, VOID_TYPE))
5239 t_tramp = CALL_EXPR_ARG (exp, 0);
5240 t_func = CALL_EXPR_ARG (exp, 1);
5241 t_chain = CALL_EXPR_ARG (exp, 2);
5243 r_tramp = expand_normal (t_tramp);
5244 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5245 MEM_NOTRAP_P (m_tramp) = 1;
5247 /* The TRAMP argument should be the address of a field within the
5248 local function's FRAME decl. Let's see if we can use it
5249 to fill in the MEM_ATTRs for this memory. */
5250 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5251 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
/* Re-address the MEM at the rounded (aligned) trampoline address.  */
5254 tmp = round_trampoline_addr (r_tramp);
5257 m_tramp = change_address (m_tramp, BLKmode, tmp);
5258 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5259 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5262 /* The FUNC argument should be the address of the nested function.
5263 Extract the actual function decl to pass to the hook. */
5264 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5265 t_func = TREE_OPERAND (t_func, 0);
5266 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5268 r_chain = expand_normal (t_chain);
5270 /* Generate insns to initialize the trampoline. */
5271 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5273 trampolines_created = 1;
5275 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5276 "trampoline generated for nested function %qD", t_func);
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address and let the target hook adjust it (e.g. for mode bits).  */
5282 expand_builtin_adjust_trampoline (tree exp)
5286 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5289 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5290 tramp = round_trampoline_addr (tramp);
5291 if (targetm.calls.trampoline_adjust_address)
5292 tramp = targetm.calls.trampoline_adjust_address (tramp);
5297 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5298 function. The function first checks whether the back end provides
5299 an insn to implement signbit for the respective mode. If not, it
5300 checks whether the floating point format of the value is such that
5301 the sign bit can be extracted. If that is not the case, the
5302 function returns NULL_RTX to indicate that a normal call should be
5303 emitted rather than expanding the function in-line. EXP is the
5304 expression that is a call to the builtin function; if convenient,
5305 the result should be placed in TARGET. */
5307 expand_builtin_signbit (tree exp, rtx target)
5309 const struct real_format *fmt;
5310 enum machine_mode fmode, imode, rmode;
5313 enum insn_code icode;
5315 location_t loc = EXPR_LOCATION (exp);
5317 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5320 arg = CALL_EXPR_ARG (exp, 0);
5321 fmode = TYPE_MODE (TREE_TYPE (arg));
5322 rmode = TYPE_MODE (TREE_TYPE (exp));
5323 fmt = REAL_MODE_FORMAT (fmode);
5325 arg = builtin_save_expr (arg);
5327 /* Expand the argument yielding a RTX expression. */
5328 temp = expand_normal (arg);
5330 /* Check if the back end provides an insn that handles signbit for the
5332 icode = optab_handler (signbit_optab, fmode);
5333 if (icode != CODE_FOR_nothing)
5335 rtx last = get_last_insn ();
5336 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5337 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
/* Insn emission failed; discard the partial sequence.  */
5339 delete_insns_since (last);
5342 /* For floating point formats without a sign bit, implement signbit
5344 bitpos = fmt->signbit_ro;
5347 /* But we can't do this if the format supports signed zero. */
5348 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
/* No sign bit: signbit(x) is simply x < 0.  */
5351 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5352 build_real (TREE_TYPE (arg), dconst0));
5353 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5356 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5358 imode = int_mode_for_mode (fmode);
5359 if (imode == BLKmode)
5361 temp = gen_lowpart (imode, temp);
5366 /* Handle targets with different FP word orders. */
5367 if (FLOAT_WORDS_BIG_ENDIAN)
5368 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5370 word = bitpos / BITS_PER_WORD;
5371 temp = operand_subword_force (temp, word, fmode);
5372 bitpos = bitpos % BITS_PER_WORD;
5375 /* Force the intermediate word_mode (or narrower) result into a
5376 register. This avoids attempting to create paradoxical SUBREGs
5377 of floating point modes below. */
5378 temp = force_reg (imode, temp);
5380 /* If the bitpos is within the "result mode" lowpart, the operation
5381 can be implemented with a single bitwise AND. Otherwise, we need
5382 a right shift and an AND. */
5384 if (bitpos < GET_MODE_BITSIZE (rmode))
5386 double_int mask = double_int_setbit (double_int_zero, bitpos);
5388 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5389 temp = gen_lowpart (rmode, temp);
5390 temp = expand_binop (rmode, and_optab, temp,
5391 immed_double_int_const (mask, rmode),
5392 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5396 /* Perform a logical right shift to place the signbit in the least
5397 significant bit, then truncate the result to the desired mode
5398 and mask just this bit. */
5399 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5400 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5401 temp = gen_lowpart (rmode, temp);
5402 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5403 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5409 /* Expand fork or exec calls. TARGET is the desired target of the
5410 call. EXP is the call. FN is the
5411 identificator of the actual function. IGNORE is nonzero if the
5412 value is to be ignored. */
5415 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5420 /* If we are not profiling, just call the function. */
5421 if (!profile_arc_flag)
5424 /* Otherwise call the wrapper. This should be equivalent for the rest of
5425 compiler, so the code does not diverge, and the wrapper may run the
5426 code necessary for keeping the profiling sane. */
5428 switch (DECL_FUNCTION_CODE (fn))
5431 id = get_identifier ("__gcov_fork");
5434 case BUILT_IN_EXECL:
5435 id = get_identifier ("__gcov_execl");
5438 case BUILT_IN_EXECV:
5439 id = get_identifier ("__gcov_execv");
5442 case BUILT_IN_EXECLP:
5443 id = get_identifier ("__gcov_execlp");
5446 case BUILT_IN_EXECLE:
5447 id = get_identifier ("__gcov_execle");
5450 case BUILT_IN_EXECVP:
5451 id = get_identifier ("__gcov_execvp");
5454 case BUILT_IN_EXECVE:
5455 id = get_identifier ("__gcov_execve");
/* Declare the gcov wrapper with the same type as the wrapped
   function, as an external public declaration.  */
5462 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5463 FUNCTION_DECL, id, TREE_TYPE (fn));
5464 DECL_EXTERNAL (decl) = 1;
5465 TREE_PUBLIC (decl) = 1;
5466 DECL_ARTIFICIAL (decl) = 1;
5467 TREE_NOTHROW (decl) = 1;
5468 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5469 DECL_VISIBILITY_SPECIFIED (decl) = 1;
/* Rebuild the call with the wrapper as callee and expand it.  */
5470 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5471 return expand_call (call, target, ignore);
5476 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5477 the pointer in these functions is void*, the tree optimizers may remove
5478 casts. The mode computed in expand_builtin isn't reliable either, due
5479 to __sync_bool_compare_and_swap.
5481 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5482 group of builtins. This gives us log2 of the mode size. */
5484 static inline enum machine_mode
5485 get_builtin_sync_mode (int fcode_diff)
5487 /* The size is not negotiable, so ask not to get BLKmode in return
5488 if the target indicates that a smaller size would be better. */
5489 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5492 /* Expand the memory expression LOC and return the appropriate memory operand
5493 for the builtin_sync operations. */
/* NOTE(review): the function's return type line and local declarations
   (addr, mem) are elided from this view.  */
5496 get_builtin_sync_mem (tree loc, enum machine_mode mode)
/* Expand the address in ptr_mode, then widen/narrow it to Pmode for use
   as a MEM address.  */
5500 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5501 addr = convert_memory_address (Pmode, addr);
5503 /* Note that we explicitly do not want any alias information for this
5504 memory, so that we kill all other live memories. Otherwise we don't
5505 satisfy the full barrier semantics of the intrinsic. */
5506 mem = validize_mem (gen_rtx_MEM (mode, addr));
/* The memory-barrier alias set plus volatility makes the access conflict
   with everything, giving the intrinsic its full-barrier behavior.  */
5508 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5509 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5510 MEM_VOLATILE_P (mem) = 1;
5515 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5516 EXP is the CALL_EXPR. CODE is the rtx code
5517 that corresponds to the arithmetic or logical operation from the name;
5518 an exception here is that NOT actually means NAND. TARGET is an optional
5519 place for us to store the results; AFTER is true if this is the
5520 fetch_and_xxx form. IGNORE is true if we don't actually care about
5521 the result of the operation at all. */
/* NOTE(review): elided excerpt -- the return type, `mem'/`val' declarations,
   switch braces, `break's and the warning() calls guarded by
   warned_f_a_n/warned_n_a_f are not visible in this view.  */
5524 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5525 enum rtx_code code, bool after,
5526 rtx target, bool ignore)
5529 enum machine_mode old_mode;
5530 location_t loc = EXPR_LOCATION (exp);
/* -Wsync-nand: the NAND builtins changed meaning in GCC 4.4; warn once per
   direction (fetch_and_nand vs. nand_and_fetch) using the static flags.  */
5532 if (code == NOT && warn_sync_nand)
5534 tree fndecl = get_callee_fndecl (exp);
5535 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5537 static bool warned_f_a_n, warned_n_a_f;
5541 case BUILT_IN_FETCH_AND_NAND_1:
5542 case BUILT_IN_FETCH_AND_NAND_2:
5543 case BUILT_IN_FETCH_AND_NAND_4:
5544 case BUILT_IN_FETCH_AND_NAND_8:
5545 case BUILT_IN_FETCH_AND_NAND_16:
5550 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5551 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5552 warned_f_a_n = true;
5555 case BUILT_IN_NAND_AND_FETCH_1:
5556 case BUILT_IN_NAND_AND_FETCH_2:
5557 case BUILT_IN_NAND_AND_FETCH_4:
5558 case BUILT_IN_NAND_AND_FETCH_8:
5559 case BUILT_IN_NAND_AND_FETCH_16:
5564 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5565 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5566 warned_n_a_f = true;
5574 /* Expand the operands. */
5575 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5577 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5578 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5579 of CONST_INTs, where we know the old_mode only from the call argument. */
/* CONST_INTs have VOIDmode, so recover their true mode from the argument's
   tree type before converting.  */
5580 old_mode = GET_MODE (val);
5581 if (old_mode == VOIDmode)
5582 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5583 val = convert_modes (mode, old_mode, val, 1);
/* When the result is ignored the simpler non-fetching expansion suffices;
   otherwise AFTER selects the fetch-before/after-op variant.  */
5586 return expand_sync_operation (mem, val, code);
5588 return expand_sync_fetch_operation (mem, val, code, after, target);
5591 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5592 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5593 true if this is the boolean form. TARGET is a place for us to store the
5594 results; this is NOT optional if IS_BOOL is true. */
/* NOTE(review): the return type line and some braces are elided from this
   view of the file.  */
5597 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5598 bool is_bool, rtx target)
5600 rtx old_val, new_val, mem;
5601 enum machine_mode old_mode;
5603 /* Expand the operands. */
5604 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
/* Argument 1 is the expected (comparison) value.  */
5607 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5608 mode, EXPAND_NORMAL);
5609 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5610 of CONST_INTs, where we know the old_mode only from the call argument. */
5611 old_mode = GET_MODE (old_val);
5612 if (old_mode == VOIDmode)
5613 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5614 old_val = convert_modes (mode, old_mode, old_val, 1);
/* Argument 2 is the replacement value; same CONST_INT mode-recovery dance
   (old_mode is merely reused as a scratch variable here).  */
5616 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5617 mode, EXPAND_NORMAL);
5618 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5619 of CONST_INTs, where we know the old_mode only from the call argument. */
5620 old_mode = GET_MODE (new_val);
5621 if (old_mode == VOIDmode)
5622 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5623 new_val = convert_modes (mode, old_mode, new_val, 1);
/* Boolean form returns success/failure; val form returns the prior
   memory contents.  */
5626 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5628 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5631 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5632 general form is actually an atomic exchange, and some targets only
5633 support a reduced form with the second argument being a constant 1.
5634 EXP is the CALL_EXPR; TARGET is an optional place for us to store
/* NOTE(review): the end of the comment, the return type and the
   `rtx val, mem;' declarations are elided from this view.  */
5638 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5642 enum machine_mode old_mode;
5644 /* Expand the operands. */
5645 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5646 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5647 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5648 of CONST_INTs, where we know the old_mode only from the call argument. */
/* CONST_INTs are VOIDmode; fetch the real mode from the argument type.  */
5649 old_mode = GET_MODE (val);
5650 if (old_mode == VOIDmode)
5651 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5652 val = convert_modes (mode, old_mode, val, 1);
5654 return expand_sync_lock_test_and_set (mem, val, target);
5657 /* Expand the __sync_synchronize intrinsic.  Emits a full memory barrier,
5658 trying three strategies in order: a target barrier insn, a library
5659 call, and finally a volatile asm that clobbers "memory". */
5660 expand_builtin_synchronize (void)
5663 VEC (tree, gc) *v_clobbers;
/* Strategy 1: the target provides an explicit memory_barrier insn.  */
5665 #ifdef HAVE_memory_barrier
5666 if (HAVE_memory_barrier)
5668 emit_insn (gen_memory_barrier ());
/* Strategy 2: a target-registered __sync_synchronize library function.  */
5673 if (synchronize_libfunc != NULL_RTX)
5675 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5679 /* If no explicit memory barrier instruction is available, create an
5680 empty asm stmt with a memory clobber. */
/* Strategy 3: a volatile empty asm with a "memory" clobber acts as a
   compiler-level barrier (no hardware fence is emitted here).  */
5681 v_clobbers = VEC_alloc (tree, gc, 1);
5682 VEC_quick_push (tree, v_clobbers,
5683 tree_cons (NULL, build_string (6, "memory"), NULL));
5684 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5685 gimple_asm_set_volatile (x, true);
5686 expand_asm_stmt (x);
5689 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
/* NOTE(review): the return type line, `rtx mem;'/`rtx insn;' declarations and
   the emit/return statements after GEN_FCN are elided from this view.  */
5692 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5694 enum insn_code icode;
/* Release always stores zero; VAL never comes from the call arguments.  */
5696 rtx val = const0_rtx;
5698 /* Expand the operands. */
5699 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5701 /* If there is an explicit operation in the md file, use it. */
5702 icode = direct_optab_handler (sync_lock_release_optab, mode);
5703 if (icode != CODE_FOR_nothing)
/* Force VAL into a register when the insn's operand predicate rejects the
   bare const0_rtx.  */
5705 if (!insn_data[icode].operand[1].predicate (val, mode))
5706 val = force_reg (mode, val);
5708 insn = GEN_FCN (icode) (mem, val);
5716 /* Otherwise we can implement this operation by emitting a barrier
5717 followed by a store of zero. */
5718 expand_builtin_synchronize ();
5719 emit_move_insn (mem, val);
5722 /* Expand an expression EXP that calls a built-in function,
5723 with result going to TARGET if that's convenient
5724 (and in mode MODE if that's convenient).
5725 SUBTARGET may be used as the target for computing one of EXP's operands.
5726 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): heavily elided excerpt -- the return type, the `switch (fcode)'
   line, braces, `break' statements and several guard lines are not visible
   here (see the jumps in the embedded original line numbers).  Comments note
   the visible dispatch structure only.  */
5729 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5732 tree fndecl = get_callee_fndecl (exp);
5733 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5734 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
/* Machine-specific builtins are delegated entirely to the target hook.  */
5736 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5737 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5739 /* When not optimizing, generate calls to library functions for a certain
5742 && !called_as_built_in (fndecl)
5743 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5744 && fcode != BUILT_IN_ALLOCA
5745 && fcode != BUILT_IN_FREE)
5746 return expand_call (exp, target, ignore);
5748 /* The built-in function expanders test for target == const0_rtx
5749 to determine whether the function's result will be ignored. */
5751 target = const0_rtx;
5753 /* If the result of a pure or const built-in function is ignored, and
5754 none of its arguments are volatile, we can avoid expanding the
5755 built-in call and just evaluate the arguments for side-effects. */
5756 if (target == const0_rtx
5757 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5759 bool volatilep = false;
5761 call_expr_arg_iterator iter;
5763 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5764 if (TREE_THIS_VOLATILE (arg))
5772 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5773 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* --- Floating-point math builtins --- */
5780 CASE_FLT_FN (BUILT_IN_FABS):
5781 target = expand_builtin_fabs (exp, target, subtarget);
5786 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5787 target = expand_builtin_copysign (exp, target, subtarget);
5792 /* Just do a normal library call if we were unable to fold
5794 CASE_FLT_FN (BUILT_IN_CABS):
5797 CASE_FLT_FN (BUILT_IN_EXP):
5798 CASE_FLT_FN (BUILT_IN_EXP10):
5799 CASE_FLT_FN (BUILT_IN_POW10):
5800 CASE_FLT_FN (BUILT_IN_EXP2):
5801 CASE_FLT_FN (BUILT_IN_EXPM1):
5802 CASE_FLT_FN (BUILT_IN_LOGB):
5803 CASE_FLT_FN (BUILT_IN_LOG):
5804 CASE_FLT_FN (BUILT_IN_LOG10):
5805 CASE_FLT_FN (BUILT_IN_LOG2):
5806 CASE_FLT_FN (BUILT_IN_LOG1P):
5807 CASE_FLT_FN (BUILT_IN_TAN):
5808 CASE_FLT_FN (BUILT_IN_ASIN):
5809 CASE_FLT_FN (BUILT_IN_ACOS):
5810 CASE_FLT_FN (BUILT_IN_ATAN):
5811 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5812 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5813 because of possible accuracy problems. */
5814 if (! flag_unsafe_math_optimizations)
5816 CASE_FLT_FN (BUILT_IN_SQRT):
5817 CASE_FLT_FN (BUILT_IN_FLOOR):
5818 CASE_FLT_FN (BUILT_IN_CEIL):
5819 CASE_FLT_FN (BUILT_IN_TRUNC):
5820 CASE_FLT_FN (BUILT_IN_ROUND):
5821 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5822 CASE_FLT_FN (BUILT_IN_RINT):
5823 target = expand_builtin_mathfn (exp, target, subtarget);
5828 CASE_FLT_FN (BUILT_IN_ILOGB):
5829 if (! flag_unsafe_math_optimizations)
5831 CASE_FLT_FN (BUILT_IN_ISINF):
5832 CASE_FLT_FN (BUILT_IN_FINITE):
5833 case BUILT_IN_ISFINITE:
5834 case BUILT_IN_ISNORMAL:
5835 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5840 CASE_FLT_FN (BUILT_IN_LCEIL):
5841 CASE_FLT_FN (BUILT_IN_LLCEIL):
5842 CASE_FLT_FN (BUILT_IN_LFLOOR):
5843 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5844 target = expand_builtin_int_roundingfn (exp, target);
5849 CASE_FLT_FN (BUILT_IN_LRINT):
5850 CASE_FLT_FN (BUILT_IN_LLRINT):
5851 CASE_FLT_FN (BUILT_IN_LROUND):
5852 CASE_FLT_FN (BUILT_IN_LLROUND):
5853 target = expand_builtin_int_roundingfn_2 (exp, target);
5858 CASE_FLT_FN (BUILT_IN_POW):
5859 target = expand_builtin_pow (exp, target, subtarget);
5864 CASE_FLT_FN (BUILT_IN_POWI):
5865 target = expand_builtin_powi (exp, target, subtarget);
5870 CASE_FLT_FN (BUILT_IN_ATAN2):
5871 CASE_FLT_FN (BUILT_IN_LDEXP):
5872 CASE_FLT_FN (BUILT_IN_SCALB):
5873 CASE_FLT_FN (BUILT_IN_SCALBN):
5874 CASE_FLT_FN (BUILT_IN_SCALBLN):
5875 if (! flag_unsafe_math_optimizations)
5878 CASE_FLT_FN (BUILT_IN_FMOD):
5879 CASE_FLT_FN (BUILT_IN_REMAINDER):
5880 CASE_FLT_FN (BUILT_IN_DREM):
5881 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5886 CASE_FLT_FN (BUILT_IN_CEXPI):
5887 target = expand_builtin_cexpi (exp, target, subtarget);
5888 gcc_assert (target);
5891 CASE_FLT_FN (BUILT_IN_SIN):
5892 CASE_FLT_FN (BUILT_IN_COS):
5893 if (! flag_unsafe_math_optimizations)
5895 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5900 CASE_FLT_FN (BUILT_IN_SINCOS):
5901 if (! flag_unsafe_math_optimizations)
5903 target = expand_builtin_sincos (exp);
/* --- __builtin_apply machinery and varargs --- */
5908 case BUILT_IN_APPLY_ARGS:
5909 return expand_builtin_apply_args ();
5911 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5912 FUNCTION with a copy of the parameters described by
5913 ARGUMENTS, and ARGSIZE. It returns a block of memory
5914 allocated on the stack into which is stored all the registers
5915 that might possibly be used for returning the result of a
5916 function. ARGUMENTS is the value returned by
5917 __builtin_apply_args. ARGSIZE is the number of bytes of
5918 arguments that must be copied. ??? How should this value be
5919 computed? We'll also need a safe worst case value for varargs
5921 case BUILT_IN_APPLY:
5922 if (!validate_arglist (exp, POINTER_TYPE,
5923 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5924 && !validate_arglist (exp, REFERENCE_TYPE,
5925 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5931 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5932 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5933 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5935 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5938 /* __builtin_return (RESULT) causes the function to return the
5939 value described by RESULT. RESULT is address of the block of
5940 memory returned by __builtin_apply. */
5941 case BUILT_IN_RETURN:
5942 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5943 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5946 case BUILT_IN_SAVEREGS:
5947 return expand_builtin_saveregs ();
5949 case BUILT_IN_VA_ARG_PACK:
5950 /* All valid uses of __builtin_va_arg_pack () are removed during
5952 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5955 case BUILT_IN_VA_ARG_PACK_LEN:
5956 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5958 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5961 /* Return the address of the first anonymous stack arg. */
5962 case BUILT_IN_NEXT_ARG:
5963 if (fold_builtin_next_arg (exp, false))
5965 return expand_builtin_next_arg ();
5967 case BUILT_IN_CLEAR_CACHE:
5968 target = expand_builtin___clear_cache (exp);
5973 case BUILT_IN_CLASSIFY_TYPE:
5974 return expand_builtin_classify_type (exp);
5976 case BUILT_IN_CONSTANT_P:
5979 case BUILT_IN_FRAME_ADDRESS:
5980 case BUILT_IN_RETURN_ADDRESS:
5981 return expand_builtin_frame_address (fndecl, exp);
5983 /* Returns the address of the area where the structure is returned.
5985 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5986 if (call_expr_nargs (exp) != 0
5987 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5988 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5991 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5993 case BUILT_IN_ALLOCA:
5994 target = expand_builtin_alloca (exp, target);
5999 case BUILT_IN_STACK_SAVE:
6000 return expand_stack_save ();
6002 case BUILT_IN_STACK_RESTORE:
6003 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
/* --- Bit-manipulation builtins expanded through optabs --- */
6006 case BUILT_IN_BSWAP32:
6007 case BUILT_IN_BSWAP64:
6008 target = expand_builtin_bswap (exp, target, subtarget);
6014 CASE_INT_FN (BUILT_IN_FFS):
6015 case BUILT_IN_FFSIMAX:
6016 target = expand_builtin_unop (target_mode, exp, target,
6017 subtarget, ffs_optab);
6022 CASE_INT_FN (BUILT_IN_CLZ):
6023 case BUILT_IN_CLZIMAX:
6024 target = expand_builtin_unop (target_mode, exp, target,
6025 subtarget, clz_optab);
6030 CASE_INT_FN (BUILT_IN_CTZ):
6031 case BUILT_IN_CTZIMAX:
6032 target = expand_builtin_unop (target_mode, exp, target,
6033 subtarget, ctz_optab);
6038 CASE_INT_FN (BUILT_IN_POPCOUNT):
6039 case BUILT_IN_POPCOUNTIMAX:
6040 target = expand_builtin_unop (target_mode, exp, target,
6041 subtarget, popcount_optab);
6046 CASE_INT_FN (BUILT_IN_PARITY):
6047 case BUILT_IN_PARITYIMAX:
6048 target = expand_builtin_unop (target_mode, exp, target,
6049 subtarget, parity_optab);
/* --- String and memory builtins --- */
6054 case BUILT_IN_STRLEN:
6055 target = expand_builtin_strlen (exp, target, target_mode);
6060 case BUILT_IN_STRCPY:
6061 target = expand_builtin_strcpy (exp, target);
6066 case BUILT_IN_STRNCPY:
6067 target = expand_builtin_strncpy (exp, target);
6072 case BUILT_IN_STPCPY:
6073 target = expand_builtin_stpcpy (exp, target, mode);
6078 case BUILT_IN_MEMCPY:
6079 target = expand_builtin_memcpy (exp, target);
6084 case BUILT_IN_MEMPCPY:
6085 target = expand_builtin_mempcpy (exp, target, mode);
6090 case BUILT_IN_MEMSET:
6091 target = expand_builtin_memset (exp, target, mode);
6096 case BUILT_IN_BZERO:
6097 target = expand_builtin_bzero (exp);
6102 case BUILT_IN_STRCMP:
6103 target = expand_builtin_strcmp (exp, target);
6108 case BUILT_IN_STRNCMP:
6109 target = expand_builtin_strncmp (exp, target, mode);
6115 case BUILT_IN_MEMCMP:
6116 target = expand_builtin_memcmp (exp, target, mode);
/* --- setjmp/longjmp and non-local goto support --- */
6121 case BUILT_IN_SETJMP:
6122 /* This should have been lowered to the builtins below. */
6125 case BUILT_IN_SETJMP_SETUP:
6126 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6127 and the receiver label. */
6128 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6130 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6131 VOIDmode, EXPAND_NORMAL);
6132 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6133 rtx label_r = label_rtx (label);
6135 /* This is copied from the handling of non-local gotos. */
6136 expand_builtin_setjmp_setup (buf_addr, label_r);
6137 nonlocal_goto_handler_labels
6138 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6139 nonlocal_goto_handler_labels);
6140 /* ??? Do not let expand_label treat us as such since we would
6141 not want to be both on the list of non-local labels and on
6142 the list of forced labels. */
6143 FORCED_LABEL (label) = 0;
6148 case BUILT_IN_SETJMP_DISPATCHER:
6149 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6150 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6152 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6153 rtx label_r = label_rtx (label);
6155 /* Remove the dispatcher label from the list of non-local labels
6156 since the receiver labels have been added to it above. */
6157 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6162 case BUILT_IN_SETJMP_RECEIVER:
6163 /* __builtin_setjmp_receiver is passed the receiver label. */
6164 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6166 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6167 rtx label_r = label_rtx (label);
6169 expand_builtin_setjmp_receiver (label_r);
6174 /* __builtin_longjmp is passed a pointer to an array of five words.
6175 It's similar to the C library longjmp function but works with
6176 __builtin_setjmp above. */
6177 case BUILT_IN_LONGJMP:
6178 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6180 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6181 VOIDmode, EXPAND_NORMAL);
6182 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6184 if (value != const1_rtx)
6186 error ("%<__builtin_longjmp%> second argument must be 1");
6190 expand_builtin_longjmp (buf_addr, value);
6195 case BUILT_IN_NONLOCAL_GOTO:
6196 target = expand_builtin_nonlocal_goto (exp);
6201 /* This updates the setjmp buffer that is its argument with the value
6202 of the current stack pointer. */
6203 case BUILT_IN_UPDATE_SETJMP_BUF:
6204 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6207 = expand_normal (CALL_EXPR_ARG (exp, 0));
6209 expand_builtin_update_setjmp_buf (buf_addr);
6215 expand_builtin_trap ();
6218 case BUILT_IN_UNREACHABLE:
6219 expand_builtin_unreachable ();
6222 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6223 case BUILT_IN_SIGNBITD32:
6224 case BUILT_IN_SIGNBITD64:
6225 case BUILT_IN_SIGNBITD128:
6226 target = expand_builtin_signbit (exp, target);
6231 /* Various hooks for the DWARF 2 __throw routine. */
6232 case BUILT_IN_UNWIND_INIT:
6233 expand_builtin_unwind_init ();
6235 case BUILT_IN_DWARF_CFA:
6236 return virtual_cfa_rtx;
6237 #ifdef DWARF2_UNWIND_INFO
6238 case BUILT_IN_DWARF_SP_COLUMN:
6239 return expand_builtin_dwarf_sp_column ();
6240 case BUILT_IN_INIT_DWARF_REG_SIZES:
6241 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6244 case BUILT_IN_FROB_RETURN_ADDR:
6245 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6246 case BUILT_IN_EXTRACT_RETURN_ADDR:
6247 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6248 case BUILT_IN_EH_RETURN:
6249 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6250 CALL_EXPR_ARG (exp, 1));
6252 #ifdef EH_RETURN_DATA_REGNO
6253 case BUILT_IN_EH_RETURN_DATA_REGNO:
6254 return expand_builtin_eh_return_data_regno (exp);
6256 case BUILT_IN_EXTEND_POINTER:
6257 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6258 case BUILT_IN_EH_POINTER:
6259 return expand_builtin_eh_pointer (exp);
6260 case BUILT_IN_EH_FILTER:
6261 return expand_builtin_eh_filter (exp);
6262 case BUILT_IN_EH_COPY_VALUES:
6263 return expand_builtin_eh_copy_values (exp);
6265 case BUILT_IN_VA_START:
6266 return expand_builtin_va_start (exp);
6267 case BUILT_IN_VA_END:
6268 return expand_builtin_va_end (exp);
6269 case BUILT_IN_VA_COPY:
6270 return expand_builtin_va_copy (exp);
6271 case BUILT_IN_EXPECT:
6272 return expand_builtin_expect (exp, target);
6273 case BUILT_IN_PREFETCH:
6274 expand_builtin_prefetch (exp);
6277 case BUILT_IN_PROFILE_FUNC_ENTER:
6278 return expand_builtin_profile_func (false);
6279 case BUILT_IN_PROFILE_FUNC_EXIT:
6280 return expand_builtin_profile_func (true);
6282 case BUILT_IN_INIT_TRAMPOLINE:
6283 return expand_builtin_init_trampoline (exp);
6284 case BUILT_IN_ADJUST_TRAMPOLINE:
6285 return expand_builtin_adjust_trampoline (exp);
6288 case BUILT_IN_EXECL:
6289 case BUILT_IN_EXECV:
6290 case BUILT_IN_EXECLP:
6291 case BUILT_IN_EXECLE:
6292 case BUILT_IN_EXECVP:
6293 case BUILT_IN_EXECVE:
6294 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
/* --- __sync atomic builtins: each group subtracts its FOO_1 code to
   recover the operand mode via get_builtin_sync_mode.  --- */
6299 case BUILT_IN_FETCH_AND_ADD_1:
6300 case BUILT_IN_FETCH_AND_ADD_2:
6301 case BUILT_IN_FETCH_AND_ADD_4:
6302 case BUILT_IN_FETCH_AND_ADD_8:
6303 case BUILT_IN_FETCH_AND_ADD_16:
6304 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6305 target = expand_builtin_sync_operation (mode, exp, PLUS,
6306 false, target, ignore);
6311 case BUILT_IN_FETCH_AND_SUB_1:
6312 case BUILT_IN_FETCH_AND_SUB_2:
6313 case BUILT_IN_FETCH_AND_SUB_4:
6314 case BUILT_IN_FETCH_AND_SUB_8:
6315 case BUILT_IN_FETCH_AND_SUB_16:
6316 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6317 target = expand_builtin_sync_operation (mode, exp, MINUS,
6318 false, target, ignore);
6323 case BUILT_IN_FETCH_AND_OR_1:
6324 case BUILT_IN_FETCH_AND_OR_2:
6325 case BUILT_IN_FETCH_AND_OR_4:
6326 case BUILT_IN_FETCH_AND_OR_8:
6327 case BUILT_IN_FETCH_AND_OR_16:
6328 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6329 target = expand_builtin_sync_operation (mode, exp, IOR,
6330 false, target, ignore);
6335 case BUILT_IN_FETCH_AND_AND_1:
6336 case BUILT_IN_FETCH_AND_AND_2:
6337 case BUILT_IN_FETCH_AND_AND_4:
6338 case BUILT_IN_FETCH_AND_AND_8:
6339 case BUILT_IN_FETCH_AND_AND_16:
6340 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6341 target = expand_builtin_sync_operation (mode, exp, AND,
6342 false, target, ignore);
6347 case BUILT_IN_FETCH_AND_XOR_1:
6348 case BUILT_IN_FETCH_AND_XOR_2:
6349 case BUILT_IN_FETCH_AND_XOR_4:
6350 case BUILT_IN_FETCH_AND_XOR_8:
6351 case BUILT_IN_FETCH_AND_XOR_16:
6352 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6353 target = expand_builtin_sync_operation (mode, exp, XOR,
6354 false, target, ignore);
6359 case BUILT_IN_FETCH_AND_NAND_1:
6360 case BUILT_IN_FETCH_AND_NAND_2:
6361 case BUILT_IN_FETCH_AND_NAND_4:
6362 case BUILT_IN_FETCH_AND_NAND_8:
6363 case BUILT_IN_FETCH_AND_NAND_16:
6364 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
/* NOT here encodes NAND by convention of expand_builtin_sync_operation.  */
6365 target = expand_builtin_sync_operation (mode, exp, NOT,
6366 false, target, ignore);
6371 case BUILT_IN_ADD_AND_FETCH_1:
6372 case BUILT_IN_ADD_AND_FETCH_2:
6373 case BUILT_IN_ADD_AND_FETCH_4:
6374 case BUILT_IN_ADD_AND_FETCH_8:
6375 case BUILT_IN_ADD_AND_FETCH_16:
6376 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6377 target = expand_builtin_sync_operation (mode, exp, PLUS,
6378 true, target, ignore);
6383 case BUILT_IN_SUB_AND_FETCH_1:
6384 case BUILT_IN_SUB_AND_FETCH_2:
6385 case BUILT_IN_SUB_AND_FETCH_4:
6386 case BUILT_IN_SUB_AND_FETCH_8:
6387 case BUILT_IN_SUB_AND_FETCH_16:
6388 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6389 target = expand_builtin_sync_operation (mode, exp, MINUS,
6390 true, target, ignore);
6395 case BUILT_IN_OR_AND_FETCH_1:
6396 case BUILT_IN_OR_AND_FETCH_2:
6397 case BUILT_IN_OR_AND_FETCH_4:
6398 case BUILT_IN_OR_AND_FETCH_8:
6399 case BUILT_IN_OR_AND_FETCH_16:
6400 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6401 target = expand_builtin_sync_operation (mode, exp, IOR,
6402 true, target, ignore);
6407 case BUILT_IN_AND_AND_FETCH_1:
6408 case BUILT_IN_AND_AND_FETCH_2:
6409 case BUILT_IN_AND_AND_FETCH_4:
6410 case BUILT_IN_AND_AND_FETCH_8:
6411 case BUILT_IN_AND_AND_FETCH_16:
6412 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6413 target = expand_builtin_sync_operation (mode, exp, AND,
6414 true, target, ignore);
6419 case BUILT_IN_XOR_AND_FETCH_1:
6420 case BUILT_IN_XOR_AND_FETCH_2:
6421 case BUILT_IN_XOR_AND_FETCH_4:
6422 case BUILT_IN_XOR_AND_FETCH_8:
6423 case BUILT_IN_XOR_AND_FETCH_16:
6424 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6425 target = expand_builtin_sync_operation (mode, exp, XOR,
6426 true, target, ignore);
6431 case BUILT_IN_NAND_AND_FETCH_1:
6432 case BUILT_IN_NAND_AND_FETCH_2:
6433 case BUILT_IN_NAND_AND_FETCH_4:
6434 case BUILT_IN_NAND_AND_FETCH_8:
6435 case BUILT_IN_NAND_AND_FETCH_16:
6436 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6437 target = expand_builtin_sync_operation (mode, exp, NOT,
6438 true, target, ignore);
6443 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6444 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6445 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6446 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6447 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
/* The boolean form requires a register TARGET before MODE is reused for
   the operand mode below.  */
6448 if (mode == VOIDmode)
6449 mode = TYPE_MODE (boolean_type_node);
6450 if (!target || !register_operand (target, mode))
6451 target = gen_reg_rtx (mode);
6453 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6454 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6459 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6460 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6461 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6462 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6463 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6464 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6465 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6470 case BUILT_IN_LOCK_TEST_AND_SET_1:
6471 case BUILT_IN_LOCK_TEST_AND_SET_2:
6472 case BUILT_IN_LOCK_TEST_AND_SET_4:
6473 case BUILT_IN_LOCK_TEST_AND_SET_8:
6474 case BUILT_IN_LOCK_TEST_AND_SET_16:
6475 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6476 target = expand_builtin_lock_test_and_set (mode, exp, target);
6481 case BUILT_IN_LOCK_RELEASE_1:
6482 case BUILT_IN_LOCK_RELEASE_2:
6483 case BUILT_IN_LOCK_RELEASE_4:
6484 case BUILT_IN_LOCK_RELEASE_8:
6485 case BUILT_IN_LOCK_RELEASE_16:
6486 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6487 expand_builtin_lock_release (mode, exp);
6490 case BUILT_IN_SYNCHRONIZE:
6491 expand_builtin_synchronize ();
/* --- Object-size checking (_FORTIFY_SOURCE) builtins --- */
6494 case BUILT_IN_OBJECT_SIZE:
6495 return expand_builtin_object_size (exp);
6497 case BUILT_IN_MEMCPY_CHK:
6498 case BUILT_IN_MEMPCPY_CHK:
6499 case BUILT_IN_MEMMOVE_CHK:
6500 case BUILT_IN_MEMSET_CHK:
6501 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6506 case BUILT_IN_STRCPY_CHK:
6507 case BUILT_IN_STPCPY_CHK:
6508 case BUILT_IN_STRNCPY_CHK:
6509 case BUILT_IN_STRCAT_CHK:
6510 case BUILT_IN_STRNCAT_CHK:
6511 case BUILT_IN_SNPRINTF_CHK:
6512 case BUILT_IN_VSNPRINTF_CHK:
6513 maybe_emit_chk_warning (exp, fcode);
6516 case BUILT_IN_SPRINTF_CHK:
6517 case BUILT_IN_VSPRINTF_CHK:
6518 maybe_emit_sprintf_chk_warning (exp, fcode);
6522 maybe_emit_free_warning (exp);
6525 default: /* just do library call, if unknown builtin */
6529 /* The switch statement above can drop through to cause the function
6530 to be called normally. */
6531 return expand_call (exp, target, ignore);
6534 /* Determine whether a tree node represents a call to a built-in
6535 function. If the tree T is a call to a built-in function with
6536 the right number of arguments of the appropriate types, return
6537 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6538 Otherwise the return value is END_BUILTINS. */
6540 enum built_in_function
6541 builtin_mathfn_code (const_tree t)
6543 const_tree fndecl, arg, parmlist;
6544 const_tree argtype, parmtype;
6545 const_call_expr_arg_iterator iter;
/* Only plain CALL_EXPRs through an ADDR_EXPR of the callee qualify.  */
6547 if (TREE_CODE (t) != CALL_EXPR
6548 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6549 return END_BUILTINS;
6551 fndecl = get_callee_fndecl (t);
6552 if (fndecl == NULL_TREE
6553 || TREE_CODE (fndecl) != FUNCTION_DECL
6554 || ! DECL_BUILT_IN (fndecl)
6555 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6556 return END_BUILTINS;
/* Walk the declared parameter types and the actual arguments in
   lock-step, requiring each argument to match its parameter's type
   class.  */
6558 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6559 init_const_call_expr_arg_iterator (t, &iter);
6560 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6562 /* If a function doesn't take a variable number of arguments,
6563 the last element in the list will have type `void'. */
6564 parmtype = TREE_VALUE (parmlist);
6565 if (VOID_TYPE_P (parmtype))
/* Extra actual arguments beyond the declared list mean a mismatch.  */
6567 if (more_const_call_expr_args_p (&iter))
6568 return END_BUILTINS;
6569 return DECL_FUNCTION_CODE (fndecl);
/* Too few actual arguments also disqualifies the call.  */
6572 if (! more_const_call_expr_args_p (&iter))
6573 return END_BUILTINS;
6575 arg = next_const_call_expr_arg (&iter);
6576 argtype = TREE_TYPE (arg);
/* Compatibility is judged per broad type class, not exact type.  */
6578 if (SCALAR_FLOAT_TYPE_P (parmtype))
6580 if (! SCALAR_FLOAT_TYPE_P (argtype))
6581 return END_BUILTINS;
6583 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6585 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6586 return END_BUILTINS;
6588 else if (POINTER_TYPE_P (parmtype))
6590 if (! POINTER_TYPE_P (argtype))
6591 return END_BUILTINS;
6593 else if (INTEGRAL_TYPE_P (parmtype))
6595 if (! INTEGRAL_TYPE_P (argtype))
6596 return END_BUILTINS;
/* Any other parameter type class is not recognized here.  */
6599 return END_BUILTINS;
6602 /* Variable-length argument list. */
6603 return DECL_FUNCTION_CODE (fndecl);
6606 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6607 evaluate to a constant.  Returns integer_one_node, integer_zero_node,
6608 or (in the elided fall-through) no folding when the answer is not yet
6609 known. */
6610 fold_builtin_constant_p (tree arg)
6612 /* We return 1 for a numeric type that's known to be a constant
6613 value at compile-time or for an aggregate type that's a
6614 literal constant. */
6617 /* If we know this is a constant, emit the constant of one. */
6618 if (CONSTANT_CLASS_P (arg)
6619 || (TREE_CODE (arg) == CONSTRUCTOR
6620 && TREE_CONSTANT (arg)))
6621 return integer_one_node;
/* The address of a string literal, or of its element 0, is also a
   compile-time constant.  */
6622 if (TREE_CODE (arg) == ADDR_EXPR)
6624 tree op = TREE_OPERAND (arg, 0);
6625 if (TREE_CODE (op) == STRING_CST
6626 || (TREE_CODE (op) == ARRAY_REF
6627 && integer_zerop (TREE_OPERAND (op, 1))
6628 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6629 return integer_one_node;
6632 /* If this expression has side effects, show we don't know it to be a
6633 constant. Likewise if it's a pointer or aggregate type since in
6634 those case we only want literals, since those are only optimized
6635 when generating RTL, not later.
6636 And finally, if we are compiling an initializer, not code, we
6637 need to return a definite result now; there's not going to be any
6638 more optimization done. */
6639 if (TREE_SIDE_EFFECTS (arg)
6640 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6641 || POINTER_TYPE_P (TREE_TYPE (arg))
6643 || folding_initializer)
6644 return integer_zero_node;
6649 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6650 return it as a truthvalue. */
/* NOTE(review): the static return type line is elided from this view.  */
6653 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6655 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
/* Pull the parameter and return types off the __builtin_expect decl so the
   arguments can be converted to exactly what it expects.  */
6657 fn = built_in_decls[BUILT_IN_EXPECT];
6658 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6659 ret_type = TREE_TYPE (TREE_TYPE (fn));
6660 pred_type = TREE_VALUE (arg_types);
6661 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6663 pred = fold_convert_loc (loc, pred_type, pred);
6664 expected = fold_convert_loc (loc, expected_type, expected);
6665 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
/* Turn the long-valued call back into a truthvalue by comparing against
   zero, preserving PRED's type.  */
6667 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6668 build_int_cst (ret_type, 0));
6671 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6672 NULL_TREE if no simplification is possible. */
/* NOTE(review): sampled dump -- declarations of `inner' and `fndecl', the
   early `return arg0;' paths and the closing brace are on missing lines;
   restore the full text before editing code.  */
6675 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6678 enum tree_code code;
6680 /* If this is a builtin_expect within a builtin_expect keep the
6681 inner one. See through a comparison against a constant. It
6682 might have been added to create a truthvalue. */
6684 if (COMPARISON_CLASS_P (inner)
6685 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6686 inner = TREE_OPERAND (inner, 0);
6688 if (TREE_CODE (inner) == CALL_EXPR
6689 && (fndecl = get_callee_fndecl (inner))
6690 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6691 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6694 /* Distribute the expected value over short-circuiting operators.
6695 See through the cast from truthvalue_type_node to long. */
6697 while (TREE_CODE (inner) == NOP_EXPR
6698 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6699 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6700 inner = TREE_OPERAND (inner, 0);
/* __builtin_expect (a && b, v) becomes
   __builtin_expect (a, v) && __builtin_expect (b, v); likewise for ||.  */
6702 code = TREE_CODE (inner);
6703 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6705 tree op0 = TREE_OPERAND (inner, 0);
6706 tree op1 = TREE_OPERAND (inner, 1);
6708 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6709 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6710 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6712 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6715 /* If the argument isn't invariant then there's nothing else we can do. */
6716 if (!TREE_CONSTANT (arg0))
6719 /* If we expect that a comparison against the argument will fold to
6720 a constant return the constant. In practice, this means a true
6721 constant or the address of a non-weak symbol. */
6724 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip COMPONENT_REF/ARRAY_REF wrappers to reach the underlying decl;
   the do-while header is on a missing line.  */
6728 inner = TREE_OPERAND (inner, 0);
6730 while (TREE_CODE (inner) == COMPONENT_REF
6731 || TREE_CODE (inner) == ARRAY_REF);
/* A weak symbol's address is not a compile-time constant.  */
6732 if ((TREE_CODE (inner) == VAR_DECL
6733 || TREE_CODE (inner) == FUNCTION_DECL)
6734 && DECL_WEAK (inner))
6738 /* Otherwise, ARG0 already has the proper type for the return value. */
6742 /* Fold a call to __builtin_classify_type with argument ARG. */
/* NOTE(review): the guard between these two returns (presumably a null-ARG
   check selecting no_type_class) is on a line missing from this sampled
   dump -- confirm against the full source.  */
6745 fold_builtin_classify_type (tree arg)
6748 return build_int_cst (NULL_TREE, no_type_class);
6750 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6753 /* Fold a call to __builtin_strlen with argument ARG. */
/* NOTE(review): sampled dump -- the NULL_TREE returns and braces are on
   missing lines.  Folds strlen of a known-constant string via c_strlen,
   converting the result to TYPE.  */
6756 fold_builtin_strlen (location_t loc, tree type, tree arg)
6758 if (!validate_arg (arg, POINTER_TYPE))
6762 tree len = c_strlen (arg, 0);
6765 return fold_convert_loc (loc, type, len);
6771 /* Fold a call to __builtin_inf or __builtin_huge_val. */
/* NOTE(review): the real_inf() call that fills `real' is on a line missing
   from this sampled dump (between 6786 and 6789).  */
6774 fold_builtin_inf (location_t loc, tree type, int warn)
6776 REAL_VALUE_TYPE real;
6778 /* __builtin_inff is intended to be usable to define INFINITY on all
6779 targets. If an infinity is not available, INFINITY expands "to a
6780 positive constant of type float that overflows at translation
6781 time", footnote "In this case, using INFINITY will violate the
6782 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6783 Thus we pedwarn to ensure this constraint violation is
6785 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6786 pedwarn (loc, 0, "target format does not support infinity");
6789 return build_real (type, real);
6792 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
/* NOTE(review): sampled dump -- the `const char *str' declaration, the
   str == NULL check and the NULL_TREE returns are on missing lines.
   QUIET selects quiet vs. signalling NaN via real_nan.  */
6795 fold_builtin_nan (tree arg, tree type, int quiet)
6797 REAL_VALUE_TYPE real;
6800 if (!validate_arg (arg, POINTER_TYPE))
6802 str = c_getstr (arg);
6806 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6809 return build_real (type, real);
6812 /* Return true if the floating point expression T has an integer value.
6813 We also allow +Inf, -Inf and NaN to be considered integer values. */
/* NOTE(review): sampled dump -- the case labels for most of this switch
   (FLOAT_EXPR, ABS_EXPR, arithmetic codes, COND_EXPR, REAL_CST, NOP_EXPR,
   CALL_EXPR) sit on missing lines; only the recursion bodies are visible.
   Restore the full text before editing.  */
6816 integer_valued_real_p (tree t)
6818 switch (TREE_CODE (t))
6825 return integer_valued_real_p (TREE_OPERAND (t, 0));
6830 return integer_valued_real_p (TREE_OPERAND (t, 1));
6837 return integer_valued_real_p (TREE_OPERAND (t, 0))
6838 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* Conditional: integer-valued only when both arms are.  */
6841 return integer_valued_real_p (TREE_OPERAND (t, 1))
6842 && integer_valued_real_p (TREE_OPERAND (t, 2));
6845 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* Conversions: from an integer type always integer-valued; from a real
   type, recurse on the operand.  */
6849 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6850 if (TREE_CODE (type) == INTEGER_TYPE)
6852 if (TREE_CODE (type) == REAL_TYPE)
6853 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Rounding builtins always produce integer values; fmin/fmax do when
   both arguments do.  */
6858 switch (builtin_mathfn_code (t))
6860 CASE_FLT_FN (BUILT_IN_CEIL):
6861 CASE_FLT_FN (BUILT_IN_FLOOR):
6862 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6863 CASE_FLT_FN (BUILT_IN_RINT):
6864 CASE_FLT_FN (BUILT_IN_ROUND):
6865 CASE_FLT_FN (BUILT_IN_TRUNC):
6868 CASE_FLT_FN (BUILT_IN_FMIN):
6869 CASE_FLT_FN (BUILT_IN_FMAX):
6870 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6871 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6884 /* FNDECL is assumed to be a builtin where truncation can be propagated
6885 across (for instance floor((double)f) == (double)floorf (f).
6886 Do the transformation for a call with argument ARG. */
/* NOTE(review): sampled dump -- the `return arg;' bodies of the two early
   exits, the optimize_insn_for_size guard and the closing brace are on
   missing lines.  */
6889 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6891 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6893 if (!validate_arg (arg, REAL_TYPE))
6896 /* Integer rounding functions are idempotent. */
6897 if (fcode == builtin_mathfn_code (arg))
6900 /* If argument is already integer valued, and we don't need to worry
6901 about setting errno, there's no need to perform rounding. */
6902 if (! flag_errno_math && integer_valued_real_p (arg))
/* Narrow e.g. trunc((double)f) to (double)truncf(f) when a builtin for
   the narrower type exists.  */
6907 tree arg0 = strip_float_extensions (arg);
6908 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6909 tree newtype = TREE_TYPE (arg0);
6912 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6913 && (decl = mathfn_built_in (newtype, fcode)))
6914 return fold_convert_loc (loc, ftype,
6915 build_call_expr_loc (loc, decl, 1,
6916 fold_convert_loc (loc,
6923 /* FNDECL is assumed to be builtin which can narrow the FP type of
6924 the argument, for instance lround((double)f) -> lroundf (f).
6925 Do the transformation for a call with argument ARG. */
/* NOTE(review): sampled dump -- the switch header for the ll->l
   canonicalization, its break/default lines and the final NULL_TREE
   return are on missing lines.  */
6928 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6930 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6932 if (!validate_arg (arg, REAL_TYPE))
6935 /* If argument is already integer valued, and we don't need to worry
6936 about setting errno, there's no need to perform rounding. */
6937 if (! flag_errno_math && integer_valued_real_p (arg))
6938 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6939 TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Narrow e.g. lround((double)f) to lroundf(f).  */
6943 tree ftype = TREE_TYPE (arg);
6944 tree arg0 = strip_float_extensions (arg);
6945 tree newtype = TREE_TYPE (arg0);
6948 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6949 && (decl = mathfn_built_in (newtype, fcode)))
6950 return build_call_expr_loc (loc, decl, 1,
6951 fold_convert_loc (loc, newtype, arg0));
6954 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6955 sizeof (long long) == sizeof (long). */
6956 if (TYPE_PRECISION (long_long_integer_type_node)
6957 == TYPE_PRECISION (long_integer_type_node))
6959 tree newfn = NULL_TREE;
6962 CASE_FLT_FN (BUILT_IN_LLCEIL):
6963 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6966 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6967 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6970 CASE_FLT_FN (BUILT_IN_LLROUND):
6971 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6974 CASE_FLT_FN (BUILT_IN_LLRINT):
6975 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Convert the long result back to the long long return type.  */
6984 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6985 return fold_convert_loc (loc,
6986 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6993 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6994 return type. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- the `tree res;' declaration, the mpfr_hypot
   argument to do_mpfr_arg2, several `return res;'/NULL_TREE lines and
   braces are on missing lines.  */
6997 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7001 if (!validate_arg (arg, COMPLEX_TYPE)
7002 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7005 /* Calculate the result when the argument is a constant. */
7006 if (TREE_CODE (arg) == COMPLEX_CST
7007 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7011 if (TREE_CODE (arg) == COMPLEX_EXPR)
7013 tree real = TREE_OPERAND (arg, 0);
7014 tree imag = TREE_OPERAND (arg, 1);
7016 /* If either part is zero, cabs is fabs of the other. */
7017 if (real_zerop (real))
7018 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7019 if (real_zerop (imag))
7020 return fold_build1_loc (loc, ABS_EXPR, type, real);
7022 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7023 if (flag_unsafe_math_optimizations
7024 && operand_equal_p (real, imag, OEP_PURE_SAME))
7026 const REAL_VALUE_TYPE sqrt2_trunc
7027 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7029 return fold_build2_loc (loc, MULT_EXPR, type,
7030 fold_build1_loc (loc, ABS_EXPR, type, real),
7031 build_real (type, sqrt2_trunc));
7035 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7036 if (TREE_CODE (arg) == NEGATE_EXPR
7037 || TREE_CODE (arg) == CONJ_EXPR)
7038 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7040 /* Don't do this when optimizing for size. */
/* Expand cabs(z) to sqrt(re*re + im*im) when a sqrt builtin exists and
   we are optimizing for speed with unsafe math allowed.  */
7041 if (flag_unsafe_math_optimizations
7042 && optimize && optimize_function_for_speed_p (cfun))
7044 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7046 if (sqrtfn != NULL_TREE)
7048 tree rpart, ipart, result;
/* Save ARG and its parts so side effects are evaluated once.  */
7050 arg = builtin_save_expr (arg);
7052 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7053 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7055 rpart = builtin_save_expr (rpart);
7056 ipart = builtin_save_expr (ipart);
7058 result = fold_build2_loc (loc, PLUS_EXPR, type,
7059 fold_build2_loc (loc, MULT_EXPR, type,
7061 fold_build2_loc (loc, MULT_EXPR, type,
7064 return build_call_expr_loc (loc, sqrtfn, 1, result);
7071 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7072 complex tree type of the result. If NEG is true, the imaginary
7073 zero is negative. */
/* NOTE(review): the real_inf() call and the `rzero.sign = neg;' assignment
   sit on lines missing from this sampled dump (7079-7081); recover them
   before editing.  */
7076 build_complex_cproj (tree type, bool neg)
7078 REAL_VALUE_TYPE rinf, rzero = dconst0;
7082 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7083 build_real (TREE_TYPE (type), rzero));
7086 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7087 return type. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- the `return' keyword belonging to the
   second omit_one_operand_loc call (original line 7133) and the final
   NULL_TREE return are on missing lines; what looks like a dropped
   statement below is a sampling artifact, not a bug to "fix" blindly.  */
7090 fold_builtin_cproj (location_t loc, tree arg, tree type)
7092 if (!validate_arg (arg, COMPLEX_TYPE)
7093 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7096 /* If there are no infinities, return arg. */
7097 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7098 return non_lvalue_loc (loc, arg);
7100 /* Calculate the result when the argument is a constant. */
7101 if (TREE_CODE (arg) == COMPLEX_CST)
7103 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7104 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
/* cproj of any infinity is (inf, copysign (0, imag)).  */
7106 if (real_isinf (real) || real_isinf (imag))
7107 return build_complex_cproj (type, imag->sign);
7111 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7113 tree real = TREE_OPERAND (arg, 0);
7114 tree imag = TREE_OPERAND (arg, 1);
7119 /* If the real part is inf and the imag part is known to be
7120 nonnegative, return (inf + 0i). Remember side-effects are
7121 possible in the imag part. */
7122 if (TREE_CODE (real) == REAL_CST
7123 && real_isinf (TREE_REAL_CST_PTR (real))
7124 && tree_expr_nonnegative_p (imag))
7125 return omit_one_operand_loc (loc, type,
7126 build_complex_cproj (type, false),
7129 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7130 Remember side-effects are possible in the real part. */
7131 if (TREE_CODE (imag) == REAL_CST
7132 && real_isinf (TREE_REAL_CST_PTR (imag)))
7134 omit_one_operand_loc (loc, type,
7135 build_complex_cproj (type, TREE_REAL_CST_PTR
7136 (imag)->sign), arg);
7142 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7143 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- `tree res;', several returns, the
   `tree tree_root;'/`tree narg1;' declarations and braces are on missing
   lines.  All transformations below are guarded by
   flag_unsafe_math_optimizations where they can change results.  */
7146 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7149 enum built_in_function fcode;
7152 if (!validate_arg (arg, REAL_TYPE))
7155 /* Calculate the result when the argument is a constant. */
7156 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7159 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7160 fcode = builtin_mathfn_code (arg);
7161 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7163 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7164 arg = fold_build2_loc (loc, MULT_EXPR, type,
7165 CALL_EXPR_ARG (arg, 0),
7166 build_real (type, dconsthalf));
7167 return build_call_expr_loc (loc, expfn, 1, arg);
7170 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7171 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7173 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7177 tree arg0 = CALL_EXPR_ARG (arg, 0);
7179 /* The inner root was either sqrt or cbrt. */
7180 /* This was a conditional expression but it triggered a bug
7182 REAL_VALUE_TYPE dconstroot;
7183 if (BUILTIN_SQRT_P (fcode))
7184 dconstroot = dconsthalf;
7186 dconstroot = dconst_third ();
7188 /* Adjust for the outer root. */
/* Halving the exponent: 1/2 -> 1/4, 1/3 -> 1/6.  */
7189 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7190 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7191 tree_root = build_real (type, dconstroot);
7192 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7196 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7197 if (flag_unsafe_math_optimizations
7198 && (fcode == BUILT_IN_POW
7199 || fcode == BUILT_IN_POWF
7200 || fcode == BUILT_IN_POWL))
7202 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7203 tree arg0 = CALL_EXPR_ARG (arg, 0);
7204 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* |x| guards against pow's domain restriction for negative bases.  */
7206 if (!tree_expr_nonnegative_p (arg0))
7207 arg0 = build1 (ABS_EXPR, type, arg0);
7208 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7209 build_real (type, dconsthalf));
7210 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7216 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7217 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- `tree res;', powfn NULL checks,
   `tree tree_root;' declarations, returns and braces are on missing
   lines.  Everything below line 7232 is under
   flag_unsafe_math_optimizations.  */
7220 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7222 const enum built_in_function fcode = builtin_mathfn_code (arg);
7225 if (!validate_arg (arg, REAL_TYPE))
7228 /* Calculate the result when the argument is a constant. */
7229 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7232 if (flag_unsafe_math_optimizations)
7234 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7235 if (BUILTIN_EXPONENT_P (fcode))
7237 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7238 const REAL_VALUE_TYPE third_trunc =
7239 real_value_truncate (TYPE_MODE (type), dconst_third ());
7240 arg = fold_build2_loc (loc, MULT_EXPR, type,
7241 CALL_EXPR_ARG (arg, 0),
7242 build_real (type, third_trunc));
7243 return build_call_expr_loc (loc, expfn, 1, arg);
7246 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7247 if (BUILTIN_SQRT_P (fcode))
7249 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7253 tree arg0 = CALL_EXPR_ARG (arg, 0);
7255 REAL_VALUE_TYPE dconstroot = dconst_third ();
/* Halve 1/3 to get the 1/6 exponent for the combined root.  */
7257 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7258 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7259 tree_root = build_real (type, dconstroot);
7260 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7264 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7265 if (BUILTIN_CBRT_P (fcode))
7267 tree arg0 = CALL_EXPR_ARG (arg, 0);
7268 if (tree_expr_nonnegative_p (arg0))
7270 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7275 REAL_VALUE_TYPE dconstroot;
/* (1/3) * (1/3) = 1/9.  */
7277 real_arithmetic (&dconstroot, MULT_EXPR,
7278 dconst_third_ptr (), dconst_third_ptr ());
7279 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7280 tree_root = build_real (type, dconstroot);
7281 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7286 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7287 if (fcode == BUILT_IN_POW
7288 || fcode == BUILT_IN_POWF
7289 || fcode == BUILT_IN_POWL)
7291 tree arg00 = CALL_EXPR_ARG (arg, 0);
7292 tree arg01 = CALL_EXPR_ARG (arg, 1);
7293 if (tree_expr_nonnegative_p (arg00))
7295 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7296 const REAL_VALUE_TYPE dconstroot
7297 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7298 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7299 build_real (type, dconstroot));
7300 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7307 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7308 TYPE is the type of the return value. Return NULL_TREE if no
7309 simplification can be made. */
/* NOTE(review): sampled dump -- the `tree res, narg;' declaration, the
   NULL_TREE returns and braces are on missing lines.  */
7312 fold_builtin_cos (location_t loc,
7313 tree arg, tree type, tree fndecl)
7317 if (!validate_arg (arg, REAL_TYPE))
7320 /* Calculate the result when the argument is a constant. */
7321 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7324 /* Optimize cos(-x) into cos (x). */
7325 if ((narg = fold_strip_sign_ops (arg)))
7326 return build_call_expr_loc (loc, fndecl, 1, narg);
7331 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7332 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- local declarations, NULL_TREE returns and
   braces are on missing lines.  Mirrors fold_builtin_cos but folds with
   mpfr_cosh and exploits cosh's evenness.  */
7335 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7337 if (validate_arg (arg, REAL_TYPE))
7341 /* Calculate the result when the argument is a constant. */
7342 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7345 /* Optimize cosh(-x) into cosh (x). */
7346 if ((narg = fold_strip_sign_ops (arg)))
7347 return build_call_expr_loc (loc, fndecl, 1, narg);
7353 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7354 argument ARG. TYPE is the type of the return value. Return
7355 NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- the `bool hyper' parameter line, `tree tmp;'
   and the returns are on missing lines.  Both ccos and ccosh are even, so
   stripping sign operations from ARG is valid.  */
7358 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7361 if (validate_arg (arg, COMPLEX_TYPE)
7362 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7366 /* Calculate the result when the argument is a constant. */
7367 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7370 /* Optimize fn(-x) into fn(x). */
7371 if ((tmp = fold_strip_sign_ops (arg)))
7372 return build_call_expr_loc (loc, fndecl, 1, tmp);
7378 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7379 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- `tree res;', NULL_TREE returns and braces
   are on missing lines.  */
7382 fold_builtin_tan (tree arg, tree type)
7384 enum built_in_function fcode;
7387 if (!validate_arg (arg, REAL_TYPE))
7390 /* Calculate the result when the argument is a constant. */
7391 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7394 /* Optimize tan(atan(x)) = x. */
7395 fcode = builtin_mathfn_code (arg);
7396 if (flag_unsafe_math_optimizations
7397 && (fcode == BUILT_IN_ATAN
7398 || fcode == BUILT_IN_ATANF
7399 || fcode == BUILT_IN_ATANL))
7400 return CALL_EXPR_ARG (arg, 0);
7405 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7406 NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- declarations of `type', `res', `fn',
   `call' and several NULL_TREE returns sit on missing lines.  */
7409 fold_builtin_sincos (location_t loc,
7410 tree arg0, tree arg1, tree arg2)
7415 if (!validate_arg (arg0, REAL_TYPE)
7416 || !validate_arg (arg1, POINTER_TYPE)
7417 || !validate_arg (arg2, POINTER_TYPE))
7420 type = TREE_TYPE (arg0);
7422 /* Calculate the result when the argument is a constant. */
7423 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7426 /* Canonicalize sincos to cexpi. */
7427 if (!TARGET_C99_FUNCTIONS)
7429 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
/* Evaluate cexpi once, then store its imaginary part through arg1
   (sin) and its real part through arg2 (cos).  */
7433 call = build_call_expr_loc (loc, fn, 1, arg0);
7434 call = builtin_save_expr (call);
7436 return build2 (COMPOUND_EXPR, void_type_node,
7437 build2 (MODIFY_EXPR, void_type_node,
7438 build_fold_indirect_ref_loc (loc, arg1),
7439 build1 (IMAGPART_EXPR, type, call)),
7440 build2 (MODIFY_EXPR, void_type_node,
7441 build_fold_indirect_ref_loc (loc, arg2),
7442 build1 (REALPART_EXPR, type, call)));
7445 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7446 NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- declarations of `res'/`rtype', NULL checks
   on ifn/rfn, the realp/imagp extraction guards and the trailing operands
   of the final fold_build2_loc are on missing lines; the expression below
   is visibly truncated mid-call.  */
7449 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7452 tree realp, imagp, ifn;
7455 if (!validate_arg (arg0, COMPLEX_TYPE)
7456 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7459 /* Calculate the result when the argument is a constant. */
7460 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7463 rtype = TREE_TYPE (TREE_TYPE (arg0));
7465 /* In case we can figure out the real part of arg0 and it is constant zero
7467 if (!TARGET_C99_FUNCTIONS)
7469 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
/* cexp (0 + i*y) == cexpi (y).  */
7473 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7474 && real_zerop (realp))
7476 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7477 return build_call_expr_loc (loc, ifn, 1, narg);
7480 /* In case we can easily decompose real and imaginary parts split cexp
7481 to exp (r) * cexpi (i). */
7482 if (flag_unsafe_math_optimizations
7485 tree rfn, rcall, icall;
7487 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7491 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
/* Save both calls so each is evaluated exactly once.  */
7495 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7496 icall = builtin_save_expr (icall);
7497 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7498 rcall = builtin_save_expr (rcall);
7499 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7500 fold_build2_loc (loc, MULT_EXPR, rtype,
7502 fold_build1_loc (loc, REALPART_EXPR,
7504 fold_build2_loc (loc, MULT_EXPR, rtype,
7506 fold_build1_loc (loc, IMAGPART_EXPR,
7513 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7514 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- NULL_TREE return and braces are on missing
   lines.  Constant arguments are truncated at compile time; otherwise the
   generic truncation-propagation helper handles the rest.  */
7517 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7519 if (!validate_arg (arg, REAL_TYPE))
7522 /* Optimize trunc of constant value. */
7523 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7525 REAL_VALUE_TYPE r, x;
7526 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7528 x = TREE_REAL_CST (arg);
7529 real_trunc (&r, TYPE_MODE (type), &x);
7530 return build_real (type, r);
7533 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7536 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7537 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- `REAL_VALUE_TYPE x;' declaration, a
   truncfn NULL check and braces are on missing lines.  NaN constants are
   left alone when errno matters.  */
7540 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7542 if (!validate_arg (arg, REAL_TYPE))
7545 /* Optimize floor of constant value. */
7546 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7550 x = TREE_REAL_CST (arg);
7551 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7553 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7556 real_floor (&r, TYPE_MODE (type), &x);
7557 return build_real (type, r);
7561 /* Fold floor (x) where x is nonnegative to trunc (x). */
7562 if (tree_expr_nonnegative_p (arg))
7564 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7566 return build_call_expr_loc (loc, truncfn, 1, arg);
7569 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7572 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7573 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- `REAL_VALUE_TYPE x;' and `REAL_VALUE_TYPE r;'
   declarations and braces are on missing lines.  Same shape as
   fold_builtin_floor, using real_ceil.  */
7576 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7578 if (!validate_arg (arg, REAL_TYPE))
7581 /* Optimize ceil of constant value. */
7582 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7586 x = TREE_REAL_CST (arg);
7587 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7589 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7592 real_ceil (&r, TYPE_MODE (type), &x);
7593 return build_real (type, r);
7597 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7600 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7601 Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- local declarations and braces are on
   missing lines.  Same shape as fold_builtin_floor/ceil, using
   real_round.  */
7604 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7606 if (!validate_arg (arg, REAL_TYPE))
7609 /* Optimize round of constant value. */
7610 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7614 x = TREE_REAL_CST (arg);
7615 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7617 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7620 real_round (&r, TYPE_MODE (type), &x);
7621 return build_real (type, r);
7625 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7628 /* Fold function call to builtin lround, lroundf or lroundl (or the
7629 corresponding long long versions) and other rounding functions. ARG
7630 is the argument to the call. Return NULL_TREE if no simplification
/* NOTE(review): sampled dump -- the `double_int val;' declaration,
   switch default, `break's and braces are on missing lines; the
   (HOST_WIDE_INT *) cast on &val.low at 7672 should be re-checked against
   the full source before any change.  */
7634 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7636 if (!validate_arg (arg, REAL_TYPE))
7639 /* Optimize lround of constant value. */
7640 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7642 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7644 if (real_isfinite (&x))
7646 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7647 tree ftype = TREE_TYPE (arg);
/* Round in the FP domain first, then convert to the integer type
   only if the value fits.  */
7651 switch (DECL_FUNCTION_CODE (fndecl))
7653 CASE_FLT_FN (BUILT_IN_LFLOOR):
7654 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7655 real_floor (&r, TYPE_MODE (ftype), &x);
7658 CASE_FLT_FN (BUILT_IN_LCEIL):
7659 CASE_FLT_FN (BUILT_IN_LLCEIL):
7660 real_ceil (&r, TYPE_MODE (ftype), &x);
7663 CASE_FLT_FN (BUILT_IN_LROUND):
7664 CASE_FLT_FN (BUILT_IN_LLROUND):
7665 real_round (&r, TYPE_MODE (ftype), &x);
7672 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7673 if (double_int_fits_to_tree_p (itype, val))
7674 return double_int_to_tree (itype, val);
7678 switch (DECL_FUNCTION_CODE (fndecl))
7680 CASE_FLT_FN (BUILT_IN_LFLOOR):
7681 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7682 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7683 if (tree_expr_nonnegative_p (arg))
7684 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7685 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7690 return fold_fixed_mathfn (loc, fndecl, arg);
7693 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7694 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7695 the argument to the call. Return NULL_TREE if no simplification can
/* NOTE(review): sampled dump -- the `else hi = 0;' / `hi = 0;' arms, the
   `if (lo != 0)' / `if (hi != 0)' guards for ffs/clz/ctz, result
   initializations, `break's and the default:gcc_unreachable() are on
   missing lines; the visible conditions are only fragments of those
   if/else ladders.  */
7699 fold_builtin_bitop (tree fndecl, tree arg)
7701 if (!validate_arg (arg, INTEGER_TYPE))
7704 /* Optimize for constant argument. */
7705 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7707 HOST_WIDE_INT hi, width, result;
7708 unsigned HOST_WIDE_INT lo;
7711 type = TREE_TYPE (arg);
7712 width = TYPE_PRECISION (type);
7713 lo = TREE_INT_CST_LOW (arg);
7715 /* Clear all the bits that are beyond the type's precision. */
7716 if (width > HOST_BITS_PER_WIDE_INT)
7718 hi = TREE_INT_CST_HIGH (arg);
7719 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7720 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7725 if (width < HOST_BITS_PER_WIDE_INT)
7726 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7729 switch (DECL_FUNCTION_CODE (fndecl))
7731 CASE_INT_FN (BUILT_IN_FFS):
7733 result = ffs_hwi (lo);
7735 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7740 CASE_INT_FN (BUILT_IN_CLZ):
7742 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7744 result = width - floor_log2 (lo) - 1;
7745 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7749 CASE_INT_FN (BUILT_IN_CTZ):
7751 result = ctz_hwi (lo);
7753 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7754 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7758 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: each `x &= x - 1' clears the lowest set bit.  */
7761 result++, lo &= lo - 1;
7763 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7766 CASE_INT_FN (BUILT_IN_PARITY):
7769 result++, lo &= lo - 1;
7771 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7779 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7785 /* Fold function call to builtin_bswap and the long and long long
7786 variants. Return NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- the return type line, the `int s;'
   declaration, the `r_lo |= byte << d;' arm of the if at 7822, the
   switch default and braces are on missing lines.  */
7788 fold_builtin_bswap (tree fndecl, tree arg)
7790 if (! validate_arg (arg, INTEGER_TYPE))
7793 /* Optimize constant value. */
7794 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7796 HOST_WIDE_INT hi, width, r_hi = 0;
7797 unsigned HOST_WIDE_INT lo, r_lo = 0;
7800 type = TREE_TYPE (arg);
7801 width = TYPE_PRECISION (type);
7802 lo = TREE_INT_CST_LOW (arg);
7803 hi = TREE_INT_CST_HIGH (arg);
7805 switch (DECL_FUNCTION_CODE (fndecl))
7807 case BUILT_IN_BSWAP32:
7808 case BUILT_IN_BSWAP64:
/* Move each byte from bit position S to its mirrored position D,
   spanning the lo/hi HOST_WIDE_INT pair as needed.  */
7812 for (s = 0; s < width; s += 8)
7814 int d = width - s - 8;
7815 unsigned HOST_WIDE_INT byte;
7817 if (s < HOST_BITS_PER_WIDE_INT)
7818 byte = (lo >> s) & 0xff;
7820 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7822 if (d < HOST_BITS_PER_WIDE_INT)
7825 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7835 if (width < HOST_BITS_PER_WIDE_INT)
7836 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7838 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7844 /* A subroutine of fold_builtin to fold the various logarithmic
7845 functions. Return NULL_TREE if no simplification can me made.
7846 FUNC is the corresponding MPFR logarithm function. */
/* NOTE(review): sampled dump -- the return type, `tree res;', the inner
   switch header on builtin_mathfn_code, `break's, the default case and
   the final NULL_TREE return are on missing lines.  FUNC identifies
   which log this is (mpfr_log/log2/log10), so it doubles as a tag in the
   logN(expN(x)) test below.  */
7849 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7850 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7852 if (validate_arg (arg, REAL_TYPE))
7854 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7856 const enum built_in_function fcode = builtin_mathfn_code (arg);
7858 /* Calculate the result when the argument is a constant. */
7859 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7862 /* Special case, optimize logN(expN(x)) = x. */
7863 if (flag_unsafe_math_optimizations
7864 && ((func == mpfr_log
7865 && (fcode == BUILT_IN_EXP
7866 || fcode == BUILT_IN_EXPF
7867 || fcode == BUILT_IN_EXPL))
7868 || (func == mpfr_log2
7869 && (fcode == BUILT_IN_EXP2
7870 || fcode == BUILT_IN_EXP2F
7871 || fcode == BUILT_IN_EXP2L))
7872 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7873 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7875 /* Optimize logN(func()) for various exponential functions. We
7876 want to determine the value "x" and the power "exponent" in
7877 order to transform logN(x**exponent) into exponent*logN(x). */
7878 if (flag_unsafe_math_optimizations)
7880 tree exponent = 0, x = 0;
7884 CASE_FLT_FN (BUILT_IN_EXP):
7885 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7886 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7888 exponent = CALL_EXPR_ARG (arg, 0);
7890 CASE_FLT_FN (BUILT_IN_EXP2):
7891 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7892 x = build_real (type, dconst2);
7893 exponent = CALL_EXPR_ARG (arg, 0);
7895 CASE_FLT_FN (BUILT_IN_EXP10):
7896 CASE_FLT_FN (BUILT_IN_POW10):
7897 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7899 REAL_VALUE_TYPE dconst10;
7900 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7901 x = build_real (type, dconst10);
7903 exponent = CALL_EXPR_ARG (arg, 0);
7905 CASE_FLT_FN (BUILT_IN_SQRT):
7906 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7907 x = CALL_EXPR_ARG (arg, 0);
7908 exponent = build_real (type, dconsthalf);
7910 CASE_FLT_FN (BUILT_IN_CBRT):
7911 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7912 x = CALL_EXPR_ARG (arg, 0);
7913 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7916 CASE_FLT_FN (BUILT_IN_POW):
7917 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7918 x = CALL_EXPR_ARG (arg, 0);
7919 exponent = CALL_EXPR_ARG (arg, 1);
7925 /* Now perform the optimization. */
7928 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7929 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7937 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7938 NULL_TREE if no simplification can be made. */
/* NOTE(review): sampled dump -- the return type, `return res;', the
   narg0/narg1 non-null guard before the rebuild, and the final NULL_TREE
   return are on missing lines.  */
7941 fold_builtin_hypot (location_t loc, tree fndecl,
7942 tree arg0, tree arg1, tree type)
7944 tree res, narg0, narg1;
7946 if (!validate_arg (arg0, REAL_TYPE)
7947 || !validate_arg (arg1, REAL_TYPE))
7950 /* Calculate the result when the argument is a constant. */
7951 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7954 /* If either argument to hypot has a negate or abs, strip that off.
7955 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7956 narg0 = fold_strip_sign_ops (arg0);
7957 narg1 = fold_strip_sign_ops (arg1);
7960 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7961 narg1 ? narg1 : arg1);
7964 /* If either argument is zero, hypot is fabs of the other. */
7965 if (real_zerop (arg0))
7966 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7967 else if (real_zerop (arg1))
7968 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7970 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7971 if (flag_unsafe_math_optimizations
7972 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7974 const REAL_VALUE_TYPE sqrt2_trunc
7975 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7976 return fold_build2_loc (loc, MULT_EXPR, type,
7977 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7978 build_real (type, sqrt2_trunc));
7985 /* Fold a builtin function call to pow, powf, or powl. Return
7986 NULL_TREE if no simplification can be made. */
7988 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7992 if (!validate_arg (arg0, REAL_TYPE)
7993 || !validate_arg (arg1, REAL_TYPE))
7996 /* Calculate the result when the argument is a constant. */
7997 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8000 /* Optimize pow(1.0,y) = 1.0. */
8001 if (real_onep (arg0))
8002 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8004 if (TREE_CODE (arg1) == REAL_CST
8005 && !TREE_OVERFLOW (arg1))
8007 REAL_VALUE_TYPE cint;
8011 c = TREE_REAL_CST (arg1);
8013 /* Optimize pow(x,0.0) = 1.0. */
8014 if (REAL_VALUES_EQUAL (c, dconst0))
8015 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8018 /* Optimize pow(x,1.0) = x. */
8019 if (REAL_VALUES_EQUAL (c, dconst1))
8022 /* Optimize pow(x,-1.0) = 1.0/x. */
8023 if (REAL_VALUES_EQUAL (c, dconstm1))
8024 return fold_build2_loc (loc, RDIV_EXPR, type,
8025 build_real (type, dconst1), arg0);
8027 /* Optimize pow(x,0.5) = sqrt(x). */
8028 if (flag_unsafe_math_optimizations
8029 && REAL_VALUES_EQUAL (c, dconsthalf))
8031 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8033 if (sqrtfn != NULL_TREE)
8034 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8037 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8038 if (flag_unsafe_math_optimizations)
8040 const REAL_VALUE_TYPE dconstroot
8041 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8043 if (REAL_VALUES_EQUAL (c, dconstroot))
8045 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8046 if (cbrtfn != NULL_TREE)
8047 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8051 /* Check for an integer exponent. */
8052 n = real_to_integer (&c);
8053 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8054 if (real_identical (&c, &cint))
8056 /* Attempt to evaluate pow at compile-time, unless this should
8057 raise an exception. */
8058 if (TREE_CODE (arg0) == REAL_CST
8059 && !TREE_OVERFLOW (arg0)
8061 || (!flag_trapping_math && !flag_errno_math)
8062 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8067 x = TREE_REAL_CST (arg0);
8068 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8069 if (flag_unsafe_math_optimizations || !inexact)
8070 return build_real (type, x);
8073 /* Strip sign ops from even integer powers. */
8074 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8076 tree narg0 = fold_strip_sign_ops (arg0);
8078 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8083 if (flag_unsafe_math_optimizations)
8085 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8087 /* Optimize pow(expN(x),y) = expN(x*y). */
8088 if (BUILTIN_EXPONENT_P (fcode))
8090 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8091 tree arg = CALL_EXPR_ARG (arg0, 0);
8092 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8093 return build_call_expr_loc (loc, expfn, 1, arg);
8096 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8097 if (BUILTIN_SQRT_P (fcode))
8099 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8100 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8101 build_real (type, dconsthalf));
8102 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8105 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8106 if (BUILTIN_CBRT_P (fcode))
8108 tree arg = CALL_EXPR_ARG (arg0, 0);
8109 if (tree_expr_nonnegative_p (arg))
8111 const REAL_VALUE_TYPE dconstroot
8112 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8113 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8114 build_real (type, dconstroot));
8115 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8119 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8120 if (fcode == BUILT_IN_POW
8121 || fcode == BUILT_IN_POWF
8122 || fcode == BUILT_IN_POWL)
8124 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8125 if (tree_expr_nonnegative_p (arg00))
8127 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8128 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8129 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8137 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8138 Return NULL_TREE if no simplification can be made. */
8140 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8141 tree arg0, tree arg1, tree type)
8143 if (!validate_arg (arg0, REAL_TYPE)
8144 || !validate_arg (arg1, INTEGER_TYPE))
8147 /* Optimize pow(1.0,y) = 1.0. */
8148 if (real_onep (arg0))
8149 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8151 if (host_integerp (arg1, 0))
8153 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8155 /* Evaluate powi at compile-time. */
8156 if (TREE_CODE (arg0) == REAL_CST
8157 && !TREE_OVERFLOW (arg0))
8160 x = TREE_REAL_CST (arg0);
8161 real_powi (&x, TYPE_MODE (type), &x, c);
8162 return build_real (type, x);
8165 /* Optimize pow(x,0) = 1.0. */
8167 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8170 /* Optimize pow(x,1) = x. */
8174 /* Optimize pow(x,-1) = 1.0/x. */
8176 return fold_build2_loc (loc, RDIV_EXPR, type,
8177 build_real (type, dconst1), arg0);
8183 /* A subroutine of fold_builtin to fold the various exponent
8184 functions. Return NULL_TREE if no simplification can be made.
8185 FUNC is the corresponding MPFR exponent function. */
8188 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8189 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8191 if (validate_arg (arg, REAL_TYPE))
8193 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8196 /* Calculate the result when the argument is a constant. */
8197 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8200 /* Optimize expN(logN(x)) = x. */
8201 if (flag_unsafe_math_optimizations)
8203 const enum built_in_function fcode = builtin_mathfn_code (arg);
8205 if ((func == mpfr_exp
8206 && (fcode == BUILT_IN_LOG
8207 || fcode == BUILT_IN_LOGF
8208 || fcode == BUILT_IN_LOGL))
8209 || (func == mpfr_exp2
8210 && (fcode == BUILT_IN_LOG2
8211 || fcode == BUILT_IN_LOG2F
8212 || fcode == BUILT_IN_LOG2L))
8213 || (func == mpfr_exp10
8214 && (fcode == BUILT_IN_LOG10
8215 || fcode == BUILT_IN_LOG10F
8216 || fcode == BUILT_IN_LOG10L)))
8217 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8224 /* Return true if VAR is a VAR_DECL or a component thereof. */
8227 var_decl_component_p (tree var)
8230 while (handled_component_p (inner))
8231 inner = TREE_OPERAND (inner, 0);
8232 return SSA_VAR_P (inner);
8235 /* Fold function call to builtin memset. Return
8236 NULL_TREE if no simplification can be made. */
8239 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8240 tree type, bool ignore)
8242 tree var, ret, etype;
8243 unsigned HOST_WIDE_INT length, cval;
8245 if (! validate_arg (dest, POINTER_TYPE)
8246 || ! validate_arg (c, INTEGER_TYPE)
8247 || ! validate_arg (len, INTEGER_TYPE))
8250 if (! host_integerp (len, 1))
8253 /* If the LEN parameter is zero, return DEST. */
8254 if (integer_zerop (len))
8255 return omit_one_operand_loc (loc, type, dest, c);
8257 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8262 if (TREE_CODE (var) != ADDR_EXPR)
8265 var = TREE_OPERAND (var, 0);
8266 if (TREE_THIS_VOLATILE (var))
8269 etype = TREE_TYPE (var);
8270 if (TREE_CODE (etype) == ARRAY_TYPE)
8271 etype = TREE_TYPE (etype);
8273 if (!INTEGRAL_TYPE_P (etype)
8274 && !POINTER_TYPE_P (etype))
8277 if (! var_decl_component_p (var))
8280 length = tree_low_cst (len, 1);
8281 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8282 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8286 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8289 if (integer_zerop (c))
8293 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8296 cval = tree_low_cst (c, 1);
8300 cval |= (cval << 31) << 1;
8303 ret = build_int_cst_type (etype, cval);
8304 var = build_fold_indirect_ref_loc (loc,
8305 fold_convert_loc (loc,
8306 build_pointer_type (etype),
8308 ret = build2 (MODIFY_EXPR, etype, var, ret);
8312 return omit_one_operand_loc (loc, type, dest, ret);
8315 /* Fold function call to builtin memset. Return
8316 NULL_TREE if no simplification can be made. */
8319 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8321 if (! validate_arg (dest, POINTER_TYPE)
8322 || ! validate_arg (size, INTEGER_TYPE))
8328 /* New argument list transforming bzero(ptr x, int y) to
8329 memset(ptr x, int 0, size_t y). This is done this way
8330 so that if it isn't expanded inline, we fallback to
8331 calling bzero instead of memset. */
8333 return fold_builtin_memset (loc, dest, integer_zero_node,
8334 fold_convert_loc (loc, sizetype, size),
8335 void_type_node, ignore);
8338 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8339 NULL_TREE if no simplification can be made.
8340 If ENDP is 0, return DEST (like memcpy).
8341 If ENDP is 1, return DEST+LEN (like mempcpy).
8342 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8343 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8347 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8348 tree len, tree type, bool ignore, int endp)
8350 tree destvar, srcvar, expr;
8352 if (! validate_arg (dest, POINTER_TYPE)
8353 || ! validate_arg (src, POINTER_TYPE)
8354 || ! validate_arg (len, INTEGER_TYPE))
8357 /* If the LEN parameter is zero, return DEST. */
8358 if (integer_zerop (len))
8359 return omit_one_operand_loc (loc, type, dest, src);
8361 /* If SRC and DEST are the same (and not volatile), return
8362 DEST{,+LEN,+LEN-1}. */
8363 if (operand_equal_p (src, dest, 0))
8367 tree srctype, desttype;
8368 int src_align, dest_align;
8373 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8374 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8376 /* Both DEST and SRC must be pointer types.
8377 ??? This is what old code did. Is the testing for pointer types
8380 If either SRC is readonly or length is 1, we can use memcpy. */
8381 if (!dest_align || !src_align)
8383 if (readonly_data_expr (src)
8384 || (host_integerp (len, 1)
8385 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8386 >= tree_low_cst (len, 1))))
8388 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8391 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8394 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8395 if (TREE_CODE (src) == ADDR_EXPR
8396 && TREE_CODE (dest) == ADDR_EXPR)
8398 tree src_base, dest_base, fn;
8399 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8400 HOST_WIDE_INT size = -1;
8401 HOST_WIDE_INT maxsize = -1;
8403 srcvar = TREE_OPERAND (src, 0);
8404 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8406 destvar = TREE_OPERAND (dest, 0);
8407 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8409 if (host_integerp (len, 1))
8410 maxsize = tree_low_cst (len, 1);
8413 src_offset /= BITS_PER_UNIT;
8414 dest_offset /= BITS_PER_UNIT;
8415 if (SSA_VAR_P (src_base)
8416 && SSA_VAR_P (dest_base))
8418 if (operand_equal_p (src_base, dest_base, 0)
8419 && ranges_overlap_p (src_offset, maxsize,
8420 dest_offset, maxsize))
8423 else if (TREE_CODE (src_base) == MEM_REF
8424 && TREE_CODE (dest_base) == MEM_REF)
8427 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8428 TREE_OPERAND (dest_base, 0), 0))
8430 off = double_int_add (mem_ref_offset (src_base),
8431 shwi_to_double_int (src_offset));
8432 if (!double_int_fits_in_shwi_p (off))
8434 src_offset = off.low;
8435 off = double_int_add (mem_ref_offset (dest_base),
8436 shwi_to_double_int (dest_offset));
8437 if (!double_int_fits_in_shwi_p (off))
8439 dest_offset = off.low;
8440 if (ranges_overlap_p (src_offset, maxsize,
8441 dest_offset, maxsize))
8447 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8450 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8455 if (!host_integerp (len, 0))
8458 This logic lose for arguments like (type *)malloc (sizeof (type)),
8459 since we strip the casts of up to VOID return value from malloc.
8460 Perhaps we ought to inherit type from non-VOID argument here? */
8463 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8464 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8466 tree tem = TREE_OPERAND (src, 0);
8468 if (tem != TREE_OPERAND (src, 0))
8469 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8471 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8473 tree tem = TREE_OPERAND (dest, 0);
8475 if (tem != TREE_OPERAND (dest, 0))
8476 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8478 srctype = TREE_TYPE (TREE_TYPE (src));
8480 && TREE_CODE (srctype) == ARRAY_TYPE
8481 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8483 srctype = TREE_TYPE (srctype);
8485 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8487 desttype = TREE_TYPE (TREE_TYPE (dest));
8489 && TREE_CODE (desttype) == ARRAY_TYPE
8490 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8492 desttype = TREE_TYPE (desttype);
8494 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8496 if (!srctype || !desttype
8497 || TREE_ADDRESSABLE (srctype)
8498 || TREE_ADDRESSABLE (desttype)
8499 || !TYPE_SIZE_UNIT (srctype)
8500 || !TYPE_SIZE_UNIT (desttype)
8501 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8502 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8505 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8506 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8507 if (dest_align < (int) TYPE_ALIGN (desttype)
8508 || src_align < (int) TYPE_ALIGN (srctype))
8512 dest = builtin_save_expr (dest);
8514 /* Build accesses at offset zero with a ref-all character type. */
8515 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8516 ptr_mode, true), 0);
8519 STRIP_NOPS (destvar);
8520 if (TREE_CODE (destvar) == ADDR_EXPR
8521 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8522 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8523 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8525 destvar = NULL_TREE;
8528 STRIP_NOPS (srcvar);
8529 if (TREE_CODE (srcvar) == ADDR_EXPR
8530 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8531 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8532 && (!STRICT_ALIGNMENT
8534 || src_align >= (int) TYPE_ALIGN (desttype)))
8535 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8540 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8543 if (srcvar == NULL_TREE)
8545 if (STRICT_ALIGNMENT
8546 && src_align < (int) TYPE_ALIGN (desttype))
8549 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8551 else if (destvar == NULL_TREE)
8553 if (STRICT_ALIGNMENT
8554 && dest_align < (int) TYPE_ALIGN (srctype))
8557 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8560 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8566 if (endp == 0 || endp == 3)
8567 return omit_one_operand_loc (loc, type, dest, expr);
8573 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8576 len = fold_convert_loc (loc, sizetype, len);
8577 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8578 dest = fold_convert_loc (loc, type, dest);
8580 dest = omit_one_operand_loc (loc, type, dest, expr);
8584 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8585 If LEN is not NULL, it represents the length of the string to be
8586 copied. Return NULL_TREE if no simplification can be made. */
8589 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8593 if (!validate_arg (dest, POINTER_TYPE)
8594 || !validate_arg (src, POINTER_TYPE))
8597 /* If SRC and DEST are the same (and not volatile), return DEST. */
8598 if (operand_equal_p (src, dest, 0))
8599 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8601 if (optimize_function_for_size_p (cfun))
8604 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8610 len = c_strlen (src, 1);
8611 if (! len || TREE_SIDE_EFFECTS (len))
8615 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8616 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8617 build_call_expr_loc (loc, fn, 3, dest, src, len));
8620 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8621 Return NULL_TREE if no simplification can be made. */
8624 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8626 tree fn, len, lenp1, call, type;
8628 if (!validate_arg (dest, POINTER_TYPE)
8629 || !validate_arg (src, POINTER_TYPE))
8632 len = c_strlen (src, 1);
8634 || TREE_CODE (len) != INTEGER_CST)
8637 if (optimize_function_for_size_p (cfun)
8638 /* If length is zero it's small enough. */
8639 && !integer_zerop (len))
8642 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8646 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8647 /* We use dest twice in building our expression. Save it from
8648 multiple expansions. */
8649 dest = builtin_save_expr (dest);
8650 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8652 type = TREE_TYPE (TREE_TYPE (fndecl));
8653 len = fold_convert_loc (loc, sizetype, len);
8654 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8655 dest = fold_convert_loc (loc, type, dest);
8656 dest = omit_one_operand_loc (loc, type, dest, call);
8660 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8661 If SLEN is not NULL, it represents the length of the source string.
8662 Return NULL_TREE if no simplification can be made. */
8665 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8666 tree src, tree len, tree slen)
8670 if (!validate_arg (dest, POINTER_TYPE)
8671 || !validate_arg (src, POINTER_TYPE)
8672 || !validate_arg (len, INTEGER_TYPE))
8675 /* If the LEN parameter is zero, return DEST. */
8676 if (integer_zerop (len))
8677 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8679 /* We can't compare slen with len as constants below if len is not a
8681 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8685 slen = c_strlen (src, 1);
8687 /* Now, we must be passed a constant src ptr parameter. */
8688 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8691 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8693 /* We do not support simplification of this case, though we do
8694 support it when expanding trees into RTL. */
8695 /* FIXME: generate a call to __builtin_memset. */
8696 if (tree_int_cst_lt (slen, len))
8699 /* OK transform into builtin memcpy. */
8700 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8703 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8704 build_call_expr_loc (loc, fn, 3, dest, src, len));
8707 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8708 arguments to the call, and TYPE is its return type.
8709 Return NULL_TREE if no simplification can be made. */
8712 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8714 if (!validate_arg (arg1, POINTER_TYPE)
8715 || !validate_arg (arg2, INTEGER_TYPE)
8716 || !validate_arg (len, INTEGER_TYPE))
8722 if (TREE_CODE (arg2) != INTEGER_CST
8723 || !host_integerp (len, 1))
8726 p1 = c_getstr (arg1);
8727 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8733 if (target_char_cast (arg2, &c))
8736 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8739 return build_int_cst (TREE_TYPE (arg1), 0);
8741 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8743 return fold_convert_loc (loc, type, tem);
8749 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8750 Return NULL_TREE if no simplification can be made. */
8753 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8755 const char *p1, *p2;
8757 if (!validate_arg (arg1, POINTER_TYPE)
8758 || !validate_arg (arg2, POINTER_TYPE)
8759 || !validate_arg (len, INTEGER_TYPE))
8762 /* If the LEN parameter is zero, return zero. */
8763 if (integer_zerop (len))
8764 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8767 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8768 if (operand_equal_p (arg1, arg2, 0))
8769 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8771 p1 = c_getstr (arg1);
8772 p2 = c_getstr (arg2);
8774 /* If all arguments are constant, and the value of len is not greater
8775 than the lengths of arg1 and arg2, evaluate at compile-time. */
8776 if (host_integerp (len, 1) && p1 && p2
8777 && compare_tree_int (len, strlen (p1) + 1) <= 0
8778 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8780 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8783 return integer_one_node;
8785 return integer_minus_one_node;
8787 return integer_zero_node;
8790 /* If len parameter is one, return an expression corresponding to
8791 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8792 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8794 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8795 tree cst_uchar_ptr_node
8796 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8799 = fold_convert_loc (loc, integer_type_node,
8800 build1 (INDIRECT_REF, cst_uchar_node,
8801 fold_convert_loc (loc,
8805 = fold_convert_loc (loc, integer_type_node,
8806 build1 (INDIRECT_REF, cst_uchar_node,
8807 fold_convert_loc (loc,
8810 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8816 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8817 Return NULL_TREE if no simplification can be made. */
8820 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8822 const char *p1, *p2;
8824 if (!validate_arg (arg1, POINTER_TYPE)
8825 || !validate_arg (arg2, POINTER_TYPE))
8828 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8829 if (operand_equal_p (arg1, arg2, 0))
8830 return integer_zero_node;
8832 p1 = c_getstr (arg1);
8833 p2 = c_getstr (arg2);
8837 const int i = strcmp (p1, p2);
8839 return integer_minus_one_node;
8841 return integer_one_node;
8843 return integer_zero_node;
8846 /* If the second arg is "", return *(const unsigned char*)arg1. */
8847 if (p2 && *p2 == '\0')
8849 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8850 tree cst_uchar_ptr_node
8851 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8853 return fold_convert_loc (loc, integer_type_node,
8854 build1 (INDIRECT_REF, cst_uchar_node,
8855 fold_convert_loc (loc,
8860 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8861 if (p1 && *p1 == '\0')
8863 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8864 tree cst_uchar_ptr_node
8865 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8868 = fold_convert_loc (loc, integer_type_node,
8869 build1 (INDIRECT_REF, cst_uchar_node,
8870 fold_convert_loc (loc,
8873 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8879 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8880 Return NULL_TREE if no simplification can be made. */
8883 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8885 const char *p1, *p2;
8887 if (!validate_arg (arg1, POINTER_TYPE)
8888 || !validate_arg (arg2, POINTER_TYPE)
8889 || !validate_arg (len, INTEGER_TYPE))
8892 /* If the LEN parameter is zero, return zero. */
8893 if (integer_zerop (len))
8894 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8897 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8898 if (operand_equal_p (arg1, arg2, 0))
8899 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8901 p1 = c_getstr (arg1);
8902 p2 = c_getstr (arg2);
8904 if (host_integerp (len, 1) && p1 && p2)
8906 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8908 return integer_one_node;
8910 return integer_minus_one_node;
8912 return integer_zero_node;
8915 /* If the second arg is "", and the length is greater than zero,
8916 return *(const unsigned char*)arg1. */
8917 if (p2 && *p2 == '\0'
8918 && TREE_CODE (len) == INTEGER_CST
8919 && tree_int_cst_sgn (len) == 1)
8921 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8922 tree cst_uchar_ptr_node
8923 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8925 return fold_convert_loc (loc, integer_type_node,
8926 build1 (INDIRECT_REF, cst_uchar_node,
8927 fold_convert_loc (loc,
8932 /* If the first arg is "", and the length is greater than zero,
8933 return -*(const unsigned char*)arg2. */
8934 if (p1 && *p1 == '\0'
8935 && TREE_CODE (len) == INTEGER_CST
8936 && tree_int_cst_sgn (len) == 1)
8938 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8939 tree cst_uchar_ptr_node
8940 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8942 tree temp = fold_convert_loc (loc, integer_type_node,
8943 build1 (INDIRECT_REF, cst_uchar_node,
8944 fold_convert_loc (loc,
8947 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8950 /* If len parameter is one, return an expression corresponding to
8951 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8952 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8954 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8955 tree cst_uchar_ptr_node
8956 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8958 tree ind1 = fold_convert_loc (loc, integer_type_node,
8959 build1 (INDIRECT_REF, cst_uchar_node,
8960 fold_convert_loc (loc,
8963 tree ind2 = fold_convert_loc (loc, integer_type_node,
8964 build1 (INDIRECT_REF, cst_uchar_node,
8965 fold_convert_loc (loc,
8968 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8974 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8975 ARG. Return NULL_TREE if no simplification can be made. */
8978 fold_builtin_signbit (location_t loc, tree arg, tree type)
8982 if (!validate_arg (arg, REAL_TYPE))
8985 /* If ARG is a compile-time constant, determine the result. */
8986 if (TREE_CODE (arg) == REAL_CST
8987 && !TREE_OVERFLOW (arg))
8991 c = TREE_REAL_CST (arg);
8992 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8993 return fold_convert_loc (loc, type, temp);
8996 /* If ARG is non-negative, the result is always zero. */
8997 if (tree_expr_nonnegative_p (arg))
8998 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9000 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9001 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9002 return fold_build2_loc (loc, LT_EXPR, type, arg,
9003 build_real (TREE_TYPE (arg), dconst0));
9008 /* Fold function call to builtin copysign, copysignf or copysignl with
9009 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9013 fold_builtin_copysign (location_t loc, tree fndecl,
9014 tree arg1, tree arg2, tree type)
9018 if (!validate_arg (arg1, REAL_TYPE)
9019 || !validate_arg (arg2, REAL_TYPE))
9022 /* copysign(X,X) is X. */
9023 if (operand_equal_p (arg1, arg2, 0))
9024 return fold_convert_loc (loc, type, arg1);
9026 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9027 if (TREE_CODE (arg1) == REAL_CST
9028 && TREE_CODE (arg2) == REAL_CST
9029 && !TREE_OVERFLOW (arg1)
9030 && !TREE_OVERFLOW (arg2))
9032 REAL_VALUE_TYPE c1, c2;
9034 c1 = TREE_REAL_CST (arg1);
9035 c2 = TREE_REAL_CST (arg2);
9036 /* c1.sign := c2.sign. */
9037 real_copysign (&c1, &c2);
9038 return build_real (type, c1);
9041 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9042 Remember to evaluate Y for side-effects. */
9043 if (tree_expr_nonnegative_p (arg2))
9044 return omit_one_operand_loc (loc, type,
9045 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9048 /* Strip sign changing operations for the first argument. */
9049 tem = fold_strip_sign_ops (arg1);
9051 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9056 /* Fold a call to builtin isascii with argument ARG. */
9059 fold_builtin_isascii (location_t loc, tree arg)
9061 if (!validate_arg (arg, INTEGER_TYPE))
9065 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9066 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9067 build_int_cst (NULL_TREE,
9068 ~ (unsigned HOST_WIDE_INT) 0x7f));
9069 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9070 arg, integer_zero_node);
9074 /* Fold a call to builtin toascii with argument ARG. */
9077 fold_builtin_toascii (location_t loc, tree arg)
9079 if (!validate_arg (arg, INTEGER_TYPE))
9082 /* Transform toascii(c) -> (c & 0x7f). */
9083 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9084 build_int_cst (NULL_TREE, 0x7f));
9087 /* Fold a call to builtin isdigit with argument ARG. */
9090 fold_builtin_isdigit (location_t loc, tree arg)
9092 if (!validate_arg (arg, INTEGER_TYPE))
9096 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9097 /* According to the C standard, isdigit is unaffected by locale.
9098 However, it definitely is affected by the target character set. */
9099 unsigned HOST_WIDE_INT target_digit0
9100 = lang_hooks.to_target_charset ('0');
9102 if (target_digit0 == 0)
9105 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9106 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9107 build_int_cst (unsigned_type_node, target_digit0));
9108 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9109 build_int_cst (unsigned_type_node, 9));
9113 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9116 fold_builtin_fabs (location_t loc, tree arg, tree type)
9118 if (!validate_arg (arg, REAL_TYPE))
9121 arg = fold_convert_loc (loc, type, arg);
9122 if (TREE_CODE (arg) == REAL_CST)
9123 return fold_abs_const (arg, type);
9124 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9127 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9130 fold_builtin_abs (location_t loc, tree arg, tree type)
9132 if (!validate_arg (arg, INTEGER_TYPE))
9135 arg = fold_convert_loc (loc, type, arg);
9136 if (TREE_CODE (arg) == INTEGER_CST)
9137 return fold_abs_const (arg, type);
9138 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9141 /* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the two
   arguments, TYPE is the return type of the call, and MAX selects
   between fmax (true) and fmin (false).  */
9144 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9145 tree type, bool max)
9147 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9149 /* Calculate the result when the argument is a constant, using MPFR's
   own min/max which implement the C99 NaN semantics.  */
9150 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9155 /* If either argument is NaN, return the other one.  Avoid the
9156 transformation if we get (and honor) a signalling NaN.  Using
9157 omit_one_operand() ensures we create a non-lvalue.  */
9158 if (TREE_CODE (arg0) == REAL_CST
9159 && real_isnan (&TREE_REAL_CST (arg0))
9160 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9161 || ! TREE_REAL_CST (arg0).signalling))
9162 return omit_one_operand_loc (loc, type, arg1, arg0);
9163 if (TREE_CODE (arg1) == REAL_CST
9164 && real_isnan (&TREE_REAL_CST (arg1))
9165 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9166 || ! TREE_REAL_CST (arg1).signalling))
9167 return omit_one_operand_loc (loc, type, arg0, arg1);
9169 /* Transform fmin/fmax(x,x) -> x.  OEP_PURE_SAME allows matching
   expressions with the same pure calls.  */
9170 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9171 return omit_one_operand_loc (loc, type, arg0, arg1);
9173 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
9174 functions to return the numeric arg if the other one is NaN.
9175 These tree codes don't honor that, so only transform if
9176 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
9177 handled, so we don't have to worry about it either.  */
9178 if (flag_finite_math_only)
9179 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9180 fold_convert_loc (loc, type, arg0),
9181 fold_convert_loc (loc, type, arg1));
9186 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  TYPE is the real
   return type of the call; ARG must be a complex value with real
   component parts for the transformation to apply.  */
9189 fold_builtin_carg (location_t loc, tree arg, tree type)
9191 if (validate_arg (arg, COMPLEX_TYPE)
9192 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9194 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* Save ARG so extracting the real and imaginary parts evaluates it
   only once.  */
9198 tree new_arg = builtin_save_expr (arg);
9199 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9200 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
/* The phase angle of a+bi is atan2 (b, a) -- imaginary part first.  */
9201 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9208 /* Fold a call to builtin logb/ilogb.  ARG is the argument and RETTYPE
   the return type of the call (real for logb, integer for ilogb).
   Only constant arguments are folded here.  */
9211 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9213 if (! validate_arg (arg, REAL_TYPE))
/* Only fold literal constants that did not overflow.  */
9218 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9220 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9226 /* If arg is Inf or NaN and we're logb, return it.  */
9227 if (TREE_CODE (rettype) == REAL_TYPE)
9228 return fold_convert_loc (loc, rettype, arg);
9229 /* Fall through...  */
9231 /* Zero may set errno and/or raise an exception for logb, also
9232 for ilogb we don't know FP_ILOGB0.  */
9235 /* For normal numbers, proceed iff radix == 2.  In GCC,
9236 normalized significands are in the range [0.5, 1.0).  We
9237 want the exponent as if they were [1.0, 2.0) so get the
9238 exponent and subtract 1.  */
9239 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9240 return fold_convert_loc (loc, rettype,
9241 build_int_cst (NULL_TREE,
9242 REAL_EXP (value)-1))
9250 /* Fold a call to builtin significand, if radix == 2.  ARG is the
   argument and RETTYPE the return type of the call.  Only constant
   arguments are folded here.  */
9253 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9255 if (! validate_arg (arg, REAL_TYPE))
/* Only fold literal constants that did not overflow.  */
9260 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9262 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9269 /* If arg is +-0, +-Inf or +-NaN, then return it.  */
9270 return fold_convert_loc (loc, rettype, arg);
9272 /* For normal numbers, proceed iff radix == 2.  */
9273 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9275 REAL_VALUE_TYPE result = *value;
9276 /* In GCC, normalized significands are in the range [0.5,
9277 1.0).  We want them to be [1.0, 2.0) so set the
9279 SET_REAL_EXP (&result, 1);
9280 return build_real (rettype, result);
9289 /* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the real argument, ARG1 the int* exponent pointer, and RETTYPE the
   return type of the call.  Folds only constant ARG0, producing a
   COMPOUND_EXPR that stores the exponent and yields the fraction.  */
9292 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9294 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
/* Only fold literal constants that did not overflow.  */
9299 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
/* Turn the pointer into the object it points to, so we can build a
   store to it below.  */
9302 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9304 /* Proceed if a valid pointer type was passed in.  */
9305 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9307 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9313 /* For +-0, return (*exp = 0, +-0).  */
9314 exp = integer_zero_node;
9319 /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
9320 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9323 /* Since the frexp function always expects base 2, and in
9324 GCC normalized significands are already in the range
9325 [0.5, 1.0), we have exactly what frexp wants.  */
9326 REAL_VALUE_TYPE frac_rvt = *value;
9327 SET_REAL_EXP (&frac_rvt, 0);
9328 frac = build_real (rettype, frac_rvt);
9329 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9336 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
9337 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9338 TREE_SIDE_EFFECTS (arg1) = 1;
9339 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9345 /* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
9346 then we can assume the base is two.  If it's false, then we have to
9347 check the mode of the TYPE parameter in certain cases.  ARG0 is the
   real argument, ARG1 the integer exponent adjustment, TYPE the return
   type of the call.  */
9350 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9351 tree type, bool ldexp)
9353 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9358 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
9359 if (real_zerop (arg0) || integer_zerop (arg1)
9360 || (TREE_CODE (arg0) == REAL_CST
9361 && !real_isfinite (&TREE_REAL_CST (arg0))))
9362 return omit_one_operand_loc (loc, type, arg0, arg1);
9364 /* If both arguments are constant, then try to evaluate it.  For
   scalbn/scalbln the target radix must be 2 for this to be valid.  */
9365 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9366 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9367 && host_integerp (arg1, 0))
9369 /* Bound the maximum adjustment to twice the range of the
9370 mode's valid exponents.  Use abs to ensure the range is
9371 positive as a sanity check.  */
9372 const long max_exp_adj = 2 *
9373 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9374 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9376 /* Get the user-requested adjustment.  */
9377 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9379 /* The requested adjustment must be inside this range.  This
9380 is a preliminary cap to avoid things like overflow, we
9381 may still fail to compute the result for other reasons.  */
9382 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9384 REAL_VALUE_TYPE initial_result;
9386 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9388 /* Ensure we didn't overflow.  */
9389 if (! real_isinf (&initial_result))
9391 const REAL_VALUE_TYPE trunc_result
9392 = real_value_truncate (TYPE_MODE (type), initial_result);
9394 /* Only proceed if the target mode can hold the
   result exactly (no precision was lost by truncation).  */
9396 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9397 return build_real (type, trunc_result);
9406 /* Fold a call to builtin modf.  ARG0 is the real argument, ARG1 the
   pointer to the integral-part output, RETTYPE the return type of the
   call.  Folds only constant ARG0, producing a COMPOUND_EXPR that
   stores the integral part and yields the fractional part.  */
9409 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9411 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
/* Only fold literal constants that did not overflow.  */
9416 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
/* Turn the pointer into the object it points to so we can build the
   store below.  */
9419 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9421 /* Proceed if a valid pointer type was passed in.  */
9422 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9424 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9425 REAL_VALUE_TYPE trunc, frac;
9431 /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
9432 trunc = frac = *value;
9435 /* For +-Inf, return (*arg1 = arg0, +-0).  */
9437 frac.sign = value->sign;
9441 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
9442 real_trunc (&trunc, VOIDmode, value);
9443 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9444 /* If the original number was negative and already
9445 integral, then the fractional part is -0.0.  */
9446 if (value->sign && frac.cl == rvc_zero)
9447 frac.sign = value->sign;
9451 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
9452 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9453 build_real (rettype, trunc));
9454 TREE_SIDE_EFFECTS (arg1) = 1;
9455 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9456 build_real (rettype, frac));
9462 /* Given a location LOC, an interclass builtin function decl FNDECL
9463 and its single argument ARG, return an folded expression computing
9464 the same, or NULL_TREE if we either couldn't or didn't want to fold
9465 (the latter happen if there's an RTL instruction available).  */
9468 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9470 enum machine_mode mode;
9472 if (!validate_arg (arg, REAL_TYPE))
/* If the target has a direct instruction for this classification,
   prefer expanding to RTL instead of folding here.  */
9475 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9478 mode = TYPE_MODE (TREE_TYPE (arg));
9480 /* If there is no optab, try generic code.  */
9481 switch (DECL_FUNCTION_CODE (fndecl))
9485 CASE_FLT_FN (BUILT_IN_ISINF):
9487 /* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
9488 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9489 tree const type = TREE_TYPE (arg);
/* Build the largest finite value of ARG's mode as the comparison
   threshold.  */
9493 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9494 real_from_string (&r, buf);
9495 result = build_call_expr (isgr_fn, 2,
9496 fold_build1_loc (loc, ABS_EXPR, type, arg),
9497 build_real (type, r));
9500 CASE_FLT_FN (BUILT_IN_FINITE):
9501 case BUILT_IN_ISFINITE:
9503 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
9504 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9505 tree const type = TREE_TYPE (arg);
9509 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9510 real_from_string (&r, buf);
9511 result = build_call_expr (isle_fn, 2,
9512 fold_build1_loc (loc, ABS_EXPR, type, arg),
9513 build_real (type, r));
9514 /*result = fold_build2_loc (loc, UNGT_EXPR,
9515 TREE_TYPE (TREE_TYPE (fndecl)),
9516 fold_build1_loc (loc, ABS_EXPR, type, arg),
9517 build_real (type, r));
9518 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9519 TREE_TYPE (TREE_TYPE (fndecl)),
9523 case BUILT_IN_ISNORMAL:
9525 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9526 islessequal(fabs(x),DBL_MAX).  */
9527 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9528 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9529 tree const type = TREE_TYPE (arg);
9530 REAL_VALUE_TYPE rmax, rmin;
/* rmax is the largest finite value, rmin the smallest normal value
   (0x1p(emin-1)) of ARG's mode.  */
9533 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9534 real_from_string (&rmax, buf);
9535 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9536 real_from_string (&rmin, buf);
/* Save fabs(x) so both comparisons evaluate it only once.  */
9537 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9538 result = build_call_expr (isle_fn, 2, arg,
9539 build_real (type, rmax));
9540 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9541 build_call_expr (isge_fn, 2, arg,
9542 build_real (type, rmin)));
9552 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9553 ARG is the argument for the call.  BUILTIN_INDEX selects which
   classification to fold; FNDECL supplies the call's return type.  */
9556 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9558 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9561 if (!validate_arg (arg, REAL_TYPE))
9564 switch (builtin_index)
9566 case BUILT_IN_ISINF:
/* Without infinities in this mode the answer is always 0; keep ARG
   for its side effects.  */
9567 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9568 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9570 if (TREE_CODE (arg) == REAL_CST)
9572 r = TREE_REAL_CST (arg);
9573 if (real_isinf (&r))
9574 return real_compare (GT_EXPR, &r, &dconst0)
9575 ? integer_one_node : integer_minus_one_node;
9577 return integer_zero_node;
9582 case BUILT_IN_ISINF_SIGN:
9584 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9585 /* In a boolean context, GCC will fold the inner COND_EXPR to
9586 1.  So e.g. "if (isinf_sign(x))" would be folded to just
9587 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))".  */
9588 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9589 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9590 tree tmp = NULL_TREE;
/* Save ARG so the two calls evaluate it only once.  */
9592 arg = builtin_save_expr (arg);
9594 if (signbit_fn && isinf_fn)
9596 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9597 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
/* Normalize both calls to 0/1 booleans before combining them.  */
9599 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9600 signbit_call, integer_zero_node);
9601 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9602 isinf_call, integer_zero_node);
9604 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9605 integer_minus_one_node, integer_one_node);
9606 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9614 case BUILT_IN_ISFINITE:
/* With neither NaNs nor infinities every value is finite; keep ARG
   for its side effects.  */
9615 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9616 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9617 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9619 if (TREE_CODE (arg) == REAL_CST)
9621 r = TREE_REAL_CST (arg);
9622 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9627 case BUILT_IN_ISNAN:
9628 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9629 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9631 if (TREE_CODE (arg) == REAL_CST)
9633 r = TREE_REAL_CST (arg);
9634 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* isnan(x) is equivalent to the unordered self-comparison x != x;
   save ARG so it is evaluated only once.  */
9637 arg = builtin_save_expr (arg);
9638 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9645 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9646 This builtin will generate code to return the appropriate floating
9647 point classification depending on the value of the floating point
9648 number passed in.  The possible return values must be supplied as
9649 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9650 FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
9651 one floating point argument which is "type generic".  */
9654 fold_builtin_fpclassify (location_t loc, tree exp)
9656 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9657 arg, type, res, tmp;
9658 enum machine_mode mode;
9662 /* Verify the required arguments in the original call.  */
9663 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9664 INTEGER_TYPE, INTEGER_TYPE,
9665 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9668 fp_nan = CALL_EXPR_ARG (exp, 0);
9669 fp_infinite = CALL_EXPR_ARG (exp, 1);
9670 fp_normal = CALL_EXPR_ARG (exp, 2);
9671 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9672 fp_zero = CALL_EXPR_ARG (exp, 4);
9673 arg = CALL_EXPR_ARG (exp, 5);
9674 type = TREE_TYPE (arg);
9675 mode = TYPE_MODE (type);
/* Work on fabs(arg), saved so it is evaluated only once across the
   nested comparisons built below.  */
9676 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
/* Build the classification chain from the innermost test outward:
9680 (fabs(x) == Inf ? FP_INFINITE :
9681 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9682 (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
9684 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9685 build_real (type, dconst0));
9686 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9687 tmp, fp_zero, fp_subnormal);
/* The smallest normal value of this mode is 0x1p(emin-1).  */
9689 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9690 real_from_string (&r, buf);
9691 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9692 arg, build_real (type, r));
9693 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
/* Only test for infinity/NaN when the mode honors them.  */
9695 if (HONOR_INFINITIES (mode))
9698 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9699 build_real (type, r));
9700 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9704 if (HONOR_NANS (mode))
9706 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9707 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9713 /* Fold a call to an unordered comparison function such as
9714 __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
9715 being called and ARG0 and ARG1 are the arguments for the call.
9716 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9717 the opposite of the desired result.  UNORDERED_CODE is used
9718 for modes that can hold NaNs and ORDERED_CODE is used for
   the other modes.  */
9722 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9723 enum tree_code unordered_code,
9724 enum tree_code ordered_code)
9726 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9727 enum tree_code code;
9729 enum tree_code code0, code1;
9730 tree cmp_type = NULL_TREE;
9732 type0 = TREE_TYPE (arg0);
9733 type1 = TREE_TYPE (arg1);
9735 code0 = TREE_CODE (type0);
9736 code1 = TREE_CODE (type1);
9738 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9739 /* Choose the wider of two real types.  */
9740 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
/* If only one operand is real, compare in that real type.  */
9742 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9744 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9747 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9748 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9750 if (unordered_code == UNORDERED_EXPR)
/* __builtin_isunordered: if NaNs are not honored the answer is
   always 0; keep the operands for their side effects.  */
9752 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9753 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9754 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
/* Build the inverse comparison and negate it, per the contract that
   UNORDERED_CODE/ORDERED_CODE give the opposite of the result.  */
9757 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9759 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9760 fold_build2_loc (loc, code, type, arg0, arg1));
9763 /* Fold a call to built-in function FNDECL with 0 arguments.
9764 IGNORE is true if the result of the function call is ignored.  This
9765 function returns NULL_TREE if no simplification was possible.  */
9768 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9770 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9771 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* inf/huge_val fold to an infinity constant of the return type;
   the bool argument selects the "warn if unsupported" behavior
   (true for INF variants, false for HUGE_VAL).  */
9774 CASE_FLT_FN (BUILT_IN_INF):
9775 case BUILT_IN_INFD32:
9776 case BUILT_IN_INFD64:
9777 case BUILT_IN_INFD128:
9778 return fold_builtin_inf (loc, type, true);
9780 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9781 return fold_builtin_inf (loc, type, false);
9783 case BUILT_IN_CLASSIFY_TYPE:
/* classify_type with no argument classifies "no type".  */
9784 return fold_builtin_classify_type (NULL_TREE);
9792 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9793 IGNORE is true if the result of the function call is ignored.  This
9794 function returns NULL_TREE if no simplification was possible.
   This is the central one-argument dispatcher: it switches on the
   builtin's function code and delegates to the specialized folders
   above, or folds constant arguments directly via MPFR/MPC.  */
9797 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9799 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9800 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9803 case BUILT_IN_CONSTANT_P:
9805 tree val = fold_builtin_constant_p (arg0);
9807 /* Gimplification will pull the CALL_EXPR for the builtin out of
9808 an if condition.  When not optimizing, we'll not CSE it back.
9809 To avoid link error types of regressions, return false now.  */
9810 if (!val && !optimize)
9811 val = integer_zero_node;
9816 case BUILT_IN_CLASSIFY_TYPE:
9817 return fold_builtin_classify_type (arg0);
9819 case BUILT_IN_STRLEN:
9820 return fold_builtin_strlen (loc, type, arg0);
9822 CASE_FLT_FN (BUILT_IN_FABS):
9823 return fold_builtin_fabs (loc, arg0, type);
9827 case BUILT_IN_LLABS:
9828 case BUILT_IN_IMAXABS:
9829 return fold_builtin_abs (loc, arg0, type);
/* Complex-valued builtins: each requires a COMPLEX_TYPE argument
   with REAL_TYPE components; constants fold through MPC.  */
9831 CASE_FLT_FN (BUILT_IN_CONJ):
9832 if (validate_arg (arg0, COMPLEX_TYPE)
9833 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9834 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9837 CASE_FLT_FN (BUILT_IN_CREAL):
9838 if (validate_arg (arg0, COMPLEX_TYPE)
9839 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9840 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9843 CASE_FLT_FN (BUILT_IN_CIMAG):
9844 if (validate_arg (arg0, COMPLEX_TYPE)
9845 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9846 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9849 CASE_FLT_FN (BUILT_IN_CCOS):
9850 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9852 CASE_FLT_FN (BUILT_IN_CCOSH):
9853 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9855 CASE_FLT_FN (BUILT_IN_CPROJ):
9856 return fold_builtin_cproj(loc, arg0, type);
9858 CASE_FLT_FN (BUILT_IN_CSIN):
9859 if (validate_arg (arg0, COMPLEX_TYPE)
9860 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9861 return do_mpc_arg1 (arg0, type, mpc_sin);
9864 CASE_FLT_FN (BUILT_IN_CSINH):
9865 if (validate_arg (arg0, COMPLEX_TYPE)
9866 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9867 return do_mpc_arg1 (arg0, type, mpc_sinh);
9870 CASE_FLT_FN (BUILT_IN_CTAN):
9871 if (validate_arg (arg0, COMPLEX_TYPE)
9872 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9873 return do_mpc_arg1 (arg0, type, mpc_tan);
9876 CASE_FLT_FN (BUILT_IN_CTANH):
9877 if (validate_arg (arg0, COMPLEX_TYPE)
9878 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9879 return do_mpc_arg1 (arg0, type, mpc_tanh);
9882 CASE_FLT_FN (BUILT_IN_CLOG):
9883 if (validate_arg (arg0, COMPLEX_TYPE)
9884 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9885 return do_mpc_arg1 (arg0, type, mpc_log);
9888 CASE_FLT_FN (BUILT_IN_CSQRT):
9889 if (validate_arg (arg0, COMPLEX_TYPE)
9890 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9891 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9894 CASE_FLT_FN (BUILT_IN_CASIN):
9895 if (validate_arg (arg0, COMPLEX_TYPE)
9896 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9897 return do_mpc_arg1 (arg0, type, mpc_asin);
9900 CASE_FLT_FN (BUILT_IN_CACOS):
9901 if (validate_arg (arg0, COMPLEX_TYPE)
9902 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9903 return do_mpc_arg1 (arg0, type, mpc_acos);
9906 CASE_FLT_FN (BUILT_IN_CATAN):
9907 if (validate_arg (arg0, COMPLEX_TYPE)
9908 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9909 return do_mpc_arg1 (arg0, type, mpc_atan);
9912 CASE_FLT_FN (BUILT_IN_CASINH):
9913 if (validate_arg (arg0, COMPLEX_TYPE)
9914 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9915 return do_mpc_arg1 (arg0, type, mpc_asinh);
9918 CASE_FLT_FN (BUILT_IN_CACOSH):
9919 if (validate_arg (arg0, COMPLEX_TYPE)
9920 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9921 return do_mpc_arg1 (arg0, type, mpc_acosh);
9924 CASE_FLT_FN (BUILT_IN_CATANH):
9925 if (validate_arg (arg0, COMPLEX_TYPE)
9926 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9927 return do_mpc_arg1 (arg0, type, mpc_atanh);
9930 CASE_FLT_FN (BUILT_IN_CABS):
9931 return fold_builtin_cabs (loc, arg0, type, fndecl);
9933 CASE_FLT_FN (BUILT_IN_CARG):
9934 return fold_builtin_carg (loc, arg0, type);
/* Real-valued math builtins: constants fold through MPFR; the
   &dconst... arguments bound the valid input domain where one
   exists (e.g. asin/acos need [-1, 1]).  */
9936 CASE_FLT_FN (BUILT_IN_SQRT):
9937 return fold_builtin_sqrt (loc, arg0, type);
9939 CASE_FLT_FN (BUILT_IN_CBRT):
9940 return fold_builtin_cbrt (loc, arg0, type);
9942 CASE_FLT_FN (BUILT_IN_ASIN):
9943 if (validate_arg (arg0, REAL_TYPE))
9944 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9945 &dconstm1, &dconst1, true);
9948 CASE_FLT_FN (BUILT_IN_ACOS):
9949 if (validate_arg (arg0, REAL_TYPE))
9950 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9951 &dconstm1, &dconst1, true);
9954 CASE_FLT_FN (BUILT_IN_ATAN):
9955 if (validate_arg (arg0, REAL_TYPE))
9956 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9959 CASE_FLT_FN (BUILT_IN_ASINH):
9960 if (validate_arg (arg0, REAL_TYPE))
9961 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9964 CASE_FLT_FN (BUILT_IN_ACOSH):
9965 if (validate_arg (arg0, REAL_TYPE))
9966 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9967 &dconst1, NULL, true);
9970 CASE_FLT_FN (BUILT_IN_ATANH):
9971 if (validate_arg (arg0, REAL_TYPE))
9972 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9973 &dconstm1, &dconst1, false);
9976 CASE_FLT_FN (BUILT_IN_SIN):
9977 if (validate_arg (arg0, REAL_TYPE))
9978 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9981 CASE_FLT_FN (BUILT_IN_COS):
9982 return fold_builtin_cos (loc, arg0, type, fndecl);
9984 CASE_FLT_FN (BUILT_IN_TAN):
9985 return fold_builtin_tan (arg0, type);
9987 CASE_FLT_FN (BUILT_IN_CEXP):
9988 return fold_builtin_cexp (loc, arg0, type);
9990 CASE_FLT_FN (BUILT_IN_CEXPI):
9991 if (validate_arg (arg0, REAL_TYPE))
9992 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9995 CASE_FLT_FN (BUILT_IN_SINH):
9996 if (validate_arg (arg0, REAL_TYPE))
9997 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10000 CASE_FLT_FN (BUILT_IN_COSH):
10001 return fold_builtin_cosh (loc, arg0, type, fndecl);
10003 CASE_FLT_FN (BUILT_IN_TANH):
10004 if (validate_arg (arg0, REAL_TYPE))
10005 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10008 CASE_FLT_FN (BUILT_IN_ERF):
10009 if (validate_arg (arg0, REAL_TYPE))
10010 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10013 CASE_FLT_FN (BUILT_IN_ERFC):
10014 if (validate_arg (arg0, REAL_TYPE))
10015 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10018 CASE_FLT_FN (BUILT_IN_TGAMMA):
10019 if (validate_arg (arg0, REAL_TYPE))
10020 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10023 CASE_FLT_FN (BUILT_IN_EXP):
10024 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10026 CASE_FLT_FN (BUILT_IN_EXP2):
10027 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10029 CASE_FLT_FN (BUILT_IN_EXP10):
10030 CASE_FLT_FN (BUILT_IN_POW10):
10031 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10033 CASE_FLT_FN (BUILT_IN_EXPM1):
10034 if (validate_arg (arg0, REAL_TYPE))
10035 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10038 CASE_FLT_FN (BUILT_IN_LOG):
10039 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10041 CASE_FLT_FN (BUILT_IN_LOG2):
10042 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10044 CASE_FLT_FN (BUILT_IN_LOG10):
10045 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10047 CASE_FLT_FN (BUILT_IN_LOG1P):
10048 if (validate_arg (arg0, REAL_TYPE))
10049 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10050 &dconstm1, NULL, false);
10053 CASE_FLT_FN (BUILT_IN_J0):
10054 if (validate_arg (arg0, REAL_TYPE))
10055 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10059 CASE_FLT_FN (BUILT_IN_J1):
10060 if (validate_arg (arg0, REAL_TYPE))
10061 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10065 CASE_FLT_FN (BUILT_IN_Y0):
10066 if (validate_arg (arg0, REAL_TYPE))
10067 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10068 &dconst0, NULL, false);
10071 CASE_FLT_FN (BUILT_IN_Y1):
10072 if (validate_arg (arg0, REAL_TYPE))
10073 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10074 &dconst0, NULL, false);
10077 CASE_FLT_FN (BUILT_IN_NAN):
10078 case BUILT_IN_NAND32:
10079 case BUILT_IN_NAND64:
10080 case BUILT_IN_NAND128:
10081 return fold_builtin_nan (arg0, type, true);
10083 CASE_FLT_FN (BUILT_IN_NANS):
10084 return fold_builtin_nan (arg0, type, false);
/* Rounding builtins.  */
10086 CASE_FLT_FN (BUILT_IN_FLOOR):
10087 return fold_builtin_floor (loc, fndecl, arg0);
10089 CASE_FLT_FN (BUILT_IN_CEIL):
10090 return fold_builtin_ceil (loc, fndecl, arg0);
10092 CASE_FLT_FN (BUILT_IN_TRUNC):
10093 return fold_builtin_trunc (loc, fndecl, arg0);
10095 CASE_FLT_FN (BUILT_IN_ROUND):
10096 return fold_builtin_round (loc, fndecl, arg0);
10098 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10099 CASE_FLT_FN (BUILT_IN_RINT):
10100 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10102 CASE_FLT_FN (BUILT_IN_LCEIL):
10103 CASE_FLT_FN (BUILT_IN_LLCEIL):
10104 CASE_FLT_FN (BUILT_IN_LFLOOR):
10105 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10106 CASE_FLT_FN (BUILT_IN_LROUND):
10107 CASE_FLT_FN (BUILT_IN_LLROUND):
10108 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10110 CASE_FLT_FN (BUILT_IN_LRINT):
10111 CASE_FLT_FN (BUILT_IN_LLRINT):
10112 return fold_fixed_mathfn (loc, fndecl, arg0);
/* Integer bit-twiddling builtins.  */
10114 case BUILT_IN_BSWAP32:
10115 case BUILT_IN_BSWAP64:
10116 return fold_builtin_bswap (fndecl, arg0);
10118 CASE_INT_FN (BUILT_IN_FFS):
10119 CASE_INT_FN (BUILT_IN_CLZ):
10120 CASE_INT_FN (BUILT_IN_CTZ):
10121 CASE_INT_FN (BUILT_IN_POPCOUNT):
10122 CASE_INT_FN (BUILT_IN_PARITY):
10123 return fold_builtin_bitop (fndecl, arg0);
10125 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10126 return fold_builtin_signbit (loc, arg0, type);
10128 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10129 return fold_builtin_significand (loc, arg0, type);
10131 CASE_FLT_FN (BUILT_IN_ILOGB):
10132 CASE_FLT_FN (BUILT_IN_LOGB):
10133 return fold_builtin_logb (loc, arg0, type);
10135 case BUILT_IN_ISASCII:
10136 return fold_builtin_isascii (loc, arg0);
10138 case BUILT_IN_TOASCII:
10139 return fold_builtin_toascii (loc, arg0);
10141 case BUILT_IN_ISDIGIT:
10142 return fold_builtin_isdigit (loc, arg0);
/* Classification builtins: try the constant/trivial folds first,
   then fall back to the generic interclass expansion.  */
10144 CASE_FLT_FN (BUILT_IN_FINITE):
10145 case BUILT_IN_FINITED32:
10146 case BUILT_IN_FINITED64:
10147 case BUILT_IN_FINITED128:
10148 case BUILT_IN_ISFINITE:
10150 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10153 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10156 CASE_FLT_FN (BUILT_IN_ISINF):
10157 case BUILT_IN_ISINFD32:
10158 case BUILT_IN_ISINFD64:
10159 case BUILT_IN_ISINFD128:
10161 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10164 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10167 case BUILT_IN_ISNORMAL:
10168 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10170 case BUILT_IN_ISINF_SIGN:
10171 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10173 CASE_FLT_FN (BUILT_IN_ISNAN):
10174 case BUILT_IN_ISNAND32:
10175 case BUILT_IN_ISNAND64:
10176 case BUILT_IN_ISNAND128:
10177 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10179 case BUILT_IN_PRINTF:
10180 case BUILT_IN_PRINTF_UNLOCKED:
10181 case BUILT_IN_VPRINTF:
10182 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10184 case BUILT_IN_FREE:
/* free(NULL) is a no-op; replace it with an empty statement.  */
10185 if (integer_zerop (arg0))
10186 return build_empty_stmt (loc);
10197 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10198 IGNORE is true if the result of the function call is ignored. This
10199 function returns NULL_TREE if no simplification was possible. */
10202 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10204 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10205 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10209 CASE_FLT_FN (BUILT_IN_JN):
10210 if (validate_arg (arg0, INTEGER_TYPE)
10211 && validate_arg (arg1, REAL_TYPE))
10212 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10215 CASE_FLT_FN (BUILT_IN_YN):
10216 if (validate_arg (arg0, INTEGER_TYPE)
10217 && validate_arg (arg1, REAL_TYPE))
10218 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10222 CASE_FLT_FN (BUILT_IN_DREM):
10223 CASE_FLT_FN (BUILT_IN_REMAINDER):
10224 if (validate_arg (arg0, REAL_TYPE)
10225 && validate_arg(arg1, REAL_TYPE))
10226 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10229 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10230 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10231 if (validate_arg (arg0, REAL_TYPE)
10232 && validate_arg(arg1, POINTER_TYPE))
10233 return do_mpfr_lgamma_r (arg0, arg1, type);
10236 CASE_FLT_FN (BUILT_IN_ATAN2):
10237 if (validate_arg (arg0, REAL_TYPE)
10238 && validate_arg(arg1, REAL_TYPE))
10239 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10242 CASE_FLT_FN (BUILT_IN_FDIM):
10243 if (validate_arg (arg0, REAL_TYPE)
10244 && validate_arg(arg1, REAL_TYPE))
10245 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10248 CASE_FLT_FN (BUILT_IN_HYPOT):
10249 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10251 CASE_FLT_FN (BUILT_IN_CPOW):
10252 if (validate_arg (arg0, COMPLEX_TYPE)
10253 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10254 && validate_arg (arg1, COMPLEX_TYPE)
10255 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10256 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10259 CASE_FLT_FN (BUILT_IN_LDEXP):
10260 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10261 CASE_FLT_FN (BUILT_IN_SCALBN):
10262 CASE_FLT_FN (BUILT_IN_SCALBLN):
10263 return fold_builtin_load_exponent (loc, arg0, arg1,
10264 type, /*ldexp=*/false);
10266 CASE_FLT_FN (BUILT_IN_FREXP):
10267 return fold_builtin_frexp (loc, arg0, arg1, type);
10269 CASE_FLT_FN (BUILT_IN_MODF):
10270 return fold_builtin_modf (loc, arg0, arg1, type);
10272 case BUILT_IN_BZERO:
10273 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10275 case BUILT_IN_FPUTS:
10276 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10278 case BUILT_IN_FPUTS_UNLOCKED:
10279 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10281 case BUILT_IN_STRSTR:
10282 return fold_builtin_strstr (loc, arg0, arg1, type);
10284 case BUILT_IN_STRCAT:
10285 return fold_builtin_strcat (loc, arg0, arg1);
10287 case BUILT_IN_STRSPN:
10288 return fold_builtin_strspn (loc, arg0, arg1);
10290 case BUILT_IN_STRCSPN:
10291 return fold_builtin_strcspn (loc, arg0, arg1);
10293 case BUILT_IN_STRCHR:
10294 case BUILT_IN_INDEX:
10295 return fold_builtin_strchr (loc, arg0, arg1, type);
10297 case BUILT_IN_STRRCHR:
10298 case BUILT_IN_RINDEX:
10299 return fold_builtin_strrchr (loc, arg0, arg1, type);
10301 case BUILT_IN_STRCPY:
10302 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10304 case BUILT_IN_STPCPY:
10307 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10311 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10314 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10317 case BUILT_IN_STRCMP:
10318 return fold_builtin_strcmp (loc, arg0, arg1);
10320 case BUILT_IN_STRPBRK:
10321 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10323 case BUILT_IN_EXPECT:
10324 return fold_builtin_expect (loc, arg0, arg1);
10326 CASE_FLT_FN (BUILT_IN_POW):
10327 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10329 CASE_FLT_FN (BUILT_IN_POWI):
10330 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10332 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10333 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10335 CASE_FLT_FN (BUILT_IN_FMIN):
10336 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10338 CASE_FLT_FN (BUILT_IN_FMAX):
10339 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10341 case BUILT_IN_ISGREATER:
10342 return fold_builtin_unordered_cmp (loc, fndecl,
10343 arg0, arg1, UNLE_EXPR, LE_EXPR);
10344 case BUILT_IN_ISGREATEREQUAL:
10345 return fold_builtin_unordered_cmp (loc, fndecl,
10346 arg0, arg1, UNLT_EXPR, LT_EXPR);
10347 case BUILT_IN_ISLESS:
10348 return fold_builtin_unordered_cmp (loc, fndecl,
10349 arg0, arg1, UNGE_EXPR, GE_EXPR);
10350 case BUILT_IN_ISLESSEQUAL:
10351 return fold_builtin_unordered_cmp (loc, fndecl,
10352 arg0, arg1, UNGT_EXPR, GT_EXPR);
10353 case BUILT_IN_ISLESSGREATER:
10354 return fold_builtin_unordered_cmp (loc, fndecl,
10355 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10356 case BUILT_IN_ISUNORDERED:
10357 return fold_builtin_unordered_cmp (loc, fndecl,
10358 arg0, arg1, UNORDERED_EXPR,
10361 /* We do the folding for va_start in the expander. */
10362 case BUILT_IN_VA_START:
10365 case BUILT_IN_SPRINTF:
10366 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10368 case BUILT_IN_OBJECT_SIZE:
10369 return fold_builtin_object_size (arg0, arg1);
10371 case BUILT_IN_PRINTF:
10372 case BUILT_IN_PRINTF_UNLOCKED:
10373 case BUILT_IN_VPRINTF:
10374 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10376 case BUILT_IN_PRINTF_CHK:
10377 case BUILT_IN_VPRINTF_CHK:
10378 if (!validate_arg (arg0, INTEGER_TYPE)
10379 || TREE_SIDE_EFFECTS (arg0))
10382 return fold_builtin_printf (loc, fndecl,
10383 arg1, NULL_TREE, ignore, fcode);
10386 case BUILT_IN_FPRINTF:
10387 case BUILT_IN_FPRINTF_UNLOCKED:
10388 case BUILT_IN_VFPRINTF:
10389 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10398 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10399 and ARG2. IGNORE is true if the result of the function call is ignored.
10400 This function returns NULL_TREE if no simplification was possible. */
10403 fold_builtin_3 (location_t loc, tree fndecl,
10404 tree arg0, tree arg1, tree arg2, bool ignore)
/* TYPE is the return type of the builtin; FCODE selects the dispatch arm.  */
10406 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10407 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10411 CASE_FLT_FN (BUILT_IN_SINCOS):
10412 return fold_builtin_sincos (loc, arg0, arg1, arg2);
/* Constant-fold fma/remquo through MPFR when all arguments validate.  */
10414 CASE_FLT_FN (BUILT_IN_FMA):
10415 if (validate_arg (arg0, REAL_TYPE)
10416 && validate_arg(arg1, REAL_TYPE)
10417 && validate_arg(arg2, REAL_TYPE))
10418 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10421 CASE_FLT_FN (BUILT_IN_REMQUO):
10422 if (validate_arg (arg0, REAL_TYPE)
10423 && validate_arg(arg1, REAL_TYPE)
10424 && validate_arg(arg2, POINTER_TYPE))
10425 return do_mpfr_remquo (arg0, arg1, arg2);
10428 case BUILT_IN_MEMSET:
10429 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
/* bcopy(src, dst, n) swaps its first two operands relative to memmove;
   endp=3 denotes the memmove-style (overlap-safe) variant.  */
10431 case BUILT_IN_BCOPY:
10432 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10433 void_type_node, true, /*endp=*/3);
10435 case BUILT_IN_MEMCPY:
10436 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10437 type, ignore, /*endp=*/0);
10439 case BUILT_IN_MEMPCPY:
10440 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10441 type, ignore, /*endp=*/1);
10443 case BUILT_IN_MEMMOVE:
10444 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10445 type, ignore, /*endp=*/3);
10447 case BUILT_IN_STRNCAT:
10448 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10450 case BUILT_IN_STRNCPY:
10451 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10453 case BUILT_IN_STRNCMP:
10454 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10456 case BUILT_IN_MEMCHR:
10457 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10459 case BUILT_IN_BCMP:
10460 case BUILT_IN_MEMCMP:
10461 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10463 case BUILT_IN_SPRINTF:
10464 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10466 case BUILT_IN_STRCPY_CHK:
10467 case BUILT_IN_STPCPY_CHK:
10468 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10471 case BUILT_IN_STRCAT_CHK:
10472 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
/* For the *_chk printf variants the flag argument (arg0 here, arg1 for
   the fprintf family below) must be a side-effect-free integer before we
   drop it and fold the remaining arguments.  */
10474 case BUILT_IN_PRINTF_CHK:
10475 case BUILT_IN_VPRINTF_CHK:
10476 if (!validate_arg (arg0, INTEGER_TYPE)
10477 || TREE_SIDE_EFFECTS (arg0))
10480 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10483 case BUILT_IN_FPRINTF:
10484 case BUILT_IN_FPRINTF_UNLOCKED:
10485 case BUILT_IN_VFPRINTF:
10486 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10489 case BUILT_IN_FPRINTF_CHK:
10490 case BUILT_IN_VFPRINTF_CHK:
10491 if (!validate_arg (arg1, INTEGER_TYPE)
10492 || TREE_SIDE_EFFECTS (arg1))
10495 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10504 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10505 ARG2, and ARG3. IGNORE is true if the result of the function call is
10506 ignored. This function returns NULL_TREE if no simplification was
10510 fold_builtin_4 (location_t loc, tree fndecl,
10511 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10513 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* All 4-argument builtins handled here are the _FORTIFY_SOURCE "_chk"
   variants plus the fprintf family.  */
10517 case BUILT_IN_MEMCPY_CHK:
10518 case BUILT_IN_MEMPCPY_CHK:
10519 case BUILT_IN_MEMMOVE_CHK:
10520 case BUILT_IN_MEMSET_CHK:
10521 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10523 DECL_FUNCTION_CODE (fndecl));
10525 case BUILT_IN_STRNCPY_CHK:
10526 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10528 case BUILT_IN_STRNCAT_CHK:
10529 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
/* The flag argument (arg1) must be a side-effect-free integer before it
   is dropped and the rest handed to the generic fprintf folder.  */
10531 case BUILT_IN_FPRINTF_CHK:
10532 case BUILT_IN_VFPRINTF_CHK:
10533 if (!validate_arg (arg1, INTEGER_TYPE)
10534 || TREE_SIDE_EFFECTS (arg1))
10537 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10547 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10548 arguments, where NARGS <= 4. IGNORE is true if the result of the
10549 function call is ignored. This function returns NULL_TREE if no
10550 simplification was possible. Note that this only folds builtins with
10551 fixed argument patterns. Foldings that do varargs-to-varargs
10552 transformations, or that match calls with more than 4 arguments,
10553 need to be handled with fold_builtin_varargs instead. */
10555 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10558 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10560 tree ret = NULL_TREE;
/* Dispatch on arity to the fixed-argument folders.  */
10565 ret = fold_builtin_0 (loc, fndecl, ignore);
10568 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10571 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10574 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10577 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
/* On success, wrap the folded result in a NOP_EXPR carrying the call's
   location and TREE_NO_WARNING, so removing the call does not trigger
   "statement with no effect"-style diagnostics.  */
10585 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10586 SET_EXPR_LOCATION (ret, loc);
10587 TREE_NO_WARNING (ret) = 1;
10593 /* Builtins with folding operations that operate on "..." arguments
10594 need special handling; we need to store the arguments in a convenient
10595 data structure before attempting any folding. Fortunately there are
10596 only a few builtins that fall into this category. FNDECL is the
10597 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10598 result of the function call is ignored. */
10601 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10602 bool ignore ATTRIBUTE_UNUSED)
10604 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10605 tree ret = NULL_TREE;
10609 case BUILT_IN_SPRINTF_CHK:
10610 case BUILT_IN_VSPRINTF_CHK:
10611 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10614 case BUILT_IN_SNPRINTF_CHK:
10615 case BUILT_IN_VSNPRINTF_CHK:
10616 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10619 case BUILT_IN_FPCLASSIFY:
10620 ret = fold_builtin_fpclassify (loc, exp);
/* Same warning-suppressing NOP_EXPR wrapper as fold_builtin_n.  */
10628 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10629 SET_EXPR_LOCATION (ret, loc);
10630 TREE_NO_WARNING (ret) = 1;
10636 /* Return true if FNDECL shouldn't be folded right now.
10637 If a built-in function has an inline attribute always_inline
10638 wrapper, defer folding it after always_inline functions have
10639 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10640 might not be performed. */
10643 avoid_folding_inline_builtin (tree fndecl)
/* All four conditions must hold: declared inline, disregards inline
   limits, always_inline inlining not yet done for this function, and
   carries the always_inline attribute.  */
10645 return (DECL_DECLARED_INLINE_P (fndecl)
10646 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10648 && !cfun->always_inline_functions_inlined
10649 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10652 /* A wrapper function for builtin folding that prevents warnings for
10653 "statement without effect" and the like, caused by removing the
10654 call node earlier than the warning is generated. */
10657 fold_call_expr (location_t loc, tree exp, bool ignore)
10659 tree ret = NULL_TREE;
10660 tree fndecl = get_callee_fndecl (exp);
10662 && TREE_CODE (fndecl) == FUNCTION_DECL
10663 && DECL_BUILT_IN (fndecl)
10664 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10665 yet. Defer folding until we see all the arguments
10666 (after inlining). */
10667 && !CALL_EXPR_VA_ARG_PACK (exp))
10669 int nargs = call_expr_nargs (exp);
10671 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10672 instead last argument is __builtin_va_arg_pack (). Defer folding
10673 even in that case, until arguments are finalized. */
10674 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10676 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10678 && TREE_CODE (fndecl2) == FUNCTION_DECL
10679 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10680 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
/* Also defer builtins wrapped by an always_inline function.  */
10684 if (avoid_folding_inline_builtin (fndecl))
/* Machine-specific builtins are folded by the target hook.  */
10687 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10688 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10689 CALL_EXPR_ARGP (exp), ignore);
/* Fixed-arity calls go through fold_builtin_n; larger/varargs calls
   through fold_builtin_varargs.  */
10692 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10694 tree *args = CALL_EXPR_ARGP (exp);
10695 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10698 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10706 /* Conveniently construct a function call expression. FNDECL names the
10707 function to be called and N arguments are passed in the array
10711 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
/* Take FNDECL's address and hand off to fold_builtin_call_array, which
   folds the call when possible or builds a plain CALL_EXPR otherwise.  */
10713 tree fntype = TREE_TYPE (fndecl);
10714 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10716 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10719 /* Conveniently construct a function call expression. FNDECL names the
10720 function to be called and the arguments are passed in the vector
10724 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
/* Thin adapter: unwrap the VEC into (length, address) form.  */
10726 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10727 VEC_address (tree, vec));
10731 /* Conveniently construct a function call expression. FNDECL names the
10732 function to be called, N is the number of arguments, and the "..."
10733 parameters are the argument expressions. */
10736 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
/* Collect the N variadic tree arguments into a stack array, then
   delegate to the array-based builder.  */
10739 tree *argarray = XALLOCAVEC (tree, n);
10743 for (i = 0; i < n; i++)
10744 argarray[i] = va_arg (ap, tree);
10746 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10749 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10750 varargs macros aren't supported by all bootstrap compilers. */
10753 build_call_expr (tree fndecl, int n, ...)
/* Identical marshalling to build_call_expr_loc, with UNKNOWN_LOCATION.  */
10756 tree *argarray = XALLOCAVEC (tree, n);
10760 for (i = 0; i < n; i++)
10761 argarray[i] = va_arg (ap, tree);
10763 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10766 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10767 N arguments are passed in the array ARGARRAY. */
10770 fold_builtin_call_array (location_t loc, tree type,
10775 tree ret = NULL_TREE;
/* Only attempt folding for direct calls (ADDR_EXPR of a builtin
   FUNCTION_DECL); everything else gets a plain CALL_EXPR.  */
10778 if (TREE_CODE (fn) == ADDR_EXPR)
10780 tree fndecl = TREE_OPERAND (fn, 0);
10781 if (TREE_CODE (fndecl) == FUNCTION_DECL
10782 && DECL_BUILT_IN (fndecl))
10784 /* If last argument is __builtin_va_arg_pack (), arguments to this
10785 function are not finalized yet. Defer folding until they are. */
10786 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10788 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10790 && TREE_CODE (fndecl2) == FUNCTION_DECL
10791 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10792 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10793 return build_call_array_loc (loc, type, fn, n, argarray);
/* Defer always_inline-wrapped builtins, mirroring fold_call_expr.  */
10795 if (avoid_folding_inline_builtin (fndecl))
10796 return build_call_array_loc (loc, type, fn, n, argarray);
10797 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10799 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10803 return build_call_array_loc (loc, type, fn, n, argarray);
10805 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10807 /* First try the transformations that don't require consing up
10809 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10814 /* If we got this far, we need to build an exp. */
10815 exp = build_call_array_loc (loc, type, fn, n, argarray);
10816 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10817 return ret ? ret : exp;
10821 return build_call_array_loc (loc, type, fn, n, argarray);
10824 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10825 along with N new arguments specified as the "..." parameters. SKIP
10826 is the number of arguments in EXP to be omitted. This function is used
10827 to do varargs-to-varargs transformations. */
10830 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10832 int oldnargs = call_expr_nargs (exp);
10833 int nargs = oldnargs - skip + n;
10834 tree fntype = TREE_TYPE (fndecl);
10835 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* Build the new argument list: the N "..." arguments first, then the
   original arguments of EXP past the first SKIP.  */
10843 buffer = XALLOCAVEC (tree, nargs);
10845 for (i = 0; i < n; i++)
10846 buffer[i] = va_arg (ap, tree);
10848 for (j = skip; j < oldnargs; j++, i++)
10849 buffer[i] = CALL_EXPR_ARG (exp, j);
/* NOTE(review): this alternative path aliases EXP's own argument array
   past SKIP — presumably taken when no new arguments are prepended
   (n == 0); confirm against the full source.  */
10852 buffer = CALL_EXPR_ARGP (exp) + skip;
10854 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10857 /* Validate a single argument ARG against a tree code CODE representing
10861 validate_arg (const_tree arg, enum tree_code code)
/* POINTER_TYPE and INTEGER_TYPE accept any pointer-ish / integral type
   via the predicate macros; all other codes require an exact match of
   the argument's type code.  */
10865 else if (code == POINTER_TYPE)
10866 return POINTER_TYPE_P (TREE_TYPE (arg));
10867 else if (code == INTEGER_TYPE)
10868 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10869 return code == TREE_CODE (TREE_TYPE (arg));
10872 /* This function validates the types of a function call argument list
10873 against a specified list of tree_codes. If the last specifier is a 0,
10874 that represents an ellipses, otherwise the last specifier must be a
10877 This is the GIMPLE version of validate_arglist. Eventually we want to
10878 completely convert builtins.c to work from GIMPLEs and the tree based
10879 validate_arglist will then be removed. */
10882 validate_gimple_arglist (const_gimple call, ...)
10884 enum tree_code code;
10890 va_start (ap, call);
/* Walk the variadic specifier list in parallel with CALL's arguments.  */
10895 code = (enum tree_code) va_arg (ap, int);
10899 /* This signifies an ellipses, any further arguments are all ok. */
10903 /* This signifies an endlink, if no arguments remain, return
10904 true, otherwise return false. */
10905 res = (i == gimple_call_num_args (call));
10908 /* If no parameters remain or the parameter's code does not
10909 match the specified code, return false. Otherwise continue
10910 checking any remaining arguments. */
10911 arg = gimple_call_arg (call, i++);
10912 if (!validate_arg (arg, code))
10919 /* We need gotos here since we can only have one VA_CLOSE in a
10927 /* This function validates the types of a function call argument list
10928 against a specified list of tree_codes. If the last specifier is a 0,
10929 that represents an ellipses, otherwise the last specifier must be a
10933 validate_arglist (const_tree callexpr, ...)
10935 enum tree_code code;
10938 const_call_expr_arg_iterator iter;
10941 va_start (ap, callexpr);
10942 init_const_call_expr_arg_iterator (callexpr, &iter);
/* Same walk as validate_gimple_arglist, but over a CALL_EXPR's argument
   iterator instead of a gimple call statement.  */
10946 code = (enum tree_code) va_arg (ap, int);
10950 /* This signifies an ellipses, any further arguments are all ok. */
10954 /* This signifies an endlink, if no arguments remain, return
10955 true, otherwise return false. */
10956 res = !more_const_call_expr_args_p (&iter);
10959 /* If no parameters remain or the parameter's code does not
10960 match the specified code, return false. Otherwise continue
10961 checking any remaining arguments. */
10962 arg = next_const_call_expr_arg (&iter);
10963 if (!validate_arg (arg, code))
10970 /* We need gotos here since we can only have one VA_CLOSE in a
10978 /* Default target-specific builtin expander that does nothing.
   All parameters are intentionally unused; this is the fallback
   implementation of the target expand-builtin hook. */
10981 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10982 rtx target ATTRIBUTE_UNUSED,
10983 rtx subtarget ATTRIBUTE_UNUSED,
10984 enum machine_mode mode ATTRIBUTE_UNUSED,
10985 int ignore ATTRIBUTE_UNUSED)
10990 /* Returns true is EXP represents data that would potentially reside
10991 in a readonly section. */
10994 readonly_data_expr (tree exp)
/* Only address expressions can point at read-only data we recognize.  */
10998 if (TREE_CODE (exp) != ADDR_EXPR)
11001 exp = get_base_address (TREE_OPERAND (exp, 0));
11005 /* Make sure we call decl_readonly_section only for trees it
11006 can handle (since it returns true for everything it doesn't
11008 if (TREE_CODE (exp) == STRING_CST
11009 || TREE_CODE (exp) == CONSTRUCTOR
11010 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11011 return decl_readonly_section (exp, 0);
11016 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11017 to the call, and TYPE is its return type.
11019 Return NULL_TREE if no simplification was possible, otherwise return the
11020 simplified form of the call as a tree.
11022 The simplified form may be a constant or other expression which
11023 computes the same value, but in a more efficient manner (including
11024 calls to other builtin functions).
11026 The call may contain arguments which need to be evaluated, but
11027 which are not useful to determine the result of the call. In
11028 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11029 COMPOUND_EXPR will be an argument which must be evaluated.
11030 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11031 COMPOUND_EXPR in the chain will contain the tree for the simplified
11032 form of the builtin function call. */
11035 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11037 if (!validate_arg (s1, POINTER_TYPE)
11038 || !validate_arg (s2, POINTER_TYPE))
11043 const char *p1, *p2;
/* c_getstr extracts a compile-time constant C string, or NULL.  */
11045 p2 = c_getstr (s2);
11049 p1 = c_getstr (s1);
/* Both strings constant: compute strstr at compile time.  */
11052 const char *r = strstr (p1, p2);
11056 return build_int_cst (TREE_TYPE (s1), 0);
11058 /* Return an offset into the constant string argument. */
11059 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11060 s1, size_int (r - p1));
11061 return fold_convert_loc (loc, type, tem);
11064 /* The argument is const char *, and the result is char *, so we need
11065 a type conversion here to avoid a warning. */
11067 return fold_convert_loc (loc, type, s1);
11072 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11076 /* New argument list transforming strstr(s1, s2) to
11077 strchr(s1, s2[0]). */
11078 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11082 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11083 the call, and TYPE is its return type.
11085 Return NULL_TREE if no simplification was possible, otherwise return the
11086 simplified form of the call as a tree.
11088 The simplified form may be a constant or other expression which
11089 computes the same value, but in a more efficient manner (including
11090 calls to other builtin functions).
11092 The call may contain arguments which need to be evaluated, but
11093 which are not useful to determine the result of the call. In
11094 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11095 COMPOUND_EXPR will be an argument which must be evaluated.
11096 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11097 COMPOUND_EXPR in the chain will contain the tree for the simplified
11098 form of the builtin function call. */
11101 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11103 if (!validate_arg (s1, POINTER_TYPE)
11104 || !validate_arg (s2, INTEGER_TYPE))
/* Only fold when the character is a compile-time constant.  */
11110 if (TREE_CODE (s2) != INTEGER_CST)
11113 p1 = c_getstr (s1);
/* target_char_cast converts S2 to a host char; nonzero means failure.  */
11120 if (target_char_cast (s2, &c))
11123 r = strchr (p1, c);
11126 return build_int_cst (TREE_TYPE (s1), 0);
11128 /* Return an offset into the constant string argument. */
11129 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11130 s1, size_int (r - p1));
11131 return fold_convert_loc (loc, type, tem);
11137 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11138 the call, and TYPE is its return type.
11140 Return NULL_TREE if no simplification was possible, otherwise return the
11141 simplified form of the call as a tree.
11143 The simplified form may be a constant or other expression which
11144 computes the same value, but in a more efficient manner (including
11145 calls to other builtin functions).
11147 The call may contain arguments which need to be evaluated, but
11148 which are not useful to determine the result of the call. In
11149 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11150 COMPOUND_EXPR will be an argument which must be evaluated.
11151 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11152 COMPOUND_EXPR in the chain will contain the tree for the simplified
11153 form of the builtin function call. */
11156 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11158 if (!validate_arg (s1, POINTER_TYPE)
11159 || !validate_arg (s2, INTEGER_TYPE))
11166 if (TREE_CODE (s2) != INTEGER_CST)
11169 p1 = c_getstr (s1);
11176 if (target_char_cast (s2, &c))
/* Constant string: evaluate strrchr at compile time.  */
11179 r = strrchr (p1, c);
11182 return build_int_cst (TREE_TYPE (s1), 0);
11184 /* Return an offset into the constant string argument. */
11185 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11186 s1, size_int (r - p1));
11187 return fold_convert_loc (loc, type, tem);
/* Non-constant string: only the last-NUL case can become strchr.  */
11190 if (! integer_zerop (s2))
11193 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11197 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11198 return build_call_expr_loc (loc, fn, 2, s1, s2);
11202 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11203 to the call, and TYPE is its return type.
11205 Return NULL_TREE if no simplification was possible, otherwise return the
11206 simplified form of the call as a tree.
11208 The simplified form may be a constant or other expression which
11209 computes the same value, but in a more efficient manner (including
11210 calls to other builtin functions).
11212 The call may contain arguments which need to be evaluated, but
11213 which are not useful to determine the result of the call. In
11214 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11215 COMPOUND_EXPR will be an argument which must be evaluated.
11216 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11217 COMPOUND_EXPR in the chain will contain the tree for the simplified
11218 form of the builtin function call. */
11221 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11223 if (!validate_arg (s1, POINTER_TYPE)
11224 || !validate_arg (s2, POINTER_TYPE))
11229 const char *p1, *p2;
11231 p2 = c_getstr (s2);
11235 p1 = c_getstr (s1);
/* Both strings constant: evaluate strpbrk at compile time.  */
11238 const char *r = strpbrk (p1, p2);
11242 return build_int_cst (TREE_TYPE (s1), 0);
11244 /* Return an offset into the constant string argument. */
11245 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11246 s1, size_int (r - p1));
11247 return fold_convert_loc (loc, type, tem);
11251 /* strpbrk(x, "") == NULL.
11252 Evaluate and ignore s1 in case it had side-effects. */
11253 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11256 return NULL_TREE; /* Really call strpbrk. */
11258 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11262 /* New argument list transforming strpbrk(s1, s2) to
11263 strchr(s1, s2[0]). */
11264 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11268 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11271 Return NULL_TREE if no simplification was possible, otherwise return the
11272 simplified form of the call as a tree.
11274 The simplified form may be a constant or other expression which
11275 computes the same value, but in a more efficient manner (including
11276 calls to other builtin functions).
11278 The call may contain arguments which need to be evaluated, but
11279 which are not useful to determine the result of the call. In
11280 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11281 COMPOUND_EXPR will be an argument which must be evaluated.
11282 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11283 COMPOUND_EXPR in the chain will contain the tree for the simplified
11284 form of the builtin function call. */
11287 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11289 if (!validate_arg (dst, POINTER_TYPE)
11290 || !validate_arg (src, POINTER_TYPE))
11294 const char *p = c_getstr (src);
11296 /* If the string length is zero, return the dst parameter. */
11297 if (p && *p == '\0')
11300 if (optimize_insn_for_speed_p ())
11302 /* See if we can store by pieces into (dst + strlen(dst)). */
11304 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11305 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11307 if (!strlen_fn || !strcpy_fn)
11310 /* If we don't have a movstr we don't want to emit an strcpy
11311 call. We have to do that if the length of the source string
11312 isn't computable (in that case we can use memcpy probably
11313 later expanding to a sequence of mov instructions). If we
11314 have movstr instructions we can emit strcpy calls. */
/* Give up if SRC's length is unknown or has side effects.  */
11317 tree len = c_strlen (src, 1);
11318 if (! len || TREE_SIDE_EFFECTS (len))
11322 /* Stabilize the argument list. */
11323 dst = builtin_save_expr (dst);
11325 /* Create strlen (dst). */
11326 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11327 /* Create (dst p+ strlen (dst)). */
11329 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11330 TREE_TYPE (dst), dst, newdst);
11331 newdst = builtin_save_expr (newdst);
/* Emit strcpy(dst + strlen(dst), src) and yield DST as the value, per
   strcat's contract.  */
11333 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11334 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11340 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11341 arguments to the call.
11343 Return NULL_TREE if no simplification was possible, otherwise return the
11344 simplified form of the call as a tree.
11346 The simplified form may be a constant or other expression which
11347 computes the same value, but in a more efficient manner (including
11348 calls to other builtin functions).
11350 The call may contain arguments which need to be evaluated, but
11351 which are not useful to determine the result of the call. In
11352 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11353 COMPOUND_EXPR will be an argument which must be evaluated.
11354 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11355 COMPOUND_EXPR in the chain will contain the tree for the simplified
11356 form of the builtin function call. */
11359 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11361 if (!validate_arg (dst, POINTER_TYPE)
11362 || !validate_arg (src, POINTER_TYPE)
11363 || !validate_arg (len, INTEGER_TYPE))
11367 const char *p = c_getstr (src);
11369 /* If the requested length is zero, or the src parameter string
11370 length is zero, return the dst parameter. */
11371 if (integer_zerop (len) || (p && *p == '\0'))
11372 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11374 /* If the requested len is greater than or equal to the string
11375 length, call strcat. */
11376 if (TREE_CODE (len) == INTEGER_CST && p
11377 && compare_tree_int (len, strlen (p)) >= 0)
11379 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11381 /* If the replacement _DECL isn't initialized, don't do the
11386 return build_call_expr_loc (loc, fn, 2, dst, src);
11392 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11395 Return NULL_TREE if no simplification was possible, otherwise return the
11396 simplified form of the call as a tree.
11398 The simplified form may be a constant or other expression which
11399 computes the same value, but in a more efficient manner (including
11400 calls to other builtin functions).
11402 The call may contain arguments which need to be evaluated, but
11403 which are not useful to determine the result of the call. In
11404 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11405 COMPOUND_EXPR will be an argument which must be evaluated.
11406 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11407 COMPOUND_EXPR in the chain will contain the tree for the simplified
11408 form of the builtin function call. */
11411 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11413 if (!validate_arg (s1, POINTER_TYPE)
11414 || !validate_arg (s2, POINTER_TYPE))
11418 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11420 /* If both arguments are constants, evaluate at compile-time. */
11423 const size_t r = strspn (p1, p2);
11424 return size_int (r);
11427 /* If either argument is "", return NULL_TREE. */
11428 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11429 /* Evaluate and ignore both arguments in case either one has
11431 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11437 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11440 Return NULL_TREE if no simplification was possible, otherwise return the
11441 simplified form of the call as a tree.
11443 The simplified form may be a constant or other expression which
11444 computes the same value, but in a more efficient manner (including
11445 calls to other builtin functions).
11447 The call may contain arguments which need to be evaluated, but
11448 which are not useful to determine the result of the call. In
11449 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11450 COMPOUND_EXPR will be an argument which must be evaluated.
11451 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11452 COMPOUND_EXPR in the chain will contain the tree for the simplified
11453 form of the builtin function call. */
11456 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
/* Fold strcspn (S1, S2).  Both arguments must validate as pointers.  */
11458 if (!validate_arg (s1, POINTER_TYPE)
11459 || !validate_arg (s2, POINTER_TYPE))
11463 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11465 /* If both arguments are constants, evaluate at compile-time. */
11468 const size_t r = strcspn (p1, p2);
11469 return size_int (r);
11472 /* If the first argument is "", the result is 0 (the code below folds
   to the constant zero while preserving S2's side effects).  */
11473 if (p1 && *p1 == '\0')
11475 /* Evaluate and ignore argument s2 in case it has
11477 return omit_one_operand_loc (loc, size_type_node,
11478 size_zero_node, s2);
11481 /* If the second argument is "", return __builtin_strlen(s1). */
11482 if (p2 && *p2 == '\0')
11484 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11486 /* If the replacement _DECL isn't initialized, don't do the
11491 return build_call_expr_loc (loc, fn, 1, s1);
11497 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11498 to the call. IGNORE is true if the value returned
11499 by the builtin will be ignored. UNLOCKED is true is true if this
11500 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11501 the known length of the string. Return NULL_TREE if no simplification
11505 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11506 bool ignore, bool unlocked, tree len)
11508 /* If we're using an unlocked function, assume the other unlocked
11509 functions exist explicitly. */
11510 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11511 : implicit_built_in_decls[BUILT_IN_FPUTC]
11512 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11513 : implicit_built_in_decls[BUILT_IN_FWRITE];
11515 /* If the return value is used, don't do the transformation. */
11519 /* Verify the arguments in the original call. */
11520 if (!validate_arg (arg0, POINTER_TYPE)
11521 || !validate_arg (arg1, POINTER_TYPE))
/* Compute the string length if the caller did not supply one.  */
11525 len = c_strlen (arg0, 0);
11527 /* Get the length of the string passed to fputs. If the length
11528 can't be determined, punt. */
11530 || TREE_CODE (len) != INTEGER_CST)
/* Dispatch on the constant string length: 0, 1, or greater.  */
11533 switch (compare_tree_int (len, 1))
11535 case -1: /* length is 0, delete the call entirely. */
11536 return omit_one_operand_loc (loc, integer_type_node,
11537 integer_zero_node, arg1);
11539 case 0: /* length is 1, call fputc. */
11541 const char *p = c_getstr (arg0);
11546 return build_call_expr_loc (loc, fn_fputc, 2,
11547 build_int_cst (NULL_TREE, p[0]), arg1);
11553 case 1: /* length is greater than 1, call fwrite. */
11555 /* If optimizing for size keep fputs. */
11556 if (optimize_function_for_size_p (cfun))
11558 /* New argument list transforming fputs(string, stream) to
11559 fwrite(string, 1, len, stream). */
11561 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11562 size_one_node, len, arg1);
11567 gcc_unreachable ();
11572 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11573 produced. False otherwise. This is done so that we don't output the error
11574 or warning twice or three times. */
11577 fold_builtin_next_arg (tree exp, bool va_start_p)
/* Validate a va_start/__builtin_next_arg call EXP; returns true on error
   (VA_START_P selects which builtin is being checked).  */
11579 tree fntype = TREE_TYPE (current_function_decl);
11580 int nargs = call_expr_nargs (exp);
/* va_start is only meaningful in a variadic function.  */
11583 if (!stdarg_p (fntype))
11585 error ("%<va_start%> used in function with fixed args");
11591 if (va_start_p && (nargs != 2))
11593 error ("wrong number of arguments to function %<va_start%>");
11596 arg = CALL_EXPR_ARG (exp, 1);
11598 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11599 when we checked the arguments and if needed issued a warning. */
11604 /* Evidently an out of date version of <stdarg.h>; can't validate
11605 va_start's second argument, but can still work as intended. */
11606 warning (0, "%<__builtin_next_arg%> called without an argument");
11609 else if (nargs > 1)
11611 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11614 arg = CALL_EXPR_ARG (exp, 0);
/* Look through SSA form to the underlying declared variable.  */
11617 if (TREE_CODE (arg) == SSA_NAME)
11618 arg = SSA_NAME_VAR (arg);
11620 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11621 or __builtin_next_arg (0) the first time we see it, after checking
11622 the arguments and if needed issuing a warning. */
11623 if (!integer_zerop (arg))
11625 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11627 /* Strip off all nops for the sake of the comparison. This
11628 is not quite the same as STRIP_NOPS. It does more.
11629 We must also strip off INDIRECT_EXPR for C++ reference
11631 while (CONVERT_EXPR_P (arg)
11632 || TREE_CODE (arg) == INDIRECT_REF)
11633 arg = TREE_OPERAND (arg, 0);
11634 if (arg != last_parm)
11636 /* FIXME: Sometimes with the tree optimizers we can get the
11637 not the last argument even though the user used the last
11638 argument. We just warn and set the arg to be the last
11639 argument so that we will get wrong-code because of
11641 warning (0, "second parameter of %<va_start%> not last named argument");
11644 /* Undefined by C99 7.15.1.4p4 (va_start):
11645 "If the parameter parmN is declared with the register storage
11646 class, with a function or array type, or with a type that is
11647 not compatible with the type that results after application of
11648 the default argument promotions, the behavior is undefined."
11650 else if (DECL_REGISTER (arg))
11651 warning (0, "undefined behaviour when second parameter of "
11652 "%<va_start%> is declared with %<register%> storage");
11654 /* We want to verify the second parameter just once before the tree
11655 optimizers are run and then avoid keeping it in the tree,
11656 as otherwise we could warn even for correct code like:
11657 void foo (int i, ...)
11658 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11660 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11662 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11668 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11669 ORIG may be null if this is a 2-argument call. We don't attempt to
11670 simplify calls with more than 3 arguments.
11672 Return NULL_TREE if no simplification was possible, otherwise return the
11673 simplified form of the call as a tree. If IGNORED is true, it means that
11674 the caller does not use the returned value of the function. */
11677 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11678 tree orig, int ignored)
11681 const char *fmt_str = NULL;
11683 /* Verify the required arguments in the original call. We deal with two
11684 types of sprintf() calls: 'sprintf (str, fmt)' and
11685 'sprintf (dest, "%s", orig)'. */
11686 if (!validate_arg (dest, POINTER_TYPE)
11687 || !validate_arg (fmt, POINTER_TYPE))
11689 if (orig && !validate_arg (orig, POINTER_TYPE))
11692 /* Check whether the format is a literal string constant. */
11693 fmt_str = c_getstr (fmt);
11694 if (fmt_str == NULL)
/* RETVAL accumulates the value sprintf would have returned (the number
   of characters written), so the fold can preserve it when used.  */
11698 retval = NULL_TREE;
11700 if (!init_target_chars ())
11703 /* If the format doesn't contain % args or %%, use strcpy. */
11704 if (strchr (fmt_str, target_percent) == NULL)
11706 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11711 /* Don't optimize sprintf (buf, "abc", ptr++). */
11715 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11716 'format' is known to contain no % formats. */
11717 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11719 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11722 /* If the format is "%s", use strcpy if the result isn't used. */
11723 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11726 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11731 /* Don't crash on sprintf (str1, "%s"). */
11735 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11738 retval = c_strlen (orig, 1);
11739 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11742 call = build_call_expr_loc (loc, fn, 2, dest, orig);
/* Return the strcpy call chained with sprintf's return value, converted
   to sprintf's declared return type.  */
11745 if (call && retval)
11747 retval = fold_convert_loc
11748 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11750 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11756 /* Expand a call EXP to __builtin_object_size. */
11759 expand_builtin_object_size (tree exp)
/* Expand __builtin_object_size (ptr, type) to RTL.  On malformed calls,
   emit an error and expand to a trap.  */
11762 int object_size_type;
11763 tree fndecl = get_callee_fndecl (exp);
11765 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11767 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11769 expand_builtin_trap ();
11773 ost = CALL_EXPR_ARG (exp, 1);
/* The second argument must be a constant in [0, 3].  */
11776 if (TREE_CODE (ost) != INTEGER_CST
11777 || tree_int_cst_sgn (ost) < 0
11778 || compare_tree_int (ost, 3) > 0)
11780 error ("%Klast argument of %D is not integer constant between 0 and 3",
11782 expand_builtin_trap ();
11786 object_size_type = tree_low_cst (ost, 0);
/* Unknown size: types 0/1 yield (size_t) -1, types 2/3 yield 0.  */
11788 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11791 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11792 FCODE is the BUILT_IN_* to use.
11793 Return NULL_RTX if we failed; the caller should emit a normal call,
11794 otherwise try to get the result in TARGET, if convenient (and in
11795 mode MODE if that's convenient). */
11798 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11799 enum built_in_function fcode)
11801 tree dest, src, len, size;
/* For __memset_chk the second argument is the fill value (integer);
   for the copy/move variants it is a source pointer.  */
11803 if (!validate_arglist (exp,
11805 fcode == BUILT_IN_MEMSET_CHK
11806 ? INTEGER_TYPE : POINTER_TYPE,
11807 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11810 dest = CALL_EXPR_ARG (exp, 0);
11811 src = CALL_EXPR_ARG (exp, 1);
11812 len = CALL_EXPR_ARG (exp, 2);
11813 size = CALL_EXPR_ARG (exp, 3);
/* SIZE must be a known host constant to reason about overflow.  */
11815 if (! host_integerp (size, 1))
11818 if (host_integerp (len, 1) || integer_all_onesp (size))
/* Constant LEN larger than the object: warn, the call always overflows.  */
11822 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11824 warning_at (tree_nonartificial_location (exp),
11825 0, "%Kcall to %D will always overflow destination buffer",
11826 exp, get_callee_fndecl (exp));
11831 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11832 mem{cpy,pcpy,move,set} is available. */
11835 case BUILT_IN_MEMCPY_CHK:
11836 fn = built_in_decls[BUILT_IN_MEMCPY];
11838 case BUILT_IN_MEMPCPY_CHK:
11839 fn = built_in_decls[BUILT_IN_MEMPCPY];
11841 case BUILT_IN_MEMMOVE_CHK:
11842 fn = built_in_decls[BUILT_IN_MEMMOVE];
11844 case BUILT_IN_MEMSET_CHK:
11845 fn = built_in_decls[BUILT_IN_MEMSET];
/* Rewrite the checked call into the plain 3-argument call, preserving
   the tail-call flag, and expand that instead.  */
11854 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11855 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11856 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11857 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11859 else if (fcode == BUILT_IN_MEMSET_CHK)
11863 unsigned int dest_align
11864 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11866 /* If DEST is not a pointer type, call the normal function. */
11867 if (dest_align == 0)
11870 /* If SRC and DEST are the same (and not volatile), do nothing. */
11871 if (operand_equal_p (src, dest, 0))
11875 if (fcode != BUILT_IN_MEMPCPY_CHK)
11877 /* Evaluate and ignore LEN in case it has side-effects. */
11878 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11879 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN rather than DEST.  */
11882 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11883 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11886 /* __memmove_chk special case. */
11887 if (fcode == BUILT_IN_MEMMOVE_CHK)
11889 unsigned int src_align
11890 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11892 if (src_align == 0)
11895 /* If src is categorized for a readonly section we can use
11896 normal __memcpy_chk. */
11897 if (readonly_data_expr (src))
11899 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11902 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11903 dest, src, len, size);
11904 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11905 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11906 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11913 /* Emit warning if a buffer overflow is detected at compile time. */
11916 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
/* Warn when a _chk call EXP can be proven at compile time to overflow
   its destination.  FCODE selects where LEN and SIZE live in the args.  */
11920 location_t loc = tree_nonartificial_location (exp);
/* Pick the source-length and object-size arguments by builtin.  */
11924 case BUILT_IN_STRCPY_CHK:
11925 case BUILT_IN_STPCPY_CHK:
11926 /* For __strcat_chk the warning will be emitted only if overflowing
11927 by at least strlen (dest) + 1 bytes. */
11928 case BUILT_IN_STRCAT_CHK:
11929 len = CALL_EXPR_ARG (exp, 1);
11930 size = CALL_EXPR_ARG (exp, 2);
11933 case BUILT_IN_STRNCAT_CHK:
11934 case BUILT_IN_STRNCPY_CHK:
11935 len = CALL_EXPR_ARG (exp, 2);
11936 size = CALL_EXPR_ARG (exp, 3);
11938 case BUILT_IN_SNPRINTF_CHK:
11939 case BUILT_IN_VSNPRINTF_CHK:
11940 len = CALL_EXPR_ARG (exp, 1);
11941 size = CALL_EXPR_ARG (exp, 3);
11944 gcc_unreachable ();
/* A non-constant or all-ones SIZE means "unknown": no warning.  */
11950 if (! host_integerp (size, 1) || integer_all_onesp (size))
11955 len = c_strlen (len, 1);
11956 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11959 else if (fcode == BUILT_IN_STRNCAT_CHK)
11961 tree src = CALL_EXPR_ARG (exp, 1);
11962 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11964 src = c_strlen (src, 1);
11965 if (! src || ! host_integerp (src, 1))
/* Source length unknown: overflow is possible but not certain.  */
11967 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11968 exp, get_callee_fndecl (exp));
11971 else if (tree_int_cst_lt (src, size))
11974 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11977 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11978 exp, get_callee_fndecl (exp));
11981 /* Emit warning if a buffer overflow is detected at compile time
11982 in __sprintf_chk/__vsprintf_chk calls. */
11985 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11987 tree size, len, fmt;
11988 const char *fmt_str;
11989 int nargs = call_expr_nargs (exp);
11991 /* Verify the required arguments in the original call. */
11995 size = CALL_EXPR_ARG (exp, 2);
11996 fmt = CALL_EXPR_ARG (exp, 3);
/* Non-constant or all-ones SIZE means the object size is unknown.  */
11998 if (! host_integerp (size, 1) || integer_all_onesp (size))
12001 /* Check whether the format is a literal string constant. */
12002 fmt_str = c_getstr (fmt);
12003 if (fmt_str == NULL)
12006 if (!init_target_chars ())
12009 /* If the format doesn't contain % args or %%, we know its size. */
12010 if (strchr (fmt_str, target_percent) == 0)
12011 len = build_int_cstu (size_type_node, strlen (fmt_str));
12012 /* If the format is "%s" and first ... argument is a string literal,
12014 else if (fcode == BUILT_IN_SPRINTF_CHK
12015 && strcmp (fmt_str, target_percent_s) == 0)
12021 arg = CALL_EXPR_ARG (exp, 4);
12022 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12025 len = c_strlen (arg, 1);
12026 if (!len || ! host_integerp (len, 1))
/* LEN here excludes the terminating NUL, so >= SIZE always overflows.  */
12032 if (! tree_int_cst_lt (len, size))
12033 warning_at (tree_nonartificial_location (exp),
12034 0, "%Kcall to %D will always overflow destination buffer",
12035 exp, get_callee_fndecl (exp));
12038 /* Emit warning if a free is called with address of a variable. */
12041 maybe_emit_free_warning (tree exp)
/* Warn when free() is called on the address of a declared object
   (stack/static variable) rather than heap memory.  */
12043 tree arg = CALL_EXPR_ARG (exp, 0);
/* Only &object arguments can be diagnosed here.  */
12046 if (TREE_CODE (arg) != ADDR_EXPR)
12049 arg = get_base_address (TREE_OPERAND (arg, 0));
/* A base reached through a dereference may still be heap memory.  */
12050 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
/* Name the variable in the diagnostic when we have a declaration.  */
12053 if (SSA_VAR_P (arg))
12054 warning_at (tree_nonartificial_location (exp),
12055 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12057 warning_at (tree_nonartificial_location (exp),
12058 0, "%Kattempt to free a non-heap object", exp);
12061 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12065 fold_builtin_object_size (tree ptr, tree ost)
12067 unsigned HOST_WIDE_INT bytes;
12068 int object_size_type;
12070 if (!validate_arg (ptr, POINTER_TYPE)
12071 || !validate_arg (ost, INTEGER_TYPE))
/* The type argument must be a constant in [0, 3].  */
12076 if (TREE_CODE (ost) != INTEGER_CST
12077 || tree_int_cst_sgn (ost) < 0
12078 || compare_tree_int (ost, 3) > 0)
12081 object_size_type = tree_low_cst (ost, 0);
12083 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12084 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12085 and (size_t) 0 for types 2 and 3. */
12086 if (TREE_SIDE_EFFECTS (ptr))
12087 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12089 if (TREE_CODE (ptr) == ADDR_EXPR)
12091 bytes = compute_builtin_object_size (ptr, object_size_type);
/* Only fold when the computed size fits in size_t on the target.  */
12092 if (double_int_fits_to_tree_p (size_type_node,
12093 uhwi_to_double_int (bytes)))
12094 return build_int_cstu (size_type_node, bytes);
12096 else if (TREE_CODE (ptr) == SSA_NAME)
12098 /* If object size is not known yet, delay folding until
12099 later. Maybe subsequent passes will help determining
12101 bytes = compute_builtin_object_size (ptr, object_size_type);
/* The "unknown" sentinel is -1 for types 0/1 and 0 for types 2/3;
   only fold when the size is actually known.  */
12102 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12103 && double_int_fits_to_tree_p (size_type_node,
12104 uhwi_to_double_int (bytes)))
12105 return build_int_cstu (size_type_node, bytes);
12111 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12112 DEST, SRC, LEN, and SIZE are the arguments to the call.
12113 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12114 code of the builtin. If MAXLEN is not NULL, it is maximum length
12115 passed as third argument. */
12118 fold_builtin_memory_chk (location_t loc, tree fndecl,
12119 tree dest, tree src, tree len, tree size,
12120 tree maxlen, bool ignore,
12121 enum built_in_function fcode)
/* For __memset_chk, SRC is the integer fill value.  */
12125 if (!validate_arg (dest, POINTER_TYPE)
12126 || !validate_arg (src,
12127 (fcode == BUILT_IN_MEMSET_CHK
12128 ? INTEGER_TYPE : POINTER_TYPE))
12129 || !validate_arg (len, INTEGER_TYPE)
12130 || !validate_arg (size, INTEGER_TYPE))
12133 /* If SRC and DEST are the same (and not volatile), return DEST
12134 (resp. DEST+LEN for __mempcpy_chk). */
12135 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12137 if (fcode != BUILT_IN_MEMPCPY_CHK)
12138 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12142 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12144 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12148 if (! host_integerp (size, 1))
12151 if (! integer_all_onesp (size))
12153 if (! host_integerp (len, 1))
12155 /* If LEN is not constant, try MAXLEN too.
12156 For MAXLEN only allow optimizing into non-_ocs function
12157 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12158 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12160 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12162 /* (void) __mempcpy_chk () can be optimized into
12163 (void) __memcpy_chk (). */
12164 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12168 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
/* SIZE < MAXLEN: possible overflow, keep the checked call.  */
12176 if (tree_int_cst_lt (size, maxlen))
12181 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12182 mem{cpy,pcpy,move,set} is available. */
12185 case BUILT_IN_MEMCPY_CHK:
12186 fn = built_in_decls[BUILT_IN_MEMCPY];
12188 case BUILT_IN_MEMPCPY_CHK:
12189 fn = built_in_decls[BUILT_IN_MEMPCPY];
12191 case BUILT_IN_MEMMOVE_CHK:
12192 fn = built_in_decls[BUILT_IN_MEMMOVE];
12194 case BUILT_IN_MEMSET_CHK:
12195 fn = built_in_decls[BUILT_IN_MEMSET];
/* Safe: fold to the unchecked 3-argument variant.  */
12204 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12207 /* Fold a call to the __st[rp]cpy_chk builtin.
12208 DEST, SRC, and SIZE are the arguments to the call.
12209 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12210 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12211 strings passed as second argument. */
12214 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12215 tree src, tree size,
12216 tree maxlen, bool ignore,
12217 enum built_in_function fcode)
12221 if (!validate_arg (dest, POINTER_TYPE)
12222 || !validate_arg (src, POINTER_TYPE)
12223 || !validate_arg (size, INTEGER_TYPE))
12226 /* If SRC and DEST are the same (and not volatile), return DEST. */
12227 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12228 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12230 if (! host_integerp (size, 1))
12233 if (! integer_all_onesp (size))
/* LEN is strlen (SRC) as a tree, when determinable.  */
12235 len = c_strlen (src, 1);
12236 if (! len || ! host_integerp (len, 1))
12238 /* If LEN is not constant, try MAXLEN too.
12239 For MAXLEN only allow optimizing into non-_ocs function
12240 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12241 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12243 if (fcode == BUILT_IN_STPCPY_CHK)
12248 /* If return value of __stpcpy_chk is ignored,
12249 optimize into __strcpy_chk. */
12250 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12254 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12257 if (! len || TREE_SIDE_EFFECTS (len))
12260 /* If c_strlen returned something, but not a constant,
12261 transform __strcpy_chk into __memcpy_chk. */
12262 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* Copy strlen (SRC) + 1 bytes to include the terminating NUL.  */
12266 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12267 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12268 build_call_expr_loc (loc, fn, 4,
12269 dest, src, len, size));
12275 if (! tree_int_cst_lt (maxlen, size))
12279 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12280 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12281 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12285 return build_call_expr_loc (loc, fn, 2, dest, src);
12288 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12289 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12290 length passed as third argument. */
12293 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12294 tree len, tree size, tree maxlen)
12298 if (!validate_arg (dest, POINTER_TYPE)
12299 || !validate_arg (src, POINTER_TYPE)
12300 || !validate_arg (len, INTEGER_TYPE)
12301 || !validate_arg (size, INTEGER_TYPE))
/* SIZE must be a known constant to prove the copy fits.  */
12304 if (! host_integerp (size, 1))
12307 if (! integer_all_onesp (size))
12309 if (! host_integerp (len, 1))
12311 /* If LEN is not constant, try MAXLEN too.
12312 For MAXLEN only allow optimizing into non-_ocs function
12313 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12314 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* SIZE < MAXLEN: possible overflow, keep the checked call.  */
12320 if (tree_int_cst_lt (size, maxlen))
12324 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12325 fn = built_in_decls[BUILT_IN_STRNCPY];
12329 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12332 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12333 are the arguments to the call. */
12336 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12337 tree src, tree size)
12342 if (!validate_arg (dest, POINTER_TYPE)
12343 || !validate_arg (src, POINTER_TYPE)
12344 || !validate_arg (size, INTEGER_TYPE))
12347 p = c_getstr (src);
12348 /* If the SRC parameter is "", return DEST. */
12349 if (p && *p == '\0')
12350 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only fold to plain strcat when SIZE is the all-ones "unknown" value;
   otherwise the overflow check must remain.  */
12352 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12355 /* If __builtin_strcat_chk is used, assume strcat is available. */
12356 fn = built_in_decls[BUILT_IN_STRCAT];
12360 return build_call_expr_loc (loc, fn, 2, dest, src);
12363 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12367 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12368 tree dest, tree src, tree len, tree size)
/* Validate all four arguments.  The original code validated SIZE twice
   and never validated LEN; check LEN here instead.  */
12373 if (!validate_arg (dest, POINTER_TYPE)
12374 || !validate_arg (src, POINTER_TYPE)
12375 || !validate_arg (len, INTEGER_TYPE)
12376 || !validate_arg (size, INTEGER_TYPE))
12379 p = c_getstr (src);
12380 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12381 if (p && *p == '\0')
12382 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len)
12383 else if (integer_zerop (len))
12384 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12386 if (! host_integerp (size, 1))
12389 if (! integer_all_onesp (size))
12391 tree src_len = c_strlen (src, 1);
12393 && host_integerp (src_len, 1)
12394 && host_integerp (len, 1)
12395 && ! tree_int_cst_lt (len, src_len))
12397 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12398 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12402 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12407 /* If __builtin_strncat_chk is used, assume strncat is available. */
12408 fn = built_in_decls[BUILT_IN_STRNCAT];
12412 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12415 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12416 a normal call should be emitted rather than expanding the function
12417 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12420 fold_builtin_sprintf_chk (location_t loc, tree exp,
12421 enum built_in_function fcode)
12423 tree dest, size, len, fn, fmt, flag;
12424 const char *fmt_str;
12425 int nargs = call_expr_nargs (exp);
12427 /* Verify the required arguments in the original call. */
12430 dest = CALL_EXPR_ARG (exp, 0);
12431 if (!validate_arg (dest, POINTER_TYPE))
12433 flag = CALL_EXPR_ARG (exp, 1);
12434 if (!validate_arg (flag, INTEGER_TYPE))
12436 size = CALL_EXPR_ARG (exp, 2);
12437 if (!validate_arg (size, INTEGER_TYPE))
12439 fmt = CALL_EXPR_ARG (exp, 3);
12440 if (!validate_arg (fmt, POINTER_TYPE))
12443 if (! host_integerp (size, 1))
12448 if (!init_target_chars ())
12451 /* Check whether the format is a literal string constant. */
12452 fmt_str = c_getstr (fmt);
12453 if (fmt_str != NULL)
12455 /* If the format doesn't contain % args or %%, we know the size. */
12456 if (strchr (fmt_str, target_percent) == 0)
12458 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12459 len = build_int_cstu (size_type_node, strlen (fmt_str));
12461 /* If the format is "%s" and first ... argument is a string literal,
12462 we know the size too. */
12463 else if (fcode == BUILT_IN_SPRINTF_CHK
12464 && strcmp (fmt_str, target_percent_s) == 0)
12470 arg = CALL_EXPR_ARG (exp, 4);
12471 if (validate_arg (arg, POINTER_TYPE))
12473 len = c_strlen (arg, 1);
12474 if (! len || ! host_integerp (len, 1))
/* With a known object size, only fold if the output provably fits.  */
12481 if (! integer_all_onesp (size))
12483 if (! len || ! tree_int_cst_lt (len, size))
12487 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12488 or if format doesn't contain % chars or is "%s". */
12489 if (! integer_zerop (flag))
12491 if (fmt_str == NULL)
12493 if (strchr (fmt_str, target_percent) != NULL
12494 && strcmp (fmt_str, target_percent_s))
12498 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12499 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12500 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Drop the flag and size arguments, keeping dest/fmt plus trailing args.  */
12504 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12507 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12508 a normal call should be emitted rather than expanding the function
12509 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12510 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12511 passed as second argument. */
12514 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12515 enum built_in_function fcode)
12517 tree dest, size, len, fn, fmt, flag;
12518 const char *fmt_str;
12520 /* Verify the required arguments in the original call. */
12521 if (call_expr_nargs (exp) < 5)
12523 dest = CALL_EXPR_ARG (exp, 0);
12524 if (!validate_arg (dest, POINTER_TYPE))
12526 len = CALL_EXPR_ARG (exp, 1);
12527 if (!validate_arg (len, INTEGER_TYPE))
12529 flag = CALL_EXPR_ARG (exp, 2);
12530 if (!validate_arg (flag, INTEGER_TYPE))
12532 size = CALL_EXPR_ARG (exp, 3);
12533 if (!validate_arg (size, INTEGER_TYPE))
12535 fmt = CALL_EXPR_ARG (exp, 4);
12536 if (!validate_arg (fmt, POINTER_TYPE))
12539 if (! host_integerp (size, 1))
12542 if (! integer_all_onesp (size))
12544 if (! host_integerp (len, 1))
12546 /* If LEN is not constant, try MAXLEN too.
12547 For MAXLEN only allow optimizing into non-_ocs function
12548 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12549 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* SIZE < MAXLEN: possible overflow, keep the checked call.  */
12555 if (tree_int_cst_lt (size, maxlen))
12559 if (!init_target_chars ())
12562 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12563 or if format doesn't contain % chars or is "%s". */
12564 if (! integer_zerop (flag))
12566 fmt_str = c_getstr (fmt);
12567 if (fmt_str == NULL)
12569 if (strchr (fmt_str, target_percent) != NULL
12570 && strcmp (fmt_str, target_percent_s))
12574 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12576 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12577 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Drop the flag and size arguments, keeping dest/len/fmt plus the rest.  */
12581 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12584 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12585 FMT and ARG are the arguments to the call; we don't fold cases with
12586 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12588 Return NULL_TREE if no simplification was possible, otherwise return the
12589 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12590 code of the function to be simplified. */
12593 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12594 tree arg, bool ignore,
12595 enum built_in_function fcode)
12597 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12598 const char *fmt_str = NULL;
12600 /* If the return value is used, don't do the transformation. */
12604 /* Verify the required arguments in the original call. */
12605 if (!validate_arg (fmt, POINTER_TYPE))
12608 /* Check whether the format is a literal string constant. */
12609 fmt_str = c_getstr (fmt);
12610 if (fmt_str == NULL)
12613 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12615 /* If we're using an unlocked function, assume the other
12616 unlocked functions exist explicitly. */
12617 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12618 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12622 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12623 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12626 if (!init_target_chars ())
/* Handle the two foldable shapes: format is exactly "%s", or it has
   no '%' at all (a plain string).  */
12629 if (strcmp (fmt_str, target_percent_s) == 0
12630 || strchr (fmt_str, target_percent) == NULL)
12634 if (strcmp (fmt_str, target_percent_s) == 0)
12636 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12639 if (!arg || !validate_arg (arg, POINTER_TYPE))
12642 str = c_getstr (arg);
12648 /* The format specifier doesn't contain any '%' characters. */
12649 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12655 /* If the string was "", printf does nothing. */
12656 if (str[0] == '\0')
12657 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12659 /* If the string has length of 1, call putchar. */
12660 if (str[1] == '\0')
12662 /* Given printf("c"), (where c is any one character,)
12663 convert "c"[0] to an int and pass that to the replacement
12665 newarg = build_int_cst (NULL_TREE, str[0]);
12667 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12671 /* If the string was "string\n", call puts("string"). */
12672 size_t len = strlen (str);
12673 if ((unsigned char)str[len - 1] == target_newline)
12675 /* Create a NUL-terminated string that's one char shorter
12676 than the original, stripping off the trailing '\n'. */
12677 char *newstr = XALLOCAVEC (char, len);
12678 memcpy (newstr, str, len - 1);
12679 newstr[len - 1] = 0;
12681 newarg = build_string_literal (len, newstr);
12683 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12686 /* We'd like to arrange to call fputs(string,stdout) here,
12687 but we need stdout and don't have a way to get it yet. */
12692 /* The other optimizations can be done only on the non-va_list variants. */
12693 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12696 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12697 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12699 if (!arg || !validate_arg (arg, POINTER_TYPE))
12702 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12705 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12706 else if (strcmp (fmt_str, target_percent_c) == 0)
12708 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12711 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
/* Convert the replacement call to printf's declared return type.  */
12717 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12720 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12721 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12722 more than 3 arguments, and ARG may be null in the 2-argument case.
12724 Return NULL_TREE if no simplification was possible, otherwise return the
12725 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12726 code of the function to be simplified. */
12729 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12730 tree fmt, tree arg, bool ignore,
12731 enum built_in_function fcode)
12733 tree fn_fputc, fn_fputs, call = NULL_TREE;
12734 const char *fmt_str = NULL;
12736 /* If the return value is used, don't do the transformation. */
12740 /* Verify the required arguments in the original call. */
12741 if (!validate_arg (fp, POINTER_TYPE))
12743 if (!validate_arg (fmt, POINTER_TYPE))
12746 /* Check whether the format is a literal string constant. */
12747 fmt_str = c_getstr (fmt);
12748 if (fmt_str == NULL)
/* Pick the fputc/fputs replacements appropriate for the variant
   being folded.  */
12751 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12753 /* If we're using an unlocked function, assume the other
12754 unlocked functions exist explicitly. */
12755 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12756 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
/* Otherwise use the implicitly-usable library declarations.  */
12760 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12761 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
/* The comparisons below need '%', 'c', 's' (and '\n') expressed in
   the target character set.  */
12764 if (!init_target_chars ())
12767 /* If the format doesn't contain % args or %%, fold to fputs (or to
   a zero constant when the format is the empty string). */
12768 if (strchr (fmt_str, target_percent) == NULL)
12770 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12774 /* If the format specifier was "", fprintf does nothing. */
12775 if (fmt_str[0] == '\0')
12777 /* If FP has side-effects, just wait until gimplification is
12779 if (TREE_SIDE_EFFECTS (fp))
/* Nothing is printed: fold the whole call to the constant 0,
   converted to the call's return type.  */
12782 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12785 /* When "string" doesn't contain %, replace all cases of
12786 fprintf (fp, string) with fputs (string, fp). The fputs
12787 builtin will take care of special cases like length == 1. */
12789 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12792 /* The other optimizations can be done only on the non-va_list variants. */
12793 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12796 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12797 else if (strcmp (fmt_str, target_percent_s) == 0)
12799 if (!arg || !validate_arg (arg, POINTER_TYPE))
12802 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12805 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12806 else if (strcmp (fmt_str, target_percent_c) == 0)
12808 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12811 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
/* Give the replacement call the original call's return type.  */
12816 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12819 /* Initialize format string characters in the target charset. */
/* Fills in the file-scope target_newline/target_percent/target_c/target_s
   characters and the precomputed literal strings "%c", "%s" and "%s\n"
   used by the printf/fprintf folders in this file.  */
12822 init_target_chars (void)
12827 target_newline = lang_hooks.to_target_charset ('\n');
12828 target_percent = lang_hooks.to_target_charset ('%');
12829 target_c = lang_hooks.to_target_charset ('c');
12830 target_s = lang_hooks.to_target_charset ('s');
/* NOTE(review): a zero result presumably means the character has no
   mapping in the target charset -- confirm against
   lang_hooks.to_target_charset before relying on this.  */
12831 if (target_newline == 0 || target_percent == 0 || target_c == 0
/* Precompute "%c", "%s" and "%s\n" in the target character set.  */
12835 target_percent_c[0] = target_percent;
12836 target_percent_c[1] = target_c;
12837 target_percent_c[2] = '\0';
12839 target_percent_s[0] = target_percent;
12840 target_percent_s[1] = target_s;
12841 target_percent_s[2] = '\0';
12843 target_percent_s_newline[0] = target_percent;
12844 target_percent_s_newline[1] = target_s;
12845 target_percent_s_newline[2] = target_newline;
12846 target_percent_s_newline[3] = '\0';
12853 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12854 and no overflow/underflow occurred. INEXACT is true if M was not
12855 exactly calculated. TYPE is the tree type for the result. This
12856 function assumes that you cleared the MPFR flags and then
12857 calculated M to see if anything subsequently set a flag prior to
12858 entering this function. Return NULL_TREE if any checks fail. */
12861 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12863 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12864 overflow/underflow occurred. If -frounding-math, proceed iff the
12865 result of calling FUNC was exact. */
12866 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12867 && (!flag_rounding_math || !inexact))
12869 REAL_VALUE_TYPE rr;
/* First convert to GCC's internal representation ...  */
12871 real_from_mpfr (&rr, m, type, GMP_RNDN);
12872 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12873 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12874 but the mpfr_t is not, then we underflowed in the
12876 if (real_isfinite (&rr)
12877 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0)
12879 REAL_VALUE_TYPE rmode;
/* ... then round to TYPE's machine mode and only accept the
   result if that rounding was lossless.  */
12881 real_convert (&rmode, TYPE_MODE (type), &rr);
12882 /* Proceed iff the specified mode can hold the value. */
12883 if (real_identical (&rmode, &rr))
12884 return build_real (type, rmode);
12890 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12891 number and no overflow/underflow occurred. INEXACT is true if M
12892 was not exactly calculated. TYPE is the tree type for the result.
12893 This function assumes that you cleared the MPFR flags and then
12894 calculated M to see if anything subsequently set a flag prior to
12895 entering this function. Return NULL_TREE if any checks fail, if
12896 FORCE_CONVERT is true, then bypass the checks. */
/* TYPE is a complex type; TREE_TYPE (type) is its element type.  */
12899 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12901 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12902 overflow/underflow occurred. If -frounding-math, proceed iff the
12903 result of calling FUNC was exact. */
12905 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12906 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12907 && (!flag_rounding_math || !inexact)))
12909 REAL_VALUE_TYPE re, im;
/* Convert both parts to GCC's internal representation.  */
12911 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12912 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12913 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12914 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12915 but the mpfr_t is not, then we underflowed in the
12918 || (real_isfinite (&re) && real_isfinite (&im)
12919 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12920 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12922 REAL_VALUE_TYPE re_mode, im_mode;
/* Round each part to the element type's machine mode and only
   accept the result if that rounding was lossless (unless
   FORCE_CONVERT bypasses the check).  */
12924 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12925 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12926 /* Proceed iff the specified mode can hold the value. */
12928 || (real_identical (&re_mode, &re)
12929 && real_identical (&im_mode, &im)))
12930 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12931 build_real (TREE_TYPE (type), im_mode));
12937 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12938 FUNC on it and return the resulting value as a tree with type TYPE.
12939 If MIN and/or MAX are not NULL, then the supplied ARG must be
12940 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12941 acceptable values, otherwise they are not. The mpfr precision is
12942 set to the precision of TYPE. We assume that function FUNC returns
12943 zero if the result could be calculated exactly within the requested
12947 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12948 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12951 tree result = NULL_TREE;
12955 /* To proceed, MPFR must exactly represent the target floating point
12956 format, which only happens when the target base equals two. */
12957 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12958 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12960 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* The argument must be finite and within the optional MIN/MAX
   bounds; INCLUSIVE selects >=/<= versus strict >/<.  */
12962 if (real_isfinite (ra)
12963 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12964 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12966 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12967 const int prec = fmt->p;
12968 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Evaluate FUNC at the target format's precision and rounding
   mode, then validate/convert via do_mpfr_ckconv.  */
12972 mpfr_init2 (m, prec);
12973 mpfr_from_real (m, ra, GMP_RNDN);
/* Clear the flags so do_mpfr_ckconv can detect overflow/underflow.  */
12974 mpfr_clear_flags ();
12975 inexact = func (m, m, rnd);
12976 result = do_mpfr_ckconv (m, type, inexact);
12984 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12985 FUNC on it and return the resulting value as a tree with type TYPE.
12986 The mpfr precision is set to the precision of TYPE. We assume that
12987 function FUNC returns zero if the result could be calculated
12988 exactly within the requested precision. */
12991 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12992 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12994 tree result = NULL_TREE;
12999 /* To proceed, MPFR must exactly represent the target floating point
13000 format, which only happens when the target base equals two. */
13001 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13002 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13003 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13005 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13006 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13008 if (real_isfinite (ra1) && real_isfinite (ra2))
13010 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13011 const int prec = fmt->p;
13012 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Evaluate FUNC (m1, m2) in place (result lands in m1) at the
   target format's precision and rounding mode.  */
13016 mpfr_inits2 (prec, m1, m2, NULL);
13017 mpfr_from_real (m1, ra1, GMP_RNDN);
13018 mpfr_from_real (m2, ra2, GMP_RNDN);
/* Clear the flags so do_mpfr_ckconv can detect overflow/underflow.  */
13019 mpfr_clear_flags ();
13020 inexact = func (m1, m1, m2, rnd);
13021 result = do_mpfr_ckconv (m1, type, inexact);
13022 mpfr_clears (m1, m2, NULL);
13029 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13030 FUNC on it and return the resulting value as a tree with type TYPE.
13031 The mpfr precision is set to the precision of TYPE. We assume that
13032 function FUNC returns zero if the result could be calculated
13033 exactly within the requested precision. */
13036 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13037 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13039 tree result = NULL_TREE;
13045 /* To proceed, MPFR must exactly represent the target floating point
13046 format, which only happens when the target base equals two. */
13047 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13048 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13049 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13050 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13052 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13053 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13054 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13056 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13058 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13059 const int prec = fmt->p;
13060 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Evaluate FUNC (m1, m2, m3) in place (result lands in m1) at the
   target format's precision and rounding mode.  */
13064 mpfr_inits2 (prec, m1, m2, m3, NULL);
13065 mpfr_from_real (m1, ra1, GMP_RNDN);
13066 mpfr_from_real (m2, ra2, GMP_RNDN);
13067 mpfr_from_real (m3, ra3, GMP_RNDN);
/* Clear the flags so do_mpfr_ckconv can detect overflow/underflow.  */
13068 mpfr_clear_flags ();
13069 inexact = func (m1, m1, m2, m3, rnd);
13070 result = do_mpfr_ckconv (m1, type, inexact);
13071 mpfr_clears (m1, m2, m3, NULL);
13078 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13079 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13080 If ARG_SINP and ARG_COSP are NULL then the result is returned
13081 as a complex value.
13082 The type is taken from the type of ARG and is used for setting the
13083 precision of the calculation and results. */
13086 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13088 tree const type = TREE_TYPE (arg);
13089 tree result = NULL_TREE;
13093 /* To proceed, MPFR must exactly represent the target floating point
13094 format, which only happens when the target base equals two. */
13095 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13096 && TREE_CODE (arg) == REAL_CST
13097 && !TREE_OVERFLOW (arg))
13099 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13101 if (real_isfinite (ra))
13103 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13104 const int prec = fmt->p;
13105 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13106 tree result_s, result_c;
/* Compute sin and cos simultaneously; both results must convert
   cleanly for the fold to proceed.  */
13110 mpfr_inits2 (prec, m, ms, mc, NULL);
13111 mpfr_from_real (m, ra, GMP_RNDN);
13112 mpfr_clear_flags ();
13113 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13114 result_s = do_mpfr_ckconv (ms, type, inexact);
13115 result_c = do_mpfr_ckconv (mc, type, inexact);
13116 mpfr_clears (m, ms, mc, NULL);
13117 if (result_s && result_c)
13119 /* If we are to return in a complex value do so. */
13120 if (!arg_sinp && !arg_cosp)
/* Real part is cos, imaginary part is sin (cexpi ordering).  */
13121 return build_complex (build_complex_type (type),
13122 result_c, result_s);
13124 /* Dereference the sin/cos pointer arguments. */
13125 arg_sinp = build_fold_indirect_ref (arg_sinp);
13126 arg_cosp = build_fold_indirect_ref (arg_cosp);
13127 /* Proceed if valid pointer types were passed in. */
13128 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13129 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13131 /* Set the values. */
13132 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13134 TREE_SIDE_EFFECTS (result_s) = 1;
13135 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13137 TREE_SIDE_EFFECTS (result_c) = 1;
13138 /* Combine the assignments into a compound expr. */
13139 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13140 result_s, result_c));
13148 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13149 two-argument mpfr order N Bessel function FUNC on them and return
13150 the resulting value as a tree with type TYPE. The mpfr precision
13151 is set to the precision of TYPE. We assume that function FUNC
13152 returns zero if the result could be calculated exactly within the
13153 requested precision. */
13155 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13156 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13157 const REAL_VALUE_TYPE *min, bool inclusive)
13159 tree result = NULL_TREE;
13164 /* To proceed, MPFR must exactly represent the target floating point
13165 format, which only happens when the target base equals two. */
13166 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13167 && host_integerp (arg1, 0)
13168 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
/* N is the (signed, host-representable) Bessel order.  */
13170 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13171 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
/* The real argument must be finite and above the optional MIN
   bound; INCLUSIVE selects >= versus strict >.  */
13174 && real_isfinite (ra)
13175 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13177 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13178 const int prec = fmt->p;
13179 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13183 mpfr_init2 (m, prec);
13184 mpfr_from_real (m, ra, GMP_RNDN);
/* Clear the flags so do_mpfr_ckconv can detect overflow/underflow.  */
13185 mpfr_clear_flags ();
13186 inexact = func (m, n, m, rnd);
13187 result = do_mpfr_ckconv (m, type, inexact);
13195 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13196 the pointer *(ARG_QUO) and return the result. The type is taken
13197 from the type of ARG0 and is used for setting the precision of the
13198 calculation and results. */
13201 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13203 tree const type = TREE_TYPE (arg0);
13204 tree result = NULL_TREE;
13209 /* To proceed, MPFR must exactly represent the target floating point
13210 format, which only happens when the target base equals two. */
13211 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13212 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13213 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13215 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13216 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13218 if (real_isfinite (ra0) && real_isfinite (ra1))
13220 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13221 const int prec = fmt->p;
13222 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
/* Compute the remainder in place in m0; the integer quotient
   goes to integer_quo.  */
13227 mpfr_inits2 (prec, m0, m1, NULL);
13228 mpfr_from_real (m0, ra0, GMP_RNDN);
13229 mpfr_from_real (m1, ra1, GMP_RNDN);
13230 mpfr_clear_flags ();
13231 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13232 /* Remquo is independent of the rounding mode, so pass
13233 inexact=0 to do_mpfr_ckconv(). */
13234 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13235 mpfr_clears (m0, m1, NULL);
13238 /* MPFR calculates quo in the host's long so it may
13239 return more bits in quo than the target int can hold
13240 if sizeof(host long) > sizeof(target int). This can
13241 happen even for native compilers in LP64 mode. In
13242 these cases, modulo the quo value with the largest
13243 number that the target int can hold while leaving one
13244 bit for the sign. */
13245 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13246 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13248 /* Dereference the quo pointer argument. */
13249 arg_quo = build_fold_indirect_ref (arg_quo);
13250 /* Proceed iff a valid pointer type was passed in. */
13251 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13253 /* Set the value. */
13254 tree result_quo = fold_build2 (MODIFY_EXPR,
13255 TREE_TYPE (arg_quo), arg_quo,
13256 build_int_cst (NULL, integer_quo));
13257 TREE_SIDE_EFFECTS (result_quo) = 1;
13258 /* Combine the quo assignment with the rem. */
13259 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13260 result_quo, result_rem));
13268 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13269 resulting value as a tree with type TYPE. The mpfr precision is
13270 set to the precision of TYPE. We assume that this mpfr function
13271 returns zero if the result could be calculated exactly within the
13272 requested precision. In addition, the integer pointer represented
13273 by ARG_SG will be dereferenced and set to the appropriate signgam
13277 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13279 tree result = NULL_TREE;
13283 /* To proceed, MPFR must exactly represent the target floating point
13284 format, which only happens when the target base equals two. Also
13285 verify ARG is a constant and that ARG_SG is an int pointer. */
13286 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13287 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13288 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13289 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13291 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13293 /* In addition to NaN and Inf, the argument cannot be zero or a
13294 negative integer. */
13295 if (real_isfinite (ra)
13296 && ra->cl != rvc_zero
13297 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13299 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13300 const int prec = fmt->p;
13301 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13306 mpfr_init2 (m, prec);
13307 mpfr_from_real (m, ra, GMP_RNDN);
13308 mpfr_clear_flags ();
/* mpfr_lgamma also reports the sign of gamma(x) in SG.  */
13309 inexact = mpfr_lgamma (m, &sg, m, rnd);
13310 result_lg = do_mpfr_ckconv (m, type, inexact);
13316 /* Dereference the arg_sg pointer argument. */
13317 arg_sg = build_fold_indirect_ref (arg_sg);
13318 /* Assign the signgam value into *arg_sg. */
13319 result_sg = fold_build2 (MODIFY_EXPR,
13320 TREE_TYPE (arg_sg), arg_sg,
13321 build_int_cst (NULL, sg));
13322 TREE_SIDE_EFFECTS (result_sg) = 1;
13323 /* Combine the signgam assignment with the lgamma result. */
13324 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13325 result_sg, result_lg));
13333 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13334 function FUNC on it and return the resulting value as a tree with
13335 type TYPE. The mpfr precision is set to the precision of TYPE. We
13336 assume that function FUNC returns zero if the result could be
13337 calculated exactly within the requested precision. */
13340 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13342 tree result = NULL_TREE;
13346 /* To proceed, MPFR must exactly represent the target floating point
13347 format, which only happens when the target base equals two. */
13348 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13349 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13350 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13352 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13353 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13355 if (real_isfinite (re) && real_isfinite (im))
/* Precision/rounding come from TYPE's element (real) type.  */
13357 const struct real_format *const fmt =
13358 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13359 const int prec = fmt->p;
13360 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13361 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
/* Evaluate FUNC in place, then validate/convert via do_mpc_ckconv.  */
13365 mpc_init2 (m, prec);
13366 mpfr_from_real (mpc_realref(m), re, rnd);
13367 mpfr_from_real (mpc_imagref(m), im, rnd);
13368 mpfr_clear_flags ();
13369 inexact = func (m, m, crnd);
13370 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13378 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13379 mpc function FUNC on it and return the resulting value as a tree
13380 with type TYPE. The mpfr precision is set to the precision of
13381 TYPE. We assume that function FUNC returns zero if the result
13382 could be calculated exactly within the requested precision. If
13383 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13384 in the arguments and/or results. */
13387 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13388 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13390 tree result = NULL_TREE;
13395 /* To proceed, MPFR must exactly represent the target floating point
13396 format, which only happens when the target base equals two. */
13397 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13398 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13399 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13400 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13401 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13403 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13404 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13405 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13406 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
/* Finiteness of the operands is only required when DO_NONFINITE
   folding is not requested.  */
13409 || (real_isfinite (re0) && real_isfinite (im0)
13410 && real_isfinite (re1) && real_isfinite (im1)))
/* Precision/rounding come from TYPE's element (real) type.  */
13412 const struct real_format *const fmt =
13413 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13414 const int prec = fmt->p;
13415 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13416 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
/* Evaluate FUNC (m0, m1) in place (result lands in m0).  */
13420 mpc_init2 (m0, prec);
13421 mpc_init2 (m1, prec);
13422 mpfr_from_real (mpc_realref(m0), re0, rnd);
13423 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13424 mpfr_from_real (mpc_realref(m1), re1, rnd);
13425 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13426 mpfr_clear_flags ();
13427 inexact = func (m0, m0, m1, crnd);
13428 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13438 The functions below provide an alternate interface for folding
13439 builtin function calls presented as GIMPLE_CALL statements rather
13440 than as CALL_EXPRs. The folded result is still expressed as a
13441 tree. There is too much code duplication in the handling of
13442 varargs functions, and a more intrusive re-factoring would permit
13443 better sharing of code between the tree and statement-based
13444 versions of these functions. */
13446 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13447 along with N new arguments specified as the "..." parameters. SKIP
13448 is the number of arguments in STMT to be omitted. This function is used
13449 to do varargs-to-varargs transformations. */
13452 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13454 int oldnargs = gimple_call_num_args (stmt);
13455 int nargs = oldnargs - skip + n;
13456 tree fntype = TREE_TYPE (fndecl);
/* Build the callee address expression for FNDECL.  */
13457 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13461 location_t loc = gimple_location (stmt);
/* Stack-allocate the merged argument vector: the N new arguments
   first, then STMT's original arguments past SKIP.  */
13463 buffer = XALLOCAVEC (tree, nargs);
13465 for (i = 0; i < n; i++)
13466 buffer[i] = va_arg (ap, tree);
13468 for (j = skip; j < oldnargs; j++, i++)
13469 buffer[i] = gimple_call_arg (stmt, j);
/* Fold the rebuilt call immediately so callers get a simplified tree.  */
13471 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13474 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13475 a normal call should be emitted rather than expanding the function
13476 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13479 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13481 tree dest, size, len, fn, fmt, flag;
13482 const char *fmt_str;
13483 int nargs = gimple_call_num_args (stmt);
13485 /* Verify the required arguments in the original call. */
/* The _chk argument layout is (dest, flag, size, fmt, ...).  */
13488 dest = gimple_call_arg (stmt, 0);
13489 if (!validate_arg (dest, POINTER_TYPE))
13491 flag = gimple_call_arg (stmt, 1);
13492 if (!validate_arg (flag, INTEGER_TYPE))
13494 size = gimple_call_arg (stmt, 2);
13495 if (!validate_arg (size, INTEGER_TYPE))
13497 fmt = gimple_call_arg (stmt, 3);
13498 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE must be a known constant for the length check below.  */
13501 if (! host_integerp (size, 1))
13506 if (!init_target_chars ())
13509 /* Check whether the format is a literal string constant. */
13510 fmt_str = c_getstr (fmt);
13511 if (fmt_str != NULL)
13513 /* If the format doesn't contain % args or %%, we know the size. */
13514 if (strchr (fmt_str, target_percent) == 0)
13516 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13517 len = build_int_cstu (size_type_node, strlen (fmt_str));
13519 /* If the format is "%s" and first ... argument is a string literal,
13520 we know the size too. */
13521 else if (fcode == BUILT_IN_SPRINTF_CHK
13522 && strcmp (fmt_str, target_percent_s) == 0)
13528 arg = gimple_call_arg (stmt, 4);
13529 if (validate_arg (arg, POINTER_TYPE))
13531 len = c_strlen (arg, 1);
13532 if (! len || ! host_integerp (len, 1))
/* A SIZE of all ones presumably means "object size unknown"; only
   then is the LEN < SIZE check skipped -- TODO confirm against the
   object-size machinery.  */
13539 if (! integer_all_onesp (size))
13541 if (! len || ! tree_int_cst_lt (len, size))
13545 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13546 or if format doesn't contain % chars or is "%s". */
13547 if (! integer_zerop (flag))
13549 if (fmt_str == NULL)
13551 if (strchr (fmt_str, target_percent) != NULL
13552 && strcmp (fmt_str, target_percent_s))
13556 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13557 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13558 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Rebuild the call dropping the flag and size arguments: keep dest
   and fmt, then the original trailing arguments.  */
13562 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13565 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
13566 a normal call should be emitted rather than expanding the function
13567 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13568 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13569 passed as second argument. */
13572 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13573 enum built_in_function fcode)
13575 tree dest, size, len, fn, fmt, flag;
13576 const char *fmt_str;
13578 /* Verify the required arguments in the original call. */
/* The _chk argument layout is (dest, len, flag, size, fmt, ...).  */
13579 if (gimple_call_num_args (stmt) < 5)
13581 dest = gimple_call_arg (stmt, 0);
13582 if (!validate_arg (dest, POINTER_TYPE))
13584 len = gimple_call_arg (stmt, 1);
13585 if (!validate_arg (len, INTEGER_TYPE))
13587 flag = gimple_call_arg (stmt, 2);
13588 if (!validate_arg (flag, INTEGER_TYPE))
13590 size = gimple_call_arg (stmt, 3);
13591 if (!validate_arg (size, INTEGER_TYPE))
13593 fmt = gimple_call_arg (stmt, 4);
13594 if (!validate_arg (fmt, POINTER_TYPE))
13597 if (! host_integerp (size, 1))
13600 if (! integer_all_onesp (size))
13602 if (! host_integerp (len, 1))
13604 /* If LEN is not constant, try MAXLEN too.
13605 For MAXLEN only allow optimizing into non-_ocs function
13606 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13607 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* Give up when the destination could be smaller than the length.  */
13613 if (tree_int_cst_lt (size, maxlen))
13617 if (!init_target_chars ())
13620 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13621 or if format doesn't contain % chars or is "%s". */
13622 if (! integer_zerop (flag))
13624 fmt_str = c_getstr (fmt);
13625 if (fmt_str == NULL)
13627 if (strchr (fmt_str, target_percent) != NULL
13628 && strcmp (fmt_str, target_percent_s))
13632 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13634 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13635 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Rebuild the call dropping the flag and size arguments: keep dest,
   len and fmt, then the original trailing arguments.  */
13639 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13642 /* Builtins with folding operations that operate on "..." arguments
13643 need special handling; we need to store the arguments in a convenient
13644 data structure before attempting any folding. Fortunately there are
13645 only a few builtins that fall into this category. FNDECL is the
13646 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13647 result of the function call is ignored. */
13650 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13651 bool ignore ATTRIBUTE_UNUSED)
13653 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13654 tree ret = NULL_TREE;
/* Dispatch to the per-builtin folder; anything else is not folded.  */
13658 case BUILT_IN_SPRINTF_CHK:
13659 case BUILT_IN_VSPRINTF_CHK:
13660 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13663 case BUILT_IN_SNPRINTF_CHK:
13664 case BUILT_IN_VSNPRINTF_CHK:
13665 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
/* Wrap the result in a no-warning NOP_EXPR so replacing the call
   doesn't trigger "statement with no effect"-style diagnostics;
   fold_call_stmt looks through this wrapper.  */
13672 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13673 TREE_NO_WARNING (ret) = 1;
13679 /* A wrapper function for builtin folding that prevents warnings for
13680 "statement without effect" and the like, caused by removing the
13681 call node earlier than the warning is generated. */
13684 fold_call_stmt (gimple stmt, bool ignore)
13686 tree ret = NULL_TREE;
13687 tree fndecl = gimple_call_fndecl (stmt);
13688 location_t loc = gimple_location (stmt);
/* Only fold direct calls to builtins, and never when the call
   expands a va_arg pack.  */
13690 && TREE_CODE (fndecl) == FUNCTION_DECL
13691 && DECL_BUILT_IN (fndecl)
13692 && !gimple_call_va_arg_pack_p (stmt))
13694 int nargs = gimple_call_num_args (stmt);
13696 if (avoid_folding_inline_builtin (fndecl))
/* Machine-dependent builtins are folded by the target hook.  */
13698 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13700 return targetm.fold_builtin (fndecl, nargs,
13702 ? gimple_call_arg_ptr (stmt, 0)
13703 : &error_mark_node), ignore);
/* Fixed-arity builtins: copy the arguments into an array and use
   the shared tree-level folder.  */
13707 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13709 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13711 for (i = 0; i < nargs; i++)
13712 args[i] = gimple_call_arg (stmt, i);
13713 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13716 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13719 /* Propagate location information from original call to
13720 expansion of builtin. Otherwise things like
13721 maybe_emit_chk_warning, that operate on the expansion
13722 of a builtin, will use the wrong location information. */
13723 if (gimple_has_location (stmt))
13725 tree realret = ret;
/* Look through the no-warning NOP_EXPR wrapper added by
   gimple_fold_builtin_varargs.  */
13726 if (TREE_CODE (ret) == NOP_EXPR)
13727 realret = TREE_OPERAND (ret, 0);
13728 if (CAN_HAVE_LOCATION_P (realret)
13729 && !EXPR_HAS_LOCATION (realret))
13730 SET_EXPR_LOCATION (realret, loc);
13740 /* Look up the function in built_in_decls that corresponds to DECL
13741 and set ASMSPEC as its user assembler name. DECL must be a
13742 function decl that declares a builtin. */
/* Give the builtin corresponding to DECL the user assembler name
   ASMSPEC, and keep the matching optabs/libfuncs in sync so RTL
   expansion emits calls under the renamed symbol.  DECL must be a
   BUILT_IN_NORMAL function decl.  NOTE(review): line-sampled listing --
   break statements, the default case and closing braces are elided.  */
13745 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13748 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13749 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
/* Rename the canonical builtin decl itself.  */
13752 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13753 set_user_assembler_name (builtin, asmspec);
/* For builtins that expand via libfuncs, also redirect the libfunc.  */
13754 switch (DECL_FUNCTION_CODE (decl))
13756 case BUILT_IN_MEMCPY:
13757 init_block_move_fn (asmspec);
13758 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13760 case BUILT_IN_MEMSET:
13761 init_block_clear_fn (asmspec);
13762 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13764 case BUILT_IN_MEMMOVE:
13765 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13767 case BUILT_IN_MEMCMP:
13768 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13770 case BUILT_IN_ABORT:
13771 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
/* ffs only has a libfunc when int is narrower than a word (case label
   for BUILT_IN_FFS is in an elided line -- confirm against full
   source).  */
13774 if (INT_TYPE_SIZE < BITS_PER_WORD)
13776 set_user_assembler_libfunc ("ffs", asmspec);
13777 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13778 MODE_INT, 0), "ffs");
13786 /* Return true if DECL is a builtin that expands to a constant or similarly
/* Return true if DECL is a normal builtin that expands to nothing more
   than a constant, a simple register move/load, or exception-state
   plumbing -- i.e. essentially free code.  NOTE(review): line-sampled
   listing; the "return true" for the listed cases, the default case
   and the trailing "return false" are elided.  */
13789 is_simple_builtin (tree decl)
13791 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13792 switch (DECL_FUNCTION_CODE (decl))
13794 /* Builtins that expand to constants. */
13795 case BUILT_IN_CONSTANT_P:
13796 case BUILT_IN_EXPECT:
13797 case BUILT_IN_OBJECT_SIZE:
13798 case BUILT_IN_UNREACHABLE:
13799 /* Simple register moves or loads from stack. */
13800 case BUILT_IN_RETURN_ADDRESS:
13801 case BUILT_IN_EXTRACT_RETURN_ADDR:
13802 case BUILT_IN_FROB_RETURN_ADDR:
13803 case BUILT_IN_RETURN:
13804 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13805 case BUILT_IN_FRAME_ADDRESS:
13806 case BUILT_IN_VA_END:
13807 case BUILT_IN_STACK_SAVE:
13808 case BUILT_IN_STACK_RESTORE:
13809 /* Exception state returns or moves registers around. */
13810 case BUILT_IN_EH_FILTER:
13811 case BUILT_IN_EH_POINTER:
13812 case BUILT_IN_EH_COPY_VALUES:
13822 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13823 most probably expanded inline into reasonably simple code. This is a
13824 superset of is_simple_builtin. */
/* Return true if DECL is a builtin that is cheap to expand inline:
   all target (MD) builtins, every "simple" builtin (see
   is_simple_builtin), plus the arithmetic/classification builtins
   listed below.  NOTE(review): line-sampled listing -- the early
   "if (!decl)" guard, "return true" statements, several case labels
   (e.g. the plain CLZ/CTZ/FFS/BSWAP variants) and the closing braces
   are elided; the function continues past the visible end.  */
13826 is_inexpensive_builtin (tree decl)
/* Target-specific builtins are assumed inexpensive.  */
13830 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13832 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13833 switch (DECL_FUNCTION_CODE (decl))
13836 case BUILT_IN_ALLOCA:
13837 case BUILT_IN_BSWAP32:
13838 case BUILT_IN_BSWAP64:
13840 case BUILT_IN_CLZIMAX:
13841 case BUILT_IN_CLZL:
13842 case BUILT_IN_CLZLL:
13844 case BUILT_IN_CTZIMAX:
13845 case BUILT_IN_CTZL:
13846 case BUILT_IN_CTZLL:
13848 case BUILT_IN_FFSIMAX:
13849 case BUILT_IN_FFSL:
13850 case BUILT_IN_FFSLL:
13851 case BUILT_IN_IMAXABS:
/* Floating-point classification builtins: expand to a few compares.  */
13852 case BUILT_IN_FINITE:
13853 case BUILT_IN_FINITEF:
13854 case BUILT_IN_FINITEL:
13855 case BUILT_IN_FINITED32:
13856 case BUILT_IN_FINITED64:
13857 case BUILT_IN_FINITED128:
13858 case BUILT_IN_FPCLASSIFY:
13859 case BUILT_IN_ISFINITE:
13860 case BUILT_IN_ISINF_SIGN:
13861 case BUILT_IN_ISINF:
13862 case BUILT_IN_ISINFF:
13863 case BUILT_IN_ISINFL:
13864 case BUILT_IN_ISINFD32:
13865 case BUILT_IN_ISINFD64:
13866 case BUILT_IN_ISINFD128:
13867 case BUILT_IN_ISNAN:
13868 case BUILT_IN_ISNANF:
13869 case BUILT_IN_ISNANL:
13870 case BUILT_IN_ISNAND32:
13871 case BUILT_IN_ISNAND64:
13872 case BUILT_IN_ISNAND128:
13873 case BUILT_IN_ISNORMAL:
13874 case BUILT_IN_ISGREATER:
13875 case BUILT_IN_ISGREATEREQUAL:
13876 case BUILT_IN_ISLESS:
13877 case BUILT_IN_ISLESSEQUAL:
13878 case BUILT_IN_ISLESSGREATER:
13879 case BUILT_IN_ISUNORDERED:
13880 case BUILT_IN_VA_ARG_PACK:
13881 case BUILT_IN_VA_ARG_PACK_LEN:
13882 case BUILT_IN_VA_COPY:
13883 case BUILT_IN_TRAP:
13884 case BUILT_IN_SAVEREGS:
13885 case BUILT_IN_POPCOUNTL:
13886 case BUILT_IN_POPCOUNTLL:
13887 case BUILT_IN_POPCOUNTIMAX:
13888 case BUILT_IN_POPCOUNT:
13889 case BUILT_IN_PARITYL:
13890 case BUILT_IN_PARITYLL:
13891 case BUILT_IN_PARITYIMAX:
13892 case BUILT_IN_PARITY:
13893 case BUILT_IN_LABS:
13894 case BUILT_IN_LLABS:
13895 case BUILT_IN_PREFETCH:
/* Anything not listed above is inexpensive only if it is also a
   "simple" builtin (default case label is in an elided line).  */
13899 return is_simple_builtin (decl);