1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 #include "coretypes.h"
31 #include "tree-gimple.h"
34 #include "hard-reg-set.h"
37 #include "insn-config.h"
43 #include "typeclass.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
53 #include "diagnostic.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
59 /* Define the names of the builtin function types and codes. */
60 const char *const built_in_class_names[4]
61 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
63 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
64 const char * built_in_names[(int) END_BUILTINS] =
66 #include "builtins.def"
70 /* Setup an array of _DECL trees, make sure each element is
71 initialized to NULL_TREE. */
72 tree built_in_decls[(int) END_BUILTINS];
73 /* Declarations used when constructing the builtin implicitly in the compiler.
74 It may be NULL_TREE when this is invalid (for instance runtime is not
75 required to implement the function call in all cases). */
76 tree implicit_built_in_decls[(int) END_BUILTINS];
78 static const char *c_getstr (tree);
79 static rtx c_readstr (const char *, enum machine_mode);
80 static int target_char_cast (tree, char *);
81 static rtx get_memory_rtx (tree, tree);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx result_vector (int, rtx);
87 static void expand_builtin_update_setjmp_buf (rtx);
88 static void expand_builtin_prefetch (tree);
89 static rtx expand_builtin_apply_args (void);
90 static rtx expand_builtin_apply_args_1 (void);
91 static rtx expand_builtin_apply (rtx, rtx, rtx);
92 static void expand_builtin_return (rtx);
93 static enum type_class type_to_class (tree);
94 static rtx expand_builtin_classify_type (tree);
95 static void expand_errno_check (tree, rtx);
96 static rtx expand_builtin_mathfn (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
98 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
99 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_sincos (tree);
101 static rtx expand_builtin_cexpi (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
103 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_args_info (tree);
105 static rtx expand_builtin_next_arg (void);
106 static rtx expand_builtin_va_start (tree);
107 static rtx expand_builtin_va_end (tree);
108 static rtx expand_builtin_va_copy (tree);
109 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
113 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
114 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
121 enum machine_mode, int);
122 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
123 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_bcopy (tree, int);
126 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, rtx, bool);
144 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
146 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
147 static tree stabilize_va_list (tree, int);
148 static rtx expand_builtin_expect (tree, rtx);
149 static tree fold_builtin_constant_p (tree);
150 static tree fold_builtin_expect (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (tree);
153 static tree fold_builtin_inf (tree, int);
154 static tree fold_builtin_nan (tree, tree, int);
155 static tree rewrite_call_expr (tree, int, tree, int, ...);
156 static bool validate_arg (tree, enum tree_code code);
157 static bool integer_valued_real_p (tree);
158 static tree fold_trunc_transparent_mathfn (tree, tree);
159 static bool readonly_data_expr (tree);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_sqrt (tree, tree);
163 static tree fold_builtin_cbrt (tree, tree);
164 static tree fold_builtin_pow (tree, tree, tree, tree);
165 static tree fold_builtin_powi (tree, tree, tree, tree);
166 static tree fold_builtin_cos (tree, tree, tree);
167 static tree fold_builtin_cosh (tree, tree, tree);
168 static tree fold_builtin_tan (tree, tree);
169 static tree fold_builtin_trunc (tree, tree);
170 static tree fold_builtin_floor (tree, tree);
171 static tree fold_builtin_ceil (tree, tree);
172 static tree fold_builtin_round (tree, tree);
173 static tree fold_builtin_int_roundingfn (tree, tree);
174 static tree fold_builtin_bitop (tree, tree);
175 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
176 static tree fold_builtin_strchr (tree, tree, tree);
177 static tree fold_builtin_memchr (tree, tree, tree, tree);
178 static tree fold_builtin_memcmp (tree, tree, tree);
179 static tree fold_builtin_strcmp (tree, tree);
180 static tree fold_builtin_strncmp (tree, tree, tree);
181 static tree fold_builtin_signbit (tree, tree);
182 static tree fold_builtin_copysign (tree, tree, tree, tree);
183 static tree fold_builtin_isascii (tree);
184 static tree fold_builtin_toascii (tree);
185 static tree fold_builtin_isdigit (tree);
186 static tree fold_builtin_fabs (tree, tree);
187 static tree fold_builtin_abs (tree, tree);
188 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
190 static tree fold_builtin_n (tree, tree *, int, bool);
191 static tree fold_builtin_0 (tree, bool);
192 static tree fold_builtin_1 (tree, tree, bool);
193 static tree fold_builtin_2 (tree, tree, tree, bool);
194 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
195 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
196 static tree fold_builtin_varargs (tree, tree, bool);
198 static tree fold_builtin_strpbrk (tree, tree, tree);
199 static tree fold_builtin_strstr (tree, tree, tree);
200 static tree fold_builtin_strrchr (tree, tree, tree);
201 static tree fold_builtin_strcat (tree, tree);
202 static tree fold_builtin_strncat (tree, tree, tree);
203 static tree fold_builtin_strspn (tree, tree);
204 static tree fold_builtin_strcspn (tree, tree);
205 static tree fold_builtin_sprintf (tree, tree, tree, int);
207 static rtx expand_builtin_object_size (tree);
208 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
209 enum built_in_function);
210 static void maybe_emit_chk_warning (tree, enum built_in_function);
211 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
212 static tree fold_builtin_object_size (tree, tree);
213 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
214 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
215 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
216 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
217 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
218 enum built_in_function);
219 static bool init_target_chars (void);
221 static unsigned HOST_WIDE_INT target_newline;
222 static unsigned HOST_WIDE_INT target_percent;
223 static unsigned HOST_WIDE_INT target_c;
224 static unsigned HOST_WIDE_INT target_s;
225 static char target_percent_c[3];
226 static char target_percent_s[3];
227 static char target_percent_s_newline[4];
228 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_arg2 (tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_arg3 (tree, tree, tree, tree,
233 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
234 static tree do_mpfr_sincos (tree, tree, tree);
235 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
236 static tree do_mpfr_bessel_n (tree, tree, tree,
237 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
238 const REAL_VALUE_TYPE *, bool);
239 static tree do_mpfr_remquo (tree, tree, tree);
240 static tree do_mpfr_lgamma_r (tree, tree, tree);
243 /* This array records the insn_code of insns to imlement the signbit
245 enum insn_code signbit_optab[NUM_MACHINE_MODES];
248 /* Return true if NODE should be considered for inline expansion regardless
249 of the optimization level. This means whenever a function is invoked with
250 its "internal" name, which normally contains the prefix "__builtin". */
252 static bool called_as_built_in (tree node)
254 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
255 if (strncmp (name, "__builtin_", 10) == 0)
257 if (strncmp (name, "__sync_", 7) == 0)
262 /* Return the alignment in bits of EXP, a pointer valued expression.
263 But don't return more than MAX_ALIGN no matter what.
264 The alignment returned is, by default, the alignment of the thing that
265 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
267 Otherwise, look at the expression to see if we can do better, i.e., if the
268 expression is actually pointing at an object whose alignment is tighter. */
271 get_pointer_alignment (tree exp, unsigned int max_align)
273 unsigned int align, inner;
275 /* We rely on TER to compute accurate alignment information. */
276 if (!(optimize && flag_tree_ter))
279 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
282 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
283 align = MIN (align, max_align);
287 switch (TREE_CODE (exp))
291 case NON_LVALUE_EXPR:
292 exp = TREE_OPERAND (exp, 0);
293 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
296 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
297 align = MIN (inner, max_align);
300 case POINTER_PLUS_EXPR:
301 /* If sum of pointer + int, restrict our maximum alignment to that
302 imposed by the integer. If not, we can't do any better than
304 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
307 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
308 & (max_align / BITS_PER_UNIT - 1))
312 exp = TREE_OPERAND (exp, 0);
316 /* See what we are pointing at and look at its alignment. */
317 exp = TREE_OPERAND (exp, 0);
319 if (handled_component_p (exp))
321 HOST_WIDE_INT bitsize, bitpos;
323 enum machine_mode mode;
324 int unsignedp, volatilep;
326 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
327 &mode, &unsignedp, &volatilep, true);
329 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
330 if (offset && TREE_CODE (offset) == PLUS_EXPR
331 && host_integerp (TREE_OPERAND (offset, 1), 1))
333 /* Any overflow in calculating offset_bits won't change
336 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
340 inner = MIN (inner, (offset_bits & -offset_bits));
341 offset = TREE_OPERAND (offset, 0);
343 if (offset && TREE_CODE (offset) == MULT_EXPR
344 && host_integerp (TREE_OPERAND (offset, 1), 1))
346 /* Any overflow in calculating offset_factor won't change
348 unsigned offset_factor
349 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
353 inner = MIN (inner, (offset_factor & -offset_factor));
356 inner = MIN (inner, BITS_PER_UNIT);
359 align = MIN (inner, DECL_ALIGN (exp));
360 #ifdef CONSTANT_ALIGNMENT
361 else if (CONSTANT_CLASS_P (exp))
362 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
364 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
365 || TREE_CODE (exp) == INDIRECT_REF)
366 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
368 align = MIN (align, inner);
369 return MIN (align, max_align);
377 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
378 way, because it could contain a zero byte in the middle.
379 TREE_STRING_LENGTH is the size of the character array, not the string.
381 ONLY_VALUE should be nonzero if the result is not going to be emitted
382 into the instruction stream and zero if it is going to be expanded.
383 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
384 is returned, otherwise NULL, since
385 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
386 evaluate the side-effects.
388 The value returned is of type `ssizetype'.
390 Unfortunately, string_constant can't access the values of const char
391 arrays with initializers, so neither can we do so here. */
394 c_strlen (tree src, int only_value)
397 HOST_WIDE_INT offset;
402 if (TREE_CODE (src) == COND_EXPR
403 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
407 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
408 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
409 if (tree_int_cst_equal (len1, len2))
413 if (TREE_CODE (src) == COMPOUND_EXPR
414 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
415 return c_strlen (TREE_OPERAND (src, 1), only_value);
417 src = string_constant (src, &offset_node);
421 max = TREE_STRING_LENGTH (src) - 1;
422 ptr = TREE_STRING_POINTER (src);
424 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
426 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
427 compute the offset to the following null if we don't know where to
428 start searching for it. */
431 for (i = 0; i < max; i++)
435 /* We don't know the starting offset, but we do know that the string
436 has no internal zero bytes. We can assume that the offset falls
437 within the bounds of the string; otherwise, the programmer deserves
438 what he gets. Subtract the offset from the length of the string,
439 and return that. This would perhaps not be valid if we were dealing
440 with named arrays in addition to literal string constants. */
442 return size_diffop (size_int (max), offset_node);
445 /* We have a known offset into the string. Start searching there for
446 a null character if we can represent it as a single HOST_WIDE_INT. */
447 if (offset_node == 0)
449 else if (! host_integerp (offset_node, 0))
452 offset = tree_low_cst (offset_node, 0);
454 /* If the offset is known to be out of bounds, warn, and call strlen at
456 if (offset < 0 || offset > max)
458 warning (0, "offset outside bounds of constant string");
462 /* Use strlen to search for the first zero byte. Since any strings
463 constructed with build_string will have nulls appended, we win even
464 if we get handed something like (char[4])"abcd".
466 Since OFFSET is our starting index into the string, no further
467 calculation is needed. */
468 return ssize_int (strlen (ptr + offset));
471 /* Return a char pointer for a C string if it is a string constant
472 or sum of string constant and integer constant. */
479 src = string_constant (src, &offset_node);
483 if (offset_node == 0)
484 return TREE_STRING_POINTER (src);
485 else if (!host_integerp (offset_node, 1)
486 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
489 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
492 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
493 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
496 c_readstr (const char *str, enum machine_mode mode)
502 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
507 for (i = 0; i < GET_MODE_SIZE (mode); i++)
510 if (WORDS_BIG_ENDIAN)
511 j = GET_MODE_SIZE (mode) - i - 1;
512 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
513 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
514 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
516 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
519 ch = (unsigned char) str[i];
520 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
522 return immed_double_const (c[0], c[1], mode);
525 /* Cast a target constant CST to target CHAR and if that value fits into
526 host char type, return zero and put that value into variable pointed to by
530 target_char_cast (tree cst, char *p)
532 unsigned HOST_WIDE_INT val, hostval;
534 if (!host_integerp (cst, 1)
535 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
538 val = tree_low_cst (cst, 1);
539 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
540 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
543 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
544 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
553 /* Similar to save_expr, but assumes that arbitrary code is not executed
554 in between the multiple evaluations. In particular, we assume that a
555 non-addressable local variable will not be modified. */
558 builtin_save_expr (tree exp)
560 if (TREE_ADDRESSABLE (exp) == 0
561 && (TREE_CODE (exp) == PARM_DECL
562 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
565 return save_expr (exp);
568 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
569 times to get the address of either a higher stack frame, or a return
570 address located within it (depending on FNDECL_CODE). */
573 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
577 #ifdef INITIAL_FRAME_ADDRESS_RTX
578 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
582 /* For a zero count with __builtin_return_address, we don't care what
583 frame address we return, because target-specific definitions will
584 override us. Therefore frame pointer elimination is OK, and using
585 the soft frame pointer is OK.
587 For a nonzero count, or a zero count with __builtin_frame_address,
588 we require a stable offset from the current frame pointer to the
589 previous one, so we must use the hard frame pointer, and
590 we must disable frame pointer elimination. */
591 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
592 tem = frame_pointer_rtx;
595 tem = hard_frame_pointer_rtx;
597 /* Tell reload not to eliminate the frame pointer. */
598 current_function_accesses_prior_frames = 1;
602 /* Some machines need special handling before we can access
603 arbitrary frames. For example, on the SPARC, we must first flush
604 all register windows to the stack. */
605 #ifdef SETUP_FRAME_ADDRESSES
607 SETUP_FRAME_ADDRESSES ();
610 /* On the SPARC, the return address is not in the frame, it is in a
611 register. There is no way to access it off of the current frame
612 pointer, but it can be accessed off the previous frame pointer by
613 reading the value from the register window save area. */
614 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
615 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
619 /* Scan back COUNT frames to the specified frame. */
620 for (i = 0; i < count; i++)
622 /* Assume the dynamic chain pointer is in the word that the
623 frame address points to, unless otherwise specified. */
624 #ifdef DYNAMIC_CHAIN_ADDRESS
625 tem = DYNAMIC_CHAIN_ADDRESS (tem);
627 tem = memory_address (Pmode, tem);
628 tem = gen_frame_mem (Pmode, tem);
629 tem = copy_to_reg (tem);
632 /* For __builtin_frame_address, return what we've got. But, on
633 the SPARC for example, we may have to add a bias. */
634 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
635 #ifdef FRAME_ADDR_RTX
636 return FRAME_ADDR_RTX (tem);
641 /* For __builtin_return_address, get the return address from that frame. */
642 #ifdef RETURN_ADDR_RTX
643 tem = RETURN_ADDR_RTX (count, tem);
645 tem = memory_address (Pmode,
646 plus_constant (tem, GET_MODE_SIZE (Pmode)));
647 tem = gen_frame_mem (Pmode, tem);
652 /* Alias set used for setjmp buffer. */
653 static HOST_WIDE_INT setjmp_alias_set = -1;
655 /* Construct the leading half of a __builtin_setjmp call. Control will
656 return to RECEIVER_LABEL. This is also called directly by the SJLJ
657 exception handling code. */
660 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
662 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
666 if (setjmp_alias_set == -1)
667 setjmp_alias_set = new_alias_set ();
669 buf_addr = convert_memory_address (Pmode, buf_addr);
671 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
673 /* We store the frame pointer and the address of receiver_label in
674 the buffer and use the rest of it for the stack save area, which
675 is machine-dependent. */
677 mem = gen_rtx_MEM (Pmode, buf_addr);
678 set_mem_alias_set (mem, setjmp_alias_set);
679 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
681 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
682 set_mem_alias_set (mem, setjmp_alias_set);
684 emit_move_insn (validize_mem (mem),
685 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
687 stack_save = gen_rtx_MEM (sa_mode,
688 plus_constant (buf_addr,
689 2 * GET_MODE_SIZE (Pmode)));
690 set_mem_alias_set (stack_save, setjmp_alias_set);
691 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
693 /* If there is further processing to do, do it. */
694 #ifdef HAVE_builtin_setjmp_setup
695 if (HAVE_builtin_setjmp_setup)
696 emit_insn (gen_builtin_setjmp_setup (buf_addr));
699 /* Tell optimize_save_area_alloca that extra work is going to
700 need to go on during alloca. */
701 current_function_calls_setjmp = 1;
703 /* We have a nonlocal label. */
704 current_function_has_nonlocal_label = 1;
707 /* Construct the trailing part of a __builtin_setjmp call. This is
708 also called directly by the SJLJ exception handling code. */
711 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
713 /* Clobber the FP when we get here, so we have to make sure it's
714 marked as used by this function. */
715 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
717 /* Mark the static chain as clobbered here so life information
718 doesn't get messed up for it. */
719 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
721 /* Now put in the code to restore the frame pointer, and argument
722 pointer, if needed. */
723 #ifdef HAVE_nonlocal_goto
724 if (! HAVE_nonlocal_goto)
727 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
728 /* This might change the hard frame pointer in ways that aren't
729 apparent to early optimization passes, so force a clobber. */
730 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
733 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
734 if (fixed_regs[ARG_POINTER_REGNUM])
736 #ifdef ELIMINABLE_REGS
738 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
740 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
741 if (elim_regs[i].from == ARG_POINTER_REGNUM
742 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
745 if (i == ARRAY_SIZE (elim_regs))
748 /* Now restore our arg pointer from the address at which it
749 was saved in our stack frame. */
750 emit_move_insn (virtual_incoming_args_rtx,
751 copy_to_reg (get_arg_pointer_save_area (cfun)));
756 #ifdef HAVE_builtin_setjmp_receiver
757 if (HAVE_builtin_setjmp_receiver)
758 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
761 #ifdef HAVE_nonlocal_goto_receiver
762 if (HAVE_nonlocal_goto_receiver)
763 emit_insn (gen_nonlocal_goto_receiver ());
768 /* We must not allow the code we just generated to be reordered by
769 scheduling. Specifically, the update of the frame pointer must
770 happen immediately, not later. */
771 emit_insn (gen_blockage ());
774 /* __builtin_longjmp is passed a pointer to an array of five words (not
775 all will be used on all machines). It operates similarly to the C
776 library function of the same name, but is more efficient. Much of
777 the code below is copied from the handling of non-local gotos. */
780 expand_builtin_longjmp (rtx buf_addr, rtx value)
782 rtx fp, lab, stack, insn, last;
783 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
785 if (setjmp_alias_set == -1)
786 setjmp_alias_set = new_alias_set ();
788 buf_addr = convert_memory_address (Pmode, buf_addr);
790 buf_addr = force_reg (Pmode, buf_addr);
792 /* We used to store value in static_chain_rtx, but that fails if pointers
793 are smaller than integers. We instead require that the user must pass
794 a second argument of 1, because that is what builtin_setjmp will
795 return. This also makes EH slightly more efficient, since we are no
796 longer copying around a value that we don't care about. */
797 gcc_assert (value == const1_rtx);
799 last = get_last_insn ();
800 #ifdef HAVE_builtin_longjmp
801 if (HAVE_builtin_longjmp)
802 emit_insn (gen_builtin_longjmp (buf_addr));
806 fp = gen_rtx_MEM (Pmode, buf_addr);
807 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
808 GET_MODE_SIZE (Pmode)));
810 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
811 2 * GET_MODE_SIZE (Pmode)));
812 set_mem_alias_set (fp, setjmp_alias_set);
813 set_mem_alias_set (lab, setjmp_alias_set);
814 set_mem_alias_set (stack, setjmp_alias_set);
816 /* Pick up FP, label, and SP from the block and jump. This code is
817 from expand_goto in stmt.c; see there for detailed comments. */
818 #ifdef HAVE_nonlocal_goto
819 if (HAVE_nonlocal_goto)
820 /* We have to pass a value to the nonlocal_goto pattern that will
821 get copied into the static_chain pointer, but it does not matter
822 what that value is, because builtin_setjmp does not use it. */
823 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
827 lab = copy_to_reg (lab);
829 emit_insn (gen_rtx_CLOBBER (VOIDmode,
830 gen_rtx_MEM (BLKmode,
831 gen_rtx_SCRATCH (VOIDmode))));
832 emit_insn (gen_rtx_CLOBBER (VOIDmode,
833 gen_rtx_MEM (BLKmode,
834 hard_frame_pointer_rtx)));
836 emit_move_insn (hard_frame_pointer_rtx, fp);
837 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
839 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
840 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
841 emit_indirect_jump (lab);
845 /* Search backwards and mark the jump insn as a non-local goto.
846 Note that this precludes the use of __builtin_longjmp to a
847 __builtin_setjmp target in the same function. However, we've
848 already cautioned the user that these functions are for
849 internal exception handling use only. */
850 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
852 gcc_assert (insn != last);
856 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
860 else if (CALL_P (insn))
865 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
866 and the address of the save area. */
869 expand_builtin_nonlocal_goto (tree exp)
871 tree t_label, t_save_area;
872 rtx r_label, r_save_area, r_fp, r_sp, insn;
874 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
877 t_label = CALL_EXPR_ARG (exp, 0);
878 t_save_area = CALL_EXPR_ARG (exp, 1);
880 r_label = expand_normal (t_label);
881 r_label = convert_memory_address (Pmode, r_label);
882 r_save_area = expand_normal (t_save_area);
883 r_save_area = convert_memory_address (Pmode, r_save_area);
884 r_fp = gen_rtx_MEM (Pmode, r_save_area);
885 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
886 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
888 current_function_has_nonlocal_goto = 1;
890 #ifdef HAVE_nonlocal_goto
891 /* ??? We no longer need to pass the static chain value, afaik. */
892 if (HAVE_nonlocal_goto)
893 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
897 r_label = copy_to_reg (r_label);
899 emit_insn (gen_rtx_CLOBBER (VOIDmode,
900 gen_rtx_MEM (BLKmode,
901 gen_rtx_SCRATCH (VOIDmode))));
903 emit_insn (gen_rtx_CLOBBER (VOIDmode,
904 gen_rtx_MEM (BLKmode,
905 hard_frame_pointer_rtx)));
907 /* Restore frame pointer for containing function.
908 This sets the actual hard register used for the frame pointer
909 to the location of the function's incoming static chain info.
910 The non-local goto handler will then adjust it to contain the
911 proper value and reload the argument pointer, if needed. */
912 emit_move_insn (hard_frame_pointer_rtx, r_fp);
913 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
915 /* USE of hard_frame_pointer_rtx added for consistency;
916 not clear if really needed. */
917 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
918 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
919 emit_indirect_jump (r_label);
922 /* Search backwards to the jump insn and mark it as a
924 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
928 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
929 const0_rtx, REG_NOTES (insn));
932 else if (CALL_P (insn))
939 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
940 (not all will be used on all machines) that was passed to __builtin_setjmp.
941 It updates the stack pointer in that block to correspond to the current
945 expand_builtin_update_setjmp_buf (rtx buf_addr)
947 enum machine_mode sa_mode = Pmode;
951 #ifdef HAVE_save_stack_nonlocal
952 if (HAVE_save_stack_nonlocal)
953 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
955 #ifdef STACK_SAVEAREA_MODE
956 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
960 = gen_rtx_MEM (sa_mode,
963 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
967 emit_insn (gen_setjmp ());
970 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
973 /* Expand a call to __builtin_prefetch. For a target that does not support
974 data prefetch, evaluate the memory address argument in case it has side
978 expand_builtin_prefetch (tree exp)
980 tree arg0, arg1, arg2;
984 if (!validate_arglist (exp, POINTER_TYPE, 0))
987 arg0 = CALL_EXPR_ARG (exp, 0);
989 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
990 zero (read) and argument 2 (locality) defaults to 3 (high degree of
992 nargs = call_expr_nargs (exp);
994 arg1 = CALL_EXPR_ARG (exp, 1);
996 arg1 = integer_zero_node;
998 arg2 = CALL_EXPR_ARG (exp, 2);
1000 arg2 = build_int_cst (NULL_TREE, 3);
1002 /* Argument 0 is an address. */
1003 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1005 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1006 if (TREE_CODE (arg1) != INTEGER_CST)
1008 error ("second argument to %<__builtin_prefetch%> must be a constant");
1009 arg1 = integer_zero_node;
1011 op1 = expand_normal (arg1);
1012 /* Argument 1 must be either zero or one. */
1013 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1015 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1020 /* Argument 2 (locality) must be a compile-time constant int. */
1021 if (TREE_CODE (arg2) != INTEGER_CST)
1023 error ("third argument to %<__builtin_prefetch%> must be a constant");
1024 arg2 = integer_zero_node;
1026 op2 = expand_normal (arg2);
1027 /* Argument 2 must be 0, 1, 2, or 3. */
1028 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1030 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1034 #ifdef HAVE_prefetch
1037 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1039 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1040 || (GET_MODE (op0) != Pmode))
1042 op0 = convert_memory_address (Pmode, op0);
1043 op0 = force_reg (Pmode, op0);
1045 emit_insn (gen_prefetch (op0, op1, op2));
1049 /* Don't do anything with direct references to volatile memory, but
1050 generate code to handle other side effects. */
1051 if (!MEM_P (op0) && side_effects_p (op0))
1055 /* Get a MEM rtx for expression EXP which is the address of an operand
1056 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1057 the maximum length of the block of memory that might be accessed or
1061 get_memory_rtx (tree exp, tree len)
1063 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1064 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1066 /* Get an expression we can use to find the attributes to assign to MEM.
1067 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1068 we can. First remove any nops. */
1069 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1070 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1071 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1072 exp = TREE_OPERAND (exp, 0);
1074 if (TREE_CODE (exp) == ADDR_EXPR)
1075 exp = TREE_OPERAND (exp, 0);
1076 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1077 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1081 /* Honor attributes derived from exp, except for the alias set
1082 (as builtin stringops may alias with anything) and the size
1083 (as stringops may access multiple array elements). */
1086 set_mem_attributes (mem, exp, 0);
1088 /* Allow the string and memory builtins to overflow from one
1089 field into another, see http://gcc.gnu.org/PR23561.
1090 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1091 memory accessed by the string or memory builtin will fit
1092 within the field. */
1093 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1095 tree mem_expr = MEM_EXPR (mem);
1096 HOST_WIDE_INT offset = -1, length = -1;
1099 while (TREE_CODE (inner) == ARRAY_REF
1100 || TREE_CODE (inner) == NOP_EXPR
1101 || TREE_CODE (inner) == CONVERT_EXPR
1102 || TREE_CODE (inner) == NON_LVALUE_EXPR
1103 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1104 || TREE_CODE (inner) == SAVE_EXPR)
1105 inner = TREE_OPERAND (inner, 0);
1107 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1109 if (MEM_OFFSET (mem)
1110 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1111 offset = INTVAL (MEM_OFFSET (mem));
1113 if (offset >= 0 && len && host_integerp (len, 0))
1114 length = tree_low_cst (len, 0);
1116 while (TREE_CODE (inner) == COMPONENT_REF)
1118 tree field = TREE_OPERAND (inner, 1);
1119 gcc_assert (! DECL_BIT_FIELD (field));
1120 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1121 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1124 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1125 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1128 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1129 /* If we can prove the memory starting at XEXP (mem, 0)
1130 and ending at XEXP (mem, 0) + LENGTH will fit into
1131 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1134 && offset + length <= size)
1139 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1140 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1141 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1149 mem_expr = TREE_OPERAND (mem_expr, 0);
1150 inner = TREE_OPERAND (inner, 0);
1153 if (mem_expr == NULL)
1155 if (mem_expr != MEM_EXPR (mem))
1157 set_mem_expr (mem, mem_expr);
1158 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1161 set_mem_alias_set (mem, 0);
1162 set_mem_size (mem, NULL_RTX);
1168 /* Built-in functions to perform an untyped call and return. */
1170 /* For each register that may be used for calling a function, this
1171 gives a mode used to copy the register's value. VOIDmode indicates
1172 the register is not used for calling a function. If the machine
1173 has register windows, this gives only the outbound registers.
1174 INCOMING_REGNO gives the corresponding inbound register. */
1175 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1177 /* For each register that may be used for returning values, this gives
1178 a mode used to copy the register's value. VOIDmode indicates the
1179 register is not used for returning values. If the machine has
1180 register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for calling a function, this
1185 gives the offset of that register into the block returned by
1186 __builtin_apply_args. 0 indicates that the register is not
1187 used for calling a function. */
1188 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1190 /* Return the size required for the block returned by __builtin_apply_args,
1191 and initialize apply_args_mode. */
1194 apply_args_size (void)
1196 static int size = -1;
1199 enum machine_mode mode;
1201 /* The values computed by this function never change. */
1204 /* The first value is the incoming arg-pointer. */
1205 size = GET_MODE_SIZE (Pmode);
1207 /* The second value is the structure value address unless this is
1208 passed as an "invisible" first argument. */
1209 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1210 size += GET_MODE_SIZE (Pmode);
1212 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1213 if (FUNCTION_ARG_REGNO_P (regno))
1215 mode = reg_raw_mode[regno];
1217 gcc_assert (mode != VOIDmode);
1219 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1220 if (size % align != 0)
1221 size = CEIL (size, align) * align;
1222 apply_args_reg_offset[regno] = size;
1223 size += GET_MODE_SIZE (mode);
1224 apply_args_mode[regno] = mode;
1228 apply_args_mode[regno] = VOIDmode;
1229 apply_args_reg_offset[regno] = 0;
1235 /* Return the size required for the block returned by __builtin_apply,
1236 and initialize apply_result_mode. */
1239 apply_result_size (void)
1241 static int size = -1;
1243 enum machine_mode mode;
1245 /* The values computed by this function never change. */
1250 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1251 if (FUNCTION_VALUE_REGNO_P (regno))
1253 mode = reg_raw_mode[regno];
1255 gcc_assert (mode != VOIDmode);
1257 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1258 if (size % align != 0)
1259 size = CEIL (size, align) * align;
1260 size += GET_MODE_SIZE (mode);
1261 apply_result_mode[regno] = mode;
1264 apply_result_mode[regno] = VOIDmode;
1266 /* Allow targets that use untyped_call and untyped_return to override
1267 the size so that machine-specific information can be stored here. */
1268 #ifdef APPLY_RESULT_SIZE
1269 size = APPLY_RESULT_SIZE;
1275 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1276 /* Create a vector describing the result block RESULT. If SAVEP is true,
1277 the result block is used to save the values; otherwise it is used to
1278 restore the values. */
1281 result_vector (int savep, rtx result)
1283 int regno, size, align, nelts;
1284 enum machine_mode mode;
1286 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1289 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1290 if ((mode = apply_result_mode[regno]) != VOIDmode)
1292 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1293 if (size % align != 0)
1294 size = CEIL (size, align) * align;
1295 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1296 mem = adjust_address (result, mode, size);
1297 savevec[nelts++] = (savep
1298 ? gen_rtx_SET (VOIDmode, mem, reg)
1299 : gen_rtx_SET (VOIDmode, reg, mem));
1300 size += GET_MODE_SIZE (mode);
1302 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1304 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1306 /* Save the state required to perform an untyped call with the same
1307 arguments as were passed to the current function. */
1310 expand_builtin_apply_args_1 (void)
1313 int size, align, regno;
1314 enum machine_mode mode;
1315 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1317 /* Create a block where the arg-pointer, structure value address,
1318 and argument registers can be saved. */
1319 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1321 /* Walk past the arg-pointer and structure value address. */
1322 size = GET_MODE_SIZE (Pmode);
1323 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1324 size += GET_MODE_SIZE (Pmode);
1326 /* Save each register used in calling a function to the block. */
1327 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1328 if ((mode = apply_args_mode[regno]) != VOIDmode)
1330 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1331 if (size % align != 0)
1332 size = CEIL (size, align) * align;
1334 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1336 emit_move_insn (adjust_address (registers, mode, size), tem);
1337 size += GET_MODE_SIZE (mode);
1340 /* Save the arg pointer to the block. */
1341 tem = copy_to_reg (virtual_incoming_args_rtx);
1342 #ifdef STACK_GROWS_DOWNWARD
1343 /* We need the pointer as the caller actually passed them to us, not
1344 as we might have pretended they were passed. Make sure it's a valid
1345 operand, as emit_move_insn isn't expected to handle a PLUS. */
1347 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1350 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1352 size = GET_MODE_SIZE (Pmode);
1354 /* Save the structure value address unless this is passed as an
1355 "invisible" first argument. */
1356 if (struct_incoming_value)
1358 emit_move_insn (adjust_address (registers, Pmode, size),
1359 copy_to_reg (struct_incoming_value));
1360 size += GET_MODE_SIZE (Pmode);
1363 /* Return the address of the block. */
1364 return copy_addr_to_reg (XEXP (registers, 0));
1367 /* __builtin_apply_args returns block of memory allocated on
1368 the stack into which is stored the arg pointer, structure
1369 value address, static chain, and all the registers that might
1370 possibly be used in performing a function call. The code is
1371 moved to the start of the function so the incoming values are
1375 expand_builtin_apply_args (void)
1377 /* Don't do __builtin_apply_args more than once in a function.
1378 Save the result of the first call and reuse it. */
1379 if (apply_args_value != 0)
1380 return apply_args_value;
1382 /* When this function is called, it means that registers must be
1383 saved on entry to this function. So we migrate the
1384 call to the first insn of this function. */
1389 temp = expand_builtin_apply_args_1 ();
1393 apply_args_value = temp;
1395 /* Put the insns after the NOTE that starts the function.
1396 If this is inside a start_sequence, make the outer-level insn
1397 chain current, so the code is placed at the start of the
1399 push_topmost_sequence ();
1400 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1401 pop_topmost_sequence ();
1406 /* Perform an untyped call and save the state required to perform an
1407 untyped return of whatever value was returned by the given function. */
1410 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1412 int size, align, regno;
1413 enum machine_mode mode;
1414 rtx incoming_args, result, reg, dest, src, call_insn;
1415 rtx old_stack_level = 0;
1416 rtx call_fusage = 0;
1417 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1419 arguments = convert_memory_address (Pmode, arguments);
1421 /* Create a block where the return registers can be saved. */
1422 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1424 /* Fetch the arg pointer from the ARGUMENTS block. */
1425 incoming_args = gen_reg_rtx (Pmode);
1426 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1427 #ifndef STACK_GROWS_DOWNWARD
1428 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1429 incoming_args, 0, OPTAB_LIB_WIDEN);
1432 /* Push a new argument block and copy the arguments. Do not allow
1433 the (potential) memcpy call below to interfere with our stack
1435 do_pending_stack_adjust ();
1438 /* Save the stack with nonlocal if available. */
1439 #ifdef HAVE_save_stack_nonlocal
1440 if (HAVE_save_stack_nonlocal)
1441 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1444 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1446 /* Allocate a block of memory onto the stack and copy the memory
1447 arguments to the outgoing arguments address. */
1448 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1449 dest = virtual_outgoing_args_rtx;
1450 #ifndef STACK_GROWS_DOWNWARD
1451 if (GET_CODE (argsize) == CONST_INT)
1452 dest = plus_constant (dest, -INTVAL (argsize));
1454 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1456 dest = gen_rtx_MEM (BLKmode, dest);
1457 set_mem_align (dest, PARM_BOUNDARY);
1458 src = gen_rtx_MEM (BLKmode, incoming_args);
1459 set_mem_align (src, PARM_BOUNDARY);
1460 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1462 /* Refer to the argument block. */
1464 arguments = gen_rtx_MEM (BLKmode, arguments);
1465 set_mem_align (arguments, PARM_BOUNDARY);
1467 /* Walk past the arg-pointer and structure value address. */
1468 size = GET_MODE_SIZE (Pmode);
1470 size += GET_MODE_SIZE (Pmode);
1472 /* Restore each of the registers previously saved. Make USE insns
1473 for each of these registers for use in making the call. */
1474 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1475 if ((mode = apply_args_mode[regno]) != VOIDmode)
1477 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1478 if (size % align != 0)
1479 size = CEIL (size, align) * align;
1480 reg = gen_rtx_REG (mode, regno);
1481 emit_move_insn (reg, adjust_address (arguments, mode, size));
1482 use_reg (&call_fusage, reg);
1483 size += GET_MODE_SIZE (mode);
1486 /* Restore the structure value address unless this is passed as an
1487 "invisible" first argument. */
1488 size = GET_MODE_SIZE (Pmode);
1491 rtx value = gen_reg_rtx (Pmode);
1492 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1493 emit_move_insn (struct_value, value);
1494 if (REG_P (struct_value))
1495 use_reg (&call_fusage, struct_value);
1496 size += GET_MODE_SIZE (Pmode);
1499 /* All arguments and registers used for the call are set up by now! */
1500 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1502 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1503 and we don't want to load it into a register as an optimization,
1504 because prepare_call_address already did it if it should be done. */
1505 if (GET_CODE (function) != SYMBOL_REF)
1506 function = memory_address (FUNCTION_MODE, function);
1508 /* Generate the actual call instruction and save the return value. */
1509 #ifdef HAVE_untyped_call
1510 if (HAVE_untyped_call)
1511 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1512 result, result_vector (1, result)));
1515 #ifdef HAVE_call_value
1516 if (HAVE_call_value)
1520 /* Locate the unique return register. It is not possible to
1521 express a call that sets more than one return register using
1522 call_value; use untyped_call for that. In fact, untyped_call
1523 only needs to save the return registers in the given block. */
1524 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1525 if ((mode = apply_result_mode[regno]) != VOIDmode)
1527 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1529 valreg = gen_rtx_REG (mode, regno);
1532 emit_call_insn (GEN_CALL_VALUE (valreg,
1533 gen_rtx_MEM (FUNCTION_MODE, function),
1534 const0_rtx, NULL_RTX, const0_rtx));
1536 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1542 /* Find the CALL insn we just emitted, and attach the register usage
1544 call_insn = last_call_insn ();
1545 add_function_usage_to (call_insn, call_fusage);
1547 /* Restore the stack. */
1548 #ifdef HAVE_save_stack_nonlocal
1549 if (HAVE_save_stack_nonlocal)
1550 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1553 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1557 /* Return the address of the result block. */
1558 result = copy_addr_to_reg (XEXP (result, 0));
1559 return convert_memory_address (ptr_mode, result);
1562 /* Perform an untyped return. */
1565 expand_builtin_return (rtx result)
1567 int size, align, regno;
1568 enum machine_mode mode;
1570 rtx call_fusage = 0;
1572 result = convert_memory_address (Pmode, result);
1574 apply_result_size ();
1575 result = gen_rtx_MEM (BLKmode, result);
1577 #ifdef HAVE_untyped_return
1578 if (HAVE_untyped_return)
1580 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1586 /* Restore the return value and note that each value is used. */
1588 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1589 if ((mode = apply_result_mode[regno]) != VOIDmode)
1591 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1592 if (size % align != 0)
1593 size = CEIL (size, align) * align;
1594 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1595 emit_move_insn (reg, adjust_address (result, mode, size));
1597 push_to_sequence (call_fusage);
1598 emit_insn (gen_rtx_USE (VOIDmode, reg));
1599 call_fusage = get_insns ();
1601 size += GET_MODE_SIZE (mode);
1604 /* Put the USE insns before the return. */
1605 emit_insn (call_fusage);
1607 /* Return whatever values was restored by jumping directly to the end
1609 expand_naked_return ();
1612 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1614 static enum type_class
1615 type_to_class (tree type)
1617 switch (TREE_CODE (type))
1619 case VOID_TYPE: return void_type_class;
1620 case INTEGER_TYPE: return integer_type_class;
1621 case ENUMERAL_TYPE: return enumeral_type_class;
1622 case BOOLEAN_TYPE: return boolean_type_class;
1623 case POINTER_TYPE: return pointer_type_class;
1624 case REFERENCE_TYPE: return reference_type_class;
1625 case OFFSET_TYPE: return offset_type_class;
1626 case REAL_TYPE: return real_type_class;
1627 case COMPLEX_TYPE: return complex_type_class;
1628 case FUNCTION_TYPE: return function_type_class;
1629 case METHOD_TYPE: return method_type_class;
1630 case RECORD_TYPE: return record_type_class;
1632 case QUAL_UNION_TYPE: return union_type_class;
1633 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1634 ? string_type_class : array_type_class);
1635 case LANG_TYPE: return lang_type_class;
1636 default: return no_type_class;
1640 /* Expand a call EXP to __builtin_classify_type. */
1643 expand_builtin_classify_type (tree exp)
1645 if (call_expr_nargs (exp))
1646 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1647 return GEN_INT (no_type_class);
1650 /* This helper macro, meant to be used in mathfn_built_in below,
1651 determines which among a set of three builtin math functions is
1652 appropriate for a given type mode. The `F' and `L' cases are
1653 automatically generated from the `double' case. */
1654 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1655 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1656 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1657 fcodel = BUILT_IN_MATHFN##L ; break;
1658 /* Similar to above, but appends _R after any F/L suffix. */
1659 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1660 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1661 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1662 fcodel = BUILT_IN_MATHFN##L_R ; break;
1664 /* Return mathematic function equivalent to FN but operating directly
1665 on TYPE, if available. If we can't do the conversion, return zero. */
1667 mathfn_built_in (tree type, enum built_in_function fn)
1669 enum built_in_function fcode, fcodef, fcodel;
1673 CASE_MATHFN (BUILT_IN_ACOS)
1674 CASE_MATHFN (BUILT_IN_ACOSH)
1675 CASE_MATHFN (BUILT_IN_ASIN)
1676 CASE_MATHFN (BUILT_IN_ASINH)
1677 CASE_MATHFN (BUILT_IN_ATAN)
1678 CASE_MATHFN (BUILT_IN_ATAN2)
1679 CASE_MATHFN (BUILT_IN_ATANH)
1680 CASE_MATHFN (BUILT_IN_CBRT)
1681 CASE_MATHFN (BUILT_IN_CEIL)
1682 CASE_MATHFN (BUILT_IN_CEXPI)
1683 CASE_MATHFN (BUILT_IN_COPYSIGN)
1684 CASE_MATHFN (BUILT_IN_COS)
1685 CASE_MATHFN (BUILT_IN_COSH)
1686 CASE_MATHFN (BUILT_IN_DREM)
1687 CASE_MATHFN (BUILT_IN_ERF)
1688 CASE_MATHFN (BUILT_IN_ERFC)
1689 CASE_MATHFN (BUILT_IN_EXP)
1690 CASE_MATHFN (BUILT_IN_EXP10)
1691 CASE_MATHFN (BUILT_IN_EXP2)
1692 CASE_MATHFN (BUILT_IN_EXPM1)
1693 CASE_MATHFN (BUILT_IN_FABS)
1694 CASE_MATHFN (BUILT_IN_FDIM)
1695 CASE_MATHFN (BUILT_IN_FLOOR)
1696 CASE_MATHFN (BUILT_IN_FMA)
1697 CASE_MATHFN (BUILT_IN_FMAX)
1698 CASE_MATHFN (BUILT_IN_FMIN)
1699 CASE_MATHFN (BUILT_IN_FMOD)
1700 CASE_MATHFN (BUILT_IN_FREXP)
1701 CASE_MATHFN (BUILT_IN_GAMMA)
1702 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1703 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1704 CASE_MATHFN (BUILT_IN_HYPOT)
1705 CASE_MATHFN (BUILT_IN_ILOGB)
1706 CASE_MATHFN (BUILT_IN_INF)
1707 CASE_MATHFN (BUILT_IN_ISINF)
1708 CASE_MATHFN (BUILT_IN_J0)
1709 CASE_MATHFN (BUILT_IN_J1)
1710 CASE_MATHFN (BUILT_IN_JN)
1711 CASE_MATHFN (BUILT_IN_LCEIL)
1712 CASE_MATHFN (BUILT_IN_LDEXP)
1713 CASE_MATHFN (BUILT_IN_LFLOOR)
1714 CASE_MATHFN (BUILT_IN_LGAMMA)
1715 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1716 CASE_MATHFN (BUILT_IN_LLCEIL)
1717 CASE_MATHFN (BUILT_IN_LLFLOOR)
1718 CASE_MATHFN (BUILT_IN_LLRINT)
1719 CASE_MATHFN (BUILT_IN_LLROUND)
1720 CASE_MATHFN (BUILT_IN_LOG)
1721 CASE_MATHFN (BUILT_IN_LOG10)
1722 CASE_MATHFN (BUILT_IN_LOG1P)
1723 CASE_MATHFN (BUILT_IN_LOG2)
1724 CASE_MATHFN (BUILT_IN_LOGB)
1725 CASE_MATHFN (BUILT_IN_LRINT)
1726 CASE_MATHFN (BUILT_IN_LROUND)
1727 CASE_MATHFN (BUILT_IN_MODF)
1728 CASE_MATHFN (BUILT_IN_NAN)
1729 CASE_MATHFN (BUILT_IN_NANS)
1730 CASE_MATHFN (BUILT_IN_NEARBYINT)
1731 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1732 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1733 CASE_MATHFN (BUILT_IN_POW)
1734 CASE_MATHFN (BUILT_IN_POWI)
1735 CASE_MATHFN (BUILT_IN_POW10)
1736 CASE_MATHFN (BUILT_IN_REMAINDER)
1737 CASE_MATHFN (BUILT_IN_REMQUO)
1738 CASE_MATHFN (BUILT_IN_RINT)
1739 CASE_MATHFN (BUILT_IN_ROUND)
1740 CASE_MATHFN (BUILT_IN_SCALB)
1741 CASE_MATHFN (BUILT_IN_SCALBLN)
1742 CASE_MATHFN (BUILT_IN_SCALBN)
1743 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1744 CASE_MATHFN (BUILT_IN_SIN)
1745 CASE_MATHFN (BUILT_IN_SINCOS)
1746 CASE_MATHFN (BUILT_IN_SINH)
1747 CASE_MATHFN (BUILT_IN_SQRT)
1748 CASE_MATHFN (BUILT_IN_TAN)
1749 CASE_MATHFN (BUILT_IN_TANH)
1750 CASE_MATHFN (BUILT_IN_TGAMMA)
1751 CASE_MATHFN (BUILT_IN_TRUNC)
1752 CASE_MATHFN (BUILT_IN_Y0)
1753 CASE_MATHFN (BUILT_IN_Y1)
1754 CASE_MATHFN (BUILT_IN_YN)
1760 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1761 return implicit_built_in_decls[fcode];
1762 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1763 return implicit_built_in_decls[fcodef];
1764 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1765 return implicit_built_in_decls[fcodel];
1770 /* If errno must be maintained, expand the RTL to check if the result,
1771 TARGET, of a built-in function call, EXP, is NaN, and if so set
1775 expand_errno_check (tree exp, rtx target)
1777 rtx lab = gen_label_rtx ();
1779 /* Test the result; if it is NaN, set errno=EDOM because
1780 the argument was not in the domain. */
1781 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1785 /* If this built-in doesn't throw an exception, set errno directly. */
1786 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1788 #ifdef GEN_ERRNO_RTX
1789 rtx errno_rtx = GEN_ERRNO_RTX;
1792 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1794 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1800 /* We can't set errno=EDOM directly; let the library call do it.
1801 Pop the arguments right away in case the call gets deleted. */
1803 expand_call (exp, target, 0);
1808 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1809 Return NULL_RTX if a normal call should be emitted rather than expanding
1810 the function in-line. EXP is the expression that is a call to the builtin
1811 function; if convenient, the result should be placed in TARGET.
1812 SUBTARGET may be used as the target for computing one of EXP's operands. */
1815 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1817 optab builtin_optab;
1818 rtx op0, insns, before_call;
1819 tree fndecl = get_callee_fndecl (exp);
1820 enum machine_mode mode;
1821 bool errno_set = false;
1824 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1827 arg = CALL_EXPR_ARG (exp, 0);
1829 switch (DECL_FUNCTION_CODE (fndecl))
1831 CASE_FLT_FN (BUILT_IN_SQRT):
1832 errno_set = ! tree_expr_nonnegative_p (arg);
1833 builtin_optab = sqrt_optab;
1835 CASE_FLT_FN (BUILT_IN_EXP):
1836 errno_set = true; builtin_optab = exp_optab; break;
1837 CASE_FLT_FN (BUILT_IN_EXP10):
1838 CASE_FLT_FN (BUILT_IN_POW10):
1839 errno_set = true; builtin_optab = exp10_optab; break;
1840 CASE_FLT_FN (BUILT_IN_EXP2):
1841 errno_set = true; builtin_optab = exp2_optab; break;
1842 CASE_FLT_FN (BUILT_IN_EXPM1):
1843 errno_set = true; builtin_optab = expm1_optab; break;
1844 CASE_FLT_FN (BUILT_IN_LOGB):
1845 errno_set = true; builtin_optab = logb_optab; break;
1846 CASE_FLT_FN (BUILT_IN_LOG):
1847 errno_set = true; builtin_optab = log_optab; break;
1848 CASE_FLT_FN (BUILT_IN_LOG10):
1849 errno_set = true; builtin_optab = log10_optab; break;
1850 CASE_FLT_FN (BUILT_IN_LOG2):
1851 errno_set = true; builtin_optab = log2_optab; break;
1852 CASE_FLT_FN (BUILT_IN_LOG1P):
1853 errno_set = true; builtin_optab = log1p_optab; break;
1854 CASE_FLT_FN (BUILT_IN_ASIN):
1855 builtin_optab = asin_optab; break;
1856 CASE_FLT_FN (BUILT_IN_ACOS):
1857 builtin_optab = acos_optab; break;
1858 CASE_FLT_FN (BUILT_IN_TAN):
1859 builtin_optab = tan_optab; break;
1860 CASE_FLT_FN (BUILT_IN_ATAN):
1861 builtin_optab = atan_optab; break;
1862 CASE_FLT_FN (BUILT_IN_FLOOR):
1863 builtin_optab = floor_optab; break;
1864 CASE_FLT_FN (BUILT_IN_CEIL):
1865 builtin_optab = ceil_optab; break;
1866 CASE_FLT_FN (BUILT_IN_TRUNC):
1867 builtin_optab = btrunc_optab; break;
1868 CASE_FLT_FN (BUILT_IN_ROUND):
1869 builtin_optab = round_optab; break;
1870 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1871 builtin_optab = nearbyint_optab;
1872 if (flag_trapping_math)
1874 /* Else fallthrough and expand as rint. */
1875 CASE_FLT_FN (BUILT_IN_RINT):
1876 builtin_optab = rint_optab; break;
1881 /* Make a suitable register to place result in. */
1882 mode = TYPE_MODE (TREE_TYPE (exp));
1884 if (! flag_errno_math || ! HONOR_NANS (mode))
1887 /* Before working hard, check whether the instruction is available. */
1888 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1890 target = gen_reg_rtx (mode);
1892 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1893 need to expand the argument again. This way, we will not perform
1894 side-effects more the once. */
1895 narg = builtin_save_expr (arg);
1899 exp = build_call_expr (fndecl, 1, arg);
1902 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1906 /* Compute into TARGET.
1907 Set TARGET to wherever the result comes back. */
1908 target = expand_unop (mode, builtin_optab, op0, target, 0);
1913 expand_errno_check (exp, target);
1915 /* Output the entire sequence. */
1916 insns = get_insns ();
1922 /* If we were unable to expand via the builtin, stop the sequence
1923 (without outputting the insns) and call to the library function
1924 with the stabilized argument list. */
1928 before_call = get_last_insn ();
1930 target = expand_call (exp, target, target == const0_rtx);
1932 /* If this is a sqrt operation and we don't care about errno, try to
1933 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1934 This allows the semantics of the libcall to be visible to the RTL
1936 if (builtin_optab == sqrt_optab && !errno_set)
1938 /* Search backwards through the insns emitted by expand_call looking
1939 for the instruction with the REG_RETVAL note. */
1940 rtx last = get_last_insn ();
1941 while (last != before_call)
1943 if (find_reg_note (last, REG_RETVAL, NULL))
1945 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1946 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1947 two elements, i.e. symbol_ref(sqrt) and the operand. */
1949 && GET_CODE (note) == EXPR_LIST
1950 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1951 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1952 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1954 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1955 /* Check operand is a register with expected mode. */
1958 && GET_MODE (operand) == mode)
1960 /* Replace the REG_EQUAL note with a SQRT rtx. */
1961 rtx equiv = gen_rtx_SQRT (mode, operand);
1962 set_unique_reg_note (last, REG_EQUAL, equiv);
1967 last = PREV_INSN (last);
1974 /* Expand a call to the builtin binary math functions (pow and atan2).
1975 Return NULL_RTX if a normal call should be emitted rather than expanding the
1976 function in-line. EXP is the expression that is a call to the builtin
1977 function; if convenient, the result should be placed in TARGET.
1978 SUBTARGET may be used as the target for computing one of EXP's
1982 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1984 optab builtin_optab;
1985 rtx op0, op1, insns;
1986 int op1_type = REAL_TYPE;
1987 tree fndecl = get_callee_fndecl (exp);
1988 tree arg0, arg1, narg;
1989 enum machine_mode mode;
1990 bool errno_set = true;
1993 switch (DECL_FUNCTION_CODE (fndecl))
1995 CASE_FLT_FN (BUILT_IN_SCALBN):
1996 CASE_FLT_FN (BUILT_IN_SCALBLN):
1997 CASE_FLT_FN (BUILT_IN_LDEXP):
1998 op1_type = INTEGER_TYPE;
2003 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2006 arg0 = CALL_EXPR_ARG (exp, 0);
2007 arg1 = CALL_EXPR_ARG (exp, 1);
2009 switch (DECL_FUNCTION_CODE (fndecl))
2011 CASE_FLT_FN (BUILT_IN_POW):
2012 builtin_optab = pow_optab; break;
2013 CASE_FLT_FN (BUILT_IN_ATAN2):
2014 builtin_optab = atan2_optab; break;
2015 CASE_FLT_FN (BUILT_IN_SCALB):
2016 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2018 builtin_optab = scalb_optab; break;
2019 CASE_FLT_FN (BUILT_IN_SCALBN):
2020 CASE_FLT_FN (BUILT_IN_SCALBLN):
2021 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2023 /* Fall through... */
2024 CASE_FLT_FN (BUILT_IN_LDEXP):
2025 builtin_optab = ldexp_optab; break;
2026 CASE_FLT_FN (BUILT_IN_FMOD):
2027 builtin_optab = fmod_optab; break;
2028 CASE_FLT_FN (BUILT_IN_REMAINDER):
2029 CASE_FLT_FN (BUILT_IN_DREM):
2030 builtin_optab = remainder_optab; break;
2035 /* Make a suitable register to place result in. */
2036 mode = TYPE_MODE (TREE_TYPE (exp));
2038 /* Before working hard, check whether the instruction is available. */
2039 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2042 target = gen_reg_rtx (mode);
2044 if (! flag_errno_math || ! HONOR_NANS (mode))
2047 /* Always stabilize the argument list. */
2048 narg = builtin_save_expr (arg1);
2054 narg = builtin_save_expr (arg0);
2062 exp = build_call_expr (fndecl, 2, arg0, arg1);
2064 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2065 op1 = expand_normal (arg1);
2069 /* Compute into TARGET.
2070 Set TARGET to wherever the result comes back. */
2071 target = expand_binop (mode, builtin_optab, op0, op1,
2072 target, 0, OPTAB_DIRECT);
2074 /* If we were unable to expand via the builtin, stop the sequence
2075 (without outputting the insns) and call to the library function
2076 with the stabilized argument list. */
2080 return expand_call (exp, target, target == const0_rtx);
2084 expand_errno_check (exp, target);
2086 /* Output the entire sequence. */
2087 insns = get_insns ();
2094 /* Expand a call to the builtin sin and cos math functions.
2095 Return NULL_RTX if a normal call should be emitted rather than expanding the
2096 function in-line. EXP is the expression that is a call to the builtin
2097 function; if convenient, the result should be placed in TARGET.
2098 SUBTARGET may be used as the target for computing one of EXP's
2102 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2104 optab builtin_optab;
2106 tree fndecl = get_callee_fndecl (exp);
2107 enum machine_mode mode;
2110 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2113 arg = CALL_EXPR_ARG (exp, 0);
2115 switch (DECL_FUNCTION_CODE (fndecl))
2117 CASE_FLT_FN (BUILT_IN_SIN):
2118 CASE_FLT_FN (BUILT_IN_COS):
2119 builtin_optab = sincos_optab; break;
2124 /* Make a suitable register to place result in. */
2125 mode = TYPE_MODE (TREE_TYPE (exp));
2127 /* Check if sincos insn is available, otherwise fallback
2128 to sin or cos insn. */
2129 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2130 switch (DECL_FUNCTION_CODE (fndecl))
2132 CASE_FLT_FN (BUILT_IN_SIN):
2133 builtin_optab = sin_optab; break;
2134 CASE_FLT_FN (BUILT_IN_COS):
2135 builtin_optab = cos_optab; break;
2140 /* Before working hard, check whether the instruction is available. */
2141 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2143 target = gen_reg_rtx (mode);
2145 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2146 need to expand the argument again. This way, we will not perform
2147 side-effects more the once. */
2148 narg = save_expr (arg);
2152 exp = build_call_expr (fndecl, 1, arg);
2155 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2159 /* Compute into TARGET.
2160 Set TARGET to wherever the result comes back. */
2161 if (builtin_optab == sincos_optab)
2165 switch (DECL_FUNCTION_CODE (fndecl))
2167 CASE_FLT_FN (BUILT_IN_SIN):
2168 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2170 CASE_FLT_FN (BUILT_IN_COS):
2171 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2176 gcc_assert (result);
2180 target = expand_unop (mode, builtin_optab, op0, target, 0);
2185 /* Output the entire sequence. */
2186 insns = get_insns ();
2192 /* If we were unable to expand via the builtin, stop the sequence
2193 (without outputting the insns) and call to the library function
2194 with the stabilized argument list. */
2198 target = expand_call (exp, target, target == const0_rtx);
2203 /* Expand a call to one of the builtin math functions that operate on
2204 floating point argument and output an integer result (ilogb, isinf,
2206 Return 0 if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's operands. */
2212 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2214 optab builtin_optab;
2215 enum insn_code icode;
2217 tree fndecl = get_callee_fndecl (exp);
2218 enum machine_mode mode;
2219 bool errno_set = false;
2222 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2225 arg = CALL_EXPR_ARG (exp, 0);
2227 switch (DECL_FUNCTION_CODE (fndecl))
2229 CASE_FLT_FN (BUILT_IN_ILOGB):
2230 errno_set = true; builtin_optab = ilogb_optab; break;
2231 CASE_FLT_FN (BUILT_IN_ISINF):
2232 builtin_optab = isinf_optab; break;
2237 /* There's no easy way to detect the case we need to set EDOM. */
2238 if (flag_errno_math && errno_set)
2241 /* Optab mode depends on the mode of the input argument. */
2242 mode = TYPE_MODE (TREE_TYPE (arg));
2244 icode = builtin_optab->handlers[(int) mode].insn_code;
2246 /* Before working hard, check whether the instruction is available. */
2247 if (icode != CODE_FOR_nothing)
2249 /* Make a suitable register to place result in. */
2251 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2252 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2254 gcc_assert (insn_data[icode].operand[0].predicate
2255 (target, GET_MODE (target)));
2257 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2258 need to expand the argument again. This way, we will not perform
2259 side-effects more the once. */
2260 narg = builtin_save_expr (arg);
2264 exp = build_call_expr (fndecl, 1, arg);
2267 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2269 if (mode != GET_MODE (op0))
2270 op0 = convert_to_mode (mode, op0, 0);
2272 /* Compute into TARGET.
2273 Set TARGET to wherever the result comes back. */
2274 emit_unop_insn (icode, target, op0, UNKNOWN);
2278 /* If there is no optab, try generic code. */
2279 switch (DECL_FUNCTION_CODE (fndecl))
2283 CASE_FLT_FN (BUILT_IN_ISINF):
2285 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2286 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2287 tree const type = TREE_TYPE (arg);
2291 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2292 real_from_string (&r, buf);
2293 result = build_call_expr (isgr_fn, 2,
2294 fold_build1 (ABS_EXPR, type, arg),
2295 build_real (type, r));
2296 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2302 target = expand_call (exp, target, target == const0_rtx);
2307 /* Expand a call to the builtin sincos math function.
2308 Return NULL_RTX if a normal call should be emitted rather than expanding the
2309 function in-line. EXP is the expression that is a call to the builtin
2313 expand_builtin_sincos (tree exp)
2315 rtx op0, op1, op2, target1, target2;
2316 enum machine_mode mode;
2317 tree arg, sinp, cosp;
2320 if (!validate_arglist (exp, REAL_TYPE,
2321 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2324 arg = CALL_EXPR_ARG (exp, 0);
2325 sinp = CALL_EXPR_ARG (exp, 1);
2326 cosp = CALL_EXPR_ARG (exp, 2);
2328 /* Make a suitable register to place result in. */
2329 mode = TYPE_MODE (TREE_TYPE (arg));
2331 /* Check if sincos insn is available, otherwise emit the call. */
2332 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2335 target1 = gen_reg_rtx (mode);
2336 target2 = gen_reg_rtx (mode);
2338 op0 = expand_normal (arg);
2339 op1 = expand_normal (build_fold_indirect_ref (sinp));
2340 op2 = expand_normal (build_fold_indirect_ref (cosp));
2342 /* Compute into target1 and target2.
2343 Set TARGET to wherever the result comes back. */
2344 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2345 gcc_assert (result);
2347 /* Move target1 and target2 to the memory locations indicated
2349 emit_move_insn (op1, target1);
2350 emit_move_insn (op2, target2);
2355 /* Expand a call to the internal cexpi builtin to the sincos math function.
2356 EXP is the expression that is a call to the builtin function; if convenient,
2357 the result should be placed in TARGET. SUBTARGET may be used as the target
2358 for computing one of EXP's operands. */
2361 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2363 tree fndecl = get_callee_fndecl (exp);
2365 enum machine_mode mode;
2368 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2371 arg = CALL_EXPR_ARG (exp, 0);
2372 type = TREE_TYPE (arg);
2373 mode = TYPE_MODE (TREE_TYPE (arg));
2375 /* Try expanding via a sincos optab, fall back to emitting a libcall
2376 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2377 is only generated from sincos, cexp or if we have either of them. */
2378 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2380 op1 = gen_reg_rtx (mode);
2381 op2 = gen_reg_rtx (mode);
2383 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2385 /* Compute into op1 and op2. */
2386 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2388 else if (TARGET_HAS_SINCOS)
2390 tree call, fn = NULL_TREE;
2394 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2395 fn = built_in_decls[BUILT_IN_SINCOSF];
2396 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2397 fn = built_in_decls[BUILT_IN_SINCOS];
2398 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2399 fn = built_in_decls[BUILT_IN_SINCOSL];
2403 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2404 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2405 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2406 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2407 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2408 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2410 /* Make sure not to fold the sincos call again. */
2411 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2412 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2413 call, 3, arg, top1, top2));
2417 tree call, fn = NULL_TREE, narg;
2418 tree ctype = build_complex_type (type);
2420 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2421 fn = built_in_decls[BUILT_IN_CEXPF];
2422 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2423 fn = built_in_decls[BUILT_IN_CEXP];
2424 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2425 fn = built_in_decls[BUILT_IN_CEXPL];
2429 /* If we don't have a decl for cexp create one. This is the
2430 friendliest fallback if the user calls __builtin_cexpi
2431 without full target C99 function support. */
2432 if (fn == NULL_TREE)
2435 const char *name = NULL;
2437 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2439 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2441 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2444 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2445 fn = build_fn_decl (name, fntype);
2448 narg = fold_build2 (COMPLEX_EXPR, ctype,
2449 build_real (type, dconst0), arg);
2451 /* Make sure not to fold the cexp call again. */
2452 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2453 return expand_expr (build_call_nary (ctype, call, 1, narg),
2454 target, VOIDmode, EXPAND_NORMAL);
2457 /* Now build the proper return type. */
2458 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2459 make_tree (TREE_TYPE (arg), op2),
2460 make_tree (TREE_TYPE (arg), op1)),
2461 target, VOIDmode, EXPAND_NORMAL);
2464 /* Expand a call to one of the builtin rounding functions gcc defines
2465 as an extension (lfloor and lceil). As these are gcc extensions we
2466 do not need to worry about setting errno to EDOM.
2467 If expanding via optab fails, lower expression to (int)(floor(x)).
2468 EXP is the expression that is a call to the builtin function;
2469 if convenient, the result should be placed in TARGET. SUBTARGET may
2470 be used as the target for computing one of EXP's operands. */
2473 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2475 convert_optab builtin_optab;
2476 rtx op0, insns, tmp;
2477 tree fndecl = get_callee_fndecl (exp);
2478 enum built_in_function fallback_fn;
2479 tree fallback_fndecl;
2480 enum machine_mode mode;
2483 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2486 arg = CALL_EXPR_ARG (exp, 0);
2488 switch (DECL_FUNCTION_CODE (fndecl))
2490 CASE_FLT_FN (BUILT_IN_LCEIL):
2491 CASE_FLT_FN (BUILT_IN_LLCEIL):
2492 builtin_optab = lceil_optab;
2493 fallback_fn = BUILT_IN_CEIL;
2496 CASE_FLT_FN (BUILT_IN_LFLOOR):
2497 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2498 builtin_optab = lfloor_optab;
2499 fallback_fn = BUILT_IN_FLOOR;
2506 /* Make a suitable register to place result in. */
2507 mode = TYPE_MODE (TREE_TYPE (exp));
2509 target = gen_reg_rtx (mode);
2511 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2512 need to expand the argument again. This way, we will not perform
2513 side-effects more the once. */
2514 narg = builtin_save_expr (arg);
2518 exp = build_call_expr (fndecl, 1, arg);
2521 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2525 /* Compute into TARGET. */
2526 if (expand_sfix_optab (target, op0, builtin_optab))
2528 /* Output the entire sequence. */
2529 insns = get_insns ();
2535 /* If we were unable to expand via the builtin, stop the sequence
2536 (without outputting the insns). */
2539 /* Fall back to floating point rounding optab. */
2540 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2542 /* For non-C99 targets we may end up without a fallback fndecl here
2543 if the user called __builtin_lfloor directly. In this case emit
2544 a call to the floor/ceil variants nevertheless. This should result
2545 in the best user experience for not full C99 targets. */
2546 if (fallback_fndecl == NULL_TREE)
2549 const char *name = NULL;
2551 switch (DECL_FUNCTION_CODE (fndecl))
2553 case BUILT_IN_LCEIL:
2554 case BUILT_IN_LLCEIL:
2557 case BUILT_IN_LCEILF:
2558 case BUILT_IN_LLCEILF:
2561 case BUILT_IN_LCEILL:
2562 case BUILT_IN_LLCEILL:
2565 case BUILT_IN_LFLOOR:
2566 case BUILT_IN_LLFLOOR:
2569 case BUILT_IN_LFLOORF:
2570 case BUILT_IN_LLFLOORF:
2573 case BUILT_IN_LFLOORL:
2574 case BUILT_IN_LLFLOORL:
2581 fntype = build_function_type_list (TREE_TYPE (arg),
2582 TREE_TYPE (arg), NULL_TREE);
2583 fallback_fndecl = build_fn_decl (name, fntype);
2586 exp = build_call_expr (fallback_fndecl, 1, arg);
2588 tmp = expand_normal (exp);
2590 /* Truncate the result of floating point optab to integer
2591 via expand_fix (). */
2592 target = gen_reg_rtx (mode);
2593 expand_fix (target, tmp, 0);
2598 /* Expand a call to one of the builtin math functions doing integer
2600 Return 0 if a normal call should be emitted rather than expanding the
2601 function in-line. EXP is the expression that is a call to the builtin
2602 function; if convenient, the result should be placed in TARGET.
2603 SUBTARGET may be used as the target for computing one of EXP's operands. */
2606 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2608 convert_optab builtin_optab;
2610 tree fndecl = get_callee_fndecl (exp);
2612 enum machine_mode mode;
2614 /* There's no easy way to detect the case we need to set EDOM. */
2615 if (flag_errno_math)
2618 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2621 arg = CALL_EXPR_ARG (exp, 0);
2623 switch (DECL_FUNCTION_CODE (fndecl))
2625 CASE_FLT_FN (BUILT_IN_LRINT):
2626 CASE_FLT_FN (BUILT_IN_LLRINT):
2627 builtin_optab = lrint_optab; break;
2628 CASE_FLT_FN (BUILT_IN_LROUND):
2629 CASE_FLT_FN (BUILT_IN_LLROUND):
2630 builtin_optab = lround_optab; break;
2635 /* Make a suitable register to place result in. */
2636 mode = TYPE_MODE (TREE_TYPE (exp));
2638 target = gen_reg_rtx (mode);
2640 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2641 need to expand the argument again. This way, we will not perform
2642 side-effects more the once. */
2643 narg = builtin_save_expr (arg);
2647 exp = build_call_expr (fndecl, 1, arg);
2650 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2654 if (expand_sfix_optab (target, op0, builtin_optab))
2656 /* Output the entire sequence. */
2657 insns = get_insns ();
2663 /* If we were unable to expand via the builtin, stop the sequence
2664 (without outputting the insns) and call to the library function
2665 with the stabilized argument list. */
2668 target = expand_call (exp, target, target == const0_rtx);
2673 /* To evaluate powi(x,n), the floating point value x raised to the
2674 constant integer exponent n, we use a hybrid algorithm that
2675 combines the "window method" with look-up tables. For an
2676 introduction to exponentiation algorithms and "addition chains",
2677 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2678 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2679 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2680 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2682 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2683 multiplications to inline before calling the system library's pow
2684 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2685 so this default never requires calling pow, powf or powl. */
2687 #ifndef POWI_MAX_MULTS
2688 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2691 /* The size of the "optimal power tree" lookup table. All
2692 exponents less than this value are simply looked up in the
2693 powi_table below. This threshold is also used to size the
2694 cache of pseudo registers that hold intermediate results. */
2695 #define POWI_TABLE_SIZE 256
2697 /* The size, in bits of the window, used in the "window method"
2698 exponentiation algorithm. This is equivalent to a radix of
2699 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2700 #define POWI_WINDOW_SIZE 3
2702 /* The following table is an efficient representation of an
2703 "optimal power tree". For each value, i, the corresponding
2704 value, j, in the table states than an optimal evaluation
2705 sequence for calculating pow(x,i) can be found by evaluating
2706 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2707 100 integers is given in Knuth's "Seminumerical algorithms". */
2709 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2711 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2712 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2713 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2714 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2715 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2716 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2717 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2718 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2719 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2720 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2721 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2722 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2723 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2724 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2725 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2726 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2727 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2728 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2729 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2730 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2731 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2732 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2733 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2734 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2735 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2736 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2737 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2738 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2739 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2740 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2741 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2742 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2746 /* Return the number of multiplications required to calculate
2747 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2748 subroutine of powi_cost. CACHE is an array indicating
2749 which exponents have already been calculated. */
2752 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2754 /* If we've already calculated this exponent, then this evaluation
2755 doesn't require any additional multiplications. */
2760 return powi_lookup_cost (n - powi_table[n], cache)
2761 + powi_lookup_cost (powi_table[n], cache) + 1;
2764 /* Return the number of multiplications required to calculate
2765 powi(x,n) for an arbitrary x, given the exponent N. This
2766 function needs to be kept in sync with expand_powi below. */
2769 powi_cost (HOST_WIDE_INT n)
2771 bool cache[POWI_TABLE_SIZE];
2772 unsigned HOST_WIDE_INT digit;
2773 unsigned HOST_WIDE_INT val;
2779 /* Ignore the reciprocal when calculating the cost. */
2780 val = (n < 0) ? -n : n;
2782 /* Initialize the exponent cache. */
2783 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2788 while (val >= POWI_TABLE_SIZE)
2792 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2793 result += powi_lookup_cost (digit, cache)
2794 + POWI_WINDOW_SIZE + 1;
2795 val >>= POWI_WINDOW_SIZE;
2804 return result + powi_lookup_cost (val, cache);
2807 /* Recursive subroutine of expand_powi. This function takes the array,
2808 CACHE, of already calculated exponents and an exponent N and returns
2809 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2812 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2814 unsigned HOST_WIDE_INT digit;
2818 if (n < POWI_TABLE_SIZE)
2823 target = gen_reg_rtx (mode);
2826 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2827 op1 = expand_powi_1 (mode, powi_table[n], cache);
2831 target = gen_reg_rtx (mode);
2832 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2833 op0 = expand_powi_1 (mode, n - digit, cache);
2834 op1 = expand_powi_1 (mode, digit, cache);
2838 target = gen_reg_rtx (mode);
2839 op0 = expand_powi_1 (mode, n >> 1, cache);
2843 result = expand_mult (mode, op0, op1, target, 0);
2844 if (result != target)
2845 emit_move_insn (target, result);
2849 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2850 floating point operand in mode MODE, and N is the exponent. This
2851 function needs to be kept in sync with powi_cost above. */
2854 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2856 unsigned HOST_WIDE_INT val;
2857 rtx cache[POWI_TABLE_SIZE];
2861 return CONST1_RTX (mode);
2863 val = (n < 0) ? -n : n;
2865 memset (cache, 0, sizeof (cache));
2868 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2870 /* If the original exponent was negative, reciprocate the result. */
2872 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2873 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2878 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2879 a normal call should be emitted rather than expanding the function
2880 in-line. EXP is the expression that is a call to the builtin
2881 function; if convenient, the result should be placed in TARGET. */
2884 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2888 tree type = TREE_TYPE (exp);
2889 REAL_VALUE_TYPE cint, c, c2;
2892 enum machine_mode mode = TYPE_MODE (type);
2894 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2897 arg0 = CALL_EXPR_ARG (exp, 0);
2898 arg1 = CALL_EXPR_ARG (exp, 1);
2900 if (TREE_CODE (arg1) != REAL_CST
2901 || TREE_OVERFLOW (arg1))
2902 return expand_builtin_mathfn_2 (exp, target, subtarget);
2904 /* Handle constant exponents. */
2906 /* For integer valued exponents we can expand to an optimal multiplication
2907 sequence using expand_powi. */
2908 c = TREE_REAL_CST (arg1);
2909 n = real_to_integer (&c);
2910 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2911 if (real_identical (&c, &cint)
2912 && ((n >= -1 && n <= 2)
2913 || (flag_unsafe_math_optimizations
2915 && powi_cost (n) <= POWI_MAX_MULTS)))
2917 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2920 op = force_reg (mode, op);
2921 op = expand_powi (op, mode, n);
2926 narg0 = builtin_save_expr (arg0);
2928 /* If the exponent is not integer valued, check if it is half of an integer.
2929 In this case we can expand to sqrt (x) * x**(n/2). */
2930 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2931 if (fn != NULL_TREE)
2933 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2934 n = real_to_integer (&c2);
2935 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2936 if (real_identical (&c2, &cint)
2937 && ((flag_unsafe_math_optimizations
2939 && powi_cost (n/2) <= POWI_MAX_MULTS)
2942 tree call_expr = build_call_expr (fn, 1, narg0);
2943 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2946 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2947 op2 = force_reg (mode, op2);
2948 op2 = expand_powi (op2, mode, abs (n / 2));
2949 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2950 0, OPTAB_LIB_WIDEN);
2951 /* If the original exponent was negative, reciprocate the
2954 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2955 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2961 /* Try if the exponent is a third of an integer. In this case
2962 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2963 different from pow (x, 1./3.) due to rounding and behavior
2964 with negative x we need to constrain this transformation to
2965 unsafe math and positive x or finite math. */
2966 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2968 && flag_unsafe_math_optimizations
2969 && (tree_expr_nonnegative_p (arg0)
2970 || !HONOR_NANS (mode)))
2972 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2973 real_round (&c2, mode, &c2);
2974 n = real_to_integer (&c2);
2975 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2976 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2977 real_convert (&c2, mode, &c2);
2978 if (real_identical (&c2, &c)
2980 && powi_cost (n/3) <= POWI_MAX_MULTS)
2983 tree call_expr = build_call_expr (fn, 1,narg0);
2984 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2985 if (abs (n) % 3 == 2)
2986 op = expand_simple_binop (mode, MULT, op, op, op,
2987 0, OPTAB_LIB_WIDEN);
2990 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2991 op2 = force_reg (mode, op2);
2992 op2 = expand_powi (op2, mode, abs (n / 3));
2993 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2994 0, OPTAB_LIB_WIDEN);
2995 /* If the original exponent was negative, reciprocate the
2998 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2999 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3005 /* Fall back to optab expansion. */
3006 return expand_builtin_mathfn_2 (exp, target, subtarget);
3009 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3010 a normal call should be emitted rather than expanding the function
3011 in-line. EXP is the expression that is a call to the builtin
3012 function; if convenient, the result should be placed in TARGET. */
3015 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3019 enum machine_mode mode;
3020 enum machine_mode mode2;
3022 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3025 arg0 = CALL_EXPR_ARG (exp, 0);
3026 arg1 = CALL_EXPR_ARG (exp, 1);
3027 mode = TYPE_MODE (TREE_TYPE (exp));
3029 /* Handle constant power. */
3031 if (TREE_CODE (arg1) == INTEGER_CST
3032 && !TREE_OVERFLOW (arg1))
3034 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3036 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3037 Otherwise, check the number of multiplications required. */
3038 if ((TREE_INT_CST_HIGH (arg1) == 0
3039 || TREE_INT_CST_HIGH (arg1) == -1)
3040 && ((n >= -1 && n <= 2)
3042 && powi_cost (n) <= POWI_MAX_MULTS)))
3044 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3045 op0 = force_reg (mode, op0);
3046 return expand_powi (op0, mode, n);
3050 /* Emit a libcall to libgcc. */
3052 /* Mode of the 2nd argument must match that of an int. */
3053 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3055 if (target == NULL_RTX)
3056 target = gen_reg_rtx (mode);
3058 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3059 if (GET_MODE (op0) != mode)
3060 op0 = convert_to_mode (mode, op0, 0);
3061 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3062 if (GET_MODE (op1) != mode2)
3063 op1 = convert_to_mode (mode2, op1, 0);
3065 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3066 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3067 op0, mode, op1, mode2);
3072 /* Expand expression EXP which is a call to the strlen builtin. Return
3073 NULL_RTX if we failed the caller should emit a normal call, otherwise
3074 try to get the result in TARGET, if convenient. */
3077 expand_builtin_strlen (tree exp, rtx target,
3078 enum machine_mode target_mode)
3080 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3086 tree src = CALL_EXPR_ARG (exp, 0);
3087 rtx result, src_reg, char_rtx, before_strlen;
3088 enum machine_mode insn_mode = target_mode, char_mode;
3089 enum insn_code icode = CODE_FOR_nothing;
3092 /* If the length can be computed at compile-time, return it. */
3093 len = c_strlen (src, 0);
3095 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3097 /* If the length can be computed at compile-time and is constant
3098 integer, but there are side-effects in src, evaluate
3099 src for side-effects, then return len.
3100 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3101 can be optimized into: i++; x = 3; */
3102 len = c_strlen (src, 1);
3103 if (len && TREE_CODE (len) == INTEGER_CST)
3105 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3106 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3109 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3111 /* If SRC is not a pointer type, don't do this operation inline. */
3115 /* Bail out if we can't compute strlen in the right mode. */
3116 while (insn_mode != VOIDmode)
3118 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3119 if (icode != CODE_FOR_nothing)
3122 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3124 if (insn_mode == VOIDmode)
3127 /* Make a place to write the result of the instruction. */
3131 && GET_MODE (result) == insn_mode
3132 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3133 result = gen_reg_rtx (insn_mode);
3135 /* Make a place to hold the source address. We will not expand
3136 the actual source until we are sure that the expansion will
3137 not fail -- there are trees that cannot be expanded twice. */
3138 src_reg = gen_reg_rtx (Pmode);
3140 /* Mark the beginning of the strlen sequence so we can emit the
3141 source operand later. */
3142 before_strlen = get_last_insn ();
3144 char_rtx = const0_rtx;
3145 char_mode = insn_data[(int) icode].operand[2].mode;
3146 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3148 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3150 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3151 char_rtx, GEN_INT (align));
3156 /* Now that we are assured of success, expand the source. */
3158 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3160 emit_move_insn (src_reg, pat);
3165 emit_insn_after (pat, before_strlen);
3167 emit_insn_before (pat, get_insns ());
3169 /* Return the value in the proper mode for this function. */
3170 if (GET_MODE (result) == target_mode)
3172 else if (target != 0)
3173 convert_move (target, result, 0);
3175 target = convert_to_mode (target_mode, result, 0);
3181 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3182 caller should emit a normal call, otherwise try to get the result
3183 in TARGET, if convenient (and in mode MODE if that's convenient). */
3186 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3188 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3190 tree type = TREE_TYPE (exp);
3191 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3192 CALL_EXPR_ARG (exp, 1), type);
3194 return expand_expr (result, target, mode, EXPAND_NORMAL);
3199 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3200 caller should emit a normal call, otherwise try to get the result
3201 in TARGET, if convenient (and in mode MODE if that's convenient). */
3204 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3206 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3208 tree type = TREE_TYPE (exp);
3209 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3210 CALL_EXPR_ARG (exp, 1), type);
3212 return expand_expr (result, target, mode, EXPAND_NORMAL);
3214 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3219 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3220 caller should emit a normal call, otherwise try to get the result
3221 in TARGET, if convenient (and in mode MODE if that's convenient). */
3224 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3226 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3228 tree type = TREE_TYPE (exp);
3229 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3230 CALL_EXPR_ARG (exp, 1), type);
3232 return expand_expr (result, target, mode, EXPAND_NORMAL);
3237 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3238 caller should emit a normal call, otherwise try to get the result
3239 in TARGET, if convenient (and in mode MODE if that's convenient). */
3242 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3244 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3246 tree type = TREE_TYPE (exp);
3247 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3248 CALL_EXPR_ARG (exp, 1), type);
3250 return expand_expr (result, target, mode, EXPAND_NORMAL);
3255 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3256 bytes from constant string DATA + OFFSET and return it as target
3260 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3261 enum machine_mode mode)
3263 const char *str = (const char *) data;
3265 gcc_assert (offset >= 0
3266 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3267 <= strlen (str) + 1));
3269 return c_readstr (str + offset, mode);
3272 /* Expand a call EXP to the memcpy builtin.
3273 Return NULL_RTX if we failed, the caller should emit a normal call,
3274 otherwise try to get the result in TARGET, if convenient (and in
3275 mode MODE if that's convenient). */
3278 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3280 tree fndecl = get_callee_fndecl (exp);
3282 if (!validate_arglist (exp,
3283 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3287 tree dest = CALL_EXPR_ARG (exp, 0);
3288 tree src = CALL_EXPR_ARG (exp, 1);
3289 tree len = CALL_EXPR_ARG (exp, 2);
3290 const char *src_str;
3291 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3292 unsigned int dest_align
3293 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3294 rtx dest_mem, src_mem, dest_addr, len_rtx;
3295 tree result = fold_builtin_memory_op (dest, src, len,
3296 TREE_TYPE (TREE_TYPE (fndecl)),
3298 HOST_WIDE_INT expected_size = -1;
3299 unsigned int expected_align = 0;
3303 while (TREE_CODE (result) == COMPOUND_EXPR)
3305 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3307 result = TREE_OPERAND (result, 1);
3309 return expand_expr (result, target, mode, EXPAND_NORMAL);
3312 /* If DEST is not a pointer type, call the normal function. */
3313 if (dest_align == 0)
3316 /* If either SRC is not a pointer type, don't do this
3317 operation in-line. */
3321 stringop_block_profile (exp, &expected_align, &expected_size);
3322 if (expected_align < dest_align)
3323 expected_align = dest_align;
3324 dest_mem = get_memory_rtx (dest, len);
3325 set_mem_align (dest_mem, dest_align);
3326 len_rtx = expand_normal (len);
3327 src_str = c_getstr (src);
3329 /* If SRC is a string constant and block move would be done
3330 by pieces, we can avoid loading the string from memory
3331 and only stored the computed constants. */
3333 && GET_CODE (len_rtx) == CONST_INT
3334 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3335 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3336 (void *) src_str, dest_align))
3338 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3339 builtin_memcpy_read_str,
3340 (void *) src_str, dest_align, 0);
3341 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3342 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3346 src_mem = get_memory_rtx (src, len);
3347 set_mem_align (src_mem, src_align);
3349 /* Copy word part most expediently. */
3350 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3351 CALL_EXPR_TAILCALL (exp)
3352 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3353 expected_align, expected_size);
3357 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3358 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3364 /* Expand a call EXP to the mempcpy builtin.
3365 Return NULL_RTX if we failed; the caller should emit a normal call,
3366 otherwise try to get the result in TARGET, if convenient (and in
3367 mode MODE if that's convenient). If ENDP is 0 return the
3368 destination pointer, if ENDP is 1 return the end pointer ala
3369 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3373 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3375 if (!validate_arglist (exp,
3376 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3380 tree dest = CALL_EXPR_ARG (exp, 0);
3381 tree src = CALL_EXPR_ARG (exp, 1);
3382 tree len = CALL_EXPR_ARG (exp, 2);
3383 return expand_builtin_mempcpy_args (dest, src, len,
3385 target, mode, /*endp=*/ 1);
3389 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3390 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3391 so that this can also be called without constructing an actual CALL_EXPR.
3392 TYPE is the return type of the call. The other arguments and return value
3393 are the same as for expand_builtin_mempcpy. */
3396 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3397 rtx target, enum machine_mode mode, int endp)
3399 /* If return value is ignored, transform mempcpy into memcpy. */
3400 if (target == const0_rtx)
3402 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3407 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3408 target, mode, EXPAND_NORMAL);
3412 const char *src_str;
3413 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3414 unsigned int dest_align
3415 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3416 rtx dest_mem, src_mem, len_rtx;
3417 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3421 while (TREE_CODE (result) == COMPOUND_EXPR)
3423 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3425 result = TREE_OPERAND (result, 1);
3427 return expand_expr (result, target, mode, EXPAND_NORMAL);
3430 /* If either SRC or DEST is not a pointer type, don't do this
3431 operation in-line. */
3432 if (dest_align == 0 || src_align == 0)
3435 /* If LEN is not constant, call the normal function. */
3436 if (! host_integerp (len, 1))
3439 len_rtx = expand_normal (len);
3440 src_str = c_getstr (src);
3442 /* If SRC is a string constant and block move would be done
3443 by pieces, we can avoid loading the string from memory
3444 and only stored the computed constants. */
3446 && GET_CODE (len_rtx) == CONST_INT
3447 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3448 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3449 (void *) src_str, dest_align))
3451 dest_mem = get_memory_rtx (dest, len);
3452 set_mem_align (dest_mem, dest_align);
3453 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3454 builtin_memcpy_read_str,
3455 (void *) src_str, dest_align, endp);
3456 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3457 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3461 if (GET_CODE (len_rtx) == CONST_INT
3462 && can_move_by_pieces (INTVAL (len_rtx),
3463 MIN (dest_align, src_align)))
3465 dest_mem = get_memory_rtx (dest, len);
3466 set_mem_align (dest_mem, dest_align);
3467 src_mem = get_memory_rtx (src, len);
3468 set_mem_align (src_mem, src_align);
3469 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3470 MIN (dest_align, src_align), endp);
3471 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3472 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3480 /* Expand expression EXP, which is a call to the memmove builtin. Return
3481 NULL_RTX if we failed; the caller should emit a normal call. */
3484 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3486 if (!validate_arglist (exp,
3487 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3491 tree dest = CALL_EXPR_ARG (exp, 0);
3492 tree src = CALL_EXPR_ARG (exp, 1);
3493 tree len = CALL_EXPR_ARG (exp, 2);
3494 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3495 target, mode, ignore);
3499 /* Helper function to do the actual work for expand_builtin_memmove. The
3500 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3501 so that this can also be called without constructing an actual CALL_EXPR.
3502 TYPE is the return type of the call. The other arguments and return value
3503 are the same as for expand_builtin_memmove. */
3506 expand_builtin_memmove_args (tree dest, tree src, tree len,
3507 tree type, rtx target, enum machine_mode mode,
3510 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3514 STRIP_TYPE_NOPS (result);
3515 while (TREE_CODE (result) == COMPOUND_EXPR)
3517 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3519 result = TREE_OPERAND (result, 1);
3521 return expand_expr (result, target, mode, EXPAND_NORMAL);
3524 /* Otherwise, call the normal function. */
3528 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3529 NULL_RTX if we failed the caller should emit a normal call. */
3532 expand_builtin_bcopy (tree exp, int ignore)
3534 tree type = TREE_TYPE (exp);
3535 tree src, dest, size;
3537 if (!validate_arglist (exp,
3538 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3541 src = CALL_EXPR_ARG (exp, 0);
3542 dest = CALL_EXPR_ARG (exp, 1);
3543 size = CALL_EXPR_ARG (exp, 2);
3545 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3546 This is done this way so that if it isn't expanded inline, we fall
3547 back to calling bcopy instead of memmove. */
3548 return expand_builtin_memmove_args (dest, src,
3549 fold_convert (sizetype, size),
3550 type, const0_rtx, VOIDmode,
3555 # define HAVE_movstr 0
3556 # define CODE_FOR_movstr CODE_FOR_nothing
3559 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3560 we failed, the caller should emit a normal call, otherwise try to
3561 get the result in TARGET, if convenient. If ENDP is 0 return the
3562 destination pointer, if ENDP is 1 return the end pointer ala
3563 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3567 expand_movstr (tree dest, tree src, rtx target, int endp)
3573 const struct insn_data * data;
3578 dest_mem = get_memory_rtx (dest, NULL);
3579 src_mem = get_memory_rtx (src, NULL);
3582 target = force_reg (Pmode, XEXP (dest_mem, 0));
3583 dest_mem = replace_equiv_address (dest_mem, target);
3584 end = gen_reg_rtx (Pmode);
3588 if (target == 0 || target == const0_rtx)
3590 end = gen_reg_rtx (Pmode);
3598 data = insn_data + CODE_FOR_movstr;
3600 if (data->operand[0].mode != VOIDmode)
3601 end = gen_lowpart (data->operand[0].mode, end);
3603 insn = data->genfun (end, dest_mem, src_mem);
3609 /* movstr is supposed to set end to the address of the NUL
3610 terminator. If the caller requested a mempcpy-like return value,
3612 if (endp == 1 && target != const0_rtx)
3614 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3615 emit_move_insn (target, force_operand (tem, NULL_RTX));
3621 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3622 NULL_RTX if we failed the caller should emit a normal call, otherwise
3623 try to get the result in TARGET, if convenient (and in mode MODE if that's
3627 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3629 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3631 tree dest = CALL_EXPR_ARG (exp, 0);
3632 tree src = CALL_EXPR_ARG (exp, 1);
3633 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3638 /* Helper function to do the actual work for expand_builtin_strcpy. The
3639 arguments to the builtin_strcpy call DEST and SRC are broken out
3640 so that this can also be called without constructing an actual CALL_EXPR.
3641 The other arguments and return value are the same as for
3642 expand_builtin_strcpy. */
3645 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3646 rtx target, enum machine_mode mode)
3648 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3650 return expand_expr (result, target, mode, EXPAND_NORMAL);
3651 return expand_movstr (dest, src, target, /*endp=*/0);
3655 /* Expand a call EXP to the stpcpy builtin.
3656 Return NULL_RTX if we failed the caller should emit a normal call,
3657 otherwise try to get the result in TARGET, if convenient (and in
3658 mode MODE if that's convenient). */
3661 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3665 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3668 dst = CALL_EXPR_ARG (exp, 0);
3669 src = CALL_EXPR_ARG (exp, 1);
3671 /* If return value is ignored, transform stpcpy into strcpy. */
3672 if (target == const0_rtx)
3674 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3678 return expand_expr (build_call_expr (fn, 2, dst, src),
3679 target, mode, EXPAND_NORMAL);
3686 /* Ensure we get an actual string whose length can be evaluated at
3687 compile-time, not an expression containing a string. This is
3688 because the latter will potentially produce pessimized code
3689 when used to produce the return value. */
3690 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3691 return expand_movstr (dst, src, target, /*endp=*/2);
3693 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3694 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3695 target, mode, /*endp=*/2);
3700 if (TREE_CODE (len) == INTEGER_CST)
3702 rtx len_rtx = expand_normal (len);
3704 if (GET_CODE (len_rtx) == CONST_INT)
3706 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3707 dst, src, target, mode);
3713 if (mode != VOIDmode)
3714 target = gen_reg_rtx (mode);
3716 target = gen_reg_rtx (GET_MODE (ret));
3718 if (GET_MODE (target) != GET_MODE (ret))
3719 ret = gen_lowpart (GET_MODE (target), ret);
3721 ret = plus_constant (ret, INTVAL (len_rtx));
3722 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3730 return expand_movstr (dst, src, target, /*endp=*/2);
3734 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3735 bytes from constant string DATA + OFFSET and return it as target
3739 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3740 enum machine_mode mode)
3742 const char *str = (const char *) data;
3744 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3747 return c_readstr (str + offset, mode);
3750 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3751 NULL_RTX if we failed the caller should emit a normal call. */
3754 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3756 tree fndecl = get_callee_fndecl (exp);
3758 if (validate_arglist (exp,
3759 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3761 tree dest = CALL_EXPR_ARG (exp, 0);
3762 tree src = CALL_EXPR_ARG (exp, 1);
3763 tree len = CALL_EXPR_ARG (exp, 2);
3764 tree slen = c_strlen (src, 1);
3765 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3769 while (TREE_CODE (result) == COMPOUND_EXPR)
3771 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3773 result = TREE_OPERAND (result, 1);
3775 return expand_expr (result, target, mode, EXPAND_NORMAL);
3778 /* We must be passed a constant len and src parameter. */
3779 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3782 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3784 /* We're required to pad with trailing zeros if the requested
3785 len is greater than strlen(s2)+1. In that case try to
3786 use store_by_pieces, if it fails, punt. */
3787 if (tree_int_cst_lt (slen, len))
3789 unsigned int dest_align
3790 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3791 const char *p = c_getstr (src);
3794 if (!p || dest_align == 0 || !host_integerp (len, 1)
3795 || !can_store_by_pieces (tree_low_cst (len, 1),
3796 builtin_strncpy_read_str,
3797 (void *) p, dest_align))
3800 dest_mem = get_memory_rtx (dest, len);
3801 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3802 builtin_strncpy_read_str,
3803 (void *) p, dest_align, 0);
3804 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3805 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3812 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3813 bytes from constant string DATA + OFFSET and return it as target
3817 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3818 enum machine_mode mode)
3820 const char *c = (const char *) data;
3821 char *p = alloca (GET_MODE_SIZE (mode));
3823 memset (p, *c, GET_MODE_SIZE (mode));
3825 return c_readstr (p, mode);
3828 /* Callback routine for store_by_pieces. Return the RTL of a register
3829 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3830 char value given in the RTL register data. For example, if mode is
3831 4 bytes wide, return the RTL for 0x01010101*data. */
3834 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3835 enum machine_mode mode)
3841 size = GET_MODE_SIZE (mode);
3846 memset (p, 1, size);
3847 coeff = c_readstr (p, mode);
3849 target = convert_to_mode (mode, (rtx) data, 1);
3850 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3851 return force_reg (mode, target);
3854 /* Expand expression EXP, which is a call to the memset builtin. Return
3855 NULL_RTX if we failed the caller should emit a normal call, otherwise
3856 try to get the result in TARGET, if convenient (and in mode MODE if that's
3860 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3862 if (!validate_arglist (exp,
3863 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3867 tree dest = CALL_EXPR_ARG (exp, 0);
3868 tree val = CALL_EXPR_ARG (exp, 1);
3869 tree len = CALL_EXPR_ARG (exp, 2);
3870 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3874 /* Helper function to do the actual work for expand_builtin_memset. The
3875 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3876 so that this can also be called without constructing an actual CALL_EXPR.
3877 The other arguments and return value are the same as for
3878 expand_builtin_memset. */
3881 expand_builtin_memset_args (tree dest, tree val, tree len,
3882 rtx target, enum machine_mode mode, tree orig_exp)
3885 enum built_in_function fcode;
3887 unsigned int dest_align;
3888 rtx dest_mem, dest_addr, len_rtx;
3889 HOST_WIDE_INT expected_size = -1;
3890 unsigned int expected_align = 0;
3892 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3894 /* If DEST is not a pointer type, don't do this operation in-line. */
3895 if (dest_align == 0)
3898 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3899 if (expected_align < dest_align)
3900 expected_align = dest_align;
3902 /* If the LEN parameter is zero, return DEST. */
3903 if (integer_zerop (len))
3905 /* Evaluate and ignore VAL in case it has side-effects. */
3906 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3907 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3910 /* Stabilize the arguments in case we fail. */
3911 dest = builtin_save_expr (dest);
3912 val = builtin_save_expr (val);
3913 len = builtin_save_expr (len);
3915 len_rtx = expand_normal (len);
3916 dest_mem = get_memory_rtx (dest, len);
3918 if (TREE_CODE (val) != INTEGER_CST)
3922 val_rtx = expand_normal (val);
3923 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3926 /* Assume that we can memset by pieces if we can store
3927 * the coefficients by pieces (in the required modes).
3928 * We can't pass builtin_memset_gen_str as that emits RTL. */
3930 if (host_integerp (len, 1)
3931 && !(optimize_size && tree_low_cst (len, 1) > 1)
3932 && can_store_by_pieces (tree_low_cst (len, 1),
3933 builtin_memset_read_str, &c, dest_align))
3935 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3937 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3938 builtin_memset_gen_str, val_rtx, dest_align, 0);
3940 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3941 dest_align, expected_align,
3945 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3946 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3950 if (target_char_cast (val, &c))
3955 if (host_integerp (len, 1)
3956 && !(optimize_size && tree_low_cst (len, 1) > 1)
3957 && can_store_by_pieces (tree_low_cst (len, 1),
3958 builtin_memset_read_str, &c, dest_align))
3959 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3960 builtin_memset_read_str, &c, dest_align, 0);
3961 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3962 dest_align, expected_align,
3966 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3967 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3971 set_mem_align (dest_mem, dest_align);
3972 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3973 CALL_EXPR_TAILCALL (orig_exp)
3974 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3975 expected_align, expected_size);
3979 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3980 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3986 fndecl = get_callee_fndecl (orig_exp);
3987 fcode = DECL_FUNCTION_CODE (fndecl);
3988 if (fcode == BUILT_IN_MEMSET)
3989 fn = build_call_expr (fndecl, 3, dest, val, len);
3990 else if (fcode == BUILT_IN_BZERO)
3991 fn = build_call_expr (fndecl, 2, dest, len);
3994 if (TREE_CODE (fn) == CALL_EXPR)
3995 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3996 return expand_call (fn, target, target == const0_rtx);
3999 /* Expand expression EXP, which is a call to the bzero builtin. Return
4000 NULL_RTX if we failed the caller should emit a normal call. */
4003 expand_builtin_bzero (tree exp)
4007 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4010 dest = CALL_EXPR_ARG (exp, 0);
4011 size = CALL_EXPR_ARG (exp, 1);
4013 /* New argument list transforming bzero(ptr x, int y) to
4014 memset(ptr x, int 0, size_t y). This is done this way
4015 so that if it isn't expanded inline, we fallback to
4016 calling bzero instead of memset. */
4018 return expand_builtin_memset_args (dest, integer_zero_node,
4019 fold_convert (sizetype, size),
4020 const0_rtx, VOIDmode, exp);
4023 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4024 caller should emit a normal call, otherwise try to get the result
4025 in TARGET, if convenient (and in mode MODE if that's convenient). */
4028 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4030 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4031 INTEGER_TYPE, VOID_TYPE))
4033 tree type = TREE_TYPE (exp);
4034 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4035 CALL_EXPR_ARG (exp, 1),
4036 CALL_EXPR_ARG (exp, 2), type);
4038 return expand_expr (result, target, mode, EXPAND_NORMAL);
4043 /* Expand expression EXP, which is a call to the memcmp built-in function.
4044 Return NULL_RTX if we failed and the
4045 caller should emit a normal call, otherwise try to get the result in
4046 TARGET, if convenient (and in mode MODE, if that's convenient). */
4049 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4051 if (!validate_arglist (exp,
4052 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4056 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4057 CALL_EXPR_ARG (exp, 1),
4058 CALL_EXPR_ARG (exp, 2));
4060 return expand_expr (result, target, mode, EXPAND_NORMAL);
4063 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4065 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4068 tree arg1 = CALL_EXPR_ARG (exp, 0);
4069 tree arg2 = CALL_EXPR_ARG (exp, 1);
4070 tree len = CALL_EXPR_ARG (exp, 2);
4073 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4075 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4076 enum machine_mode insn_mode;
4078 #ifdef HAVE_cmpmemsi
4080 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4083 #ifdef HAVE_cmpstrnsi
4085 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4090 /* If we don't have POINTER_TYPE, call the function. */
4091 if (arg1_align == 0 || arg2_align == 0)
4094 /* Make a place to write the result of the instruction. */
4097 && REG_P (result) && GET_MODE (result) == insn_mode
4098 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4099 result = gen_reg_rtx (insn_mode);
4101 arg1_rtx = get_memory_rtx (arg1, len);
4102 arg2_rtx = get_memory_rtx (arg2, len);
4103 arg3_rtx = expand_normal (len);
4105 /* Set MEM_SIZE as appropriate. */
4106 if (GET_CODE (arg3_rtx) == CONST_INT)
4108 set_mem_size (arg1_rtx, arg3_rtx);
4109 set_mem_size (arg2_rtx, arg3_rtx);
4112 #ifdef HAVE_cmpmemsi
4114 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4115 GEN_INT (MIN (arg1_align, arg2_align)));
4118 #ifdef HAVE_cmpstrnsi
4120 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4121 GEN_INT (MIN (arg1_align, arg2_align)));
4129 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4130 TYPE_MODE (integer_type_node), 3,
4131 XEXP (arg1_rtx, 0), Pmode,
4132 XEXP (arg2_rtx, 0), Pmode,
4133 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4134 TYPE_UNSIGNED (sizetype)),
4135 TYPE_MODE (sizetype));
4137 /* Return the value in the proper mode for this function. */
4138 mode = TYPE_MODE (TREE_TYPE (exp));
4139 if (GET_MODE (result) == mode)
4141 else if (target != 0)
4143 convert_move (target, result, 0);
4147 return convert_to_mode (mode, result, 0);
4154 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4155 if we failed the caller should emit a normal call, otherwise try to get
4156 the result in TARGET, if convenient. */
4159 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4161 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4165 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4166 CALL_EXPR_ARG (exp, 1));
4168 return expand_expr (result, target, mode, EXPAND_NORMAL);
4171 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4172 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4173 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4175 rtx arg1_rtx, arg2_rtx;
4176 rtx result, insn = NULL_RTX;
4178 tree arg1 = CALL_EXPR_ARG (exp, 0);
4179 tree arg2 = CALL_EXPR_ARG (exp, 1);
4182 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4184 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4186 /* If we don't have POINTER_TYPE, call the function. */
4187 if (arg1_align == 0 || arg2_align == 0)
4190 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4191 arg1 = builtin_save_expr (arg1);
4192 arg2 = builtin_save_expr (arg2);
4194 arg1_rtx = get_memory_rtx (arg1, NULL);
4195 arg2_rtx = get_memory_rtx (arg2, NULL);
4197 #ifdef HAVE_cmpstrsi
4198 /* Try to call cmpstrsi. */
4201 enum machine_mode insn_mode
4202 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4204 /* Make a place to write the result of the instruction. */
4207 && REG_P (result) && GET_MODE (result) == insn_mode
4208 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4209 result = gen_reg_rtx (insn_mode);
4211 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4212 GEN_INT (MIN (arg1_align, arg2_align)));
4215 #ifdef HAVE_cmpstrnsi
4216 /* Try to determine at least one length and call cmpstrnsi. */
4217 if (!insn && HAVE_cmpstrnsi)
4222 enum machine_mode insn_mode
4223 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4224 tree len1 = c_strlen (arg1, 1);
4225 tree len2 = c_strlen (arg2, 1);
4228 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4230 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4232 /* If we don't have a constant length for the first, use the length
4233 of the second, if we know it. We don't require a constant for
4234 this case; some cost analysis could be done if both are available
4235 but neither is constant. For now, assume they're equally cheap,
4236 unless one has side effects. If both strings have constant lengths,
4243 else if (TREE_SIDE_EFFECTS (len1))
4245 else if (TREE_SIDE_EFFECTS (len2))
4247 else if (TREE_CODE (len1) != INTEGER_CST)
4249 else if (TREE_CODE (len2) != INTEGER_CST)
4251 else if (tree_int_cst_lt (len1, len2))
4256 /* If both arguments have side effects, we cannot optimize. */
4257 if (!len || TREE_SIDE_EFFECTS (len))
4260 arg3_rtx = expand_normal (len);
4262 /* Make a place to write the result of the instruction. */
4265 && REG_P (result) && GET_MODE (result) == insn_mode
4266 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4267 result = gen_reg_rtx (insn_mode);
4269 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4270 GEN_INT (MIN (arg1_align, arg2_align)));
4278 /* Return the value in the proper mode for this function. */
4279 mode = TYPE_MODE (TREE_TYPE (exp));
4280 if (GET_MODE (result) == mode)
4283 return convert_to_mode (mode, result, 0);
4284 convert_move (target, result, 0);
4288 /* Expand the library call ourselves using a stabilized argument
4289 list to avoid re-evaluating the function's arguments twice. */
4290 #ifdef HAVE_cmpstrnsi
4293 fndecl = get_callee_fndecl (exp);
4294 fn = build_call_expr (fndecl, 2, arg1, arg2);
4295 if (TREE_CODE (fn) == CALL_EXPR)
4296 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4297 return expand_call (fn, target, target == const0_rtx);
4303 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4304 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4305 the result in TARGET, if convenient. */
4308 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4310 if (!validate_arglist (exp,
4311 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4315 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4316 CALL_EXPR_ARG (exp, 1),
4317 CALL_EXPR_ARG (exp, 2));
4319 return expand_expr (result, target, mode, EXPAND_NORMAL);
4322 /* If c_strlen can determine an expression for one of the string
4323 lengths, and it doesn't have side effects, then emit cmpstrnsi
4324 using length MIN(strlen(string)+1, arg3). */
4325 #ifdef HAVE_cmpstrnsi
4328 tree len, len1, len2;
4329 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4332 tree arg1 = CALL_EXPR_ARG (exp, 0);
4333 tree arg2 = CALL_EXPR_ARG (exp, 1);
4334 tree arg3 = CALL_EXPR_ARG (exp, 2);
4337 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4339 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4340 enum machine_mode insn_mode
4341 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4343 len1 = c_strlen (arg1, 1);
4344 len2 = c_strlen (arg2, 1);
4347 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4349 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4351 /* If we don't have a constant length for the first, use the length
4352 of the second, if we know it. We don't require a constant for
4353 this case; some cost analysis could be done if both are available
4354 but neither is constant. For now, assume they're equally cheap,
4355 unless one has side effects. If both strings have constant lengths,
4362 else if (TREE_SIDE_EFFECTS (len1))
4364 else if (TREE_SIDE_EFFECTS (len2))
4366 else if (TREE_CODE (len1) != INTEGER_CST)
4368 else if (TREE_CODE (len2) != INTEGER_CST)
4370 else if (tree_int_cst_lt (len1, len2))
4375 /* If both arguments have side effects, we cannot optimize. */
4376 if (!len || TREE_SIDE_EFFECTS (len))
4379 /* The actual new length parameter is MIN(len,arg3). */
4380 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4381 fold_convert (TREE_TYPE (len), arg3));
4383 /* If we don't have POINTER_TYPE, call the function. */
4384 if (arg1_align == 0 || arg2_align == 0)
4387 /* Make a place to write the result of the instruction. */
4390 && REG_P (result) && GET_MODE (result) == insn_mode
4391 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4392 result = gen_reg_rtx (insn_mode);
4394 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4395 arg1 = builtin_save_expr (arg1);
4396 arg2 = builtin_save_expr (arg2);
4397 len = builtin_save_expr (len);
4399 arg1_rtx = get_memory_rtx (arg1, len);
4400 arg2_rtx = get_memory_rtx (arg2, len);
4401 arg3_rtx = expand_normal (len);
4402 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4403 GEN_INT (MIN (arg1_align, arg2_align)));
4408 /* Return the value in the proper mode for this function. */
4409 mode = TYPE_MODE (TREE_TYPE (exp));
4410 if (GET_MODE (result) == mode)
4413 return convert_to_mode (mode, result, 0);
4414 convert_move (target, result, 0);
4418 /* Expand the library call ourselves using a stabilized argument
4419 list to avoid re-evaluating the function's arguments twice. */
4420 fndecl = get_callee_fndecl (exp);
4421 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4422 if (TREE_CODE (fn) == CALL_EXPR)
4423 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4424 return expand_call (fn, target, target == const0_rtx);
4430 /* Expand expression EXP, which is a call to the strcat builtin.
4431 Return NULL_RTX if we failed the caller should emit a normal call,
4432 otherwise try to get the result in TARGET, if convenient. */
4435 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4437 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4441 tree dst = CALL_EXPR_ARG (exp, 0);
4442 tree src = CALL_EXPR_ARG (exp, 1);
4443 const char *p = c_getstr (src);
4445 /* If the string length is zero, return the dst parameter. */
4446 if (p && *p == '\0')
4447 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4451 /* See if we can store by pieces into (dst + strlen(dst)). */
4452 tree newsrc, newdst,
4453 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4456 /* Stabilize the argument list. */
4457 newsrc = builtin_save_expr (src);
4458 dst = builtin_save_expr (dst);
4462 /* Create strlen (dst). */
4463 newdst = build_call_expr (strlen_fn, 1, dst);
4464 /* Create (dst p+ strlen (dst)). */
4466 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4467 newdst = builtin_save_expr (newdst);
4469 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4471 end_sequence (); /* Stop sequence. */
4475 /* Output the entire sequence. */
4476 insns = get_insns ();
4480 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4487 /* Expand expression EXP, which is a call to the strncat builtin.
4488 Return NULL_RTX if we failed the caller should emit a normal call,
4489 otherwise try to get the result in TARGET, if convenient. */
4492 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4494 if (validate_arglist (exp,
4495 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4497 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4498 CALL_EXPR_ARG (exp, 1),
4499 CALL_EXPR_ARG (exp, 2));
4501 return expand_expr (result, target, mode, EXPAND_NORMAL);
4506 /* Expand expression EXP, which is a call to the strspn builtin.
4507 Return NULL_RTX if we failed the caller should emit a normal call,
4508 otherwise try to get the result in TARGET, if convenient. */
4511 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4513 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4515 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4516 CALL_EXPR_ARG (exp, 1));
4518 return expand_expr (result, target, mode, EXPAND_NORMAL);
4523 /* Expand expression EXP, which is a call to the strcspn builtin.
4524 Return NULL_RTX if we failed the caller should emit a normal call,
4525 otherwise try to get the result in TARGET, if convenient. */
4528 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4530 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4532 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4533 CALL_EXPR_ARG (exp, 1));
4535 return expand_expr (result, target, mode, EXPAND_NORMAL);
4540 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4541 if that's convenient. */
4544 expand_builtin_saveregs (void)
4548 /* Don't do __builtin_saveregs more than once in a function.
4549 Save the result of the first call and reuse it. */
4550 if (saveregs_value != 0)
4551 return saveregs_value;
4553 /* When this function is called, it means that registers must be
4554 saved on entry to this function. So we migrate the call to the
4555 first insn of this function. */
4559 /* Do whatever the machine needs done in this case. */
4560 val = targetm.calls.expand_builtin_saveregs ();
4565 saveregs_value = val;
4567 /* Put the insns after the NOTE that starts the function. If this
4568 is inside a start_sequence, make the outer-level insn chain current, so
4569 the code is placed at the start of the function. */
4570 push_topmost_sequence ();
4571 emit_insn_after (seq, entry_of_function ());
4572 pop_topmost_sequence ();
4577 /* __builtin_args_info (N) returns word N of the arg space info
4578 for the current function. The number and meanings of words
4579 is controlled by the definition of CUMULATIVE_ARGS. */
4582 expand_builtin_args_info (tree exp)
4584 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4585 int *word_ptr = (int *) ¤t_function_args_info;
4587 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4589 if (call_expr_nargs (exp) != 0)
4591 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4592 error ("argument of %<__builtin_args_info%> must be constant");
4595 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4597 if (wordnum < 0 || wordnum >= nwords)
4598 error ("argument of %<__builtin_args_info%> out of range");
4600 return GEN_INT (word_ptr[wordnum]);
4604 error ("missing argument in %<__builtin_args_info%>");
4609 /* Expand a call to __builtin_next_arg. */
4612 expand_builtin_next_arg (void)
4614 /* Checking arguments is already done in fold_builtin_next_arg
4615 that must be called before this function. */
4616 return expand_binop (ptr_mode, add_optab,
4617 current_function_internal_arg_pointer,
4618 current_function_arg_offset_rtx,
4619 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4622 /* Make it easier for the backends by protecting the valist argument
4623 from multiple evaluations. */
4626 stabilize_va_list (tree valist, int needs_lvalue)
4628 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4630 if (TREE_SIDE_EFFECTS (valist))
4631 valist = save_expr (valist);
4633 /* For this case, the backends will be expecting a pointer to
4634 TREE_TYPE (va_list_type_node), but it's possible we've
4635 actually been given an array (an actual va_list_type_node).
4637 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4639 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4640 valist = build_fold_addr_expr_with_type (valist, p1);
4649 if (! TREE_SIDE_EFFECTS (valist))
4652 pt = build_pointer_type (va_list_type_node);
4653 valist = fold_build1 (ADDR_EXPR, pt, valist);
4654 TREE_SIDE_EFFECTS (valist) = 1;
4657 if (TREE_SIDE_EFFECTS (valist))
4658 valist = save_expr (valist);
4659 valist = build_fold_indirect_ref (valist);
4665 /* The "standard" definition of va_list is void*. */
4668 std_build_builtin_va_list (void)
4670 return ptr_type_node;
4673 /* The "standard" implementation of va_start: just assign `nextarg' to
4677 std_expand_builtin_va_start (tree valist, rtx nextarg)
4679 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4680 convert_move (va_r, nextarg, 0);
4683 /* Expand EXP, a call to __builtin_va_start. */
4686 expand_builtin_va_start (tree exp)
4691 if (call_expr_nargs (exp) < 2)
4693 error ("too few arguments to function %<va_start%>");
4697 if (fold_builtin_next_arg (exp, true))
4700 nextarg = expand_builtin_next_arg ();
4701 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4703 #ifdef EXPAND_BUILTIN_VA_START
4704 EXPAND_BUILTIN_VA_START (valist, nextarg);
4706 std_expand_builtin_va_start (valist, nextarg);
4712 /* The "standard" implementation of va_arg: read the value from the
4713 current (padded) address and increment by the (padded) size. */
4716 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4718 tree addr, t, type_size, rounded_size, valist_tmp;
4719 unsigned HOST_WIDE_INT align, boundary;
4722 #ifdef ARGS_GROW_DOWNWARD
4723 /* All of the alignment and movement below is for args-grow-up machines.
4724 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4725 implement their own specialized gimplify_va_arg_expr routines. */
4729 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4731 type = build_pointer_type (type);
4733 align = PARM_BOUNDARY / BITS_PER_UNIT;
4734 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4736 /* Hoist the valist value into a temporary for the moment. */
4737 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4739 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4740 requires greater alignment, we must perform dynamic alignment. */
4741 if (boundary > align
4742 && !integer_zerop (TYPE_SIZE (type)))
4744 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4745 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4746 valist_tmp, size_int (boundary - 1)));
4747 gimplify_and_add (t, pre_p);
4749 t = fold_convert (sizetype, valist_tmp);
4750 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4751 fold_convert (TREE_TYPE (valist),
4752 fold_build2 (BIT_AND_EXPR, sizetype, t,
4753 size_int (-boundary))));
4754 gimplify_and_add (t, pre_p);
4759 /* If the actual alignment is less than the alignment of the type,
4760 adjust the type accordingly so that we don't assume strict alignment
4761 when deferencing the pointer. */
4762 boundary *= BITS_PER_UNIT;
4763 if (boundary < TYPE_ALIGN (type))
4765 type = build_variant_type_copy (type);
4766 TYPE_ALIGN (type) = boundary;
4769 /* Compute the rounded size of the type. */
4770 type_size = size_in_bytes (type);
4771 rounded_size = round_up (type_size, align);
4773 /* Reduce rounded_size so it's sharable with the postqueue. */
4774 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4778 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4780 /* Small args are padded downward. */
4781 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4782 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4783 size_binop (MINUS_EXPR, rounded_size, type_size));
4784 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4787 /* Compute new value for AP. */
4788 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4789 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4790 gimplify_and_add (t, pre_p);
4792 addr = fold_convert (build_pointer_type (type), addr);
4795 addr = build_va_arg_indirect_ref (addr);
4797 return build_va_arg_indirect_ref (addr);
4800 /* Build an indirect-ref expression over the given TREE, which represents a
4801 piece of a va_arg() expansion. */
4803 build_va_arg_indirect_ref (tree addr)
4805 addr = build_fold_indirect_ref (addr);
4807 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4813 /* Return a dummy expression of type TYPE in order to keep going after an
4817 dummy_object (tree type)
4819 tree t = build_int_cst (build_pointer_type (type), 0);
4820 return build1 (INDIRECT_REF, type, t);
4823 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4824 builtin function, but a very special sort of operator. */
4826 enum gimplify_status
4827 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4829 tree promoted_type, want_va_type, have_va_type;
4830 tree valist = TREE_OPERAND (*expr_p, 0);
4831 tree type = TREE_TYPE (*expr_p);
4834 /* Verify that valist is of the proper type. */
4835 want_va_type = va_list_type_node;
4836 have_va_type = TREE_TYPE (valist);
4838 if (have_va_type == error_mark_node)
4841 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4843 /* If va_list is an array type, the argument may have decayed
4844 to a pointer type, e.g. by being passed to another function.
4845 In that case, unwrap both types so that we can compare the
4846 underlying records. */
4847 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4848 || POINTER_TYPE_P (have_va_type))
4850 want_va_type = TREE_TYPE (want_va_type);
4851 have_va_type = TREE_TYPE (have_va_type);
4855 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4857 error ("first argument to %<va_arg%> not of type %<va_list%>");
4861 /* Generate a diagnostic for requesting data of a type that cannot
4862 be passed through `...' due to type promotion at the call site. */
4863 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4866 static bool gave_help;
4868 /* Unfortunately, this is merely undefined, rather than a constraint
4869 violation, so we cannot make this an error. If this call is never
4870 executed, the program is still strictly conforming. */
4871 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4872 type, promoted_type);
4876 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4877 promoted_type, type);
4880 /* We can, however, treat "undefined" any way we please.
4881 Call abort to encourage the user to fix the program. */
4882 inform ("if this code is reached, the program will abort");
4883 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4884 append_to_statement_list (t, pre_p);
4886 /* This is dead code, but go ahead and finish so that the
4887 mode of the result comes out right. */
4888 *expr_p = dummy_object (type);
4893 /* Make it easier for the backends by protecting the valist argument
4894 from multiple evaluations. */
4895 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4897 /* For this case, the backends will be expecting a pointer to
4898 TREE_TYPE (va_list_type_node), but it's possible we've
4899 actually been given an array (an actual va_list_type_node).
4901 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4903 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4904 valist = build_fold_addr_expr_with_type (valist, p1);
4906 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4909 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4911 if (!targetm.gimplify_va_arg_expr)
4912 /* FIXME:Once most targets are converted we should merely
4913 assert this is non-null. */
4916 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4921 /* Expand EXP, a call to __builtin_va_end. */
4924 expand_builtin_va_end (tree exp)
4926 tree valist = CALL_EXPR_ARG (exp, 0);
4928 /* Evaluate for side effects, if needed. I hate macros that don't
4930 if (TREE_SIDE_EFFECTS (valist))
4931 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4936 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4937 builtin rather than just as an assignment in stdarg.h because of the
4938 nastiness of array-type va_list types. */
4941 expand_builtin_va_copy (tree exp)
4945 dst = CALL_EXPR_ARG (exp, 0);
4946 src = CALL_EXPR_ARG (exp, 1);
4948 dst = stabilize_va_list (dst, 1);
4949 src = stabilize_va_list (src, 0);
4951 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4953 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4954 TREE_SIDE_EFFECTS (t) = 1;
4955 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4959 rtx dstb, srcb, size;
4961 /* Evaluate to pointers. */
4962 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4963 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4964 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4965 VOIDmode, EXPAND_NORMAL);
4967 dstb = convert_memory_address (Pmode, dstb);
4968 srcb = convert_memory_address (Pmode, srcb);
4970 /* "Dereference" to BLKmode memories. */
4971 dstb = gen_rtx_MEM (BLKmode, dstb);
4972 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4973 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4974 srcb = gen_rtx_MEM (BLKmode, srcb);
4975 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4976 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4979 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4985 /* Expand a call to one of the builtin functions __builtin_frame_address or
4986 __builtin_return_address. */
4989 expand_builtin_frame_address (tree fndecl, tree exp)
4991 /* The argument must be a nonnegative integer constant.
4992 It counts the number of frames to scan up the stack.
4993 The value is the return address saved in that frame. */
4994 if (call_expr_nargs (exp) == 0)
4995 /* Warning about missing arg was already issued. */
4997 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4999 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5000 error ("invalid argument to %<__builtin_frame_address%>");
5002 error ("invalid argument to %<__builtin_return_address%>");
5008 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5009 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5011 /* Some ports cannot access arbitrary stack frames. */
5014 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5015 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5017 warning (0, "unsupported argument to %<__builtin_return_address%>");
5021 /* For __builtin_frame_address, return what we've got. */
5022 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5026 && ! CONSTANT_P (tem))
5027 tem = copy_to_mode_reg (Pmode, tem);
5032 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5033 we failed and the caller should emit a normal call, otherwise try to get
5034 the result in TARGET, if convenient. */
5037 expand_builtin_alloca (tree exp, rtx target)
5042 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5043 should always expand to function calls. These can be intercepted
5048 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5051 /* Compute the argument. */
5052 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5054 /* Allocate the desired space. */
5055 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5056 result = convert_memory_address (ptr_mode, result);
5061 /* Expand a call to a bswap builtin with argument ARG0. MODE
5062 is the mode to expand with. */
5065 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5067 enum machine_mode mode;
5071 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5074 arg = CALL_EXPR_ARG (exp, 0);
5075 mode = TYPE_MODE (TREE_TYPE (arg));
5076 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5078 target = expand_unop (mode, bswap_optab, op0, target, 1);
5080 gcc_assert (target);
5082 return convert_to_mode (mode, target, 0);
5085 /* Expand a call to a unary builtin in EXP.
5086 Return NULL_RTX if a normal call should be emitted rather than expanding the
5087 function in-line. If convenient, the result should be placed in TARGET.
5088 SUBTARGET may be used as the target for computing one of EXP's operands. */
5091 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5092 rtx subtarget, optab op_optab)
5096 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5099 /* Compute the argument. */
5100 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5101 VOIDmode, EXPAND_NORMAL);
5102 /* Compute op, into TARGET if possible.
5103 Set TARGET to wherever the result comes back. */
5104 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5105 op_optab, op0, target, 1);
5106 gcc_assert (target);
5108 return convert_to_mode (target_mode, target, 0);
5111 /* If the string passed to fputs is a constant and is one character
5112 long, we attempt to transform this call into __builtin_fputc(). */
5115 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5117 /* Verify the arguments in the original call. */
5118 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5120 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5121 CALL_EXPR_ARG (exp, 1),
5122 (target == const0_rtx),
5123 unlocked, NULL_TREE);
5125 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5130 /* Expand a call to __builtin_expect. We just return our argument
5131 as the builtin_expect semantic should've been already executed by
5132 tree branch prediction pass. */
5135 expand_builtin_expect (tree exp, rtx target)
5139 if (call_expr_nargs (exp) < 2)
5141 arg = CALL_EXPR_ARG (exp, 0);
5142 c = CALL_EXPR_ARG (exp, 1);
5144 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5145 /* When guessing was done, the hints should be already stripped away. */
5146 gcc_assert (!flag_guess_branch_prob
5147 || optimize == 0 || errorcount || sorrycount);
5152 expand_builtin_trap (void)
5156 emit_insn (gen_trap ());
5159 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5163 /* Expand EXP, a call to fabs, fabsf or fabsl.
5164 Return NULL_RTX if a normal call should be emitted rather than expanding
5165 the function inline. If convenient, the result should be placed
5166 in TARGET. SUBTARGET may be used as the target for computing
5170 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5172 enum machine_mode mode;
5176 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5179 arg = CALL_EXPR_ARG (exp, 0);
5180 mode = TYPE_MODE (TREE_TYPE (arg));
5181 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5182 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5185 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5186 Return NULL is a normal call should be emitted rather than expanding the
5187 function inline. If convenient, the result should be placed in TARGET.
5188 SUBTARGET may be used as the target for computing the operand. */
5191 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5196 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5199 arg = CALL_EXPR_ARG (exp, 0);
5200 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5202 arg = CALL_EXPR_ARG (exp, 1);
5203 op1 = expand_normal (arg);
5205 return expand_copysign (op0, op1, target);
5208 /* Create a new constant string literal and return a char* pointer to it.
5209 The STRING_CST value is the LEN characters at STR. */
5211 build_string_literal (int len, const char *str)
5213 tree t, elem, index, type;
5215 t = build_string (len, str);
5216 elem = build_type_variant (char_type_node, 1, 0);
5217 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5218 type = build_array_type (elem, index);
5219 TREE_TYPE (t) = type;
5220 TREE_CONSTANT (t) = 1;
5221 TREE_INVARIANT (t) = 1;
5222 TREE_READONLY (t) = 1;
5223 TREE_STATIC (t) = 1;
5225 type = build_pointer_type (type);
5226 t = build1 (ADDR_EXPR, type, t);
5228 type = build_pointer_type (elem);
5229 t = build1 (NOP_EXPR, type, t);
5233 /* Expand EXP, a call to printf or printf_unlocked.
5234 Return NULL_RTX if a normal call should be emitted rather than transforming
5235 the function inline. If convenient, the result should be placed in
5236 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5239 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5242 /* If we're using an unlocked function, assume the other unlocked
5243 functions exist explicitly. */
5244 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5245 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5246 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5247 : implicit_built_in_decls[BUILT_IN_PUTS];
5248 const char *fmt_str;
5251 int nargs = call_expr_nargs (exp);
5253 /* If the return value is used, don't do the transformation. */
5254 if (target != const0_rtx)
5257 /* Verify the required arguments in the original call. */
5260 fmt = CALL_EXPR_ARG (exp, 0);
5261 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5264 /* Check whether the format is a literal string constant. */
5265 fmt_str = c_getstr (fmt);
5266 if (fmt_str == NULL)
5269 if (!init_target_chars ())
5272 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5273 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5276 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5279 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5281 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5282 else if (strcmp (fmt_str, target_percent_c) == 0)
5285 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5288 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5292 /* We can't handle anything else with % args or %% ... yet. */
5293 if (strchr (fmt_str, target_percent))
5299 /* If the format specifier was "", printf does nothing. */
5300 if (fmt_str[0] == '\0')
5302 /* If the format specifier has length of 1, call putchar. */
5303 if (fmt_str[1] == '\0')
5305 /* Given printf("c"), (where c is any one character,)
5306 convert "c"[0] to an int and pass that to the replacement
5308 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5310 fn = build_call_expr (fn_putchar, 1, arg);
5314 /* If the format specifier was "string\n", call puts("string"). */
5315 size_t len = strlen (fmt_str);
5316 if ((unsigned char)fmt_str[len - 1] == target_newline)
5318 /* Create a NUL-terminated string that's one char shorter
5319 than the original, stripping off the trailing '\n'. */
5320 char *newstr = alloca (len);
5321 memcpy (newstr, fmt_str, len - 1);
5322 newstr[len - 1] = 0;
5323 arg = build_string_literal (len, newstr);
5325 fn = build_call_expr (fn_puts, 1, arg);
5328 /* We'd like to arrange to call fputs(string,stdout) here,
5329 but we need stdout and don't have a way to get it yet. */
5336 if (TREE_CODE (fn) == CALL_EXPR)
5337 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5338 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5341 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5342 Return NULL_RTX if a normal call should be emitted rather than transforming
5343 the function inline. If convenient, the result should be placed in
5344 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5347 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5350 /* If we're using an unlocked function, assume the other unlocked
5351 functions exist explicitly. */
5352 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5353 : implicit_built_in_decls[BUILT_IN_FPUTC];
5354 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5355 : implicit_built_in_decls[BUILT_IN_FPUTS];
5356 const char *fmt_str;
5359 int nargs = call_expr_nargs (exp);
5361 /* If the return value is used, don't do the transformation. */
5362 if (target != const0_rtx)
5365 /* Verify the required arguments in the original call. */
5368 fp = CALL_EXPR_ARG (exp, 0);
5369 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5371 fmt = CALL_EXPR_ARG (exp, 1);
5372 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5375 /* Check whether the format is a literal string constant. */
5376 fmt_str = c_getstr (fmt);
5377 if (fmt_str == NULL)
5380 if (!init_target_chars ())
5383 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5384 if (strcmp (fmt_str, target_percent_s) == 0)
5387 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5389 arg = CALL_EXPR_ARG (exp, 2);
5391 fn = build_call_expr (fn_fputs, 2, arg, fp);
5393 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5394 else if (strcmp (fmt_str, target_percent_c) == 0)
5397 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5399 arg = CALL_EXPR_ARG (exp, 2);
5401 fn = build_call_expr (fn_fputc, 2, arg, fp);
5405 /* We can't handle anything else with % args or %% ... yet. */
5406 if (strchr (fmt_str, target_percent))
5412 /* If the format specifier was "", fprintf does nothing. */
5413 if (fmt_str[0] == '\0')
5415 /* Evaluate and ignore FILE* argument for side-effects. */
5416 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5420 /* When "string" doesn't contain %, replace all cases of
5421 fprintf(stream,string) with fputs(string,stream). The fputs
5422 builtin will take care of special cases like length == 1. */
5424 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5429 if (TREE_CODE (fn) == CALL_EXPR)
5430 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5431 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5434 /* Expand a call EXP to sprintf. Return NULL_RTX if
5435 a normal call should be emitted rather than expanding the function
5436 inline. If convenient, the result should be placed in TARGET with
5440 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5443 const char *fmt_str;
5444 int nargs = call_expr_nargs (exp);
5446 /* Verify the required arguments in the original call. */
5449 dest = CALL_EXPR_ARG (exp, 0);
5450 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5452 fmt = CALL_EXPR_ARG (exp, 0);
5453 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5456 /* Check whether the format is a literal string constant. */
5457 fmt_str = c_getstr (fmt);
5458 if (fmt_str == NULL)
5461 if (!init_target_chars ())
5464 /* If the format doesn't contain % args or %%, use strcpy. */
5465 if (strchr (fmt_str, target_percent) == 0)
5467 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5470 if ((nargs > 2) || ! fn)
5472 expand_expr (build_call_expr (fn, 2, dest, fmt),
5473 const0_rtx, VOIDmode, EXPAND_NORMAL);
5474 if (target == const0_rtx)
5476 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5477 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5479 /* If the format is "%s", use strcpy if the result isn't used. */
5480 else if (strcmp (fmt_str, target_percent_s) == 0)
5483 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5489 arg = CALL_EXPR_ARG (exp, 2);
5490 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5493 if (target != const0_rtx)
5495 len = c_strlen (arg, 1);
5496 if (! len || TREE_CODE (len) != INTEGER_CST)
5502 expand_expr (build_call_expr (fn, 2, dest, arg),
5503 const0_rtx, VOIDmode, EXPAND_NORMAL);
5505 if (target == const0_rtx)
5507 return expand_expr (len, target, mode, EXPAND_NORMAL);
5513 /* Expand a call to either the entry or exit function profiler. */
5516 expand_builtin_profile_func (bool exitp)
5520 this = DECL_RTL (current_function_decl);
5521 gcc_assert (MEM_P (this));
5522 this = XEXP (this, 0);
5525 which = profile_function_exit_libfunc;
5527 which = profile_function_entry_libfunc;
5529 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5530 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5537 /* Expand a call to __builtin___clear_cache. */
5540 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5542 #ifndef HAVE_clear_cache
5543 #ifdef CLEAR_INSN_CACHE
5544 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5545 does something. Just do the default expansion to a call to
5549 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5550 does nothing. There is no need to call it. Do nothing. */
5552 #endif /* CLEAR_INSN_CACHE */
5554 /* We have a "clear_cache" insn, and it will handle everything. */
5556 rtx begin_rtx, end_rtx;
5557 enum insn_code icode;
5559 /* We must not expand to a library call. If we did, any
5560 fallback library function in libgcc that might contain a call to
5561 __builtin___clear_cache() would recurse infinitely. */
5562 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5564 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5568 if (HAVE_clear_cache)
5570 icode = CODE_FOR_clear_cache;
5572 begin = CALL_EXPR_ARG (exp, 0);
5573 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5574 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5575 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5576 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5578 end = CALL_EXPR_ARG (exp, 1);
5579 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5580 end_rtx = convert_memory_address (Pmode, end_rtx);
5581 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5582 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5584 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5587 #endif /* HAVE_clear_cache */
5590 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5593 round_trampoline_addr (rtx tramp)
5595 rtx temp, addend, mask;
5597 /* If we don't need too much alignment, we'll have been guaranteed
5598 proper alignment by get_trampoline_type. */
5599 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5602 /* Round address up to desired boundary. */
5603 temp = gen_reg_rtx (Pmode);
5604 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5605 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5607 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5608 temp, 0, OPTAB_LIB_WIDEN);
5609 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5610 temp, 0, OPTAB_LIB_WIDEN);
5616 expand_builtin_init_trampoline (tree exp)
5618 tree t_tramp, t_func, t_chain;
5619 rtx r_tramp, r_func, r_chain;
5620 #ifdef TRAMPOLINE_TEMPLATE
5624 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5625 POINTER_TYPE, VOID_TYPE))
5628 t_tramp = CALL_EXPR_ARG (exp, 0);
5629 t_func = CALL_EXPR_ARG (exp, 1);
5630 t_chain = CALL_EXPR_ARG (exp, 2);
5632 r_tramp = expand_normal (t_tramp);
5633 r_func = expand_normal (t_func);
5634 r_chain = expand_normal (t_chain);
5636 /* Generate insns to initialize the trampoline. */
5637 r_tramp = round_trampoline_addr (r_tramp);
5638 #ifdef TRAMPOLINE_TEMPLATE
5639 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5640 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5641 emit_block_move (blktramp, assemble_trampoline_template (),
5642 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5644 trampolines_created = 1;
5645 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5651 expand_builtin_adjust_trampoline (tree exp)
5655 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5658 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5659 tramp = round_trampoline_addr (tramp);
5660 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5661 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5667 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5668 function. The function first checks whether the back end provides
5669 an insn to implement signbit for the respective mode. If not, it
5670 checks whether the floating point format of the value is such that
5671 the sign bit can be extracted. If that is not the case, the
5672 function returns NULL_RTX to indicate that a normal call should be
5673 emitted rather than expanding the function in-line. EXP is the
5674 expression that is a call to the builtin function; if convenient,
5675 the result should be placed in TARGET. */
5677 expand_builtin_signbit (tree exp, rtx target)
5679 const struct real_format *fmt;
5680 enum machine_mode fmode, imode, rmode;
5681 HOST_WIDE_INT hi, lo;
5684 enum insn_code signbit_insn_code;
5687 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5690 arg = CALL_EXPR_ARG (exp, 0);
5691 fmode = TYPE_MODE (TREE_TYPE (arg));
5692 rmode = TYPE_MODE (TREE_TYPE (exp));
5693 fmt = REAL_MODE_FORMAT (fmode);
5695 arg = builtin_save_expr (arg);
5697 /* Expand the argument yielding a RTX expression. */
5698 temp = expand_normal (arg);
5700 /* Check if the back end provides an insn that handles signbit for the
5702 signbit_insn_code = signbit_optab [(int) fmode];
5703 if (signbit_insn_code != CODE_FOR_nothing)
5705 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5706 emit_unop_insn (signbit_insn_code, target, temp, UNKNOWN);
5710 /* For floating point formats without a sign bit, implement signbit
5712 bitpos = fmt->signbit_ro;
5715 /* But we can't do this if the format supports signed zero. */
5716 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5719 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5720 build_real (TREE_TYPE (arg), dconst0));
5721 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5724 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5726 imode = int_mode_for_mode (fmode);
5727 if (imode == BLKmode)
5729 temp = gen_lowpart (imode, temp);
5734 /* Handle targets with different FP word orders. */
5735 if (FLOAT_WORDS_BIG_ENDIAN)
5736 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5738 word = bitpos / BITS_PER_WORD;
5739 temp = operand_subword_force (temp, word, fmode);
5740 bitpos = bitpos % BITS_PER_WORD;
5743 /* Force the intermediate word_mode (or narrower) result into a
5744 register. This avoids attempting to create paradoxical SUBREGs
5745 of floating point modes below. */
5746 temp = force_reg (imode, temp);
5748 /* If the bitpos is within the "result mode" lowpart, the operation
5749 can be implement with a single bitwise AND. Otherwise, we need
5750 a right shift and an AND. */
5752 if (bitpos < GET_MODE_BITSIZE (rmode))
5754 if (bitpos < HOST_BITS_PER_WIDE_INT)
5757 lo = (HOST_WIDE_INT) 1 << bitpos;
5761 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5766 temp = gen_lowpart (rmode, temp);
5767 temp = expand_binop (rmode, and_optab, temp,
5768 immed_double_const (lo, hi, rmode),
5769 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5773 /* Perform a logical right shift to place the signbit in the least
5774 significant bit, then truncate the result to the desired mode
5775 and mask just this bit. */
5776 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5777 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5778 temp = gen_lowpart (rmode, temp);
5779 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5780 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5786 /* Expand fork or exec calls. TARGET is the desired target of the
5787 call. EXP is the call. FN is the
5788 identificator of the actual function. IGNORE is nonzero if the
5789 value is to be ignored. */
5792 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5797 /* If we are not profiling, just call the function. */
5798 if (!profile_arc_flag)
5801 /* Otherwise call the wrapper. This should be equivalent for the rest of
5802 compiler, so the code does not diverge, and the wrapper may run the
5803 code necessary for keeping the profiling sane. */
5805 switch (DECL_FUNCTION_CODE (fn))
5808 id = get_identifier ("__gcov_fork");
5811 case BUILT_IN_EXECL:
5812 id = get_identifier ("__gcov_execl");
5815 case BUILT_IN_EXECV:
5816 id = get_identifier ("__gcov_execv");
5819 case BUILT_IN_EXECLP:
5820 id = get_identifier ("__gcov_execlp");
5823 case BUILT_IN_EXECLE:
5824 id = get_identifier ("__gcov_execle");
5827 case BUILT_IN_EXECVP:
5828 id = get_identifier ("__gcov_execvp");
5831 case BUILT_IN_EXECVE:
5832 id = get_identifier ("__gcov_execve");
5839 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5840 DECL_EXTERNAL (decl) = 1;
5841 TREE_PUBLIC (decl) = 1;
5842 DECL_ARTIFICIAL (decl) = 1;
5843 TREE_NOTHROW (decl) = 1;
5844 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5845 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5846 call = rewrite_call_expr (exp, 0, decl, 0);
5847 return expand_call (call, target, ignore);
5852 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5853 the pointer in these functions is void*, the tree optimizers may remove
5854 casts. The mode computed in expand_builtin isn't reliable either, due
5855 to __sync_bool_compare_and_swap.
5857 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5858 group of builtins. This gives us log2 of the mode size. */
5860 static inline enum machine_mode
5861 get_builtin_sync_mode (int fcode_diff)
5863 /* The size is not negotiable, so ask not to get BLKmode in return
5864 if the target indicates that a smaller size would be better. */
5865 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5868 /* Expand the memory expression LOC and return the appropriate memory operand
5869 for the builtin_sync operations. */
5872 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5876 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5878 /* Note that we explicitly do not want any alias information for this
5879 memory, so that we kill all other live memories. Otherwise we don't
5880 satisfy the full barrier semantics of the intrinsic. */
5881 mem = validize_mem (gen_rtx_MEM (mode, addr));
5883 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5884 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5885 MEM_VOLATILE_P (mem) = 1;
5890 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5891 EXP is the CALL_EXPR. CODE is the rtx code
5892 that corresponds to the arithmetic or logical operation from the name;
5893 an exception here is that NOT actually means NAND. TARGET is an optional
5894 place for us to store the results; AFTER is true if this is the
5895 fetch_and_xxx form. IGNORE is true if we don't actually care about
5896 the result of the operation at all. */
5899 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5900 enum rtx_code code, bool after,
5901 rtx target, bool ignore)
5904 enum machine_mode old_mode;
5906 /* Expand the operands. */
5907 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5909 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5910 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5911 of CONST_INTs, where we know the old_mode only from the call argument. */
5912 old_mode = GET_MODE (val);
5913 if (old_mode == VOIDmode)
5914 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5915 val = convert_modes (mode, old_mode, val, 1);
5918 return expand_sync_operation (mem, val, code);
5920 return expand_sync_fetch_operation (mem, val, code, after, target);
5923 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5924 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5925 true if this is the boolean form. TARGET is a place for us to store the
5926 results; this is NOT optional if IS_BOOL is true. */
5929 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5930 bool is_bool, rtx target)
5932 rtx old_val, new_val, mem;
5933 enum machine_mode old_mode;
5935 /* Expand the operands. */
5936 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5939 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5940 mode, EXPAND_NORMAL);
5941 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5942 of CONST_INTs, where we know the old_mode only from the call argument. */
5943 old_mode = GET_MODE (old_val);
5944 if (old_mode == VOIDmode)
5945 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5946 old_val = convert_modes (mode, old_mode, old_val, 1);
5948 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5949 mode, EXPAND_NORMAL);
5950 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5951 of CONST_INTs, where we know the old_mode only from the call argument. */
5952 old_mode = GET_MODE (new_val);
5953 if (old_mode == VOIDmode)
5954 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5955 new_val = convert_modes (mode, old_mode, new_val, 1);
5958 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5960 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5963 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5964 general form is actually an atomic exchange, and some targets only
5965 support a reduced form with the second argument being a constant 1.
5966 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5970 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5974 enum machine_mode old_mode;
5976 /* Expand the operands. */
5977 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5978 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5979 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5980 of CONST_INTs, where we know the old_mode only from the call argument. */
5981 old_mode = GET_MODE (val);
5982 if (old_mode == VOIDmode)
5983 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5984 val = convert_modes (mode, old_mode, val, 1);
5986 return expand_sync_lock_test_and_set (mem, val, target);
5989 /* Expand the __sync_synchronize intrinsic. */
5992 expand_builtin_synchronize (void)
5996 #ifdef HAVE_memory_barrier
5997 if (HAVE_memory_barrier)
5999 emit_insn (gen_memory_barrier ());
6004 /* If no explicit memory barrier instruction is available, create an
6005 empty asm stmt with a memory clobber. */
6006 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6007 tree_cons (NULL, build_string (6, "memory"), NULL));
6008 ASM_VOLATILE_P (x) = 1;
6009 expand_asm_expr (x);
6012 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6015 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6017 enum insn_code icode;
6019 rtx val = const0_rtx;
6021 /* Expand the operands. */
6022 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6024 /* If there is an explicit operation in the md file, use it. */
6025 icode = sync_lock_release[mode];
6026 if (icode != CODE_FOR_nothing)
6028 if (!insn_data[icode].operand[1].predicate (val, mode))
6029 val = force_reg (mode, val);
6031 insn = GEN_FCN (icode) (mem, val);
6039 /* Otherwise we can implement this operation by emitting a barrier
6040 followed by a store of zero. */
6041 expand_builtin_synchronize ();
6042 emit_move_insn (mem, val);
6045 /* Expand an expression EXP that calls a built-in function,
6046 with result going to TARGET if that's convenient
6047 (and in mode MODE if that's convenient).
6048 SUBTARGET may be used as the target for computing one of EXP's operands.
6049 IGNORE is nonzero if the value is to be ignored. */
6052 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6055 tree fndecl = get_callee_fndecl (exp);
6056 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6057 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6059 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6060 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6062 /* When not optimizing, generate calls to library functions for a certain
6065 && !called_as_built_in (fndecl)
6066 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6067 && fcode != BUILT_IN_ALLOCA)
6068 return expand_call (exp, target, ignore);
6070 /* The built-in function expanders test for target == const0_rtx
6071 to determine whether the function's result will be ignored. */
6073 target = const0_rtx;
6075 /* If the result of a pure or const built-in function is ignored, and
6076 none of its arguments are volatile, we can avoid expanding the
6077 built-in call and just evaluate the arguments for side-effects. */
6078 if (target == const0_rtx
6079 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
6081 bool volatilep = false;
6083 call_expr_arg_iterator iter;
6085 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6086 if (TREE_THIS_VOLATILE (arg))
6094 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6095 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6102 CASE_FLT_FN (BUILT_IN_FABS):
6103 target = expand_builtin_fabs (exp, target, subtarget);
6108 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6109 target = expand_builtin_copysign (exp, target, subtarget);
6114 /* Just do a normal library call if we were unable to fold
6116 CASE_FLT_FN (BUILT_IN_CABS):
6119 CASE_FLT_FN (BUILT_IN_EXP):
6120 CASE_FLT_FN (BUILT_IN_EXP10):
6121 CASE_FLT_FN (BUILT_IN_POW10):
6122 CASE_FLT_FN (BUILT_IN_EXP2):
6123 CASE_FLT_FN (BUILT_IN_EXPM1):
6124 CASE_FLT_FN (BUILT_IN_LOGB):
6125 CASE_FLT_FN (BUILT_IN_LOG):
6126 CASE_FLT_FN (BUILT_IN_LOG10):
6127 CASE_FLT_FN (BUILT_IN_LOG2):
6128 CASE_FLT_FN (BUILT_IN_LOG1P):
6129 CASE_FLT_FN (BUILT_IN_TAN):
6130 CASE_FLT_FN (BUILT_IN_ASIN):
6131 CASE_FLT_FN (BUILT_IN_ACOS):
6132 CASE_FLT_FN (BUILT_IN_ATAN):
6133 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6134 because of possible accuracy problems. */
6135 if (! flag_unsafe_math_optimizations)
6137 CASE_FLT_FN (BUILT_IN_SQRT):
6138 CASE_FLT_FN (BUILT_IN_FLOOR):
6139 CASE_FLT_FN (BUILT_IN_CEIL):
6140 CASE_FLT_FN (BUILT_IN_TRUNC):
6141 CASE_FLT_FN (BUILT_IN_ROUND):
6142 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6143 CASE_FLT_FN (BUILT_IN_RINT):
6144 target = expand_builtin_mathfn (exp, target, subtarget);
6149 CASE_FLT_FN (BUILT_IN_ILOGB):
6150 if (! flag_unsafe_math_optimizations)
6152 CASE_FLT_FN (BUILT_IN_ISINF):
6153 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6158 CASE_FLT_FN (BUILT_IN_LCEIL):
6159 CASE_FLT_FN (BUILT_IN_LLCEIL):
6160 CASE_FLT_FN (BUILT_IN_LFLOOR):
6161 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6162 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6167 CASE_FLT_FN (BUILT_IN_LRINT):
6168 CASE_FLT_FN (BUILT_IN_LLRINT):
6169 CASE_FLT_FN (BUILT_IN_LROUND):
6170 CASE_FLT_FN (BUILT_IN_LLROUND):
6171 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6176 CASE_FLT_FN (BUILT_IN_POW):
6177 target = expand_builtin_pow (exp, target, subtarget);
6182 CASE_FLT_FN (BUILT_IN_POWI):
6183 target = expand_builtin_powi (exp, target, subtarget);
6188 CASE_FLT_FN (BUILT_IN_ATAN2):
6189 CASE_FLT_FN (BUILT_IN_LDEXP):
6190 CASE_FLT_FN (BUILT_IN_SCALB):
6191 CASE_FLT_FN (BUILT_IN_SCALBN):
6192 CASE_FLT_FN (BUILT_IN_SCALBLN):
6193 if (! flag_unsafe_math_optimizations)
6196 CASE_FLT_FN (BUILT_IN_FMOD):
6197 CASE_FLT_FN (BUILT_IN_REMAINDER):
6198 CASE_FLT_FN (BUILT_IN_DREM):
6199 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6204 CASE_FLT_FN (BUILT_IN_CEXPI):
6205 target = expand_builtin_cexpi (exp, target, subtarget);
6206 gcc_assert (target);
6209 CASE_FLT_FN (BUILT_IN_SIN):
6210 CASE_FLT_FN (BUILT_IN_COS):
6211 if (! flag_unsafe_math_optimizations)
6213 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6218 CASE_FLT_FN (BUILT_IN_SINCOS):
6219 if (! flag_unsafe_math_optimizations)
6221 target = expand_builtin_sincos (exp);
6226 case BUILT_IN_APPLY_ARGS:
6227 return expand_builtin_apply_args ();
6229 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6230 FUNCTION with a copy of the parameters described by
6231 ARGUMENTS, and ARGSIZE. It returns a block of memory
6232 allocated on the stack into which is stored all the registers
6233 that might possibly be used for returning the result of a
6234 function. ARGUMENTS is the value returned by
6235 __builtin_apply_args. ARGSIZE is the number of bytes of
6236 arguments that must be copied. ??? How should this value be
6237 computed? We'll also need a safe worst case value for varargs
6239 case BUILT_IN_APPLY:
6240 if (!validate_arglist (exp, POINTER_TYPE,
6241 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6242 && !validate_arglist (exp, REFERENCE_TYPE,
6243 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6249 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6250 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6251 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6253 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6256 /* __builtin_return (RESULT) causes the function to return the
6257 value described by RESULT. RESULT is address of the block of
6258 memory returned by __builtin_apply. */
6259 case BUILT_IN_RETURN:
6260 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6261 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6264 case BUILT_IN_SAVEREGS:
6265 return expand_builtin_saveregs ();
6267 case BUILT_IN_ARGS_INFO:
6268 return expand_builtin_args_info (exp);
6270 /* Return the address of the first anonymous stack arg. */
6271 case BUILT_IN_NEXT_ARG:
6272 if (fold_builtin_next_arg (exp, false))
6274 return expand_builtin_next_arg ();
6276 case BUILT_IN_CLEAR_CACHE:
6277 target = expand_builtin___clear_cache (exp);
6282 case BUILT_IN_CLASSIFY_TYPE:
6283 return expand_builtin_classify_type (exp);
6285 case BUILT_IN_CONSTANT_P:
6288 case BUILT_IN_FRAME_ADDRESS:
6289 case BUILT_IN_RETURN_ADDRESS:
6290 return expand_builtin_frame_address (fndecl, exp);
6292 /* Returns the address of the area where the structure is returned.
6294 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6295 if (call_expr_nargs (exp) != 0
6296 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6297 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6300 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6302 case BUILT_IN_ALLOCA:
6303 target = expand_builtin_alloca (exp, target);
6308 case BUILT_IN_STACK_SAVE:
6309 return expand_stack_save ();
6311 case BUILT_IN_STACK_RESTORE:
6312 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6315 case BUILT_IN_BSWAP32:
6316 case BUILT_IN_BSWAP64:
6317 target = expand_builtin_bswap (exp, target, subtarget);
6323 CASE_INT_FN (BUILT_IN_FFS):
6324 case BUILT_IN_FFSIMAX:
6325 target = expand_builtin_unop (target_mode, exp, target,
6326 subtarget, ffs_optab);
6331 CASE_INT_FN (BUILT_IN_CLZ):
6332 case BUILT_IN_CLZIMAX:
6333 target = expand_builtin_unop (target_mode, exp, target,
6334 subtarget, clz_optab);
6339 CASE_INT_FN (BUILT_IN_CTZ):
6340 case BUILT_IN_CTZIMAX:
6341 target = expand_builtin_unop (target_mode, exp, target,
6342 subtarget, ctz_optab);
6347 CASE_INT_FN (BUILT_IN_POPCOUNT):
6348 case BUILT_IN_POPCOUNTIMAX:
6349 target = expand_builtin_unop (target_mode, exp, target,
6350 subtarget, popcount_optab);
6355 CASE_INT_FN (BUILT_IN_PARITY):
6356 case BUILT_IN_PARITYIMAX:
6357 target = expand_builtin_unop (target_mode, exp, target,
6358 subtarget, parity_optab);
6363 case BUILT_IN_STRLEN:
6364 target = expand_builtin_strlen (exp, target, target_mode);
6369 case BUILT_IN_STRCPY:
6370 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6375 case BUILT_IN_STRNCPY:
6376 target = expand_builtin_strncpy (exp, target, mode);
6381 case BUILT_IN_STPCPY:
6382 target = expand_builtin_stpcpy (exp, target, mode);
6387 case BUILT_IN_STRCAT:
6388 target = expand_builtin_strcat (fndecl, exp, target, mode);
6393 case BUILT_IN_STRNCAT:
6394 target = expand_builtin_strncat (exp, target, mode);
6399 case BUILT_IN_STRSPN:
6400 target = expand_builtin_strspn (exp, target, mode);
6405 case BUILT_IN_STRCSPN:
6406 target = expand_builtin_strcspn (exp, target, mode);
6411 case BUILT_IN_STRSTR:
6412 target = expand_builtin_strstr (exp, target, mode);
6417 case BUILT_IN_STRPBRK:
6418 target = expand_builtin_strpbrk (exp, target, mode);
6423 case BUILT_IN_INDEX:
6424 case BUILT_IN_STRCHR:
6425 target = expand_builtin_strchr (exp, target, mode);
6430 case BUILT_IN_RINDEX:
6431 case BUILT_IN_STRRCHR:
6432 target = expand_builtin_strrchr (exp, target, mode);
6437 case BUILT_IN_MEMCPY:
6438 target = expand_builtin_memcpy (exp, target, mode);
6443 case BUILT_IN_MEMPCPY:
6444 target = expand_builtin_mempcpy (exp, target, mode);
6449 case BUILT_IN_MEMMOVE:
6450 target = expand_builtin_memmove (exp, target, mode, ignore);
6455 case BUILT_IN_BCOPY:
6456 target = expand_builtin_bcopy (exp, ignore);
6461 case BUILT_IN_MEMSET:
6462 target = expand_builtin_memset (exp, target, mode);
6467 case BUILT_IN_BZERO:
6468 target = expand_builtin_bzero (exp);
6473 case BUILT_IN_STRCMP:
6474 target = expand_builtin_strcmp (exp, target, mode);
6479 case BUILT_IN_STRNCMP:
6480 target = expand_builtin_strncmp (exp, target, mode);
6485 case BUILT_IN_MEMCHR:
6486 target = expand_builtin_memchr (exp, target, mode);
6492 case BUILT_IN_MEMCMP:
6493 target = expand_builtin_memcmp (exp, target, mode);
6498 case BUILT_IN_SETJMP:
6499 /* This should have been lowered to the builtins below. */
6502 case BUILT_IN_SETJMP_SETUP:
6503 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6504 and the receiver label. */
6505 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6507 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6508 VOIDmode, EXPAND_NORMAL);
6509 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6510 rtx label_r = label_rtx (label);
6512 /* This is copied from the handling of non-local gotos. */
6513 expand_builtin_setjmp_setup (buf_addr, label_r);
6514 nonlocal_goto_handler_labels
6515 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6516 nonlocal_goto_handler_labels);
6517 /* ??? Do not let expand_label treat us as such since we would
6518 not want to be both on the list of non-local labels and on
6519 the list of forced labels. */
6520 FORCED_LABEL (label) = 0;
6525 case BUILT_IN_SETJMP_DISPATCHER:
6526 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6527 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6529 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6530 rtx label_r = label_rtx (label);
6532 /* Remove the dispatcher label from the list of non-local labels
6533 since the receiver labels have been added to it above. */
6534 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6539 case BUILT_IN_SETJMP_RECEIVER:
6540 /* __builtin_setjmp_receiver is passed the receiver label. */
6541 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6543 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6544 rtx label_r = label_rtx (label);
6546 expand_builtin_setjmp_receiver (label_r);
6551 /* __builtin_longjmp is passed a pointer to an array of five words.
6552 It's similar to the C library longjmp function but works with
6553 __builtin_setjmp above. */
6554 case BUILT_IN_LONGJMP:
6555 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6557 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6558 VOIDmode, EXPAND_NORMAL);
6559 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6561 if (value != const1_rtx)
6563 error ("%<__builtin_longjmp%> second argument must be 1");
6567 expand_builtin_longjmp (buf_addr, value);
6572 case BUILT_IN_NONLOCAL_GOTO:
6573 target = expand_builtin_nonlocal_goto (exp);
6578 /* This updates the setjmp buffer that is its argument with the value
6579 of the current stack pointer. */
6580 case BUILT_IN_UPDATE_SETJMP_BUF:
6581 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6584 = expand_normal (CALL_EXPR_ARG (exp, 0));
6586 expand_builtin_update_setjmp_buf (buf_addr);
6592 expand_builtin_trap ();
6595 case BUILT_IN_PRINTF:
6596 target = expand_builtin_printf (exp, target, mode, false);
6601 case BUILT_IN_PRINTF_UNLOCKED:
6602 target = expand_builtin_printf (exp, target, mode, true);
6607 case BUILT_IN_FPUTS:
6608 target = expand_builtin_fputs (exp, target, false);
6612 case BUILT_IN_FPUTS_UNLOCKED:
6613 target = expand_builtin_fputs (exp, target, true);
6618 case BUILT_IN_FPRINTF:
6619 target = expand_builtin_fprintf (exp, target, mode, false);
6624 case BUILT_IN_FPRINTF_UNLOCKED:
6625 target = expand_builtin_fprintf (exp, target, mode, true);
6630 case BUILT_IN_SPRINTF:
6631 target = expand_builtin_sprintf (exp, target, mode);
6636 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6637 case BUILT_IN_SIGNBITD32:
6638 case BUILT_IN_SIGNBITD64:
6639 case BUILT_IN_SIGNBITD128:
6640 target = expand_builtin_signbit (exp, target);
6645 /* Various hooks for the DWARF 2 __throw routine. */
6646 case BUILT_IN_UNWIND_INIT:
6647 expand_builtin_unwind_init ();
6649 case BUILT_IN_DWARF_CFA:
6650 return virtual_cfa_rtx;
6651 #ifdef DWARF2_UNWIND_INFO
6652 case BUILT_IN_DWARF_SP_COLUMN:
6653 return expand_builtin_dwarf_sp_column ();
6654 case BUILT_IN_INIT_DWARF_REG_SIZES:
6655 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6658 case BUILT_IN_FROB_RETURN_ADDR:
6659 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6660 case BUILT_IN_EXTRACT_RETURN_ADDR:
6661 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6662 case BUILT_IN_EH_RETURN:
6663 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6664 CALL_EXPR_ARG (exp, 1));
6666 #ifdef EH_RETURN_DATA_REGNO
6667 case BUILT_IN_EH_RETURN_DATA_REGNO:
6668 return expand_builtin_eh_return_data_regno (exp);
6670 case BUILT_IN_EXTEND_POINTER:
6671 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6673 case BUILT_IN_VA_START:
6674 case BUILT_IN_STDARG_START:
6675 return expand_builtin_va_start (exp);
6676 case BUILT_IN_VA_END:
6677 return expand_builtin_va_end (exp);
6678 case BUILT_IN_VA_COPY:
6679 return expand_builtin_va_copy (exp);
6680 case BUILT_IN_EXPECT:
6681 return expand_builtin_expect (exp, target);
6682 case BUILT_IN_PREFETCH:
6683 expand_builtin_prefetch (exp);
6686 case BUILT_IN_PROFILE_FUNC_ENTER:
6687 return expand_builtin_profile_func (false);
6688 case BUILT_IN_PROFILE_FUNC_EXIT:
6689 return expand_builtin_profile_func (true);
6691 case BUILT_IN_INIT_TRAMPOLINE:
6692 return expand_builtin_init_trampoline (exp);
6693 case BUILT_IN_ADJUST_TRAMPOLINE:
6694 return expand_builtin_adjust_trampoline (exp);
6697 case BUILT_IN_EXECL:
6698 case BUILT_IN_EXECV:
6699 case BUILT_IN_EXECLP:
6700 case BUILT_IN_EXECLE:
6701 case BUILT_IN_EXECVP:
6702 case BUILT_IN_EXECVE:
6703 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6708 case BUILT_IN_FETCH_AND_ADD_1:
6709 case BUILT_IN_FETCH_AND_ADD_2:
6710 case BUILT_IN_FETCH_AND_ADD_4:
6711 case BUILT_IN_FETCH_AND_ADD_8:
6712 case BUILT_IN_FETCH_AND_ADD_16:
6713 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6714 target = expand_builtin_sync_operation (mode, exp, PLUS,
6715 false, target, ignore);
6720 case BUILT_IN_FETCH_AND_SUB_1:
6721 case BUILT_IN_FETCH_AND_SUB_2:
6722 case BUILT_IN_FETCH_AND_SUB_4:
6723 case BUILT_IN_FETCH_AND_SUB_8:
6724 case BUILT_IN_FETCH_AND_SUB_16:
6725 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6726 target = expand_builtin_sync_operation (mode, exp, MINUS,
6727 false, target, ignore);
6732 case BUILT_IN_FETCH_AND_OR_1:
6733 case BUILT_IN_FETCH_AND_OR_2:
6734 case BUILT_IN_FETCH_AND_OR_4:
6735 case BUILT_IN_FETCH_AND_OR_8:
6736 case BUILT_IN_FETCH_AND_OR_16:
6737 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6738 target = expand_builtin_sync_operation (mode, exp, IOR,
6739 false, target, ignore);
6744 case BUILT_IN_FETCH_AND_AND_1:
6745 case BUILT_IN_FETCH_AND_AND_2:
6746 case BUILT_IN_FETCH_AND_AND_4:
6747 case BUILT_IN_FETCH_AND_AND_8:
6748 case BUILT_IN_FETCH_AND_AND_16:
6749 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6750 target = expand_builtin_sync_operation (mode, exp, AND,
6751 false, target, ignore);
6756 case BUILT_IN_FETCH_AND_XOR_1:
6757 case BUILT_IN_FETCH_AND_XOR_2:
6758 case BUILT_IN_FETCH_AND_XOR_4:
6759 case BUILT_IN_FETCH_AND_XOR_8:
6760 case BUILT_IN_FETCH_AND_XOR_16:
6761 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6762 target = expand_builtin_sync_operation (mode, exp, XOR,
6763 false, target, ignore);
6768 case BUILT_IN_FETCH_AND_NAND_1:
6769 case BUILT_IN_FETCH_AND_NAND_2:
6770 case BUILT_IN_FETCH_AND_NAND_4:
6771 case BUILT_IN_FETCH_AND_NAND_8:
6772 case BUILT_IN_FETCH_AND_NAND_16:
6773 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6774 target = expand_builtin_sync_operation (mode, exp, NOT,
6775 false, target, ignore);
6780 case BUILT_IN_ADD_AND_FETCH_1:
6781 case BUILT_IN_ADD_AND_FETCH_2:
6782 case BUILT_IN_ADD_AND_FETCH_4:
6783 case BUILT_IN_ADD_AND_FETCH_8:
6784 case BUILT_IN_ADD_AND_FETCH_16:
6785 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6786 target = expand_builtin_sync_operation (mode, exp, PLUS,
6787 true, target, ignore);
6792 case BUILT_IN_SUB_AND_FETCH_1:
6793 case BUILT_IN_SUB_AND_FETCH_2:
6794 case BUILT_IN_SUB_AND_FETCH_4:
6795 case BUILT_IN_SUB_AND_FETCH_8:
6796 case BUILT_IN_SUB_AND_FETCH_16:
6797 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6798 target = expand_builtin_sync_operation (mode, exp, MINUS,
6799 true, target, ignore);
6804 case BUILT_IN_OR_AND_FETCH_1:
6805 case BUILT_IN_OR_AND_FETCH_2:
6806 case BUILT_IN_OR_AND_FETCH_4:
6807 case BUILT_IN_OR_AND_FETCH_8:
6808 case BUILT_IN_OR_AND_FETCH_16:
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6810 target = expand_builtin_sync_operation (mode, exp, IOR,
6811 true, target, ignore);
6816 case BUILT_IN_AND_AND_FETCH_1:
6817 case BUILT_IN_AND_AND_FETCH_2:
6818 case BUILT_IN_AND_AND_FETCH_4:
6819 case BUILT_IN_AND_AND_FETCH_8:
6820 case BUILT_IN_AND_AND_FETCH_16:
6821 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6822 target = expand_builtin_sync_operation (mode, exp, AND,
6823 true, target, ignore);
6828 case BUILT_IN_XOR_AND_FETCH_1:
6829 case BUILT_IN_XOR_AND_FETCH_2:
6830 case BUILT_IN_XOR_AND_FETCH_4:
6831 case BUILT_IN_XOR_AND_FETCH_8:
6832 case BUILT_IN_XOR_AND_FETCH_16:
6833 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6834 target = expand_builtin_sync_operation (mode, exp, XOR,
6835 true, target, ignore);
6840 case BUILT_IN_NAND_AND_FETCH_1:
6841 case BUILT_IN_NAND_AND_FETCH_2:
6842 case BUILT_IN_NAND_AND_FETCH_4:
6843 case BUILT_IN_NAND_AND_FETCH_8:
6844 case BUILT_IN_NAND_AND_FETCH_16:
6845 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6846 target = expand_builtin_sync_operation (mode, exp, NOT,
6847 true, target, ignore);
6852 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6853 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6854 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6855 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6856 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6857 if (mode == VOIDmode)
6858 mode = TYPE_MODE (boolean_type_node);
6859 if (!target || !register_operand (target, mode))
6860 target = gen_reg_rtx (mode);
6862 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6863 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6868 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6869 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6870 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6871 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6872 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6873 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6874 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6879 case BUILT_IN_LOCK_TEST_AND_SET_1:
6880 case BUILT_IN_LOCK_TEST_AND_SET_2:
6881 case BUILT_IN_LOCK_TEST_AND_SET_4:
6882 case BUILT_IN_LOCK_TEST_AND_SET_8:
6883 case BUILT_IN_LOCK_TEST_AND_SET_16:
6884 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6885 target = expand_builtin_lock_test_and_set (mode, exp, target);
6890 case BUILT_IN_LOCK_RELEASE_1:
6891 case BUILT_IN_LOCK_RELEASE_2:
6892 case BUILT_IN_LOCK_RELEASE_4:
6893 case BUILT_IN_LOCK_RELEASE_8:
6894 case BUILT_IN_LOCK_RELEASE_16:
6895 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6896 expand_builtin_lock_release (mode, exp);
6899 case BUILT_IN_SYNCHRONIZE:
6900 expand_builtin_synchronize ();
6903 case BUILT_IN_OBJECT_SIZE:
6904 return expand_builtin_object_size (exp);
6906 case BUILT_IN_MEMCPY_CHK:
6907 case BUILT_IN_MEMPCPY_CHK:
6908 case BUILT_IN_MEMMOVE_CHK:
6909 case BUILT_IN_MEMSET_CHK:
6910 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6915 case BUILT_IN_STRCPY_CHK:
6916 case BUILT_IN_STPCPY_CHK:
6917 case BUILT_IN_STRNCPY_CHK:
6918 case BUILT_IN_STRCAT_CHK:
6919 case BUILT_IN_STRNCAT_CHK:
6920 case BUILT_IN_SNPRINTF_CHK:
6921 case BUILT_IN_VSNPRINTF_CHK:
6922 maybe_emit_chk_warning (exp, fcode);
6925 case BUILT_IN_SPRINTF_CHK:
6926 case BUILT_IN_VSPRINTF_CHK:
6927 maybe_emit_sprintf_chk_warning (exp, fcode);
6930 default: /* just do library call, if unknown builtin */
6934 /* The switch statement above can drop through to cause the function
6935 to be called normally. */
6936 return expand_call (exp, target, ignore);
6939 /* Determine whether a tree node represents a call to a built-in
6940 function. If the tree T is a call to a built-in function with
6941 the right number of arguments of the appropriate types, return
6942 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6943 Otherwise the return value is END_BUILTINS. */
6945 enum built_in_function
6946 builtin_mathfn_code (tree t)
6948 tree fndecl, arg, parmlist;
6949 tree argtype, parmtype;
6950 call_expr_arg_iterator iter;
6952 if (TREE_CODE (t) != CALL_EXPR
6953 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6954 return END_BUILTINS;
6956 fndecl = get_callee_fndecl (t);
6957 if (fndecl == NULL_TREE
6958 || TREE_CODE (fndecl) != FUNCTION_DECL
6959 || ! DECL_BUILT_IN (fndecl)
6960 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6961 return END_BUILTINS;
6963 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6964 init_call_expr_arg_iterator (t, &iter);
6965 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6967 /* If a function doesn't take a variable number of arguments,
6968 the last element in the list will have type `void'. */
6969 parmtype = TREE_VALUE (parmlist);
6970 if (VOID_TYPE_P (parmtype))
6972 if (more_call_expr_args_p (&iter))
6973 return END_BUILTINS;
6974 return DECL_FUNCTION_CODE (fndecl);
6977 if (! more_call_expr_args_p (&iter))
6978 return END_BUILTINS;
6980 arg = next_call_expr_arg (&iter);
6981 argtype = TREE_TYPE (arg);
6983 if (SCALAR_FLOAT_TYPE_P (parmtype))
6985 if (! SCALAR_FLOAT_TYPE_P (argtype))
6986 return END_BUILTINS;
6988 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6990 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6991 return END_BUILTINS;
6993 else if (POINTER_TYPE_P (parmtype))
6995 if (! POINTER_TYPE_P (argtype))
6996 return END_BUILTINS;
6998 else if (INTEGRAL_TYPE_P (parmtype))
7000 if (! INTEGRAL_TYPE_P (argtype))
7001 return END_BUILTINS;
7004 return END_BUILTINS;
7007 /* Variable-length argument list. */
7008 return DECL_FUNCTION_CODE (fndecl);
7011 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7012 evaluate to a constant. */
7015 fold_builtin_constant_p (tree arg)
7017 /* We return 1 for a numeric type that's known to be a constant
7018 value at compile-time or for an aggregate type that's a
7019 literal constant. */
7022 /* If we know this is a constant, emit the constant of one. */
7023 if (CONSTANT_CLASS_P (arg)
7024 || (TREE_CODE (arg) == CONSTRUCTOR
7025 && TREE_CONSTANT (arg)))
7026 return integer_one_node;
7027 if (TREE_CODE (arg) == ADDR_EXPR)
7029 tree op = TREE_OPERAND (arg, 0);
7030 if (TREE_CODE (op) == STRING_CST
7031 || (TREE_CODE (op) == ARRAY_REF
7032 && integer_zerop (TREE_OPERAND (op, 1))
7033 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7034 return integer_one_node;
7037 /* If this expression has side effects, show we don't know it to be a
7038 constant. Likewise if it's a pointer or aggregate type since in
7039 those case we only want literals, since those are only optimized
7040 when generating RTL, not later.
7041 And finally, if we are compiling an initializer, not code, we
7042 need to return a definite result now; there's not going to be any
7043 more optimization done. */
7044 if (TREE_SIDE_EFFECTS (arg)
7045 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7046 || POINTER_TYPE_P (TREE_TYPE (arg))
7048 || folding_initializer)
7049 return integer_zero_node;
7054 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
7055 comparison against the argument will fold to a constant. In practice,
7056 this means a true constant or the address of a non-weak symbol. */
7059 fold_builtin_expect (tree arg)
7063 /* If the argument isn't invariant, then there's nothing we can do. */
7064 if (!TREE_INVARIANT (arg))
7067 /* If we're looking at an address of a weak decl, then do not fold. */
7070 if (TREE_CODE (inner) == ADDR_EXPR)
7074 inner = TREE_OPERAND (inner, 0);
7076 while (TREE_CODE (inner) == COMPONENT_REF
7077 || TREE_CODE (inner) == ARRAY_REF);
7078 if (DECL_P (inner) && DECL_WEAK (inner))
7082 /* Otherwise, ARG already has the proper type for the return value. */
7086 /* Fold a call to __builtin_classify_type with argument ARG. */
7089 fold_builtin_classify_type (tree arg)
7092 return build_int_cst (NULL_TREE, no_type_class);
7094 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7097 /* Fold a call to __builtin_strlen with argument ARG. */
7100 fold_builtin_strlen (tree arg)
7102 if (!validate_arg (arg, POINTER_TYPE))
7106 tree len = c_strlen (arg, 0);
7110 /* Convert from the internal "sizetype" type to "size_t". */
7112 len = fold_convert (size_type_node, len);
7120 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7123 fold_builtin_inf (tree type, int warn)
7125 REAL_VALUE_TYPE real;
7127 /* __builtin_inff is intended to be usable to define INFINITY on all
7128 targets. If an infinity is not available, INFINITY expands "to a
7129 positive constant of type float that overflows at translation
7130 time", footnote "In this case, using INFINITY will violate the
7131 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7132 Thus we pedwarn to ensure this constraint violation is
7134 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7135 pedwarn ("target format does not support infinity");
7138 return build_real (type, real);
7141 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7144 fold_builtin_nan (tree arg, tree type, int quiet)
7146 REAL_VALUE_TYPE real;
7149 if (!validate_arg (arg, POINTER_TYPE))
7151 str = c_getstr (arg);
7155 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7158 return build_real (type, real);
7161 /* Return true if the floating point expression T has an integer value.
7162 We also allow +Inf, -Inf and NaN to be considered integer values. */
7165 integer_valued_real_p (tree t)
7167 switch (TREE_CODE (t))
7174 case NON_LVALUE_EXPR:
7175 return integer_valued_real_p (TREE_OPERAND (t, 0));
7180 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7187 return integer_valued_real_p (TREE_OPERAND (t, 0))
7188 && integer_valued_real_p (TREE_OPERAND (t, 1));
7191 return integer_valued_real_p (TREE_OPERAND (t, 1))
7192 && integer_valued_real_p (TREE_OPERAND (t, 2));
7195 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7199 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7200 if (TREE_CODE (type) == INTEGER_TYPE)
7202 if (TREE_CODE (type) == REAL_TYPE)
7203 return integer_valued_real_p (TREE_OPERAND (t, 0));
7208 switch (builtin_mathfn_code (t))
7210 CASE_FLT_FN (BUILT_IN_CEIL):
7211 CASE_FLT_FN (BUILT_IN_FLOOR):
7212 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7213 CASE_FLT_FN (BUILT_IN_RINT):
7214 CASE_FLT_FN (BUILT_IN_ROUND):
7215 CASE_FLT_FN (BUILT_IN_TRUNC):
7218 CASE_FLT_FN (BUILT_IN_FMIN):
7219 CASE_FLT_FN (BUILT_IN_FMAX):
7220 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7221 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7234 /* FNDECL is assumed to be a builtin where truncation can be propagated
7235 across (for instance floor((double)f) == (double)floorf (f).
7236 Do the transformation for a call with argument ARG. */
7239 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7241 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7243 if (!validate_arg (arg, REAL_TYPE))
7246 /* Integer rounding functions are idempotent. */
7247 if (fcode == builtin_mathfn_code (arg))
7250 /* If argument is already integer valued, and we don't need to worry
7251 about setting errno, there's no need to perform rounding. */
7252 if (! flag_errno_math && integer_valued_real_p (arg))
7257 tree arg0 = strip_float_extensions (arg);
7258 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7259 tree newtype = TREE_TYPE (arg0);
7262 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7263 && (decl = mathfn_built_in (newtype, fcode)))
7264 return fold_convert (ftype,
7265 build_call_expr (decl, 1,
7266 fold_convert (newtype, arg0)));
7271 /* FNDECL is assumed to be builtin which can narrow the FP type of
7272 the argument, for instance lround((double)f) -> lroundf (f).
7273 Do the transformation for a call with argument ARG. */
7276 fold_fixed_mathfn (tree fndecl, tree arg)
7278 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7280 if (!validate_arg (arg, REAL_TYPE))
7283 /* If argument is already integer valued, and we don't need to worry
7284 about setting errno, there's no need to perform rounding. */
7285 if (! flag_errno_math && integer_valued_real_p (arg))
7286 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7290 tree ftype = TREE_TYPE (arg);
7291 tree arg0 = strip_float_extensions (arg);
7292 tree newtype = TREE_TYPE (arg0);
7295 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7296 && (decl = mathfn_built_in (newtype, fcode)))
7297 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7300 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7301 sizeof (long long) == sizeof (long). */
7302 if (TYPE_PRECISION (long_long_integer_type_node)
7303 == TYPE_PRECISION (long_integer_type_node))
7305 tree newfn = NULL_TREE;
7308 CASE_FLT_FN (BUILT_IN_LLCEIL):
7309 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7312 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7313 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7316 CASE_FLT_FN (BUILT_IN_LLROUND):
7317 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7320 CASE_FLT_FN (BUILT_IN_LLRINT):
7321 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7330 tree newcall = build_call_expr(newfn, 1, arg);
7331 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7338 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7339 return type. Return NULL_TREE if no simplification can be made. */
7342 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7346 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7347 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7350 /* Calculate the result when the argument is a constant. */
7351 if (TREE_CODE (arg) == COMPLEX_CST
7352 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7356 if (TREE_CODE (arg) == COMPLEX_EXPR)
7358 tree real = TREE_OPERAND (arg, 0);
7359 tree imag = TREE_OPERAND (arg, 1);
7361 /* If either part is zero, cabs is fabs of the other. */
7362 if (real_zerop (real))
7363 return fold_build1 (ABS_EXPR, type, imag);
7364 if (real_zerop (imag))
7365 return fold_build1 (ABS_EXPR, type, real);
7367 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7368 if (flag_unsafe_math_optimizations
7369 && operand_equal_p (real, imag, OEP_PURE_SAME))
7371 const REAL_VALUE_TYPE sqrt2_trunc
7372 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7374 return fold_build2 (MULT_EXPR, type,
7375 fold_build1 (ABS_EXPR, type, real),
7376 build_real (type, sqrt2_trunc));
7380 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7381 if (TREE_CODE (arg) == NEGATE_EXPR
7382 || TREE_CODE (arg) == CONJ_EXPR)
7383 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7385 /* Don't do this when optimizing for size. */
7386 if (flag_unsafe_math_optimizations
7387 && optimize && !optimize_size)
7389 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7391 if (sqrtfn != NULL_TREE)
7393 tree rpart, ipart, result;
7395 arg = builtin_save_expr (arg);
7397 rpart = fold_build1 (REALPART_EXPR, type, arg);
7398 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7400 rpart = builtin_save_expr (rpart);
7401 ipart = builtin_save_expr (ipart);
7403 result = fold_build2 (PLUS_EXPR, type,
7404 fold_build2 (MULT_EXPR, type,
7406 fold_build2 (MULT_EXPR, type,
7409 return build_call_expr (sqrtfn, 1, result);
7416 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7417 Return NULL_TREE if no simplification can be made. */
7420 fold_builtin_sqrt (tree arg, tree type)
7423 enum built_in_function fcode;
7426 if (!validate_arg (arg, REAL_TYPE))
7429 /* Calculate the result when the argument is a constant. */
7430 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7433 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7434 fcode = builtin_mathfn_code (arg);
7435 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7437 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7438 arg = fold_build2 (MULT_EXPR, type,
7439 CALL_EXPR_ARG (arg, 0),
7440 build_real (type, dconsthalf));
7441 return build_call_expr (expfn, 1, arg);
7444 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7445 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7447 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7451 tree arg0 = CALL_EXPR_ARG (arg, 0);
7453 /* The inner root was either sqrt or cbrt. */
7454 REAL_VALUE_TYPE dconstroot =
7455 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7457 /* Adjust for the outer root. */
7458 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7459 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7460 tree_root = build_real (type, dconstroot);
7461 return build_call_expr (powfn, 2, arg0, tree_root);
7465 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7466 if (flag_unsafe_math_optimizations
7467 && (fcode == BUILT_IN_POW
7468 || fcode == BUILT_IN_POWF
7469 || fcode == BUILT_IN_POWL))
7471 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7472 tree arg0 = CALL_EXPR_ARG (arg, 0);
7473 tree arg1 = CALL_EXPR_ARG (arg, 1);
7475 if (!tree_expr_nonnegative_p (arg0))
7476 arg0 = build1 (ABS_EXPR, type, arg0);
7477 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7478 build_real (type, dconsthalf));
7479 return build_call_expr (powfn, 2, arg0, narg1);
7485 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7486 Return NULL_TREE if no simplification can be made. */
7489 fold_builtin_cbrt (tree arg, tree type)
7491 const enum built_in_function fcode = builtin_mathfn_code (arg);
7494 if (!validate_arg (arg, REAL_TYPE))
7497 /* Calculate the result when the argument is a constant. */
7498 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7501 if (flag_unsafe_math_optimizations)
7503 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7504 if (BUILTIN_EXPONENT_P (fcode))
7506 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7507 const REAL_VALUE_TYPE third_trunc =
7508 real_value_truncate (TYPE_MODE (type), dconstthird);
7509 arg = fold_build2 (MULT_EXPR, type,
7510 CALL_EXPR_ARG (arg, 0),
7511 build_real (type, third_trunc));
7512 return build_call_expr (expfn, 1, arg);
7515 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7516 if (BUILTIN_SQRT_P (fcode))
7518 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7522 tree arg0 = CALL_EXPR_ARG (arg, 0);
7524 REAL_VALUE_TYPE dconstroot = dconstthird;
7526 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7527 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7528 tree_root = build_real (type, dconstroot);
7529 return build_call_expr (powfn, 2, arg0, tree_root);
7533 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7534 if (BUILTIN_CBRT_P (fcode))
7536 tree arg0 = CALL_EXPR_ARG (arg, 0);
7537 if (tree_expr_nonnegative_p (arg0))
7539 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7544 REAL_VALUE_TYPE dconstroot;
7546 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7547 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7548 tree_root = build_real (type, dconstroot);
7549 return build_call_expr (powfn, 2, arg0, tree_root);
7554 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7555 if (fcode == BUILT_IN_POW
7556 || fcode == BUILT_IN_POWF
7557 || fcode == BUILT_IN_POWL)
7559 tree arg00 = CALL_EXPR_ARG (arg, 0);
7560 tree arg01 = CALL_EXPR_ARG (arg, 1);
7561 if (tree_expr_nonnegative_p (arg00))
7563 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7564 const REAL_VALUE_TYPE dconstroot
7565 = real_value_truncate (TYPE_MODE (type), dconstthird);
7566 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7567 build_real (type, dconstroot));
7568 return build_call_expr (powfn, 2, arg00, narg01);
7575 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7576 TYPE is the type of the return value. Return NULL_TREE if no
7577 simplification can be made. */
7580 fold_builtin_cos (tree arg, tree type, tree fndecl)
7584 if (!validate_arg (arg, REAL_TYPE))
7587 /* Calculate the result when the argument is a constant. */
7588 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7591 /* Optimize cos(-x) into cos (x). */
7592 if ((narg = fold_strip_sign_ops (arg)))
7593 return build_call_expr (fndecl, 1, narg);
7598 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7599 Return NULL_TREE if no simplification can be made. */
7602 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7604 if (validate_arg (arg, REAL_TYPE))
7608 /* Calculate the result when the argument is a constant. */
7609 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7612 /* Optimize cosh(-x) into cosh (x). */
7613 if ((narg = fold_strip_sign_ops (arg)))
7614 return build_call_expr (fndecl, 1, narg);
7620 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7621 Return NULL_TREE if no simplification can be made. */
7624 fold_builtin_tan (tree arg, tree type)
7626 enum built_in_function fcode;
7629 if (!validate_arg (arg, REAL_TYPE))
7632 /* Calculate the result when the argument is a constant. */
7633 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7636 /* Optimize tan(atan(x)) = x. */
7637 fcode = builtin_mathfn_code (arg);
7638 if (flag_unsafe_math_optimizations
7639 && (fcode == BUILT_IN_ATAN
7640 || fcode == BUILT_IN_ATANF
7641 || fcode == BUILT_IN_ATANL))
7642 return CALL_EXPR_ARG (arg, 0);
7647 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7648 NULL_TREE if no simplification can be made. */
7651 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7656 if (!validate_arg (arg0, REAL_TYPE)
7657 || !validate_arg (arg1, POINTER_TYPE)
7658 || !validate_arg (arg2, POINTER_TYPE))
7661 type = TREE_TYPE (arg0);
7663 /* Calculate the result when the argument is a constant. */
7664 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7667 /* Canonicalize sincos to cexpi. */
7668 if (!TARGET_C99_FUNCTIONS)
7670 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7674 call = build_call_expr (fn, 1, arg0);
7675 call = builtin_save_expr (call);
7677 return build2 (COMPOUND_EXPR, type,
7678 build2 (MODIFY_EXPR, void_type_node,
7679 build_fold_indirect_ref (arg1),
7680 build1 (IMAGPART_EXPR, type, call)),
7681 build2 (MODIFY_EXPR, void_type_node,
7682 build_fold_indirect_ref (arg2),
7683 build1 (REALPART_EXPR, type, call)));
7686 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7687 NULL_TREE if no simplification can be made. */
7690 fold_builtin_cexp (tree arg0, tree type)
7693 tree realp, imagp, ifn;
7695 if (!validate_arg (arg0, COMPLEX_TYPE))
7698 rtype = TREE_TYPE (TREE_TYPE (arg0));
7700 /* In case we can figure out the real part of arg0 and it is constant zero
7702 if (!TARGET_C99_FUNCTIONS)
7704 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7708 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7709 && real_zerop (realp))
7711 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7712 return build_call_expr (ifn, 1, narg);
7715 /* In case we can easily decompose real and imaginary parts split cexp
7716 to exp (r) * cexpi (i). */
7717 if (flag_unsafe_math_optimizations
7720 tree rfn, rcall, icall;
7722 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7726 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7730 icall = build_call_expr (ifn, 1, imagp);
7731 icall = builtin_save_expr (icall);
7732 rcall = build_call_expr (rfn, 1, realp);
7733 rcall = builtin_save_expr (rcall);
7734 return build2 (COMPLEX_EXPR, type,
7735 build2 (MULT_EXPR, rtype,
7737 build1 (REALPART_EXPR, rtype, icall)),
7738 build2 (MULT_EXPR, rtype,
7740 build1 (IMAGPART_EXPR, rtype, icall)));
7746 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7747 Return NULL_TREE if no simplification can be made. */
7750 fold_builtin_trunc (tree fndecl, tree arg)
7752 if (!validate_arg (arg, REAL_TYPE))
7755 /* Optimize trunc of constant value. */
7756 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7758 REAL_VALUE_TYPE r, x;
7759 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7761 x = TREE_REAL_CST (arg);
7762 real_trunc (&r, TYPE_MODE (type), &x);
7763 return build_real (type, r);
7766 return fold_trunc_transparent_mathfn (fndecl, arg);
7769 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7770 Return NULL_TREE if no simplification can be made. */
7773 fold_builtin_floor (tree fndecl, tree arg)
7775 if (!validate_arg (arg, REAL_TYPE))
7778 /* Optimize floor of constant value. */
7779 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7783 x = TREE_REAL_CST (arg);
7784 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7786 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7789 real_floor (&r, TYPE_MODE (type), &x);
7790 return build_real (type, r);
7794 /* Fold floor (x) where x is nonnegative to trunc (x). */
7795 if (tree_expr_nonnegative_p (arg))
7797 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7799 return build_call_expr (truncfn, 1, arg);
7802 return fold_trunc_transparent_mathfn (fndecl, arg);
7805 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7806 Return NULL_TREE if no simplification can be made. */
7809 fold_builtin_ceil (tree fndecl, tree arg)
7811 if (!validate_arg (arg, REAL_TYPE))
7814 /* Optimize ceil of constant value. */
7815 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7819 x = TREE_REAL_CST (arg);
7820 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7822 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7825 real_ceil (&r, TYPE_MODE (type), &x);
7826 return build_real (type, r);
7830 return fold_trunc_transparent_mathfn (fndecl, arg);
7833 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7834 Return NULL_TREE if no simplification can be made. */
7837 fold_builtin_round (tree fndecl, tree arg)
7839 if (!validate_arg (arg, REAL_TYPE))
7842 /* Optimize round of constant value. */
7843 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7847 x = TREE_REAL_CST (arg);
7848 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7850 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7853 real_round (&r, TYPE_MODE (type), &x);
7854 return build_real (type, r);
7858 return fold_trunc_transparent_mathfn (fndecl, arg);
7861 /* Fold function call to builtin lround, lroundf or lroundl (or the
7862 corresponding long long versions) and other rounding functions. ARG
7863 is the argument to the call. Return NULL_TREE if no simplification
7867 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7869 if (!validate_arg (arg, REAL_TYPE))
7872 /* Optimize lround of constant value. */
7873 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7875 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7877 if (real_isfinite (&x))
7879 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7880 tree ftype = TREE_TYPE (arg);
7881 unsigned HOST_WIDE_INT lo2;
7882 HOST_WIDE_INT hi, lo;
7885 switch (DECL_FUNCTION_CODE (fndecl))
7887 CASE_FLT_FN (BUILT_IN_LFLOOR):
7888 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7889 real_floor (&r, TYPE_MODE (ftype), &x);
7892 CASE_FLT_FN (BUILT_IN_LCEIL):
7893 CASE_FLT_FN (BUILT_IN_LLCEIL):
7894 real_ceil (&r, TYPE_MODE (ftype), &x);
7897 CASE_FLT_FN (BUILT_IN_LROUND):
7898 CASE_FLT_FN (BUILT_IN_LLROUND):
7899 real_round (&r, TYPE_MODE (ftype), &x);
7906 REAL_VALUE_TO_INT (&lo, &hi, r);
7907 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7908 return build_int_cst_wide (itype, lo2, hi);
7912 switch (DECL_FUNCTION_CODE (fndecl))
7914 CASE_FLT_FN (BUILT_IN_LFLOOR):
7915 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7916 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7917 if (tree_expr_nonnegative_p (arg))
7918 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7924 return fold_fixed_mathfn (fndecl, arg);
7927 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7928 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7929 the argument to the call. Return NULL_TREE if no simplification can
7933 fold_builtin_bitop (tree fndecl, tree arg)
7935 if (!validate_arg (arg, INTEGER_TYPE))
7938 /* Optimize for constant argument. */
7939 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7941 HOST_WIDE_INT hi, width, result;
7942 unsigned HOST_WIDE_INT lo;
7945 type = TREE_TYPE (arg);
7946 width = TYPE_PRECISION (type);
7947 lo = TREE_INT_CST_LOW (arg);
7949 /* Clear all the bits that are beyond the type's precision. */
7950 if (width > HOST_BITS_PER_WIDE_INT)
7952 hi = TREE_INT_CST_HIGH (arg);
7953 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7954 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7959 if (width < HOST_BITS_PER_WIDE_INT)
7960 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7963 switch (DECL_FUNCTION_CODE (fndecl))
7965 CASE_INT_FN (BUILT_IN_FFS):
7967 result = exact_log2 (lo & -lo) + 1;
7969 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7974 CASE_INT_FN (BUILT_IN_CLZ):
7976 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7978 result = width - floor_log2 (lo) - 1;
7979 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7983 CASE_INT_FN (BUILT_IN_CTZ):
7985 result = exact_log2 (lo & -lo);
7987 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7988 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7992 CASE_INT_FN (BUILT_IN_POPCOUNT):
7995 result++, lo &= lo - 1;
7997 result++, hi &= hi - 1;
8000 CASE_INT_FN (BUILT_IN_PARITY):
8003 result++, lo &= lo - 1;
8005 result++, hi &= hi - 1;
8013 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8019 /* Fold function call to builtin_bswap and the long and long long
8020 variants. Return NULL_TREE if no simplification can be made. */
8022 fold_builtin_bswap (tree fndecl, tree arg)
8024 if (! validate_arg (arg, INTEGER_TYPE))
8027 /* Optimize constant value. */
8028 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8030 HOST_WIDE_INT hi, width, r_hi = 0;
8031 unsigned HOST_WIDE_INT lo, r_lo = 0;
8034 type = TREE_TYPE (arg);
8035 width = TYPE_PRECISION (type);
8036 lo = TREE_INT_CST_LOW (arg);
8037 hi = TREE_INT_CST_HIGH (arg);
8039 switch (DECL_FUNCTION_CODE (fndecl))
8041 case BUILT_IN_BSWAP32:
8042 case BUILT_IN_BSWAP64:
8046 for (s = 0; s < width; s += 8)
8048 int d = width - s - 8;
8049 unsigned HOST_WIDE_INT byte;
8051 if (s < HOST_BITS_PER_WIDE_INT)
8052 byte = (lo >> s) & 0xff;
8054 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8056 if (d < HOST_BITS_PER_WIDE_INT)
8059 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8069 if (width < HOST_BITS_PER_WIDE_INT)
8070 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8072 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8078 /* Return true if EXPR is the real constant contained in VALUE. */
8081 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8085 return ((TREE_CODE (expr) == REAL_CST
8086 && !TREE_OVERFLOW (expr)
8087 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8088 || (TREE_CODE (expr) == COMPLEX_CST
8089 && real_dconstp (TREE_REALPART (expr), value)
8090 && real_zerop (TREE_IMAGPART (expr))));
8093 /* A subroutine of fold_builtin to fold the various logarithmic
8094 functions. Return NULL_TREE if no simplification can me made.
8095 FUNC is the corresponding MPFR logarithm function. */
8098 fold_builtin_logarithm (tree fndecl, tree arg,
8099 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8101 if (validate_arg (arg, REAL_TYPE))
8103 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8105 const enum built_in_function fcode = builtin_mathfn_code (arg);
8107 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8108 instead we'll look for 'e' truncated to MODE. So only do
8109 this if flag_unsafe_math_optimizations is set. */
8110 if (flag_unsafe_math_optimizations && func == mpfr_log)
8112 const REAL_VALUE_TYPE e_truncated =
8113 real_value_truncate (TYPE_MODE (type), dconste);
8114 if (real_dconstp (arg, &e_truncated))
8115 return build_real (type, dconst1);
8118 /* Calculate the result when the argument is a constant. */
8119 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8122 /* Special case, optimize logN(expN(x)) = x. */
8123 if (flag_unsafe_math_optimizations
8124 && ((func == mpfr_log
8125 && (fcode == BUILT_IN_EXP
8126 || fcode == BUILT_IN_EXPF
8127 || fcode == BUILT_IN_EXPL))
8128 || (func == mpfr_log2
8129 && (fcode == BUILT_IN_EXP2
8130 || fcode == BUILT_IN_EXP2F
8131 || fcode == BUILT_IN_EXP2L))
8132 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8133 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8135 /* Optimize logN(func()) for various exponential functions. We
8136 want to determine the value "x" and the power "exponent" in
8137 order to transform logN(x**exponent) into exponent*logN(x). */
8138 if (flag_unsafe_math_optimizations)
8140 tree exponent = 0, x = 0;
8144 CASE_FLT_FN (BUILT_IN_EXP):
8145 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8146 x = build_real (type,
8147 real_value_truncate (TYPE_MODE (type), dconste));
8148 exponent = CALL_EXPR_ARG (arg, 0);
8150 CASE_FLT_FN (BUILT_IN_EXP2):
8151 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8152 x = build_real (type, dconst2);
8153 exponent = CALL_EXPR_ARG (arg, 0);
8155 CASE_FLT_FN (BUILT_IN_EXP10):
8156 CASE_FLT_FN (BUILT_IN_POW10):
8157 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8158 x = build_real (type, dconst10);
8159 exponent = CALL_EXPR_ARG (arg, 0);
8161 CASE_FLT_FN (BUILT_IN_SQRT):
8162 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8163 x = CALL_EXPR_ARG (arg, 0);
8164 exponent = build_real (type, dconsthalf);
8166 CASE_FLT_FN (BUILT_IN_CBRT):
8167 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8168 x = CALL_EXPR_ARG (arg, 0);
8169 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8172 CASE_FLT_FN (BUILT_IN_POW):
8173 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8174 x = CALL_EXPR_ARG (arg, 0);
8175 exponent = CALL_EXPR_ARG (arg, 1);
8181 /* Now perform the optimization. */
8184 tree logfn = build_call_expr (fndecl, 1, x);
8185 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8193 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8194 NULL_TREE if no simplification can be made. */
8197 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8199 tree res, narg0, narg1;
8201 if (!validate_arg (arg0, REAL_TYPE)
8202 || !validate_arg (arg1, REAL_TYPE))
8205 /* Calculate the result when the argument is a constant. */
8206 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8209 /* If either argument to hypot has a negate or abs, strip that off.
8210 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8211 narg0 = fold_strip_sign_ops (arg0);
8212 narg1 = fold_strip_sign_ops (arg1);
8215 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8216 narg1 ? narg1 : arg1);
8219 /* If either argument is zero, hypot is fabs of the other. */
8220 if (real_zerop (arg0))
8221 return fold_build1 (ABS_EXPR, type, arg1);
8222 else if (real_zerop (arg1))
8223 return fold_build1 (ABS_EXPR, type, arg0);
8225 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8226 if (flag_unsafe_math_optimizations
8227 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8229 const REAL_VALUE_TYPE sqrt2_trunc
8230 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8231 return fold_build2 (MULT_EXPR, type,
8232 fold_build1 (ABS_EXPR, type, arg0),
8233 build_real (type, sqrt2_trunc));
8240 /* Fold a builtin function call to pow, powf, or powl. Return
8241 NULL_TREE if no simplification can be made. */
8243 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8247 if (!validate_arg (arg0, REAL_TYPE)
8248 || !validate_arg (arg1, REAL_TYPE))
8251 /* Calculate the result when the argument is a constant. */
8252 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8255 /* Optimize pow(1.0,y) = 1.0. */
8256 if (real_onep (arg0))
8257 return omit_one_operand (type, build_real (type, dconst1), arg1);
8259 if (TREE_CODE (arg1) == REAL_CST
8260 && !TREE_OVERFLOW (arg1))
8262 REAL_VALUE_TYPE cint;
8266 c = TREE_REAL_CST (arg1);
8268 /* Optimize pow(x,0.0) = 1.0. */
8269 if (REAL_VALUES_EQUAL (c, dconst0))
8270 return omit_one_operand (type, build_real (type, dconst1),
8273 /* Optimize pow(x,1.0) = x. */
8274 if (REAL_VALUES_EQUAL (c, dconst1))
8277 /* Optimize pow(x,-1.0) = 1.0/x. */
8278 if (REAL_VALUES_EQUAL (c, dconstm1))
8279 return fold_build2 (RDIV_EXPR, type,
8280 build_real (type, dconst1), arg0);
8282 /* Optimize pow(x,0.5) = sqrt(x). */
8283 if (flag_unsafe_math_optimizations
8284 && REAL_VALUES_EQUAL (c, dconsthalf))
8286 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8288 if (sqrtfn != NULL_TREE)
8289 return build_call_expr (sqrtfn, 1, arg0);
8292 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8293 if (flag_unsafe_math_optimizations)
8295 const REAL_VALUE_TYPE dconstroot
8296 = real_value_truncate (TYPE_MODE (type), dconstthird);
8298 if (REAL_VALUES_EQUAL (c, dconstroot))
8300 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8301 if (cbrtfn != NULL_TREE)
8302 return build_call_expr (cbrtfn, 1, arg0);
8306 /* Check for an integer exponent. */
8307 n = real_to_integer (&c);
8308 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8309 if (real_identical (&c, &cint))
8311 /* Attempt to evaluate pow at compile-time. */
8312 if (TREE_CODE (arg0) == REAL_CST
8313 && !TREE_OVERFLOW (arg0))
8318 x = TREE_REAL_CST (arg0);
8319 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8320 if (flag_unsafe_math_optimizations || !inexact)
8321 return build_real (type, x);
8324 /* Strip sign ops from even integer powers. */
8325 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8327 tree narg0 = fold_strip_sign_ops (arg0);
8329 return build_call_expr (fndecl, 2, narg0, arg1);
8334 if (flag_unsafe_math_optimizations)
8336 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8338 /* Optimize pow(expN(x),y) = expN(x*y). */
8339 if (BUILTIN_EXPONENT_P (fcode))
8341 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8342 tree arg = CALL_EXPR_ARG (arg0, 0);
8343 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8344 return build_call_expr (expfn, 1, arg);
8347 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8348 if (BUILTIN_SQRT_P (fcode))
8350 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8351 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8352 build_real (type, dconsthalf));
8353 return build_call_expr (fndecl, 2, narg0, narg1);
8356 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8357 if (BUILTIN_CBRT_P (fcode))
8359 tree arg = CALL_EXPR_ARG (arg0, 0);
8360 if (tree_expr_nonnegative_p (arg))
8362 const REAL_VALUE_TYPE dconstroot
8363 = real_value_truncate (TYPE_MODE (type), dconstthird);
8364 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8365 build_real (type, dconstroot));
8366 return build_call_expr (fndecl, 2, arg, narg1);
8370 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8371 if (fcode == BUILT_IN_POW
8372 || fcode == BUILT_IN_POWF
8373 || fcode == BUILT_IN_POWL)
8375 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8376 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8377 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8378 return build_call_expr (fndecl, 2, arg00, narg1);
8385 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8386 Return NULL_TREE if no simplification can be made. */
8388 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8389 tree arg0, tree arg1, tree type)
8391 if (!validate_arg (arg0, REAL_TYPE)
8392 || !validate_arg (arg1, INTEGER_TYPE))
8395 /* Optimize pow(1.0,y) = 1.0. */
8396 if (real_onep (arg0))
8397 return omit_one_operand (type, build_real (type, dconst1), arg1);
8399 if (host_integerp (arg1, 0))
8401 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8403 /* Evaluate powi at compile-time. */
8404 if (TREE_CODE (arg0) == REAL_CST
8405 && !TREE_OVERFLOW (arg0))
8408 x = TREE_REAL_CST (arg0);
8409 real_powi (&x, TYPE_MODE (type), &x, c);
8410 return build_real (type, x);
8413 /* Optimize pow(x,0) = 1.0. */
8415 return omit_one_operand (type, build_real (type, dconst1),
8418 /* Optimize pow(x,1) = x. */
8422 /* Optimize pow(x,-1) = 1.0/x. */
8424 return fold_build2 (RDIV_EXPR, type,
8425 build_real (type, dconst1), arg0);
8431 /* A subroutine of fold_builtin to fold the various exponent
8432 functions. Return NULL_TREE if no simplification can be made.
8433 FUNC is the corresponding MPFR exponent function. */
8436 fold_builtin_exponent (tree fndecl, tree arg,
8437 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8439 if (validate_arg (arg, REAL_TYPE))
8441 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8444 /* Calculate the result when the argument is a constant. */
8445 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8448 /* Optimize expN(logN(x)) = x. */
8449 if (flag_unsafe_math_optimizations)
8451 const enum built_in_function fcode = builtin_mathfn_code (arg);
8453 if ((func == mpfr_exp
8454 && (fcode == BUILT_IN_LOG
8455 || fcode == BUILT_IN_LOGF
8456 || fcode == BUILT_IN_LOGL))
8457 || (func == mpfr_exp2
8458 && (fcode == BUILT_IN_LOG2
8459 || fcode == BUILT_IN_LOG2F
8460 || fcode == BUILT_IN_LOG2L))
8461 || (func == mpfr_exp10
8462 && (fcode == BUILT_IN_LOG10
8463 || fcode == BUILT_IN_LOG10F
8464 || fcode == BUILT_IN_LOG10L)))
8465 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8472 /* Return true if VAR is a VAR_DECL or a component thereof. */
8475 var_decl_component_p (tree var)
8478 while (handled_component_p (inner))
8479 inner = TREE_OPERAND (inner, 0);
8480 return SSA_VAR_P (inner);
8483 /* Fold function call to builtin memset. Return
8484 NULL_TREE if no simplification can be made. */
8487 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8490 unsigned HOST_WIDE_INT length, cval;
8492 if (! validate_arg (dest, POINTER_TYPE)
8493 || ! validate_arg (c, INTEGER_TYPE)
8494 || ! validate_arg (len, INTEGER_TYPE))
8497 if (! host_integerp (len, 1))
8500 /* If the LEN parameter is zero, return DEST. */
8501 if (integer_zerop (len))
8502 return omit_one_operand (type, dest, c);
8504 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8509 if (TREE_CODE (var) != ADDR_EXPR)
8512 var = TREE_OPERAND (var, 0);
8513 if (TREE_THIS_VOLATILE (var))
8516 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8517 && !POINTER_TYPE_P (TREE_TYPE (var)))
8520 if (! var_decl_component_p (var))
8523 length = tree_low_cst (len, 1);
8524 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8525 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8529 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8532 if (integer_zerop (c))
8536 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8539 cval = tree_low_cst (c, 1);
8543 cval |= (cval << 31) << 1;
8546 ret = build_int_cst_type (TREE_TYPE (var), cval);
8547 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8551 return omit_one_operand (type, dest, ret);
8554 /* Fold function call to builtin memset. Return
8555 NULL_TREE if no simplification can be made. */
8558 fold_builtin_bzero (tree dest, tree size, bool ignore)
8560 if (! validate_arg (dest, POINTER_TYPE)
8561 || ! validate_arg (size, INTEGER_TYPE))
8567 /* New argument list transforming bzero(ptr x, int y) to
8568 memset(ptr x, int 0, size_t y). This is done this way
8569 so that if it isn't expanded inline, we fallback to
8570 calling bzero instead of memset. */
8572 return fold_builtin_memset (dest, integer_zero_node,
8573 fold_convert (sizetype, size),
8574 void_type_node, ignore);
8577 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8578 NULL_TREE if no simplification can be made.
8579 If ENDP is 0, return DEST (like memcpy).
8580 If ENDP is 1, return DEST+LEN (like mempcpy).
8581 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8582 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8586 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8588 tree destvar, srcvar, expr;
8590 if (! validate_arg (dest, POINTER_TYPE)
8591 || ! validate_arg (src, POINTER_TYPE)
8592 || ! validate_arg (len, INTEGER_TYPE))
8595 /* If the LEN parameter is zero, return DEST. */
8596 if (integer_zerop (len))
8597 return omit_one_operand (type, dest, src);
8599 /* If SRC and DEST are the same (and not volatile), return
8600 DEST{,+LEN,+LEN-1}. */
8601 if (operand_equal_p (src, dest, 0))
8605 tree srctype, desttype;
8608 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8609 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8611 /* Both DEST and SRC must be pointer types.
8612 ??? This is what old code did. Is the testing for pointer types
8615 If either SRC is readonly or length is 1, we can use memcpy. */
8616 if (dest_align && src_align
8617 && (readonly_data_expr (src)
8618 || (host_integerp (len, 1)
8619 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8620 tree_low_cst (len, 1)))))
8622 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8625 return build_call_expr (fn, 3, dest, src, len);
8630 if (!host_integerp (len, 0))
8633 This logic lose for arguments like (type *)malloc (sizeof (type)),
8634 since we strip the casts of up to VOID return value from malloc.
8635 Perhaps we ought to inherit type from non-VOID argument here? */
8638 srctype = TREE_TYPE (TREE_TYPE (src));
8639 desttype = TREE_TYPE (TREE_TYPE (dest));
8640 if (!srctype || !desttype
8641 || !TYPE_SIZE_UNIT (srctype)
8642 || !TYPE_SIZE_UNIT (desttype)
8643 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8644 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8645 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8646 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8649 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8650 < (int) TYPE_ALIGN (desttype)
8651 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8652 < (int) TYPE_ALIGN (srctype)))
8656 dest = builtin_save_expr (dest);
8658 srcvar = build_fold_indirect_ref (src);
8659 if (TREE_THIS_VOLATILE (srcvar))
8661 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8663 /* With memcpy, it is possible to bypass aliasing rules, so without
8664 this check i. e. execute/20060930-2.c would be misoptimized, because
8665 it use conflicting alias set to hold argument for the memcpy call.
8666 This check is probably unnecesary with -fno-strict-aliasing.
8667 Similarly for destvar. See also PR29286. */
8668 if (!var_decl_component_p (srcvar)
8669 /* Accept: memcpy (*char_var, "test", 1); that simplify
8671 || is_gimple_min_invariant (srcvar)
8672 || readonly_data_expr (src))
8675 destvar = build_fold_indirect_ref (dest);
8676 if (TREE_THIS_VOLATILE (destvar))
8678 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8680 if (!var_decl_component_p (destvar))
8683 if (srctype == desttype
8684 || (gimple_in_ssa_p (cfun)
8685 && useless_type_conversion_p (desttype, srctype)))
8687 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8688 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8689 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8690 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8691 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8693 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8694 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8700 if (endp == 0 || endp == 3)
8701 return omit_one_operand (type, dest, expr);
8707 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8710 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8711 dest = fold_convert (type, dest);
8713 dest = omit_one_operand (type, dest, expr);
8717 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8718 If LEN is not NULL, it represents the length of the string to be
8719 copied. Return NULL_TREE if no simplification can be made. */
8722 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8726 if (!validate_arg (dest, POINTER_TYPE)
8727 || !validate_arg (src, POINTER_TYPE))
8730 /* If SRC and DEST are the same (and not volatile), return DEST. */
8731 if (operand_equal_p (src, dest, 0))
8732 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8737 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8743 len = c_strlen (src, 1);
8744 if (! len || TREE_SIDE_EFFECTS (len))
8748 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8749 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8750 build_call_expr (fn, 3, dest, src, len));
8753 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8754 If SLEN is not NULL, it represents the length of the source string.
8755 Return NULL_TREE if no simplification can be made. */
8758 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8762 if (!validate_arg (dest, POINTER_TYPE)
8763 || !validate_arg (src, POINTER_TYPE)
8764 || !validate_arg (len, INTEGER_TYPE))
8767 /* If the LEN parameter is zero, return DEST. */
8768 if (integer_zerop (len))
8769 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8771 /* We can't compare slen with len as constants below if len is not a
8773 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8777 slen = c_strlen (src, 1);
8779 /* Now, we must be passed a constant src ptr parameter. */
8780 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8783 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8785 /* We do not support simplification of this case, though we do
8786 support it when expanding trees into RTL. */
8787 /* FIXME: generate a call to __builtin_memset. */
8788 if (tree_int_cst_lt (slen, len))
8791 /* OK transform into builtin memcpy. */
8792 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8795 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8796 build_call_expr (fn, 3, dest, src, len));
8799 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8800 arguments to the call, and TYPE is its return type.
8801 Return NULL_TREE if no simplification can be made. */
8804 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8806 if (!validate_arg (arg1, POINTER_TYPE)
8807 || !validate_arg (arg2, INTEGER_TYPE)
8808 || !validate_arg (len, INTEGER_TYPE))
8814 if (TREE_CODE (arg2) != INTEGER_CST
8815 || !host_integerp (len, 1))
8818 p1 = c_getstr (arg1);
8819 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8825 if (target_char_cast (arg2, &c))
8828 r = memchr (p1, c, tree_low_cst (len, 1));
8831 return build_int_cst (TREE_TYPE (arg1), 0);
8833 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8835 return fold_convert (type, tem);
8841 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8842 Return NULL_TREE if no simplification can be made. */
8845 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8847 const char *p1, *p2;
8849 if (!validate_arg (arg1, POINTER_TYPE)
8850 || !validate_arg (arg2, POINTER_TYPE)
8851 || !validate_arg (len, INTEGER_TYPE))
8854 /* If the LEN parameter is zero, return zero. */
8855 if (integer_zerop (len))
8856 return omit_two_operands (integer_type_node, integer_zero_node,
8859 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8860 if (operand_equal_p (arg1, arg2, 0))
8861 return omit_one_operand (integer_type_node, integer_zero_node, len);
8863 p1 = c_getstr (arg1);
8864 p2 = c_getstr (arg2);
8866 /* If all arguments are constant, and the value of len is not greater
8867 than the lengths of arg1 and arg2, evaluate at compile-time. */
8868 if (host_integerp (len, 1) && p1 && p2
8869 && compare_tree_int (len, strlen (p1) + 1) <= 0
8870 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8872 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8875 return integer_one_node;
8877 return integer_minus_one_node;
8879 return integer_zero_node;
8882 /* If len parameter is one, return an expression corresponding to
8883 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8884 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8886 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8887 tree cst_uchar_ptr_node
8888 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8890 tree ind1 = fold_convert (integer_type_node,
8891 build1 (INDIRECT_REF, cst_uchar_node,
8892 fold_convert (cst_uchar_ptr_node,
8894 tree ind2 = fold_convert (integer_type_node,
8895 build1 (INDIRECT_REF, cst_uchar_node,
8896 fold_convert (cst_uchar_ptr_node,
8898 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8904 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8905 Return NULL_TREE if no simplification can be made. */
8908 fold_builtin_strcmp (tree arg1, tree arg2)
8910 const char *p1, *p2;
8912 if (!validate_arg (arg1, POINTER_TYPE)
8913 || !validate_arg (arg2, POINTER_TYPE))
8916 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8917 if (operand_equal_p (arg1, arg2, 0))
8918 return integer_zero_node;
8920 p1 = c_getstr (arg1);
8921 p2 = c_getstr (arg2);
8925 const int i = strcmp (p1, p2);
8927 return integer_minus_one_node;
8929 return integer_one_node;
8931 return integer_zero_node;
8934 /* If the second arg is "", return *(const unsigned char*)arg1. */
8935 if (p2 && *p2 == '\0')
8937 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8938 tree cst_uchar_ptr_node
8939 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8941 return fold_convert (integer_type_node,
8942 build1 (INDIRECT_REF, cst_uchar_node,
8943 fold_convert (cst_uchar_ptr_node,
8947 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8948 if (p1 && *p1 == '\0')
8950 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8951 tree cst_uchar_ptr_node
8952 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8954 tree temp = fold_convert (integer_type_node,
8955 build1 (INDIRECT_REF, cst_uchar_node,
8956 fold_convert (cst_uchar_ptr_node,
8958 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8964 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8965 Return NULL_TREE if no simplification can be made. */
8968 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8970 const char *p1, *p2;
8972 if (!validate_arg (arg1, POINTER_TYPE)
8973 || !validate_arg (arg2, POINTER_TYPE)
8974 || !validate_arg (len, INTEGER_TYPE))
8977 /* If the LEN parameter is zero, return zero. */
8978 if (integer_zerop (len))
8979 return omit_two_operands (integer_type_node, integer_zero_node,
8982 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8983 if (operand_equal_p (arg1, arg2, 0))
8984 return omit_one_operand (integer_type_node, integer_zero_node, len);
8986 p1 = c_getstr (arg1);
8987 p2 = c_getstr (arg2);
8989 if (host_integerp (len, 1) && p1 && p2)
8991 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8993 return integer_one_node;
8995 return integer_minus_one_node;
8997 return integer_zero_node;
9000 /* If the second arg is "", and the length is greater than zero,
9001 return *(const unsigned char*)arg1. */
9002 if (p2 && *p2 == '\0'
9003 && TREE_CODE (len) == INTEGER_CST
9004 && tree_int_cst_sgn (len) == 1)
9006 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9007 tree cst_uchar_ptr_node
9008 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9010 return fold_convert (integer_type_node,
9011 build1 (INDIRECT_REF, cst_uchar_node,
9012 fold_convert (cst_uchar_ptr_node,
9016 /* If the first arg is "", and the length is greater than zero,
9017 return -*(const unsigned char*)arg2. */
9018 if (p1 && *p1 == '\0'
9019 && TREE_CODE (len) == INTEGER_CST
9020 && tree_int_cst_sgn (len) == 1)
9022 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9023 tree cst_uchar_ptr_node
9024 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9026 tree temp = fold_convert (integer_type_node,
9027 build1 (INDIRECT_REF, cst_uchar_node,
9028 fold_convert (cst_uchar_ptr_node,
9030 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9033 /* If len parameter is one, return an expression corresponding to
9034 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9035 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9037 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9038 tree cst_uchar_ptr_node
9039 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9041 tree ind1 = fold_convert (integer_type_node,
9042 build1 (INDIRECT_REF, cst_uchar_node,
9043 fold_convert (cst_uchar_ptr_node,
9045 tree ind2 = fold_convert (integer_type_node,
9046 build1 (INDIRECT_REF, cst_uchar_node,
9047 fold_convert (cst_uchar_ptr_node,
9049 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9055 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9056 ARG. Return NULL_TREE if no simplification can be made. */
9059 fold_builtin_signbit (tree arg, tree type)
9063 if (!validate_arg (arg, REAL_TYPE))
9066 /* If ARG is a compile-time constant, determine the result. */
9067 if (TREE_CODE (arg) == REAL_CST
9068 && !TREE_OVERFLOW (arg))
9072 c = TREE_REAL_CST (arg);
9073 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9074 return fold_convert (type, temp);
9077 /* If ARG is non-negative, the result is always zero. */
9078 if (tree_expr_nonnegative_p (arg))
9079 return omit_one_operand (type, integer_zero_node, arg);
9081 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9082 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9083 return fold_build2 (LT_EXPR, type, arg,
9084 build_real (TREE_TYPE (arg), dconst0));
9089 /* Fold function call to builtin copysign, copysignf or copysignl with
9090 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9094 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9098 if (!validate_arg (arg1, REAL_TYPE)
9099 || !validate_arg (arg2, REAL_TYPE))
9102 /* copysign(X,X) is X. */
9103 if (operand_equal_p (arg1, arg2, 0))
9104 return fold_convert (type, arg1);
9106 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9107 if (TREE_CODE (arg1) == REAL_CST
9108 && TREE_CODE (arg2) == REAL_CST
9109 && !TREE_OVERFLOW (arg1)
9110 && !TREE_OVERFLOW (arg2))
9112 REAL_VALUE_TYPE c1, c2;
9114 c1 = TREE_REAL_CST (arg1);
9115 c2 = TREE_REAL_CST (arg2);
9116 /* c1.sign := c2.sign. */
9117 real_copysign (&c1, &c2);
9118 return build_real (type, c1);
9121 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9122 Remember to evaluate Y for side-effects. */
9123 if (tree_expr_nonnegative_p (arg2))
9124 return omit_one_operand (type,
9125 fold_build1 (ABS_EXPR, type, arg1),
9128 /* Strip sign changing operations for the first argument. */
9129 tem = fold_strip_sign_ops (arg1);
9131 return build_call_expr (fndecl, 2, tem, arg2);
9136 /* Fold a call to builtin isascii with argument ARG. */
9139 fold_builtin_isascii (tree arg)
9141 if (!validate_arg (arg, INTEGER_TYPE))
9145 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9146 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9147 build_int_cst (NULL_TREE,
9148 ~ (unsigned HOST_WIDE_INT) 0x7f));
9149 return fold_build2 (EQ_EXPR, integer_type_node,
9150 arg, integer_zero_node);
9154 /* Fold a call to builtin toascii with argument ARG. */
9157 fold_builtin_toascii (tree arg)
9159 if (!validate_arg (arg, INTEGER_TYPE))
9162 /* Transform toascii(c) -> (c & 0x7f). */
9163 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9164 build_int_cst (NULL_TREE, 0x7f));
9167 /* Fold a call to builtin isdigit with argument ARG. */
9170 fold_builtin_isdigit (tree arg)
9172 if (!validate_arg (arg, INTEGER_TYPE))
9176 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9177 /* According to the C standard, isdigit is unaffected by locale.
9178 However, it definitely is affected by the target character set. */
9179 unsigned HOST_WIDE_INT target_digit0
9180 = lang_hooks.to_target_charset ('0');
9182 if (target_digit0 == 0)
9185 arg = fold_convert (unsigned_type_node, arg);
9186 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9187 build_int_cst (unsigned_type_node, target_digit0));
9188 return fold_build2 (LE_EXPR, integer_type_node, arg,
9189 build_int_cst (unsigned_type_node, 9));
9193 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9196 fold_builtin_fabs (tree arg, tree type)
9198 if (!validate_arg (arg, REAL_TYPE))
9201 arg = fold_convert (type, arg);
9202 if (TREE_CODE (arg) == REAL_CST)
9203 return fold_abs_const (arg, type);
9204 return fold_build1 (ABS_EXPR, type, arg);
9207 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9210 fold_builtin_abs (tree arg, tree type)
9212 if (!validate_arg (arg, INTEGER_TYPE))
9215 arg = fold_convert (type, arg);
9216 if (TREE_CODE (arg) == INTEGER_CST)
9217 return fold_abs_const (arg, type);
9218 return fold_build1 (ABS_EXPR, type, arg);
9221 /* Fold a call to builtin fmin or fmax. */
9224 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9226 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9228 /* Calculate the result when the argument is a constant. */
9229 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9234 /* If either argument is NaN, return the other one. Avoid the
9235 transformation if we get (and honor) a signalling NaN. Using
9236 omit_one_operand() ensures we create a non-lvalue. */
9237 if (TREE_CODE (arg0) == REAL_CST
9238 && real_isnan (&TREE_REAL_CST (arg0))
9239 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9240 || ! TREE_REAL_CST (arg0).signalling))
9241 return omit_one_operand (type, arg1, arg0);
9242 if (TREE_CODE (arg1) == REAL_CST
9243 && real_isnan (&TREE_REAL_CST (arg1))
9244 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9245 || ! TREE_REAL_CST (arg1).signalling))
9246 return omit_one_operand (type, arg0, arg1);
9248 /* Transform fmin/fmax(x,x) -> x. */
9249 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9250 return omit_one_operand (type, arg0, arg1);
9252 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9253 functions to return the numeric arg if the other one is NaN.
9254 These tree codes don't honor that, so only transform if
9255 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9256 handled, so we don't have to worry about it either. */
9257 if (flag_finite_math_only)
9258 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9259 fold_convert (type, arg0),
9260 fold_convert (type, arg1));
9265 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9268 fold_builtin_carg (tree arg, tree type)
9270 if (validate_arg (arg, COMPLEX_TYPE))
9272 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9276 tree new_arg = builtin_save_expr (arg);
9277 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9278 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9279 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9286 /* Fold a call to builtin logb/ilogb. */
9289 fold_builtin_logb (tree arg, tree rettype)
9291 if (! validate_arg (arg, REAL_TYPE))
9296 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9298 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9304 /* If arg is Inf or NaN and we're logb, return it. */
9305 if (TREE_CODE (rettype) == REAL_TYPE)
9306 return fold_convert (rettype, arg);
9307 /* Fall through... */
9309 /* Zero may set errno and/or raise an exception for logb, also
9310 for ilogb we don't know FP_ILOGB0. */
9313 /* For normal numbers, proceed iff radix == 2. In GCC,
9314 normalized significands are in the range [0.5, 1.0). We
9315 want the exponent as if they were [1.0, 2.0) so get the
9316 exponent and subtract 1. */
9317 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9318 return fold_convert (rettype, build_int_cst (NULL_TREE,
9319 REAL_EXP (value)-1));
9327 /* Fold a call to builtin significand, if radix == 2. */
9330 fold_builtin_significand (tree arg, tree rettype)
9332 if (! validate_arg (arg, REAL_TYPE))
9337 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9339 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9346 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9347 return fold_convert (rettype, arg);
9349 /* For normal numbers, proceed iff radix == 2. */
9350 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9352 REAL_VALUE_TYPE result = *value;
9353 /* In GCC, normalized significands are in the range [0.5,
9354 1.0). We want them to be [1.0, 2.0) so set the
9356 SET_REAL_EXP (&result, 1);
9357 return build_real (rettype, result);
9366 /* Fold a call to builtin frexp, we can assume the base is 2. */
9369 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9371 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9376 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9379 arg1 = build_fold_indirect_ref (arg1);
9381 /* Proceed if a valid pointer type was passed in. */
9382 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9384 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9390 /* For +-0, return (*exp = 0, +-0). */
9391 exp = integer_zero_node;
9396 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9397 return omit_one_operand (rettype, arg0, arg1);
9400 /* Since the frexp function always expects base 2, and in
9401 GCC normalized significands are already in the range
9402 [0.5, 1.0), we have exactly what frexp wants. */
9403 REAL_VALUE_TYPE frac_rvt = *value;
9404 SET_REAL_EXP (&frac_rvt, 0);
9405 frac = build_real (rettype, frac_rvt);
9406 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9413 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9414 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9415 TREE_SIDE_EFFECTS (arg1) = 1;
9416 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9422 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9423 then we can assume the base is two. If it's false, then we have to
9424 check the mode of the TYPE parameter in certain cases. */
9427 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9429 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9434 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9435 if (real_zerop (arg0) || integer_zerop (arg1)
9436 || (TREE_CODE (arg0) == REAL_CST
9437 && !real_isfinite (&TREE_REAL_CST (arg0))))
9438 return omit_one_operand (type, arg0, arg1);
9440 /* If both arguments are constant, then try to evaluate it. */
9441 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9442 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9443 && host_integerp (arg1, 0))
9445 /* Bound the maximum adjustment to twice the range of the
9446 mode's valid exponents. Use abs to ensure the range is
9447 positive as a sanity check. */
9448 const long max_exp_adj = 2 *
9449 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9450 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9452 /* Get the user-requested adjustment. */
9453 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9455 /* The requested adjustment must be inside this range. This
9456 is a preliminary cap to avoid things like overflow, we
9457 may still fail to compute the result for other reasons. */
9458 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9460 REAL_VALUE_TYPE initial_result;
9462 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9464 /* Ensure we didn't overflow. */
9465 if (! real_isinf (&initial_result))
9467 const REAL_VALUE_TYPE trunc_result
9468 = real_value_truncate (TYPE_MODE (type), initial_result);
9470 /* Only proceed if the target mode can hold the
9472 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9473 return build_real (type, trunc_result);
9482 /* Fold a call to builtin modf. */
9485 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9487 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9492 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9495 arg1 = build_fold_indirect_ref (arg1);
9497 /* Proceed if a valid pointer type was passed in. */
9498 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9500 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9501 REAL_VALUE_TYPE trunc, frac;
9507 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9508 trunc = frac = *value;
9511 /* For +-Inf, return (*arg1 = arg0, +-0). */
9513 frac.sign = value->sign;
9517 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9518 real_trunc (&trunc, VOIDmode, value);
9519 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9520 /* If the original number was negative and already
9521 integral, then the fractional part is -0.0. */
9522 if (value->sign && frac.cl == rvc_zero)
9523 frac.sign = value->sign;
9527 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9528 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9529 build_real (rettype, trunc));
9530 TREE_SIDE_EFFECTS (arg1) = 1;
9531 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9532 build_real (rettype, frac));
9538 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9539 ARG is the argument for the call. */
9542 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9544 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9547 if (!validate_arg (arg, REAL_TYPE))
9549 error ("non-floating-point argument to function %qs",
9550 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9551 return error_mark_node;
9554 switch (builtin_index)
9556 case BUILT_IN_ISINF:
9557 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9558 return omit_one_operand (type, integer_zero_node, arg);
9560 if (TREE_CODE (arg) == REAL_CST)
9562 r = TREE_REAL_CST (arg);
9563 if (real_isinf (&r))
9564 return real_compare (GT_EXPR, &r, &dconst0)
9565 ? integer_one_node : integer_minus_one_node;
9567 return integer_zero_node;
9572 case BUILT_IN_FINITE:
9573 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9574 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9575 return omit_one_operand (type, integer_one_node, arg);
9577 if (TREE_CODE (arg) == REAL_CST)
9579 r = TREE_REAL_CST (arg);
9580 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9585 case BUILT_IN_ISNAN:
9586 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9587 return omit_one_operand (type, integer_zero_node, arg);
9589 if (TREE_CODE (arg) == REAL_CST)
9591 r = TREE_REAL_CST (arg);
9592 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9595 arg = builtin_save_expr (arg);
9596 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9603 /* Fold a call to an unordered comparison function such as
9604 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9605 being called and ARG0 and ARG1 are the arguments for the call.
9606 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9607 the opposite of the desired result. UNORDERED_CODE is used
9608 for modes that can hold NaNs and ORDERED_CODE is used for
9612 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9613 enum tree_code unordered_code,
9614 enum tree_code ordered_code)
9616 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9617 enum tree_code code;
9619 enum tree_code code0, code1;
9620 tree cmp_type = NULL_TREE;
9622 type0 = TREE_TYPE (arg0);
9623 type1 = TREE_TYPE (arg1);
9625 code0 = TREE_CODE (type0);
9626 code1 = TREE_CODE (type1);
9628 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9629 /* Choose the wider of two real types. */
9630 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9632 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9634 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9638 error ("non-floating-point argument to function %qs",
9639 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9640 return error_mark_node;
9643 arg0 = fold_convert (cmp_type, arg0);
9644 arg1 = fold_convert (cmp_type, arg1);
9646 if (unordered_code == UNORDERED_EXPR)
9648 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9649 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9650 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9653 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9655 return fold_build1 (TRUTH_NOT_EXPR, type,
9656 fold_build2 (code, type, arg0, arg1));
9659 /* Fold a call to built-in function FNDECL with 0 arguments.
9660 IGNORE is true if the result of the function call is ignored. This
9661 function returns NULL_TREE if no simplification was possible. */
9664 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9666 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9667 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9670 CASE_FLT_FN (BUILT_IN_INF):
9671 case BUILT_IN_INFD32:
9672 case BUILT_IN_INFD64:
9673 case BUILT_IN_INFD128:
9674 return fold_builtin_inf (type, true);
9676 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9677 return fold_builtin_inf (type, false);
9679 case BUILT_IN_CLASSIFY_TYPE:
9680 return fold_builtin_classify_type (NULL_TREE);
9688 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9689 IGNORE is true if the result of the function call is ignored. This
9690 function returns NULL_TREE if no simplification was possible. */
9693 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9695 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9696 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9700 case BUILT_IN_CONSTANT_P:
9702 tree val = fold_builtin_constant_p (arg0);
9704 /* Gimplification will pull the CALL_EXPR for the builtin out of
9705 an if condition. When not optimizing, we'll not CSE it back.
9706 To avoid link error types of regressions, return false now. */
9707 if (!val && !optimize)
9708 val = integer_zero_node;
9713 case BUILT_IN_CLASSIFY_TYPE:
9714 return fold_builtin_classify_type (arg0);
9716 case BUILT_IN_STRLEN:
9717 return fold_builtin_strlen (arg0);
9719 CASE_FLT_FN (BUILT_IN_FABS):
9720 return fold_builtin_fabs (arg0, type);
9724 case BUILT_IN_LLABS:
9725 case BUILT_IN_IMAXABS:
9726 return fold_builtin_abs (arg0, type);
9728 CASE_FLT_FN (BUILT_IN_CONJ):
9729 if (validate_arg (arg0, COMPLEX_TYPE))
9730 return fold_build1 (CONJ_EXPR, type, arg0);
9733 CASE_FLT_FN (BUILT_IN_CREAL):
9734 if (validate_arg (arg0, COMPLEX_TYPE))
9735 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9738 CASE_FLT_FN (BUILT_IN_CIMAG):
9739 if (validate_arg (arg0, COMPLEX_TYPE))
9740 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9743 CASE_FLT_FN (BUILT_IN_CCOS):
9744 CASE_FLT_FN (BUILT_IN_CCOSH):
9745 /* These functions are "even", i.e. f(x) == f(-x). */
9746 if (validate_arg (arg0, COMPLEX_TYPE))
9748 tree narg = fold_strip_sign_ops (arg0);
9750 return build_call_expr (fndecl, 1, narg);
9754 CASE_FLT_FN (BUILT_IN_CABS):
9755 return fold_builtin_cabs (arg0, type, fndecl);
9757 CASE_FLT_FN (BUILT_IN_CARG):
9758 return fold_builtin_carg (arg0, type);
9760 CASE_FLT_FN (BUILT_IN_SQRT):
9761 return fold_builtin_sqrt (arg0, type);
9763 CASE_FLT_FN (BUILT_IN_CBRT):
9764 return fold_builtin_cbrt (arg0, type);
9766 CASE_FLT_FN (BUILT_IN_ASIN):
9767 if (validate_arg (arg0, REAL_TYPE))
9768 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9769 &dconstm1, &dconst1, true);
9772 CASE_FLT_FN (BUILT_IN_ACOS):
9773 if (validate_arg (arg0, REAL_TYPE))
9774 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9775 &dconstm1, &dconst1, true);
9778 CASE_FLT_FN (BUILT_IN_ATAN):
9779 if (validate_arg (arg0, REAL_TYPE))
9780 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9783 CASE_FLT_FN (BUILT_IN_ASINH):
9784 if (validate_arg (arg0, REAL_TYPE))
9785 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9788 CASE_FLT_FN (BUILT_IN_ACOSH):
9789 if (validate_arg (arg0, REAL_TYPE))
9790 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9791 &dconst1, NULL, true);
9794 CASE_FLT_FN (BUILT_IN_ATANH):
9795 if (validate_arg (arg0, REAL_TYPE))
9796 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9797 &dconstm1, &dconst1, false);
9800 CASE_FLT_FN (BUILT_IN_SIN):
9801 if (validate_arg (arg0, REAL_TYPE))
9802 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9805 CASE_FLT_FN (BUILT_IN_COS):
9806 return fold_builtin_cos (arg0, type, fndecl);
9809 CASE_FLT_FN (BUILT_IN_TAN):
9810 return fold_builtin_tan (arg0, type);
9812 CASE_FLT_FN (BUILT_IN_CEXP):
9813 return fold_builtin_cexp (arg0, type);
9815 CASE_FLT_FN (BUILT_IN_CEXPI):
9816 if (validate_arg (arg0, REAL_TYPE))
9817 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9820 CASE_FLT_FN (BUILT_IN_SINH):
9821 if (validate_arg (arg0, REAL_TYPE))
9822 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9825 CASE_FLT_FN (BUILT_IN_COSH):
9826 return fold_builtin_cosh (arg0, type, fndecl);
9828 CASE_FLT_FN (BUILT_IN_TANH):
9829 if (validate_arg (arg0, REAL_TYPE))
9830 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9833 CASE_FLT_FN (BUILT_IN_ERF):
9834 if (validate_arg (arg0, REAL_TYPE))
9835 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9838 CASE_FLT_FN (BUILT_IN_ERFC):
9839 if (validate_arg (arg0, REAL_TYPE))
9840 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9843 CASE_FLT_FN (BUILT_IN_TGAMMA):
9844 if (validate_arg (arg0, REAL_TYPE))
9845 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9848 CASE_FLT_FN (BUILT_IN_EXP):
9849 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9851 CASE_FLT_FN (BUILT_IN_EXP2):
9852 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9854 CASE_FLT_FN (BUILT_IN_EXP10):
9855 CASE_FLT_FN (BUILT_IN_POW10):
9856 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9858 CASE_FLT_FN (BUILT_IN_EXPM1):
9859 if (validate_arg (arg0, REAL_TYPE))
9860 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9863 CASE_FLT_FN (BUILT_IN_LOG):
9864 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9866 CASE_FLT_FN (BUILT_IN_LOG2):
9867 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9869 CASE_FLT_FN (BUILT_IN_LOG10):
9870 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9872 CASE_FLT_FN (BUILT_IN_LOG1P):
9873 if (validate_arg (arg0, REAL_TYPE))
9874 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9875 &dconstm1, NULL, false);
9878 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9879 CASE_FLT_FN (BUILT_IN_J0):
9880 if (validate_arg (arg0, REAL_TYPE))
9881 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9885 CASE_FLT_FN (BUILT_IN_J1):
9886 if (validate_arg (arg0, REAL_TYPE))
9887 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9891 CASE_FLT_FN (BUILT_IN_Y0):
9892 if (validate_arg (arg0, REAL_TYPE))
9893 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9894 &dconst0, NULL, false);
9897 CASE_FLT_FN (BUILT_IN_Y1):
9898 if (validate_arg (arg0, REAL_TYPE))
9899 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9900 &dconst0, NULL, false);
9904 CASE_FLT_FN (BUILT_IN_NAN):
9905 case BUILT_IN_NAND32:
9906 case BUILT_IN_NAND64:
9907 case BUILT_IN_NAND128:
9908 return fold_builtin_nan (arg0, type, true);
9910 CASE_FLT_FN (BUILT_IN_NANS):
9911 return fold_builtin_nan (arg0, type, false);
9913 CASE_FLT_FN (BUILT_IN_FLOOR):
9914 return fold_builtin_floor (fndecl, arg0);
9916 CASE_FLT_FN (BUILT_IN_CEIL):
9917 return fold_builtin_ceil (fndecl, arg0);
9919 CASE_FLT_FN (BUILT_IN_TRUNC):
9920 return fold_builtin_trunc (fndecl, arg0);
9922 CASE_FLT_FN (BUILT_IN_ROUND):
9923 return fold_builtin_round (fndecl, arg0);
9925 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9926 CASE_FLT_FN (BUILT_IN_RINT):
9927 return fold_trunc_transparent_mathfn (fndecl, arg0);
9929 CASE_FLT_FN (BUILT_IN_LCEIL):
9930 CASE_FLT_FN (BUILT_IN_LLCEIL):
9931 CASE_FLT_FN (BUILT_IN_LFLOOR):
9932 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9933 CASE_FLT_FN (BUILT_IN_LROUND):
9934 CASE_FLT_FN (BUILT_IN_LLROUND):
9935 return fold_builtin_int_roundingfn (fndecl, arg0);
9937 CASE_FLT_FN (BUILT_IN_LRINT):
9938 CASE_FLT_FN (BUILT_IN_LLRINT):
9939 return fold_fixed_mathfn (fndecl, arg0);
9941 case BUILT_IN_BSWAP32:
9942 case BUILT_IN_BSWAP64:
9943 return fold_builtin_bswap (fndecl, arg0);
9945 CASE_INT_FN (BUILT_IN_FFS):
9946 CASE_INT_FN (BUILT_IN_CLZ):
9947 CASE_INT_FN (BUILT_IN_CTZ):
9948 CASE_INT_FN (BUILT_IN_POPCOUNT):
9949 CASE_INT_FN (BUILT_IN_PARITY):
9950 return fold_builtin_bitop (fndecl, arg0);
9952 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9953 return fold_builtin_signbit (arg0, type);
9955 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9956 return fold_builtin_significand (arg0, type);
9958 CASE_FLT_FN (BUILT_IN_ILOGB):
9959 CASE_FLT_FN (BUILT_IN_LOGB):
9960 return fold_builtin_logb (arg0, type);
9962 case BUILT_IN_ISASCII:
9963 return fold_builtin_isascii (arg0);
9965 case BUILT_IN_TOASCII:
9966 return fold_builtin_toascii (arg0);
9968 case BUILT_IN_ISDIGIT:
9969 return fold_builtin_isdigit (arg0);
9971 CASE_FLT_FN (BUILT_IN_FINITE):
9972 case BUILT_IN_FINITED32:
9973 case BUILT_IN_FINITED64:
9974 case BUILT_IN_FINITED128:
9975 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9977 CASE_FLT_FN (BUILT_IN_ISINF):
9978 case BUILT_IN_ISINFD32:
9979 case BUILT_IN_ISINFD64:
9980 case BUILT_IN_ISINFD128:
9981 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9983 CASE_FLT_FN (BUILT_IN_ISNAN):
9984 case BUILT_IN_ISNAND32:
9985 case BUILT_IN_ISNAND64:
9986 case BUILT_IN_ISNAND128:
9987 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9989 case BUILT_IN_PRINTF:
9990 case BUILT_IN_PRINTF_UNLOCKED:
9991 case BUILT_IN_VPRINTF:
9992 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10002 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10003 IGNORE is true if the result of the function call is ignored. This
10004 function returns NULL_TREE if no simplification was possible. */
10007 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10009 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10010 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10014 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10015 CASE_FLT_FN (BUILT_IN_JN):
10016 if (validate_arg (arg0, INTEGER_TYPE)
10017 && validate_arg (arg1, REAL_TYPE))
10018 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10021 CASE_FLT_FN (BUILT_IN_YN):
10022 if (validate_arg (arg0, INTEGER_TYPE)
10023 && validate_arg (arg1, REAL_TYPE))
10024 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10028 CASE_FLT_FN (BUILT_IN_DREM):
10029 CASE_FLT_FN (BUILT_IN_REMAINDER):
10030 if (validate_arg (arg0, REAL_TYPE)
10031 && validate_arg(arg1, REAL_TYPE))
10032 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10035 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10036 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10037 if (validate_arg (arg0, REAL_TYPE)
10038 && validate_arg(arg1, POINTER_TYPE))
10039 return do_mpfr_lgamma_r (arg0, arg1, type);
10043 CASE_FLT_FN (BUILT_IN_ATAN2):
10044 if (validate_arg (arg0, REAL_TYPE)
10045 && validate_arg(arg1, REAL_TYPE))
10046 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10049 CASE_FLT_FN (BUILT_IN_FDIM):
10050 if (validate_arg (arg0, REAL_TYPE)
10051 && validate_arg(arg1, REAL_TYPE))
10052 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10055 CASE_FLT_FN (BUILT_IN_HYPOT):
10056 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10058 CASE_FLT_FN (BUILT_IN_LDEXP):
10059 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10060 CASE_FLT_FN (BUILT_IN_SCALBN):
10061 CASE_FLT_FN (BUILT_IN_SCALBLN):
10062 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10064 CASE_FLT_FN (BUILT_IN_FREXP):
10065 return fold_builtin_frexp (arg0, arg1, type);
10067 CASE_FLT_FN (BUILT_IN_MODF):
10068 return fold_builtin_modf (arg0, arg1, type);
10070 case BUILT_IN_BZERO:
10071 return fold_builtin_bzero (arg0, arg1, ignore);
10073 case BUILT_IN_FPUTS:
10074 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10076 case BUILT_IN_FPUTS_UNLOCKED:
10077 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10079 case BUILT_IN_STRSTR:
10080 return fold_builtin_strstr (arg0, arg1, type);
10082 case BUILT_IN_STRCAT:
10083 return fold_builtin_strcat (arg0, arg1);
10085 case BUILT_IN_STRSPN:
10086 return fold_builtin_strspn (arg0, arg1);
10088 case BUILT_IN_STRCSPN:
10089 return fold_builtin_strcspn (arg0, arg1);
10091 case BUILT_IN_STRCHR:
10092 case BUILT_IN_INDEX:
10093 return fold_builtin_strchr (arg0, arg1, type);
10095 case BUILT_IN_STRRCHR:
10096 case BUILT_IN_RINDEX:
10097 return fold_builtin_strrchr (arg0, arg1, type);
10099 case BUILT_IN_STRCPY:
10100 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10102 case BUILT_IN_STRCMP:
10103 return fold_builtin_strcmp (arg0, arg1);
10105 case BUILT_IN_STRPBRK:
10106 return fold_builtin_strpbrk (arg0, arg1, type);
10108 case BUILT_IN_EXPECT:
10109 return fold_builtin_expect (arg0);
10111 CASE_FLT_FN (BUILT_IN_POW):
10112 return fold_builtin_pow (fndecl, arg0, arg1, type);
10114 CASE_FLT_FN (BUILT_IN_POWI):
10115 return fold_builtin_powi (fndecl, arg0, arg1, type);
10117 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10118 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10120 CASE_FLT_FN (BUILT_IN_FMIN):
10121 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10123 CASE_FLT_FN (BUILT_IN_FMAX):
10124 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10126 case BUILT_IN_ISGREATER:
10127 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10128 case BUILT_IN_ISGREATEREQUAL:
10129 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10130 case BUILT_IN_ISLESS:
10131 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10132 case BUILT_IN_ISLESSEQUAL:
10133 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10134 case BUILT_IN_ISLESSGREATER:
10135 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10136 case BUILT_IN_ISUNORDERED:
10137 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10140 /* We do the folding for va_start in the expander. */
10141 case BUILT_IN_VA_START:
10144 case BUILT_IN_SPRINTF:
10145 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10147 case BUILT_IN_OBJECT_SIZE:
10148 return fold_builtin_object_size (arg0, arg1);
10150 case BUILT_IN_PRINTF:
10151 case BUILT_IN_PRINTF_UNLOCKED:
10152 case BUILT_IN_VPRINTF:
10153 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10155 case BUILT_IN_PRINTF_CHK:
10156 case BUILT_IN_VPRINTF_CHK:
10157 if (!validate_arg (arg0, INTEGER_TYPE)
10158 || TREE_SIDE_EFFECTS (arg0))
10161 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10164 case BUILT_IN_FPRINTF:
10165 case BUILT_IN_FPRINTF_UNLOCKED:
10166 case BUILT_IN_VFPRINTF:
10167 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10176 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10177 and ARG2. IGNORE is true if the result of the function call is ignored.
10178 This function returns NULL_TREE if no simplification was possible. */
10181 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10183 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10184 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10188 CASE_FLT_FN (BUILT_IN_SINCOS):
10189 return fold_builtin_sincos (arg0, arg1, arg2);
10191 CASE_FLT_FN (BUILT_IN_FMA):
10192 if (validate_arg (arg0, REAL_TYPE)
10193 && validate_arg(arg1, REAL_TYPE)
10194 && validate_arg(arg2, REAL_TYPE))
10195 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10198 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10199 CASE_FLT_FN (BUILT_IN_REMQUO):
10200 if (validate_arg (arg0, REAL_TYPE)
10201 && validate_arg(arg1, REAL_TYPE)
10202 && validate_arg(arg2, POINTER_TYPE))
10203 return do_mpfr_remquo (arg0, arg1, arg2);
10207 case BUILT_IN_MEMSET:
10208 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10210 case BUILT_IN_BCOPY:
10211 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10213 case BUILT_IN_MEMCPY:
10214 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10216 case BUILT_IN_MEMPCPY:
10217 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10219 case BUILT_IN_MEMMOVE:
10220 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10222 case BUILT_IN_STRNCAT:
10223 return fold_builtin_strncat (arg0, arg1, arg2);
10225 case BUILT_IN_STRNCPY:
10226 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10228 case BUILT_IN_STRNCMP:
10229 return fold_builtin_strncmp (arg0, arg1, arg2);
10231 case BUILT_IN_MEMCHR:
10232 return fold_builtin_memchr (arg0, arg1, arg2, type);
10234 case BUILT_IN_BCMP:
10235 case BUILT_IN_MEMCMP:
10236 return fold_builtin_memcmp (arg0, arg1, arg2);;
10238 case BUILT_IN_SPRINTF:
10239 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10241 case BUILT_IN_STRCPY_CHK:
10242 case BUILT_IN_STPCPY_CHK:
10243 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10246 case BUILT_IN_STRCAT_CHK:
10247 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10249 case BUILT_IN_PRINTF_CHK:
10250 case BUILT_IN_VPRINTF_CHK:
10251 if (!validate_arg (arg0, INTEGER_TYPE)
10252 || TREE_SIDE_EFFECTS (arg0))
10255 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10258 case BUILT_IN_FPRINTF:
10259 case BUILT_IN_FPRINTF_UNLOCKED:
10260 case BUILT_IN_VFPRINTF:
10261 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10263 case BUILT_IN_FPRINTF_CHK:
10264 case BUILT_IN_VFPRINTF_CHK:
10265 if (!validate_arg (arg1, INTEGER_TYPE)
10266 || TREE_SIDE_EFFECTS (arg1))
10269 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10278 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10279 ARG2, and ARG3. IGNORE is true if the result of the function call is
10280 ignored. This function returns NULL_TREE if no simplification was
10284 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10287 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10291 case BUILT_IN_MEMCPY_CHK:
10292 case BUILT_IN_MEMPCPY_CHK:
10293 case BUILT_IN_MEMMOVE_CHK:
10294 case BUILT_IN_MEMSET_CHK:
10295 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10297 DECL_FUNCTION_CODE (fndecl));
10299 case BUILT_IN_STRNCPY_CHK:
10300 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10302 case BUILT_IN_STRNCAT_CHK:
10303 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10305 case BUILT_IN_FPRINTF_CHK:
10306 case BUILT_IN_VFPRINTF_CHK:
10307 if (!validate_arg (arg1, INTEGER_TYPE)
10308 || TREE_SIDE_EFFECTS (arg1))
10311 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10321 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10322 arguments, where NARGS <= 4. IGNORE is true if the result of the
10323 function call is ignored. This function returns NULL_TREE if no
10324 simplification was possible. Note that this only folds builtins with
10325 fixed argument patterns. Foldings that do varargs-to-varargs
10326 transformations, or that match calls with more than 4 arguments,
10327 need to be handled with fold_builtin_varargs instead. */
10329 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10332 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10334 tree ret = NULL_TREE;
10338 ret = fold_builtin_0 (fndecl, ignore);
10341 ret = fold_builtin_1 (fndecl, args[0], ignore);
10344 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10347 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10350 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10358 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10359 TREE_NO_WARNING (ret) = 1;
10365 /* Builtins with folding operations that operate on "..." arguments
10366 need special handling; we need to store the arguments in a convenient
10367 data structure before attempting any folding. Fortunately there are
10368 only a few builtins that fall into this category. FNDECL is the
10369 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10370 result of the function call is ignored. */
10373 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10375 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10376 tree ret = NULL_TREE;
10380 case BUILT_IN_SPRINTF_CHK:
10381 case BUILT_IN_VSPRINTF_CHK:
10382 ret = fold_builtin_sprintf_chk (exp, fcode);
10385 case BUILT_IN_SNPRINTF_CHK:
10386 case BUILT_IN_VSNPRINTF_CHK:
10387 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10394 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10395 TREE_NO_WARNING (ret) = 1;
10401 /* A wrapper function for builtin folding that prevents warnings for
10402 "statement without effect" and the like, caused by removing the
10403 call node earlier than the warning is generated. */
10406 fold_call_expr (tree exp, bool ignore)
10408 tree ret = NULL_TREE;
10409 tree fndecl = get_callee_fndecl (exp);
10411 && TREE_CODE (fndecl) == FUNCTION_DECL
10412 && DECL_BUILT_IN (fndecl))
10414 /* FIXME: Don't use a list in this interface. */
10415 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10416 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10419 int nargs = call_expr_nargs (exp);
10420 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10422 tree *args = CALL_EXPR_ARGP (exp);
10423 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10426 ret = fold_builtin_varargs (fndecl, exp, ignore);
10429 /* Propagate location information from original call to
10430 expansion of builtin. Otherwise things like
10431 maybe_emit_chk_warning, that operate on the expansion
10432 of a builtin, will use the wrong location information. */
10433 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10435 tree realret = ret;
10436 if (TREE_CODE (ret) == NOP_EXPR)
10437 realret = TREE_OPERAND (ret, 0);
10438 if (CAN_HAVE_LOCATION_P (realret)
10439 && !EXPR_HAS_LOCATION (realret))
10440 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10449 /* Conveniently construct a function call expression. FNDECL names the
10450 function to be called and ARGLIST is a TREE_LIST of arguments. */
10453 build_function_call_expr (tree fndecl, tree arglist)
10455 tree fntype = TREE_TYPE (fndecl);
10456 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10457 int n = list_length (arglist);
10458 tree *argarray = (tree *) alloca (n * sizeof (tree));
10461 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10462 argarray[i] = TREE_VALUE (arglist);
10463 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10466 /* Conveniently construct a function call expression. FNDECL names the
10467 function to be called, N is the number of arguments, and the "..."
10468 parameters are the argument expressions. */
10471 build_call_expr (tree fndecl, int n, ...)
10474 tree fntype = TREE_TYPE (fndecl);
10475 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10476 tree *argarray = (tree *) alloca (n * sizeof (tree));
10480 for (i = 0; i < n; i++)
10481 argarray[i] = va_arg (ap, tree);
10483 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10486 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10487 N arguments are passed in the array ARGARRAY. */
10490 fold_builtin_call_array (tree type,
10495 tree ret = NULL_TREE;
10499 if (TREE_CODE (fn) == ADDR_EXPR)
10501 tree fndecl = TREE_OPERAND (fn, 0);
10502 if (TREE_CODE (fndecl) == FUNCTION_DECL
10503 && DECL_BUILT_IN (fndecl))
10505 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10507 tree arglist = NULL_TREE;
10508 for (i = n - 1; i >= 0; i--)
10509 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10510 ret = targetm.fold_builtin (fndecl, arglist, false);
10514 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10516 /* First try the transformations that don't require consing up
10518 ret = fold_builtin_n (fndecl, argarray, n, false);
10523 /* If we got this far, we need to build an exp. */
10524 exp = build_call_array (type, fn, n, argarray);
10525 ret = fold_builtin_varargs (fndecl, exp, false);
10526 return ret ? ret : exp;
10530 return build_call_array (type, fn, n, argarray);
10533 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10534 along with N new arguments specified as the "..." parameters. SKIP
10535 is the number of arguments in EXP to be omitted. This function is used
10536 to do varargs-to-varargs transformations. */
10539 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10541 int oldnargs = call_expr_nargs (exp);
10542 int nargs = oldnargs - skip + n;
10543 tree fntype = TREE_TYPE (fndecl);
10544 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10552 buffer = alloca (nargs * sizeof (tree));
10554 for (i = 0; i < n; i++)
10555 buffer[i] = va_arg (ap, tree);
10557 for (j = skip; j < oldnargs; j++, i++)
10558 buffer[i] = CALL_EXPR_ARG (exp, j);
10561 buffer = CALL_EXPR_ARGP (exp) + skip;
10563 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10566 /* Validate a single argument ARG against a tree code CODE representing
10570 validate_arg (tree arg, enum tree_code code)
10574 else if (code == POINTER_TYPE)
10575 return POINTER_TYPE_P (TREE_TYPE (arg));
10576 return code == TREE_CODE (TREE_TYPE (arg));
10579 /* This function validates the types of a function call argument list
10580 against a specified list of tree_codes. If the last specifier is a 0,
10581 that represents an ellipses, otherwise the last specifier must be a
10585 validate_arglist (tree callexpr, ...)
10587 enum tree_code code;
10590 call_expr_arg_iterator iter;
10593 va_start (ap, callexpr);
10594 init_call_expr_arg_iterator (callexpr, &iter);
10598 code = va_arg (ap, enum tree_code);
10602 /* This signifies an ellipses, any further arguments are all ok. */
10606 /* This signifies an endlink, if no arguments remain, return
10607 true, otherwise return false. */
10608 res = !more_call_expr_args_p (&iter);
10611 /* If no parameters remain or the parameter's code does not
10612 match the specified code, return false. Otherwise continue
10613 checking any remaining arguments. */
10614 arg = next_call_expr_arg (&iter);
10615 if (!validate_arg (arg, code))
10622 /* We need gotos here since we can only have one VA_CLOSE in a
10630 /* Default target-specific builtin expander that does nothing. */
10633 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10634 rtx target ATTRIBUTE_UNUSED,
10635 rtx subtarget ATTRIBUTE_UNUSED,
10636 enum machine_mode mode ATTRIBUTE_UNUSED,
10637 int ignore ATTRIBUTE_UNUSED)
10642 /* Returns true is EXP represents data that would potentially reside
10643 in a readonly section. */
10646 readonly_data_expr (tree exp)
10650 if (TREE_CODE (exp) != ADDR_EXPR)
10653 exp = get_base_address (TREE_OPERAND (exp, 0));
10657 /* Make sure we call decl_readonly_section only for trees it
10658 can handle (since it returns true for everything it doesn't
10660 if (TREE_CODE (exp) == STRING_CST
10661 || TREE_CODE (exp) == CONSTRUCTOR
10662 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10663 return decl_readonly_section (exp, 0);
10668 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10669 to the call, and TYPE is its return type.
10671 Return NULL_TREE if no simplification was possible, otherwise return the
10672 simplified form of the call as a tree.
10674 The simplified form may be a constant or other expression which
10675 computes the same value, but in a more efficient manner (including
10676 calls to other builtin functions).
10678 The call may contain arguments which need to be evaluated, but
10679 which are not useful to determine the result of the call. In
10680 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10681 COMPOUND_EXPR will be an argument which must be evaluated.
10682 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10683 COMPOUND_EXPR in the chain will contain the tree for the simplified
10684 form of the builtin function call. */
10687 fold_builtin_strstr (tree s1, tree s2, tree type)
10689 if (!validate_arg (s1, POINTER_TYPE)
10690 || !validate_arg (s2, POINTER_TYPE))
10695 const char *p1, *p2;
10697 p2 = c_getstr (s2);
10701 p1 = c_getstr (s1);
10704 const char *r = strstr (p1, p2);
10708 return build_int_cst (TREE_TYPE (s1), 0);
10710 /* Return an offset into the constant string argument. */
10711 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10712 s1, size_int (r - p1));
10713 return fold_convert (type, tem);
10716 /* The argument is const char *, and the result is char *, so we need
10717 a type conversion here to avoid a warning. */
10719 return fold_convert (type, s1);
10724 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10728 /* New argument list transforming strstr(s1, s2) to
10729 strchr(s1, s2[0]). */
10730 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10734 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10735 the call, and TYPE is its return type.
10737 Return NULL_TREE if no simplification was possible, otherwise return the
10738 simplified form of the call as a tree.
10740 The simplified form may be a constant or other expression which
10741 computes the same value, but in a more efficient manner (including
10742 calls to other builtin functions).
10744 The call may contain arguments which need to be evaluated, but
10745 which are not useful to determine the result of the call. In
10746 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10747 COMPOUND_EXPR will be an argument which must be evaluated.
10748 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10749 COMPOUND_EXPR in the chain will contain the tree for the simplified
10750 form of the builtin function call. */
10753 fold_builtin_strchr (tree s1, tree s2, tree type)
10755 if (!validate_arg (s1, POINTER_TYPE)
10756 || !validate_arg (s2, INTEGER_TYPE))
10762 if (TREE_CODE (s2) != INTEGER_CST)
10765 p1 = c_getstr (s1);
10772 if (target_char_cast (s2, &c))
10775 r = strchr (p1, c);
10778 return build_int_cst (TREE_TYPE (s1), 0);
10780 /* Return an offset into the constant string argument. */
10781 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10782 s1, size_int (r - p1));
10783 return fold_convert (type, tem);
10789 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10790 the call, and TYPE is its return type.
10792 Return NULL_TREE if no simplification was possible, otherwise return the
10793 simplified form of the call as a tree.
10795 The simplified form may be a constant or other expression which
10796 computes the same value, but in a more efficient manner (including
10797 calls to other builtin functions).
10799 The call may contain arguments which need to be evaluated, but
10800 which are not useful to determine the result of the call. In
10801 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10802 COMPOUND_EXPR will be an argument which must be evaluated.
10803 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10804 COMPOUND_EXPR in the chain will contain the tree for the simplified
10805 form of the builtin function call. */
10808 fold_builtin_strrchr (tree s1, tree s2, tree type)
10810 if (!validate_arg (s1, POINTER_TYPE)
10811 || !validate_arg (s2, INTEGER_TYPE))
10818 if (TREE_CODE (s2) != INTEGER_CST)
10821 p1 = c_getstr (s1);
10828 if (target_char_cast (s2, &c))
10831 r = strrchr (p1, c);
10834 return build_int_cst (TREE_TYPE (s1), 0);
10836 /* Return an offset into the constant string argument. */
10837 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10838 s1, size_int (r - p1));
10839 return fold_convert (type, tem);
10842 if (! integer_zerop (s2))
10845 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10849 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10850 return build_call_expr (fn, 2, s1, s2);
10854 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10855 to the call, and TYPE is its return type.
10857 Return NULL_TREE if no simplification was possible, otherwise return the
10858 simplified form of the call as a tree.
10860 The simplified form may be a constant or other expression which
10861 computes the same value, but in a more efficient manner (including
10862 calls to other builtin functions).
10864 The call may contain arguments which need to be evaluated, but
10865 which are not useful to determine the result of the call. In
10866 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10867 COMPOUND_EXPR will be an argument which must be evaluated.
10868 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10869 COMPOUND_EXPR in the chain will contain the tree for the simplified
10870 form of the builtin function call. */
10873 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10875 if (!validate_arg (s1, POINTER_TYPE)
10876 || !validate_arg (s2, POINTER_TYPE))
10881 const char *p1, *p2;
10883 p2 = c_getstr (s2);
10887 p1 = c_getstr (s1);
10890 const char *r = strpbrk (p1, p2);
10894 return build_int_cst (TREE_TYPE (s1), 0);
10896 /* Return an offset into the constant string argument. */
10897 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10898 s1, size_int (r - p1));
10899 return fold_convert (type, tem);
10903 /* strpbrk(x, "") == NULL.
10904 Evaluate and ignore s1 in case it had side-effects. */
10905 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10908 return NULL_TREE; /* Really call strpbrk. */
10910 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10914 /* New argument list transforming strpbrk(s1, s2) to
10915 strchr(s1, s2[0]). */
10916 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10920 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10923 Return NULL_TREE if no simplification was possible, otherwise return the
10924 simplified form of the call as a tree.
10926 The simplified form may be a constant or other expression which
10927 computes the same value, but in a more efficient manner (including
10928 calls to other builtin functions).
10930 The call may contain arguments which need to be evaluated, but
10931 which are not useful to determine the result of the call. In
10932 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10933 COMPOUND_EXPR will be an argument which must be evaluated.
10934 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10935 COMPOUND_EXPR in the chain will contain the tree for the simplified
10936 form of the builtin function call. */
10939 fold_builtin_strcat (tree dst, tree src)
10941 if (!validate_arg (dst, POINTER_TYPE)
10942 || !validate_arg (src, POINTER_TYPE))
10946 const char *p = c_getstr (src);
10948 /* If the string length is zero, return the dst parameter. */
10949 if (p && *p == '\0')
10956 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10957 arguments to the call.
10959 Return NULL_TREE if no simplification was possible, otherwise return the
10960 simplified form of the call as a tree.
10962 The simplified form may be a constant or other expression which
10963 computes the same value, but in a more efficient manner (including
10964 calls to other builtin functions).
10966 The call may contain arguments which need to be evaluated, but
10967 which are not useful to determine the result of the call. In
10968 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10969 COMPOUND_EXPR will be an argument which must be evaluated.
10970 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10971 COMPOUND_EXPR in the chain will contain the tree for the simplified
10972 form of the builtin function call. */
10975 fold_builtin_strncat (tree dst, tree src, tree len)
10977 if (!validate_arg (dst, POINTER_TYPE)
10978 || !validate_arg (src, POINTER_TYPE)
10979 || !validate_arg (len, INTEGER_TYPE))
10983 const char *p = c_getstr (src);
10985 /* If the requested length is zero, or the src parameter string
10986 length is zero, return the dst parameter. */
10987 if (integer_zerop (len) || (p && *p == '\0'))
10988 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10990 /* If the requested len is greater than or equal to the string
10991 length, call strcat. */
10992 if (TREE_CODE (len) == INTEGER_CST && p
10993 && compare_tree_int (len, strlen (p)) >= 0)
10995 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10997 /* If the replacement _DECL isn't initialized, don't do the
11002 return build_call_expr (fn, 2, dst, src);
11008 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11011 Return NULL_TREE if no simplification was possible, otherwise return the
11012 simplified form of the call as a tree.
11014 The simplified form may be a constant or other expression which
11015 computes the same value, but in a more efficient manner (including
11016 calls to other builtin functions).
11018 The call may contain arguments which need to be evaluated, but
11019 which are not useful to determine the result of the call. In
11020 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11021 COMPOUND_EXPR will be an argument which must be evaluated.
11022 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11023 COMPOUND_EXPR in the chain will contain the tree for the simplified
11024 form of the builtin function call. */
11027 fold_builtin_strspn (tree s1, tree s2)
11029 if (!validate_arg (s1, POINTER_TYPE)
11030 || !validate_arg (s2, POINTER_TYPE))
11034 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11036 /* If both arguments are constants, evaluate at compile-time. */
11039 const size_t r = strspn (p1, p2);
11040 return size_int (r);
11043 /* If either argument is "", return NULL_TREE. */
11044 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11045 /* Evaluate and ignore both arguments in case either one has
11047 return omit_two_operands (integer_type_node, integer_zero_node,
11053 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11056 Return NULL_TREE if no simplification was possible, otherwise return the
11057 simplified form of the call as a tree.
11059 The simplified form may be a constant or other expression which
11060 computes the same value, but in a more efficient manner (including
11061 calls to other builtin functions).
11063 The call may contain arguments which need to be evaluated, but
11064 which are not useful to determine the result of the call. In
11065 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11066 COMPOUND_EXPR will be an argument which must be evaluated.
11067 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11068 COMPOUND_EXPR in the chain will contain the tree for the simplified
11069 form of the builtin function call. */
11072 fold_builtin_strcspn (tree s1, tree s2)
11074 if (!validate_arg (s1, POINTER_TYPE)
11075 || !validate_arg (s2, POINTER_TYPE))
11079 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11081 /* If both arguments are constants, evaluate at compile-time. */
11084 const size_t r = strcspn (p1, p2);
11085 return size_int (r);
11088 /* If the first argument is "", return NULL_TREE. */
11089 if (p1 && *p1 == '\0')
11091 /* Evaluate and ignore argument s2 in case it has
11093 return omit_one_operand (integer_type_node,
11094 integer_zero_node, s2);
11097 /* If the second argument is "", return __builtin_strlen(s1). */
11098 if (p2 && *p2 == '\0')
11100 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11102 /* If the replacement _DECL isn't initialized, don't do the
11107 return build_call_expr (fn, 1, s1);
11113 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11114 to the call. IGNORE is true if the value returned
11115 by the builtin will be ignored. UNLOCKED is true is true if this
11116 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11117 the known length of the string. Return NULL_TREE if no simplification
11121 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11123 /* If we're using an unlocked function, assume the other unlocked
11124 functions exist explicitly. */
11125 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11126 : implicit_built_in_decls[BUILT_IN_FPUTC];
11127 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11128 : implicit_built_in_decls[BUILT_IN_FWRITE];
11130 /* If the return value is used, don't do the transformation. */
11134 /* Verify the arguments in the original call. */
11135 if (!validate_arg (arg0, POINTER_TYPE)
11136 || !validate_arg (arg1, POINTER_TYPE))
11140 len = c_strlen (arg0, 0);
11142 /* Get the length of the string passed to fputs. If the length
11143 can't be determined, punt. */
11145 || TREE_CODE (len) != INTEGER_CST)
11148 switch (compare_tree_int (len, 1))
11150 case -1: /* length is 0, delete the call entirely . */
11151 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11153 case 0: /* length is 1, call fputc. */
11155 const char *p = c_getstr (arg0);
11160 return build_call_expr (fn_fputc, 2,
11161 build_int_cst (NULL_TREE, p[0]), arg1);
11167 case 1: /* length is greater than 1, call fwrite. */
11169 /* If optimizing for size keep fputs. */
11172 /* New argument list transforming fputs(string, stream) to
11173 fwrite(string, 1, len, stream). */
11175 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11180 gcc_unreachable ();
11185 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11186 produced. False otherwise. This is done so that we don't output the error
11187 or warning twice or three times. */
11189 fold_builtin_next_arg (tree exp, bool va_start_p)
11191 tree fntype = TREE_TYPE (current_function_decl);
11192 int nargs = call_expr_nargs (exp);
11195 if (TYPE_ARG_TYPES (fntype) == 0
11196 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11197 == void_type_node))
11199 error ("%<va_start%> used in function with fixed args");
11205 if (va_start_p && (nargs != 2))
11207 error ("wrong number of arguments to function %<va_start%>");
11210 arg = CALL_EXPR_ARG (exp, 1);
11212 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11213 when we checked the arguments and if needed issued a warning. */
11218 /* Evidently an out of date version of <stdarg.h>; can't validate
11219 va_start's second argument, but can still work as intended. */
11220 warning (0, "%<__builtin_next_arg%> called without an argument");
11223 else if (nargs > 1)
11225 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11228 arg = CALL_EXPR_ARG (exp, 0);
11231 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11232 or __builtin_next_arg (0) the first time we see it, after checking
11233 the arguments and if needed issuing a warning. */
11234 if (!integer_zerop (arg))
11236 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11238 /* Strip off all nops for the sake of the comparison. This
11239 is not quite the same as STRIP_NOPS. It does more.
11240 We must also strip off INDIRECT_EXPR for C++ reference
11242 while (TREE_CODE (arg) == NOP_EXPR
11243 || TREE_CODE (arg) == CONVERT_EXPR
11244 || TREE_CODE (arg) == NON_LVALUE_EXPR
11245 || TREE_CODE (arg) == INDIRECT_REF)
11246 arg = TREE_OPERAND (arg, 0);
11247 if (arg != last_parm)
11249 /* FIXME: Sometimes with the tree optimizers we can get the
11250 not the last argument even though the user used the last
11251 argument. We just warn and set the arg to be the last
11252 argument so that we will get wrong-code because of
11254 warning (0, "second parameter of %<va_start%> not last named argument");
11256 /* We want to verify the second parameter just once before the tree
11257 optimizers are run and then avoid keeping it in the tree,
11258 as otherwise we could warn even for correct code like:
11259 void foo (int i, ...)
11260 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11262 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11264 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11270 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11271 ORIG may be null if this is a 2-argument call. We don't attempt to
11272 simplify calls with more than 3 arguments.
11274 Return NULL_TREE if no simplification was possible, otherwise return the
11275 simplified form of the call as a tree. If IGNORED is true, it means that
11276 the caller does not use the returned value of the function. */
11279 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11282 const char *fmt_str = NULL;
11284 /* Verify the required arguments in the original call. We deal with two
11285 types of sprintf() calls: 'sprintf (str, fmt)' and
11286 'sprintf (dest, "%s", orig)'. */
11287 if (!validate_arg (dest, POINTER_TYPE)
11288 || !validate_arg (fmt, POINTER_TYPE))
11290 if (orig && !validate_arg (orig, POINTER_TYPE))
11293 /* Check whether the format is a literal string constant. */
11294 fmt_str = c_getstr (fmt);
11295 if (fmt_str == NULL)
11299 retval = NULL_TREE;
11301 if (!init_target_chars ())
11304 /* If the format doesn't contain % args or %%, use strcpy. */
11305 if (strchr (fmt_str, target_percent) == NULL)
11307 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11312 /* Don't optimize sprintf (buf, "abc", ptr++). */
11316 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11317 'format' is known to contain no % formats. */
11318 call = build_call_expr (fn, 2, dest, fmt);
11320 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11323 /* If the format is "%s", use strcpy if the result isn't used. */
11324 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11327 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11332 /* Don't crash on sprintf (str1, "%s"). */
11336 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11339 retval = c_strlen (orig, 1);
11340 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11343 call = build_call_expr (fn, 2, dest, orig);
11346 if (call && retval)
11348 retval = fold_convert
11349 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11351 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11357 /* Expand a call EXP to __builtin_object_size. */
11360 expand_builtin_object_size (tree exp)
11363 int object_size_type;
11364 tree fndecl = get_callee_fndecl (exp);
11365 location_t locus = EXPR_LOCATION (exp);
11367 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11369 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11371 expand_builtin_trap ();
11375 ost = CALL_EXPR_ARG (exp, 1);
11378 if (TREE_CODE (ost) != INTEGER_CST
11379 || tree_int_cst_sgn (ost) < 0
11380 || compare_tree_int (ost, 3) > 0)
11382 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11384 expand_builtin_trap ();
11388 object_size_type = tree_low_cst (ost, 0);
11390 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11393 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11394 FCODE is the BUILT_IN_* to use.
11395 Return NULL_RTX if we failed; the caller should emit a normal call,
11396 otherwise try to get the result in TARGET, if convenient (and in
11397 mode MODE if that's convenient). */
11400 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11401 enum built_in_function fcode)
11403 tree dest, src, len, size;
11405 if (!validate_arglist (exp,
11407 fcode == BUILT_IN_MEMSET_CHK
11408 ? INTEGER_TYPE : POINTER_TYPE,
11409 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11412 dest = CALL_EXPR_ARG (exp, 0);
11413 src = CALL_EXPR_ARG (exp, 1);
11414 len = CALL_EXPR_ARG (exp, 2);
11415 size = CALL_EXPR_ARG (exp, 3);
11417 if (! host_integerp (size, 1))
11420 if (host_integerp (len, 1) || integer_all_onesp (size))
11424 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11426 location_t locus = EXPR_LOCATION (exp);
11427 warning (0, "%Hcall to %D will always overflow destination buffer",
11428 &locus, get_callee_fndecl (exp));
11433 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11434 mem{cpy,pcpy,move,set} is available. */
11437 case BUILT_IN_MEMCPY_CHK:
11438 fn = built_in_decls[BUILT_IN_MEMCPY];
11440 case BUILT_IN_MEMPCPY_CHK:
11441 fn = built_in_decls[BUILT_IN_MEMPCPY];
11443 case BUILT_IN_MEMMOVE_CHK:
11444 fn = built_in_decls[BUILT_IN_MEMMOVE];
11446 case BUILT_IN_MEMSET_CHK:
11447 fn = built_in_decls[BUILT_IN_MEMSET];
11456 fn = build_call_expr (fn, 3, dest, src, len);
11457 if (TREE_CODE (fn) == CALL_EXPR)
11458 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11459 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11461 else if (fcode == BUILT_IN_MEMSET_CHK)
11465 unsigned int dest_align
11466 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11468 /* If DEST is not a pointer type, call the normal function. */
11469 if (dest_align == 0)
11472 /* If SRC and DEST are the same (and not volatile), do nothing. */
11473 if (operand_equal_p (src, dest, 0))
11477 if (fcode != BUILT_IN_MEMPCPY_CHK)
11479 /* Evaluate and ignore LEN in case it has side-effects. */
11480 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11481 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11484 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11485 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11488 /* __memmove_chk special case. */
11489 if (fcode == BUILT_IN_MEMMOVE_CHK)
11491 unsigned int src_align
11492 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11494 if (src_align == 0)
11497 /* If src is categorized for a readonly section we can use
11498 normal __memcpy_chk. */
11499 if (readonly_data_expr (src))
11501 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11504 fn = build_call_expr (fn, 4, dest, src, len, size);
11505 if (TREE_CODE (fn) == CALL_EXPR)
11506 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11507 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11514 /* Emit warning if a buffer overflow is detected at compile time. */
11517 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11525 case BUILT_IN_STRCPY_CHK:
11526 case BUILT_IN_STPCPY_CHK:
11527 /* For __strcat_chk the warning will be emitted only if overflowing
11528 by at least strlen (dest) + 1 bytes. */
11529 case BUILT_IN_STRCAT_CHK:
11530 len = CALL_EXPR_ARG (exp, 1);
11531 size = CALL_EXPR_ARG (exp, 2);
11534 case BUILT_IN_STRNCAT_CHK:
11535 case BUILT_IN_STRNCPY_CHK:
11536 len = CALL_EXPR_ARG (exp, 2);
11537 size = CALL_EXPR_ARG (exp, 3);
11539 case BUILT_IN_SNPRINTF_CHK:
11540 case BUILT_IN_VSNPRINTF_CHK:
11541 len = CALL_EXPR_ARG (exp, 1);
11542 size = CALL_EXPR_ARG (exp, 3);
11545 gcc_unreachable ();
11551 if (! host_integerp (size, 1) || integer_all_onesp (size))
11556 len = c_strlen (len, 1);
11557 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11560 else if (fcode == BUILT_IN_STRNCAT_CHK)
11562 tree src = CALL_EXPR_ARG (exp, 1);
11563 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11565 src = c_strlen (src, 1);
11566 if (! src || ! host_integerp (src, 1))
11568 locus = EXPR_LOCATION (exp);
11569 warning (0, "%Hcall to %D might overflow destination buffer",
11570 &locus, get_callee_fndecl (exp));
11573 else if (tree_int_cst_lt (src, size))
11576 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11579 locus = EXPR_LOCATION (exp);
11580 warning (0, "%Hcall to %D will always overflow destination buffer",
11581 &locus, get_callee_fndecl (exp));
11584 /* Emit warning if a buffer overflow is detected at compile time
11585 in __sprintf_chk/__vsprintf_chk calls. */
11588 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11590 tree dest, size, len, fmt, flag;
11591 const char *fmt_str;
11592 int nargs = call_expr_nargs (exp);
11594 /* Verify the required arguments in the original call. */
11598 dest = CALL_EXPR_ARG (exp, 0);
11599 flag = CALL_EXPR_ARG (exp, 1);
11600 size = CALL_EXPR_ARG (exp, 2);
11601 fmt = CALL_EXPR_ARG (exp, 3);
11603 if (! host_integerp (size, 1) || integer_all_onesp (size))
11606 /* Check whether the format is a literal string constant. */
11607 fmt_str = c_getstr (fmt);
11608 if (fmt_str == NULL)
11611 if (!init_target_chars ())
11614 /* If the format doesn't contain % args or %%, we know its size. */
11615 if (strchr (fmt_str, target_percent) == 0)
11616 len = build_int_cstu (size_type_node, strlen (fmt_str));
11617 /* If the format is "%s" and first ... argument is a string literal,
11619 else if (fcode == BUILT_IN_SPRINTF_CHK
11620 && strcmp (fmt_str, target_percent_s) == 0)
11626 arg = CALL_EXPR_ARG (exp, 4);
11627 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11630 len = c_strlen (arg, 1);
11631 if (!len || ! host_integerp (len, 1))
11637 if (! tree_int_cst_lt (len, size))
11639 location_t locus = EXPR_LOCATION (exp);
11640 warning (0, "%Hcall to %D will always overflow destination buffer",
11641 &locus, get_callee_fndecl (exp));
11645 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11649 fold_builtin_object_size (tree ptr, tree ost)
11651 tree ret = NULL_TREE;
11652 int object_size_type;
11654 if (!validate_arg (ptr, POINTER_TYPE)
11655 || !validate_arg (ost, INTEGER_TYPE))
11660 if (TREE_CODE (ost) != INTEGER_CST
11661 || tree_int_cst_sgn (ost) < 0
11662 || compare_tree_int (ost, 3) > 0)
11665 object_size_type = tree_low_cst (ost, 0);
11667 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11668 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11669 and (size_t) 0 for types 2 and 3. */
11670 if (TREE_SIDE_EFFECTS (ptr))
11671 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11673 if (TREE_CODE (ptr) == ADDR_EXPR)
11674 ret = build_int_cstu (size_type_node,
11675 compute_builtin_object_size (ptr, object_size_type));
11677 else if (TREE_CODE (ptr) == SSA_NAME)
11679 unsigned HOST_WIDE_INT bytes;
11681 /* If object size is not known yet, delay folding until
11682 later. Maybe subsequent passes will help determining
11684 bytes = compute_builtin_object_size (ptr, object_size_type);
11685 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11687 ret = build_int_cstu (size_type_node, bytes);
11692 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11693 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11694 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11701 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11702 DEST, SRC, LEN, and SIZE are the arguments to the call.
11703 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11704 code of the builtin. If MAXLEN is not NULL, it is maximum length
11705 passed as third argument. */
11708 fold_builtin_memory_chk (tree fndecl,
11709 tree dest, tree src, tree len, tree size,
11710 tree maxlen, bool ignore,
11711 enum built_in_function fcode)
11715 if (!validate_arg (dest, POINTER_TYPE)
11716 || !validate_arg (src,
11717 (fcode == BUILT_IN_MEMSET_CHK
11718 ? INTEGER_TYPE : POINTER_TYPE))
11719 || !validate_arg (len, INTEGER_TYPE)
11720 || !validate_arg (size, INTEGER_TYPE))
11723 /* If SRC and DEST are the same (and not volatile), return DEST
11724 (resp. DEST+LEN for __mempcpy_chk). */
11725 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11727 if (fcode != BUILT_IN_MEMPCPY_CHK)
11728 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11731 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11732 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11736 if (! host_integerp (size, 1))
11739 if (! integer_all_onesp (size))
11741 if (! host_integerp (len, 1))
11743 /* If LEN is not constant, try MAXLEN too.
11744 For MAXLEN only allow optimizing into non-_ocs function
11745 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11746 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11748 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11750 /* (void) __mempcpy_chk () can be optimized into
11751 (void) __memcpy_chk (). */
11752 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11756 return build_call_expr (fn, 4, dest, src, len, size);
11764 if (tree_int_cst_lt (size, maxlen))
11769 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11770 mem{cpy,pcpy,move,set} is available. */
11773 case BUILT_IN_MEMCPY_CHK:
11774 fn = built_in_decls[BUILT_IN_MEMCPY];
11776 case BUILT_IN_MEMPCPY_CHK:
11777 fn = built_in_decls[BUILT_IN_MEMPCPY];
11779 case BUILT_IN_MEMMOVE_CHK:
11780 fn = built_in_decls[BUILT_IN_MEMMOVE];
11782 case BUILT_IN_MEMSET_CHK:
11783 fn = built_in_decls[BUILT_IN_MEMSET];
11792 return build_call_expr (fn, 3, dest, src, len);
11795 /* Fold a call to the __st[rp]cpy_chk builtin.
11796 DEST, SRC, and SIZE are the arguments to the call.
11797 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11798 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11799 strings passed as second argument. */
11802 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11803 tree maxlen, bool ignore,
11804 enum built_in_function fcode)
11808 if (!validate_arg (dest, POINTER_TYPE)
11809 || !validate_arg (src, POINTER_TYPE)
11810 || !validate_arg (size, INTEGER_TYPE))
11813 /* If SRC and DEST are the same (and not volatile), return DEST. */
11814 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11815 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
11817 if (! host_integerp (size, 1))
11820 if (! integer_all_onesp (size))
11822 len = c_strlen (src, 1);
11823 if (! len || ! host_integerp (len, 1))
11825 /* If LEN is not constant, try MAXLEN too.
11826 For MAXLEN only allow optimizing into non-_ocs function
11827 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11828 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11830 if (fcode == BUILT_IN_STPCPY_CHK)
11835 /* If return value of __stpcpy_chk is ignored,
11836 optimize into __strcpy_chk. */
11837 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11841 return build_call_expr (fn, 3, dest, src, size);
11844 if (! len || TREE_SIDE_EFFECTS (len))
11847 /* If c_strlen returned something, but not a constant,
11848 transform __strcpy_chk into __memcpy_chk. */
11849 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11853 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11854 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11855 build_call_expr (fn, 4,
11856 dest, src, len, size));
11862 if (! tree_int_cst_lt (maxlen, size))
11866 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11867 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11868 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11872 return build_call_expr (fn, 2, dest, src);
11875 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11876 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11877 length passed as third argument. */
11880 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11885 if (!validate_arg (dest, POINTER_TYPE)
11886 || !validate_arg (src, POINTER_TYPE)
11887 || !validate_arg (len, INTEGER_TYPE)
11888 || !validate_arg (size, INTEGER_TYPE))
11891 if (! host_integerp (size, 1))
11894 if (! integer_all_onesp (size))
11896 if (! host_integerp (len, 1))
11898 /* If LEN is not constant, try MAXLEN too.
11899 For MAXLEN only allow optimizing into non-_ocs function
11900 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11901 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11907 if (tree_int_cst_lt (size, maxlen))
11911 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11912 fn = built_in_decls[BUILT_IN_STRNCPY];
11916 return build_call_expr (fn, 3, dest, src, len);
11919 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11920 are the arguments to the call. */
11923 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11928 if (!validate_arg (dest, POINTER_TYPE)
11929 || !validate_arg (src, POINTER_TYPE)
11930 || !validate_arg (size, INTEGER_TYPE))
11933 p = c_getstr (src);
11934 /* If the SRC parameter is "", return DEST. */
11935 if (p && *p == '\0')
11936 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11938 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11941 /* If __builtin_strcat_chk is used, assume strcat is available. */
11942 fn = built_in_decls[BUILT_IN_STRCAT];
11946 return build_call_expr (fn, 2, dest, src);
11949 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11953 fold_builtin_strncat_chk (tree fndecl,
11954 tree dest, tree src, tree len, tree size)
11959 if (!validate_arg (dest, POINTER_TYPE)
11960 || !validate_arg (src, POINTER_TYPE)
11961 || !validate_arg (size, INTEGER_TYPE)
11962 || !validate_arg (size, INTEGER_TYPE))
11965 p = c_getstr (src);
11966 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11967 if (p && *p == '\0')
11968 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11969 else if (integer_zerop (len))
11970 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11972 if (! host_integerp (size, 1))
11975 if (! integer_all_onesp (size))
11977 tree src_len = c_strlen (src, 1);
11979 && host_integerp (src_len, 1)
11980 && host_integerp (len, 1)
11981 && ! tree_int_cst_lt (len, src_len))
11983 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11984 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11988 return build_call_expr (fn, 3, dest, src, size);
11993 /* If __builtin_strncat_chk is used, assume strncat is available. */
11994 fn = built_in_decls[BUILT_IN_STRNCAT];
11998 return build_call_expr (fn, 3, dest, src, len);
12001 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12002 a normal call should be emitted rather than expanding the function
12003 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12006 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12008 tree dest, size, len, fn, fmt, flag;
12009 const char *fmt_str;
12010 int nargs = call_expr_nargs (exp);
12012 /* Verify the required arguments in the original call. */
12015 dest = CALL_EXPR_ARG (exp, 0);
12016 if (!validate_arg (dest, POINTER_TYPE))
12018 flag = CALL_EXPR_ARG (exp, 1);
12019 if (!validate_arg (flag, INTEGER_TYPE))
12021 size = CALL_EXPR_ARG (exp, 2);
12022 if (!validate_arg (size, INTEGER_TYPE))
12024 fmt = CALL_EXPR_ARG (exp, 3);
12025 if (!validate_arg (fmt, POINTER_TYPE))
12028 if (! host_integerp (size, 1))
12033 if (!init_target_chars ())
12036 /* Check whether the format is a literal string constant. */
12037 fmt_str = c_getstr (fmt);
12038 if (fmt_str != NULL)
12040 /* If the format doesn't contain % args or %%, we know the size. */
12041 if (strchr (fmt_str, target_percent) == 0)
12043 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12044 len = build_int_cstu (size_type_node, strlen (fmt_str));
12046 /* If the format is "%s" and first ... argument is a string literal,
12047 we know the size too. */
12048 else if (fcode == BUILT_IN_SPRINTF_CHK
12049 && strcmp (fmt_str, target_percent_s) == 0)
12055 arg = CALL_EXPR_ARG (exp, 4);
12056 if (validate_arg (arg, POINTER_TYPE))
12058 len = c_strlen (arg, 1);
12059 if (! len || ! host_integerp (len, 1))
12066 if (! integer_all_onesp (size))
12068 if (! len || ! tree_int_cst_lt (len, size))
12072 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12073 or if format doesn't contain % chars or is "%s". */
12074 if (! integer_zerop (flag))
12076 if (fmt_str == NULL)
12078 if (strchr (fmt_str, target_percent) != NULL
12079 && strcmp (fmt_str, target_percent_s))
12083 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12084 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12085 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12089 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12092 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12093 a normal call should be emitted rather than expanding the function
12094 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12095 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12096 passed as second argument. */
12099 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12100 enum built_in_function fcode)
12102 tree dest, size, len, fn, fmt, flag;
12103 const char *fmt_str;
12105 /* Verify the required arguments in the original call. */
12106 if (call_expr_nargs (exp) < 5)
12108 dest = CALL_EXPR_ARG (exp, 0);
12109 if (!validate_arg (dest, POINTER_TYPE))
12111 len = CALL_EXPR_ARG (exp, 1);
12112 if (!validate_arg (len, INTEGER_TYPE))
12114 flag = CALL_EXPR_ARG (exp, 2);
12115 if (!validate_arg (flag, INTEGER_TYPE))
12117 size = CALL_EXPR_ARG (exp, 3);
12118 if (!validate_arg (size, INTEGER_TYPE))
12120 fmt = CALL_EXPR_ARG (exp, 4);
12121 if (!validate_arg (fmt, POINTER_TYPE))
12124 if (! host_integerp (size, 1))
12127 if (! integer_all_onesp (size))
12129 if (! host_integerp (len, 1))
12131 /* If LEN is not constant, try MAXLEN too.
12132 For MAXLEN only allow optimizing into non-_ocs function
12133 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12134 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12140 if (tree_int_cst_lt (size, maxlen))
12144 if (!init_target_chars ())
12147 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12148 or if format doesn't contain % chars or is "%s". */
12149 if (! integer_zerop (flag))
12151 fmt_str = c_getstr (fmt);
12152 if (fmt_str == NULL)
12154 if (strchr (fmt_str, target_percent) != NULL
12155 && strcmp (fmt_str, target_percent_s))
12159 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12161 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12162 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12166 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12169 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12170 FMT and ARG are the arguments to the call; we don't fold cases with
12171 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12173 Return NULL_TREE if no simplification was possible, otherwise return the
12174 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12175 code of the function to be simplified. */
12178 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12179 enum built_in_function fcode)
12181 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12182 const char *fmt_str = NULL;
12184 /* If the return value is used, don't do the transformation. */
12188 /* Verify the required arguments in the original call. */
12189 if (!validate_arg (fmt, POINTER_TYPE))
12192 /* Check whether the format is a literal string constant. */
12193 fmt_str = c_getstr (fmt);
12194 if (fmt_str == NULL)
12197 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12199 /* If we're using an unlocked function, assume the other
12200 unlocked functions exist explicitly. */
12201 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12202 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12206 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12207 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12210 if (!init_target_chars ())
12213 if (strcmp (fmt_str, target_percent_s) == 0
12214 || strchr (fmt_str, target_percent) == NULL)
12218 if (strcmp (fmt_str, target_percent_s) == 0)
12220 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12223 if (!arg || !validate_arg (arg, POINTER_TYPE))
12226 str = c_getstr (arg);
12232 /* The format specifier doesn't contain any '%' characters. */
12233 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12239 /* If the string was "", printf does nothing. */
12240 if (str[0] == '\0')
12241 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12243 /* If the string has length of 1, call putchar. */
12244 if (str[1] == '\0')
12246 /* Given printf("c"), (where c is any one character,)
12247 convert "c"[0] to an int and pass that to the replacement
12249 newarg = build_int_cst (NULL_TREE, str[0]);
12251 call = build_call_expr (fn_putchar, 1, newarg);
12255 /* If the string was "string\n", call puts("string"). */
12256 size_t len = strlen (str);
12257 if ((unsigned char)str[len - 1] == target_newline)
12259 /* Create a NUL-terminated string that's one char shorter
12260 than the original, stripping off the trailing '\n'. */
12261 char *newstr = alloca (len);
12262 memcpy (newstr, str, len - 1);
12263 newstr[len - 1] = 0;
12265 newarg = build_string_literal (len, newstr);
12267 call = build_call_expr (fn_puts, 1, newarg);
12270 /* We'd like to arrange to call fputs(string,stdout) here,
12271 but we need stdout and don't have a way to get it yet. */
12276 /* The other optimizations can be done only on the non-va_list variants. */
12277 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12280 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12281 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12283 if (!arg || !validate_arg (arg, POINTER_TYPE))
12286 call = build_call_expr (fn_puts, 1, arg);
12289 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12290 else if (strcmp (fmt_str, target_percent_c) == 0)
12292 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12295 call = build_call_expr (fn_putchar, 1, arg);
12301 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12304 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12305 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12306 more than 3 arguments, and ARG may be null in the 2-argument case.
12308 Return NULL_TREE if no simplification was possible, otherwise return the
12309 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12310 code of the function to be simplified. */
12313 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12314 enum built_in_function fcode)
12316 tree fn_fputc, fn_fputs, call = NULL_TREE;
12317 const char *fmt_str = NULL;
12319 /* If the return value is used, don't do the transformation. */
12323 /* Verify the required arguments in the original call. */
12324 if (!validate_arg (fp, POINTER_TYPE))
12326 if (!validate_arg (fmt, POINTER_TYPE))
12329 /* Check whether the format is a literal string constant. */
12330 fmt_str = c_getstr (fmt);
12331 if (fmt_str == NULL)
12334 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12336 /* If we're using an unlocked function, assume the other
12337 unlocked functions exist explicitly. */
12338 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12339 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12343 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12344 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12347 if (!init_target_chars ())
12350 /* If the format doesn't contain % args or %%, use strcpy. */
12351 if (strchr (fmt_str, target_percent) == NULL)
12353 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12357 /* If the format specifier was "", fprintf does nothing. */
12358 if (fmt_str[0] == '\0')
12360 /* If FP has side-effects, just wait until gimplification is
12362 if (TREE_SIDE_EFFECTS (fp))
12365 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12368 /* When "string" doesn't contain %, replace all cases of
12369 fprintf (fp, string) with fputs (string, fp). The fputs
12370 builtin will take care of special cases like length == 1. */
12372 call = build_call_expr (fn_fputs, 2, fmt, fp);
12375 /* The other optimizations can be done only on the non-va_list variants. */
12376 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12379 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12380 else if (strcmp (fmt_str, target_percent_s) == 0)
12382 if (!arg || !validate_arg (arg, POINTER_TYPE))
12385 call = build_call_expr (fn_fputs, 2, arg, fp);
12388 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12389 else if (strcmp (fmt_str, target_percent_c) == 0)
12391 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12394 call = build_call_expr (fn_fputc, 2, arg, fp);
12399 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12402 /* Initialize format string characters in the target charset. */
12405 init_target_chars (void)
12410 target_newline = lang_hooks.to_target_charset ('\n');
12411 target_percent = lang_hooks.to_target_charset ('%');
12412 target_c = lang_hooks.to_target_charset ('c');
12413 target_s = lang_hooks.to_target_charset ('s');
12414 if (target_newline == 0 || target_percent == 0 || target_c == 0
12418 target_percent_c[0] = target_percent;
12419 target_percent_c[1] = target_c;
12420 target_percent_c[2] = '\0';
12422 target_percent_s[0] = target_percent;
12423 target_percent_s[1] = target_s;
12424 target_percent_s[2] = '\0';
12426 target_percent_s_newline[0] = target_percent;
12427 target_percent_s_newline[1] = target_s;
12428 target_percent_s_newline[2] = target_newline;
12429 target_percent_s_newline[3] = '\0';
12436 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12437 and no overflow/underflow occurred. INEXACT is true if M was not
12438 exactly calculated. TYPE is the tree type for the result. This
12439 function assumes that you cleared the MPFR flags and then
12440 calculated M to see if anything subsequently set a flag prior to
12441 entering this function. Return NULL_TREE if any checks fail. */
12444 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12446 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12447 overflow/underflow occurred. If -frounding-math, proceed iff the
12448 result of calling FUNC was exact. */
12449 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12450 && (!flag_rounding_math || !inexact))
12452 REAL_VALUE_TYPE rr;
12454 real_from_mpfr (&rr, m, type, GMP_RNDN);
12455 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12456 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12457 but the mpft_t is not, then we underflowed in the
12459 if (real_isfinite (&rr)
12460 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12462 REAL_VALUE_TYPE rmode;
12464 real_convert (&rmode, TYPE_MODE (type), &rr);
12465 /* Proceed iff the specified mode can hold the value. */
12466 if (real_identical (&rmode, &rr))
12467 return build_real (type, rmode);
12473 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12474 FUNC on it and return the resulting value as a tree with type TYPE.
12475 If MIN and/or MAX are not NULL, then the supplied ARG must be
12476 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12477 acceptable values, otherwise they are not. The mpfr precision is
12478 set to the precision of TYPE. We assume that function FUNC returns
12479 zero if the result could be calculated exactly within the requested
12483 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12484 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12487 tree result = NULL_TREE;
12491 /* To proceed, MPFR must exactly represent the target floating point
12492 format, which only happens when the target base equals two. */
12493 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12494 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12496 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12498 if (real_isfinite (ra)
12499 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12500 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12502 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12506 mpfr_init2 (m, prec);
12507 mpfr_from_real (m, ra, GMP_RNDN);
12508 mpfr_clear_flags ();
12509 inexact = func (m, m, GMP_RNDN);
12510 result = do_mpfr_ckconv (m, type, inexact);
12518 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12519 FUNC on it and return the resulting value as a tree with type TYPE.
12520 The mpfr precision is set to the precision of TYPE. We assume that
12521 function FUNC returns zero if the result could be calculated
12522 exactly within the requested precision. */
12525 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12526 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12528 tree result = NULL_TREE;
12533 /* To proceed, MPFR must exactly represent the target floating point
12534 format, which only happens when the target base equals two. */
12535 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12536 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12537 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12539 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12540 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12542 if (real_isfinite (ra1) && real_isfinite (ra2))
12544 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12548 mpfr_inits2 (prec, m1, m2, NULL);
12549 mpfr_from_real (m1, ra1, GMP_RNDN);
12550 mpfr_from_real (m2, ra2, GMP_RNDN);
12551 mpfr_clear_flags ();
12552 inexact = func (m1, m1, m2, GMP_RNDN);
12553 result = do_mpfr_ckconv (m1, type, inexact);
12554 mpfr_clears (m1, m2, NULL);
12561 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12562 FUNC on it and return the resulting value as a tree with type TYPE.
12563 The mpfr precision is set to the precision of TYPE. We assume that
12564 function FUNC returns zero if the result could be calculated
12565 exactly within the requested precision. */
12568 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12569 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12571 tree result = NULL_TREE;
12577 /* To proceed, MPFR must exactly represent the target floating point
12578 format, which only happens when the target base equals two. */
12579 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12580 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12581 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12582 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12584 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12585 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12586 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12588 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12590 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12594 mpfr_inits2 (prec, m1, m2, m3, NULL);
12595 mpfr_from_real (m1, ra1, GMP_RNDN);
12596 mpfr_from_real (m2, ra2, GMP_RNDN);
12597 mpfr_from_real (m3, ra3, GMP_RNDN);
12598 mpfr_clear_flags ();
12599 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12600 result = do_mpfr_ckconv (m1, type, inexact);
12601 mpfr_clears (m1, m2, m3, NULL);
12608 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12609 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12610 If ARG_SINP and ARG_COSP are NULL then the result is returned
12611 as a complex value.
12612 The type is taken from the type of ARG and is used for setting the
12613 precision of the calculation and results. */
12616 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12618 tree const type = TREE_TYPE (arg);
12619 tree result = NULL_TREE;
12623 /* To proceed, MPFR must exactly represent the target floating point
12624 format, which only happens when the target base equals two. */
12625 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12626 && TREE_CODE (arg) == REAL_CST
12627 && !TREE_OVERFLOW (arg))
12629 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12631 if (real_isfinite (ra))
12633 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12634 tree result_s, result_c;
12638 mpfr_inits2 (prec, m, ms, mc, NULL);
12639 mpfr_from_real (m, ra, GMP_RNDN);
12640 mpfr_clear_flags ();
12641 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12642 result_s = do_mpfr_ckconv (ms, type, inexact);
12643 result_c = do_mpfr_ckconv (mc, type, inexact);
12644 mpfr_clears (m, ms, mc, NULL);
12645 if (result_s && result_c)
12647 /* If we are to return in a complex value do so. */
12648 if (!arg_sinp && !arg_cosp)
12649 return build_complex (build_complex_type (type),
12650 result_c, result_s);
12652 /* Dereference the sin/cos pointer arguments. */
12653 arg_sinp = build_fold_indirect_ref (arg_sinp);
12654 arg_cosp = build_fold_indirect_ref (arg_cosp);
12655 /* Proceed if valid pointer type were passed in. */
12656 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12657 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12659 /* Set the values. */
12660 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12662 TREE_SIDE_EFFECTS (result_s) = 1;
12663 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12665 TREE_SIDE_EFFECTS (result_c) = 1;
12666 /* Combine the assignments into a compound expr. */
12667 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12668 result_s, result_c));
12676 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12677 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12678 two-argument mpfr order N Bessel function FUNC on them and return
12679 the resulting value as a tree with type TYPE. The mpfr precision
12680 is set to the precision of TYPE. We assume that function FUNC
12681 returns zero if the result could be calculated exactly within the
12682 requested precision. */
12684 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12685 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12686 const REAL_VALUE_TYPE *min, bool inclusive)
12688 tree result = NULL_TREE;
12693 /* To proceed, MPFR must exactly represent the target floating point
12694 format, which only happens when the target base equals two. */
12695 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12696 && host_integerp (arg1, 0)
12697 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12699 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12700 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12703 && real_isfinite (ra)
12704 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12706 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12710 mpfr_init2 (m, prec);
12711 mpfr_from_real (m, ra, GMP_RNDN);
12712 mpfr_clear_flags ();
12713 inexact = func (m, n, m, GMP_RNDN);
12714 result = do_mpfr_ckconv (m, type, inexact);
12722 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12723 the pointer *(ARG_QUO) and return the result. The type is taken
12724 from the type of ARG0 and is used for setting the precision of the
12725 calculation and results. */
12728 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12730 tree const type = TREE_TYPE (arg0);
12731 tree result = NULL_TREE;
12736 /* To proceed, MPFR must exactly represent the target floating point
12737 format, which only happens when the target base equals two. */
12738 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12739 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12740 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12742 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12743 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12745 if (real_isfinite (ra0) && real_isfinite (ra1))
12747 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12752 mpfr_inits2 (prec, m0, m1, NULL);
12753 mpfr_from_real (m0, ra0, GMP_RNDN);
12754 mpfr_from_real (m1, ra1, GMP_RNDN);
12755 mpfr_clear_flags ();
12756 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12757 /* Remquo is independent of the rounding mode, so pass
12758 inexact=0 to do_mpfr_ckconv(). */
12759 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12760 mpfr_clears (m0, m1, NULL);
12763 /* MPFR calculates quo in the host's long so it may
12764 return more bits in quo than the target int can hold
12765 if sizeof(host long) > sizeof(target int). This can
12766 happen even for native compilers in LP64 mode. In
12767 these cases, modulo the quo value with the largest
12768 number that the target int can hold while leaving one
12769 bit for the sign. */
12770 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12771 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12773 /* Dereference the quo pointer argument. */
12774 arg_quo = build_fold_indirect_ref (arg_quo);
12775 /* Proceed iff a valid pointer type was passed in. */
12776 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12778 /* Set the value. */
12779 tree result_quo = fold_build2 (MODIFY_EXPR,
12780 TREE_TYPE (arg_quo), arg_quo,
12781 build_int_cst (NULL, integer_quo));
12782 TREE_SIDE_EFFECTS (result_quo) = 1;
12783 /* Combine the quo assignment with the rem. */
12784 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12785 result_quo, result_rem));
12793 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12794 resulting value as a tree with type TYPE. The mpfr precision is
12795 set to the precision of TYPE. We assume that this mpfr function
12796 returns zero if the result could be calculated exactly within the
12797 requested precision. In addition, the integer pointer represented
12798 by ARG_SG will be dereferenced and set to the appropriate signgam
12802 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12804 tree result = NULL_TREE;
12808 /* To proceed, MPFR must exactly represent the target floating point
12809 format, which only happens when the target base equals two. Also
12810 verify ARG is a constant and that ARG_SG is an int pointer. */
12811 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12812 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12813 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12814 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12816 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12818 /* In addition to NaN and Inf, the argument cannot be zero or a
12819 negative integer. */
12820 if (real_isfinite (ra)
12821 && ra->cl != rvc_zero
12822 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
12824 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12829 mpfr_init2 (m, prec);
12830 mpfr_from_real (m, ra, GMP_RNDN);
12831 mpfr_clear_flags ();
12832 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
12833 result_lg = do_mpfr_ckconv (m, type, inexact);
12839 /* Dereference the arg_sg pointer argument. */
12840 arg_sg = build_fold_indirect_ref (arg_sg);
12841 /* Assign the signgam value into *arg_sg. */
12842 result_sg = fold_build2 (MODIFY_EXPR,
12843 TREE_TYPE (arg_sg), arg_sg,
12844 build_int_cst (NULL, sg));
12845 TREE_SIDE_EFFECTS (result_sg) = 1;
12846 /* Combine the signgam assignment with the lgamma result. */
12847 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12848 result_sg, result_lg));