1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 #include "coretypes.h"
31 #include "tree-gimple.h"
34 #include "hard-reg-set.h"
37 #include "insn-config.h"
43 #include "typeclass.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* Each DEF_BUILTIN entry expands to the stringized enumerator name, so
   built_in_names[] is indexed by enum built_in_function.  NOTE(review):
   the initializer's closing brace and the #undef of DEF_BUILTIN are
   elided in this listing -- confirm against the full file.  */
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
/* Zero-initialized by virtue of static storage duration.  */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_alloca (tree, rtx);
140 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
141 static rtx expand_builtin_frame_address (tree, tree);
142 static rtx expand_builtin_fputs (tree, rtx, bool);
143 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
146 static tree stabilize_va_list (tree, int);
147 static rtx expand_builtin_expect (tree, rtx);
148 static tree fold_builtin_constant_p (tree);
149 static tree fold_builtin_expect (tree);
150 static tree fold_builtin_classify_type (tree);
151 static tree fold_builtin_strlen (tree);
152 static tree fold_builtin_inf (tree, int);
153 static tree fold_builtin_nan (tree, tree, int);
154 static tree rewrite_call_expr (tree, int, tree, int, ...);
155 static bool validate_arg (tree, enum tree_code code);
156 static bool integer_valued_real_p (tree);
157 static tree fold_trunc_transparent_mathfn (tree, tree);
158 static bool readonly_data_expr (tree);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_sqrt (tree, tree);
162 static tree fold_builtin_cbrt (tree, tree);
163 static tree fold_builtin_pow (tree, tree, tree, tree);
164 static tree fold_builtin_powi (tree, tree, tree, tree);
165 static tree fold_builtin_cos (tree, tree, tree);
166 static tree fold_builtin_cosh (tree, tree, tree);
167 static tree fold_builtin_tan (tree, tree);
168 static tree fold_builtin_trunc (tree, tree);
169 static tree fold_builtin_floor (tree, tree);
170 static tree fold_builtin_ceil (tree, tree);
171 static tree fold_builtin_round (tree, tree);
172 static tree fold_builtin_int_roundingfn (tree, tree);
173 static tree fold_builtin_bitop (tree, tree);
174 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
175 static tree fold_builtin_strchr (tree, tree, tree);
176 static tree fold_builtin_memchr (tree, tree, tree, tree);
177 static tree fold_builtin_memcmp (tree, tree, tree);
178 static tree fold_builtin_strcmp (tree, tree);
179 static tree fold_builtin_strncmp (tree, tree, tree);
180 static tree fold_builtin_signbit (tree, tree);
181 static tree fold_builtin_copysign (tree, tree, tree, tree);
182 static tree fold_builtin_isascii (tree);
183 static tree fold_builtin_toascii (tree);
184 static tree fold_builtin_isdigit (tree);
185 static tree fold_builtin_fabs (tree, tree);
186 static tree fold_builtin_abs (tree, tree);
187 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
189 static tree fold_builtin_n (tree, tree *, int, bool);
190 static tree fold_builtin_0 (tree, bool);
191 static tree fold_builtin_1 (tree, tree, bool);
192 static tree fold_builtin_2 (tree, tree, tree, bool);
193 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
194 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
195 static tree fold_builtin_varargs (tree, tree, bool);
197 static tree fold_builtin_strpbrk (tree, tree, tree);
198 static tree fold_builtin_strstr (tree, tree, tree);
199 static tree fold_builtin_strrchr (tree, tree, tree);
200 static tree fold_builtin_strcat (tree, tree);
201 static tree fold_builtin_strncat (tree, tree, tree);
202 static tree fold_builtin_strspn (tree, tree);
203 static tree fold_builtin_strcspn (tree, tree);
204 static tree fold_builtin_sprintf (tree, tree, tree, int);
206 static rtx expand_builtin_object_size (tree);
207 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
208 enum built_in_function);
209 static void maybe_emit_chk_warning (tree, enum built_in_function);
210 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
211 static tree fold_builtin_object_size (tree, tree);
212 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
213 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
214 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
215 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
216 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
217 enum built_in_function);
218 static bool init_target_chars (void);
220 static unsigned HOST_WIDE_INT target_newline;
221 static unsigned HOST_WIDE_INT target_percent;
222 static unsigned HOST_WIDE_INT target_c;
223 static unsigned HOST_WIDE_INT target_s;
224 static char target_percent_c[3];
225 static char target_percent_s[3];
226 static char target_percent_s_newline[4];
227 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_arg2 (tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_arg3 (tree, tree, tree, tree,
232 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
233 static tree do_mpfr_sincos (tree, tree, tree);
234 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
235 static tree do_mpfr_bessel_n (tree, tree, tree,
236 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
237 const REAL_VALUE_TYPE *, bool);
240 /* Return true if NODE should be considered for inline expansion regardless
241 of the optimization level. This means whenever a function is invoked with
242 its "internal" name, which normally contains the prefix "__builtin". */
244 static bool called_as_built_in (tree node)
/* NODE is a FUNCTION_DECL; compare its identifier against the reserved
   builtin prefixes.  */
246 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
247 if (strncmp (name, "__builtin_", 10) == 0)
/* The "__sync_" atomic builtins are treated the same way.  NOTE(review):
   the return statements for both tests and the final return are elided
   in this listing.  */
249 if (strncmp (name, "__sync_", 7) == 0)
254 /* Return the alignment in bits of EXP, a pointer valued expression.
255 But don't return more than MAX_ALIGN no matter what.
256 The alignment returned is, by default, the alignment of the thing that
257 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
259 Otherwise, look at the expression to see if we can do better, i.e., if the
260 expression is actually pointing at an object whose alignment is tighter. */
263 get_pointer_alignment (tree exp, unsigned int max_align)
265 unsigned int align, inner;
267 /* We rely on TER to compute accurate alignment information. */
268 if (!(optimize && flag_tree_ter))
271 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
/* Start from the declared alignment of the pointed-to type, capped at
   MAX_ALIGN.  */
274 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
275 align = MIN (align, max_align);
/* Walk through conversions and pointer arithmetic looking for a tighter
   bound.  NOTE(review): several case labels of this switch are elided in
   this listing.  */
279 switch (TREE_CODE (exp))
283 case NON_LVALUE_EXPR:
284 exp = TREE_OPERAND (exp, 0);
285 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
288 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
289 align = MIN (inner, max_align);
293 /* If sum of pointer + int, restrict our maximum alignment to that
294 imposed by the integer. If not, we can't do any better than
296 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
/* Halve MAX_ALIGN until the constant addend is a multiple of it, so the
   result can't claim more alignment than the offset preserves.  */
299 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
300 & (max_align / BITS_PER_UNIT - 1))
304 exp = TREE_OPERAND (exp, 0);
308 /* See what we are pointing at and look at its alignment. */
309 exp = TREE_OPERAND (exp, 0);
/* For component references, combine the bit position and any variable
   offset terms into INNER via the lowest-set-bit trick (x & -x).  */
311 if (handled_component_p (exp))
313 HOST_WIDE_INT bitsize, bitpos;
315 enum machine_mode mode;
316 int unsignedp, volatilep;
318 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
319 &mode, &unsignedp, &volatilep, true);
321 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
322 if (offset && TREE_CODE (offset) == PLUS_EXPR
323 && host_integerp (TREE_OPERAND (offset, 1), 1))
325 /* Any overflow in calculating offset_bits won't change
328 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
332 inner = MIN (inner, (offset_bits & -offset_bits));
333 offset = TREE_OPERAND (offset, 0);
335 if (offset && TREE_CODE (offset) == MULT_EXPR
336 && host_integerp (TREE_OPERAND (offset, 1), 1))
338 /* Any overflow in calculating offset_factor won't change
340 unsigned offset_factor
341 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
345 inner = MIN (inner, (offset_factor & -offset_factor));
/* A residual variable offset can misalign by anything, so only
   byte alignment survives.  */
348 inner = MIN (inner, BITS_PER_UNIT);
350 if (TREE_CODE (exp) == FUNCTION_DECL)
351 align = FUNCTION_BOUNDARY;
352 else if (DECL_P (exp))
353 align = MIN (inner, DECL_ALIGN (exp));
354 #ifdef CONSTANT_ALIGNMENT
355 else if (CONSTANT_CLASS_P (exp))
356 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
358 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
359 || TREE_CODE (exp) == INDIRECT_REF)
360 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
362 align = MIN (align, inner);
363 return MIN (align, max_align);
371 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
372 way, because it could contain a zero byte in the middle.
373 TREE_STRING_LENGTH is the size of the character array, not the string.
375 ONLY_VALUE should be nonzero if the result is not going to be emitted
376 into the instruction stream and zero if it is going to be expanded.
377 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
378 is returned, otherwise NULL, since
379 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
380 evaluate the side-effects.
382 The value returned is of type `ssizetype'.
384 Unfortunately, string_constant can't access the values of const char
385 arrays with initializers, so neither can we do so here. */
388 c_strlen (tree src, int only_value)
391 HOST_WIDE_INT offset;
/* A conditional whose condition is side-effect free (or whose value is
   all we need) has a known length only when both arms agree.  */
396 if (TREE_CODE (src) == COND_EXPR
397 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
401 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
402 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
403 if (tree_int_cst_equal (len1, len2))
/* For (e1, e2) the string value is that of e2.  */
407 if (TREE_CODE (src) == COMPOUND_EXPR
408 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
409 return c_strlen (TREE_OPERAND (src, 1), only_value);
411 src = string_constant (src, &offset_node);
/* MAX is the last valid index; the array always holds at least the
   terminating NUL appended by build_string.  */
415 max = TREE_STRING_LENGTH (src) - 1;
416 ptr = TREE_STRING_POINTER (src);
418 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
420 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
421 compute the offset to the following null if we don't know where to
422 start searching for it. */
425 for (i = 0; i < max; i++)
429 /* We don't know the starting offset, but we do know that the string
430 has no internal zero bytes. We can assume that the offset falls
431 within the bounds of the string; otherwise, the programmer deserves
432 what he gets. Subtract the offset from the length of the string,
433 and return that. This would perhaps not be valid if we were dealing
434 with named arrays in addition to literal string constants. */
436 return size_diffop (size_int (max), offset_node);
439 /* We have a known offset into the string. Start searching there for
440 a null character if we can represent it as a single HOST_WIDE_INT. */
441 if (offset_node == 0)
443 else if (! host_integerp (offset_node, 0))
446 offset = tree_low_cst (offset_node, 0);
448 /* If the offset is known to be out of bounds, warn, and call strlen at
450 if (offset < 0 || offset > max)
452 warning (0, "offset outside bounds of constant string");
456 /* Use strlen to search for the first zero byte. Since any strings
457 constructed with build_string will have nulls appended, we win even
458 if we get handed something like (char[4])"abcd".
460 Since OFFSET is our starting index into the string, no further
461 calculation is needed. */
462 return ssize_int (strlen (ptr + offset));
465 /* Return a char pointer for a C string if it is a string constant
466 or sum of string constant and integer constant. */
/* NOTE(review): the "static const char *c_getstr (tree src)" definition
   line is elided in this listing; see the forward declaration above.  */
473 src = string_constant (src, &offset_node);
/* No offset: the string data starts at the beginning of the constant.  */
477 if (offset_node == 0)
478 return TREE_STRING_POINTER (src);
/* Reject a non-constant or out-of-range offset; otherwise index into
   the literal's data.  */
479 else if (!host_integerp (offset_node, 1)
480 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
486 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
487 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
490 c_readstr (const char *str, enum machine_mode mode)
496 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
/* Pack the bytes of STR into the two-HOST_WIDE_INT buffer C[] at the bit
   position the *target* would read them from, honoring both byte and
   word endianness.  */
501 for (i = 0; i < GET_MODE_SIZE (mode); i++)
504 if (WORDS_BIG_ENDIAN)
505 j = GET_MODE_SIZE (mode) - i - 1;
/* When byte and word order differ, flip the byte index within its
   word.  */
506 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
507 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
508 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
510 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
513 ch = (unsigned char) str[i];
514 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
516 return immed_double_const (c[0], c[1], mode);
519 /* Cast a target constant CST to target CHAR and if that value fits into
520 host char type, return zero and put that value into variable pointed to by
524 target_char_cast (tree cst, char *p)
526 unsigned HOST_WIDE_INT val, hostval;
/* Fail for non-constants or when a target char doesn't fit in a
   HOST_WIDE_INT.  NOTE(review): the early-return statements are elided
   in this listing.  */
528 if (!host_integerp (cst, 1)
529 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 val = tree_low_cst (cst, 1);
/* Truncate to the target's char width.  */
533 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
534 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* Truncate again to the host's char width; presumably the two values
   are then compared to detect loss -- elided here.  */
537 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
538 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
547 /* Similar to save_expr, but assumes that arbitrary code is not executed
548 in between the multiple evaluations. In particular, we assume that a
549 non-addressable local variable will not be modified. */
552 builtin_save_expr (tree exp)
/* A non-addressable parameter or non-static local can't change between
   evaluations under the assumption above, so it needs no SAVE_EXPR
   wrapper (the bare return for that case is elided in this listing).  */
554 if (TREE_ADDRESSABLE (exp) == 0
555 && (TREE_CODE (exp) == PARM_DECL
556 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return save_expr (exp);
562 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
563 times to get the address of either a higher stack frame, or a return
564 address located within it (depending on FNDECL_CODE). */
567 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
/* Targets may provide an explicit rtx for the innermost frame address.  */
571 #ifdef INITIAL_FRAME_ADDRESS_RTX
572 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
576 /* For a zero count with __builtin_return_address, we don't care what
577 frame address we return, because target-specific definitions will
578 override us. Therefore frame pointer elimination is OK, and using
579 the soft frame pointer is OK.
581 For a nonzero count, or a zero count with __builtin_frame_address,
582 we require a stable offset from the current frame pointer to the
583 previous one, so we must use the hard frame pointer, and
584 we must disable frame pointer elimination. */
585 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
586 tem = frame_pointer_rtx;
589 tem = hard_frame_pointer_rtx;
591 /* Tell reload not to eliminate the frame pointer. */
592 current_function_accesses_prior_frames = 1;
596 /* Some machines need special handling before we can access
597 arbitrary frames. For example, on the SPARC, we must first flush
598 all register windows to the stack. */
599 #ifdef SETUP_FRAME_ADDRESSES
601 SETUP_FRAME_ADDRESSES ();
604 /* On the SPARC, the return address is not in the frame, it is in a
605 register. There is no way to access it off of the current frame
606 pointer, but it can be accessed off the previous frame pointer by
607 reading the value from the register window save area. */
608 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
609 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
613 /* Scan back COUNT frames to the specified frame. */
614 for (i = 0; i < count; i++)
616 /* Assume the dynamic chain pointer is in the word that the
617 frame address points to, unless otherwise specified. */
618 #ifdef DYNAMIC_CHAIN_ADDRESS
619 tem = DYNAMIC_CHAIN_ADDRESS (tem);
621 tem = memory_address (Pmode, tem);
622 tem = gen_frame_mem (Pmode, tem);
623 tem = copy_to_reg (tem);
626 /* For __builtin_frame_address, return what we've got. But, on
627 the SPARC for example, we may have to add a bias. */
628 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
629 #ifdef FRAME_ADDR_RTX
630 return FRAME_ADDR_RTX (tem);
635 /* For __builtin_return_address, get the return address from that frame. */
636 #ifdef RETURN_ADDR_RTX
637 tem = RETURN_ADDR_RTX (count, tem);
/* Default: the return address sits one word above the frame address.  */
639 tem = memory_address (Pmode,
640 plus_constant (tem, GET_MODE_SIZE (Pmode)));
641 tem = gen_frame_mem (Pmode, tem);
646 /* Alias set used for setjmp buffer. */
647 static HOST_WIDE_INT setjmp_alias_set = -1;
649 /* Construct the leading half of a __builtin_setjmp call. Control will
650 return to RECEIVER_LABEL. This is also called directly by the SJLJ
651 exception handling code. */
654 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
656 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* Lazily create the dedicated alias set shared with longjmp below.  */
660 if (setjmp_alias_set == -1)
661 setjmp_alias_set = new_alias_set ();
663 buf_addr = convert_memory_address (Pmode, buf_addr);
665 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
667 /* We store the frame pointer and the address of receiver_label in
668 the buffer and use the rest of it for the stack save area, which
669 is machine-dependent. */
/* Word 0: the frame value the target wants saved for setjmp.  */
671 mem = gen_rtx_MEM (Pmode, buf_addr);
672 set_mem_alias_set (mem, setjmp_alias_set);
673 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* Word 1: the receiver label's address.  */
675 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
676 set_mem_alias_set (mem, setjmp_alias_set);
678 emit_move_insn (validize_mem (mem),
679 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
/* Word 2 onward: the machine-dependent stack save area.  */
681 stack_save = gen_rtx_MEM (sa_mode,
682 plus_constant (buf_addr,
683 2 * GET_MODE_SIZE (Pmode)));
684 set_mem_alias_set (stack_save, setjmp_alias_set);
685 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
687 /* If there is further processing to do, do it. */
688 #ifdef HAVE_builtin_setjmp_setup
689 if (HAVE_builtin_setjmp_setup)
690 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 /* Tell optimize_save_area_alloca that extra work is going to
694 need to go on during alloca. */
695 current_function_calls_setjmp = 1;
697 /* Set this so all the registers get saved in our frame; we need to be
698 able to copy the saved values for any registers from frames we unwind. */
699 current_function_has_nonlocal_label = 1;
702 /* Construct the trailing part of a __builtin_setjmp call. This is
703 also called directly by the SJLJ exception handling code. */
706 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
708 /* Clobber the FP when we get here, so we have to make sure it's
709 marked as used by this function. */
710 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx))
712 /* Mark the static chain as clobbered here so life information
713 doesn't get messed up for it. */
714 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
716 /* Now put in the code to restore the frame pointer, and argument
717 pointer, if needed. */
718 #ifdef HAVE_nonlocal_goto
719 if (! HAVE_nonlocal_goto)
722 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
723 /* This might change the hard frame pointer in ways that aren't
724 apparent to early optimization passes, so force a clobber. */
725 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
728 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
729 if (fixed_regs[ARG_POINTER_REGNUM])
731 #ifdef ELIMINABLE_REGS
/* If the argument pointer can be eliminated in favor of the frame
   pointer, we don't need to restore it; we assume it is always
   eliminated in that case.  */
733 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
735 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
736 if (elim_regs[i].from == ARG_POINTER_REGNUM
737 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
740 if (i == ARRAY_SIZE (elim_regs))
743 /* Now restore our arg pointer from the address at which it
744 was saved in our stack frame. */
745 emit_move_insn (virtual_incoming_args_rtx,
746 copy_to_reg (get_arg_pointer_save_area (cfun)));
/* Give the target a chance to emit receiver-side fixup code.  */
751 #ifdef HAVE_builtin_setjmp_receiver
752 if (HAVE_builtin_setjmp_receiver)
753 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
756 #ifdef HAVE_nonlocal_goto_receiver
757 if (HAVE_nonlocal_goto_receiver)
758 emit_insn (gen_nonlocal_goto_receiver ());
763 /* @@@ This is a kludge. Not all machine descriptions define a blockage
764 insn, but we must not allow the code we just generated to be reordered
765 by scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. So emit an ASM_INPUT to act as blockage
768 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
771 /* __builtin_longjmp is passed a pointer to an array of five words (not
772 all will be used on all machines). It operates similarly to the C
773 library function of the same name, but is more efficient. Much of
774 the code below is copied from the handling of non-local gotos. */
777 expand_builtin_longjmp (rtx buf_addr, rtx value)
779 rtx fp, lab, stack, insn, last;
780 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* Must match the alias set used by expand_builtin_setjmp_setup.  */
782 if (setjmp_alias_set == -1)
783 setjmp_alias_set = new_alias_set ();
785 buf_addr = convert_memory_address (Pmode, buf_addr);
787 buf_addr = force_reg (Pmode, buf_addr);
789 /* We used to store value in static_chain_rtx, but that fails if pointers
790 are smaller than integers. We instead require that the user must pass
791 a second argument of 1, because that is what builtin_setjmp will
792 return. This also makes EH slightly more efficient, since we are no
793 longer copying around a value that we don't care about. */
794 gcc_assert (value == const1_rtx);
796 last = get_last_insn ();
/* Prefer a target-provided longjmp expansion when one exists.  */
797 #ifdef HAVE_builtin_longjmp
798 if (HAVE_builtin_longjmp)
799 emit_insn (gen_builtin_longjmp (buf_addr));
/* Otherwise, read back the three words laid out by setjmp_setup:
   frame value, receiver label, stack save area.  */
803 fp = gen_rtx_MEM (Pmode, buf_addr);
804 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
805 GET_MODE_SIZE (Pmode)));
807 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
808 2 * GET_MODE_SIZE (Pmode)));
809 set_mem_alias_set (fp, setjmp_alias_set);
810 set_mem_alias_set (lab, setjmp_alias_set);
811 set_mem_alias_set (stack, setjmp_alias_set);
813 /* Pick up FP, label, and SP from the block and jump. This code is
814 from expand_goto in stmt.c; see there for detailed comments. */
815 #ifdef HAVE_nonlocal_goto
816 if (HAVE_nonlocal_goto)
817 /* We have to pass a value to the nonlocal_goto pattern that will
818 get copied into the static_chain pointer, but it does not matter
819 what that value is, because builtin_setjmp does not use it. */
820 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
/* Copy the label out before clobbering the frame pointer, since LAB is
   a MEM relative to the old frame.  */
824 lab = copy_to_reg (lab);
/* Tell flow analysis that all of memory and the frame may change.  */
826 emit_insn (gen_rtx_CLOBBER (VOIDmode,
827 gen_rtx_MEM (BLKmode,
828 gen_rtx_SCRATCH (VOIDmode))));
829 emit_insn (gen_rtx_CLOBBER (VOIDmode,
830 gen_rtx_MEM (BLKmode,
831 hard_frame_pointer_rtx)));
833 emit_move_insn (hard_frame_pointer_rtx, fp);
834 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
/* Keep the FP and SP live across the indirect jump.  */
836 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
837 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
838 emit_indirect_jump (lab);
842 /* Search backwards and mark the jump insn as a non-local goto.
843 Note that this precludes the use of __builtin_longjmp to a
844 __builtin_setjmp target in the same function. However, we've
845 already cautioned the user that these functions are for
846 internal exception handling use only. */
847 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
849 gcc_assert (insn != last);
853 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
/* Stop at a call: the jump must appear before any call insn.
   NOTE(review): the loop's JUMP_P test and break statements are elided
   in this listing.  */
857 else if (CALL_P (insn))
862 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
863 and the address of the save area. */
866 expand_builtin_nonlocal_goto (tree exp)
868 tree t_label, t_save_area;
869 rtx r_label, r_save_area, r_fp, r_sp, insn;
/* Both arguments must be pointers; bail out otherwise (the failing
   return is elided in this listing).  */
871 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
874 t_label = CALL_EXPR_ARG (exp, 0);
875 t_save_area = CALL_EXPR_ARG (exp, 1);
877 r_label = expand_normal (t_label);
878 r_label = convert_memory_address (Pmode, r_label);
879 r_save_area = expand_normal (t_save_area);
880 r_save_area = convert_memory_address (Pmode, r_save_area);
/* Save area layout: word 0 is the frame pointer, word 1 the stack
   pointer (in the nonlocal save-area mode).  */
881 r_fp = gen_rtx_MEM (Pmode, r_save_area);
882 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
883 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
885 current_function_has_nonlocal_goto = 1;
887 #ifdef HAVE_nonlocal_goto
888 /* ??? We no longer need to pass the static chain value, afaik. */
889 if (HAVE_nonlocal_goto)
890 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
/* Fallback expansion, mirroring expand_builtin_longjmp above.  */
894 r_label = copy_to_reg (r_label);
896 emit_insn (gen_rtx_CLOBBER (VOIDmode,
897 gen_rtx_MEM (BLKmode,
898 gen_rtx_SCRATCH (VOIDmode))));
900 emit_insn (gen_rtx_CLOBBER (VOIDmode,
901 gen_rtx_MEM (BLKmode,
902 hard_frame_pointer_rtx)));
904 /* Restore frame pointer for containing function.
905 This sets the actual hard register used for the frame pointer
906 to the location of the function's incoming static chain info.
907 The non-local goto handler will then adjust it to contain the
908 proper value and reload the argument pointer, if needed. */
909 emit_move_insn (hard_frame_pointer_rtx, r_fp);
910 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
912 /* USE of hard_frame_pointer_rtx added for consistency;
913 not clear if really needed. */
914 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
915 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
916 emit_indirect_jump (r_label);
919 /* Search backwards to the jump insn and mark it as a
921 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
925 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
926 const0_rtx, REG_NOTES (insn));
/* Stop scanning at a call insn, as in expand_builtin_longjmp.  */
929 else if (CALL_P (insn))
936 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
937 (not all will be used on all machines) that was passed to __builtin_setjmp.
938 It updates the stack pointer in that block to correspond to the current
942 expand_builtin_update_setjmp_buf (rtx buf_addr)
/* Determine the mode of the stack save area, preferring what the
   target's save_stack_nonlocal pattern expects.  */
944 enum machine_mode sa_mode = Pmode;
948 #ifdef HAVE_save_stack_nonlocal
949 if (HAVE_save_stack_nonlocal)
950 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
952 #ifdef STACK_SAVEAREA_MODE
953 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack save area is the third word of the buffer, matching the
   layout written by expand_builtin_setjmp_setup.  */
957 = gen_rtx_MEM (sa_mode,
960 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
/* NOTE(review): the HAVE_setjmp guard around this emit is elided in
   this listing.  */
964 emit_insn (gen_setjmp ());
967 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
970 /* Expand a call to __builtin_prefetch. For a target that does not support
971 data prefetch, evaluate the memory address argument in case it has side
975 expand_builtin_prefetch (tree exp)
977 tree arg0, arg1, arg2;
/* The first argument must be a pointer; give up otherwise (the failing
   return is elided in this listing).  */
981 if (!validate_arglist (exp, POINTER_TYPE, 0))
984 arg0 = CALL_EXPR_ARG (exp, 0);
986 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
987 zero (read) and argument 2 (locality) defaults to 3 (high degree of
989 nargs = call_expr_nargs (exp);
991 arg1 = CALL_EXPR_ARG (exp, 1);
993 arg1 = integer_zero_node;
995 arg2 = CALL_EXPR_ARG (exp, 2);
997 arg2 = build_int_cst (NULL_TREE, 3);
999 /* Argument 0 is an address. */
1000 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1002 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1003 if (TREE_CODE (arg1) != INTEGER_CST)
1005 error ("second argument to %<__builtin_prefetch%> must be a constant");
1006 arg1 = integer_zero_node;
1008 op1 = expand_normal (arg1);
1009 /* Argument 1 must be either zero or one. */
1010 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1012 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1017 /* Argument 2 (locality) must be a compile-time constant int. */
1018 if (TREE_CODE (arg2) != INTEGER_CST)
1020 error ("third argument to %<__builtin_prefetch%> must be a constant");
1021 arg2 = integer_zero_node;
1023 op2 = expand_normal (arg2);
1024 /* Argument 2 must be 0, 1, 2, or 3. */
1025 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1027 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
/* Emit the prefetch insn if the target has one, coercing the address
   into the operand's predicate/mode first.  */
1031 #ifdef HAVE_prefetch
1034 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1036 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1037 || (GET_MODE (op0) != Pmode))
1039 op0 = convert_memory_address (Pmode, op0);
1040 op0 = force_reg (Pmode, op0);
1042 emit_insn (gen_prefetch (op0, op1, op2));
1046 /* Don't do anything with direct references to volatile memory, but
1047 generate code to handle other side effects. */
1048 if (!MEM_P (op0) && side_effects_p (op0))
1052 /* Get a MEM rtx for expression EXP which is the address of an operand
1053 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1054 the maximum length of the block of memory that might be accessed or
1058 get_memory_rtx (tree exp, tree len)
1060 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1061 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1063 /* Get an expression we can use to find the attributes to assign to MEM.
1064 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1065 we can. First remove any nops. */
1066 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1067 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1068 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1069 exp = TREE_OPERAND (exp, 0);
1071 if (TREE_CODE (exp) == ADDR_EXPR)
1072 exp = TREE_OPERAND (exp, 0);
1073 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1074 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1078 /* Honor attributes derived from exp, except for the alias set
1079 (as builtin stringops may alias with anything) and the size
1080 (as stringops may access multiple array elements). */
1083 set_mem_attributes (mem, exp, 0);
1085 /* Allow the string and memory builtins to overflow from one
1086 field into another, see http://gcc.gnu.org/PR23561.
1087 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1088 memory accessed by the string or memory builtin will fit
1089 within the field. */
1090 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1092 tree mem_expr = MEM_EXPR (mem);
1093 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers (array refs, conversions, SAVE_EXPRs) to reach the
   innermost COMPONENT_REF whose field size we can compare against.  */
1096 while (TREE_CODE (inner) == ARRAY_REF
1097 || TREE_CODE (inner) == NOP_EXPR
1098 || TREE_CODE (inner) == CONVERT_EXPR
1099 || TREE_CODE (inner) == NON_LVALUE_EXPR
1100 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1101 || TREE_CODE (inner) == SAVE_EXPR)
1102 inner = TREE_OPERAND (inner, 0);
1104 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1106 if (MEM_OFFSET (mem)
1107 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1108 offset = INTVAL (MEM_OFFSET (mem));
/* LENGTH is only usable when we also know the constant start offset.  */
1110 if (offset >= 0 && len && host_integerp (len, 0))
1111 length = tree_low_cst (len, 0);
/* Walk outward through nested COMPONENT_REFs; keep the innermost one
   whose field provably contains the whole [offset, offset+length) span.  */
1113 while (TREE_CODE (inner) == COMPONENT_REF)
1115 tree field = TREE_OPERAND (inner, 1);
1116 gcc_assert (! DECL_BIT_FIELD (field));
1117 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1118 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1121 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1122 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1125 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1126 /* If we can prove the memory starting at XEXP (mem, 0)
1127 and ending at XEXP (mem, 0) + LENGTH will fit into
1128 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1131 && offset + length <= size)
1136 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1137 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1138 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1146 mem_expr = TREE_OPERAND (mem_expr, 0);
1147 inner = TREE_OPERAND (inner, 0);
1150 if (mem_expr == NULL)
1152 if (mem_expr != MEM_EXPR (mem))
1154 set_mem_expr (mem, mem_expr);
1155 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and touch multiple elements, so drop
   the alias set and size attributes (see comment above).  */
1158 set_mem_alias_set (mem, 0);
1159 set_mem_size (mem, NULL_RTX);
1165 /* Built-in functions to perform an untyped call and return. */
/* The three tables below are filled in lazily by apply_args_size and
   apply_result_size; until then their contents are meaningless.  */
1167 /* For each register that may be used for calling a function, this
1168 gives a mode used to copy the register's value. VOIDmode indicates
1169 the register is not used for calling a function. If the machine
1170 has register windows, this gives only the outbound registers.
1171 INCOMING_REGNO gives the corresponding inbound register. */
1172 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1174 /* For each register that may be used for returning values, this gives
1175 a mode used to copy the register's value. VOIDmode indicates the
1176 register is not used for returning values. If the machine has
1177 register windows, this gives only the outbound registers.
1178 INCOMING_REGNO gives the corresponding inbound register. */
1179 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1181 /* For each register that may be used for calling a function, this
1182 gives the offset of that register into the block returned by
1183 __builtin_apply_args. 0 indicates that the register is not
1184 used for calling a function. */
1185 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1187 /* Return the size required for the block returned by __builtin_apply_args,
1188 and initialize apply_args_mode. */
1191 apply_args_size (void)
1193 static int size = -1;
1196 enum machine_mode mode;
1198 /* The values computed by this function never change. */
1201 /* The first value is the incoming arg-pointer. */
1202 size = GET_MODE_SIZE (Pmode);
1204 /* The second value is the structure value address unless this is
1205 passed as an "invisible" first argument. */
1206 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1207 size += GET_MODE_SIZE (Pmode);
/* Then one slot per hard register that can carry a function argument,
   each aligned to its mode's natural alignment.  */
1209 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1210 if (FUNCTION_ARG_REGNO_P (regno))
1212 mode = reg_raw_mode[regno];
1214 gcc_assert (mode != VOIDmode);
/* Round SIZE up to this mode's alignment before recording the offset.  */
1216 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1217 if (size % align != 0)
1218 size = CEIL (size, align) * align;
1219 apply_args_reg_offset[regno] = size;
1220 size += GET_MODE_SIZE (mode);
1221 apply_args_mode[regno] = mode;
1225 apply_args_mode[regno] = VOIDmode;
1226 apply_args_reg_offset[regno] = 0;
1232 /* Return the size required for the block returned by __builtin_apply,
1233 and initialize apply_result_mode. */
1236 apply_result_size (void)
1238 static int size = -1;
1240 enum machine_mode mode;
1242 /* The values computed by this function never change. */
/* One slot per hard register that can hold a return value, each
   aligned to its mode's natural alignment (cf. apply_args_size).  */
1247 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1248 if (FUNCTION_VALUE_REGNO_P (regno))
1250 mode = reg_raw_mode[regno];
1252 gcc_assert (mode != VOIDmode);
1254 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1255 if (size % align != 0)
1256 size = CEIL (size, align) * align;
1257 size += GET_MODE_SIZE (mode);
1258 apply_result_mode[regno] = mode;
1261 apply_result_mode[regno] = VOIDmode;
1263 /* Allow targets that use untyped_call and untyped_return to override
1264 the size so that machine-specific information can be stored here. */
1265 #ifdef APPLY_RESULT_SIZE
1266 size = APPLY_RESULT_SIZE;
1272 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1273 /* Create a vector describing the result block RESULT. If SAVEP is true,
1274 the result block is used to save the values; otherwise it is used to
1275 restore the values. */
1278 result_vector (int savep, rtx result)
1280 int regno, size, align, nelts;
1281 enum machine_mode mode;
1283 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
/* Build one SET per result register: reg->mem when saving,
   mem->reg when restoring.  Offsets mirror apply_result_size.  */
1286 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1287 if ((mode = apply_result_mode[regno]) != VOIDmode)
1289 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1290 if (size % align != 0)
1291 size = CEIL (size, align) * align;
/* When restoring, use the inbound register on register-window machines.  */
1292 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1293 mem = adjust_address (result, mode, size);
1294 savevec[nelts++] = (savep
1295 ? gen_rtx_SET (VOIDmode, mem, reg)
1296 : gen_rtx_SET (VOIDmode, reg, mem));
1297 size += GET_MODE_SIZE (mode);
1299 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1301 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1303 /* Save the state required to perform an untyped call with the same
1304 arguments as were passed to the current function. */
1307 expand_builtin_apply_args_1 (void)
1310 int size, align, regno;
1311 enum machine_mode mode;
1312 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1314 /* Create a block where the arg-pointer, structure value address,
1315 and argument registers can be saved. */
1316 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1318 /* Walk past the arg-pointer and structure value address. */
1319 size = GET_MODE_SIZE (Pmode);
1320 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1321 size += GET_MODE_SIZE (Pmode);
1323 /* Save each register used in calling a function to the block. */
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if ((mode = apply_args_mode[regno]) != VOIDmode)
1327 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1328 if (size % align != 0)
1329 size = CEIL (size, align) * align;
/* Copy from the inbound register number (matters on register-window
   machines such as SPARC).  */
1331 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1333 emit_move_insn (adjust_address (registers, mode, size), tem);
1334 size += GET_MODE_SIZE (mode);
1337 /* Save the arg pointer to the block. */
1338 tem = copy_to_reg (virtual_incoming_args_rtx);
1339 #ifdef STACK_GROWS_DOWNWARD
1340 /* We need the pointer as the caller actually passed them to us, not
1341 as we might have pretended they were passed. Make sure it's a valid
1342 operand, as emit_move_insn isn't expected to handle a PLUS. */
1344 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1347 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1349 size = GET_MODE_SIZE (Pmode);
1351 /* Save the structure value address unless this is passed as an
1352 "invisible" first argument. */
1353 if (struct_incoming_value)
1355 emit_move_insn (adjust_address (registers, Pmode, size),
1356 copy_to_reg (struct_incoming_value));
1357 size += GET_MODE_SIZE (Pmode);
1360 /* Return the address of the block. */
1361 return copy_addr_to_reg (XEXP (registers, 0));
1364 /* __builtin_apply_args returns block of memory allocated on
1365 the stack into which is stored the arg pointer, structure
1366 value address, static chain, and all the registers that might
1367 possibly be used in performing a function call. The code is
1368 moved to the start of the function so the incoming values are
1372 expand_builtin_apply_args (void)
1374 /* Don't do __builtin_apply_args more than once in a function.
1375 Save the result of the first call and reuse it. */
1376 if (apply_args_value != 0)
1377 return apply_args_value;
1379 /* When this function is called, it means that registers must be
1380 saved on entry to this function. So we migrate the
1381 call to the first insn of this function. */
1386 temp = expand_builtin_apply_args_1 ();
/* Cache the pseudo so later uses in this function reuse it.  */
1390 apply_args_value = temp;
1392 /* Put the insns after the NOTE that starts the function.
1393 If this is inside a start_sequence, make the outer-level insn
1394 chain current, so the code is placed at the start of the
1396 push_topmost_sequence ();
1397 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1398 pop_topmost_sequence ();
1403 /* Perform an untyped call and save the state required to perform an
1404 untyped return of whatever value was returned by the given function. */
1407 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1409 int size, align, regno;
1410 enum machine_mode mode;
1411 rtx incoming_args, result, reg, dest, src, call_insn;
1412 rtx old_stack_level = 0;
1413 rtx call_fusage = 0;
1414 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1416 arguments = convert_memory_address (Pmode, arguments);
1418 /* Create a block where the return registers can be saved. */
1419 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1421 /* Fetch the arg pointer from the ARGUMENTS block. */
1422 incoming_args = gen_reg_rtx (Pmode);
1423 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1424 #ifndef STACK_GROWS_DOWNWARD
1425 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1426 incoming_args, 0, OPTAB_LIB_WIDEN);
1429 /* Push a new argument block and copy the arguments. Do not allow
1430 the (potential) memcpy call below to interfere with our stack
1432 do_pending_stack_adjust ();
1435 /* Save the stack with nonlocal if available. */
1436 #ifdef HAVE_save_stack_nonlocal
1437 if (HAVE_save_stack_nonlocal)
1438 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1441 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1443 /* Allocate a block of memory onto the stack and copy the memory
1444 arguments to the outgoing arguments address. */
1445 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1446 dest = virtual_outgoing_args_rtx;
1447 #ifndef STACK_GROWS_DOWNWARD
1448 if (GET_CODE (argsize) == CONST_INT)
1449 dest = plus_constant (dest, -INTVAL (argsize));
1451 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1453 dest = gen_rtx_MEM (BLKmode, dest);
1454 set_mem_align (dest, PARM_BOUNDARY);
1455 src = gen_rtx_MEM (BLKmode, incoming_args);
1456 set_mem_align (src, PARM_BOUNDARY);
1457 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1459 /* Refer to the argument block. */
1461 arguments = gen_rtx_MEM (BLKmode, arguments);
1462 set_mem_align (arguments, PARM_BOUNDARY);
1464 /* Walk past the arg-pointer and structure value address. */
1465 size = GET_MODE_SIZE (Pmode);
1467 size += GET_MODE_SIZE (Pmode);
1469 /* Restore each of the registers previously saved. Make USE insns
1470 for each of these registers for use in making the call. */
1471 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1472 if ((mode = apply_args_mode[regno]) != VOIDmode)
1474 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1475 if (size % align != 0)
1476 size = CEIL (size, align) * align;
1477 reg = gen_rtx_REG (mode, regno);
1478 emit_move_insn (reg, adjust_address (arguments, mode, size));
1479 use_reg (&call_fusage, reg);
1480 size += GET_MODE_SIZE (mode);
1483 /* Restore the structure value address unless this is passed as an
1484 "invisible" first argument. */
1485 size = GET_MODE_SIZE (Pmode);
1488 rtx value = gen_reg_rtx (Pmode);
1489 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1490 emit_move_insn (struct_value, value);
1491 if (REG_P (struct_value))
1492 use_reg (&call_fusage, struct_value);
1493 size += GET_MODE_SIZE (Pmode);
1496 /* All arguments and registers used for the call are set up by now! */
1497 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1499 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1500 and we don't want to load it into a register as an optimization,
1501 because prepare_call_address already did it if it should be done. */
1502 if (GET_CODE (function) != SYMBOL_REF)
1503 function = memory_address (FUNCTION_MODE, function);
1505 /* Generate the actual call instruction and save the return value. */
1506 #ifdef HAVE_untyped_call
1507 if (HAVE_untyped_call)
1508 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1509 result, result_vector (1, result)));
1512 #ifdef HAVE_call_value
1513 if (HAVE_call_value)
1517 /* Locate the unique return register. It is not possible to
1518 express a call that sets more than one return register using
1519 call_value; use untyped_call for that. In fact, untyped_call
1520 only needs to save the return registers in the given block. */
1521 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1522 if ((mode = apply_result_mode[regno]) != VOIDmode)
1524 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1526 valreg = gen_rtx_REG (mode, regno);
1529 emit_call_insn (GEN_CALL_VALUE (valreg,
1530 gen_rtx_MEM (FUNCTION_MODE, function),
1531 const0_rtx, NULL_RTX, const0_rtx));
1533 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1539 /* Find the CALL insn we just emitted, and attach the register usage
1541 call_insn = last_call_insn ();
1542 add_function_usage_to (call_insn, call_fusage);
1544 /* Restore the stack. */
/* Must mirror the save above: nonlocal save pairs with nonlocal restore.  */
1545 #ifdef HAVE_save_stack_nonlocal
1546 if (HAVE_save_stack_nonlocal)
1547 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1550 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1554 /* Return the address of the result block. */
1555 result = copy_addr_to_reg (XEXP (result, 0));
1556 return convert_memory_address (ptr_mode, result);
1559 /* Perform an untyped return. */
1562 expand_builtin_return (rtx result)
1564 int size, align, regno;
1565 enum machine_mode mode;
1567 rtx call_fusage = 0;
1569 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1571 apply_result_size ();
1572 result = gen_rtx_MEM (BLKmode, result);
1574 #ifdef HAVE_untyped_return
1575 if (HAVE_untyped_return)
1577 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1583 /* Restore the return value and note that each value is used. */
1585 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1586 if ((mode = apply_result_mode[regno]) != VOIDmode)
1588 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1589 if (size % align != 0)
1590 size = CEIL (size, align) * align;
1591 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1592 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate the USEs in a separate sequence so they can all be
   emitted just before the return, after every restore move.  */
1594 push_to_sequence (call_fusage);
1595 emit_insn (gen_rtx_USE (VOIDmode, reg));
1596 call_fusage = get_insns ();
1598 size += GET_MODE_SIZE (mode);
1601 /* Put the USE insns before the return. */
1602 emit_insn (call_fusage);
1604 /* Return whatever values was restored by jumping directly to the end
1606 expand_naked_return ();
1609 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code onto the __builtin_classify_type enum value.  */
1611 static enum type_class
1612 type_to_class (tree type)
1614 switch (TREE_CODE (type))
1616 case VOID_TYPE: return void_type_class;
1617 case INTEGER_TYPE: return integer_type_class;
1618 case ENUMERAL_TYPE: return enumeral_type_class;
1619 case BOOLEAN_TYPE: return boolean_type_class;
1620 case POINTER_TYPE: return pointer_type_class;
1621 case REFERENCE_TYPE: return reference_type_class;
1622 case OFFSET_TYPE: return offset_type_class;
1623 case REAL_TYPE: return real_type_class;
1624 case COMPLEX_TYPE: return complex_type_class;
1625 case FUNCTION_TYPE: return function_type_class;
1626 case METHOD_TYPE: return method_type_class;
1627 case RECORD_TYPE: return record_type_class;
1629 case QUAL_UNION_TYPE: return union_type_class;
/* Char arrays with TYPE_STRING_FLAG are classified as strings.  */
1630 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1631 ? string_type_class : array_type_class);
1632 case LANG_TYPE: return lang_type_class;
1633 default: return no_type_class;
1637 /* Expand a call EXP to __builtin_classify_type. */
1640 expand_builtin_classify_type (tree exp)
/* With an argument, classify its type; with none, return no_type_class.  */
1642 if (call_expr_nargs (exp))
1643 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1644 return GEN_INT (no_type_class);
1647 /* This helper macro, meant to be used in mathfn_built_in below,
1648 determines which among a set of three builtin math functions is
1649 appropriate for a given type mode. The `F' and `L' cases are
1650 automatically generated from the `double' case. */
/* Matches any of the three precision variants and records all three
   codes so the caller can pick by type.  */
1651 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1652 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1653 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1654 fcodel = BUILT_IN_MATHFN##L ; break;
1656 /* Return mathematic function equivalent to FN but operating directly
1657 on TYPE, if available. If we can't do the conversion, return zero. */
1659 mathfn_built_in (tree type, enum built_in_function fn)
1661 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN arm matches all three precision variants of one
   math builtin and records the double/float/long-double codes.  */
1665 CASE_MATHFN (BUILT_IN_ACOS)
1666 CASE_MATHFN (BUILT_IN_ACOSH)
1667 CASE_MATHFN (BUILT_IN_ASIN)
1668 CASE_MATHFN (BUILT_IN_ASINH)
1669 CASE_MATHFN (BUILT_IN_ATAN)
1670 CASE_MATHFN (BUILT_IN_ATAN2)
1671 CASE_MATHFN (BUILT_IN_ATANH)
1672 CASE_MATHFN (BUILT_IN_CBRT)
1673 CASE_MATHFN (BUILT_IN_CEIL)
1674 CASE_MATHFN (BUILT_IN_CEXPI)
1675 CASE_MATHFN (BUILT_IN_COPYSIGN)
1676 CASE_MATHFN (BUILT_IN_COS)
1677 CASE_MATHFN (BUILT_IN_COSH)
1678 CASE_MATHFN (BUILT_IN_DREM)
1679 CASE_MATHFN (BUILT_IN_ERF)
1680 CASE_MATHFN (BUILT_IN_ERFC)
1681 CASE_MATHFN (BUILT_IN_EXP)
1682 CASE_MATHFN (BUILT_IN_EXP10)
1683 CASE_MATHFN (BUILT_IN_EXP2)
1684 CASE_MATHFN (BUILT_IN_EXPM1)
1685 CASE_MATHFN (BUILT_IN_FABS)
1686 CASE_MATHFN (BUILT_IN_FDIM)
1687 CASE_MATHFN (BUILT_IN_FLOOR)
1688 CASE_MATHFN (BUILT_IN_FMA)
1689 CASE_MATHFN (BUILT_IN_FMAX)
1690 CASE_MATHFN (BUILT_IN_FMIN)
1691 CASE_MATHFN (BUILT_IN_FMOD)
1692 CASE_MATHFN (BUILT_IN_FREXP)
1693 CASE_MATHFN (BUILT_IN_GAMMA)
1694 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1695 CASE_MATHFN (BUILT_IN_HYPOT)
1696 CASE_MATHFN (BUILT_IN_ILOGB)
1697 CASE_MATHFN (BUILT_IN_INF)
1698 CASE_MATHFN (BUILT_IN_ISINF)
1699 CASE_MATHFN (BUILT_IN_J0)
1700 CASE_MATHFN (BUILT_IN_J1)
1701 CASE_MATHFN (BUILT_IN_JN)
1702 CASE_MATHFN (BUILT_IN_LCEIL)
1703 CASE_MATHFN (BUILT_IN_LDEXP)
1704 CASE_MATHFN (BUILT_IN_LFLOOR)
1705 CASE_MATHFN (BUILT_IN_LGAMMA)
1706 CASE_MATHFN (BUILT_IN_LLCEIL)
1707 CASE_MATHFN (BUILT_IN_LLFLOOR)
1708 CASE_MATHFN (BUILT_IN_LLRINT)
1709 CASE_MATHFN (BUILT_IN_LLROUND)
1710 CASE_MATHFN (BUILT_IN_LOG)
1711 CASE_MATHFN (BUILT_IN_LOG10)
1712 CASE_MATHFN (BUILT_IN_LOG1P)
1713 CASE_MATHFN (BUILT_IN_LOG2)
1714 CASE_MATHFN (BUILT_IN_LOGB)
1715 CASE_MATHFN (BUILT_IN_LRINT)
1716 CASE_MATHFN (BUILT_IN_LROUND)
1717 CASE_MATHFN (BUILT_IN_MODF)
1718 CASE_MATHFN (BUILT_IN_NAN)
1719 CASE_MATHFN (BUILT_IN_NANS)
1720 CASE_MATHFN (BUILT_IN_NEARBYINT)
1721 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1722 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1723 CASE_MATHFN (BUILT_IN_POW)
1724 CASE_MATHFN (BUILT_IN_POWI)
1725 CASE_MATHFN (BUILT_IN_POW10)
1726 CASE_MATHFN (BUILT_IN_REMAINDER)
1727 CASE_MATHFN (BUILT_IN_REMQUO)
1728 CASE_MATHFN (BUILT_IN_RINT)
1729 CASE_MATHFN (BUILT_IN_ROUND)
1730 CASE_MATHFN (BUILT_IN_SCALB)
1731 CASE_MATHFN (BUILT_IN_SCALBLN)
1732 CASE_MATHFN (BUILT_IN_SCALBN)
1733 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1734 CASE_MATHFN (BUILT_IN_SIN)
1735 CASE_MATHFN (BUILT_IN_SINCOS)
1736 CASE_MATHFN (BUILT_IN_SINH)
1737 CASE_MATHFN (BUILT_IN_SQRT)
1738 CASE_MATHFN (BUILT_IN_TAN)
1739 CASE_MATHFN (BUILT_IN_TANH)
1740 CASE_MATHFN (BUILT_IN_TGAMMA)
1741 CASE_MATHFN (BUILT_IN_TRUNC)
1742 CASE_MATHFN (BUILT_IN_Y0)
1743 CASE_MATHFN (BUILT_IN_Y1)
1744 CASE_MATHFN (BUILT_IN_YN)
/* Select the variant whose argument type matches TYPE; the implicit
   decl may be NULL_TREE if the runtime doesn't provide it.  */
1750 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1751 return implicit_built_in_decls[fcode];
1752 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1753 return implicit_built_in_decls[fcodef];
1754 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1755 return implicit_built_in_decls[fcodel];
1760 /* If errno must be maintained, expand the RTL to check if the result,
1761 TARGET, of a built-in function call, EXP, is NaN, and if so set
1765 expand_errno_check (tree exp, rtx target)
1767 rtx lab = gen_label_rtx ();
1769 /* Test the result; if it is NaN, set errno=EDOM because
1770 the argument was not in the domain. */
/* A NaN compares unequal to itself, so TARGET == TARGET is false
   exactly when the result is NaN.  */
1771 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1775 /* If this built-in doesn't throw an exception, set errno directly. */
1776 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1778 #ifdef GEN_ERRNO_RTX
1779 rtx errno_rtx = GEN_ERRNO_RTX;
1782 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"))
1784 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1790 /* We can't set errno=EDOM directly; let the library call do it.
1791 Pop the arguments right away in case the call gets deleted. */
1793 expand_call (exp, target, 0);
1798 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1799 Return NULL_RTX if a normal call should be emitted rather than expanding
1800 the function in-line. EXP is the expression that is a call to the builtin
1801 function; if convenient, the result should be placed in TARGET.
1802 SUBTARGET may be used as the target for computing one of EXP's operands. */
1805 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1807 optab builtin_optab;
1808 rtx op0, insns, before_call;
1809 tree fndecl = get_callee_fndecl (exp);
1810 enum machine_mode mode;
1811 bool errno_set = false;
1814 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1817 arg = CALL_EXPR_ARG (exp, 0);
/* Map the builtin's function code to the optab that expands it, and
   record whether the call can set errno (domain errors).  */
1819 switch (DECL_FUNCTION_CODE (fndecl))
1821 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt of a provably nonnegative argument cannot set errno.  */
1822 errno_set = ! tree_expr_nonnegative_p (arg);
1823 builtin_optab = sqrt_optab;
1825 CASE_FLT_FN (BUILT_IN_EXP):
1826 errno_set = true; builtin_optab = exp_optab; break;
1827 CASE_FLT_FN (BUILT_IN_EXP10):
1828 CASE_FLT_FN (BUILT_IN_POW10):
1829 errno_set = true; builtin_optab = exp10_optab; break;
1830 CASE_FLT_FN (BUILT_IN_EXP2):
1831 errno_set = true; builtin_optab = exp2_optab; break;
1832 CASE_FLT_FN (BUILT_IN_EXPM1):
1833 errno_set = true; builtin_optab = expm1_optab; break;
1834 CASE_FLT_FN (BUILT_IN_LOGB):
1835 errno_set = true; builtin_optab = logb_optab; break;
1836 CASE_FLT_FN (BUILT_IN_LOG):
1837 errno_set = true; builtin_optab = log_optab; break;
1838 CASE_FLT_FN (BUILT_IN_LOG10):
1839 errno_set = true; builtin_optab = log10_optab; break;
1840 CASE_FLT_FN (BUILT_IN_LOG2):
1841 errno_set = true; builtin_optab = log2_optab; break;
1842 CASE_FLT_FN (BUILT_IN_LOG1P):
1843 errno_set = true; builtin_optab = log1p_optab; break;
1844 CASE_FLT_FN (BUILT_IN_ASIN):
1845 builtin_optab = asin_optab; break;
1846 CASE_FLT_FN (BUILT_IN_ACOS):
1847 builtin_optab = acos_optab; break;
1848 CASE_FLT_FN (BUILT_IN_TAN):
1849 builtin_optab = tan_optab; break;
1850 CASE_FLT_FN (BUILT_IN_ATAN):
1851 builtin_optab = atan_optab; break;
1852 CASE_FLT_FN (BUILT_IN_FLOOR):
1853 builtin_optab = floor_optab; break;
1854 CASE_FLT_FN (BUILT_IN_CEIL):
1855 builtin_optab = ceil_optab; break;
1856 CASE_FLT_FN (BUILT_IN_TRUNC):
1857 builtin_optab = btrunc_optab; break;
1858 CASE_FLT_FN (BUILT_IN_ROUND):
1859 builtin_optab = round_optab; break;
1860 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1861 builtin_optab = nearbyint_optab;
1862 if (flag_trapping_math)
1864 /* Else fallthrough and expand as rint. */
1865 CASE_FLT_FN (BUILT_IN_RINT):
1866 builtin_optab = rint_optab; break;
1871 /* Make a suitable register to place result in. */
1872 mode = TYPE_MODE (TREE_TYPE (exp));
/* errno handling is moot when errno-math is off or NaNs aren't honored.  */
1874 if (! flag_errno_math || ! HONOR_NANS (mode))
1877 /* Before working hard, check whether the instruction is available. */
1878 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1880 target = gen_reg_rtx (mode);
1882 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1883 need to expand the argument again. This way, we will not perform
1884 side-effects more the once. */
1885 narg = builtin_save_expr (arg);
1889 exp = build_call_expr (fndecl, 1, arg);
1892 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1896 /* Compute into TARGET.
1897 Set TARGET to wherever the result comes back. */
1898 target = expand_unop (mode, builtin_optab, op0, target, 0);
1903 expand_errno_check (exp, target);
1905 /* Output the entire sequence. */
1906 insns = get_insns ();
1912 /* If we were unable to expand via the builtin, stop the sequence
1913 (without outputting the insns) and call to the library function
1914 with the stabilized argument list. */
1918 before_call = get_last_insn ();
1920 target = expand_call (exp, target, target == const0_rtx);
1922 /* If this is a sqrt operation and we don't care about errno, try to
1923 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1924 This allows the semantics of the libcall to be visible to the RTL
1926 if (builtin_optab == sqrt_optab && !errno_set)
1928 /* Search backwards through the insns emitted by expand_call looking
1929 for the instruction with the REG_RETVAL note. */
1930 rtx last = get_last_insn ();
1931 while (last != before_call)
1933 if (find_reg_note (last, REG_RETVAL, NULL))
1935 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1936 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1937 two elements, i.e. symbol_ref(sqrt) and the operand. */
1939 && GET_CODE (note) == EXPR_LIST
1940 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1941 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1942 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1944 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1945 /* Check operand is a register with expected mode. */
1948 && GET_MODE (operand) == mode)
1950 /* Replace the REG_EQUAL note with a SQRT rtx. */
1951 rtx equiv = gen_rtx_SQRT (mode, operand);
1952 set_unique_reg_note (last, REG_EQUAL, equiv);
1957 last = PREV_INSN (last);
1964 /* Expand a call to the builtin binary math functions (pow and atan2).
1965 Return NULL_RTX if a normal call should be emitted rather than expanding the
1966 function in-line. EXP is the expression that is a call to the builtin
1967 function; if convenient, the result should be placed in TARGET.
1968 SUBTARGET may be used as the target for computing one of EXP's
1972 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1974 optab builtin_optab;
1975 rtx op0, op1, insns;
1976 int op1_type = REAL_TYPE;
1977 tree fndecl = get_callee_fndecl (exp);
1978 tree arg0, arg1, narg;
1979 enum machine_mode mode;
1980 bool errno_set = true;
/* ldexp/scalbn/scalbln take an integer second argument; everything
   else here takes two reals.  */
1983 switch (DECL_FUNCTION_CODE (fndecl))
1985 CASE_FLT_FN (BUILT_IN_SCALBN):
1986 CASE_FLT_FN (BUILT_IN_SCALBLN):
1987 CASE_FLT_FN (BUILT_IN_LDEXP):
1988 op1_type = INTEGER_TYPE;
1993 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1996 arg0 = CALL_EXPR_ARG (exp, 0);
1997 arg1 = CALL_EXPR_ARG (exp, 1);
1999 switch (DECL_FUNCTION_CODE (fndecl))
2001 CASE_FLT_FN (BUILT_IN_POW):
2002 builtin_optab = pow_optab; break;
2003 CASE_FLT_FN (BUILT_IN_ATAN2):
2004 builtin_optab = atan2_optab; break;
2005 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb/scalbn only map to the optab when the float radix is 2.  */
2006 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2008 builtin_optab = scalb_optab; break;
2009 CASE_FLT_FN (BUILT_IN_SCALBN):
2010 CASE_FLT_FN (BUILT_IN_SCALBLN):
2011 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2013 /* Fall through... */
2014 CASE_FLT_FN (BUILT_IN_LDEXP):
2015 builtin_optab = ldexp_optab; break;
2016 CASE_FLT_FN (BUILT_IN_FMOD):
2017 builtin_optab = fmod_optab; break;
2018 CASE_FLT_FN (BUILT_IN_REMAINDER):
2019 CASE_FLT_FN (BUILT_IN_DREM):
2020 builtin_optab = remainder_optab; break;
2025 /* Make a suitable register to place result in. */
2026 mode = TYPE_MODE (TREE_TYPE (exp));
2028 /* Before working hard, check whether the instruction is available. */
2029 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2032 target = gen_reg_rtx (mode);
2034 if (! flag_errno_math || ! HONOR_NANS (mode))
2037 /* Always stabilize the argument list. */
2038 narg = builtin_save_expr (arg1);
2044 narg = builtin_save_expr (arg0);
2052 exp = build_call_expr (fndecl, 2, arg0, arg1);
2054 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2055 op1 = expand_normal (arg1);
2059 /* Compute into TARGET.
2060 Set TARGET to wherever the result comes back. */
2061 target = expand_binop (mode, builtin_optab, op0, op1,
2062 target, 0, OPTAB_DIRECT);
2064 /* If we were unable to expand via the builtin, stop the sequence
2065 (without outputting the insns) and call to the library function
2066 with the stabilized argument list. */
2070 return expand_call (exp, target, target == const0_rtx);
2074 expand_errno_check (exp, target);
2076 /* Output the entire sequence. */
2077 insns = get_insns ();
2084 /* Expand a call to the builtin sin and cos math functions.
2085 Return NULL_RTX if a normal call should be emitted rather than expanding the
2086 function in-line. EXP is the expression that is a call to the builtin
2087 function; if convenient, the result should be placed in TARGET.
2088 SUBTARGET may be used as the target for computing one of EXP's
/* Expand __builtin_sin/__builtin_cos, preferring the combined sincos
   optab and falling back to the dedicated sin/cos optabs, else to a
   library call.  NOTE(review): this extract elides source lines;
   comments describe only the visible code.  */
2092 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2094 optab builtin_optab;
2096 tree fndecl = get_callee_fndecl (exp);
2097 enum machine_mode mode;
2100 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2103 arg = CALL_EXPR_ARG (exp, 0);
2105 switch (DECL_FUNCTION_CODE (fndecl))
2107 CASE_FLT_FN (BUILT_IN_SIN):
2108 CASE_FLT_FN (BUILT_IN_COS):
2109 builtin_optab = sincos_optab; break;
2114 /* Make a suitable register to place result in. */
2115 mode = TYPE_MODE (TREE_TYPE (exp));
2117 /* Check if sincos insn is available, otherwise fallback
2118 to sin or cos insn. */
2119 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2120 switch (DECL_FUNCTION_CODE (fndecl))
2122 CASE_FLT_FN (BUILT_IN_SIN):
2123 builtin_optab = sin_optab; break;
2124 CASE_FLT_FN (BUILT_IN_COS):
2125 builtin_optab = cos_optab; break;
2130 /* Before working hard, check whether the instruction is available. */
2131 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2133 target = gen_reg_rtx (mode);
2135 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2136 need to expand the argument again. This way, we will not perform
2137 side-effects more than once. */
2138 narg = save_expr (arg);
2142 exp = build_call_expr (fndecl, 1, arg);
2145 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2149 /* Compute into TARGET.
2150 Set TARGET to wherever the result comes back. */
2151 if (builtin_optab == sincos_optab)
/* sincos produces two values; request only the one we need by
   passing 0 for the other output slot.  */
2155 switch (DECL_FUNCTION_CODE (fndecl))
2157 CASE_FLT_FN (BUILT_IN_SIN):
2158 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2160 CASE_FLT_FN (BUILT_IN_COS):
2161 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2166 gcc_assert (result);
2170 target = expand_unop (mode, builtin_optab, op0, target, 0);
2175 /* Output the entire sequence. */
2176 insns = get_insns ();
2182 /* If we were unable to expand via the builtin, stop the sequence
2183 (without outputting the insns) and call to the library function
2184 with the stabilized argument list. */
2188 target = expand_call (exp, target, target == const0_rtx);
2193 /* Expand a call to one of the builtin math functions that operate on
2194 floating point argument and output an integer result (ilogb, isinf,
2196 Return 0 if a normal call should be emitted rather than expanding the
2197 function in-line. EXP is the expression that is a call to the builtin
2198 function; if convenient, the result should be placed in TARGET.
2199 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* Expand a math builtin with real argument and integer result (ilogb,
   isinf) via its optab; fall back to a library call.  NOTE(review):
   this extract elides source lines; comments describe only the
   visible code.  */
2202 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2204 optab builtin_optab;
2205 enum insn_code icode;
2207 tree fndecl = get_callee_fndecl (exp);
2208 enum machine_mode mode;
2209 bool errno_set = false;  /* Only ilogb can set errno (EDOM).  */
2212 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2215 arg = CALL_EXPR_ARG (exp, 0);
2217 switch (DECL_FUNCTION_CODE (fndecl))
2219 CASE_FLT_FN (BUILT_IN_ILOGB):
2220 errno_set = true; builtin_optab = ilogb_optab; break;
2221 CASE_FLT_FN (BUILT_IN_ISINF):
2222 builtin_optab = isinf_optab; break;
2227 /* There's no easy way to detect the case we need to set EDOM. */
2228 if (flag_errno_math && errno_set)
2231 /* Optab mode depends on the mode of the input argument. */
2232 mode = TYPE_MODE (TREE_TYPE (arg));
2234 icode = builtin_optab->handlers[(int) mode].insn_code;
2236 /* Before working hard, check whether the instruction is available. */
2237 if (icode != CODE_FOR_nothing)
2239 /* Make a suitable register to place result in. */
2241 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2242 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2244 gcc_assert (insn_data[icode].operand[0].predicate
2245 (target, GET_MODE (target)));
2247 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2248 need to expand the argument again. This way, we will not perform
2249 side-effects more than once. */
2250 narg = builtin_save_expr (arg);
2254 exp = build_call_expr (fndecl, 1, arg);
2257 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* The insn wants the argument in MODE; convert if expansion
   produced a different mode.  */
2259 if (mode != GET_MODE (op0))
2260 op0 = convert_to_mode (mode, op0, 0);
2262 /* Compute into TARGET.
2263 Set TARGET to wherever the result comes back. */
2264 emit_unop_insn (icode, target, op0, UNKNOWN);
2268 target = expand_call (exp, target, target == const0_rtx);
2273 /* Expand a call to the builtin sincos math function.
2274 Return NULL_RTX if a normal call should be emitted rather than expanding the
2275 function in-line. EXP is the expression that is a call to the builtin
/* Expand sincos(x, *sinp, *cosp) via the sincos optab, storing both
   results through the pointer arguments.  NOTE(review): this extract
   elides source lines; comments describe only the visible code.  */
2279 expand_builtin_sincos (tree exp)
2281 rtx op0, op1, op2, target1, target2;
2282 enum machine_mode mode;
2283 tree arg, sinp, cosp;
2286 if (!validate_arglist (exp, REAL_TYPE,
2287 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2290 arg = CALL_EXPR_ARG (exp, 0);
2291 sinp = CALL_EXPR_ARG (exp, 1);
2292 cosp = CALL_EXPR_ARG (exp, 2);
2294 /* Make a suitable register to place result in. */
2295 mode = TYPE_MODE (TREE_TYPE (arg));
2297 /* Check if sincos insn is available, otherwise emit the call. */
2298 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2301 target1 = gen_reg_rtx (mode);
2302 target2 = gen_reg_rtx (mode);
2304 op0 = expand_normal (arg);
/* op1/op2 are the lvalues *sinp and *cosp expanded to RTL.  */
2305 op1 = expand_normal (build_fold_indirect_ref (sinp));
2306 op2 = expand_normal (build_fold_indirect_ref (cosp));
2308 /* Compute into target1 and target2.
2309 Set TARGET to wherever the result comes back. */
2310 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2311 gcc_assert (result);
2313 /* Move target1 and target2 to the memory locations indicated
   by the pointer arguments.  */
2315 emit_move_insn (op1, target1);
2316 emit_move_insn (op2, target2);
2321 /* Expand a call to the internal cexpi builtin to the sincos math function.
2322 EXP is the expression that is a call to the builtin function; if convenient,
2323 the result should be placed in TARGET. SUBTARGET may be used as the target
2324 for computing one of EXP's operands. */
/* Expand the internal cexpi builtin: try the sincos optab, then a
   sincos libcall (if TARGET_HAS_SINCOS), then a cexp libcall, and
   finally assemble the complex result cos(x) + i*sin(x).
   NOTE(review): this extract elides source lines; comments describe
   only the visible code.  */
2327 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2329 tree fndecl = get_callee_fndecl (exp);
2331 enum machine_mode mode;
2334 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2337 arg = CALL_EXPR_ARG (exp, 0);
2338 type = TREE_TYPE (arg);
2339 mode = TYPE_MODE (TREE_TYPE (arg));
2341 /* Try expanding via a sincos optab, fall back to emitting a libcall
2342 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2343 is only generated from sincos, cexp or if we have either of them. */
2344 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2346 op1 = gen_reg_rtx (mode);  /* Will receive sin(x).  */
2347 op2 = gen_reg_rtx (mode);  /* Will receive cos(x).  */
2349 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2351 /* Compute into op1 and op2. */
2352 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2354 else if (TARGET_HAS_SINCOS)
2356 tree call, fn = NULL_TREE;
/* Pick the sincos variant matching the cexpi precision.  */
2360 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2361 fn = built_in_decls[BUILT_IN_SINCOSF];
2362 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2363 fn = built_in_decls[BUILT_IN_SINCOS];
2364 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2365 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Stack temporaries receive the two results; pass their addresses.  */
2369 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2370 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2371 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2372 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2373 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2374 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2376 /* Make sure not to fold the sincos call again. */
2377 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2378 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2379 call, 3, arg, top1, top2));
2383 tree call, fn = NULL_TREE, narg;
2384 tree ctype = build_complex_type (type);
2386 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2387 fn = built_in_decls[BUILT_IN_CEXPF];
2388 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2389 fn = built_in_decls[BUILT_IN_CEXP];
2390 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2391 fn = built_in_decls[BUILT_IN_CEXPL];
2395 /* If we don't have a decl for cexp create one. This is the
2396 friendliest fallback if the user calls __builtin_cexpi
2397 without full target C99 function support. */
2398 if (fn == NULL_TREE)
2401 const char *name = NULL;
2403 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2405 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2407 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2410 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2411 fn = build_fn_decl (name, fntype);
/* cexpi(x) == cexp(0 + i*x): build the purely imaginary argument.  */
2414 narg = fold_build2 (COMPLEX_EXPR, ctype,
2415 build_real (type, dconst0), arg);
2417 /* Make sure not to fold the cexp call again. */
2418 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2419 return expand_expr (build_call_nary (ctype, call, 1, narg),
2420 target, VOIDmode, EXPAND_NORMAL);
2423 /* Now build the proper return type. */
2424 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2425 make_tree (TREE_TYPE (arg), op2),
2426 make_tree (TREE_TYPE (arg), op1)),
2427 target, VOIDmode, EXPAND_NORMAL);
2430 /* Expand a call to one of the builtin rounding functions gcc defines
2431 as an extension (lfloor and lceil). As these are gcc extensions we
2432 do not need to worry about setting errno to EDOM.
2433 If expanding via optab fails, lower expression to (int)(floor(x)).
2434 EXP is the expression that is a call to the builtin function;
2435 if convenient, the result should be placed in TARGET. SUBTARGET may
2436 be used as the target for computing one of EXP's operands. */
/* Expand lfloor/llfloor/lceil/llceil via the lfloor/lceil conversion
   optabs; on failure lower to floor/ceil plus an FP->integer
   conversion.  NOTE(review): this extract elides source lines;
   comments describe only the visible code.  */
2439 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2441 convert_optab builtin_optab;
2442 rtx op0, insns, tmp;
2443 tree fndecl = get_callee_fndecl (exp);
2444 enum built_in_function fallback_fn;
2445 tree fallback_fndecl;
2446 enum machine_mode mode;
2449 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2452 arg = CALL_EXPR_ARG (exp, 0);
2454 switch (DECL_FUNCTION_CODE (fndecl))
2456 CASE_FLT_FN (BUILT_IN_LCEIL):
2457 CASE_FLT_FN (BUILT_IN_LLCEIL):
2458 builtin_optab = lceil_optab;
2459 fallback_fn = BUILT_IN_CEIL;
2462 CASE_FLT_FN (BUILT_IN_LFLOOR):
2463 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2464 builtin_optab = lfloor_optab;
2465 fallback_fn = BUILT_IN_FLOOR;
2472 /* Make a suitable register to place result in. */
2473 mode = TYPE_MODE (TREE_TYPE (exp));
2475 target = gen_reg_rtx (mode);
2477 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2478 need to expand the argument again. This way, we will not perform
2479 side-effects more than once. */
2480 narg = builtin_save_expr (arg);
2484 exp = build_call_expr (fndecl, 1, arg);
2487 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2491 /* Compute into TARGET. */
2492 if (expand_sfix_optab (target, op0, builtin_optab))
2494 /* Output the entire sequence. */
2495 insns = get_insns ();
2501 /* If we were unable to expand via the builtin, stop the sequence
2502 (without outputting the insns). */
2505 /* Fall back to floating point rounding optab. */
2506 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2508 /* For non-C99 targets we may end up without a fallback fndecl here
2509 if the user called __builtin_lfloor directly. In this case emit
2510 a call to the floor/ceil variants nevertheless. This should result
2511 in the best user experience for not full C99 targets. */
2512 if (fallback_fndecl == NULL_TREE)
2515 const char *name = NULL;
/* Map each builtin to the name of the matching floor/ceil libm
   routine (name assignments elided in this extract).  */
2517 switch (DECL_FUNCTION_CODE (fndecl))
2519 case BUILT_IN_LCEIL:
2520 case BUILT_IN_LLCEIL:
2523 case BUILT_IN_LCEILF:
2524 case BUILT_IN_LLCEILF:
2527 case BUILT_IN_LCEILL:
2528 case BUILT_IN_LLCEILL:
2531 case BUILT_IN_LFLOOR:
2532 case BUILT_IN_LLFLOOR:
2535 case BUILT_IN_LFLOORF:
2536 case BUILT_IN_LLFLOORF:
2539 case BUILT_IN_LFLOORL:
2540 case BUILT_IN_LLFLOORL:
2547 fntype = build_function_type_list (TREE_TYPE (arg),
2548 TREE_TYPE (arg), NULL_TREE);
2549 fallback_fndecl = build_fn_decl (name, fntype);
2552 exp = build_call_expr (fallback_fndecl, 1, arg);
2554 tmp = expand_normal (exp);
2556 /* Truncate the result of floating point optab to integer
2557 via expand_fix (). */
2558 target = gen_reg_rtx (mode);
2559 expand_fix (target, tmp, 0);
2564 /* Expand a call to one of the builtin math functions doing integer
2566 Return 0 if a normal call should be emitted rather than expanding the
2567 function in-line. EXP is the expression that is a call to the builtin
2568 function; if convenient, the result should be placed in TARGET.
2569 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* Expand lrint/llrint/lround/llround via the lrint/lround conversion
   optabs; bail out entirely when errno-math is enabled (these can set
   EDOM).  NOTE(review): this extract elides source lines; comments
   describe only the visible code.  */
2572 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2574 convert_optab builtin_optab;
2576 tree fndecl = get_callee_fndecl (exp);
2578 enum machine_mode mode;
2580 /* There's no easy way to detect the case we need to set EDOM. */
2581 if (flag_errno_math)
2584 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2587 arg = CALL_EXPR_ARG (exp, 0);
2589 switch (DECL_FUNCTION_CODE (fndecl))
2591 CASE_FLT_FN (BUILT_IN_LRINT):
2592 CASE_FLT_FN (BUILT_IN_LLRINT):
2593 builtin_optab = lrint_optab; break;
2594 CASE_FLT_FN (BUILT_IN_LROUND):
2595 CASE_FLT_FN (BUILT_IN_LLROUND):
2596 builtin_optab = lround_optab; break;
2601 /* Make a suitable register to place result in. */
2602 mode = TYPE_MODE (TREE_TYPE (exp));
2604 target = gen_reg_rtx (mode);
2606 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2607 need to expand the argument again. This way, we will not perform
2608 side-effects more than once. */
2609 narg = builtin_save_expr (arg);
2613 exp = build_call_expr (fndecl, 1, arg);
2616 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2620 if (expand_sfix_optab (target, op0, builtin_optab))
2622 /* Output the entire sequence. */
2623 insns = get_insns ();
2629 /* If we were unable to expand via the builtin, stop the sequence
2630 (without outputting the insns) and call to the library function
2631 with the stabilized argument list. */
2634 target = expand_call (exp, target, target == const0_rtx);
2639 /* To evaluate powi(x,n), the floating point value x raised to the
2640 constant integer exponent n, we use a hybrid algorithm that
2641 combines the "window method" with look-up tables. For an
2642 introduction to exponentiation algorithms and "addition chains",
2643 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2644 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2645 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2646 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2648 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2649 multiplications to inline before calling the system library's pow
2650 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2651 so this default never requires calling pow, powf or powl. */
2653 #ifndef POWI_MAX_MULTS
2654 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2657 /* The size of the "optimal power tree" lookup table. All
2658 exponents less than this value are simply looked up in the
2659 powi_table below. This threshold is also used to size the
2660 cache of pseudo registers that hold intermediate results. */
2661 #define POWI_TABLE_SIZE 256
2663 /* The size, in bits of the window, used in the "window method"
2664 exponentiation algorithm. This is equivalent to a radix of
2665 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2666 #define POWI_WINDOW_SIZE 3
2668 /* The following table is an efficient representation of an
2669 "optimal power tree". For each value, i, the corresponding
2670 value, j, in the table states that an optimal evaluation
2671 sequence for calculating pow(x,i) can be found by evaluating
2672 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2673 100 integers is given in Knuth's "Seminumerical algorithms". */
/* For each index i, powi_table[i] gives a split j such that
   pow(x,i) = pow(x,j) * pow(x,i-j) is an optimal evaluation; see the
   "optimal power tree" comment above.  */
2675 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2677 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2678 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2679 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2680 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2681 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2682 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2683 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2684 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2685 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2686 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2687 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2688 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2689 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2690 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2691 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2692 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2693 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2694 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2695 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2696 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2697 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2698 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2699 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2700 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2701 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2702 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2703 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2704 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2705 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2706 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2707 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2708 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2712 /* Return the number of multiplications required to calculate
2713 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2714 subroutine of powi_cost. CACHE is an array indicating
2715 which exponents have already been calculated. */
/* Count multiplications for powi(x,N), N < POWI_TABLE_SIZE, by
   recursing on the table's optimal split.  CACHE marks exponents
   already computed (cache test/set lines elided in this extract).  */
2718 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2720 /* If we've already calculated this exponent, then this evaluation
2721 doesn't require any additional multiplications. */
2726 return powi_lookup_cost (n - powi_table[n], cache)
2727 + powi_lookup_cost (powi_table[n], cache) + 1;
2730 /* Return the number of multiplications required to calculate
2731 powi(x,n) for an arbitrary x, given the exponent N. This
2732 function needs to be kept in sync with expand_powi below. */
/* Count multiplications needed for powi(x,N) using the window method:
   peel POWI_WINDOW_SIZE-bit digits until the residue fits the lookup
   table.  Must stay in sync with expand_powi below.  */
2735 powi_cost (HOST_WIDE_INT n)
2737 bool cache[POWI_TABLE_SIZE];
2738 unsigned HOST_WIDE_INT digit;
2739 unsigned HOST_WIDE_INT val;
2745 /* Ignore the reciprocal when calculating the cost. */
2746 val = (n < 0) ? -n : n;
2748 /* Initialize the exponent cache. */
2749 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2754 while (val >= POWI_TABLE_SIZE)
/* Cost of one window: the digit's lookup cost, plus squarings for
   the window shift, plus one combining multiply.  */
2758 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2759 result += powi_lookup_cost (digit, cache)
2760 + POWI_WINDOW_SIZE + 1;
2761 val >>= POWI_WINDOW_SIZE;
2770 return result + powi_lookup_cost (val, cache);
2773 /* Recursive subroutine of expand_powi. This function takes the array,
2774 CACHE, of already calculated exponents and an exponent N and returns
2775 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
/* Emit RTL computing CACHE[1]**N in MODE, splitting N via powi_table
   for small exponents and via window digits / squaring otherwise.
   NOTE(review): cache-hit and branch lines are elided in this
   extract; comments cover visible code only.  */
2778 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2780 unsigned HOST_WIDE_INT digit;
/* Small exponent: use the optimal split from powi_table.  */
2784 if (n < POWI_TABLE_SIZE)
2789 target = gen_reg_rtx (mode);
2792 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2793 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Otherwise peel the low POWI_WINDOW_SIZE bits...  */
2797 target = gen_reg_rtx (mode);
2798 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2799 op0 = expand_powi_1 (mode, n - digit, cache);
2800 op1 = expand_powi_1 (mode, digit, cache);
/* ...or square the half-exponent result.  */
2804 target = gen_reg_rtx (mode);
2805 op0 = expand_powi_1 (mode, n >> 1, cache);
2809 result = expand_mult (mode, op0, op1, target, 0);
2810 if (result != target)
2811 emit_move_insn (target, result);
2815 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2816 floating point operand in mode MODE, and N is the exponent. This
2817 function needs to be kept in sync with powi_cost above. */
/* Emit RTL for powi(x,n): delegate to expand_powi_1 on |n|, then
   reciprocate if n was negative.  Must stay in sync with powi_cost.  */
2820 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2822 unsigned HOST_WIDE_INT val;
2823 rtx cache[POWI_TABLE_SIZE];
2827 return CONST1_RTX (mode);
2829 val = (n < 0) ? -n : n;
2831 memset (cache, 0, sizeof (cache));
/* NOTE(review): |n| appears recomputed inline here although VAL was
   just set above — confirm against the full source.  */
2834 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2836 /* If the original exponent was negative, reciprocate the result. */
2838 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2839 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2844 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2845 a normal call should be emitted rather than expanding the function
2846 in-line. EXP is the expression that is a call to the builtin
2847 function; if convenient, the result should be placed in TARGET. */
/* Expand pow(x,c) for constant C: integer exponents become an optimal
   multiply sequence (expand_powi); half-integers use sqrt; thirds use
   cbrt (unsafe-math only); otherwise defer to the generic two-operand
   expander.  NOTE(review): this extract elides source lines; comments
   describe only the visible code.  */
2850 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2854 tree type = TREE_TYPE (exp);
2855 REAL_VALUE_TYPE cint, c, c2;
2858 enum machine_mode mode = TYPE_MODE (type);
2860 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2863 arg0 = CALL_EXPR_ARG (exp, 0);
2864 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponent: nothing special to do here.  */
2866 if (TREE_CODE (arg1) != REAL_CST
2867 || TREE_OVERFLOW (arg1))
2868 return expand_builtin_mathfn_2 (exp, target, subtarget);
2870 /* Handle constant exponents. */
2872 /* For integer valued exponents we can expand to an optimal multiplication
2873 sequence using expand_powi. */
2874 c = TREE_REAL_CST (arg1);
2875 n = real_to_integer (&c);
2876 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* Exact for n in [-1,2]; beyond that only with unsafe-math and an
   affordable multiply count.  */
2877 if (real_identical (&c, &cint)
2878 && ((n >= -1 && n <= 2)
2879 || (flag_unsafe_math_optimizations
2881 && powi_cost (n) <= POWI_MAX_MULTS)))
2883 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2886 op = force_reg (mode, op);
2887 op = expand_powi (op, mode, n);
/* Stabilize x: it is reused by the sqrt/cbrt expansions below.  */
2892 narg0 = builtin_save_expr (arg0);
2894 /* If the exponent is not integer valued, check if it is half of an integer.
2895 In this case we can expand to sqrt (x) * x**(n/2). */
2896 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2897 if (fn != NULL_TREE)
2899 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2900 n = real_to_integer (&c2);
2901 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2902 if (real_identical (&c2, &cint)
2903 && ((flag_unsafe_math_optimizations
2905 && powi_cost (n/2) <= POWI_MAX_MULTS)
2908 tree call_expr = build_call_expr (fn, 1, narg0);
2909 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2912 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2913 op2 = force_reg (mode, op2);
2914 op2 = expand_powi (op2, mode, abs (n / 2));
2915 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2916 0, OPTAB_LIB_WIDEN);
2917 /* If the original exponent was negative, reciprocate the
   result.  */
2920 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2921 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2927 /* Try if the exponent is a third of an integer. In this case
2928 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2929 different from pow (x, 1./3.) due to rounding and behavior
2930 with negative x we need to constrain this transformation to
2931 unsafe math and positive x or finite math. */
2932 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2934 && flag_unsafe_math_optimizations
2935 && (tree_expr_nonnegative_p (arg0)
2936 || !HONOR_NANS (mode)))
/* Round 3*c to the nearest integer, then verify c == that/3
   exactly in this mode.  */
2938 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2939 real_round (&c2, mode, &c2);
2940 n = real_to_integer (&c2);
2941 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2942 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2943 real_convert (&c2, mode, &c2);
2944 if (real_identical (&c2, &c)
2946 && powi_cost (n/3) <= POWI_MAX_MULTS)
2949 tree call_expr = build_call_expr (fn, 1,narg0);
2950 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* n mod 3 == 2: square the cbrt(x) factor.  */
2951 if (abs (n) % 3 == 2)
2952 op = expand_simple_binop (mode, MULT, op, op, op,
2953 0, OPTAB_LIB_WIDEN);
2956 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2957 op2 = force_reg (mode, op2);
2958 op2 = expand_powi (op2, mode, abs (n / 3));
2959 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2960 0, OPTAB_LIB_WIDEN);
2961 /* If the original exponent was negative, reciprocate the
   result.  */
2964 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2965 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2971 /* Fall back to optab expansion. */
2972 return expand_builtin_mathfn_2 (exp, target, subtarget);
2975 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2976 a normal call should be emitted rather than expanding the function
2977 in-line. EXP is the expression that is a call to the builtin
2978 function; if convenient, the result should be placed in TARGET. */
/* Expand __builtin_powi: constant small/cheap exponents go through
   expand_powi; everything else becomes a libgcc __powi* libcall.
   NOTE(review): this extract elides source lines; comments describe
   only the visible code.  */
2981 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2985 enum machine_mode mode;
2986 enum machine_mode mode2;
2988 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2991 arg0 = CALL_EXPR_ARG (exp, 0);
2992 arg1 = CALL_EXPR_ARG (exp, 1);
2993 mode = TYPE_MODE (TREE_TYPE (exp));
2995 /* Handle constant power. */
2997 if (TREE_CODE (arg1) == INTEGER_CST
2998 && !TREE_OVERFLOW (arg1))
3000 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3002 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3003 Otherwise, check the number of multiplications required. */
/* The HIGH-word test ensures the constant fits a HOST_WIDE_INT.  */
3004 if ((TREE_INT_CST_HIGH (arg1) == 0
3005 || TREE_INT_CST_HIGH (arg1) == -1)
3006 && ((n >= -1 && n <= 2)
3008 && powi_cost (n) <= POWI_MAX_MULTS)))
3010 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3011 op0 = force_reg (mode, op0);
3012 return expand_powi (op0, mode, n);
3016 /* Emit a libcall to libgcc. */
3018 /* Mode of the 2nd argument must match that of an int. */
3019 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3021 if (target == NULL_RTX)
3022 target = gen_reg_rtx (mode);
3024 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3025 if (GET_MODE (op0) != mode)
3026 op0 = convert_to_mode (mode, op0, 0);
3027 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3028 if (GET_MODE (op1) != mode2)
3029 op1 = convert_to_mode (mode2, op1, 0);
3031 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3032 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3033 op0, mode, op1, mode2);
3038 /* Expand expression EXP which is a call to the strlen builtin. Return
3039 NULL_RTX if we failed the caller should emit a normal call, otherwise
3040 try to get the result in TARGET, if convenient. */
/* Expand strlen(src): fold to a constant when c_strlen can compute
   it, otherwise emit the target's strlen insn if available.
   NOTE(review): this extract elides source lines; comments describe
   only the visible code.  */
3043 expand_builtin_strlen (tree exp, rtx target,
3044 enum machine_mode target_mode)
3046 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3052 tree src = CALL_EXPR_ARG (exp, 0);
3053 rtx result, src_reg, char_rtx, before_strlen;
3054 enum machine_mode insn_mode = target_mode, char_mode;
3055 enum insn_code icode = CODE_FOR_nothing;
3058 /* If the length can be computed at compile-time, return it. */
3059 len = c_strlen (src, 0);
3061 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3063 /* If the length can be computed at compile-time and is constant
3064 integer, but there are side-effects in src, evaluate
3065 src for side-effects, then return len.
3066 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3067 can be optimized into: i++; x = 3; */
3068 len = c_strlen (src, 1);
3069 if (len && TREE_CODE (len) == INTEGER_CST)
3071 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3072 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3075 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3077 /* If SRC is not a pointer type, don't do this operation inline. */
3081 /* Bail out if we can't compute strlen in the right mode. */
3082 while (insn_mode != VOIDmode)
3084 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3085 if (icode != CODE_FOR_nothing)
3088 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3090 if (insn_mode == VOIDmode)
3093 /* Make a place to write the result of the instruction. */
3097 && GET_MODE (result) == insn_mode
3098 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3099 result = gen_reg_rtx (insn_mode);
3101 /* Make a place to hold the source address. We will not expand
3102 the actual source until we are sure that the expansion will
3103 not fail -- there are trees that cannot be expanded twice. */
3104 src_reg = gen_reg_rtx (Pmode);
3106 /* Mark the beginning of the strlen sequence so we can emit the
3107 source operand later. */
3108 before_strlen = get_last_insn ();
3110 char_rtx = const0_rtx;
3111 char_mode = insn_data[(int) icode].operand[2].mode;
3112 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3114 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3116 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3117 char_rtx, GEN_INT (align));
3122 /* Now that we are assured of success, expand the source. */
3124 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3126 emit_move_insn (src_reg, pat);
/* Place the source-address setup before the already-emitted strlen
   pattern.  */
3131 emit_insn_after (pat, before_strlen);
3133 emit_insn_before (pat, get_insns ());
3135 /* Return the value in the proper mode for this function. */
3136 if (GET_MODE (result) == target_mode)
3138 else if (target != 0)
3139 convert_move (target, result, 0);
3141 target = convert_to_mode (target_mode, result, 0);
3147 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3148 caller should emit a normal call, otherwise try to get the result
3149 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Expand strstr(s1, s2) by folding; expand the folded tree if
   folding succeeded, otherwise the caller emits a normal call.  */
3152 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3154 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3156 tree type = TREE_TYPE (exp);
3157 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3158 CALL_EXPR_ARG (exp, 1), type);
3160 return expand_expr (result, target, mode, EXPAND_NORMAL);
3165 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3166 caller should emit a normal call, otherwise try to get the result
3167 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Expand strchr(s, c) by folding; expand the folded tree if folding
   succeeded, otherwise the caller emits a normal call.  */
3170 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3172 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3174 tree type = TREE_TYPE (exp);
3175 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3176 CALL_EXPR_ARG (exp, 1), type);
3178 return expand_expr (result, target, mode, EXPAND_NORMAL);
3180 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3185 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3186 caller should emit a normal call, otherwise try to get the result
3187 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Expand strrchr(s, c) by folding; expand the folded tree if folding
   succeeded, otherwise the caller emits a normal call.  */
3190 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3192 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3194 tree type = TREE_TYPE (exp);
3195 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3196 CALL_EXPR_ARG (exp, 1), type);
3198 return expand_expr (result, target, mode, EXPAND_NORMAL);
3203 /* Expand a call to the strpbrk builtin.  Return NULL_RTX if we failed; the
3204 caller should emit a normal call, otherwise try to get the result
3205 in TARGET, if convenient (and in mode MODE if that's convenient). */
3208 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3210 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3212 tree type = TREE_TYPE (exp);
/* Fold strpbrk at compile time when the arguments permit; expand the
   folded tree on success.  */
3213 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3214 CALL_EXPR_ARG (exp, 1), type);
3216 return expand_expr (result, target, mode, EXPAND_NORMAL);
3221 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3222 bytes from constant string DATA + OFFSET and return it as target
   constant in mode MODE (the tail of this comment is missing from the
   listing; presumably "...as a target constant" — confirm against the
   full source).  */
3226 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3227 enum machine_mode mode)
3229 const char *str = (const char *) data;
/* The caller guarantees it never reads past the string's terminating
   NUL; assert that here.  */
3231 gcc_assert (offset >= 0
3232 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3233 <= strlen (str) + 1));
3235 return c_readstr (str + offset, mode);
3238 /* Expand a call EXP to the memcpy builtin.
3239 Return NULL_RTX if we failed, the caller should emit a normal call,
3240 otherwise try to get the result in TARGET, if convenient (and in
3241 mode MODE if that's convenient). */
3244 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3246 tree fndecl = get_callee_fndecl (exp);
3248 if (!validate_arglist (exp,
3249 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3253 tree dest = CALL_EXPR_ARG (exp, 0);
3254 tree src = CALL_EXPR_ARG (exp, 1);
3255 tree len = CALL_EXPR_ARG (exp, 2);
3256 const char *src_str;
3257 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3258 unsigned int dest_align
3259 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3260 rtx dest_mem, src_mem, dest_addr, len_rtx;
/* First try to fold the whole call to a tree.  */
3261 tree result = fold_builtin_memory_op (dest, src, len,
3262 TREE_TYPE (TREE_TYPE (fndecl)),
3264 HOST_WIDE_INT expected_size = -1;
3265 unsigned int expected_align = 0;
/* The folded result may be wrapped in COMPOUND_EXPRs; expand the left
   operands only for their side effects, then expand the final value.  */
3269 while (TREE_CODE (result) == COMPOUND_EXPR)
3271 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3273 result = TREE_OPERAND (result, 1);
3275 return expand_expr (result, target, mode, EXPAND_NORMAL);
3278 /* If DEST is not a pointer type, call the normal function. */
3279 if (dest_align == 0)
3282 /* If either SRC is not a pointer type, don't do this
3283 operation in-line. */
/* Profile-driven hints: block-size profiling may raise the expected
   alignment/size used by the block-move expander below.  */
3287 stringop_block_profile (exp, &expected_align, &expected_size);
3288 if (expected_align < dest_align)
3289 expected_align = dest_align;
3290 dest_mem = get_memory_rtx (dest, len);
3291 set_mem_align (dest_mem, dest_align);
3292 len_rtx = expand_normal (len);
3293 src_str = c_getstr (src);
3295 /* If SRC is a string constant and block move would be done
3296 by pieces, we can avoid loading the string from memory
3297 and only store the computed constants. */
3299 && GET_CODE (len_rtx) == CONST_INT
3300 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3301 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3302 (void *) src_str, dest_align))
3304 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3305 builtin_memcpy_read_str,
3306 (void *) src_str, dest_align, 0);
3307 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3308 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3312 src_mem = get_memory_rtx (src, len);
3313 set_mem_align (src_mem, src_align);
3315 /* Copy word part most expediently. */
3316 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3317 CALL_EXPR_TAILCALL (exp)
3318 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3319 expected_align, expected_size);
/* memcpy returns DEST, so compute and return its address in ptr_mode.  */
3323 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3324 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3330 /* Expand a call EXP to the mempcpy builtin.
3331 Return NULL_RTX if we failed; the caller should emit a normal call,
3332 otherwise try to get the result in TARGET, if convenient (and in
3333 mode MODE if that's convenient).  If ENDP is 0 return the
3334 destination pointer, if ENDP is 1 return the end pointer ala
3335 mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy (tail of comment missing from this listing).  */
3339 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3341 if (!validate_arglist (exp,
3342 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3346 tree dest = CALL_EXPR_ARG (exp, 0);
3347 tree src = CALL_EXPR_ARG (exp, 1);
3348 tree len = CALL_EXPR_ARG (exp, 2);
/* Delegate to the args helper; mempcpy returns the end pointer,
   hence endp == 1.  */
3349 return expand_builtin_mempcpy_args (dest, src, len,
3351 target, mode, /*endp=*/ 1);
3355 /* Helper function to do the actual work for expand_builtin_mempcpy.  The
3356 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3357 so that this can also be called without constructing an actual CALL_EXPR.
3358 TYPE is the return type of the call.  The other arguments and return value
3359 are the same as for expand_builtin_mempcpy. */
3362 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3363 rtx target, enum machine_mode mode, int endp)
3365 /* If return value is ignored, transform mempcpy into memcpy. */
3366 if (target == const0_rtx)
3368 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3373 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3374 target, mode, EXPAND_NORMAL);
3378 const char *src_str;
3379 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3380 unsigned int dest_align
3381 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3382 rtx dest_mem, src_mem, len_rtx;
/* First try to fold the whole operation to a tree.  */
3383 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
/* The folded result may be wrapped in COMPOUND_EXPRs; expand the left
   operands only for their side effects, then expand the final value.  */
3387 while (TREE_CODE (result) == COMPOUND_EXPR)
3389 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3391 result = TREE_OPERAND (result, 1);
3393 return expand_expr (result, target, mode, EXPAND_NORMAL);
3396 /* If either SRC or DEST is not a pointer type, don't do this
3397 operation in-line. */
3398 if (dest_align == 0 || src_align == 0)
3401 /* If LEN is not constant, call the normal function. */
3402 if (! host_integerp (len, 1))
3405 len_rtx = expand_normal (len);
3406 src_str = c_getstr (src);
3408 /* If SRC is a string constant and block move would be done
3409 by pieces, we can avoid loading the string from memory
3410 and only store the computed constants. */
3412 && GET_CODE (len_rtx) == CONST_INT
3413 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3414 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3415 (void *) src_str, dest_align))
3417 dest_mem = get_memory_rtx (dest, len);
3418 set_mem_align (dest_mem, dest_align);
/* store_by_pieces honors ENDP, so it already yields the right
   (start or end) address for the return value.  */
3419 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3420 builtin_memcpy_read_str,
3421 (void *) src_str, dest_align, endp);
3422 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3423 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise, with a constant length small enough to move by pieces,
   emit an inline piecewise copy.  */
3427 if (GET_CODE (len_rtx) == CONST_INT
3428 && can_move_by_pieces (INTVAL (len_rtx),
3429 MIN (dest_align, src_align)))
3431 dest_mem = get_memory_rtx (dest, len);
3432 set_mem_align (dest_mem, dest_align);
3433 src_mem = get_memory_rtx (src, len);
3434 set_mem_align (src_mem, src_align);
3435 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3436 MIN (dest_align, src_align), endp);
3437 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3438 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3446 /* Expand expression EXP, which is a call to the memmove builtin.  Return
3447 NULL_RTX if we failed; the caller should emit a normal call. */
3450 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3452 if (!validate_arglist (exp,
3453 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3457 tree dest = CALL_EXPR_ARG (exp, 0);
3458 tree src = CALL_EXPR_ARG (exp, 1);
3459 tree len = CALL_EXPR_ARG (exp, 2);
/* Thin wrapper: all real work happens in the args helper so it can also
   be reached without a CALL_EXPR (e.g. from bcopy expansion below).  */
3460 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3461 target, mode, ignore);
3465 /* Helper function to do the actual work for expand_builtin_memmove.  The
3466 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3467 so that this can also be called without constructing an actual CALL_EXPR.
3468 TYPE is the return type of the call.  The other arguments and return value
3469 are the same as for expand_builtin_memmove. */
3472 expand_builtin_memmove_args (tree dest, tree src, tree len,
3473 tree type, rtx target, enum machine_mode mode,
/* endp==3 below appears to be the memmove variant of the memory-op
   folder (overlap allowed) — confirm against fold_builtin_memory_op.  */
3476 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3480 while (TREE_CODE (result) == COMPOUND_EXPR)
3482 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3484 result = TREE_OPERAND (result, 1);
3486 return expand_expr (result, target, mode, EXPAND_NORMAL);
3489 /* Otherwise, call the normal function.  Memmove is not expanded
   inline here because overlap handling requires the library semantics. */
3493 /* Expand expression EXP, which is a call to the bcopy builtin.  Return
3494 NULL_RTX if we failed; the caller should emit a normal call. */
3497 expand_builtin_bcopy (tree exp, int ignore)
3499 tree type = TREE_TYPE (exp);
3500 tree src, dest, size;
3502 if (!validate_arglist (exp,
3503 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* NOTE: bcopy's argument order is (src, dest, size) — the reverse of
   memmove — hence the swap below.  */
3506 src = CALL_EXPR_ARG (exp, 0);
3507 dest = CALL_EXPR_ARG (exp, 1);
3508 size = CALL_EXPR_ARG (exp, 2);
3510 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3511 This is done this way so that if it isn't expanded inline, we fall
3512 back to calling bcopy instead of memmove. */
3513 return expand_builtin_memmove_args (dest, src,
3514 fold_convert (sizetype, size),
3515 type, const0_rtx, VOIDmode,
/* Targets without a movstr insn pattern get dummy definitions so the
   code below still compiles; HAVE_movstr == 0 disables the path.  */
3520 # define HAVE_movstr 0
3521 # define CODE_FOR_movstr CODE_FOR_nothing
3524 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3525 we failed, the caller should emit a normal call, otherwise try to
3526 get the result in TARGET, if convenient.  If ENDP is 0 return the
3527 destination pointer, if ENDP is 1 return the end pointer ala
3528 mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy (tail of comment missing from this listing).  */
3532 expand_movstr (tree dest, tree src, rtx target, int endp)
3538 const struct insn_data * data;
3543 dest_mem = get_memory_rtx (dest, NULL);
3544 src_mem = get_memory_rtx (src, NULL);
/* When the caller wants the destination pointer back, keep it in a
   register and rewrite DEST_MEM to use that register as its address.  */
3547 target = force_reg (Pmode, XEXP (dest_mem, 0));
3548 dest_mem = replace_equiv_address (dest_mem, target);
3549 end = gen_reg_rtx (Pmode);
3553 if (target == 0 || target == const0_rtx)
3555 end = gen_reg_rtx (Pmode);
/* Adjust END to the mode the movstr pattern's first operand expects.  */
3563 data = insn_data + CODE_FOR_movstr;
3565 if (data->operand[0].mode != VOIDmode)
3566 end = gen_lowpart (data->operand[0].mode, end);
3568 insn = data->genfun (end, dest_mem, src_mem);
3574 /* movstr is supposed to set end to the address of the NUL
3575 terminator.  If the caller requested a mempcpy-like return value,
   adjust it by one (comment tail missing from this listing).  */
3577 if (endp == 1 && target != const0_rtx)
3579 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3580 emit_move_insn (target, force_operand (tem, NULL_RTX));
3586 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3587 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3588 try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient — comment tail missing from this listing).  */
3592 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3594 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3596 tree dest = CALL_EXPR_ARG (exp, 0);
3597 tree src = CALL_EXPR_ARG (exp, 1);
/* Thin wrapper over the args helper (also used by stpcpy expansion).  */
3598 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3603 /* Helper function to do the actual work for expand_builtin_strcpy.  The
3604 arguments to the builtin_strcpy call DEST and SRC are broken out
3605 so that this can also be called without constructing an actual CALL_EXPR.
3606 The other arguments and return value are the same as for
3607 expand_builtin_strcpy. */
3610 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3611 rtx target, enum machine_mode mode)
/* Fold at compile time when possible; otherwise fall back to the
   target's movstr pattern (endp == 0: return the destination pointer). */
3613 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3615 return expand_expr (result, target, mode, EXPAND_NORMAL);
3616 return expand_movstr (dest, src, target, /*endp=*/0);
3620 /* Expand a call EXP to the stpcpy builtin.
3621 Return NULL_RTX if we failed; the caller should emit a normal call,
3622 otherwise try to get the result in TARGET, if convenient (and in
3623 mode MODE if that's convenient). */
3626 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3630 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3633 dst = CALL_EXPR_ARG (exp, 0);
3634 src = CALL_EXPR_ARG (exp, 1);
3636 /* If return value is ignored, transform stpcpy into strcpy. */
3637 if (target == const0_rtx)
3639 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3643 return expand_expr (build_call_expr (fn, 2, dst, src),
3644 target, mode, EXPAND_NORMAL);
3651 /* Ensure we get an actual string whose length can be evaluated at
3652 compile-time, not an expression containing a string.  This is
3653 because the latter will potentially produce pessimized code
3654 when used to produce the return value. */
3655 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3656 return expand_movstr (dst, src, target, /*endp=*/2);
/* Known source length: expand as mempcpy of len + 1 bytes; endp == 2
   asks for the end pointer minus one, i.e. the address of the NUL,
   which is exactly stpcpy's return value.  */
3658 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3659 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3660 target, mode, /*endp=*/2);
/* If mempcpy expansion failed but LEN is a constant, fall back to a
   strcpy expansion and compute the return value as DST + LEN.  */
3665 if (TREE_CODE (len) == INTEGER_CST)
3667 rtx len_rtx = expand_normal (len);
3669 if (GET_CODE (len_rtx) == CONST_INT)
3671 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3672 dst, src, target, mode);
3678 if (mode != VOIDmode)
3679 target = gen_reg_rtx (mode);
3681 target = gen_reg_rtx (GET_MODE (ret));
3683 if (GET_MODE (target) != GET_MODE (ret))
3684 ret = gen_lowpart (GET_MODE (target), ret);
3686 ret = plus_constant (ret, INTVAL (len_rtx));
3687 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3695 return expand_movstr (dst, src, target, /*endp=*/2);
3699 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3700 bytes from constant string DATA + OFFSET and return it as target
   constant (comment tail missing from this listing).  */
3704 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3705 enum machine_mode mode)
3707 const char *str = (const char *) data;
/* Past the end of the source string strncpy pads with zeros; the
   (omitted) branch body presumably returns a zero constant here.  */
3709 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3712 return c_readstr (str + offset, mode);
3715 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3716 NULL_RTX if we failed; the caller should emit a normal call. */
3719 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3721 tree fndecl = get_callee_fndecl (exp);
3723 if (validate_arglist (exp,
3724 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3726 tree dest = CALL_EXPR_ARG (exp, 0);
3727 tree src = CALL_EXPR_ARG (exp, 1);
3728 tree len = CALL_EXPR_ARG (exp, 2);
/* c_strlen with arg 1 allows a conservative upper-bound estimate.  */
3729 tree slen = c_strlen (src, 1);
3730 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
/* Expand COMPOUND_EXPR wrappers for side effects, then the value.  */
3734 while (TREE_CODE (result) == COMPOUND_EXPR)
3736 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3738 result = TREE_OPERAND (result, 1);
3740 return expand_expr (result, target, mode, EXPAND_NORMAL);
3743 /* We must be passed a constant len and src parameter. */
3744 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* SLEN becomes strlen(src) + 1, the number of bytes strncpy actually
   copies from the source (including the NUL).  */
3747 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3749 /* We're required to pad with trailing zeros if the requested
3750 len is greater than strlen(s2)+1.  In that case try to
3751 use store_by_pieces, if it fails, punt. */
3752 if (tree_int_cst_lt (slen, len))
3754 unsigned int dest_align
3755 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3756 const char *p = c_getstr (src);
3759 if (!p || dest_align == 0 || !host_integerp (len, 1)
3760 || !can_store_by_pieces (tree_low_cst (len, 1),
3761 builtin_strncpy_read_str,
3762 (void *) p, dest_align))
3765 dest_mem = get_memory_rtx (dest, len);
/* builtin_strncpy_read_str supplies zeros past the source's NUL, so a
   single store_by_pieces handles both the copy and the padding.  */
3766 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3767 builtin_strncpy_read_str,
3768 (void *) p, dest_align, 0);
3769 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3770 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3777 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3778 bytes from constant string DATA + OFFSET and return it as target
   constant (comment tail missing from this listing).  Here DATA is a
   single fill character, so OFFSET is irrelevant.  */
3782 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3783 enum machine_mode mode)
3785 const char *c = (const char *) data;
/* Build a MODE-sized buffer of the fill byte and read it back as an
   RTL constant.  */
3786 char *p = alloca (GET_MODE_SIZE (mode));
3788 memset (p, *c, GET_MODE_SIZE (mode));
3790 return c_readstr (p, mode);
3793 /* Callback routine for store_by_pieces.  Return the RTL of a register
3794 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3795 char value given in the RTL register data.  For example, if mode is
3796 4 bytes wide, return the RTL for 0x01010101*data. */
3799 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3800 enum machine_mode mode)
3806 size = GET_MODE_SIZE (mode);
/* COEFF is the constant 0x0101...01 of SIZE bytes; multiplying the
   byte value by it replicates the byte across the whole word.  */
3811 memset (p, 1, size);
3812 coeff = c_readstr (p, mode);
3814 target = convert_to_mode (mode, (rtx) data, 1);
3815 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3816 return force_reg (mode, target);
3819 /* Expand expression EXP, which is a call to the memset builtin.  Return
3820 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3821 try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient — comment tail missing from this listing).  */
3825 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3827 if (!validate_arglist (exp,
3828 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3832 tree dest = CALL_EXPR_ARG (exp, 0);
3833 tree val = CALL_EXPR_ARG (exp, 1);
3834 tree len = CALL_EXPR_ARG (exp, 2);
/* Thin wrapper: the args helper is shared with bzero expansion.  */
3835 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3839 /* Helper function to do the actual work for expand_builtin_memset.  The
3840 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3841 so that this can also be called without constructing an actual CALL_EXPR.
3842 The other arguments and return value are the same as for
3843 expand_builtin_memset. */
3846 expand_builtin_memset_args (tree dest, tree val, tree len,
3847 rtx target, enum machine_mode mode, tree orig_exp)
3850 enum built_in_function fcode;
3852 unsigned int dest_align;
3853 rtx dest_mem, dest_addr, len_rtx;
3854 HOST_WIDE_INT expected_size = -1;
3855 unsigned int expected_align = 0;
3857 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3859 /* If DEST is not a pointer type, don't do this operation in-line. */
3860 if (dest_align == 0)
/* Profile-driven hints may raise the expected alignment/size used by
   the storage expanders below.  */
3863 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3864 if (expected_align < dest_align)
3865 expected_align = dest_align;
3867 /* If the LEN parameter is zero, return DEST. */
3868 if (integer_zerop (len))
3870 /* Evaluate and ignore VAL in case it has side-effects. */
3871 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3872 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3875 /* Stabilize the arguments in case we fail. */
3876 dest = builtin_save_expr (dest);
3877 val = builtin_save_expr (val);
3878 len = builtin_save_expr (len);
3880 len_rtx = expand_normal (len);
3881 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: replicate the byte at run time (see
   builtin_memset_gen_str) or fall back to the setmem pattern.  */
3883 if (TREE_CODE (val) != INTEGER_CST)
3887 val_rtx = expand_normal (val);
3888 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3891 /* Assume that we can memset by pieces if we can store
3892 * the coefficients by pieces (in the required modes).
3893 * We can't pass builtin_memset_gen_str as that emits RTL. */
3895 if (host_integerp (len, 1)
3896 && !(optimize_size && tree_low_cst (len, 1) > 1)
3897 && can_store_by_pieces (tree_low_cst (len, 1),
3898 builtin_memset_read_str, &c, dest_align))
3900 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3902 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3903 builtin_memset_gen_str, val_rtx, dest_align, 0);
3905 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3906 dest_align, expected_align,
3910 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3911 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value: convert it to a host char, then store by
   pieces or via the target's setmem pattern.  */
3915 if (target_char_cast (val, &c))
3920 if (host_integerp (len, 1)
3921 && !(optimize_size && tree_low_cst (len, 1) > 1)
3922 && can_store_by_pieces (tree_low_cst (len, 1),
3923 builtin_memset_read_str, &c, dest_align))
3924 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3925 builtin_memset_read_str, &c, dest_align, 0);
3926 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3927 dest_align, expected_align,
3931 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3932 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Fill value is zero (the common case): use clear_storage.  */
3936 set_mem_align (dest_mem, dest_align);
3937 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3938 CALL_EXPR_TAILCALL (orig_exp)
3939 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3940 expected_align, expected_size);
3944 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3945 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* All inline strategies failed: emit a library call.  Rebuild the call
   from ORIG_EXP's fndecl so a bzero stays bzero and a memset stays
   memset, preserving the tail-call flag.  */
3951 fndecl = get_callee_fndecl (orig_exp);
3952 fcode = DECL_FUNCTION_CODE (fndecl);
3953 if (fcode == BUILT_IN_MEMSET)
3954 fn = build_call_expr (fndecl, 3, dest, val, len);
3955 else if (fcode == BUILT_IN_BZERO)
3956 fn = build_call_expr (fndecl, 2, dest, len);
3959 if (TREE_CODE (fn) == CALL_EXPR)
3960 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3961 return expand_call (fn, target, target == const0_rtx);
3964 /* Expand expression EXP, which is a call to the bzero builtin.  Return
3965 NULL_RTX if we failed; the caller should emit a normal call. */
3968 expand_builtin_bzero (tree exp)
3972 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3975 dest = CALL_EXPR_ARG (exp, 0);
3976 size = CALL_EXPR_ARG (exp, 1);
3978 /* New argument list transforming bzero(ptr x, int y) to
3979 memset(ptr x, int 0, size_t y).  This is done this way
3980 so that if it isn't expanded inline, we fallback to
3981 calling bzero instead of memset. */
/* const0_rtx as TARGET marks the return value as ignored (bzero
   returns void).  */
3983 return expand_builtin_memset_args (dest, integer_zero_node,
3984 fold_convert (sizetype, size),
3985 const0_rtx, VOIDmode, exp);
3988 /* Expand a call to the memchr builtin.  Return NULL_RTX if we failed; the
3989 caller should emit a normal call, otherwise try to get the result
3990 in TARGET, if convenient (and in mode MODE if that's convenient). */
3993 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
3995 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
3996 INTEGER_TYPE, VOID_TYPE))
3998 tree type = TREE_TYPE (exp);
/* Fold memchr at compile time when possible; expand the folded tree
   on success.  */
3999 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4000 CALL_EXPR_ARG (exp, 1),
4001 CALL_EXPR_ARG (exp, 2), type);
4003 return expand_expr (result, target, mode, EXPAND_NORMAL);
4008 /* Expand expression EXP, which is a call to the memcmp built-in function.
4009 Return NULL_RTX if we failed and the
4010 caller should emit a normal call, otherwise try to get the result in
4011 TARGET, if convenient (and in mode MODE, if that's convenient). */
4014 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4016 if (!validate_arglist (exp,
4017 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* First try compile-time folding.  */
4021 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4022 CALL_EXPR_ARG (exp, 1),
4023 CALL_EXPR_ARG (exp, 2));
4025 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* Otherwise try the target's cmpmem/cmpstrn insn patterns.  */
4028 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4030 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4033 tree arg1 = CALL_EXPR_ARG (exp, 0);
4034 tree arg2 = CALL_EXPR_ARG (exp, 1);
4035 tree len = CALL_EXPR_ARG (exp, 2);
4038 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4040 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4041 enum machine_mode insn_mode;
/* Prefer cmpmemsi; fall back to cmpstrnsi when available.  */
4043 #ifdef HAVE_cmpmemsi
4045 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4048 #ifdef HAVE_cmpstrnsi
4050 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4055 /* If we don't have POINTER_TYPE, call the function. */
4056 if (arg1_align == 0 || arg2_align == 0)
4059 /* Make a place to write the result of the instruction. */
4062 && REG_P (result) && GET_MODE (result) == insn_mode
4063 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4064 result = gen_reg_rtx (insn_mode);
4066 arg1_rtx = get_memory_rtx (arg1, len);
4067 arg2_rtx = get_memory_rtx (arg2, len);
4068 arg3_rtx = expand_normal (len);
4070 /* Set MEM_SIZE as appropriate. */
4071 if (GET_CODE (arg3_rtx) == CONST_INT)
4073 set_mem_size (arg1_rtx, arg3_rtx);
4074 set_mem_size (arg2_rtx, arg3_rtx);
4077 #ifdef HAVE_cmpmemsi
4079 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4080 GEN_INT (MIN (arg1_align, arg2_align)));
4083 #ifdef HAVE_cmpstrnsi
4085 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4086 GEN_INT (MIN (arg1_align, arg2_align)));
/* No usable insn pattern: emit a direct library call to memcmp with
   the already-stabilized operands.  */
4094 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4095 TYPE_MODE (integer_type_node), 3,
4096 XEXP (arg1_rtx, 0), Pmode,
4097 XEXP (arg2_rtx, 0), Pmode,
4098 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4099 TYPE_UNSIGNED (sizetype)),
4100 TYPE_MODE (sizetype));
4102 /* Return the value in the proper mode for this function. */
4103 mode = TYPE_MODE (TREE_TYPE (exp));
4104 if (GET_MODE (result) == mode)
4106 else if (target != 0)
4108 convert_move (target, result, 0);
4112 return convert_to_mode (mode, result, 0);
4119 /* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
4120 if we failed; the caller should emit a normal call, otherwise try to get
4121 the result in TARGET, if convenient. */
4124 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4126 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* First try compile-time folding.  */
4130 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4131 CALL_EXPR_ARG (exp, 1));
4133 return expand_expr (result, target, mode, EXPAND_NORMAL);
4136 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4137 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4138 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4140 rtx arg1_rtx, arg2_rtx;
4141 rtx result, insn = NULL_RTX;
4143 tree arg1 = CALL_EXPR_ARG (exp, 0);
4144 tree arg2 = CALL_EXPR_ARG (exp, 1);
4147 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4149 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4151 /* If we don't have POINTER_TYPE, call the function. */
4152 if (arg1_align == 0 || arg2_align == 0)
4155 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4156 arg1 = builtin_save_expr (arg1);
4157 arg2 = builtin_save_expr (arg2);
4159 arg1_rtx = get_memory_rtx (arg1, NULL);
4160 arg2_rtx = get_memory_rtx (arg2, NULL);
4162 #ifdef HAVE_cmpstrsi
4163 /* Try to call cmpstrsi. */
4166 enum machine_mode insn_mode
4167 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4169 /* Make a place to write the result of the instruction. */
4172 && REG_P (result) && GET_MODE (result) == insn_mode
4173 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4174 result = gen_reg_rtx (insn_mode);
4176 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4177 GEN_INT (MIN (arg1_align, arg2_align)));
4180 #ifdef HAVE_cmpstrnsi
4181 /* Try to determine at least one length and call cmpstrnsi. */
4182 if (!insn && HAVE_cmpstrnsi)
4187 enum machine_mode insn_mode
4188 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Get upper-bound lengths of both strings; NUL is included via +1 so
   the bounded compare also covers the terminators.  */
4189 tree len1 = c_strlen (arg1, 1);
4190 tree len2 = c_strlen (arg2, 1);
4193 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4195 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4197 /* If we don't have a constant length for the first, use the length
4198 of the second, if we know it.  We don't require a constant for
4199 this case; some cost analysis could be done if both are available
4200 but neither is constant.  For now, assume they're equally cheap,
4201 unless one has side effects.  If both strings have constant lengths,
   use the smaller (comment tail missing from this listing).  */
4208 else if (TREE_SIDE_EFFECTS (len1))
4210 else if (TREE_SIDE_EFFECTS (len2))
4212 else if (TREE_CODE (len1) != INTEGER_CST)
4214 else if (TREE_CODE (len2) != INTEGER_CST)
4216 else if (tree_int_cst_lt (len1, len2))
4221 /* If both arguments have side effects, we cannot optimize. */
4222 if (!len || TREE_SIDE_EFFECTS (len))
4225 arg3_rtx = expand_normal (len);
4227 /* Make a place to write the result of the instruction. */
4230 && REG_P (result) && GET_MODE (result) == insn_mode
4231 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4232 result = gen_reg_rtx (insn_mode);
4234 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4235 GEN_INT (MIN (arg1_align, arg2_align)));
4243 /* Return the value in the proper mode for this function. */
4244 mode = TYPE_MODE (TREE_TYPE (exp));
4245 if (GET_MODE (result) == mode)
4248 return convert_to_mode (mode, result, 0);
4249 convert_move (target, result, 0);
4253 /* Expand the library call ourselves using a stabilized argument
4254 list to avoid re-evaluating the function's arguments twice. */
4255 #ifdef HAVE_cmpstrnsi
4258 fndecl = get_callee_fndecl (exp);
4259 fn = build_call_expr (fndecl, 2, arg1, arg2);
4260 if (TREE_CODE (fn) == CALL_EXPR)
4261 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4262 return expand_call (fn, target, target == const0_rtx);
4268 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
4269 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4270 the result in TARGET, if convenient. */
4273 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4275 if (!validate_arglist (exp,
4276 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* First try compile-time folding.  */
4280 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4281 CALL_EXPR_ARG (exp, 1),
4282 CALL_EXPR_ARG (exp, 2));
4284 return expand_expr (result, target, mode, EXPAND_NORMAL);
4287 /* If c_strlen can determine an expression for one of the string
4288 lengths, and it doesn't have side effects, then emit cmpstrnsi
4289 using length MIN(strlen(string)+1, arg3). */
4290 #ifdef HAVE_cmpstrnsi
4293 tree len, len1, len2;
4294 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4297 tree arg1 = CALL_EXPR_ARG (exp, 0);
4298 tree arg2 = CALL_EXPR_ARG (exp, 1);
4299 tree arg3 = CALL_EXPR_ARG (exp, 2);
4302 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4304 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4305 enum machine_mode insn_mode
4306 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4308 len1 = c_strlen (arg1, 1);
4309 len2 = c_strlen (arg2, 1);
/* Include the terminating NUL in each known length.  */
4312 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4314 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4316 /* If we don't have a constant length for the first, use the length
4317 of the second, if we know it.  We don't require a constant for
4318 this case; some cost analysis could be done if both are available
4319 but neither is constant.  For now, assume they're equally cheap,
4320 unless one has side effects.  If both strings have constant lengths,
   use the smaller (comment tail missing from this listing).  */
4327 else if (TREE_SIDE_EFFECTS (len1))
4329 else if (TREE_SIDE_EFFECTS (len2))
4331 else if (TREE_CODE (len1) != INTEGER_CST)
4333 else if (TREE_CODE (len2) != INTEGER_CST)
4335 else if (tree_int_cst_lt (len1, len2))
4340 /* If both arguments have side effects, we cannot optimize. */
4341 if (!len || TREE_SIDE_EFFECTS (len))
4344 /* The actual new length parameter is MIN(len,arg3). */
4345 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4346 fold_convert (TREE_TYPE (len), arg3));
4348 /* If we don't have POINTER_TYPE, call the function. */
4349 if (arg1_align == 0 || arg2_align == 0)
4352 /* Make a place to write the result of the instruction. */
4355 && REG_P (result) && GET_MODE (result) == insn_mode
4356 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4357 result = gen_reg_rtx (insn_mode);
4359 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4360 arg1 = builtin_save_expr (arg1);
4361 arg2 = builtin_save_expr (arg2);
4362 len = builtin_save_expr (len);
4364 arg1_rtx = get_memory_rtx (arg1, len);
4365 arg2_rtx = get_memory_rtx (arg2, len);
4366 arg3_rtx = expand_normal (len);
4367 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4368 GEN_INT (MIN (arg1_align, arg2_align)));
4373 /* Return the value in the proper mode for this function. */
4374 mode = TYPE_MODE (TREE_TYPE (exp));
4375 if (GET_MODE (result) == mode)
4378 return convert_to_mode (mode, result, 0);
4379 convert_move (target, result, 0);
4383 /* Expand the library call ourselves using a stabilized argument
4384 list to avoid re-evaluating the function's arguments twice. */
4385 fndecl = get_callee_fndecl (exp);
4386 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4387 if (TREE_CODE (fn) == CALL_EXPR)
4388 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4389 return expand_call (fn, target, target == const0_rtx);
4395 /* Expand expression EXP, which is a call to the strcat builtin.
4396 Return NULL_RTX if we failed the caller should emit a normal call,
4397 otherwise try to get the result in TARGET, if convenient. */
/* NOTE(review): this listing elides short source lines (braces, lone
   returns); confirm control-flow structure against the full file. */
4400 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
/* Bail out unless the call really is (pointer, pointer). */
4402 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4406 tree dst = CALL_EXPR_ARG (exp, 0);
4407 tree src = CALL_EXPR_ARG (exp, 1);
/* P is non-null only when SRC is a readable string literal. */
4408 const char *p = c_getstr (src);
4410 /* If the string length is zero, return the dst parameter. */
4411 if (p && *p == '\0')
4412 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4416 /* See if we can store by pieces into (dst + strlen(dst)). */
4417 tree newsrc, newdst,
4418 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4421 /* Stabilize the argument list.  Both DST and SRC may be evaluated
4422 more than once below, so guard against double side effects. */
4422 newsrc = builtin_save_expr (src);
4423 dst = builtin_save_expr (dst);
4427 /* Create strlen (dst). */
4428 newdst = build_call_expr (strlen_fn, 1, dst);
4429 /* Create (dst + (cast) strlen (dst)). */
4430 newdst = fold_convert (TREE_TYPE (dst), newdst)
4430 newdst = fold_convert (TREE_TYPE (dst), newdst);
4431 newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4433 newdst = builtin_save_expr (newdst);
/* If expanding as a strcpy into dst+strlen(dst) fails, discard the
   pending insn sequence and (presumably) fall back to a library call. */
4435 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4437 end_sequence (); /* Stop sequence. */
4441 /* Output the entire sequence. */
4442 insns = get_insns ();
/* strcat returns its first argument. */
4446 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4453 /* Expand expression EXP, which is a call to the strncat builtin.
4454 Return NULL_RTX if we failed the caller should emit a normal call,
4455 otherwise try to get the result in TARGET, if convenient. */
4458 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4460 if (validate_arglist (exp,
4461 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Delegate the actual simplification to the tree-level folder;
   it returns NULL_TREE when no simplification applies. */
4463 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4464 CALL_EXPR_ARG (exp, 1),
4465 CALL_EXPR_ARG (exp, 2));
4467 return expand_expr (result, target, mode, EXPAND_NORMAL);
4472 /* Expand expression EXP, which is a call to the strspn builtin.
4473 Return NULL_RTX if we failed the caller should emit a normal call,
4474 otherwise try to get the result in TARGET, if convenient. */
4477 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4479 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Let the tree folder try to compute the result at compile time. */
4481 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4482 CALL_EXPR_ARG (exp, 1));
4484 return expand_expr (result, target, mode, EXPAND_NORMAL);
4489 /* Expand expression EXP, which is a call to the strcspn builtin.
4490 Return NULL_RTX if we failed the caller should emit a normal call,
4491 otherwise try to get the result in TARGET, if convenient. */
4494 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4496 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Mirrors expand_builtin_strspn: fold at tree level, then expand. */
4498 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4499 CALL_EXPR_ARG (exp, 1));
4501 return expand_expr (result, target, mode, EXPAND_NORMAL);
4506 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4507 if that's convenient. */
4510 expand_builtin_saveregs (void)
4514 /* Don't do __builtin_saveregs more than once in a function.
4515 Save the result of the first call and reuse it. */
4516 if (saveregs_value != 0)
4517 return saveregs_value;
4519 /* When this function is called, it means that registers must be
4520 saved on entry to this function. So we migrate the call to the
4521 first insn of this function. */
4525 /* Do whatever the machine needs done in this case. */
4526 val = targetm.calls.expand_builtin_saveregs ();
/* Cache the result for subsequent calls in this function. */
4531 saveregs_value = val;
4533 /* Put the insns after the NOTE that starts the function. If this
4534 is inside a start_sequence, make the outer-level insn chain current, so
4535 the code is placed at the start of the function. */
4536 push_topmost_sequence ();
4537 emit_insn_after (seq, entry_of_function ());
4538 pop_topmost_sequence ();
4543 /* __builtin_args_info (N) returns word N of the arg space info
4544 for the current function. The number and meanings of words
4545 is controlled by the definition of CUMULATIVE_ARGS. */
4548 expand_builtin_args_info (tree exp)
4550 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* View the CUMULATIVE_ARGS record as an array of ints.  (Fixed here:
   the text had been corrupted to "¤t_..." by an HTML-entity
   mangling of "&current_...".) */
4551 int *word_ptr = (int *) &current_function_args_info;
/* The int-array view is only valid if the struct is int-sized chunks. */
4553 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4555 if (call_expr_nargs (exp) != 0)
/* The word index must be a compile-time integer constant. */
4557 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4558 error ("argument of %<__builtin_args_info%> must be constant");
4561 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4563 if (wordnum < 0 || wordnum >= nwords)
4564 error ("argument of %<__builtin_args_info%> out of range");
4566 return GEN_INT (word_ptr[wordnum]);
4570 error ("missing argument in %<__builtin_args_info%>");
4575 /* Expand a call to __builtin_next_arg.  Returns the address of the
4576 first anonymous (variadic) argument as an rtx. */
4578 expand_builtin_next_arg (void)
4580 /* Checking arguments is already done in fold_builtin_next_arg
4581 that must be called before this function. */
/* next_arg = internal arg pointer + offset of the first vararg. */
4582 return expand_binop (Pmode, add_optab,
4583 current_function_internal_arg_pointer,
4584 current_function_arg_offset_rtx,
4585 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4588 /* Make it easier for the backends by protecting the valist argument
4589 from multiple evaluations.  If NEEDS_LVALUE, the caller will write
   through the result; otherwise a value suffices (hedged: inferred from
   the parameter name -- confirm against callers). */
4592 stabilize_va_list (tree valist, int needs_lvalue)
4594 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4596 if (TREE_SIDE_EFFECTS (valist))
4597 valist = save_expr (valist);
4599 /* For this case, the backends will be expecting a pointer to
4600 TREE_TYPE (va_list_type_node), but it's possible we've
4601 actually been given an array (an actual va_list_type_node).
4603 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4605 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4606 valist = build_fold_addr_expr_with_type (valist, p1);
/* Non-array va_list: take the address, stabilize it, then deref so
   the result is still an lvalue of va_list type. */
4615 if (! TREE_SIDE_EFFECTS (valist))
4618 pt = build_pointer_type (va_list_type_node);
4619 valist = fold_build1 (ADDR_EXPR, pt, valist);
4620 TREE_SIDE_EFFECTS (valist) = 1;
4623 if (TREE_SIDE_EFFECTS (valist))
4624 valist = save_expr (valist);
4625 valist = build_fold_indirect_ref (valist);
4631 /* The "standard" definition of va_list is void*.  Used as the default
   target hook when the backend does not override va_list's type. */
4634 std_build_builtin_va_list (void)
4636 return ptr_type_node;
4639 /* The "standard" implementation of va_start: just assign `nextarg' to
4640 the variable VALIST. */
4643 std_expand_builtin_va_start (tree valist, rtx nextarg)
/* Build "valist = (void *) nextarg" and expand it for side effects. */
4647 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
4648 make_tree (ptr_type_node, nextarg));
4649 TREE_SIDE_EFFECTS (t) = 1;
4651 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4654 /* Expand EXP, a call to __builtin_va_start. */
4657 expand_builtin_va_start (tree exp)
4662 if (call_expr_nargs (exp) < 2)
4664 error ("too few arguments to function %<va_start%>")
4664 error ("too few arguments to function %<va_start%>");
/* fold_builtin_next_arg diagnoses a bad second argument; give up then. */
4668 if (fold_builtin_next_arg (exp, true))
4671 nextarg = expand_builtin_next_arg ();
4672 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
/* Let the target expand va_start if it defines a macro for it,
   otherwise use the standard pointer-assignment implementation. */
4674 #ifdef EXPAND_BUILTIN_VA_START
4675 EXPAND_BUILTIN_VA_START (valist, nextarg);
4677 std_expand_builtin_va_start (valist, nextarg);
4683 /* The "standard" implementation of va_arg: read the value from the
4684 current (padded) address and increment by the (padded) size. */
4687 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4689 tree addr, t, type_size, rounded_size, valist_tmp;
4690 unsigned HOST_WIDE_INT align, boundary;
4693 #ifdef ARGS_GROW_DOWNWARD
4694 /* All of the alignment and movement below is for args-grow-up machines.
4695 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4696 implement their own specialized gimplify_va_arg_expr routines. */
/* Arguments passed by reference are fetched as a pointer, then
   dereferenced at the end (see build_va_arg_indirect_ref below). */
4700 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4702 type = build_pointer_type (type);
4704 align = PARM_BOUNDARY / BITS_PER_UNIT;
4705 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4707 /* Hoist the valist value into a temporary for the moment. */
4708 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4710 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4711 requires greater alignment, we must perform dynamic alignment. */
4712 if (boundary > align
4713 && !integer_zerop (TYPE_SIZE (type)))
/* valist_tmp = (valist_tmp + boundary-1) & -boundary, i.e. round the
   pointer up to the required boundary. */
4715 t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
4716 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4717 build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
4718 gimplify_and_add (t, pre_p);
4720 t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
4721 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4722 build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
4723 gimplify_and_add (t, pre_p);
4728 /* If the actual alignment is less than the alignment of the type,
4729 adjust the type accordingly so that we don't assume strict alignment
4730 when deferencing the pointer. */
4731 boundary *= BITS_PER_UNIT;
4732 if (boundary < TYPE_ALIGN (type))
4734 type = build_variant_type_copy (type);
4735 TYPE_ALIGN (type) = boundary;
4738 /* Compute the rounded size of the type. */
4739 type_size = size_in_bytes (type);
4740 rounded_size = round_up (type_size, align);
4742 /* Reduce rounded_size so it's sharable with the postqueue. */
4743 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4747 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4749 /* Small args are padded downward. */
4750 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4751 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4752 size_binop (MINUS_EXPR, rounded_size, type_size));
4753 t = fold_convert (TREE_TYPE (addr), t);
4754 addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
4757 /* Compute new value for AP. */
4758 t = fold_convert (TREE_TYPE (valist), rounded_size);
4759 t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
4760 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4761 gimplify_and_add (t, pre_p);
4763 addr = fold_convert (build_pointer_type (type), addr);
/* For pass-by-reference arguments, ADDR holds a pointer-to-pointer;
   an extra dereference yields the argument itself. */
4766 addr = build_va_arg_indirect_ref (addr);
4768 return build_va_arg_indirect_ref (addr);
4771 /* Build an indirect-ref expression over the given TREE, which represents a
4772 piece of a va_arg() expansion. */
4774 build_va_arg_indirect_ref (tree addr)
4776 addr = build_fold_indirect_ref (addr);
4778 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4784 /* Return a dummy expression of type TYPE in order to keep going after an
4785 error.  Built as *(TYPE *)0 so it has the right type and mode. */
4788 dummy_object (tree type)
4790 tree t = build_int_cst (build_pointer_type (type), 0);
4791 return build1 (INDIRECT_REF, type, t);
4794 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4795 builtin function, but a very special sort of operator. */
4797 enum gimplify_status
4798 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4800 tree promoted_type, want_va_type, have_va_type;
4801 tree valist = TREE_OPERAND (*expr_p, 0);
4802 tree type = TREE_TYPE (*expr_p);
4805 /* Verify that valist is of the proper type. */
4806 want_va_type = va_list_type_node;
4807 have_va_type = TREE_TYPE (valist);
4809 if (have_va_type == error_mark_node)
4812 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4814 /* If va_list is an array type, the argument may have decayed
4815 to a pointer type, e.g. by being passed to another function.
4816 In that case, unwrap both types so that we can compare the
4817 underlying records. */
4818 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4819 || POINTER_TYPE_P (have_va_type))
4821 want_va_type = TREE_TYPE (want_va_type);
4822 have_va_type = TREE_TYPE (have_va_type);
4826 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4828 error ("first argument to %<va_arg%> not of type %<va_list%>");
4832 /* Generate a diagnostic for requesting data of a type that cannot
4833 be passed through `...' due to type promotion at the call site. */
4834 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* Remember whether the follow-up hint was already printed once per
   compilation, so users aren't spammed with it. */
4837 static bool gave_help;
4839 /* Unfortunately, this is merely undefined, rather than a constraint
4840 violation, so we cannot make this an error. If this call is never
4841 executed, the program is still strictly conforming. */
4842 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4843 type, promoted_type);
4847 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4848 promoted_type, type);
4851 /* We can, however, treat "undefined" any way we please.
4852 Call abort to encourage the user to fix the program. */
4853 inform ("if this code is reached, the program will abort");
4854 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4855 append_to_statement_list (t, pre_p);
4857 /* This is dead code, but go ahead and finish so that the
4858 mode of the result comes out right. */
4859 *expr_p = dummy_object (type);
4864 /* Make it easier for the backends by protecting the valist argument
4865 from multiple evaluations. */
4866 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4868 /* For this case, the backends will be expecting a pointer to
4869 TREE_TYPE (va_list_type_node), but it's possible we've
4870 actually been given an array (an actual va_list_type_node).
4872 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4874 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4875 valist = build_fold_addr_expr_with_type (valist, p1);
4877 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
/* Non-array va_list must be gimplified as an lvalue, since the target
   hook may write back the updated pointer. */
4880 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4882 if (!targetm.gimplify_va_arg_expr)
4883 /* FIXME:Once most targets are converted we should merely
4884 assert this is non-null. */
4887 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4892 /* Expand EXP, a call to __builtin_va_end.  Nothing to do except
   evaluate the argument for its side effects. */
4895 expand_builtin_va_end (tree exp)
4897 tree valist = CALL_EXPR_ARG (exp, 0);
4899 /* Evaluate for side effects, if needed. I hate macros that don't
4900 do that. */
4901 if (TREE_SIDE_EFFECTS (valist))
4902 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4907 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4908 builtin rather than just as an assignment in stdarg.h because of the
4909 nastiness of array-type va_list types. */
4912 expand_builtin_va_copy (tree exp)
4916 dst = CALL_EXPR_ARG (exp, 0);
4917 src = CALL_EXPR_ARG (exp, 1);
/* DST must be an lvalue (1); SRC only needs to be readable (0). */
4919 dst = stabilize_va_list (dst, 1);
4920 src = stabilize_va_list (src, 0);
/* Scalar va_list: a plain assignment does the copy. */
4922 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4924 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4925 TREE_SIDE_EFFECTS (t) = 1;
4926 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array va_list: copy the whole object with a block move. */
4930 rtx dstb, srcb, size;
4932 /* Evaluate to pointers. */
4933 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4934 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4935 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4936 VOIDmode, EXPAND_NORMAL);
4938 dstb = convert_memory_address (Pmode, dstb);
4939 srcb = convert_memory_address (Pmode, srcb);
4941 /* "Dereference" to BLKmode memories. */
4942 dstb = gen_rtx_MEM (BLKmode, dstb);
4943 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4944 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4945 srcb = gen_rtx_MEM (BLKmode, srcb);
4946 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4947 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4950 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4956 /* Expand a call to one of the builtin functions __builtin_frame_address or
4957 __builtin_return_address.  FNDECL distinguishes which one. */
4960 expand_builtin_frame_address (tree fndecl, tree exp)
4962 /* The argument must be a nonnegative integer constant.
4963 It counts the number of frames to scan up the stack.
4964 The value is the return address saved in that frame. */
4965 if (call_expr_nargs (exp) == 0)
4966 /* Warning about missing arg was already issued. */
4968 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4970 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4971 error ("invalid argument to %<__builtin_frame_address%>");
4973 error ("invalid argument to %<__builtin_return_address%>");
/* The common worker handles both builtins; COUNT is the frame level. */
4979 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4980 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4982 /* Some ports cannot access arbitrary stack frames. */
4985 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4986 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4988 warning (0, "unsupported argument to %<__builtin_return_address%>");
4992 /* For __builtin_frame_address, return what we've got. */
4993 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Force non-constant addresses into a register before returning. */
4997 && ! CONSTANT_P (tem))
4998 tem = copy_to_mode_reg (Pmode, tem);
5003 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5004 we failed and the caller should emit a normal call, otherwise try to get
5005 the result in TARGET, if convenient. */
5008 expand_builtin_alloca (tree exp, rtx target)
5013 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5014 should always expand to function calls. These can be intercepted
5015 in libmudflap. */
5019 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5022 /* Compute the argument. */
5023 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5025 /* Allocate the desired space. */
5026 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
/* allocate_dynamic_stack_space works in Pmode; the builtin's value is a
   pointer, so normalize to ptr_mode. */
5027 result = convert_memory_address (ptr_mode, result);
5032 /* Expand a call to a bswap builtin with argument ARG0. MODE
5033 is the mode to expand with. */
5036 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5038 enum machine_mode mode;
5042 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5045 arg = CALL_EXPR_ARG (exp, 0);
5046 mode = TYPE_MODE (TREE_TYPE (arg));
5047 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* Byte-swap via the target's bswap pattern/optab. */
5049 target = expand_unop (mode, bswap_optab, op0, target, 1);
5051 gcc_assert (target);
5053 return convert_to_mode (mode, target, 0);
5056 /* Expand a call to a unary builtin in EXP.
5057 Return NULL_RTX if a normal call should be emitted rather than expanding the
5058 function in-line. If convenient, the result should be placed in TARGET.
5059 SUBTARGET may be used as the target for computing one of EXP's operands. */
5062 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5063 rtx subtarget, optab op_optab)
5067 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5070 /* Compute the argument. */
5071 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5072 VOIDmode, EXPAND_NORMAL);
5073 /* Compute op, into TARGET if possible.
5074 Set TARGET to wherever the result comes back. */
5075 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5076 op_optab, op0, target, 1);
5077 gcc_assert (target);
/* The result is computed in the argument's mode; convert to the
   builtin's declared return mode. */
5079 return convert_to_mode (target_mode, target, 0);
5082 /* If the string passed to fputs is a constant and is one character
5083 long, we attempt to transform this call into __builtin_fputc(). */
5086 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5088 /* Verify the arguments in the original call. */
5089 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* The tree-level folder returns NULL_TREE if no simplification applies;
   (target == const0_rtx) tells it whether the return value is unused. */
5091 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5092 CALL_EXPR_ARG (exp, 1),
5093 (target == const0_rtx),
5094 unlocked, NULL_TREE);
5096 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5101 /* Expand a call to __builtin_expect. We just return our argument
5102 as the builtin_expect semantic should've been already executed by
5103 tree branch prediction pass. */
5106 expand_builtin_expect (tree exp, rtx target)
5110 if (call_expr_nargs (exp) < 2)
5112 arg = CALL_EXPR_ARG (exp, 0);
/* C is the expected value; unused at RTL time. */
5113 c = CALL_EXPR_ARG (exp, 1);
5115 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5116 /* When guessing was done, the hints should be already stripped away. */
5117 gcc_assert (!flag_guess_branch_prob);
/* Emit a trap: use the target's trap insn if it has one, otherwise
   fall back to calling abort(). */
5122 expand_builtin_trap (void)
5126 emit_insn (gen_trap ());
5129 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5133 /* Expand EXP, a call to fabs, fabsf or fabsl.
5134 Return NULL_RTX if a normal call should be emitted rather than expanding
5135 the function inline. If convenient, the result should be placed
5136 in TARGET. SUBTARGET may be used as the target for computing
5137 the operand. */
5140 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5142 enum machine_mode mode;
5146 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5149 arg = CALL_EXPR_ARG (exp, 0);
5150 mode = TYPE_MODE (TREE_TYPE (arg));
5151 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* expand_abs handles the absolute value; safe_from_p guards against
   TARGET overlapping the operand. */
5152 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5155 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5156 Return NULL is a normal call should be emitted rather than expanding the
5157 function inline. If convenient, the result should be placed in TARGET.
5158 SUBTARGET may be used as the target for computing the operand. */
5161 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5166 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
/* op0 carries the magnitude, op1 carries the sign. */
5169 arg = CALL_EXPR_ARG (exp, 0);
5170 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5172 arg = CALL_EXPR_ARG (exp, 1);
5173 op1 = expand_normal (arg);
5175 return expand_copysign (op0, op1, target);
5178 /* Create a new constant string literal and return a char* pointer to it.
5179 The STRING_CST value is the LEN characters at STR. */
5181 build_string_literal (int len, const char *str)
5183 tree t, elem, index, type;
5185 t = build_string (len, str);
/* Element type is "const char"; the array type is char[len]. */
5186 elem = build_type_variant (char_type_node, 1, 0);
5187 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5188 type = build_array_type (elem, index);
5189 TREE_TYPE (t) = type;
5190 TREE_CONSTANT (t) = 1;
5191 TREE_INVARIANT (t) = 1;
5192 TREE_READONLY (t) = 1;
5193 TREE_STATIC (t) = 1;
/* Take the address of the array, then convert to a plain element
   pointer (const char *). */
5195 type = build_pointer_type (type);
5196 t = build1 (ADDR_EXPR, type, t);
5198 type = build_pointer_type (elem);
5199 t = build1 (NOP_EXPR, type, t);
5203 /* Expand EXP, a call to printf or printf_unlocked.
5204 Return NULL_RTX if a normal call should be emitted rather than transforming
5205 the function inline. If convenient, the result should be placed in
5206 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5207 call. */
5209 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5210 bool unlocked)
5212 /* If we're using an unlocked function, assume the other unlocked
5213 functions exist explicitly. */
5214 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5215 : implicit_built_in_decls[BUILT_IN_PUTCHAR]
5215 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5216 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5217 : implicit_built_in_decls[BUILT_IN_PUTS];
5218 const char *fmt_str;
5221 int nargs = call_expr_nargs (exp);
5223 /* If the return value is used, don't do the transformation. */
5224 if (target != const0_rtx)
5227 /* Verify the required arguments in the original call. */
5230 fmt = CALL_EXPR_ARG (exp, 0);
5231 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5234 /* Check whether the format is a literal string constant. */
5235 fmt_str = c_getstr (fmt);
5236 if (fmt_str == NULL)
/* target_percent et al. are the target charset spellings of '%', "%s\n"
   and "%c"; bail if they cannot be determined. */
5239 if (!init_target_chars ())
5242 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5243 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5246 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5249 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5251 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5252 else if (strcmp (fmt_str, target_percent_c) == 0)
5255 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5258 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5262 /* We can't handle anything else with % args or %% ... yet. */
5263 if (strchr (fmt_str, target_percent))
5269 /* If the format specifier was "", printf does nothing. */
5270 if (fmt_str[0] == '\0')
5272 /* If the format specifier has length of 1, call putchar. */
5273 if (fmt_str[1] == '\0')
5275 /* Given printf("c"), (where c is any one character,)
5276 convert "c"[0] to an int and pass that to the replacement
5277 function. */
5278 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5280 fn = build_call_expr (fn_putchar, 1, arg);
5284 /* If the format specifier was "string\n", call puts("string"). */
5285 size_t len = strlen (fmt_str);
5286 if ((unsigned char)fmt_str[len - 1] == target_newline)
5288 /* Create a NUL-terminated string that's one char shorter
5289 than the original, stripping off the trailing '\n'. */
5290 char *newstr = alloca (len);
5291 memcpy (newstr, fmt_str, len - 1);
5292 newstr[len - 1] = 0;
5293 arg = build_string_literal (len, newstr);
5295 fn = build_call_expr (fn_puts, 1, arg);
5298 /* We'd like to arrange to call fputs(string,stdout) here,
5299 but we need stdout and don't have a way to get it yet. */
/* Propagate the tail-call flag onto the replacement call. */
5306 if (TREE_CODE (fn) == CALL_EXPR)
5307 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5308 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5311 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5312 Return NULL_RTX if a normal call should be emitted rather than transforming
5313 the function inline. If convenient, the result should be placed in
5314 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5315 call. */
5317 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5318 bool unlocked)
5320 /* If we're using an unlocked function, assume the other unlocked
5321 functions exist explicitly. */
5322 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5323 : implicit_built_in_decls[BUILT_IN_FPUTC];
5324 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5325 : implicit_built_in_decls[BUILT_IN_FPUTS];
5326 const char *fmt_str;
5329 int nargs = call_expr_nargs (exp);
5331 /* If the return value is used, don't do the transformation. */
5332 if (target != const0_rtx)
5335 /* Verify the required arguments in the original call. */
5338 fp = CALL_EXPR_ARG (exp, 0);
5339 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5341 fmt = CALL_EXPR_ARG (exp, 1);
5342 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5345 /* Check whether the format is a literal string constant. */
5346 fmt_str = c_getstr (fmt);
5347 if (fmt_str == NULL)
5350 if (!init_target_chars ())
5353 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5354 if (strcmp (fmt_str, target_percent_s) == 0)
5357 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5359 arg = CALL_EXPR_ARG (exp, 2);
5361 fn = build_call_expr (fn_fputs, 2, arg, fp);
5363 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5364 else if (strcmp (fmt_str, target_percent_c) == 0)
5367 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5369 arg = CALL_EXPR_ARG (exp, 2);
5371 fn = build_call_expr (fn_fputc, 2, arg, fp);
5375 /* We can't handle anything else with % args or %% ... yet. */
5376 if (strchr (fmt_str, target_percent))
5382 /* If the format specifier was "", fprintf does nothing. */
5383 if (fmt_str[0] == '\0')
5385 /* Evaluate and ignore FILE* argument for side-effects. */
5386 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5390 /* When "string" doesn't contain %, replace all cases of
5391 fprintf(stream,string) with fputs(string,stream). The fputs
5392 builtin will take care of special cases like length == 1. */
5394 fn = build_call_expr (fn_fputs, 2, fmt, fp);
/* Propagate the tail-call flag onto the replacement call. */
5399 if (TREE_CODE (fn) == CALL_EXPR)
5400 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5401 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5404 /* Expand a call EXP to sprintf. Return NULL_RTX if
5405 a normal call should be emitted rather than expanding the function
5406 inline. If convenient, the result should be placed in TARGET with
5407 mode MODE. */
5410 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5413 const char *fmt_str;
5414 int nargs = call_expr_nargs (exp);
5416 /* Verify the required arguments in the original call. */
5419 dest = CALL_EXPR_ARG (exp, 0);
5420 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
/* The format string is sprintf's SECOND argument.  (Fixed here: the
   index had been 0, which re-read the destination pointer as the
   format and would mis-apply the transformations below.) */
5422 fmt = CALL_EXPR_ARG (exp, 1);
5423 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5426 /* Check whether the format is a literal string constant. */
5427 fmt_str = c_getstr (fmt);
5428 if (fmt_str == NULL)
5431 if (!init_target_chars ())
5434 /* If the format doesn't contain % args or %%, use strcpy. */
5435 if (strchr (fmt_str, target_percent) == 0)
5437 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
/* Don't optimize sprintf (buf, "abc", ptr++) or when strcpy is
   unavailable. */
5440 if ((nargs > 2) || ! fn)
5442 expand_expr (build_call_expr (fn, 2, dest, fmt),
5443 const0_rtx, VOIDmode, EXPAND_NORMAL);
5444 if (target == const0_rtx)
/* sprintf returns the number of characters written. */
5446 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5447 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5449 /* If the format is "%s", use strcpy if the result isn't used. */
5450 else if (strcmp (fmt_str, target_percent_s) == 0)
5453 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5459 arg = CALL_EXPR_ARG (exp, 2);
5460 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
/* If the result is used, the return value (the length of ARG) must be
   known at compile time. */
5463 if (target != const0_rtx)
5465 len = c_strlen (arg, 1);
5466 if (! len || TREE_CODE (len) != INTEGER_CST)
5472 expand_expr (build_call_expr (fn, 2, dest, arg),
5473 const0_rtx, VOIDmode, EXPAND_NORMAL);
5475 if (target == const0_rtx)
5477 return expand_expr (len, target, mode, EXPAND_NORMAL);
5483 /* Expand a call to either the entry or exit function profiler.
   EXITP selects __cyg_profile_func_exit vs. _enter. */
5486 expand_builtin_profile_func (bool exitp)
/* "this" is the address of the current function, taken from its
   MEM-form DECL_RTL. */
5490 this = DECL_RTL (current_function_decl);
5491 gcc_assert (MEM_P (this));
5492 this = XEXP (this, 0);
5495 which = profile_function_exit_libfunc;
5497 which = profile_function_entry_libfunc;
/* The profiler hooks take (this_fn, call_site). */
5499 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5500 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5507 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5510 round_trampoline_addr (rtx tramp)
5512 rtx temp, addend, mask;
5514 /* If we don't need too much alignment, we'll have been guaranteed
5515 proper alignment by get_trampoline_type. */
5516 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5519 /* Round address up to desired boundary. */
/* tramp = (tramp + align-1) & -align, computed with expand_simple_binop
   so it works for addresses not known at compile time. */
5520 temp = gen_reg_rtx (Pmode);
5521 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5522 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5524 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5525 temp, 0, OPTAB_LIB_WIDEN);
5526 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5527 temp, 0, OPTAB_LIB_WIDEN);
/* Expand a call to __builtin_init_trampoline: copy the target's
   trampoline template into the buffer (if one exists) and fill in the
   target function address and static chain. */
5533 expand_builtin_init_trampoline (tree exp)
5535 tree t_tramp, t_func, t_chain;
5536 rtx r_tramp, r_func, r_chain;
5537 #ifdef TRAMPOLINE_TEMPLATE
5541 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5542 POINTER_TYPE, VOID_TYPE))
5545 t_tramp = CALL_EXPR_ARG (exp, 0);
5546 t_func = CALL_EXPR_ARG (exp, 1);
5547 t_chain = CALL_EXPR_ARG (exp, 2);
5549 r_tramp = expand_normal (t_tramp);
5550 r_func = expand_normal (t_func);
5551 r_chain = expand_normal (t_chain);
5553 /* Generate insns to initialize the trampoline. */
5554 r_tramp = round_trampoline_addr (r_tramp);
5555 #ifdef TRAMPOLINE_TEMPLATE
5556 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5557 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5558 emit_block_move (blktramp, assemble_trampoline_template (),
5559 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
/* Record that a trampoline was emitted (used e.g. for executable-stack
   markers) and let the target patch in func/chain. */
5561 trampolines_created = 1;
5562 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
/* Expand __builtin_adjust_trampoline: round the trampoline address to
   TRAMPOLINE_ALIGNMENT and apply any target-specific adjustment
   before the address is used as a function pointer.  */
5568 expand_builtin_adjust_trampoline (tree exp)
5572 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5575 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5576 tramp = round_trampoline_addr (tramp);
5577 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5578 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5584 /* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
5585 signbitd64, or signbitd128 function.
5586 Return NULL_RTX if a normal call should be emitted rather than expanding
5587 the function in-line. EXP is the expression that is a call to the builtin
5588 function; if convenient, the result should be placed in TARGET. */
5591 expand_builtin_signbit (tree exp, rtx target)
5593 const struct real_format *fmt;
5594 enum machine_mode fmode, imode, rmode;
5595 HOST_WIDE_INT hi, lo;
5600 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5603 arg = CALL_EXPR_ARG (exp, 0);
5604 fmode = TYPE_MODE (TREE_TYPE (arg));
5605 rmode = TYPE_MODE (TREE_TYPE (exp));
5606 fmt = REAL_MODE_FORMAT (fmode);
5608 /* For floating point formats without a sign bit, implement signbit
5610 bitpos = fmt->signbit_ro;
5613 /* But we can't do this if the format supports signed zero. */
5614 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
/* No sign bit in the representation: fold signbit(x) to x < 0.0 and
   expand that comparison instead.  */
5617 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5618 build_real (TREE_TYPE (arg), dconst0));
5619 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Otherwise extract the sign bit directly from the bit pattern.  */
5622 temp = expand_normal (arg);
5623 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
/* The value fits in one word: reinterpret it in the integer mode of
   the same size (bail out if none exists).  */
5625 imode = int_mode_for_mode (fmode);
5626 if (imode == BLKmode)
5628 temp = gen_lowpart (imode, temp);
5633 /* Handle targets with different FP word orders. */
5634 if (FLOAT_WORDS_BIG_ENDIAN)
5635 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5637 word = bitpos / BITS_PER_WORD;
/* Work on just the word containing the sign bit; bitpos becomes the
   bit's position within that word.  */
5638 temp = operand_subword_force (temp, word, fmode);
5639 bitpos = bitpos % BITS_PER_WORD;
5642 /* Force the intermediate word_mode (or narrower) result into a
5643 register. This avoids attempting to create paradoxical SUBREGs
5644 of floating point modes below. */
5645 temp = force_reg (imode, temp);
5647 /* If the bitpos is within the "result mode" lowpart, the operation
5648 can be implement with a single bitwise AND. Otherwise, we need
5649 a right shift and an AND. */
5651 if (bitpos < GET_MODE_BITSIZE (rmode))
/* Build the single-bit mask as a double-word constant (hi:lo) so a
   bit position beyond HOST_BITS_PER_WIDE_INT still works.  */
5653 if (bitpos < HOST_BITS_PER_WIDE_INT)
5656 lo = (HOST_WIDE_INT) 1 << bitpos;
5660 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5665 temp = gen_lowpart (rmode, temp);
5666 temp = expand_binop (rmode, and_optab, temp,
5667 immed_double_const (lo, hi, rmode),
5668 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5672 /* Perform a logical right shift to place the signbit in the least
5673 significant bit, then truncate the result to the desired mode
5674 and mask just this bit. */
5675 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5676 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5677 temp = gen_lowpart (rmode, temp);
5678 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5679 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5685 /* Expand fork or exec calls. TARGET is the desired target of the
5686 call. EXP is the call. FN is the
5687 identificator of the actual function. IGNORE is nonzero if the
5688 value is to be ignored. */
5691 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5696 /* If we are not profiling, just call the function. */
5697 if (!profile_arc_flag)
5700 /* Otherwise call the wrapper. This should be equivalent for the rest of
5701 compiler, so the code does not diverge, and the wrapper may run the
5702 code necessary for keeping the profiling sane. */
/* Map each fork/exec builtin to its libgcov wrapper, which flushes
   profiling counters around the process replacement.  */
5704 switch (DECL_FUNCTION_CODE (fn))
5707 id = get_identifier ("__gcov_fork");
5710 case BUILT_IN_EXECL:
5711 id = get_identifier ("__gcov_execl");
5714 case BUILT_IN_EXECV:
5715 id = get_identifier ("__gcov_execv");
5718 case BUILT_IN_EXECLP:
5719 id = get_identifier ("__gcov_execlp");
5722 case BUILT_IN_EXECLE:
5723 id = get_identifier ("__gcov_execle");
5726 case BUILT_IN_EXECVP:
5727 id = get_identifier ("__gcov_execvp");
5730 case BUILT_IN_EXECVE:
5731 id = get_identifier ("__gcov_execve");
/* Build an extern decl for the wrapper with the same type as FN,
   marked artificial/nothrow with default visibility, then rewrite
   the call to target it and expand the rewritten call.  */
5738 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5739 DECL_EXTERNAL (decl) = 1;
5740 TREE_PUBLIC (decl) = 1;
5741 DECL_ARTIFICIAL (decl) = 1;
5742 TREE_NOTHROW (decl) = 1;
5743 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5744 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5745 call = rewrite_call_expr (exp, 0, decl, 0);
5746 return expand_call (call, target, ignore);
5751 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5752 the pointer in these functions is void*, the tree optimizers may remove
5753 casts. The mode computed in expand_builtin isn't reliable either, due
5754 to __sync_bool_compare_and_swap.
5756 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5757 group of builtins. This gives us log2 of the mode size. */
5759 static inline enum machine_mode
5760 get_builtin_sync_mode (int fcode_diff)
5762 /* The size is not negotiable, so ask not to get BLKmode in return
5763 if the target indicates that a smaller size would be better. */
/* BITS_PER_UNIT << fcode_diff turns log2(byte size) into a bit size;
   the final 0 ("limit") argument forbids mode_for_size from giving up.  */
5764 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5767 /* Expand the memory expression LOC and return the appropriate memory operand
5768 for the builtin_sync operations. */
5771 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5775 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5777 /* Note that we explicitly do not want any alias information for this
5778 memory, so that we kill all other live memories. Otherwise we don't
5779 satisfy the full barrier semantics of the intrinsic. */
5780 mem = validize_mem (gen_rtx_MEM (mode, addr));
/* Alignment comes from the pointer expression; the special barrier
   alias set plus volatility enforce the full-barrier semantics.  */
5782 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5783 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5784 MEM_VOLATILE_P (mem) = 1;
5789 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5790 EXP is the CALL_EXPR. CODE is the rtx code
5791 that corresponds to the arithmetic or logical operation from the name;
5792 an exception here is that NOT actually means NAND. TARGET is an optional
5793 place for us to store the results; AFTER is true if this is the
5794 fetch_and_xxx form. IGNORE is true if we don't actually care about
5795 the result of the operation at all. */
5798 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5799 enum rtx_code code, bool after,
5800 rtx target, bool ignore)
5803 enum machine_mode old_mode;
5805 /* Expand the operands. */
5806 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5808 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5809 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5810 of CONST_INTs, where we know the old_mode only from the call argument. */
5811 old_mode = GET_MODE (val);
5812 if (old_mode == VOIDmode)
5813 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5814 val = convert_modes (mode, old_mode, val, 1);
/* When the result is ignored a bare atomic op suffices; otherwise
   emit the fetch-and-op form (AFTER selects op-then-fetch order).  */
5817 return expand_sync_operation (mem, val, code);
5819 return expand_sync_fetch_operation (mem, val, code, after, target);
5822 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5823 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5824 true if this is the boolean form. TARGET is a place for us to store the
5825 results; this is NOT optional if IS_BOOL is true. */
5828 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5829 bool is_bool, rtx target)
5831 rtx old_val, new_val, mem;
5832 enum machine_mode old_mode;
5834 /* Expand the operands. */
5835 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5838 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5839 mode, EXPAND_NORMAL);
5840 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5841 of CONST_INTs, where we know the old_mode only from the call argument. */
5842 old_mode = GET_MODE (old_val);
5843 if (old_mode == VOIDmode)
5844 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5845 old_val = convert_modes (mode, old_mode, old_val, 1);
/* Same promotion dance for the replacement value (arg 2); OLD_MODE is
   simply reused as scratch here.  */
5847 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5848 mode, EXPAND_NORMAL);
5849 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5850 of CONST_INTs, where we know the old_mode only from the call argument. */
5851 old_mode = GET_MODE (new_val);
5852 if (old_mode == VOIDmode)
5853 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5854 new_val = convert_modes (mode, old_mode, new_val, 1);
/* Dispatch to the boolean-result or value-result CAS expander.  */
5857 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5859 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5862 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5863 general form is actually an atomic exchange, and some targets only
5864 support a reduced form with the second argument being a constant 1.
5865 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5869 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5873 enum machine_mode old_mode;
5875 /* Expand the operands. */
5876 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5877 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5878 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5879 of CONST_INTs, where we know the old_mode only from the call argument. */
5880 old_mode = GET_MODE (val);
5881 if (old_mode == VOIDmode)
5882 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5883 val = convert_modes (mode, old_mode, val, 1);
5885 return expand_sync_lock_test_and_set (mem, val, target);
5888 /* Expand the __sync_synchronize intrinsic. */
5891 expand_builtin_synchronize (void)
5895 #ifdef HAVE_memory_barrier
/* Use the target's dedicated memory_barrier insn when available.  */
5896 if (HAVE_memory_barrier)
5898 emit_insn (gen_memory_barrier ());
5903 /* If no explicit memory barrier instruction is available, create an
5904 empty asm stmt with a memory clobber. */
/* The "memory" clobber makes this a compiler-level barrier only — it
   prevents reordering across it but emits no hardware fence.  */
5905 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5906 tree_cons (NULL, build_string (6, "memory"), NULL));
5907 ASM_VOLATILE_P (x) = 1;
5908 expand_asm_expr (x);
5911 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5914 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5916 enum insn_code icode;
5918 rtx val = const0_rtx;
5920 /* Expand the operands. */
5921 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5923 /* If there is an explicit operation in the md file, use it. */
5924 icode = sync_lock_release[mode];
5925 if (icode != CODE_FOR_nothing)
/* Force the zero into a register if the insn's operand predicate
   rejects the bare constant.  */
5927 if (!insn_data[icode].operand[1].predicate (val, mode))
5928 val = force_reg (mode, val);
5930 insn = GEN_FCN (icode) (mem, val);
5938 /* Otherwise we can implement this operation by emitting a barrier
5939 followed by a store of zero. */
5940 expand_builtin_synchronize ();
5941 emit_move_insn (mem, val);
5944 /* Expand an expression EXP that calls a built-in function,
5945 with result going to TARGET if that's convenient
5946 (and in mode MODE if that's convenient).
5947 SUBTARGET may be used as the target for computing one of EXP's operands.
5948 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): this excerpt elides a number of original source lines
   (braces, declarations, break statements, the switch header), so the
   comments below describe only the visible logic.  The overall shape
   is: handle machine-dependent builtins, fall back to library calls
   when not optimizing, short-circuit ignored pure/const calls, then
   dispatch on FCODE in one large switch whose default falls through
   to an ordinary library call.  */
5951 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5954 tree fndecl = get_callee_fndecl (exp);
5955 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5956 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
/* Machine-specific builtins are entirely the target's business.  */
5958 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5959 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5961 /* When not optimizing, generate calls to library functions for a certain
5964 && !called_as_built_in (fndecl)
5965 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5966 && fcode != BUILT_IN_ALLOCA)
5967 return expand_call (exp, target, ignore);
5969 /* The built-in function expanders test for target == const0_rtx
5970 to determine whether the function's result will be ignored. */
5972 target = const0_rtx;
5974 /* If the result of a pure or const built-in function is ignored, and
5975 none of its arguments are volatile, we can avoid expanding the
5976 built-in call and just evaluate the arguments for side-effects. */
5977 if (target == const0_rtx
5978 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5980 bool volatilep = false;
5982 call_expr_arg_iterator iter;
5984 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5985 if (TREE_THIS_VOLATILE (arg))
5993 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5994 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Dispatch on the builtin's function code.  Each case typically sets
   TARGET and returns it when the inline expansion succeeded; a null
   TARGET means "emit a normal library call" via the fall-through at
   the bottom.  */
6001 CASE_FLT_FN (BUILT_IN_FABS):
6002 target = expand_builtin_fabs (exp, target, subtarget);
6007 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6008 target = expand_builtin_copysign (exp, target, subtarget);
6013 /* Just do a normal library call if we were unable to fold
6015 CASE_FLT_FN (BUILT_IN_CABS):
6018 CASE_FLT_FN (BUILT_IN_EXP):
6019 CASE_FLT_FN (BUILT_IN_EXP10):
6020 CASE_FLT_FN (BUILT_IN_POW10):
6021 CASE_FLT_FN (BUILT_IN_EXP2):
6022 CASE_FLT_FN (BUILT_IN_EXPM1):
6023 CASE_FLT_FN (BUILT_IN_LOGB):
6024 CASE_FLT_FN (BUILT_IN_LOG):
6025 CASE_FLT_FN (BUILT_IN_LOG10):
6026 CASE_FLT_FN (BUILT_IN_LOG2):
6027 CASE_FLT_FN (BUILT_IN_LOG1P):
6028 CASE_FLT_FN (BUILT_IN_TAN):
6029 CASE_FLT_FN (BUILT_IN_ASIN):
6030 CASE_FLT_FN (BUILT_IN_ACOS):
6031 CASE_FLT_FN (BUILT_IN_ATAN):
6032 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6033 because of possible accuracy problems. */
6034 if (! flag_unsafe_math_optimizations)
6036 CASE_FLT_FN (BUILT_IN_SQRT):
6037 CASE_FLT_FN (BUILT_IN_FLOOR):
6038 CASE_FLT_FN (BUILT_IN_CEIL):
6039 CASE_FLT_FN (BUILT_IN_TRUNC):
6040 CASE_FLT_FN (BUILT_IN_ROUND):
6041 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6042 CASE_FLT_FN (BUILT_IN_RINT):
6043 target = expand_builtin_mathfn (exp, target, subtarget);
6048 CASE_FLT_FN (BUILT_IN_ILOGB):
6049 if (! flag_unsafe_math_optimizations)
6051 CASE_FLT_FN (BUILT_IN_ISINF):
6052 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6057 CASE_FLT_FN (BUILT_IN_LCEIL):
6058 CASE_FLT_FN (BUILT_IN_LLCEIL):
6059 CASE_FLT_FN (BUILT_IN_LFLOOR):
6060 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6061 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6066 CASE_FLT_FN (BUILT_IN_LRINT):
6067 CASE_FLT_FN (BUILT_IN_LLRINT):
6068 CASE_FLT_FN (BUILT_IN_LROUND):
6069 CASE_FLT_FN (BUILT_IN_LLROUND):
6070 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6075 CASE_FLT_FN (BUILT_IN_POW):
6076 target = expand_builtin_pow (exp, target, subtarget);
6081 CASE_FLT_FN (BUILT_IN_POWI):
6082 target = expand_builtin_powi (exp, target, subtarget);
6087 CASE_FLT_FN (BUILT_IN_ATAN2):
6088 CASE_FLT_FN (BUILT_IN_LDEXP):
6089 CASE_FLT_FN (BUILT_IN_SCALB):
6090 CASE_FLT_FN (BUILT_IN_SCALBN):
6091 CASE_FLT_FN (BUILT_IN_SCALBLN):
6092 if (! flag_unsafe_math_optimizations)
6095 CASE_FLT_FN (BUILT_IN_FMOD):
6096 CASE_FLT_FN (BUILT_IN_REMAINDER):
6097 CASE_FLT_FN (BUILT_IN_DREM):
6098 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6103 CASE_FLT_FN (BUILT_IN_CEXPI):
6104 target = expand_builtin_cexpi (exp, target, subtarget);
/* cexpi expansion must always succeed, hence the assert.  */
6105 gcc_assert (target);
6108 CASE_FLT_FN (BUILT_IN_SIN):
6109 CASE_FLT_FN (BUILT_IN_COS):
6110 if (! flag_unsafe_math_optimizations)
6112 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6117 CASE_FLT_FN (BUILT_IN_SINCOS):
6118 if (! flag_unsafe_math_optimizations)
6120 target = expand_builtin_sincos (exp);
6125 case BUILT_IN_APPLY_ARGS:
6126 return expand_builtin_apply_args ();
6128 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6129 FUNCTION with a copy of the parameters described by
6130 ARGUMENTS, and ARGSIZE. It returns a block of memory
6131 allocated on the stack into which is stored all the registers
6132 that might possibly be used for returning the result of a
6133 function. ARGUMENTS is the value returned by
6134 __builtin_apply_args. ARGSIZE is the number of bytes of
6135 arguments that must be copied. ??? How should this value be
6136 computed? We'll also need a safe worst case value for varargs
6138 case BUILT_IN_APPLY:
6139 if (!validate_arglist (exp, POINTER_TYPE,
6140 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6141 && !validate_arglist (exp, REFERENCE_TYPE,
6142 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6148 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6149 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6150 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6152 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6155 /* __builtin_return (RESULT) causes the function to return the
6156 value described by RESULT. RESULT is address of the block of
6157 memory returned by __builtin_apply. */
6158 case BUILT_IN_RETURN:
6159 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6160 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6163 case BUILT_IN_SAVEREGS:
6164 return expand_builtin_saveregs ();
6166 case BUILT_IN_ARGS_INFO:
6167 return expand_builtin_args_info (exp);
6169 /* Return the address of the first anonymous stack arg. */
6170 case BUILT_IN_NEXT_ARG:
6171 if (fold_builtin_next_arg (exp, false))
6173 return expand_builtin_next_arg ();
6175 case BUILT_IN_CLASSIFY_TYPE:
6176 return expand_builtin_classify_type (exp);
6178 case BUILT_IN_CONSTANT_P:
6181 case BUILT_IN_FRAME_ADDRESS:
6182 case BUILT_IN_RETURN_ADDRESS:
6183 return expand_builtin_frame_address (fndecl, exp);
6185 /* Returns the address of the area where the structure is returned.
6187 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6188 if (call_expr_nargs (exp) != 0
6189 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6190 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6193 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6195 case BUILT_IN_ALLOCA:
6196 target = expand_builtin_alloca (exp, target);
6201 case BUILT_IN_STACK_SAVE:
6202 return expand_stack_save ();
6204 case BUILT_IN_STACK_RESTORE:
6205 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6208 case BUILT_IN_BSWAP32:
6209 case BUILT_IN_BSWAP64:
6210 target = expand_builtin_bswap (exp, target, subtarget);
6216 CASE_INT_FN (BUILT_IN_FFS):
6217 case BUILT_IN_FFSIMAX:
6218 target = expand_builtin_unop (target_mode, exp, target,
6219 subtarget, ffs_optab);
6224 CASE_INT_FN (BUILT_IN_CLZ):
6225 case BUILT_IN_CLZIMAX:
6226 target = expand_builtin_unop (target_mode, exp, target,
6227 subtarget, clz_optab);
6232 CASE_INT_FN (BUILT_IN_CTZ):
6233 case BUILT_IN_CTZIMAX:
6234 target = expand_builtin_unop (target_mode, exp, target,
6235 subtarget, ctz_optab);
6240 CASE_INT_FN (BUILT_IN_POPCOUNT):
6241 case BUILT_IN_POPCOUNTIMAX:
6242 target = expand_builtin_unop (target_mode, exp, target,
6243 subtarget, popcount_optab);
6248 CASE_INT_FN (BUILT_IN_PARITY):
6249 case BUILT_IN_PARITYIMAX:
6250 target = expand_builtin_unop (target_mode, exp, target,
6251 subtarget, parity_optab);
6256 case BUILT_IN_STRLEN:
6257 target = expand_builtin_strlen (exp, target, target_mode);
6262 case BUILT_IN_STRCPY:
6263 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6268 case BUILT_IN_STRNCPY:
6269 target = expand_builtin_strncpy (exp, target, mode);
6274 case BUILT_IN_STPCPY:
6275 target = expand_builtin_stpcpy (exp, target, mode);
6280 case BUILT_IN_STRCAT:
6281 target = expand_builtin_strcat (fndecl, exp, target, mode);
6286 case BUILT_IN_STRNCAT:
6287 target = expand_builtin_strncat (exp, target, mode);
6292 case BUILT_IN_STRSPN:
6293 target = expand_builtin_strspn (exp, target, mode);
6298 case BUILT_IN_STRCSPN:
6299 target = expand_builtin_strcspn (exp, target, mode);
6304 case BUILT_IN_STRSTR:
6305 target = expand_builtin_strstr (exp, target, mode);
6310 case BUILT_IN_STRPBRK:
6311 target = expand_builtin_strpbrk (exp, target, mode);
6316 case BUILT_IN_INDEX:
6317 case BUILT_IN_STRCHR:
6318 target = expand_builtin_strchr (exp, target, mode);
6323 case BUILT_IN_RINDEX:
6324 case BUILT_IN_STRRCHR:
6325 target = expand_builtin_strrchr (exp, target, mode);
6330 case BUILT_IN_MEMCPY:
6331 target = expand_builtin_memcpy (exp, target, mode);
6336 case BUILT_IN_MEMPCPY:
6337 target = expand_builtin_mempcpy (exp, target, mode);
6342 case BUILT_IN_MEMMOVE:
6343 target = expand_builtin_memmove (exp, target, mode, ignore);
6348 case BUILT_IN_BCOPY:
6349 target = expand_builtin_bcopy (exp, ignore);
6354 case BUILT_IN_MEMSET:
6355 target = expand_builtin_memset (exp, target, mode);
6360 case BUILT_IN_BZERO:
6361 target = expand_builtin_bzero (exp);
6366 case BUILT_IN_STRCMP:
6367 target = expand_builtin_strcmp (exp, target, mode);
6372 case BUILT_IN_STRNCMP:
6373 target = expand_builtin_strncmp (exp, target, mode);
6378 case BUILT_IN_MEMCHR:
6379 target = expand_builtin_memchr (exp, target, mode);
6385 case BUILT_IN_MEMCMP:
6386 target = expand_builtin_memcmp (exp, target, mode);
6391 case BUILT_IN_SETJMP:
6392 /* This should have been lowered to the builtins below. */
6395 case BUILT_IN_SETJMP_SETUP:
6396 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6397 and the receiver label. */
6398 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6400 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6401 VOIDmode, EXPAND_NORMAL);
6402 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6403 rtx label_r = label_rtx (label);
6405 /* This is copied from the handling of non-local gotos. */
6406 expand_builtin_setjmp_setup (buf_addr, label_r);
6407 nonlocal_goto_handler_labels
6408 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6409 nonlocal_goto_handler_labels);
6410 /* ??? Do not let expand_label treat us as such since we would
6411 not want to be both on the list of non-local labels and on
6412 the list of forced labels. */
6413 FORCED_LABEL (label) = 0;
6418 case BUILT_IN_SETJMP_DISPATCHER:
6419 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6420 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6422 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6423 rtx label_r = label_rtx (label);
6425 /* Remove the dispatcher label from the list of non-local labels
6426 since the receiver labels have been added to it above. */
6427 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6432 case BUILT_IN_SETJMP_RECEIVER:
6433 /* __builtin_setjmp_receiver is passed the receiver label. */
6434 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6436 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6437 rtx label_r = label_rtx (label);
6439 expand_builtin_setjmp_receiver (label_r);
6444 /* __builtin_longjmp is passed a pointer to an array of five words.
6445 It's similar to the C library longjmp function but works with
6446 __builtin_setjmp above. */
6447 case BUILT_IN_LONGJMP:
6448 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6450 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6451 VOIDmode, EXPAND_NORMAL);
6452 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6454 if (value != const1_rtx)
6456 error ("%<__builtin_longjmp%> second argument must be 1");
6460 expand_builtin_longjmp (buf_addr, value);
6465 case BUILT_IN_NONLOCAL_GOTO:
6466 target = expand_builtin_nonlocal_goto (exp);
6471 /* This updates the setjmp buffer that is its argument with the value
6472 of the current stack pointer. */
6473 case BUILT_IN_UPDATE_SETJMP_BUF:
6474 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6477 = expand_normal (CALL_EXPR_ARG (exp, 0));
6479 expand_builtin_update_setjmp_buf (buf_addr);
6485 expand_builtin_trap ();
6488 case BUILT_IN_PRINTF:
6489 target = expand_builtin_printf (exp, target, mode, false);
6494 case BUILT_IN_PRINTF_UNLOCKED:
6495 target = expand_builtin_printf (exp, target, mode, true);
6500 case BUILT_IN_FPUTS:
6501 target = expand_builtin_fputs (exp, target, false);
6505 case BUILT_IN_FPUTS_UNLOCKED:
6506 target = expand_builtin_fputs (exp, target, true);
6511 case BUILT_IN_FPRINTF:
6512 target = expand_builtin_fprintf (exp, target, mode, false);
6517 case BUILT_IN_FPRINTF_UNLOCKED:
6518 target = expand_builtin_fprintf (exp, target, mode, true);
6523 case BUILT_IN_SPRINTF:
6524 target = expand_builtin_sprintf (exp, target, mode);
6529 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6530 case BUILT_IN_SIGNBITD32:
6531 case BUILT_IN_SIGNBITD64:
6532 case BUILT_IN_SIGNBITD128:
6533 target = expand_builtin_signbit (exp, target);
6538 /* Various hooks for the DWARF 2 __throw routine. */
6539 case BUILT_IN_UNWIND_INIT:
6540 expand_builtin_unwind_init ();
6542 case BUILT_IN_DWARF_CFA:
6543 return virtual_cfa_rtx;
6544 #ifdef DWARF2_UNWIND_INFO
6545 case BUILT_IN_DWARF_SP_COLUMN:
6546 return expand_builtin_dwarf_sp_column ();
6547 case BUILT_IN_INIT_DWARF_REG_SIZES:
6548 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6551 case BUILT_IN_FROB_RETURN_ADDR:
6552 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6553 case BUILT_IN_EXTRACT_RETURN_ADDR:
6554 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6555 case BUILT_IN_EH_RETURN:
6556 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6557 CALL_EXPR_ARG (exp, 1));
6559 #ifdef EH_RETURN_DATA_REGNO
6560 case BUILT_IN_EH_RETURN_DATA_REGNO:
6561 return expand_builtin_eh_return_data_regno (exp);
6563 case BUILT_IN_EXTEND_POINTER:
6564 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6566 case BUILT_IN_VA_START:
6567 case BUILT_IN_STDARG_START:
6568 return expand_builtin_va_start (exp);
6569 case BUILT_IN_VA_END:
6570 return expand_builtin_va_end (exp);
6571 case BUILT_IN_VA_COPY:
6572 return expand_builtin_va_copy (exp);
6573 case BUILT_IN_EXPECT:
6574 return expand_builtin_expect (exp, target);
6575 case BUILT_IN_PREFETCH:
6576 expand_builtin_prefetch (exp);
6579 case BUILT_IN_PROFILE_FUNC_ENTER:
6580 return expand_builtin_profile_func (false);
6581 case BUILT_IN_PROFILE_FUNC_EXIT:
6582 return expand_builtin_profile_func (true);
6584 case BUILT_IN_INIT_TRAMPOLINE:
6585 return expand_builtin_init_trampoline (exp);
6586 case BUILT_IN_ADJUST_TRAMPOLINE:
6587 return expand_builtin_adjust_trampoline (exp);
6590 case BUILT_IN_EXECL:
6591 case BUILT_IN_EXECV:
6592 case BUILT_IN_EXECLP:
6593 case BUILT_IN_EXECLE:
6594 case BUILT_IN_EXECVP:
6595 case BUILT_IN_EXECVE:
6596 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
/* The __sync_* builtins below all follow the same pattern: subtract
   the FOO_1 code from FCODE to recover log2 of the operand size,
   reconstitute the machine mode, and dispatch.  Note NOT encodes NAND
   per the expander's convention.  */
6601 case BUILT_IN_FETCH_AND_ADD_1:
6602 case BUILT_IN_FETCH_AND_ADD_2:
6603 case BUILT_IN_FETCH_AND_ADD_4:
6604 case BUILT_IN_FETCH_AND_ADD_8:
6605 case BUILT_IN_FETCH_AND_ADD_16:
6606 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6607 target = expand_builtin_sync_operation (mode, exp, PLUS,
6608 false, target, ignore);
6613 case BUILT_IN_FETCH_AND_SUB_1:
6614 case BUILT_IN_FETCH_AND_SUB_2:
6615 case BUILT_IN_FETCH_AND_SUB_4:
6616 case BUILT_IN_FETCH_AND_SUB_8:
6617 case BUILT_IN_FETCH_AND_SUB_16:
6618 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6619 target = expand_builtin_sync_operation (mode, exp, MINUS,
6620 false, target, ignore);
6625 case BUILT_IN_FETCH_AND_OR_1:
6626 case BUILT_IN_FETCH_AND_OR_2:
6627 case BUILT_IN_FETCH_AND_OR_4:
6628 case BUILT_IN_FETCH_AND_OR_8:
6629 case BUILT_IN_FETCH_AND_OR_16:
6630 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6631 target = expand_builtin_sync_operation (mode, exp, IOR,
6632 false, target, ignore);
6637 case BUILT_IN_FETCH_AND_AND_1:
6638 case BUILT_IN_FETCH_AND_AND_2:
6639 case BUILT_IN_FETCH_AND_AND_4:
6640 case BUILT_IN_FETCH_AND_AND_8:
6641 case BUILT_IN_FETCH_AND_AND_16:
6642 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6643 target = expand_builtin_sync_operation (mode, exp, AND,
6644 false, target, ignore);
6649 case BUILT_IN_FETCH_AND_XOR_1:
6650 case BUILT_IN_FETCH_AND_XOR_2:
6651 case BUILT_IN_FETCH_AND_XOR_4:
6652 case BUILT_IN_FETCH_AND_XOR_8:
6653 case BUILT_IN_FETCH_AND_XOR_16:
6654 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6655 target = expand_builtin_sync_operation (mode, exp, XOR,
6656 false, target, ignore);
6661 case BUILT_IN_FETCH_AND_NAND_1:
6662 case BUILT_IN_FETCH_AND_NAND_2:
6663 case BUILT_IN_FETCH_AND_NAND_4:
6664 case BUILT_IN_FETCH_AND_NAND_8:
6665 case BUILT_IN_FETCH_AND_NAND_16:
6666 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6667 target = expand_builtin_sync_operation (mode, exp, NOT,
6668 false, target, ignore);
6673 case BUILT_IN_ADD_AND_FETCH_1:
6674 case BUILT_IN_ADD_AND_FETCH_2:
6675 case BUILT_IN_ADD_AND_FETCH_4:
6676 case BUILT_IN_ADD_AND_FETCH_8:
6677 case BUILT_IN_ADD_AND_FETCH_16:
6678 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6679 target = expand_builtin_sync_operation (mode, exp, PLUS,
6680 true, target, ignore);
6685 case BUILT_IN_SUB_AND_FETCH_1:
6686 case BUILT_IN_SUB_AND_FETCH_2:
6687 case BUILT_IN_SUB_AND_FETCH_4:
6688 case BUILT_IN_SUB_AND_FETCH_8:
6689 case BUILT_IN_SUB_AND_FETCH_16:
6690 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6691 target = expand_builtin_sync_operation (mode, exp, MINUS,
6692 true, target, ignore);
6697 case BUILT_IN_OR_AND_FETCH_1:
6698 case BUILT_IN_OR_AND_FETCH_2:
6699 case BUILT_IN_OR_AND_FETCH_4:
6700 case BUILT_IN_OR_AND_FETCH_8:
6701 case BUILT_IN_OR_AND_FETCH_16:
6702 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6703 target = expand_builtin_sync_operation (mode, exp, IOR,
6704 true, target, ignore);
6709 case BUILT_IN_AND_AND_FETCH_1:
6710 case BUILT_IN_AND_AND_FETCH_2:
6711 case BUILT_IN_AND_AND_FETCH_4:
6712 case BUILT_IN_AND_AND_FETCH_8:
6713 case BUILT_IN_AND_AND_FETCH_16:
6714 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6715 target = expand_builtin_sync_operation (mode, exp, AND,
6716 true, target, ignore);
6721 case BUILT_IN_XOR_AND_FETCH_1:
6722 case BUILT_IN_XOR_AND_FETCH_2:
6723 case BUILT_IN_XOR_AND_FETCH_4:
6724 case BUILT_IN_XOR_AND_FETCH_8:
6725 case BUILT_IN_XOR_AND_FETCH_16:
6726 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6727 target = expand_builtin_sync_operation (mode, exp, XOR,
6728 true, target, ignore);
6733 case BUILT_IN_NAND_AND_FETCH_1:
6734 case BUILT_IN_NAND_AND_FETCH_2:
6735 case BUILT_IN_NAND_AND_FETCH_4:
6736 case BUILT_IN_NAND_AND_FETCH_8:
6737 case BUILT_IN_NAND_AND_FETCH_16:
6738 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6739 target = expand_builtin_sync_operation (mode, exp, NOT,
6740 true, target, ignore);
6745 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6746 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6747 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6748 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6749 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
/* The boolean CAS expander requires a register TARGET (see the
   expand_builtin_compare_and_swap comment); provide one, in the
   boolean mode when MODE was not supplied.  */
6750 if (mode == VOIDmode)
6751 mode = TYPE_MODE (boolean_type_node);
6752 if (!target || !register_operand (target, mode))
6753 target = gen_reg_rtx (mode);
6755 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6756 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6761 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6762 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6763 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6764 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6765 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6766 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6767 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6772 case BUILT_IN_LOCK_TEST_AND_SET_1:
6773 case BUILT_IN_LOCK_TEST_AND_SET_2:
6774 case BUILT_IN_LOCK_TEST_AND_SET_4:
6775 case BUILT_IN_LOCK_TEST_AND_SET_8:
6776 case BUILT_IN_LOCK_TEST_AND_SET_16:
6777 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6778 target = expand_builtin_lock_test_and_set (mode, exp, target);
6783 case BUILT_IN_LOCK_RELEASE_1:
6784 case BUILT_IN_LOCK_RELEASE_2:
6785 case BUILT_IN_LOCK_RELEASE_4:
6786 case BUILT_IN_LOCK_RELEASE_8:
6787 case BUILT_IN_LOCK_RELEASE_16:
6788 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6789 expand_builtin_lock_release (mode, exp);
6792 case BUILT_IN_SYNCHRONIZE:
6793 expand_builtin_synchronize ();
6796 case BUILT_IN_OBJECT_SIZE:
6797 return expand_builtin_object_size (exp);
6799 case BUILT_IN_MEMCPY_CHK:
6800 case BUILT_IN_MEMPCPY_CHK:
6801 case BUILT_IN_MEMMOVE_CHK:
6802 case BUILT_IN_MEMSET_CHK:
6803 target = expand_builtin_memory_chk (exp, target, mode, fcode);
/* The remaining _chk variants are not expanded inline here; we only
   emit (possible) overflow warnings and fall through to a libcall.  */
6808 case BUILT_IN_STRCPY_CHK:
6809 case BUILT_IN_STPCPY_CHK:
6810 case BUILT_IN_STRNCPY_CHK:
6811 case BUILT_IN_STRCAT_CHK:
6812 case BUILT_IN_STRNCAT_CHK:
6813 case BUILT_IN_SNPRINTF_CHK:
6814 case BUILT_IN_VSNPRINTF_CHK:
6815 maybe_emit_chk_warning (exp, fcode);
6818 case BUILT_IN_SPRINTF_CHK:
6819 case BUILT_IN_VSPRINTF_CHK:
6820 maybe_emit_sprintf_chk_warning (exp, fcode);
6823 default: /* just do library call, if unknown builtin */
6827 /* The switch statement above can drop through to cause the function
6828 to be called normally. */
6829 return expand_call (exp, target, ignore);
6832 /* Determine whether a tree node represents a call to a built-in
6833 function. If the tree T is a call to a built-in function with
6834 the right number of arguments of the appropriate types, return
6835 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6836 Otherwise the return value is END_BUILTINS. */
/* Classify T: if it is a call to a non-machine-dependent built-in function
   and every actual argument's type class matches the corresponding
   prototype parameter, return the builtin's DECL_FUNCTION_CODE; otherwise
   return END_BUILTINS.
   NOTE(review): this dump elides some original lines (braces, blank
   lines), so the block structure below is partly implicit.  */
6838 enum built_in_function
6839 builtin_mathfn_code (tree t)
6841 tree fndecl, arg, parmlist;
6842 tree argtype, parmtype;
6843 call_expr_arg_iterator iter;
/* Only a direct call through an ADDR_EXPR can name a builtin decl.  */
6845 if (TREE_CODE (t) != CALL_EXPR
6846 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6847 return END_BUILTINS;
6849 fndecl = get_callee_fndecl (t);
/* Reject non-builtins and machine-dependent builtins outright.  */
6850 if (fndecl == NULL_TREE
6851 || TREE_CODE (fndecl) != FUNCTION_DECL
6852 || ! DECL_BUILT_IN (fndecl)
6853 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6854 return END_BUILTINS;
/* Walk the prototype's parameter types in parallel with the call's
   actual arguments.  */
6856 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6857 init_call_expr_arg_iterator (t, &iter);
6858 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6860 /* If a function doesn't take a variable number of arguments,
6861 the last element in the list will have type `void'. */
6862 parmtype = TREE_VALUE (parmlist);
6863 if (VOID_TYPE_P (parmtype))
/* Prototype exhausted: any leftover actual argument is a mismatch.  */
6865 if (more_call_expr_args_p (&iter))
6866 return END_BUILTINS;
6867 return DECL_FUNCTION_CODE (fndecl);
/* Too few actual arguments for the prototype.  */
6870 if (! more_call_expr_args_p (&iter))
6871 return END_BUILTINS;
6873 arg = next_call_expr_arg (&iter);
6874 argtype = TREE_TYPE (arg);
/* Require the argument's type class (real float / complex float /
   pointer / integral) to agree with the parameter's.  */
6876 if (SCALAR_FLOAT_TYPE_P (parmtype))
6878 if (! SCALAR_FLOAT_TYPE_P (argtype))
6879 return END_BUILTINS;
6881 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6883 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6884 return END_BUILTINS;
6886 else if (POINTER_TYPE_P (parmtype))
6888 if (! POINTER_TYPE_P (argtype))
6889 return END_BUILTINS;
6891 else if (INTEGRAL_TYPE_P (parmtype))
6893 if (! INTEGRAL_TYPE_P (argtype))
6894 return END_BUILTINS;
/* Any other parameter type class is treated as a mismatch.  */
6897 return END_BUILTINS;
6900 /* Variable-length argument list. */
6901 return DECL_FUNCTION_CODE (fndecl);
6904 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6905 evaluate to a constant. */
/* Fold __builtin_constant_p (ARG): return integer_one_node when ARG is
   provably a compile-time constant, integer_zero_node when it provably
   is not (or when folding an initializer forces a definite answer).
   NOTE(review): the elided tail presumably returns NULL_TREE for the
   "don't know yet" case -- confirm against the full source.  */
6908 fold_builtin_constant_p (tree arg)
6910 /* We return 1 for a numeric type that's known to be a constant
6911 value at compile-time or for an aggregate type that's a
6912 literal constant. */
6915 /* If we know this is a constant, emit the constant of one. */
6916 if (CONSTANT_CLASS_P (arg)
6917 || (TREE_CODE (arg) == CONSTRUCTOR
6918 && TREE_CONSTANT (arg)))
6919 return integer_one_node;
/* The address of a string literal (or of its element 0) is constant.  */
6920 if (TREE_CODE (arg) == ADDR_EXPR)
6922 tree op = TREE_OPERAND (arg, 0);
6923 if (TREE_CODE (op) == STRING_CST
6924 || (TREE_CODE (op) == ARRAY_REF
6925 && integer_zerop (TREE_OPERAND (op, 1))
6926 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6927 return integer_one_node;
6930 /* If this expression has side effects, show we don't know it to be a
6931 constant. Likewise if it's a pointer or aggregate type since in
6932 those case we only want literals, since those are only optimized
6933 when generating RTL, not later.
6934 And finally, if we are compiling an initializer, not code, we
6935 need to return a definite result now; there's not going to be any
6936 more optimization done. */
6937 if (TREE_SIDE_EFFECTS (arg)
6938 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6939 || POINTER_TYPE_P (TREE_TYPE (arg))
6941 || folding_initializer)
6942 return integer_zero_node;
6947 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
6948 comparison against the argument will fold to a constant. In practice,
6949 this means a true constant or the address of a non-weak symbol. */
/* Fold __builtin_expect (ARG, ...): when ARG is invariant and not the
   address of a weak declaration, the call folds to ARG itself (per the
   trailing comment below).  Weak decls are excluded because their
   address is not a guaranteed compile-time constant.
   NOTE(review): the lines declaring `inner` and the early returns are
   elided in this dump -- confirm against the full source.  */
6952 fold_builtin_expect (tree arg)
6956 /* If the argument isn't invariant, then there's nothing we can do. */
6957 if (!TREE_INVARIANT (arg))
6960 /* If we're looking at an address of a weak decl, then do not fold. */
6963 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip nested COMPONENT_REF / ARRAY_REF wrappers to reach the decl.  */
6967 inner = TREE_OPERAND (inner, 0);
6969 while (TREE_CODE (inner) == COMPONENT_REF
6970 || TREE_CODE (inner) == ARRAY_REF);
6971 if (DECL_P (inner) && DECL_WEAK (inner))
6975 /* Otherwise, ARG already has the proper type for the return value. */
6979 /* Fold a call to __builtin_classify_type with argument ARG. */
/* Fold __builtin_classify_type (ARG) to an integer constant naming the
   type class of ARG; with no argument (ARG == NULL, per the elided
   condition) the result is no_type_class.  */
6982 fold_builtin_classify_type (tree arg)
6985 return build_int_cst (NULL_TREE, no_type_class);
6987 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6990 /* Fold a call to __builtin_strlen with argument ARG. */
/* Fold __builtin_strlen (ARG): if c_strlen can compute the length of
   the string ARG points to, return it converted to size_t; the elided
   else-path presumably returns NULL_TREE -- confirm.  */
6993 fold_builtin_strlen (tree arg)
6995 if (!validate_arg (arg, POINTER_TYPE))
6999 tree len = c_strlen (arg, 0);
7003 /* Convert from the internal "sizetype" type to "size_t". */
7005 len = fold_convert (size_type_node, len);
7013 /* Fold a call to __builtin_inf or __builtin_huge_val. */
/* Fold __builtin_inf / __builtin_huge_val to a REAL_CST of TYPE.  When
   WARN is set and TYPE's mode has no infinities, pedwarn as C99
   requires (see the comment below); the elided line between 7028 and
   7031 presumably sets REAL to the infinity value.  */
7016 fold_builtin_inf (tree type, int warn)
7018 REAL_VALUE_TYPE real;
7020 /* __builtin_inff is intended to be usable to define INFINITY on all
7021 targets. If an infinity is not available, INFINITY expands "to a
7022 positive constant of type float that overflows at translation
7023 time", footnote "In this case, using INFINITY will violate the
7024 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7025 Thus we pedwarn to ensure this constraint violation is
7027 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7028 pedwarn ("target format does not support infinity");
7031 return build_real (type, real);
7034 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
/* Fold __builtin_nan / __builtin_nans (ARG): ARG must be a pointer to a
   string literal naming the NaN payload; QUIET selects quiet vs.
   signalling NaN for real_nan.  Failure paths (non-string ARG, payload
   real_nan rejects) are elided but presumably return NULL_TREE.  */
7037 fold_builtin_nan (tree arg, tree type, int quiet)
7039 REAL_VALUE_TYPE real;
7042 if (!validate_arg (arg, POINTER_TYPE))
7044 str = c_getstr (arg);
7048 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7051 return build_real (type, real);
7054 /* Return true if the floating point expression T has an integer value.
7055 We also allow +Inf, -Inf and NaN to be considered integer values. */
/* Return nonzero if the floating-point expression T provably has an
   integer value (+/-Inf and NaN count as integer-valued, per the
   comment above).  Recurses structurally through the expression and
   through integer-valued math builtins.
   NOTE(review): many case labels and `break`s are elided in this dump;
   structure below is partly implicit.  */
7058 integer_valued_real_p (tree t)
7060 switch (TREE_CODE (t))
7067 case NON_LVALUE_EXPR:
7068 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* For a two-operand node whose value is operand 1 (e.g. COMPOUND_EXPR,
   per the elided case label -- confirm), classify that operand.  */
7073 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
/* Binary arithmetic: integer-valued iff both operands are.  */
7080 return integer_valued_real_p (TREE_OPERAND (t, 0))
7081 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* Conditional: integer-valued iff both arms are.  */
7084 return integer_valued_real_p (TREE_OPERAND (t, 1))
7085 && integer_valued_real_p (TREE_OPERAND (t, 2));
/* REAL_CST: ask the real-number layer directly.  */
7088 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7092 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7093 if (TREE_CODE (type) == INTEGER_TYPE)
7095 if (TREE_CODE (type) == REAL_TYPE)
7096 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Calls: rounding builtins always yield integers; fmin/fmax do when
   both arguments do.  */
7101 switch (builtin_mathfn_code (t))
7103 CASE_FLT_FN (BUILT_IN_CEIL):
7104 CASE_FLT_FN (BUILT_IN_FLOOR):
7105 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7106 CASE_FLT_FN (BUILT_IN_RINT):
7107 CASE_FLT_FN (BUILT_IN_ROUND):
7108 CASE_FLT_FN (BUILT_IN_TRUNC):
7111 CASE_FLT_FN (BUILT_IN_FMIN):
7112 CASE_FLT_FN (BUILT_IN_FMAX):
7113 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7114 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7127 /* FNDECL is assumed to be a builtin where truncation can be propagated
7128 across (for instance floor((double)f) == (double)floorf (f).
7129 Do the transformation for a call with argument ARG. */
/* FNDECL is a rounding builtin through which float truncation can be
   propagated, e.g. floor((double)f) == (double)floorf(f).  Fold the
   call FNDECL (ARG): drop idempotent double rounding, drop rounding of
   already-integer values (when errno need not be set), and narrow the
   call to the argument's unextended float type when possible.  */
7132 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7134 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7136 if (!validate_arg (arg, REAL_TYPE))
7139 /* Integer rounding functions are idempotent. */
7140 if (fcode == builtin_mathfn_code (arg))
7143 /* If argument is already integer valued, and we don't need to worry
7144 about setting errno, there's no need to perform rounding. */
7145 if (! flag_errno_math && integer_valued_real_p (arg))
/* strip_float_extensions peels (double)f-style widenings so we can
   call the narrower variant of the same builtin.  */
7150 tree arg0 = strip_float_extensions (arg);
7151 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7152 tree newtype = TREE_TYPE (arg0);
/* Only narrow when a genuinely narrower type and a matching builtin
   for it both exist.  */
7155 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7156 && (decl = mathfn_built_in (newtype, fcode)))
7157 return fold_convert (ftype,
7158 build_call_expr (decl, 1,
7159 fold_convert (newtype, arg0)));
7164 /* FNDECL is assumed to be builtin which can narrow the FP type of
7165 the argument, for instance lround((double)f) -> lroundf (f).
7166 Do the transformation for a call with argument ARG. */
/* FNDECL is a float-to-integer builtin whose FP argument type can be
   narrowed, e.g. lround((double)f) -> lroundf(f).  Also canonicalizes
   the ll* variants to the l* variants when long long and long have the
   same precision (LP64).  Returns the folded call, or (per the elided
   tail) presumably NULL_TREE when nothing applies.  */
7169 fold_fixed_mathfn (tree fndecl, tree arg)
7171 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7173 if (!validate_arg (arg, REAL_TYPE))
7176 /* If argument is already integer valued, and we don't need to worry
7177 about setting errno, there's no need to perform rounding. */
7178 if (! flag_errno_math && integer_valued_real_p (arg))
7179 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Narrow e.g. lround((double)f) to lroundf (f) when a builtin for the
   narrower float type exists.  */
7183 tree ftype = TREE_TYPE (arg);
7184 tree arg0 = strip_float_extensions (arg);
7185 tree newtype = TREE_TYPE (arg0);
7188 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7189 && (decl = mathfn_built_in (newtype, fcode)))
7190 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7193 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7194 sizeof (long long) == sizeof (long). */
7195 if (TYPE_PRECISION (long_long_integer_type_node)
7196 == TYPE_PRECISION (long_integer_type_node))
7198 tree newfn = NULL_TREE;
/* The switch head (on fcode, presumably -- elided here) selects the
   long counterpart of each long-long rounding builtin.  */
7201 CASE_FLT_FN (BUILT_IN_LLCEIL):
7202 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7205 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7206 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7209 CASE_FLT_FN (BUILT_IN_LLROUND):
7210 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7213 CASE_FLT_FN (BUILT_IN_LLRINT):
7214 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Build the l* call and convert back to the ll* return type.  */
7223 tree newcall = build_call_expr(newfn, 1, arg);
7224 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7231 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7232 return type. Return NULL_TREE if no simplification can be made. */
/* Fold cabs/cabsf/cabsl (ARG) of complex-float ARG to return TYPE:
   constant-fold via MPFR, simplify zero real/imaginary parts to fabs,
   cabs(x+xi) to fabs(x)*sqrt(2), strip negation/conjugation, and (when
   not optimizing for size) expand to sqrt(r*r + i*i).  */
7235 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7239 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7240 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7243 /* Calculate the result when the argument is a constant. */
7244 if (TREE_CODE (arg) == COMPLEX_CST
7245 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7249 if (TREE_CODE (arg) == COMPLEX_EXPR)
7251 tree real = TREE_OPERAND (arg, 0);
7252 tree imag = TREE_OPERAND (arg, 1);
7254 /* If either part is zero, cabs is fabs of the other. */
7255 if (real_zerop (real))
7256 return fold_build1 (ABS_EXPR, type, imag);
7257 if (real_zerop (imag))
7258 return fold_build1 (ABS_EXPR, type, real);
7260 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7261 if (flag_unsafe_math_optimizations
7262 && operand_equal_p (real, imag, OEP_PURE_SAME))
/* dconstsqrt2 truncated to TYPE's mode so the constant is exact for
   the target format.  */
7264 const REAL_VALUE_TYPE sqrt2_trunc
7265 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7267 return fold_build2 (MULT_EXPR, type,
7268 fold_build1 (ABS_EXPR, type, real),
7269 build_real (type, sqrt2_trunc));
7273 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7274 if (TREE_CODE (arg) == NEGATE_EXPR
7275 || TREE_CODE (arg) == CONJ_EXPR)
7276 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7278 /* Don't do this when optimizing for size. */
7279 if (flag_unsafe_math_optimizations
7280 && optimize && !optimize_size)
7282 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7284 if (sqrtfn != NULL_TREE)
7286 tree rpart, ipart, result;
/* builtin_save_expr prevents re-evaluating ARG (and its parts) in the
   expansion below.  */
7288 arg = builtin_save_expr (arg);
7290 rpart = fold_build1 (REALPART_EXPR, type, arg);
7291 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7293 rpart = builtin_save_expr (rpart);
7294 ipart = builtin_save_expr (ipart);
/* result = r*r + i*i; cabs(z) = sqrt(result).  (Factors on lines 7298
   and 7300-7301 are elided in this dump.)  */
7296 result = fold_build2 (PLUS_EXPR, type,
7297 fold_build2 (MULT_EXPR, type,
7299 fold_build2 (MULT_EXPR, type,
7302 return build_call_expr (sqrtfn, 1, result);
7309 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7310 Return NULL_TREE if no simplification can be made. */
/* Fold sqrt/sqrtf/sqrtl (ARG) of type TYPE: constant-fold via MPFR
   (rejecting args below 0), and under -funsafe-math-optimizations
   rewrite sqrt(expN(x)) -> expN(x*0.5), sqrt(sqrt/cbrt(x)) -> pow, and
   sqrt(pow(x,y)) -> pow(|x|, y*0.5).  */
7313 fold_builtin_sqrt (tree arg, tree type)
7316 enum built_in_function fcode;
7319 if (!validate_arg (arg, REAL_TYPE))
7322 /* Calculate the result when the argument is a constant. */
/* &dconst0 bounds the domain: sqrt of a constant < 0 is not folded.  */
7323 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7326 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7327 fcode = builtin_mathfn_code (arg);
7328 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7330 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7331 arg = fold_build2 (MULT_EXPR, type,
7332 CALL_EXPR_ARG (arg, 0),
7333 build_real (type, dconsthalf));
7334 return build_call_expr (expfn, 1, arg);
7337 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7338 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7340 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7344 tree arg0 = CALL_EXPR_ARG (arg, 0);
7346 /* The inner root was either sqrt or cbrt. */
7347 REAL_VALUE_TYPE dconstroot =
7348 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7350 /* Adjust for the outer root. */
/* Halve the exponent: decrementing REAL_EXP divides the value by 2,
   turning 1/N into 1/(2*N).  */
7351 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7352 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7353 tree_root = build_real (type, dconstroot);
7354 return build_call_expr (powfn, 2, arg0, tree_root);
7358 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7359 if (flag_unsafe_math_optimizations
7360 && (fcode == BUILT_IN_POW
7361 || fcode == BUILT_IN_POWF
7362 || fcode == BUILT_IN_POWL))
7364 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7365 tree arg0 = CALL_EXPR_ARG (arg, 0);
7366 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* |x| is needed because sqrt(pow(x,y)) is nonnegative even when
   pow(x,y) had even y and negative x.  */
7368 if (!tree_expr_nonnegative_p (arg0))
7369 arg0 = build1 (ABS_EXPR, type, arg0);
7370 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7371 build_real (type, dconsthalf));
7372 return build_call_expr (powfn, 2, arg0, narg1);
7378 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7379 Return NULL_TREE if no simplification can be made. */
/* Fold cbrt/cbrtf/cbrtl (ARG) of type TYPE: constant-fold via MPFR and,
   under -funsafe-math-optimizations, rewrite cbrt of expN/sqrt/cbrt/pow
   into equivalent expN or pow calls with adjusted exponents.  */
7382 fold_builtin_cbrt (tree arg, tree type)
7384 const enum built_in_function fcode = builtin_mathfn_code (arg);
7387 if (!validate_arg (arg, REAL_TYPE))
7390 /* Calculate the result when the argument is a constant. */
7391 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7394 if (flag_unsafe_math_optimizations)
7396 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7397 if (BUILTIN_EXPONENT_P (fcode))
7399 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7400 const REAL_VALUE_TYPE third_trunc =
7401 real_value_truncate (TYPE_MODE (type), dconstthird);
7402 arg = fold_build2 (MULT_EXPR, type,
7403 CALL_EXPR_ARG (arg, 0),
7404 build_real (type, third_trunc));
7405 return build_call_expr (expfn, 1, arg);
7408 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7409 if (BUILTIN_SQRT_P (fcode))
7411 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7415 tree arg0 = CALL_EXPR_ARG (arg, 0);
7417 REAL_VALUE_TYPE dconstroot = dconstthird;
/* Halve 1/3 to get 1/6: decrementing REAL_EXP divides by 2.  */
7419 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7420 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7421 tree_root = build_real (type, dconstroot);
7422 return build_call_expr (powfn, 2, arg0, tree_root);
7426 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7427 if (BUILTIN_CBRT_P (fcode))
7429 tree arg0 = CALL_EXPR_ARG (arg, 0);
/* Nonnegativity matters: pow of a negative base with non-integer
   exponent is a domain error, while cbrt(cbrt(x)) is defined.  */
7430 if (tree_expr_nonnegative_p (arg0))
7432 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7437 REAL_VALUE_TYPE dconstroot;
/* 1/9 = (1/3) * (1/3).  */
7439 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7440 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7441 tree_root = build_real (type, dconstroot);
7442 return build_call_expr (powfn, 2, arg0, tree_root);
7447 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7448 if (fcode == BUILT_IN_POW
7449 || fcode == BUILT_IN_POWF
7450 || fcode == BUILT_IN_POWL)
7452 tree arg00 = CALL_EXPR_ARG (arg, 0);
7453 tree arg01 = CALL_EXPR_ARG (arg, 1);
7454 if (tree_expr_nonnegative_p (arg00))
7456 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7457 const REAL_VALUE_TYPE dconstroot
7458 = real_value_truncate (TYPE_MODE (type), dconstthird);
7459 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7460 build_real (type, dconstroot));
7461 return build_call_expr (powfn, 2, arg00, narg01);
7468 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7469 TYPE is the type of the return value. Return NULL_TREE if no
7470 simplification can be made. */
/* Fold cos/cosf/cosl (ARG) returning TYPE: constant-fold via MPFR, and
   use cos's evenness to strip sign operations from the argument
   (cos(-x) == cos(x)).  */
7473 fold_builtin_cos (tree arg, tree type, tree fndecl)
7477 if (!validate_arg (arg, REAL_TYPE))
7480 /* Calculate the result when the argument is a constant. */
7481 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7484 /* Optimize cos(-x) into cos (x). */
7485 if ((narg = fold_strip_sign_ops (arg)))
7486 return build_call_expr (fndecl, 1, narg);
7491 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7492 Return NULL_TREE if no simplification can be made. */
/* Fold cosh/coshf/coshl (ARG) returning TYPE: constant-fold via MPFR,
   and use cosh's evenness to strip sign operations from the argument
   (cosh(-x) == cosh(x)).  Note this validates with a non-negated test
   and nests the body under it, unlike fold_builtin_cos.  */
7495 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7497 if (validate_arg (arg, REAL_TYPE))
7501 /* Calculate the result when the argument is a constant. */
7502 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7505 /* Optimize cosh(-x) into cosh (x). */
7506 if ((narg = fold_strip_sign_ops (arg)))
7507 return build_call_expr (fndecl, 1, narg);
7513 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7514 Return NULL_TREE if no simplification can be made. */
/* Fold tan/tanf/tanl (ARG) returning TYPE: constant-fold via MPFR, and
   under -funsafe-math-optimizations cancel tan(atan(x)) to x.  */
7517 fold_builtin_tan (tree arg, tree type)
7519 enum built_in_function fcode;
7522 if (!validate_arg (arg, REAL_TYPE))
7525 /* Calculate the result when the argument is a constant. */
7526 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7529 /* Optimize tan(atan(x)) = x. */
7530 fcode = builtin_mathfn_code (arg);
7531 if (flag_unsafe_math_optimizations
7532 && (fcode == BUILT_IN_ATAN
7533 || fcode == BUILT_IN_ATANF
7534 || fcode == BUILT_IN_ATANL))
7535 return CALL_EXPR_ARG (arg, 0);
7540 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7541 NULL_TREE if no simplification can be made. */
/* Fold sincos (ARG0, ARG1, ARG2) where ARG0 is the angle and ARG1/ARG2
   are pointers receiving sin/cos: constant-fold via MPFR, otherwise
   (when the target lacks C99 runtime functions) canonicalize to a
   single cexpi call whose imaginary/real parts are stored through
   ARG1/ARG2.  */
7544 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7549 if (!validate_arg (arg0, REAL_TYPE)
7550 || !validate_arg (arg1, POINTER_TYPE)
7551 || !validate_arg (arg2, POINTER_TYPE))
7554 type = TREE_TYPE (arg0);
7556 /* Calculate the result when the argument is a constant. */
7557 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7560 /* Canonicalize sincos to cexpi. */
7561 if (!TARGET_C99_FUNCTIONS)
7563 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
/* Save the call so the single cexpi result feeds both stores.  */
7567 call = build_call_expr (fn, 1, arg0);
7568 call = builtin_save_expr (call);
/* cexpi(x) = cos(x) + i*sin(x): imagpart -> *arg1 (sin),
   realpart -> *arg2 (cos).  */
7570 return build2 (COMPOUND_EXPR, type,
7571 build2 (MODIFY_EXPR, void_type_node,
7572 build_fold_indirect_ref (arg1),
7573 build1 (IMAGPART_EXPR, type, call)),
7574 build2 (MODIFY_EXPR, void_type_node,
7575 build_fold_indirect_ref (arg2),
7576 build1 (REALPART_EXPR, type, call)));
7579 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7580 NULL_TREE if no simplification can be made. */
/* Fold cexp/cexpf/cexpl (ARG0) of complex type, returning TYPE: when
   the real part is provably zero reduce to cexpi of the imaginary
   part, and under -funsafe-math-optimizations decompose cexp(r+i*im)
   into exp(r) * cexpi(im).  */
7583 fold_builtin_cexp (tree arg0, tree type)
7586 tree realp, imagp, ifn;
7588 if (!validate_arg (arg0, COMPLEX_TYPE))
/* rtype is the component (scalar float) type of the complex arg.  */
7591 rtype = TREE_TYPE (TREE_TYPE (arg0));
7593 /* In case we can figure out the real part of arg0 and it is constant zero
7595 if (!TARGET_C99_FUNCTIONS)
7597 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7601 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7602 && real_zerop (realp))
7604 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7605 return build_call_expr (ifn, 1, narg);
7608 /* In case we can easily decompose real and imaginary parts split cexp
7609 to exp (r) * cexpi (i). */
7610 if (flag_unsafe_math_optimizations
7613 tree rfn, rcall, icall;
7615 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7619 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
/* Save both calls: each result is used twice below.  */
7623 icall = build_call_expr (ifn, 1, imagp);
7624 icall = builtin_save_expr (icall);
7625 rcall = build_call_expr (rfn, 1, realp);
7626 rcall = builtin_save_expr (rcall);
/* Result: exp(r)*realpart(cexpi(i)) + i * exp(r)*imagpart(cexpi(i)).
   (The rcall operands on lines 7629/7632 are elided in this dump.)  */
7627 return build2 (COMPLEX_EXPR, type,
7628 build2 (MULT_EXPR, rtype,
7630 build1 (REALPART_EXPR, rtype, icall)),
7631 build2 (MULT_EXPR, rtype,
7633 build1 (IMAGPART_EXPR, rtype, icall)));
7639 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7640 Return NULL_TREE if no simplification can be made. */
/* Fold trunc/truncf/truncl (ARG): constant-fold a REAL_CST via
   real_trunc, otherwise defer to the generic truncation-transparent
   rounding-function folder.  */
7643 fold_builtin_trunc (tree fndecl, tree arg)
7645 if (!validate_arg (arg, REAL_TYPE))
7648 /* Optimize trunc of constant value. */
7649 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7651 REAL_VALUE_TYPE r, x;
7652 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7654 x = TREE_REAL_CST (arg);
7655 real_trunc (&r, TYPE_MODE (type), &x);
7656 return build_real (type, r);
7659 return fold_trunc_transparent_mathfn (fndecl, arg);
7662 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7663 Return NULL_TREE if no simplification can be made. */
/* Fold floor/floorf/floorl (ARG): constant-fold non-NaN REAL_CSTs
   (NaN only when errno math is off), rewrite floor of a nonnegative
   value to trunc, otherwise defer to the generic folder.  */
7666 fold_builtin_floor (tree fndecl, tree arg)
7668 if (!validate_arg (arg, REAL_TYPE))
7671 /* Optimize floor of constant value. */
7672 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7676 x = TREE_REAL_CST (arg);
/* Folding a NaN is only safe when errno/exception side effects do not
   matter (-fno-math-errno).  */
7677 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7679 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7682 real_floor (&r, TYPE_MODE (type), &x);
7683 return build_real (type, r);
7687 /* Fold floor (x) where x is nonnegative to trunc (x). */
7688 if (tree_expr_nonnegative_p (arg))
7690 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7692 return build_call_expr (truncfn, 1, arg);
7695 return fold_trunc_transparent_mathfn (fndecl, arg);
7698 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7699 Return NULL_TREE if no simplification can be made. */
/* Fold ceil/ceilf/ceill (ARG): constant-fold non-NaN REAL_CSTs (NaN
   only when errno math is off), otherwise defer to the generic
   truncation-transparent rounding-function folder.  */
7702 fold_builtin_ceil (tree fndecl, tree arg)
7704 if (!validate_arg (arg, REAL_TYPE))
7707 /* Optimize ceil of constant value. */
7708 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7712 x = TREE_REAL_CST (arg);
7713 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7715 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7718 real_ceil (&r, TYPE_MODE (type), &x);
7719 return build_real (type, r);
7723 return fold_trunc_transparent_mathfn (fndecl, arg);
7726 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7727 Return NULL_TREE if no simplification can be made. */
/* Fold round/roundf/roundl (ARG): constant-fold non-NaN REAL_CSTs (NaN
   only when errno math is off), otherwise defer to the generic
   truncation-transparent rounding-function folder.  */
7730 fold_builtin_round (tree fndecl, tree arg)
7732 if (!validate_arg (arg, REAL_TYPE))
7735 /* Optimize round of constant value. */
7736 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7740 x = TREE_REAL_CST (arg);
7741 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7743 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7746 real_round (&r, TYPE_MODE (type), &x);
7747 return build_real (type, r);
7751 return fold_trunc_transparent_mathfn (fndecl, arg);
7754 /* Fold function call to builtin lround, lroundf or lroundl (or the
7755 corresponding long long versions) and other rounding functions. ARG
7756 is the argument to the call. Return NULL_TREE if no simplification
/* Fold the l*/ll* float-to-integer rounding builtins (lround, lfloor,
   lceil and long long variants) with argument ARG: constant-fold a
   finite REAL_CST when the result fits the integer return type, fold
   lfloor of a nonnegative value to a plain FIX_TRUNC, then defer to
   fold_fixed_mathfn for narrowing/canonicalization.  */
7760 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7762 if (!validate_arg (arg, REAL_TYPE))
7765 /* Optimize lround of constant value. */
7766 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7768 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* NaN/Inf have no integer value; leave them to the runtime.  */
7770 if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
7772 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7773 tree ftype = TREE_TYPE (arg);
7774 unsigned HOST_WIDE_INT lo2;
7775 HOST_WIDE_INT hi, lo;
/* Select the rounding mode implied by the specific builtin.  */
7778 switch (DECL_FUNCTION_CODE (fndecl))
7780 CASE_FLT_FN (BUILT_IN_LFLOOR):
7781 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7782 real_floor (&r, TYPE_MODE (ftype), &x);
7785 CASE_FLT_FN (BUILT_IN_LCEIL):
7786 CASE_FLT_FN (BUILT_IN_LLCEIL):
7787 real_ceil (&r, TYPE_MODE (ftype), &x);
7790 CASE_FLT_FN (BUILT_IN_LROUND):
7791 CASE_FLT_FN (BUILT_IN_LLROUND):
7792 real_round (&r, TYPE_MODE (ftype), &x);
/* Only fold when the rounded value fits ITYPE without overflow.
   NOTE(review): the `!` on fit_double_type reads as "did not
   overflow" here -- confirm its return convention in tree.c.  */
7799 REAL_VALUE_TO_INT (&lo, &hi, r);
7800 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7801 return build_int_cst_wide (itype, lo2, hi);
7805 switch (DECL_FUNCTION_CODE (fndecl))
7807 CASE_FLT_FN (BUILT_IN_LFLOOR):
7808 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7809 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7810 if (tree_expr_nonnegative_p (arg))
7811 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7817 return fold_fixed_mathfn (fndecl, arg);
7820 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7821 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7822 the argument to the call. Return NULL_TREE if no simplification can
/* Fold the integer bit-query builtins (ffs, clz, ctz, popcount, parity
   and their l/ll variants) when ARG is an INTEGER_CST, computing the
   result on the (lo, hi) HOST_WIDE_INT halves of the constant.  */
7826 fold_builtin_bitop (tree fndecl, tree arg)
7828 if (!validate_arg (arg, INTEGER_TYPE))
7831 /* Optimize for constant argument. */
7832 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7834 HOST_WIDE_INT hi, width, result;
7835 unsigned HOST_WIDE_INT lo;
7838 type = TREE_TYPE (arg);
7839 width = TYPE_PRECISION (type);
7840 lo = TREE_INT_CST_LOW (arg);
7842 /* Clear all the bits that are beyond the type's precision. */
7843 if (width > HOST_BITS_PER_WIDE_INT)
7845 hi = TREE_INT_CST_HIGH (arg);
7846 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7847 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
/* Narrow constant: only the low half is meaningful.  */
7852 if (width < HOST_BITS_PER_WIDE_INT)
7853 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7856 switch (DECL_FUNCTION_CODE (fndecl))
7858 CASE_INT_FN (BUILT_IN_FFS):
/* ffs: lo & -lo isolates the lowest set bit; exact_log2 gives its
   index, +1 for ffs's 1-based convention.  */
7860 result = exact_log2 (lo & -lo) + 1;
7862 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7867 CASE_INT_FN (BUILT_IN_CLZ):
7869 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7871 result = width - floor_log2 (lo) - 1;
/* clz(0) is only foldable if the target defines a value for it.  */
7872 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7876 CASE_INT_FN (BUILT_IN_CTZ):
7878 result = exact_log2 (lo & -lo);
7880 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7881 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7885 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: x &= x - 1 clears the lowest set bit.  */
7888 result++, lo &= lo - 1;
7890 result++, hi &= hi - 1;
7893 CASE_INT_FN (BUILT_IN_PARITY):
7896 result++, lo &= lo - 1;
7898 result++, hi &= hi - 1;
/* (Parity presumably masks result to its low bit in an elided line --
   confirm against the full source.)  */
7906 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7912 /* Fold function call to builtin_bswap and the long and long long
7913 variants. Return NULL_TREE if no simplification can be made. */
/* Fold __builtin_bswap32/bswap64 (ARG) when ARG is an INTEGER_CST by
   reversing its bytes across the (lo, hi) HOST_WIDE_INT halves.  */
7915 fold_builtin_bswap (tree fndecl, tree arg)
7917 if (! validate_arg (arg, INTEGER_TYPE))
7920 /* Optimize constant value. */
7921 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7923 HOST_WIDE_INT hi, width, r_hi = 0;
7924 unsigned HOST_WIDE_INT lo, r_lo = 0;
7927 type = TREE_TYPE (arg);
7928 width = TYPE_PRECISION (type);
7929 lo = TREE_INT_CST_LOW (arg);
7930 hi = TREE_INT_CST_HIGH (arg);
7932 switch (DECL_FUNCTION_CODE (fndecl))
7934 case BUILT_IN_BSWAP32:
7935 case BUILT_IN_BSWAP64:
/* Move each source byte at bit offset S to mirrored offset D.  */
7939 for (s = 0; s < width; s += 8)
7941 int d = width - s - 8;
7942 unsigned HOST_WIDE_INT byte;
/* Extract the byte from whichever half holds it...  */
7944 if (s < HOST_BITS_PER_WIDE_INT)
7945 byte = (lo >> s) & 0xff;
7947 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
/* ...and deposit it into the mirrored position (r_lo store on the
   elided line 7950, presumably).  */
7949 if (d < HOST_BITS_PER_WIDE_INT)
7952 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
/* Narrow results fit entirely in the low half.  */
7962 if (width < HOST_BITS_PER_WIDE_INT)
7963 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7965 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7971 /* Return true if EXPR is the real constant contained in VALUE. */
/* Return true if EXPR is the real constant *VALUE, either directly as a
   non-overflowed REAL_CST, or as a COMPLEX_CST whose real part equals
   *VALUE and whose imaginary part is zero.  */
7974 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7978 return ((TREE_CODE (expr) == REAL_CST
7979 && !TREE_OVERFLOW (expr)
7980 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7981 || (TREE_CODE (expr) == COMPLEX_CST
7982 && real_dconstp (TREE_REALPART (expr), value)
7983 && real_zerop (TREE_IMAGPART (expr))));
7986 /* A subroutine of fold_builtin to fold the various logarithmic
7987 functions. Return NULL_TREE if no simplification can me made.
7988 FUNC is the corresponding MPFR logarithm function. */
/* Fold a call FNDECL (ARG) to one of the logarithm builtins; FUNC is
   the matching MPFR evaluator (mpfr_log / mpfr_log2 / mpfr_log10) and
   also identifies which base we are folding.  Handles log(e)=1,
   constant folding, logN(expN(x))=x, and logN(x**k) -> k*logN(x) for
   the various exponential/root forms of ARG.  */
7991 fold_builtin_logarithm (tree fndecl, tree arg,
7992 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7994 if (validate_arg (arg, REAL_TYPE))
7996 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7998 const enum built_in_function fcode = builtin_mathfn_code (arg);
8000 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8001 instead we'll look for 'e' truncated to MODE. So only do
8002 this if flag_unsafe_math_optimizations is set. */
8003 if (flag_unsafe_math_optimizations && func == mpfr_log)
8005 const REAL_VALUE_TYPE e_truncated =
8006 real_value_truncate (TYPE_MODE (type), dconste);
8007 if (real_dconstp (arg, &e_truncated))
8008 return build_real (type, dconst1);
8011 /* Calculate the result when the argument is a constant. */
/* &dconst0 bounds the domain: log of a constant <= 0 is not folded.  */
8012 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8015 /* Special case, optimize logN(expN(x)) = x. */
/* Base of the log must match the base of the inner exponential.  */
8016 if (flag_unsafe_math_optimizations
8017 && ((func == mpfr_log
8018 && (fcode == BUILT_IN_EXP
8019 || fcode == BUILT_IN_EXPF
8020 || fcode == BUILT_IN_EXPL))
8021 || (func == mpfr_log2
8022 && (fcode == BUILT_IN_EXP2
8023 || fcode == BUILT_IN_EXP2F
8024 || fcode == BUILT_IN_EXP2L))
8025 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8026 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8028 /* Optimize logN(func()) for various exponential functions. We
8029 want to determine the value "x" and the power "exponent" in
8030 order to transform logN(x**exponent) into exponent*logN(x). */
8031 if (flag_unsafe_math_optimizations)
8033 tree exponent = 0, x = 0;
/* (switch head on fcode elided in this dump.)  */
8037 CASE_FLT_FN (BUILT_IN_EXP):
8038 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8039 x = build_real (type,
8040 real_value_truncate (TYPE_MODE (type), dconste));
8041 exponent = CALL_EXPR_ARG (arg, 0);
8043 CASE_FLT_FN (BUILT_IN_EXP2):
8044 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8045 x = build_real (type, dconst2);
8046 exponent = CALL_EXPR_ARG (arg, 0);
8048 CASE_FLT_FN (BUILT_IN_EXP10):
8049 CASE_FLT_FN (BUILT_IN_POW10):
8050 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8051 x = build_real (type, dconst10);
8052 exponent = CALL_EXPR_ARG (arg, 0);
8054 CASE_FLT_FN (BUILT_IN_SQRT):
8055 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8056 x = CALL_EXPR_ARG (arg, 0);
8057 exponent = build_real (type, dconsthalf);
8059 CASE_FLT_FN (BUILT_IN_CBRT):
8060 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8061 x = CALL_EXPR_ARG (arg, 0);
8062 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8065 CASE_FLT_FN (BUILT_IN_POW):
8066 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8067 x = CALL_EXPR_ARG (arg, 0);
8068 exponent = CALL_EXPR_ARG (arg, 1);
8074 /* Now perform the optimization. */
/* Rebuild logN(x) via FNDECL and scale by the extracted exponent.  */
8077 tree logfn = build_call_expr (fndecl, 1, x);
8078 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8086 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8087 NULL_TREE if no simplification can be made. */
/* Fold hypot/hypotf/hypotl (ARG0, ARG1) returning TYPE: constant-fold
   via MPFR, strip sign operations from either argument (hypot is even
   in both), reduce a zero argument to fabs of the other, and fold
   hypot(x,x) to fabs(x)*sqrt(2) under unsafe-math.  */
8090 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8092 tree res, narg0, narg1;
8094 if (!validate_arg (arg0, REAL_TYPE)
8095 || !validate_arg (arg1, REAL_TYPE))
8098 /* Calculate the result when the argument is a constant. */
8099 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8102 /* If either argument to hypot has a negate or abs, strip that off.
8103 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8104 narg0 = fold_strip_sign_ops (arg0);
8105 narg1 = fold_strip_sign_ops (arg1);
/* Rebuild the call only if at least one argument was simplified (the
   guard condition itself is on an elided line -- confirm).  */
8108 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8109 narg1 ? narg1 : arg1);
8112 /* If either argument is zero, hypot is fabs of the other. */
8113 if (real_zerop (arg0))
8114 return fold_build1 (ABS_EXPR, type, arg1);
8115 else if (real_zerop (arg1))
8116 return fold_build1 (ABS_EXPR, type, arg0);
8118 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8119 if (flag_unsafe_math_optimizations
8120 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8122 const REAL_VALUE_TYPE sqrt2_trunc
8123 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8124 return fold_build2 (MULT_EXPR, type,
8125 fold_build1 (ABS_EXPR, type, arg0),
8126 build_real (type, sqrt2_trunc));
8133 /* Fold a builtin function call to pow, powf, or powl. Return
8134 NULL_TREE if no simplification can be made. */
8136 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8140 if (!validate_arg (arg0, REAL_TYPE)
8141 || !validate_arg (arg1, REAL_TYPE))
8144 /* Calculate the result when the argument is a constant. */
8145 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8148 /* Optimize pow(1.0,y) = 1.0. */
/* omit_one_operand keeps ARG1 for its side effects while discarding
   its value.  */
8149 if (real_onep (arg0))
8150 return omit_one_operand (type, build_real (type, dconst1), arg1);
/* The following transforms require a constant, non-overflowed
   exponent.  */
8152 if (TREE_CODE (arg1) == REAL_CST
8153 && !TREE_OVERFLOW (arg1))
8155 REAL_VALUE_TYPE cint;
8159 c = TREE_REAL_CST (arg1);
8161 /* Optimize pow(x,0.0) = 1.0. */
8162 if (REAL_VALUES_EQUAL (c, dconst0))
8163 return omit_one_operand (type, build_real (type, dconst1),
8166 /* Optimize pow(x,1.0) = x. */
8167 if (REAL_VALUES_EQUAL (c, dconst1))
8170 /* Optimize pow(x,-1.0) = 1.0/x. */
8171 if (REAL_VALUES_EQUAL (c, dconstm1))
8172 return fold_build2 (RDIV_EXPR, type,
8173 build_real (type, dconst1), arg0);
8175 /* Optimize pow(x,0.5) = sqrt(x). */
8176 if (flag_unsafe_math_optimizations
8177 && REAL_VALUES_EQUAL (c, dconsthalf))
8179 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8181 if (sqrtfn != NULL_TREE)
8182 return build_call_expr (sqrtfn, 1, arg0);
8185 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8186 if (flag_unsafe_math_optimizations)
/* Truncate 1/3 to the target type's precision before comparing
   against the constant exponent.  */
8188 const REAL_VALUE_TYPE dconstroot
8189 = real_value_truncate (TYPE_MODE (type), dconstthird);
8191 if (REAL_VALUES_EQUAL (c, dconstroot))
8193 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8194 if (cbrtfn != NULL_TREE)
8195 return build_call_expr (cbrtfn, 1, arg0);
8199 /* Check for an integer exponent. */
/* Round-trip through an integer and compare: the exponent is integral
   iff the conversion is exact.  */
8200 n = real_to_integer (&c);
8201 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8202 if (real_identical (&c, &cint))
8204 /* Attempt to evaluate pow at compile-time. */
8205 if (TREE_CODE (arg0) == REAL_CST
8206 && !TREE_OVERFLOW (arg0))
8211 x = TREE_REAL_CST (arg0);
/* real_powi reports whether the result was inexact; only fold an
   inexact result when unsafe math optimizations are allowed.  */
8212 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8213 if (flag_unsafe_math_optimizations || !inexact)
8214 return build_real (type, x);
8217 /* Strip sign ops from even integer powers. */
/* Valid because pow(-x,2k) == pow(x,2k) for integral exponents.  */
8218 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8220 tree narg0 = fold_strip_sign_ops (arg0);
8222 return build_call_expr (fndecl, 2, narg0, arg1);
8227 if (flag_unsafe_math_optimizations)
8229 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8231 /* Optimize pow(expN(x),y) = expN(x*y). */
8232 if (BUILTIN_EXPONENT_P (fcode))
8234 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8235 tree arg = CALL_EXPR_ARG (arg0, 0);
8236 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8237 return build_call_expr (expfn, 1, arg);
8240 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8241 if (BUILTIN_SQRT_P (fcode))
8243 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8244 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8245 build_real (type, dconsthalf));
8246 return build_call_expr (fndecl, 2, narg0, narg1);
8249 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8250 if (BUILTIN_CBRT_P (fcode))
8252 tree arg = CALL_EXPR_ARG (arg0, 0);
8253 if (tree_expr_nonnegative_p (arg))
8255 const REAL_VALUE_TYPE dconstroot
8256 = real_value_truncate (TYPE_MODE (type), dconstthird);
8257 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8258 build_real (type, dconstroot));
8259 return build_call_expr (fndecl, 2, arg, narg1);
8263 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8264 if (fcode == BUILT_IN_POW
8265 || fcode == BUILT_IN_POWF
8266 || fcode == BUILT_IN_POWL)
8268 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8269 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8270 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8271 return build_call_expr (fndecl, 2, arg00, narg1);
8278 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8279 Return NULL_TREE if no simplification can be made. */
8281 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8282 tree arg0, tree arg1, tree type)
/* ARG0 is the real base, ARG1 the integer exponent.  */
8284 if (!validate_arg (arg0, REAL_TYPE)
8285 || !validate_arg (arg1, INTEGER_TYPE))
8288 /* Optimize pow(1.0,y) = 1.0. */
8289 if (real_onep (arg0))
8290 return omit_one_operand (type, build_real (type, dconst1), arg1);
/* The remaining transforms need the exponent as a host integer.  */
8292 if (host_integerp (arg1, 0))
8294 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8296 /* Evaluate powi at compile-time. */
8297 if (TREE_CODE (arg0) == REAL_CST
8298 && !TREE_OVERFLOW (arg0))
8301 x = TREE_REAL_CST (arg0);
8302 real_powi (&x, TYPE_MODE (type), &x, c);
8303 return build_real (type, x);
8306 /* Optimize pow(x,0) = 1.0. */
8308 return omit_one_operand (type, build_real (type, dconst1),
8311 /* Optimize pow(x,1) = x. */
8315 /* Optimize pow(x,-1) = 1.0/x. */
8317 return fold_build2 (RDIV_EXPR, type,
8318 build_real (type, dconst1), arg0);
8324 /* A subroutine of fold_builtin to fold the various exponent
8325 functions. Return NULL_TREE if no simplification can be made.
8326 FUNC is the corresponding MPFR exponent function. */
8329 fold_builtin_exponent (tree fndecl, tree arg,
8330 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8332 if (validate_arg (arg, REAL_TYPE))
/* TYPE is the return type of the builtin being folded.  */
8334 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8337 /* Calculate the result when the argument is a constant. */
8338 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8341 /* Optimize expN(logN(x)) = x. */
8342 if (flag_unsafe_math_optimizations)
8344 const enum built_in_function fcode = builtin_mathfn_code (arg);
/* FUNC identifies which exponent family we are folding; the inner
   call must be the matching-base log builtin.  */
8346 if ((func == mpfr_exp
8347 && (fcode == BUILT_IN_LOG
8348 || fcode == BUILT_IN_LOGF
8349 || fcode == BUILT_IN_LOGL))
8350 || (func == mpfr_exp2
8351 && (fcode == BUILT_IN_LOG2
8352 || fcode == BUILT_IN_LOG2F
8353 || fcode == BUILT_IN_LOG2L))
8354 || (func == mpfr_exp10
8355 && (fcode == BUILT_IN_LOG10
8356 || fcode == BUILT_IN_LOG10F
8357 || fcode == BUILT_IN_LOG10L)))
8358 return fold_convert (type, CALL_EXPR_ARG (arg, 0))
8365 /* Return true if VAR is a VAR_DECL or a component thereof. */
8368 var_decl_component_p (tree var)
/* Peel off component references (COMPONENT_REF, ARRAY_REF, ...) until
   the base object is reached, then test whether it is a variable.  */
8371 while (handled_component_p (inner))
8372 inner = TREE_OPERAND (inner, 0);
8373 return SSA_VAR_P (inner);
8376 /* Fold function call to builtin memset. Return
8377 NULL_TREE if no simplification can be made. */
8380 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8383 unsigned HOST_WIDE_INT length, cval;
8385 if (! validate_arg (dest, POINTER_TYPE)
8386 || ! validate_arg (c, INTEGER_TYPE)
8387 || ! validate_arg (len, INTEGER_TYPE))
8390 if (! host_integerp (len, 1))
8393 /* If the LEN parameter is zero, return DEST. */
8394 if (integer_zerop (len))
8395 return omit_one_operand (type, dest, c)
8397 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
/* Only a store to an addressable, non-volatile object of integral or
   pointer type can be turned into a plain assignment.  */
8402 if (TREE_CODE (var) != ADDR_EXPR)
8405 var = TREE_OPERAND (var, 0);
8406 if (TREE_THIS_VOLATILE (var))
8409 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8410 && !POINTER_TYPE_P (TREE_TYPE (var)))
8413 if (! var_decl_component_p (var))
/* The memset must cover the whole object and DEST must be
   sufficiently aligned for the object's mode.  */
8416 length = tree_low_cst (len, 1);
8417 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8418 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8422 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8425 if (integer_zerop (c))
/* Only handle the common 8-bit-byte case when replicating a nonzero
   fill byte across the word.  */
8429 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8432 cval = tree_low_cst (c, 1);
/* Shift in two steps to avoid undefined behavior when
   HOST_WIDE_INT is only 32 bits wide.  */
8436 cval |= (cval << 31) << 1;
8439 ret = build_int_cst_type (TREE_TYPE (var), cval);
8440 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8444 return omit_one_operand (type, dest, ret);
8447 /* Fold function call to builtin bzero. Return
8448 NULL_TREE if no simplification can be made. */
8451 fold_builtin_bzero (tree dest, tree size, bool ignore)
8453 if (! validate_arg (dest, POINTER_TYPE)
8454 || ! validate_arg (size, INTEGER_TYPE))
8460 /* New argument list transforming bzero(ptr x, int y) to
8461 memset(ptr x, int 0, size_t y). This is done this way
8462 so that if it isn't expanded inline, we fallback to
8463 calling bzero instead of memset. */
8465 return fold_builtin_memset (dest, integer_zero_node,
8466 fold_convert (sizetype, size),
8467 void_type_node, ignore);
8470 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8471 NULL_TREE if no simplification can be made.
8472 If ENDP is 0, return DEST (like memcpy).
8473 If ENDP is 1, return DEST+LEN (like mempcpy).
8474 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8475 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8479 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8481 tree destvar, srcvar, expr;
8483 if (! validate_arg (dest, POINTER_TYPE)
8484 || ! validate_arg (src, POINTER_TYPE)
8485 || ! validate_arg (len, INTEGER_TYPE))
8488 /* If the LEN parameter is zero, return DEST. */
8489 if (integer_zerop (len))
8490 return omit_one_operand (type, dest, src);
8492 /* If SRC and DEST are the same (and not volatile), return
8493 DEST{,+LEN,+LEN-1}. */
8494 if (operand_equal_p (src, dest, 0))
8498 tree srctype, desttype;
8501 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8502 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8504 /* Both DEST and SRC must be pointer types.
8505 ??? This is what old code did. Is the testing for pointer types
8508 If either SRC is readonly or length is 1, we can use memcpy. */
/* memmove can be strength-reduced to memcpy when the regions provably
   cannot overlap: read-only source, or copy no wider than the
   guaranteed common alignment.  */
8509 if (dest_align && src_align
8510 && (readonly_data_expr (src)
8511 || (host_integerp (len, 1)
8512 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8513 tree_low_cst (len, 1)))))
8515 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8518 return build_call_expr (fn, 3, dest, src, len);
8523 if (!host_integerp (len, 0))
8526 This logic lose for arguments like (type *)malloc (sizeof (type)),
8527 since we strip the casts of up to VOID return value from malloc.
8528 Perhaps we ought to inherit type from non-VOID argument here? */
/* Attempt to turn the copy into a single scalar assignment
   *destvar = *srcvar; both pointed-to types must have a constant
   size exactly equal to LEN.  */
8531 srctype = TREE_TYPE (TREE_TYPE (src));
8532 desttype = TREE_TYPE (TREE_TYPE (dest));
8533 if (!srctype || !desttype
8534 || !TYPE_SIZE_UNIT (srctype)
8535 || !TYPE_SIZE_UNIT (desttype)
8536 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8537 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8538 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8539 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
/* Give up if either pointer is less aligned than its pointed-to type
   requires; a scalar load/store would be misaligned.  */
8542 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8543 < (int) TYPE_ALIGN (desttype)
8544 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8545 < (int) TYPE_ALIGN (srctype)))
/* DEST may be evaluated twice below (store and return value), so
   protect against double side effects.  */
8549 dest = builtin_save_expr (dest);
8551 srcvar = build_fold_indirect_ref (src);
8552 if (TREE_THIS_VOLATILE (srcvar))
8554 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8556 /* With memcpy, it is possible to bypass aliasing rules, so without
8557 this check i. e. execute/20060930-2.c would be misoptimized, because
8558 it use conflicting alias set to hold argument for the memcpy call.
8559 This check is probably unnecesary with -fno-strict-aliasing.
8560 Similarly for destvar. See also PR29286. */
8561 if (!var_decl_component_p (srcvar)
8562 /* Accept: memcpy (*char_var, "test", 1); that simplify
8564 || is_gimple_min_invariant (srcvar)
8565 || readonly_data_expr (src))
8568 destvar = build_fold_indirect_ref (dest);
8569 if (TREE_THIS_VOLATILE (destvar))
8571 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8573 if (!var_decl_component_p (destvar))
/* Pick the conversion needed to store SRCVAR into DESTVAR: none when
   the types agree, a value conversion for scalar types, otherwise a
   bit-preserving VIEW_CONVERT_EXPR.  */
8576 if (srctype == desttype
8577 || (gimple_in_ssa_p (cfun)
8578 && tree_ssa_useless_type_conversion_1 (desttype, srctype)))
8580 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8581 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8582 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8583 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8584 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8586 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8587 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
/* ENDP selects the return value documented above: 0/3 return DEST,
   1 returns DEST+LEN, 2 returns DEST+LEN-1.  */
8593 if (endp == 0 || endp == 3)
8594 return omit_one_operand (type, dest, expr);
8600 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8603 len = fold_convert (TREE_TYPE (dest), len);
8604 dest = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
8605 dest = fold_convert (type, dest);
8607 dest = omit_one_operand (type, dest, expr);
8611 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8612 If LEN is not NULL, it represents the length of the string to be
8613 copied. Return NULL_TREE if no simplification can be made. */
8616 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8620 if (!validate_arg (dest, POINTER_TYPE)
8621 || !validate_arg (src, POINTER_TYPE))
8624 /* If SRC and DEST are the same (and not volatile), return DEST. */
8625 if (operand_equal_p (src, dest, 0))
8626 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* Otherwise transform strcpy into memcpy of strlen(src)+1 bytes,
   when the source length is known and side-effect free.  */
8631 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8637 len = c_strlen (src, 1);
8638 if (! len || TREE_SIDE_EFFECTS (len))
/* +1 accounts for the terminating NUL byte.  */
8642 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8643 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8644 build_call_expr (fn, 3, dest, src, len));
8647 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8648 If SLEN is not NULL, it represents the length of the source string.
8649 Return NULL_TREE if no simplification can be made. */
8652 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8656 if (!validate_arg (dest, POINTER_TYPE)
8657 || !validate_arg (src, POINTER_TYPE)
8658 || !validate_arg (len, INTEGER_TYPE))
8661 /* If the LEN parameter is zero, return DEST. */
8662 if (integer_zerop (len))
8663 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8665 /* We can't compare slen with len as constants below if len is not a
8667 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8671 slen = c_strlen (src, 1);
8673 /* Now, we must be passed a constant src ptr parameter. */
8674 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* Include the terminating NUL in the source length.  */
8677 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8679 /* We do not support simplification of this case, though we do
8680 support it when expanding trees into RTL. */
8681 /* FIXME: generate a call to __builtin_memset. */
/* When SLEN < LEN, strncpy must zero-pad the remainder, which a plain
   memcpy does not do -- so bail out.  */
8682 if (tree_int_cst_lt (slen, len))
8685 /* OK transform into builtin memcpy. */
8686 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8689 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8690 build_call_expr (fn, 3, dest, src, len));
8693 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8694 arguments to the call, and TYPE is its return type.
8695 Return NULL_TREE if no simplification can be made. */
8698 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8700 if (!validate_arg (arg1, POINTER_TYPE)
8701 || !validate_arg (arg2, INTEGER_TYPE)
8702 || !validate_arg (len, INTEGER_TYPE))
8708 if (TREE_CODE (arg2) != INTEGER_CST
8709 || !host_integerp (len, 1))
/* Only fold when ARG1 is a string constant and LEN does not read past
   its terminating NUL.  */
8712 p1 = c_getstr (arg1);
8713 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* Convert ARG2 to the target character set before searching.  */
8719 if (target_char_cast (arg2, &c))
8722 r = memchr (p1, c, tree_low_cst (len, 1));
8725 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: return ARG1 advanced by the match offset.  */
8727 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (arg1), arg1,
8728 build_int_cst (TREE_TYPE (arg1), r - p1));
8729 return fold_convert (type, tem);
8735 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8736 Return NULL_TREE if no simplification can be made. */
8739 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8741 const char *p1, *p2;
8743 if (!validate_arg (arg1, POINTER_TYPE)
8744 || !validate_arg (arg2, POINTER_TYPE)
8745 || !validate_arg (len, INTEGER_TYPE))
8748 /* If the LEN parameter is zero, return zero. */
8749 if (integer_zerop (len))
8750 return omit_two_operands (integer_type_node, integer_zero_node,
8753 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8754 if (operand_equal_p (arg1, arg2, 0))
8755 return omit_one_operand (integer_type_node, integer_zero_node, len);
8757 p1 = c_getstr (arg1);
8758 p2 = c_getstr (arg2);
8760 /* If all arguments are constant, and the value of len is not greater
8761 than the lengths of arg1 and arg2, evaluate at compile-time. */
8762 if (host_integerp (len, 1) && p1 && p2
8763 && compare_tree_int (len, strlen (p1) + 1) <= 0
8764 && compare_tree_int (len, strlen (p2) + 1) <= 0)
/* Normalize the host memcmp result to -1/0/1 so the folded value does
   not depend on the host library.  */
8766 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8769 return integer_one_node;
8771 return integer_minus_one_node;
8773 return integer_zero_node;
8776 /* If len parameter is one, return an expression corresponding to
8777 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8778 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
/* Both bytes are compared as unsigned char, matching memcmp
   semantics.  */
8780 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8781 tree cst_uchar_ptr_node
8782 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8784 tree ind1 = fold_convert (integer_type_node,
8785 build1 (INDIRECT_REF, cst_uchar_node,
8786 fold_convert (cst_uchar_ptr_node,
8788 tree ind2 = fold_convert (integer_type_node,
8789 build1 (INDIRECT_REF, cst_uchar_node,
8790 fold_convert (cst_uchar_ptr_node,
8792 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8798 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8799 Return NULL_TREE if no simplification can be made. */
8802 fold_builtin_strcmp (tree arg1, tree arg2)
8804 const char *p1, *p2;
8806 if (!validate_arg (arg1, POINTER_TYPE)
8807 || !validate_arg (arg2, POINTER_TYPE))
8810 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8811 if (operand_equal_p (arg1, arg2, 0))
8812 return integer_zero_node;
8814 p1 = c_getstr (arg1);
8815 p2 = c_getstr (arg2);
/* Both strings constant: evaluate at compile time, normalizing the
   host strcmp result to -1/0/1.  */
8819 const int i = strcmp (p1, p2);
8821 return integer_minus_one_node;
8823 return integer_one_node;
8825 return integer_zero_node;
8828 /* If the second arg is "", return *(const unsigned char*)arg1. */
8829 if (p2 && *p2 == '\0')
8831 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8832 tree cst_uchar_ptr_node
8833 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8835 return fold_convert (integer_type_node,
8836 build1 (INDIRECT_REF, cst_uchar_node,
8837 fold_convert (cst_uchar_ptr_node,
8841 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8842 if (p1 && *p1 == '\0')
8844 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8845 tree cst_uchar_ptr_node
8846 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8848 tree temp = fold_convert (integer_type_node,
8849 build1 (INDIRECT_REF, cst_uchar_node,
8850 fold_convert (cst_uchar_ptr_node,
8852 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8858 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8859 Return NULL_TREE if no simplification can be made. */
8862 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8864 const char *p1, *p2;
8866 if (!validate_arg (arg1, POINTER_TYPE)
8867 || !validate_arg (arg2, POINTER_TYPE)
8868 || !validate_arg (len, INTEGER_TYPE))
8871 /* If the LEN parameter is zero, return zero. */
8872 if (integer_zerop (len))
8873 return omit_two_operands (integer_type_node, integer_zero_node,
8876 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8877 if (operand_equal_p (arg1, arg2, 0))
8878 return omit_one_operand (integer_type_node, integer_zero_node, len);
8880 p1 = c_getstr (arg1);
8881 p2 = c_getstr (arg2);
/* Both strings and the length constant: evaluate at compile time,
   normalizing the host strncmp result to -1/0/1.  */
8883 if (host_integerp (len, 1) && p1 && p2)
8885 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8887 return integer_one_node;
8889 return integer_minus_one_node;
8891 return integer_zero_node;
8894 /* If the second arg is "", and the length is greater than zero,
8895 return *(const unsigned char*)arg1. */
8896 if (p2 && *p2 == '\0'
8897 && TREE_CODE (len) == INTEGER_CST
8898 && tree_int_cst_sgn (len) == 1)
8900 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8901 tree cst_uchar_ptr_node
8902 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8904 return fold_convert (integer_type_node,
8905 build1 (INDIRECT_REF, cst_uchar_node,
8906 fold_convert (cst_uchar_ptr_node,
8910 /* If the first arg is "", and the length is greater than zero,
8911 return -*(const unsigned char*)arg2. */
8912 if (p1 && *p1 == '\0'
8913 && TREE_CODE (len) == INTEGER_CST
8914 && tree_int_cst_sgn (len) == 1)
8916 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8917 tree cst_uchar_ptr_node
8918 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8920 tree temp = fold_convert (integer_type_node,
8921 build1 (INDIRECT_REF, cst_uchar_node,
8922 fold_convert (cst_uchar_ptr_node,
8924 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8927 /* If len parameter is one, return an expression corresponding to
8928 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8929 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
/* Single-byte compare via unsigned char, as the standard requires.  */
8931 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8932 tree cst_uchar_ptr_node
8933 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8935 tree ind1 = fold_convert (integer_type_node,
8936 build1 (INDIRECT_REF, cst_uchar_node,
8937 fold_convert (cst_uchar_ptr_node,
8939 tree ind2 = fold_convert (integer_type_node,
8940 build1 (INDIRECT_REF, cst_uchar_node,
8941 fold_convert (cst_uchar_ptr_node,
8943 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8949 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8950 ARG. Return NULL_TREE if no simplification can be made. */
8953 fold_builtin_signbit (tree arg, tree type)
8957 if (!validate_arg (arg, REAL_TYPE))
8960 /* If ARG is a compile-time constant, determine the result. */
8961 if (TREE_CODE (arg) == REAL_CST
8962 && !TREE_OVERFLOW (arg))
8966 c = TREE_REAL_CST (arg);
8967 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8968 return fold_convert (type, temp);
8971 /* If ARG is non-negative, the result is always zero. */
8972 if (tree_expr_nonnegative_p (arg))
8973 return omit_one_operand (type, integer_zero_node, arg);
8975 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
/* With signed zeros, signbit(-0.0) is 1 but -0.0 < 0.0 is false, so
   this transform is only valid when the format lacks them.  */
8976 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8977 return fold_build2 (LT_EXPR, type, arg,
8978 build_real (TREE_TYPE (arg), dconst0));
8983 /* Fold function call to builtin copysign, copysignf or copysignl with
8984 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8988 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
8992 if (!validate_arg (arg1, REAL_TYPE)
8993 || !validate_arg (arg2, REAL_TYPE))
8996 /* copysign(X,X) is X. */
8997 if (operand_equal_p (arg1, arg2, 0))
8998 return fold_convert (type, arg1);
9000 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9001 if (TREE_CODE (arg1) == REAL_CST
9002 && TREE_CODE (arg2) == REAL_CST
9003 && !TREE_OVERFLOW (arg1)
9004 && !TREE_OVERFLOW (arg2))
9006 REAL_VALUE_TYPE c1, c2;
9008 c1 = TREE_REAL_CST (arg1);
9009 c2 = TREE_REAL_CST (arg2);
9010 /* c1.sign := c2.sign. */
9011 real_copysign (&c1, &c2);
9012 return build_real (type, c1);
9015 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9016 Remember to evaluate Y for side-effects. */
9017 if (tree_expr_nonnegative_p (arg2))
9018 return omit_one_operand (type,
9019 fold_build1 (ABS_EXPR, type, arg1),
9022 /* Strip sign changing operations for the first argument. */
/* copysign ignores ARG1's sign entirely, so negate/abs wrappers on
   ARG1 are dead; fold_strip_sign_ops is NULL_TREE when nothing was
   stripped.  */
9023 tem = fold_strip_sign_ops (arg1);
9025 return build_call_expr (fndecl, 2, tem, arg2);
9030 /* Fold a call to builtin isascii with argument ARG. */
9033 fold_builtin_isascii (tree arg)
9035 if (!validate_arg (arg, INTEGER_TYPE))
9039 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
/* A value is ASCII iff no bit above the low seven is set.  */
9040 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9041 build_int_cst (NULL_TREE,
9042 ~ (unsigned HOST_WIDE_INT) 0x7f));
9043 return fold_build2 (EQ_EXPR, integer_type_node,
9044 arg, integer_zero_node);
9048 /* Fold a call to builtin toascii with argument ARG. */
9051 fold_builtin_toascii (tree arg)
9053 if (!validate_arg (arg, INTEGER_TYPE))
9056 /* Transform toascii(c) -> (c & 0x7f). */
/* Masking to seven bits is exactly the traditional toascii
   behavior.  */
9057 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9058 build_int_cst (NULL_TREE, 0x7f));
9061 /* Fold a call to builtin isdigit with argument ARG. */
9064 fold_builtin_isdigit (tree arg)
9066 if (!validate_arg (arg, INTEGER_TYPE))
9070 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9071 /* According to the C standard, isdigit is unaffected by locale.
9072 However, it definitely is affected by the target character set. */
9073 unsigned HOST_WIDE_INT target_digit0
9074 = lang_hooks.to_target_charset ('0');
/* to_target_charset returns 0 when the mapping is unknown; give up
   rather than fold with a bogus digit base.  */
9076 if (target_digit0 == 0)
/* The unsigned subtraction folds the two-sided range check
   '0' <= c && c <= '9' into a single comparison.  */
9079 arg = fold_convert (unsigned_type_node, arg);
9080 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9081 build_int_cst (unsigned_type_node, target_digit0));
9082 return fold_build2 (LE_EXPR, integer_type_node, arg,
9083 build_int_cst (unsigned_type_node, 9));
9087 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9090 fold_builtin_fabs (tree arg, tree type)
9092 if (!validate_arg (arg, REAL_TYPE))
9095 arg = fold_convert (type, arg);
/* Constant argument: compute |arg| now; otherwise emit ABS_EXPR.  */
9096 if (TREE_CODE (arg) == REAL_CST)
9097 return fold_abs_const (arg, type);
9098 return fold_build1 (ABS_EXPR, type, arg);
9101 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9104 fold_builtin_abs (tree arg, tree type)
9106 if (!validate_arg (arg, INTEGER_TYPE))
9109 arg = fold_convert (type, arg);
/* Constant argument: compute |arg| now; otherwise emit ABS_EXPR.  */
9110 if (TREE_CODE (arg) == INTEGER_CST)
9111 return fold_abs_const (arg, type);
9112 return fold_build1 (ABS_EXPR, type, arg);
9115 /* Fold a call to builtin fmin or fmax. */
9118 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9120 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9122 /* Calculate the result when the argument is a constant. */
9123 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9128 /* If either argument is NaN, return the other one. Avoid the
9129 transformation if we get (and honor) a signalling NaN. Using
9130 omit_one_operand() ensures we create a non-lvalue. */
9131 if (TREE_CODE (arg0) == REAL_CST
9132 && real_isnan (&TREE_REAL_CST (arg0))
9133 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9134 || ! TREE_REAL_CST (arg0).signalling))
9135 return omit_one_operand (type, arg1, arg0);
9136 if (TREE_CODE (arg1) == REAL_CST
9137 && real_isnan (&TREE_REAL_CST (arg1))
9138 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9139 || ! TREE_REAL_CST (arg1).signalling))
9140 return omit_one_operand (type, arg0, arg1);
9142 /* Transform fmin/fmax(x,x) -> x. */
9143 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9144 return omit_one_operand (type, arg0, arg1);
9146 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9147 functions to return the numeric arg if the other one is NaN.
9148 These tree codes don't honor that, so only transform if
9149 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9150 handled, so we don't have to worry about it either. */
9151 if (flag_finite_math_only)
9152 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9153 fold_convert (type, arg0),
9154 fold_convert (type, arg1));
9159 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9162 fold_builtin_carg (tree arg, tree type)
9164 if (validate_arg (arg, COMPLEX_TYPE))
9166 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* Save ARG so extracting the real and imaginary parts does not
   evaluate it twice.  */
9170 tree new_arg = builtin_save_expr (arg);
9171 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9172 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
/* atan2 takes (y, x), i.e. imaginary part first.  */
9173 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9180 /* Fold a call to builtin logb/ilogb. */
9183 fold_builtin_logb (tree arg, tree rettype)
/* RETTYPE distinguishes the two entry points: REAL_TYPE for logb,
   integer for ilogb.  */
9185 if (! validate_arg (arg, REAL_TYPE))
9190 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9192 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9198 /* If arg is Inf or NaN and we're logb, return it. */
9199 if (TREE_CODE (rettype) == REAL_TYPE)
9200 return fold_convert (rettype, arg);
9201 /* Fall through... */
9203 /* Zero may set errno and/or raise an exception for logb, also
9204 for ilogb we don't know FP_ILOGB0. */
9207 /* For normal numbers, proceed iff radix == 2. In GCC,
9208 normalized significands are in the range [0.5, 1.0). We
9209 want the exponent as if they were [1.0, 2.0) so get the
9210 exponent and subtract 1. */
9211 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9212 return fold_convert (rettype, build_int_cst (NULL_TREE,
9213 REAL_EXP (value)-1));
9221 /* Fold a call to builtin significand, if radix == 2. */
9224 fold_builtin_significand (tree arg, tree rettype)
9226 if (! validate_arg (arg, REAL_TYPE))
9231 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9233 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9240 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9241 return fold_convert (rettype, arg);
9243 /* For normal numbers, proceed iff radix == 2. */
9244 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9246 REAL_VALUE_TYPE result = *value;
9247 /* In GCC, normalized significands are in the range [0.5,
9248 1.0). We want them to be [1.0, 2.0) so set the
9250 SET_REAL_EXP (&result, 1);
9251 return build_real (rettype, result);
9260 /* Fold a call to builtin frexp, we can assume the base is 2. */
9263 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
/* ARG0 is the real value, ARG1 the int* that receives the exponent.  */
9265 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9270 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9273 arg1 = build_fold_indirect_ref (arg1);
9275 /* Proceed if a valid pointer type was passed in. */
9276 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9278 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9284 /* For +-0, return (*exp = 0, +-0). */
9285 exp = integer_zero_node;
9290 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9291 return omit_one_operand (rettype, arg0, arg1);
9294 /* Since the frexp function always expects base 2, and in
9295 GCC normalized significands are already in the range
9296 [0.5, 1.0), we have exactly what frexp wants. */
9297 REAL_VALUE_TYPE frac_rvt = *value;
9298 SET_REAL_EXP (&frac_rvt, 0);
9299 frac = build_real (rettype, frac_rvt);
9300 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9307 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
/* Mark the store as having side effects so it is not folded away.  */
9308 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9309 TREE_SIDE_EFFECTS (arg1) = 1;
9310 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9316 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9317 then we can assume the base is two. If it's false, then we have to
9318 check the mode of the TYPE parameter in certain cases. */
9321 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9323 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9328 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9329 if (real_zerop (arg0) || integer_zerop (arg1)
9330 || (TREE_CODE (arg0) == REAL_CST
9331 && (real_isnan (&TREE_REAL_CST (arg0))
9332 || real_isinf (&TREE_REAL_CST (arg0)))))
9333 return omit_one_operand (type, arg0, arg1);
9335 /* If both arguments are constant, then try to evaluate it. */
/* For scalbn/scalbln (LDEXP false) the type's radix must be 2 for
   real_ldexp to be the right operation.  */
9336 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9337 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9338 && host_integerp (arg1, 0))
9340 /* Bound the maximum adjustment to twice the range of the
9341 mode's valid exponents. Use abs to ensure the range is
9342 positive as a sanity check. */
9343 const long max_exp_adj = 2 *
9344 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9345 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9347 /* Get the user-requested adjustment. */
9348 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9350 /* The requested adjustment must be inside this range. This
9351 is a preliminary cap to avoid things like overflow, we
9352 may still fail to compute the result for other reasons. */
9353 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9355 REAL_VALUE_TYPE initial_result;
9357 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9359 /* Ensure we didn't overflow. */
9360 if (! real_isinf (&initial_result))
/* The scaled value must survive truncation to the target mode
   unchanged, otherwise folding would alter the runtime result.  */
9362 const REAL_VALUE_TYPE trunc_result
9363 = real_value_truncate (TYPE_MODE (type), initial_result);
9365 /* Only proceed if the target mode can hold the
9367 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9368 return build_real (type, trunc_result);
9377 /* Fold a call to builtin modf.
   ARG0 is the real argument, ARG1 the pointer that receives the integral
   part, and RETTYPE the real result type.  When ARG0 is a usable
   REAL_CST and *ARG1 has the expected type, builds the pair
   (*arg1 = trunc(arg0), frac) as a COMPOUND_EXPR.
   NOTE(review): this excerpt is missing intermediate lines (early
   returns, switch on the value class, closing braces); the visible code
   is annotated as-is.  */
9380 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9382 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9387 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9390 arg1 = build_fold_indirect_ref (arg1);
9392 /* Proceed if a valid pointer type was passed in. */
9393 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9395 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9396 REAL_VALUE_TYPE trunc, frac;
9402 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9403 trunc = frac = *value;
9406 /* For +-Inf, return (*arg1 = arg0, +-0). */
9408 frac.sign = value->sign;
9412 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9413 real_trunc (&trunc, VOIDmode, value);
9414 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9415 /* If the original number was negative and already
9416 integral, then the fractional part is -0.0. */
9417 if (value->sign && frac.cl == rvc_zero)
9418 frac.sign = value->sign;
9422 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9423 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9424 build_real (rettype, trunc));
9425 TREE_SIDE_EFFECTS (arg1) = 1;
9426 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9427 build_real (rettype, frac));
9433 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9434 ARG is the argument for the call.
   BUILTIN_INDEX selects which classification is requested.  Returns a
   constant when the mode's FP semantics or a constant ARG decide the
   answer, an UNORDERED_EXPR self-comparison for the isnan fallback, or
   error_mark_node on a non-floating argument.
   NOTE(review): intermediate lines (switch opener, per-case fall-through
   breaks, gcc_unreachable default) are missing from this excerpt.  */
9437 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9439 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9442 if (!validate_arg (arg, REAL_TYPE))
9444 error ("non-floating-point argument to function %qs",
9445 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9446 return error_mark_node;
9449 switch (builtin_index)
9451 case BUILT_IN_ISINF:
9452 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9453 return omit_one_operand (type, integer_zero_node, arg);
9455 if (TREE_CODE (arg) == REAL_CST)
9457 r = TREE_REAL_CST (arg);
9458 if (real_isinf (&r))
9459 return real_compare (GT_EXPR, &r, &dconst0)
9460 ? integer_one_node : integer_minus_one_node;
9462 return integer_zero_node;
9467 case BUILT_IN_FINITE:
9468 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9469 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9470 return omit_one_operand (type, integer_one_node, arg);
9472 if (TREE_CODE (arg) == REAL_CST)
9474 r = TREE_REAL_CST (arg);
9475 return real_isinf (&r) || real_isnan (&r)
9476 ? integer_zero_node : integer_one_node;
9481 case BUILT_IN_ISNAN:
9482 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9483 return omit_one_operand (type, integer_zero_node, arg);
9485 if (TREE_CODE (arg) == REAL_CST)
9487 r = TREE_REAL_CST (arg);
9488 return real_isnan (&r) ? integer_one_node : integer_zero_node;
   /* Non-constant isnan: x != x exactly when x is a NaN; save ARG so it
      is evaluated only once.  */
9491 arg = builtin_save_expr (arg);
9492 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9499 /* Fold a call to an unordered comparison function such as
9500 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9501 being called and ARG0 and ARG1 are the arguments for the call.
9502 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9503 the opposite of the desired result. UNORDERED_CODE is used
9504 for modes that can hold NaNs and ORDERED_CODE is used for
   modes that cannot; the result is the logical negation of that
   comparison.  Returns error_mark_node if neither argument is a
   floating-point type.
   NOTE(review): some intermediate lines (variable declarations, the
   cmp_type assignments for the mixed real/integer cases) are missing
   from this excerpt.  */
9508 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9509 enum tree_code unordered_code,
9510 enum tree_code ordered_code)
9512 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9513 enum tree_code code;
9515 enum tree_code code0, code1;
9516 tree cmp_type = NULL_TREE;
9518 type0 = TREE_TYPE (arg0);
9519 type1 = TREE_TYPE (arg1);
9521 code0 = TREE_CODE (type0);
9522 code1 = TREE_CODE (type1);
9524 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9525 /* Choose the wider of two real types. */
9526 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9528 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9530 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9534 error ("non-floating-point argument to function %qs",
9535 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9536 return error_mark_node;
9539 arg0 = fold_convert (cmp_type, arg0);
9540 arg1 = fold_convert (cmp_type, arg1);
9542 if (unordered_code == UNORDERED_EXPR)
9544 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9545 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9546 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
   /* Pick the NaN-aware code only when the mode honors NaNs, then
      negate to obtain the requested predicate.  */
9549 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9551 return fold_build1 (TRUTH_NOT_EXPR, type,
9552 fold_build2 (code, type, arg0, arg1));
9555 /* Fold a call to built-in function FNDECL with 0 arguments.
9556 IGNORE is true if the result of the function call is ignored. This
9557 function returns NULL_TREE if no simplification was possible.
   Handles only the constant-producing builtins (inf/huge_val and
   classify_type); everything else falls through.
   NOTE(review): the switch opener, break statements and final return
   are missing from this excerpt.  */
9560 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9562 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9563 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9566 CASE_FLT_FN (BUILT_IN_INF):
9567 case BUILT_IN_INFD32:
9568 case BUILT_IN_INFD64:
9569 case BUILT_IN_INFD128:
9570 return fold_builtin_inf (type, true);
9572 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9573 return fold_builtin_inf (type, false);
9575 case BUILT_IN_CLASSIFY_TYPE:
9576 return fold_builtin_classify_type (NULL_TREE);
9584 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9585 IGNORE is true if the result of the function call is ignored. This
9586 function returns NULL_TREE if no simplification was possible.
   Central one-argument dispatcher: each case either delegates to a
   specialized fold_builtin_* helper or evaluates the call at compile
   time via MPFR (do_mpfr_arg1 and friends).
   NOTE(review): the switch opener, break statements and the closing
   default/return are missing from this excerpt.  */
9589 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9591 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9592 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9596 case BUILT_IN_CONSTANT_P:
9598 tree val = fold_builtin_constant_p (arg0);
9600 /* Gimplification will pull the CALL_EXPR for the builtin out of
9601 an if condition. When not optimizing, we'll not CSE it back.
9602 To avoid link error types of regressions, return false now. */
9603 if (!val && !optimize)
9604 val = integer_zero_node;
9609 case BUILT_IN_CLASSIFY_TYPE:
9610 return fold_builtin_classify_type (arg0);
9612 case BUILT_IN_STRLEN:
9613 return fold_builtin_strlen (arg0);
9615 CASE_FLT_FN (BUILT_IN_FABS):
9616 return fold_builtin_fabs (arg0, type);
9620 case BUILT_IN_LLABS:
9621 case BUILT_IN_IMAXABS:
9622 return fold_builtin_abs (arg0, type);
9624 CASE_FLT_FN (BUILT_IN_CONJ):
9625 if (validate_arg (arg0, COMPLEX_TYPE))
9626 return fold_build1 (CONJ_EXPR, type, arg0);
9629 CASE_FLT_FN (BUILT_IN_CREAL):
9630 if (validate_arg (arg0, COMPLEX_TYPE))
9631 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;  /* NOTE(review): stray ';;' -- harmless empty statement, tidy up.  */
9634 CASE_FLT_FN (BUILT_IN_CIMAG):
9635 if (validate_arg (arg0, COMPLEX_TYPE))
9636 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9639 CASE_FLT_FN (BUILT_IN_CCOS):
9640 CASE_FLT_FN (BUILT_IN_CCOSH):
9641 /* These functions are "even", i.e. f(x) == f(-x). */
9642 if (validate_arg (arg0, COMPLEX_TYPE))
9644 tree narg = fold_strip_sign_ops (arg0);
9646 return build_call_expr (fndecl, 1, narg);
9650 CASE_FLT_FN (BUILT_IN_CABS):
9651 return fold_builtin_cabs (arg0, type, fndecl);
9653 CASE_FLT_FN (BUILT_IN_CARG):
9654 return fold_builtin_carg (arg0, type);
9656 CASE_FLT_FN (BUILT_IN_SQRT):
9657 return fold_builtin_sqrt (arg0, type);
9659 CASE_FLT_FN (BUILT_IN_CBRT):
9660 return fold_builtin_cbrt (arg0, type);
9662 CASE_FLT_FN (BUILT_IN_ASIN):
9663 if (validate_arg (arg0, REAL_TYPE))
9664 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9665 &dconstm1, &dconst1, true);
9668 CASE_FLT_FN (BUILT_IN_ACOS):
9669 if (validate_arg (arg0, REAL_TYPE))
9670 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9671 &dconstm1, &dconst1, true);
9674 CASE_FLT_FN (BUILT_IN_ATAN):
9675 if (validate_arg (arg0, REAL_TYPE))
9676 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9679 CASE_FLT_FN (BUILT_IN_ASINH):
9680 if (validate_arg (arg0, REAL_TYPE))
9681 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9684 CASE_FLT_FN (BUILT_IN_ACOSH):
9685 if (validate_arg (arg0, REAL_TYPE))
9686 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9687 &dconst1, NULL, true);
9690 CASE_FLT_FN (BUILT_IN_ATANH):
9691 if (validate_arg (arg0, REAL_TYPE))
9692 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9693 &dconstm1, &dconst1, false);
9696 CASE_FLT_FN (BUILT_IN_SIN):
9697 if (validate_arg (arg0, REAL_TYPE))
9698 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9701 CASE_FLT_FN (BUILT_IN_COS):
9702 return fold_builtin_cos (arg0, type, fndecl);
9705 CASE_FLT_FN (BUILT_IN_TAN):
9706 return fold_builtin_tan (arg0, type);
9708 CASE_FLT_FN (BUILT_IN_CEXP):
9709 return fold_builtin_cexp (arg0, type);
9711 CASE_FLT_FN (BUILT_IN_CEXPI):
9712 if (validate_arg (arg0, REAL_TYPE))
9713 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9716 CASE_FLT_FN (BUILT_IN_SINH):
9717 if (validate_arg (arg0, REAL_TYPE))
9718 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9721 CASE_FLT_FN (BUILT_IN_COSH):
9722 return fold_builtin_cosh (arg0, type, fndecl);
9724 CASE_FLT_FN (BUILT_IN_TANH):
9725 if (validate_arg (arg0, REAL_TYPE))
9726 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9729 CASE_FLT_FN (BUILT_IN_ERF):
9730 if (validate_arg (arg0, REAL_TYPE))
9731 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9734 CASE_FLT_FN (BUILT_IN_ERFC):
9735 if (validate_arg (arg0, REAL_TYPE))
9736 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9739 CASE_FLT_FN (BUILT_IN_TGAMMA):
9740 if (validate_arg (arg0, REAL_TYPE))
9741 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9744 CASE_FLT_FN (BUILT_IN_EXP):
9745 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9747 CASE_FLT_FN (BUILT_IN_EXP2):
9748 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9750 CASE_FLT_FN (BUILT_IN_EXP10):
9751 CASE_FLT_FN (BUILT_IN_POW10):
9752 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9754 CASE_FLT_FN (BUILT_IN_EXPM1):
9755 if (validate_arg (arg0, REAL_TYPE))
9756 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9759 CASE_FLT_FN (BUILT_IN_LOG):
9760 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9762 CASE_FLT_FN (BUILT_IN_LOG2):
9763 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9765 CASE_FLT_FN (BUILT_IN_LOG10):
9766 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9768 CASE_FLT_FN (BUILT_IN_LOG1P):
9769 if (validate_arg (arg0, REAL_TYPE))
9770 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9771 &dconstm1, NULL, false);
   /* Bessel functions need MPFR >= 2.3.0 (mpfr_j0 etc. first appear
      there).  */
9774 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9775 CASE_FLT_FN (BUILT_IN_J0):
9776 if (validate_arg (arg0, REAL_TYPE))
9777 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9781 CASE_FLT_FN (BUILT_IN_J1):
9782 if (validate_arg (arg0, REAL_TYPE))
9783 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9787 CASE_FLT_FN (BUILT_IN_Y0):
9788 if (validate_arg (arg0, REAL_TYPE))
9789 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9790 &dconst0, NULL, false);
9793 CASE_FLT_FN (BUILT_IN_Y1):
9794 if (validate_arg (arg0, REAL_TYPE))
9795 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9796 &dconst0, NULL, false);
9800 CASE_FLT_FN (BUILT_IN_NAN):
9801 case BUILT_IN_NAND32:
9802 case BUILT_IN_NAND64:
9803 case BUILT_IN_NAND128:
9804 return fold_builtin_nan (arg0, type, true);
9806 CASE_FLT_FN (BUILT_IN_NANS):
9807 return fold_builtin_nan (arg0, type, false);
9809 CASE_FLT_FN (BUILT_IN_FLOOR):
9810 return fold_builtin_floor (fndecl, arg0);
9812 CASE_FLT_FN (BUILT_IN_CEIL):
9813 return fold_builtin_ceil (fndecl, arg0);
9815 CASE_FLT_FN (BUILT_IN_TRUNC):
9816 return fold_builtin_trunc (fndecl, arg0);
9818 CASE_FLT_FN (BUILT_IN_ROUND):
9819 return fold_builtin_round (fndecl, arg0);
9821 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9822 CASE_FLT_FN (BUILT_IN_RINT):
9823 return fold_trunc_transparent_mathfn (fndecl, arg0);
9825 CASE_FLT_FN (BUILT_IN_LCEIL):
9826 CASE_FLT_FN (BUILT_IN_LLCEIL):
9827 CASE_FLT_FN (BUILT_IN_LFLOOR):
9828 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9829 CASE_FLT_FN (BUILT_IN_LROUND):
9830 CASE_FLT_FN (BUILT_IN_LLROUND):
9831 return fold_builtin_int_roundingfn (fndecl, arg0);
9833 CASE_FLT_FN (BUILT_IN_LRINT):
9834 CASE_FLT_FN (BUILT_IN_LLRINT):
9835 return fold_fixed_mathfn (fndecl, arg0);
9837 case BUILT_IN_BSWAP32:
9838 case BUILT_IN_BSWAP64:
9839 return fold_builtin_bswap (fndecl, arg0);
9841 CASE_INT_FN (BUILT_IN_FFS):
9842 CASE_INT_FN (BUILT_IN_CLZ):
9843 CASE_INT_FN (BUILT_IN_CTZ):
9844 CASE_INT_FN (BUILT_IN_POPCOUNT):
9845 CASE_INT_FN (BUILT_IN_PARITY):
9846 return fold_builtin_bitop (fndecl, arg0);
9848 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9849 return fold_builtin_signbit (arg0, type);
9851 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9852 return fold_builtin_significand (arg0, type);
9854 CASE_FLT_FN (BUILT_IN_ILOGB):
9855 CASE_FLT_FN (BUILT_IN_LOGB):
9856 return fold_builtin_logb (arg0, type);
9858 case BUILT_IN_ISASCII:
9859 return fold_builtin_isascii (arg0);
9861 case BUILT_IN_TOASCII:
9862 return fold_builtin_toascii (arg0);
9864 case BUILT_IN_ISDIGIT:
9865 return fold_builtin_isdigit (arg0);
9867 CASE_FLT_FN (BUILT_IN_FINITE):
9868 case BUILT_IN_FINITED32:
9869 case BUILT_IN_FINITED64:
9870 case BUILT_IN_FINITED128:
9871 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9873 CASE_FLT_FN (BUILT_IN_ISINF):
9874 case BUILT_IN_ISINFD32:
9875 case BUILT_IN_ISINFD64:
9876 case BUILT_IN_ISINFD128:
9877 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9879 CASE_FLT_FN (BUILT_IN_ISNAN):
9880 case BUILT_IN_ISNAND32:
9881 case BUILT_IN_ISNAND64:
9882 case BUILT_IN_ISNAND128:
9883 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9885 case BUILT_IN_PRINTF:
9886 case BUILT_IN_PRINTF_UNLOCKED:
9887 case BUILT_IN_VPRINTF:
9888 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9898 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9899 IGNORE is true if the result of the function call is ignored. This
9900 function returns NULL_TREE if no simplification was possible.
   Two-argument dispatcher paralleling fold_builtin_1: math builtins go
   through MPFR helpers or dedicated folders; string/memory and printf
   family builtins delegate to their fold_builtin_* counterparts.
   NOTE(review): the switch opener, break statements and final return
   are missing from this excerpt.  */
9903 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
9905 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9906 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
   /* Bessel jn/yn require MPFR >= 2.3.0.  */
9910 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9911 CASE_FLT_FN (BUILT_IN_JN):
9912 if (validate_arg (arg0, INTEGER_TYPE)
9913 && validate_arg (arg1, REAL_TYPE))
9914 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9917 CASE_FLT_FN (BUILT_IN_YN):
9918 if (validate_arg (arg0, INTEGER_TYPE)
9919 && validate_arg (arg1, REAL_TYPE))
9920 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9925 CASE_FLT_FN (BUILT_IN_ATAN2):
9926 if (validate_arg (arg0, REAL_TYPE)
9927 && validate_arg(arg1, REAL_TYPE))
9928 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9931 CASE_FLT_FN (BUILT_IN_FDIM):
9932 if (validate_arg (arg0, REAL_TYPE)
9933 && validate_arg(arg1, REAL_TYPE))
9934 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9937 CASE_FLT_FN (BUILT_IN_HYPOT):
9938 return fold_builtin_hypot (fndecl, arg0, arg1, type);
9940 CASE_FLT_FN (BUILT_IN_LDEXP):
9941 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
9942 CASE_FLT_FN (BUILT_IN_SCALBN):
9943 CASE_FLT_FN (BUILT_IN_SCALBLN):
9944 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
9946 CASE_FLT_FN (BUILT_IN_FREXP):
9947 return fold_builtin_frexp (arg0, arg1, type);
9949 CASE_FLT_FN (BUILT_IN_MODF):
9950 return fold_builtin_modf (arg0, arg1, type);
9952 case BUILT_IN_BZERO:
9953 return fold_builtin_bzero (arg0, arg1, ignore);
9955 case BUILT_IN_FPUTS:
9956 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
9958 case BUILT_IN_FPUTS_UNLOCKED:
9959 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
9961 case BUILT_IN_STRSTR:
9962 return fold_builtin_strstr (arg0, arg1, type);
9964 case BUILT_IN_STRCAT:
9965 return fold_builtin_strcat (arg0, arg1);
9967 case BUILT_IN_STRSPN:
9968 return fold_builtin_strspn (arg0, arg1);
9970 case BUILT_IN_STRCSPN:
9971 return fold_builtin_strcspn (arg0, arg1);
9973 case BUILT_IN_STRCHR:
9974 case BUILT_IN_INDEX:
9975 return fold_builtin_strchr (arg0, arg1, type);
9977 case BUILT_IN_STRRCHR:
9978 case BUILT_IN_RINDEX:
9979 return fold_builtin_strrchr (arg0, arg1, type);
9981 case BUILT_IN_STRCPY:
9982 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
9984 case BUILT_IN_STRCMP:
9985 return fold_builtin_strcmp (arg0, arg1);
9987 case BUILT_IN_STRPBRK:
9988 return fold_builtin_strpbrk (arg0, arg1, type);
9990 case BUILT_IN_EXPECT:
9991 return fold_builtin_expect (arg0);
9993 CASE_FLT_FN (BUILT_IN_POW):
9994 return fold_builtin_pow (fndecl, arg0, arg1, type);
9996 CASE_FLT_FN (BUILT_IN_POWI):
9997 return fold_builtin_powi (fndecl, arg0, arg1, type);
9999 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10000 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10002 CASE_FLT_FN (BUILT_IN_FMIN):
10003 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10005 CASE_FLT_FN (BUILT_IN_FMAX):
10006 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
   /* The is* comparison macros: pass both the NaN-aware code and its
      ordered counterpart; fold_builtin_unordered_cmp negates them.  */
10008 case BUILT_IN_ISGREATER:
10009 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10010 case BUILT_IN_ISGREATEREQUAL:
10011 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10012 case BUILT_IN_ISLESS:
10013 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10014 case BUILT_IN_ISLESSEQUAL:
10015 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10016 case BUILT_IN_ISLESSGREATER:
10017 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10018 case BUILT_IN_ISUNORDERED:
10019 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10022 /* We do the folding for va_start in the expander. */
10023 case BUILT_IN_VA_START:
10026 case BUILT_IN_SPRINTF:
10027 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10029 case BUILT_IN_OBJECT_SIZE:
10030 return fold_builtin_object_size (arg0, arg1);
10032 case BUILT_IN_PRINTF:
10033 case BUILT_IN_PRINTF_UNLOCKED:
10034 case BUILT_IN_VPRINTF:
10035 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
   /* For the _chk variants the leading flag argument must be a plain
      integer constant expression before it can be dropped.  */
10037 case BUILT_IN_PRINTF_CHK:
10038 case BUILT_IN_VPRINTF_CHK:
10039 if (!validate_arg (arg0, INTEGER_TYPE)
10040 || TREE_SIDE_EFFECTS (arg0))
10043 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10046 case BUILT_IN_FPRINTF:
10047 case BUILT_IN_FPRINTF_UNLOCKED:
10048 case BUILT_IN_VFPRINTF:
10049 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10058 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10059 and ARG2. IGNORE is true if the result of the function call is ignored.
10060 This function returns NULL_TREE if no simplification was possible.
   NOTE(review): the switch opener, break statements and final return
   are missing from this excerpt.  */
10063 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10065 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10066 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10070 CASE_FLT_FN (BUILT_IN_SINCOS):
10071 return fold_builtin_sincos (arg0, arg1, arg2);
10073 CASE_FLT_FN (BUILT_IN_FMA):
10074 if (validate_arg (arg0, REAL_TYPE)
10075 && validate_arg(arg1, REAL_TYPE)
10076 && validate_arg(arg2, REAL_TYPE))
10077 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10080 case BUILT_IN_MEMSET:
10081 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
   /* bcopy(src, dst, n): note the swapped argument order relative to
      memmove, hence arg1/arg0 below; endp=3 means memmove semantics.  */
10083 case BUILT_IN_BCOPY:
10084 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10086 case BUILT_IN_MEMCPY:
10087 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10089 case BUILT_IN_MEMPCPY:
10090 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10092 case BUILT_IN_MEMMOVE:
10093 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10095 case BUILT_IN_STRNCAT:
10096 return fold_builtin_strncat (arg0, arg1, arg2);
10098 case BUILT_IN_STRNCPY:
10099 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10101 case BUILT_IN_STRNCMP:
10102 return fold_builtin_strncmp (arg0, arg1, arg2);
10104 case BUILT_IN_MEMCHR:
10105 return fold_builtin_memchr (arg0, arg1, arg2, type);
10107 case BUILT_IN_BCMP:
10108 case BUILT_IN_MEMCMP:
10109 return fold_builtin_memcmp (arg0, arg1, arg2);;  /* NOTE(review): stray ';;' -- harmless, tidy up.  */
10111 case BUILT_IN_SPRINTF:
10112 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10114 case BUILT_IN_STRCPY_CHK:
10115 case BUILT_IN_STPCPY_CHK:
10116 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10119 case BUILT_IN_STRCAT_CHK:
10120 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
   /* _chk printf variants: the leading flag must be a side-effect-free
      integer before it can be discarded.  */
10122 case BUILT_IN_PRINTF_CHK:
10123 case BUILT_IN_VPRINTF_CHK:
10124 if (!validate_arg (arg0, INTEGER_TYPE)
10125 || TREE_SIDE_EFFECTS (arg0))
10128 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10131 case BUILT_IN_FPRINTF:
10132 case BUILT_IN_FPRINTF_UNLOCKED:
10133 case BUILT_IN_VFPRINTF:
10134 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10136 case BUILT_IN_FPRINTF_CHK:
10137 case BUILT_IN_VFPRINTF_CHK:
10138 if (!validate_arg (arg1, INTEGER_TYPE)
10139 || TREE_SIDE_EFFECTS (arg1))
10142 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10151 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10152 ARG2, and ARG3. IGNORE is true if the result of the function call is
10153 ignored. This function returns NULL_TREE if no simplification was
   possible.  Only the object-size-checked (_chk) builtins take four
   fixed arguments.
   NOTE(review): the switch opener, break statements and final return
   are missing from this excerpt.  */
10157 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10160 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10164 case BUILT_IN_MEMCPY_CHK:
10165 case BUILT_IN_MEMPCPY_CHK:
10166 case BUILT_IN_MEMMOVE_CHK:
10167 case BUILT_IN_MEMSET_CHK:
10168 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10170 DECL_FUNCTION_CODE (fndecl));
10172 case BUILT_IN_STRNCPY_CHK:
10173 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10175 case BUILT_IN_STRNCAT_CHK:
10176 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10178 case BUILT_IN_FPRINTF_CHK:
10179 case BUILT_IN_VFPRINTF_CHK:
10180 if (!validate_arg (arg1, INTEGER_TYPE)
10181 || TREE_SIDE_EFFECTS (arg1))
10184 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10194 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10195 arguments, where NARGS <= 4. IGNORE is true if the result of the
10196 function call is ignored. This function returns NULL_TREE if no
10197 simplification was possible. Note that this only folds builtins with
10198 fixed argument patterns. Foldings that do varargs-to-varargs
10199 transformations, or that match calls with more than 4 arguments,
10200 need to be handled with fold_builtin_varargs instead. */
10202 #define MAX_ARGS_TO_FOLD_BUILTIN 4
   /* Dispatch on NARGS to the fixed-arity folders above; on success the
      result is wrapped in a NOP_EXPR with TREE_NO_WARNING set so later
      passes do not warn about the synthesized expression.
      NOTE(review): the switch statement and final return are missing
      from this excerpt.  */
10205 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10207 tree ret = NULL_TREE;
10211 ret = fold_builtin_0 (fndecl, ignore);
10214 ret = fold_builtin_1 (fndecl, args[0], ignore);
10217 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10220 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10223 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10231 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10232 TREE_NO_WARNING (ret) = 1;
10238 /* Builtins with folding operations that operate on "..." arguments
10239 need special handling; we need to store the arguments in a convenient
10240 data structure before attempting any folding. Fortunately there are
10241 only a few builtins that fall into this category. FNDECL is the
10242 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10243 result of the function call is ignored.
   Like fold_builtin_n, a successful fold is wrapped in a no-warning
   NOP_EXPR.  Returns NULL_TREE when nothing folded.
   NOTE(review): switch opener, breaks and final return are missing
   from this excerpt.  */
10246 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10248 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10249 tree ret = NULL_TREE;
10253 case BUILT_IN_SPRINTF_CHK:
10254 case BUILT_IN_VSPRINTF_CHK:
10255 ret = fold_builtin_sprintf_chk (exp, fcode);
10258 case BUILT_IN_SNPRINTF_CHK:
10259 case BUILT_IN_VSNPRINTF_CHK:
10260 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10267 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10268 TREE_NO_WARNING (ret) = 1;
10274 /* A wrapper function for builtin folding that prevents warnings for
10275 "statement without effect" and the like, caused by removing the
10276 call node earlier than the warning is generated.
   EXP is a CALL_EXPR; IGNORE is true if its value is unused.  Routes
   machine-dependent builtins to the target hook, fixed-arity calls to
   fold_builtin_n, and the rest to fold_builtin_varargs, then copies
   EXP's source location onto the folded expression.  */
10279 fold_call_expr (tree exp, bool ignore)
10281 tree ret = NULL_TREE;
10282 tree fndecl = get_callee_fndecl (exp);
10284 && TREE_CODE (fndecl) == FUNCTION_DECL
10285 && DECL_BUILT_IN (fndecl))
10287 /* FIXME: Don't use a list in this interface. */
10288 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10289 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10292 int nargs = call_expr_nargs (exp);
10293 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10295 tree *args = CALL_EXPR_ARGP (exp);
10296 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10299 ret = fold_builtin_varargs (fndecl, exp, ignore);
10302 /* Propagate location information from original call to
10303 expansion of builtin. Otherwise things like
10304 maybe_emit_chk_warning, that operate on the expansion
10305 of a builtin, will use the wrong location information. */
10306 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10308 tree realret = ret;
   /* Look through the no-warning NOP_EXPR the folders may have added.  */
10309 if (TREE_CODE (ret) == NOP_EXPR)
10310 realret = TREE_OPERAND (ret, 0);
10311 if (CAN_HAVE_LOCATION_P (realret)
10312 && !EXPR_HAS_LOCATION (realret))
10313 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10322 /* Conveniently construct a function call expression. FNDECL names the
10323 function to be called and ARGLIST is a TREE_LIST of arguments.
   Flattens the TREE_LIST into a stack-allocated array and delegates to
   fold_builtin_call_array, so the returned call may already be folded.  */
10326 build_function_call_expr (tree fndecl, tree arglist)
10328 tree fntype = TREE_TYPE (fndecl);
10329 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10330 int n = list_length (arglist);
10331 tree *argarray = (tree *) alloca (n * sizeof (tree));
10334 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10335 argarray[i] = TREE_VALUE (arglist);
10336 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10339 /* Conveniently construct a function call expression. FNDECL names the
10340 function to be called, N is the number of arguments, and the "..."
10341 parameters are the argument expressions.
   Collects the variadic arguments into an array and hands off to
   fold_builtin_call_array; the result may therefore be a folded form
   rather than a literal CALL_EXPR.
   NOTE(review): the va_start/va_end lines are missing from this
   excerpt but va_arg below implies they exist in the full source.  */
10344 build_call_expr (tree fndecl, int n, ...)
10347 tree fntype = TREE_TYPE (fndecl);
10348 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10349 tree *argarray = (tree *) alloca (n * sizeof (tree));
10353 for (i = 0; i < n; i++)
10354 argarray[i] = va_arg (ap, tree);
10356 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10359 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10360 N arguments are passed in the array ARGARRAY.
   Attempts to fold the call first (target hook for machine-dependent
   builtins, fold_builtin_n for fixed-arity ones, fold_builtin_varargs
   otherwise); only if folding fails is a plain CALL_EXPR built.  */
10363 fold_builtin_call_array (tree type,
10368 tree ret = NULL_TREE;
10372 if (TREE_CODE (fn) == ADDR_EXPR)
10374 tree fndecl = TREE_OPERAND (fn, 0);
10375 if (TREE_CODE (fndecl) == FUNCTION_DECL
10376 && DECL_BUILT_IN (fndecl))
10378 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
   /* Target hook still takes a TREE_LIST; build one back-to-front.  */
10380 tree arglist = NULL_TREE;
10381 for (i = n - 1; i >= 0; i--)
10382 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10383 ret = targetm.fold_builtin (fndecl, arglist, false);
10387 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10389 /* First try the transformations that don't require consing up
   an exp.  */
10391 ret = fold_builtin_n (fndecl, argarray, n, false);
10396 /* If we got this far, we need to build an exp. */
10397 exp = build_call_array (type, fn, n, argarray);
10398 ret = fold_builtin_varargs (fndecl, exp, false);
10399 return ret ? ret : exp;
10403 return build_call_array (type, fn, n, argarray);
10406 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10407 along with N new arguments specified as the "..." parameters. SKIP
10408 is the number of arguments in EXP to be omitted. This function is used
10409 to do varargs-to-varargs transformations.
   The new call's arguments are the N variadic trees followed by EXP's
   arguments from index SKIP onward; when N is zero the existing
   argument storage is reused directly (the else branch below).
   NOTE(review): the surrounding if/else and va_start/va_end lines are
   missing from this excerpt.  */
10412 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10414 int oldnargs = call_expr_nargs (exp);
10415 int nargs = oldnargs - skip + n;
10416 tree fntype = TREE_TYPE (fndecl);
10417 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10425 buffer = alloca (nargs * sizeof (tree));
10427 for (i = 0; i < n; i++)
10428 buffer[i] = va_arg (ap, tree);
10430 for (j = skip; j < oldnargs; j++, i++)
10431 buffer[i] = CALL_EXPR_ARG (exp, j)
10434 buffer = CALL_EXPR_ARGP (exp) + skip;
10436 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10439 /* Validate a single argument ARG against a tree code CODE representing
   a type class.  POINTER_TYPE accepts any pointer-ish type via
   POINTER_TYPE_P; otherwise the argument's type code must match CODE
   exactly.
   NOTE(review): the opening brace and at least one earlier clause
   (likely a null-ARG check) are missing from this excerpt.  */
10443 validate_arg (tree arg, enum tree_code code)
10447 else if (code == POINTER_TYPE)
10448 return POINTER_TYPE_P (TREE_TYPE (arg));
10449 return code == TREE_CODE (TREE_TYPE (arg));
10452 /* This function validates the types of a function call argument list
10453 against a specified list of tree_codes. If the last specifier is a 0,
10454 that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  Returns true when CALLEXPR's arguments match the
   variadic specifier list.
   NOTE(review): the switch on CODE and the goto labels referenced by
   the trailing comment are missing from this excerpt.  */
10458 validate_arglist (tree callexpr, ...)
10460 enum tree_code code;
10463 call_expr_arg_iterator iter;
10466 va_start (ap, callexpr);
10467 init_call_expr_arg_iterator (callexpr, &iter);
10471 code = va_arg (ap, enum tree_code);
10475 /* This signifies an ellipses, any further arguments are all ok. */
10479 /* This signifies an endlink, if no arguments remain, return
10480 true, otherwise return false. */
10481 res = !more_call_expr_args_p (&iter);
10484 /* If no parameters remain or the parameter's code does not
10485 match the specified code, return false. Otherwise continue
10486 checking any remaining arguments. */
10487 arg = next_call_expr_arg (&iter);
10488 if (!validate_arg (arg, code))
10495 /* We need gotos here since we can only have one VA_CLOSE in a
   function.  */
10503 /* Default target-specific builtin expander that does nothing.
   Used as the fallback for targetm.expand_builtin; presumably returns
   NULL_RTX (the return statement is not visible in this excerpt).  */
10506 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10507 rtx target ATTRIBUTE_UNUSED,
10508 rtx subtarget ATTRIBUTE_UNUSED,
10509 enum machine_mode mode ATTRIBUTE_UNUSED,
10510 int ignore ATTRIBUTE_UNUSED)
10515 /* Returns true is EXP represents data that would potentially reside
10516 in a readonly section.
   EXP must be an ADDR_EXPR; the decision is delegated to
   decl_readonly_section for the kinds of bases it can judge safely.  */
10519 readonly_data_expr (tree exp)
10523 if (TREE_CODE (exp) != ADDR_EXPR)
10526 exp = get_base_address (TREE_OPERAND (exp, 0));
10530 /* Make sure we call decl_readonly_section only for trees it
10531 can handle (since it returns true for everything it doesn't
   understand).  */
10533 if (TREE_CODE (exp) == STRING_CST
10534 || TREE_CODE (exp) == CONSTRUCTOR
10535 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10536 return decl_readonly_section (exp, 0);
10541 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10542 to the call, and TYPE is its return type.
10544 Return NULL_TREE if no simplification was possible, otherwise return the
10545 simplified form of the call as a tree.
10547 The simplified form may be a constant or other expression which
10548 computes the same value, but in a more efficient manner (including
10549 calls to other builtin functions).
10551 The call may contain arguments which need to be evaluated, but
10552 which are not useful to determine the result of the call. In
10553 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10554 COMPOUND_EXPR will be an argument which must be evaluated.
10555 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10556 COMPOUND_EXPR in the chain will contain the tree for the simplified
10557 form of the builtin function call.
   NOTE(review): several intermediate lines (the both-constant strstr
   branch's brace structure, the single-character check before the
   strchr rewrite) are missing from this excerpt.  */
10560 fold_builtin_strstr (tree s1, tree s2, tree type)
10562 if (!validate_arg (s1, POINTER_TYPE)
10563 || !validate_arg (s2, POINTER_TYPE))
10568 const char *p1, *p2;
10570 p2 = c_getstr (s2);
10574 p1 = c_getstr (s1);
   /* Both strings constant: evaluate strstr at compile time.  */
10577 const char *r = strstr (p1, p2);
10581 return build_int_cst (TREE_TYPE (s1), 0);
10583 /* Return an offset into the constant string argument. */
10584 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10585 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10586 return fold_convert (type, tem);
10589 /* The argument is const char *, and the result is char *, so we need
10590 a type conversion here to avoid a warning. */
10592 return fold_convert (type, s1);
10597 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10601 /* New argument list transforming strstr(s1, s2) to
10602 strchr(s1, s2[0]). */
10603 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10607 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10608 the call, and TYPE is its return type.
10610 Return NULL_TREE if no simplification was possible, otherwise return the
10611 simplified form of the call as a tree.
10613 The simplified form may be a constant or other expression which
10614 computes the same value, but in a more efficient manner (including
10615 calls to other builtin functions).
10617 The call may contain arguments which need to be evaluated, but
10618 which are not useful to determine the result of the call. In
10619 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10620 COMPOUND_EXPR will be an argument which must be evaluated.
10621 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10622 COMPOUND_EXPR in the chain will contain the tree for the simplified
10623 form of the builtin function call. */
10626 fold_builtin_strchr (tree s1, tree s2, tree type)
10628 if (!validate_arg (s1, POINTER_TYPE)
10629 || !validate_arg (s2, INTEGER_TYPE))
10635 if (TREE_CODE (s2) != INTEGER_CST)
10638 p1 = c_getstr (s1);
10645 if (target_char_cast (s2, &c))
10648 r = strchr (p1, c);
10651 return build_int_cst (TREE_TYPE (s1), 0);
10653 /* Return an offset into the constant string argument. */
10654 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10655 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10656 return fold_convert (type, tem);
10662 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10663 the call, and TYPE is its return type.
10665 Return NULL_TREE if no simplification was possible, otherwise return the
10666 simplified form of the call as a tree.
10668 The simplified form may be a constant or other expression which
10669 computes the same value, but in a more efficient manner (including
10670 calls to other builtin functions).
10672 The call may contain arguments which need to be evaluated, but
10673 which are not useful to determine the result of the call. In
10674 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10675 COMPOUND_EXPR will be an argument which must be evaluated.
10676 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10677 COMPOUND_EXPR in the chain will contain the tree for the simplified
10678 form of the builtin function call. */
10681 fold_builtin_strrchr (tree s1, tree s2, tree type)
10683 if (!validate_arg (s1, POINTER_TYPE)
10684 || !validate_arg (s2, INTEGER_TYPE))
10691 if (TREE_CODE (s2) != INTEGER_CST)
10694 p1 = c_getstr (s1);
10701 if (target_char_cast (s2, &c))
10704 r = strrchr (p1, c);
10707 return build_int_cst (TREE_TYPE (s1), 0);
10709 /* Return an offset into the constant string argument. */
10710 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10711 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10712 return fold_convert (type, tem);
10715 if (! integer_zerop (s2))
10718 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10722 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10723 return build_call_expr (fn, 2, s1, s2);
10727 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10728 to the call, and TYPE is its return type.
10730 Return NULL_TREE if no simplification was possible, otherwise return the
10731 simplified form of the call as a tree.
10733 The simplified form may be a constant or other expression which
10734 computes the same value, but in a more efficient manner (including
10735 calls to other builtin functions).
10737 The call may contain arguments which need to be evaluated, but
10738 which are not useful to determine the result of the call. In
10739 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10740 COMPOUND_EXPR will be an argument which must be evaluated.
10741 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10742 COMPOUND_EXPR in the chain will contain the tree for the simplified
10743 form of the builtin function call. */
10746 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10748 if (!validate_arg (s1, POINTER_TYPE)
10749 || !validate_arg (s2, POINTER_TYPE))
10754 const char *p1, *p2;
10756 p2 = c_getstr (s2);
10760 p1 = c_getstr (s1);
10763 const char *r = strpbrk (p1, p2);
10767 return build_int_cst (TREE_TYPE (s1), 0);
10769 /* Return an offset into the constant string argument. */
10770 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10771 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10772 return fold_convert (type, tem);
10776 /* strpbrk(x, "") == NULL.
10777 Evaluate and ignore s1 in case it had side-effects. */
10778 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10781 return NULL_TREE; /* Really call strpbrk. */
10783 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10787 /* New argument list transforming strpbrk(s1, s2) to
10788 strchr(s1, s2[0]). */
10789 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10793 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10796 Return NULL_TREE if no simplification was possible, otherwise return the
10797 simplified form of the call as a tree.
10799 The simplified form may be a constant or other expression which
10800 computes the same value, but in a more efficient manner (including
10801 calls to other builtin functions).
10803 The call may contain arguments which need to be evaluated, but
10804 which are not useful to determine the result of the call. In
10805 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10806 COMPOUND_EXPR will be an argument which must be evaluated.
10807 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10808 COMPOUND_EXPR in the chain will contain the tree for the simplified
10809 form of the builtin function call. */
10812 fold_builtin_strcat (tree dst, tree src)
10814 if (!validate_arg (dst, POINTER_TYPE)
10815 || !validate_arg (src, POINTER_TYPE))
10819 const char *p = c_getstr (src);
10821 /* If the string length is zero, return the dst parameter. */
10822 if (p && *p == '\0')
10829 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10830 arguments to the call.
10832 Return NULL_TREE if no simplification was possible, otherwise return the
10833 simplified form of the call as a tree.
10835 The simplified form may be a constant or other expression which
10836 computes the same value, but in a more efficient manner (including
10837 calls to other builtin functions).
10839 The call may contain arguments which need to be evaluated, but
10840 which are not useful to determine the result of the call. In
10841 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10842 COMPOUND_EXPR will be an argument which must be evaluated.
10843 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10844 COMPOUND_EXPR in the chain will contain the tree for the simplified
10845 form of the builtin function call. */
10848 fold_builtin_strncat (tree dst, tree src, tree len)
10850 if (!validate_arg (dst, POINTER_TYPE)
10851 || !validate_arg (src, POINTER_TYPE)
10852 || !validate_arg (len, INTEGER_TYPE))
10856 const char *p = c_getstr (src);
10858 /* If the requested length is zero, or the src parameter string
10859 length is zero, return the dst parameter. */
10860 if (integer_zerop (len) || (p && *p == '\0'))
10861 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10863 /* If the requested len is greater than or equal to the string
10864 length, call strcat. */
10865 if (TREE_CODE (len) == INTEGER_CST && p
10866 && compare_tree_int (len, strlen (p)) >= 0)
10868 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10870 /* If the replacement _DECL isn't initialized, don't do the
10875 return build_call_expr (fn, 2, dst, src);
10881 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10884 Return NULL_TREE if no simplification was possible, otherwise return the
10885 simplified form of the call as a tree.
10887 The simplified form may be a constant or other expression which
10888 computes the same value, but in a more efficient manner (including
10889 calls to other builtin functions).
10891 The call may contain arguments which need to be evaluated, but
10892 which are not useful to determine the result of the call. In
10893 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10894 COMPOUND_EXPR will be an argument which must be evaluated.
10895 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10896 COMPOUND_EXPR in the chain will contain the tree for the simplified
10897 form of the builtin function call. */
10900 fold_builtin_strspn (tree s1, tree s2)
10902 if (!validate_arg (s1, POINTER_TYPE)
10903 || !validate_arg (s2, POINTER_TYPE))
10907 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10909 /* If both arguments are constants, evaluate at compile-time. */
10912 const size_t r = strspn (p1, p2);
10913 return size_int (r);
10916 /* If either argument is "", return NULL_TREE. */
10917 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10918 /* Evaluate and ignore both arguments in case either one has
10920 return omit_two_operands (integer_type_node, integer_zero_node,
10926 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10929 Return NULL_TREE if no simplification was possible, otherwise return the
10930 simplified form of the call as a tree.
10932 The simplified form may be a constant or other expression which
10933 computes the same value, but in a more efficient manner (including
10934 calls to other builtin functions).
10936 The call may contain arguments which need to be evaluated, but
10937 which are not useful to determine the result of the call. In
10938 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10939 COMPOUND_EXPR will be an argument which must be evaluated.
10940 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10941 COMPOUND_EXPR in the chain will contain the tree for the simplified
10942 form of the builtin function call. */
10945 fold_builtin_strcspn (tree s1, tree s2)
10947 if (!validate_arg (s1, POINTER_TYPE)
10948 || !validate_arg (s2, POINTER_TYPE))
10952 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10954 /* If both arguments are constants, evaluate at compile-time. */
10957 const size_t r = strcspn (p1, p2);
10958 return size_int (r);
10961 /* If the first argument is "", return NULL_TREE. */
10962 if (p1 && *p1 == '\0')
10964 /* Evaluate and ignore argument s2 in case it has
10966 return omit_one_operand (integer_type_node,
10967 integer_zero_node, s2);
10970 /* If the second argument is "", return __builtin_strlen(s1). */
10971 if (p2 && *p2 == '\0')
10973 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
10975 /* If the replacement _DECL isn't initialized, don't do the
10980 return build_call_expr (fn, 1, s1);
10986 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
10987 to the call. IGNORE is true if the value returned
10988 by the builtin will be ignored. UNLOCKED is true is true if this
10989 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
10990 the known length of the string. Return NULL_TREE if no simplification
10994 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
10996 /* If we're using an unlocked function, assume the other unlocked
10997 functions exist explicitly. */
10998 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
10999 : implicit_built_in_decls[BUILT_IN_FPUTC];
11000 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11001 : implicit_built_in_decls[BUILT_IN_FWRITE];
11003 /* If the return value is used, don't do the transformation. */
11007 /* Verify the arguments in the original call. */
11008 if (!validate_arg (arg0, POINTER_TYPE)
11009 || !validate_arg (arg1, POINTER_TYPE))
11013 len = c_strlen (arg0, 0);
11015 /* Get the length of the string passed to fputs. If the length
11016 can't be determined, punt. */
11018 || TREE_CODE (len) != INTEGER_CST)
11021 switch (compare_tree_int (len, 1))
11023 case -1: /* length is 0, delete the call entirely . */
11024 return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
11026 case 0: /* length is 1, call fputc. */
11028 const char *p = c_getstr (arg0);
11033 return build_call_expr (fn_fputc, 2,
11034 build_int_cst (NULL_TREE, p[0]), arg1);
11040 case 1: /* length is greater than 1, call fwrite. */
11042 /* If optimizing for size keep fputs. */
11045 /* New argument list transforming fputs(string, stream) to
11046 fwrite(string, 1, len, stream). */
11048 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11053 gcc_unreachable ();
11058 /* Fold the next_arg or va_start call EXP.  Returns true if there was an error
11059    produced.  False otherwise.  This is done so that we don't output the error
11060    or warning twice or three times.  */
11062 fold_builtin_next_arg (tree exp, bool va_start_p)
11064   tree fntype = TREE_TYPE (current_function_decl);
11065   int nargs = call_expr_nargs (exp);
/* Reject va_start in a function whose parameter list has no variadic
   part (empty arg list, or last parameter type is void).  */
11068   if (TYPE_ARG_TYPES (fntype) == 0
11069       || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11070 	  == void_type_node))
11072       error ("%<va_start%> used in function with fixed args");
/* va_start must be called with exactly two arguments (ap and the last
   named parameter); the second one is inspected below.  */
11078       if (va_start_p && (nargs != 2))
11080 	  error ("wrong number of arguments to function %<va_start%>");
11083       arg = CALL_EXPR_ARG (exp, 1);
11085   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11086      when we checked the arguments and if needed issued a warning.  */
11091 	  /* Evidently an out of date version of <stdarg.h>; can't validate
11092 	     va_start's second argument, but can still work as intended.  */
11093 	  warning (0, "%<__builtin_next_arg%> called without an argument");
11096       else if (nargs > 1)
11098 	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
11101       arg = CALL_EXPR_ARG (exp, 0);
11104   /* We destructively modify the call to be __builtin_va_start (ap, 0)
11105      or __builtin_next_arg (0) the first time we see it, after checking
11106      the arguments and if needed issuing a warning.  */
11107   if (!integer_zerop (arg))
11109       tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11111       /* Strip off all nops for the sake of the comparison.  This
11112 	 is not quite the same as STRIP_NOPS.  It does more.
11113 	 We must also strip off INDIRECT_EXPR for C++ reference
11115       while (TREE_CODE (arg) == NOP_EXPR
11116 	     || TREE_CODE (arg) == CONVERT_EXPR
11117 	     || TREE_CODE (arg) == NON_LVALUE_EXPR
11118 	     || TREE_CODE (arg) == INDIRECT_REF)
11119 	arg = TREE_OPERAND (arg, 0);
11120       if (arg != last_parm)
11122 	  /* FIXME: Sometimes with the tree optimizers we can get the
11123 	     not the last argument even though the user used the last
11124 	     argument.  We just warn and set the arg to be the last
11125 	     argument so that we will get wrong-code because of
11127 	  warning (0, "second parameter of %<va_start%> not last named argument");
11129       /* We want to verify the second parameter just once before the tree
11130 	 optimizers are run and then avoid keeping it in the tree,
11131 	 as otherwise we could warn even for correct code like:
11132 	 void foo (int i, ...)
11133 	 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
/* Replace the checked argument with literal zero so later passes never
   re-inspect (and re-warn about) it.  */
11135 	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11137 	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11143 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11144 ORIG may be null if this is a 2-argument call. We don't attempt to
11145 simplify calls with more than 3 arguments.
11147 Return NULL_TREE if no simplification was possible, otherwise return the
11148 simplified form of the call as a tree. If IGNORED is true, it means that
11149 the caller does not use the returned value of the function. */
11152 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11155 const char *fmt_str = NULL;
11157 /* Verify the required arguments in the original call. We deal with two
11158 types of sprintf() calls: 'sprintf (str, fmt)' and
11159 'sprintf (dest, "%s", orig)'. */
11160 if (!validate_arg (dest, POINTER_TYPE)
11161 || !validate_arg (fmt, POINTER_TYPE))
11163 if (orig && !validate_arg (orig, POINTER_TYPE))
11166 /* Check whether the format is a literal string constant. */
11167 fmt_str = c_getstr (fmt);
11168 if (fmt_str == NULL)
11172 retval = NULL_TREE;
11174 if (!init_target_chars ())
11177 /* If the format doesn't contain % args or %%, use strcpy. */
11178 if (strchr (fmt_str, target_percent) == NULL)
11180 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11185 /* Don't optimize sprintf (buf, "abc", ptr++). */
11189 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11190 'format' is known to contain no % formats. */
11191 call = build_call_expr (fn, 2, dest, fmt);
11193 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11196 /* If the format is "%s", use strcpy if the result isn't used. */
11197 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11200 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11205 /* Don't crash on sprintf (str1, "%s"). */
11209 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11212 retval = c_strlen (orig, 1);
11213 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11216 call = build_call_expr (fn, 2, dest, orig);
11219 if (call && retval)
11221 retval = fold_convert
11222 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11224 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11230 /* Expand a call EXP to __builtin_object_size. */
11233 expand_builtin_object_size (tree exp)
11236 int object_size_type;
11237 tree fndecl = get_callee_fndecl (exp);
11238 location_t locus = EXPR_LOCATION (exp);
11240 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11242 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11244 expand_builtin_trap ();
11248 ost = CALL_EXPR_ARG (exp, 1);
11251 if (TREE_CODE (ost) != INTEGER_CST
11252 || tree_int_cst_sgn (ost) < 0
11253 || compare_tree_int (ost, 3) > 0)
11255 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11257 expand_builtin_trap ();
11261 object_size_type = tree_low_cst (ost, 0);
11263 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11266 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11267    FCODE is the BUILT_IN_* to use.
11268    Return NULL_RTX if we failed; the caller should emit a normal call,
11269    otherwise try to get the result in TARGET, if convenient (and in
11270    mode MODE if that's convenient).  */
11273 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11274 			   enum built_in_function fcode)
11276   tree dest, src, len, size;
/* Arg 1 is the fill value (an int) for memset_chk, a pointer otherwise.  */
11278   if (!validate_arglist (exp,
11280 			 fcode == BUILT_IN_MEMSET_CHK
11281 			 ? INTEGER_TYPE : POINTER_TYPE,
11282 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11285   dest = CALL_EXPR_ARG (exp, 0);
11286   src = CALL_EXPR_ARG (exp, 1);
11287   len = CALL_EXPR_ARG (exp, 2);
11288   size = CALL_EXPR_ARG (exp, 3);
/* SIZE must be a known constant object size to reason about overflow.  */
11290   if (! host_integerp (size, 1))
11293   if (host_integerp (len, 1) || integer_all_onesp (size))
/* A constant LEN larger than the known object size is a guaranteed
   overflow: warn, and fall back to the checking call at runtime.  */
11297       if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11299 	  location_t locus = EXPR_LOCATION (exp);
11300 	  warning (0, "%Hcall to %D will always overflow destination buffer",
11301 		   &locus, get_callee_fndecl (exp));
11306       /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11307 	 mem{cpy,pcpy,move,set} is available.  */
/* The check proved safe: drop the _chk suffix and expand the plain call.  */
11310 	case BUILT_IN_MEMCPY_CHK:
11311 	  fn = built_in_decls[BUILT_IN_MEMCPY];
11313 	case BUILT_IN_MEMPCPY_CHK:
11314 	  fn = built_in_decls[BUILT_IN_MEMPCPY];
11316 	case BUILT_IN_MEMMOVE_CHK:
11317 	  fn = built_in_decls[BUILT_IN_MEMMOVE];
11319 	case BUILT_IN_MEMSET_CHK:
11320 	  fn = built_in_decls[BUILT_IN_MEMSET];
/* Propagate tail-call markings onto the replacement call before
   expanding it.  */
11329       fn = build_call_expr (fn, 3, dest, src, len);
11330       if (TREE_CODE (fn) == CALL_EXPR)
11331 	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11332       return expand_expr (fn, target, mode, EXPAND_NORMAL);
11334   else if (fcode == BUILT_IN_MEMSET_CHK)
11338       unsigned int dest_align
11339 	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11341       /* If DEST is not a pointer type, call the normal function.  */
11342       if (dest_align == 0)
11345       /* If SRC and DEST are the same (and not volatile), do nothing.  */
11346       if (operand_equal_p (src, dest, 0))
11350 	  if (fcode != BUILT_IN_MEMPCPY_CHK)
11352 	      /* Evaluate and ignore LEN in case it has side-effects.  */
11353 	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11354 	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN rather than DEST.  */
11357 	  len = fold_convert (TREE_TYPE (dest), len);
11358 	  expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
11359 	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
11362       /* __memmove_chk special case.  */
11363       if (fcode == BUILT_IN_MEMMOVE_CHK)
11365 	  unsigned int src_align
11366 	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11368 	  if (src_align == 0)
11371 	  /* If src is categorized for a readonly section we can use
11372 	     normal __memcpy_chk.  */
11373 	  if (readonly_data_expr (src))
11375 	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11378 	      fn = build_call_expr (fn, 4, dest, src, len, size);
11379 	      if (TREE_CODE (fn) == CALL_EXPR)
11380 		CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11381 	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
11388 /* Emit warning if a buffer overflow is detected at compile time.  */
11391 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
/* Each _chk builtin keeps its source-length and object-size operands at
   different argument positions; pick them out per function code.  */
11399     case BUILT_IN_STRCPY_CHK:
11400     case BUILT_IN_STPCPY_CHK:
11401     /* For __strcat_chk the warning will be emitted only if overflowing
11402        by at least strlen (dest) + 1 bytes.  */
11403     case BUILT_IN_STRCAT_CHK:
11404       len = CALL_EXPR_ARG (exp, 1);
11405       size = CALL_EXPR_ARG (exp, 2);
11408     case BUILT_IN_STRNCAT_CHK:
11409     case BUILT_IN_STRNCPY_CHK:
11410       len = CALL_EXPR_ARG (exp, 2);
11411       size = CALL_EXPR_ARG (exp, 3);
11413     case BUILT_IN_SNPRINTF_CHK:
11414     case BUILT_IN_VSNPRINTF_CHK:
11415       len = CALL_EXPR_ARG (exp, 1);
11416       size = CALL_EXPR_ARG (exp, 3);
11419       gcc_unreachable ();
/* An unknown object size, or the (size_t)-1 "unlimited" sentinel,
   gives nothing to check against.  */
11425   if (! host_integerp (size, 1) || integer_all_onesp (size))
/* For the strcpy-family, LEN here is the source string; use its
   compile-time length if it is known.  */
11430       len = c_strlen (len, 1);
11431       if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11434   else if (fcode == BUILT_IN_STRNCAT_CHK)
11436       tree src = CALL_EXPR_ARG (exp, 1);
11437       if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11439       src = c_strlen (src, 1);
11440       if (! src || ! host_integerp (src, 1))
/* Source length unknown but the bound exceeds the buffer: the call
   only *might* overflow, so the warning is softened accordingly.  */
11442 	  locus = EXPR_LOCATION (exp);
11443 	  warning (0, "%Hcall to %D might overflow destination buffer",
11444 		   &locus, get_callee_fndecl (exp));
11447       else if (tree_int_cst_lt (src, size))
11450   else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11453   locus = EXPR_LOCATION (exp);
11454   warning (0, "%Hcall to %D will always overflow destination buffer",
11455 	   &locus, get_callee_fndecl (exp));
11458 /* Emit warning if a buffer overflow is detected at compile time
11459    in __sprintf_chk/__vsprintf_chk calls.  */
11462 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11464   tree dest, size, len, fmt, flag;
11465   const char *fmt_str;
11466   int nargs = call_expr_nargs (exp);
11468   /* Verify the required arguments in the original call.  */
/* __sprintf_chk argument layout: dest, flag, object size, format, ...  */
11472   dest = CALL_EXPR_ARG (exp, 0);
11473   flag = CALL_EXPR_ARG (exp, 1);
11474   size = CALL_EXPR_ARG (exp, 2);
11475   fmt = CALL_EXPR_ARG (exp, 3);
/* No check possible when the object size is unknown or "unlimited".  */
11477   if (! host_integerp (size, 1) || integer_all_onesp (size))
11480   /* Check whether the format is a literal string constant.  */
11481   fmt_str = c_getstr (fmt);
11482   if (fmt_str == NULL)
11485   if (!init_target_chars ())
11488   /* If the format doesn't contain % args or %%, we know its size.  */
11489   if (strchr (fmt_str, target_percent) == 0)
11490     len = build_int_cstu (size_type_node, strlen (fmt_str));
11491   /* If the format is "%s" and first ... argument is a string literal,
11493   else if (fcode == BUILT_IN_SPRINTF_CHK
11494 	   && strcmp (fmt_str, target_percent_s) == 0)
/* The "%s" argument follows the format at position 4.  */
11500       arg = CALL_EXPR_ARG (exp, 4);
11501       if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11504       len = c_strlen (arg, 1);
11505       if (!len || ! host_integerp (len, 1))
/* LEN excludes the NUL; equality with SIZE therefore still overflows,
   hence the non-strict comparison.  */
11511   if (! tree_int_cst_lt (len, size))
11513       location_t locus = EXPR_LOCATION (exp);
11514       warning (0, "%Hcall to %D will always overflow destination buffer",
11515 	       &locus, get_callee_fndecl (exp));
11519 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11523 fold_builtin_object_size (tree ptr, tree ost)
11525   tree ret = NULL_TREE;
11526   int object_size_type;
11528   if (!validate_arg (ptr, POINTER_TYPE)
11529       || !validate_arg (ost, INTEGER_TYPE)
/* OST selects the object-size "type" and must be a constant 0..3.  */
11534   if (TREE_CODE (ost) != INTEGER_CST
11535       || tree_int_cst_sgn (ost) < 0
11536       || compare_tree_int (ost, 3) > 0)
11539   object_size_type = tree_low_cst (ost, 0);
11541   /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11542      if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11543      and (size_t) 0 for types 2 and 3.  */
11544   if (TREE_SIDE_EFFECTS (ptr))
11545     return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11547   if (TREE_CODE (ptr) == ADDR_EXPR)
11548     ret = build_int_cstu (size_type_node,
11549 			compute_builtin_object_size (ptr, object_size_type));
11551   else if (TREE_CODE (ptr) == SSA_NAME)
11553       unsigned HOST_WIDE_INT bytes;
11555       /* If object size is not known yet, delay folding until
11556 	 later.  Maybe subsequent passes will help determining
11558       bytes = compute_builtin_object_size (ptr, object_size_type);
/* Only fold when the result differs from the "unknown" sentinel for
   this object-size type (elided comparison operand).  */
11559       if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11561 	ret = build_int_cstu (size_type_node, bytes);
/* NOTE(review): elided tail appears to range-check RET against
   size_type_node via fit_double_type -- confirm against full source.  */
11566       unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11567       HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11568       if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11575 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11576    DEST, SRC, LEN, and SIZE are the arguments to the call.
11577    IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
11578    code of the builtin.  If MAXLEN is not NULL, it is maximum length
11579    passed as third argument.  */
11582 fold_builtin_memory_chk (tree fndecl,
11583 			 tree dest, tree src, tree len, tree size,
11584 			 tree maxlen, bool ignore,
11585 			 enum built_in_function fcode)
/* For memset_chk the second argument is the fill value, not a pointer.  */
11589   if (!validate_arg (dest, POINTER_TYPE)
11590       || !validate_arg (src,
11591 			(fcode == BUILT_IN_MEMSET_CHK
11592 			 ? INTEGER_TYPE : POINTER_TYPE))
11593       || !validate_arg (len, INTEGER_TYPE)
11594       || !validate_arg (size, INTEGER_TYPE))
11597   /* If SRC and DEST are the same (and not volatile), return DEST
11598      (resp. DEST+LEN for __mempcpy_chk).  */
11599   if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11601       if (fcode != BUILT_IN_MEMPCPY_CHK)
11602 	return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11605 	  tree temp = fold_convert (TREE_TYPE (dest), len);
11606 	  temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
11607 	  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
/* Without a constant object size there is nothing to prove.  */
11611   if (! host_integerp (size, 1))
11614   if (! integer_all_onesp (size))
11616       if (! host_integerp (len, 1))
11618 	  /* If LEN is not constant, try MAXLEN too.
11619 	     For MAXLEN only allow optimizing into non-_ocs function
11620 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
11621 	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11623 	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11625 		  /* (void) __mempcpy_chk () can be optimized into
11626 		     (void) __memcpy_chk ().  */
11627 		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11631 		  return build_call_expr (fn, 4, dest, src, len, size);
/* Known bound exceeds the object: keep the checking call.  */
11639       if (tree_int_cst_lt (size, maxlen))
11644   /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11645      mem{cpy,pcpy,move,set} is available.  */
11648     case BUILT_IN_MEMCPY_CHK:
11649       fn = built_in_decls[BUILT_IN_MEMCPY];
11651     case BUILT_IN_MEMPCPY_CHK:
11652       fn = built_in_decls[BUILT_IN_MEMPCPY];
11654     case BUILT_IN_MEMMOVE_CHK:
11655       fn = built_in_decls[BUILT_IN_MEMMOVE];
11657     case BUILT_IN_MEMSET_CHK:
11658       fn = built_in_decls[BUILT_IN_MEMSET];
/* The check proved safe: emit the unchecked variant.  */
11667   return build_call_expr (fn, 3, dest, src, len);
11670 /* Fold a call to the __st[rp]cpy_chk builtin.
11671    DEST, SRC, and SIZE are the arguments to the call.
11672    IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
11673    code of the builtin.  If MAXLEN is not NULL, it is maximum length of
11674    strings passed as second argument.  */
11677 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11678 			 tree maxlen, bool ignore,
11679 			 enum built_in_function fcode)
11683   if (!validate_arg (dest, POINTER_TYPE)
11684       || !validate_arg (src, POINTER_TYPE)
11685       || !validate_arg (size, INTEGER_TYPE))
11688   /* If SRC and DEST are the same (and not volatile), return DEST.  */
11689   if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11690     return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* Without a constant object size there is nothing to prove.  */
11692   if (! host_integerp (size, 1))
11695   if (! integer_all_onesp (size))
11697       len = c_strlen (src, 1);
11698       if (! len || ! host_integerp (len, 1))
11700 	  /* If LEN is not constant, try MAXLEN too.
11701 	     For MAXLEN only allow optimizing into non-_ocs function
11702 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
11703 	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11705 	      if (fcode == BUILT_IN_STPCPY_CHK)
/* stpcpy's return value (end pointer) would be lost by the strcpy
   transformation, so this only applies when the result is unused.  */
11710 		  /* If return value of __stpcpy_chk is ignored,
11711 		     optimize into __strcpy_chk.  */
11712 		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11716 		  return build_call_expr (fn, 3, dest, src, size);
11719 	      if (! len || TREE_SIDE_EFFECTS (len))
11722 	      /* If c_strlen returned something, but not a constant,
11723 		 transform __strcpy_chk into __memcpy_chk.  */
11724 	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* Copy LEN + 1 bytes so the NUL terminator is included.  */
11728 	      len = size_binop (PLUS_EXPR, len, ssize_int (1));
11729 	      return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11730 				   build_call_expr (fn, 4,
11731 						    dest, src, len, size));
11737       if (! tree_int_cst_lt (maxlen, size))
11741   /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
11742   fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11743 		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11747   return build_call_expr (fn, 2, dest, src);
11750 /* Fold a call to the __strncpy_chk builtin.  DEST, SRC, LEN, and SIZE
11751    are the arguments to the call.  If MAXLEN is not NULL, it is maximum
11752    length passed as third argument.  */
11755 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11760   if (!validate_arg (dest, POINTER_TYPE)
11761       || !validate_arg (src, POINTER_TYPE)
11762       || !validate_arg (len, INTEGER_TYPE)
11763       || !validate_arg (size, INTEGER_TYPE))
/* Without a constant object size there is nothing to prove.  */
11766   if (! host_integerp (size, 1))
11769   if (! integer_all_onesp (size))
11771       if (! host_integerp (len, 1))
11773 	  /* If LEN is not constant, try MAXLEN too.
11774 	     For MAXLEN only allow optimizing into non-_ocs function
11775 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
11776 	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* Known bound exceeds the object: keep the checking call.  */
11782       if (tree_int_cst_lt (size, maxlen))
11786   /* If __builtin_strncpy_chk is used, assume strncpy is available.  */
11787   fn = built_in_decls[BUILT_IN_STRNCPY];
11791   return build_call_expr (fn, 3, dest, src, len);
11794 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11795 are the arguments to the call. */
11798 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
/* Reject calls with unexpected argument types; caller falls back to a
   normal call.  */
11803 if (!validate_arg (dest, POINTER_TYPE)
11804 || !validate_arg (src, POINTER_TYPE)
11805 || !validate_arg (size, INTEGER_TYPE))
11808 p = c_getstr (src);
11809 /* If the SRC parameter is "", return DEST.  */
/* omit_one_operand keeps SRC's side effects while yielding DEST.  */
11810 if (p && *p == '\0')
11811 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only fold when SIZE is the constant all-ones "unknown object size"
   marker; any other constant would require a real overflow check.  */
11813 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11816 /* If __builtin_strcat_chk is used, assume strcat is available.  */
11817 fn = built_in_decls[BUILT_IN_STRCAT];
11821 return build_call_expr (fn, 2, dest, src);
11824 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11828 fold_builtin_strncat_chk (tree fndecl,
11829 tree dest, tree src, tree len, tree size)
/* Give up unless every argument has the type __strncat_chk expects.
   BUG FIX: the original validated SIZE twice and never validated LEN;
   validate LEN as an INTEGER_TYPE instead of the duplicate check.  */
11834 if (!validate_arg (dest, POINTER_TYPE)
11835 || !validate_arg (src, POINTER_TYPE)
11836 || !validate_arg (len, INTEGER_TYPE)
11837 || !validate_arg (size, INTEGER_TYPE))
11840 p = c_getstr (src);
11841 /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
/* omit_one_operand keeps the unused operand's side effects alive.  */
11842 if (p && *p == '\0')
11843 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11844 else if (integer_zerop (len))
11845 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* SIZE must be a compile-time constant to reason about overflow.  */
11847 if (! host_integerp (size, 1))
/* All-ones SIZE is the "object size unknown" marker; only then is the
   check dropped unconditionally at the bottom.  */
11850 if (! integer_all_onesp (size))
11852 tree src_len = c_strlen (src, 1);
11854 && host_integerp (src_len, 1)
11855 && host_integerp (len, 1)
11856 && ! tree_int_cst_lt (len, src_len))
11858 /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
11859 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11863 return build_call_expr (fn, 3, dest, src, size);
11868 /* If __builtin_strncat_chk is used, assume strncat is available.  */
11869 fn = built_in_decls[BUILT_IN_STRNCAT];
11873 return build_call_expr (fn, 3, dest, src, len);
11876 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
11877 a normal call should be emitted rather than expanding the function
11878 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
11881 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
11883 tree dest, size, len, fn, fmt, flag;
11884 const char *fmt_str;
11885 int nargs = call_expr_nargs (exp);
11887 /* Verify the required arguments in the original call.  */
/* Fixed arguments: DEST, FLAG, SIZE, FMT, then the variadic args.  */
11890 dest = CALL_EXPR_ARG (exp, 0);
11891 if (!validate_arg (dest, POINTER_TYPE))
11893 flag = CALL_EXPR_ARG (exp, 1);
11894 if (!validate_arg (flag, INTEGER_TYPE))
11896 size = CALL_EXPR_ARG (exp, 2);
11897 if (!validate_arg (size, INTEGER_TYPE))
11899 fmt = CALL_EXPR_ARG (exp, 3);
11900 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE (destination object size) must be a known constant.  */
11903 if (! host_integerp (size, 1))
/* Make sure '%', 's' etc. are known in the target charset before
   inspecting the format string.  */
11908 if (!init_target_chars ())
11911 /* Check whether the format is a literal string constant.  */
11912 fmt_str = c_getstr (fmt);
11913 if (fmt_str != NULL)
11915 /* If the format doesn't contain % args or %%, we know the size.  */
11916 if (strchr (fmt_str, target_percent) == 0)
11919 len = build_int_cstu (size_type_node, strlen (fmt_str));
11921 /* If the format is "%s" and first ... argument is a string literal,
11922 we know the size too.  */
11923 else if (fcode == BUILT_IN_SPRINTF_CHK
11924 && strcmp (fmt_str, target_percent_s) == 0)
11930 arg = CALL_EXPR_ARG (exp, 4);
11931 if (validate_arg (arg, POINTER_TYPE))
11933 len = c_strlen (arg, 1);
11934 if (! len || ! host_integerp (len, 1))
/* If SIZE is known (not the all-ones "unknown" marker), the computed
   output length must fit strictly below it.  */
11941 if (! integer_all_onesp (size))
11943 if (! len || ! tree_int_cst_lt (len, size))
11947 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
11948 or if format doesn't contain % chars or is "%s".  */
11949 if (! integer_zerop (flag))
11951 if (fmt_str == NULL)
11953 if (strchr (fmt_str, target_percent) != NULL
11954 && strcmp (fmt_str, target_percent_s))
11958 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
11959 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
11960 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Rebuild the call dropping FLAG and SIZE: skip 4 fixed args, keep
   DEST and FMT plus the trailing variadic arguments.  */
11964 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
11967 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
11968 a normal call should be emitted rather than expanding the function
11969 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
11970 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
11971 passed as second argument. */
11974 fold_builtin_snprintf_chk (tree exp, tree maxlen,
11975 enum built_in_function fcode)
11977 tree dest, size, len, fn, fmt, flag;
11978 const char *fmt_str;
11980 /* Verify the required arguments in the original call.  */
/* Fixed arguments: DEST, LEN, FLAG, SIZE, FMT, then variadic args.  */
11981 if (call_expr_nargs (exp) < 5)
11983 dest = CALL_EXPR_ARG (exp, 0);
11984 if (!validate_arg (dest, POINTER_TYPE))
11986 len = CALL_EXPR_ARG (exp, 1);
11987 if (!validate_arg (len, INTEGER_TYPE))
11989 flag = CALL_EXPR_ARG (exp, 2);
11990 if (!validate_arg (flag, INTEGER_TYPE))
11992 size = CALL_EXPR_ARG (exp, 3);
11993 if (!validate_arg (size, INTEGER_TYPE))
11995 fmt = CALL_EXPR_ARG (exp, 4);
11996 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE (destination object size) must be a known constant.  */
11999 if (! host_integerp (size, 1))
/* All-ones SIZE means "object size unknown"; otherwise LEN (or its
   upper bound MAXLEN) must be shown not to exceed SIZE.  */
12002 if (! integer_all_onesp (size))
12004 if (! host_integerp (len, 1))
12006 /* If LEN is not constant, try MAXLEN too.
12007 For MAXLEN only allow optimizing into non-_ocs function
12008 if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
12009 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12015 if (tree_int_cst_lt (size, maxlen))
/* Need the target-charset '%'/'s' characters to scan the format.  */
12019 if (!init_target_chars ())
12022 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12023 or if format doesn't contain % chars or is "%s".  */
12024 if (! integer_zerop (flag))
12026 fmt_str = c_getstr (fmt);
12027 if (fmt_str == NULL)
12029 if (strchr (fmt_str, target_percent) != NULL
12030 && strcmp (fmt_str, target_percent_s))
12034 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12036 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12037 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Rebuild the call dropping FLAG and SIZE: skip 5 fixed args, keep
   DEST, LEN and FMT plus the trailing variadic arguments.  */
12041 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12044 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12045 FMT and ARG are the arguments to the call; we don't fold cases with
12046 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12048 Return NULL_TREE if no simplification was possible, otherwise return the
12049 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12050 code of the function to be simplified. */
12053 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12054 enum built_in_function fcode)
12056 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12057 const char *fmt_str = NULL;
12059 /* If the return value is used, don't do the transformation.  */
12063 /* Verify the required arguments in the original call.  */
12064 if (!validate_arg (fmt, POINTER_TYPE))
12067 /* Check whether the format is a literal string constant.  */
12068 fmt_str = c_getstr (fmt);
12069 if (fmt_str == NULL)
12072 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12074 /* If we're using an unlocked function, assume the other
12075 unlocked functions exist explicitly.  */
12076 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12077 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
/* Otherwise only use putchar/puts if the front end declared them
   implicitly available (may be NULL_TREE for freestanding).  */
12081 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12082 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12085 if (!init_target_chars ())
/* Two foldable shapes: the format is exactly "%s", or it contains no
   '%' at all (so it is emitted literally).  */
12088 if (strcmp (fmt_str, target_percent_s) == 0
12089 || strchr (fmt_str, target_percent) == NULL)
12093 if (strcmp (fmt_str, target_percent_s) == 0)
/* printf ("%s", ...): need the actual string argument, which the
   va_list variants do not expose here.  */
12095 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12098 if (!arg || !validate_arg (arg, POINTER_TYPE))
12101 str = c_getstr (arg);
12107 /* The format specifier doesn't contain any '%' characters.  */
12108 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12114 /* If the string was "", printf does nothing.  */
/* Return value is ignored (checked above), so constant 0 is fine.  */
12115 if (str[0] == '\0')
12116 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12118 /* If the string has length of 1, call putchar.  */
12119 if (str[1] == '\0')
12121 /* Given printf("c"), (where c is any one character,)
12122 convert "c"[0] to an int and pass that to the replacement
12124 newarg = build_int_cst (NULL_TREE, str[0]);
12126 call = build_call_expr (fn_putchar, 1, newarg);
12130 /* If the string was "string\n", call puts("string").  */
12131 size_t len = strlen (str);
/* Compare as unsigned char against the target-charset newline.  */
12132 if ((unsigned char)str[len - 1] == target_newline)
12134 /* Create a NUL-terminated string that's one char shorter
12135 than the original, stripping off the trailing '\n'.  */
12136 char *newstr = alloca (len);
12137 memcpy (newstr, str, len - 1);
12138 newstr[len - 1] = 0;
12140 newarg = build_string_literal (len, newstr);
12142 call = build_call_expr (fn_puts, 1, newarg);
12145 /* We'd like to arrange to call fputs(string,stdout) here,
12146 but we need stdout and don't have a way to get it yet.  */
12151 /* The other optimizations can be done only on the non-va_list variants.  */
12152 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12155 /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
12156 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12158 if (!arg || !validate_arg (arg, POINTER_TYPE))
12161 call = build_call_expr (fn_puts, 1, arg);
12164 /* If the format specifier was "%c", call __builtin_putchar(arg).  */
12165 else if (strcmp (fmt_str, target_percent_c) == 0)
12167 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12170 call = build_call_expr (fn_putchar, 1, arg);
/* Convert the replacement call to the original return type (its
   value is ignored anyway).  */
12176 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12179 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12180 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12181 more than 3 arguments, and ARG may be null in the 2-argument case.
12183 Return NULL_TREE if no simplification was possible, otherwise return the
12184 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12185 code of the function to be simplified. */
12188 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12189 enum built_in_function fcode)
12191 tree fn_fputc, fn_fputs, call = NULL_TREE;
12192 const char *fmt_str = NULL;
12194 /* If the return value is used, don't do the transformation.  */
12198 /* Verify the required arguments in the original call.  */
12199 if (!validate_arg (fp, POINTER_TYPE))
12201 if (!validate_arg (fmt, POINTER_TYPE))
12204 /* Check whether the format is a literal string constant.  */
12205 fmt_str = c_getstr (fmt);
12206 if (fmt_str == NULL)
12209 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12211 /* If we're using an unlocked function, assume the other
12212 unlocked functions exist explicitly.  */
12213 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12214 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
/* Otherwise only use fputc/fputs if the front end declared them
   implicitly available (may be NULL_TREE for freestanding).  */
12218 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12219 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12222 if (!init_target_chars ())
12225 /* If the format doesn't contain % args or %%, use strcpy.  */
12226 if (strchr (fmt_str, target_percent) == NULL)
/* The literal-format fold needs the format itself; the va_list
   variants are excluded just below.  */
12228 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12232 /* If the format specifier was "", fprintf does nothing.  */
12233 if (fmt_str[0] == '\0')
12235 /* If FP has side-effects, just wait until gimplification is
12237 if (TREE_SIDE_EFFECTS (fp))
/* Return value is ignored (checked above), so constant 0 is fine.  */
12240 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12243 /* When "string" doesn't contain %, replace all cases of
12244 fprintf (fp, string) with fputs (string, fp).  The fputs
12245 builtin will take care of special cases like length == 1.  */
12247 call = build_call_expr (fn_fputs, 2, fmt, fp);
12250 /* The other optimizations can be done only on the non-va_list variants.  */
12251 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12254 /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
12255 else if (strcmp (fmt_str, target_percent_s) == 0)
12257 if (!arg || !validate_arg (arg, POINTER_TYPE))
12260 call = build_call_expr (fn_fputs, 2, arg, fp);
12263 /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
12264 else if (strcmp (fmt_str, target_percent_c) == 0)
12266 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12269 call = build_call_expr (fn_fputc, 2, arg, fp);
/* Convert the replacement call to the original return type (its
   value is ignored anyway).  */
12274 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12277 /* Initialize format string characters in the target charset. */
12280 init_target_chars (void)
/* Cache the target-charset encodings of the characters used when
   matching printf-style format strings ('\n', '%', 'c', 's').  */
12285 target_newline = lang_hooks.to_target_charset ('\n');
12286 target_percent = lang_hooks.to_target_charset ('%');
12287 target_c = lang_hooks.to_target_charset ('c');
12288 target_s = lang_hooks.to_target_charset ('s');
/* A zero result means the character has no target encoding; format
   matching would be unreliable, so report failure to the callers.  */
12289 if (target_newline == 0 || target_percent == 0 || target_c == 0
/* Pre-build the little target-charset strings "%c", "%s" and "%s\n"
   that the printf/fprintf folders compare against.  */
12293 target_percent_c[0] = target_percent;
12294 target_percent_c[1] = target_c;
12295 target_percent_c[2] = '\0';
12297 target_percent_s[0] = target_percent;
12298 target_percent_s[1] = target_s;
12299 target_percent_s[2] = '\0';
12301 target_percent_s_newline[0] = target_percent;
12302 target_percent_s_newline[1] = target_s;
12303 target_percent_s_newline[2] = target_newline;
12304 target_percent_s_newline[3] = '\0';
12311 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12312 and no overflow/underflow occurred. INEXACT is true if M was not
12313 exactly calculated. TYPE is the tree type for the result. This
12314 function assumes that you cleared the MPFR flags and then
12315 calculated M to see if anything subsequently set a flag prior to
12316 entering this function. Return NULL_TREE if any checks fail. */
12319 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12321 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12322 overflow/underflow occurred.  If -frounding-math, proceed iff the
12323 result of calling FUNC was exact.  */
12324 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12325 && (!flag_rounding_math || !inexact))
12327 REAL_VALUE_TYPE rr;
/* Round the MPFR value into GCC's internal real representation.  */
12329 real_from_mpfr (&rr, m, type, GMP_RNDN);
12330 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12331 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
12332 but the mpft_t is not, then we underflowed in the
12334 if (!real_isnan (&rr) && !real_isinf (&rr)
12335 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12337 REAL_VALUE_TYPE rmode;
/* Round once more into TYPE's machine mode and verify that the
   narrowing lost nothing; otherwise folding would change results.  */
12339 real_convert (&rmode, TYPE_MODE (type), &rr);
12340 /* Proceed iff the specified mode can hold the value.  */
12341 if (real_identical (&rmode, &rr))
12342 return build_real (type, rmode);
12348 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12349 FUNC on it and return the resulting value as a tree with type TYPE.
12350 If MIN and/or MAX are not NULL, then the supplied ARG must be
12351 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12352 acceptable values, otherwise they are not. The mpfr precision is
12353 set to the precision of TYPE. We assume that function FUNC returns
12354 zero if the result could be calculated exactly within the requested
12358 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12359 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12362 tree result = NULL_TREE;
12366 /* To proceed, MPFR must exactly represent the target floating point
12367 format, which only happens when the target base equals two.  */
12368 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12369 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12371 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Reject NaN/Inf and enforce the optional [MIN, MAX] domain;
   INCLUSIVE selects >=/<= versus >/< at the endpoints.  */
12373 if (!real_isnan (ra) && !real_isinf (ra)
12374 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12375 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
/* Work at exactly TYPE's precision so rounding matches the target.  */
12377 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12381 mpfr_init2 (m, prec);
12382 mpfr_from_real (m, ra, GMP_RNDN);
/* Clear flags first so do_mpfr_ckconv can detect over/underflow
   raised by FUNC alone.  */
12383 mpfr_clear_flags ();
12384 inexact = func (m, m, GMP_RNDN);
12385 result = do_mpfr_ckconv (m, type, inexact);
12393 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12394 FUNC on it and return the resulting value as a tree with type TYPE.
12395 The mpfr precision is set to the precision of TYPE. We assume that
12396 function FUNC returns zero if the result could be calculated
12397 exactly within the requested precision. */
12400 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12401 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12403 tree result = NULL_TREE;
12408 /* To proceed, MPFR must exactly represent the target floating point
12409 format, which only happens when the target base equals two.  */
12410 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12411 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12412 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12414 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12415 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
/* Both operands must be finite, non-NaN constants.  */
12417 if (!real_isnan (ra1) && !real_isinf (ra1)
12418 && !real_isnan (ra2) && !real_isinf (ra2))
/* Work at exactly TYPE's precision so rounding matches the target.  */
12420 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12424 mpfr_inits2 (prec, m1, m2, NULL);
12425 mpfr_from_real (m1, ra1, GMP_RNDN);
12426 mpfr_from_real (m2, ra2, GMP_RNDN);
/* Clear flags so do_mpfr_ckconv sees only FUNC's exceptions.  */
12427 mpfr_clear_flags ();
12428 inexact = func (m1, m1, m2, GMP_RNDN);
12429 result = do_mpfr_ckconv (m1, type, inexact);
12430 mpfr_clears (m1, m2, NULL);
12437 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12438 FUNC on it and return the resulting value as a tree with type TYPE.
12439 The mpfr precision is set to the precision of TYPE. We assume that
12440 function FUNC returns zero if the result could be calculated
12441 exactly within the requested precision. */
12444 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12445 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12447 tree result = NULL_TREE;
12453 /* To proceed, MPFR must exactly represent the target floating point
12454 format, which only happens when the target base equals two.  */
12455 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12456 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12457 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12458 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12460 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12461 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12462 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
/* All three operands must be finite, non-NaN constants.  */
12464 if (!real_isnan (ra1) && !real_isinf (ra1)
12465 && !real_isnan (ra2) && !real_isinf (ra2)
12466 && !real_isnan (ra3) && !real_isinf (ra3))
/* Work at exactly TYPE's precision so rounding matches the target.  */
12468 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12472 mpfr_inits2 (prec, m1, m2, m3, NULL);
12473 mpfr_from_real (m1, ra1, GMP_RNDN);
12474 mpfr_from_real (m2, ra2, GMP_RNDN);
12475 mpfr_from_real (m3, ra3, GMP_RNDN);
/* Clear flags so do_mpfr_ckconv sees only FUNC's exceptions.  */
12476 mpfr_clear_flags ();
12477 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12478 result = do_mpfr_ckconv (m1, type, inexact);
12479 mpfr_clears (m1, m2, m3, NULL);
12486 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12487 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12488 If ARG_SINP and ARG_COSP are NULL then the result is returned
12489 as a complex value.
12490 The type is taken from the type of ARG and is used for setting the
12491 precision of the calculation and results. */
12494 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12496 tree const type = TREE_TYPE (arg);
12497 tree result = NULL_TREE;
12501 /* To proceed, MPFR must exactly represent the target floating point
12502 format, which only happens when the target base equals two.  */
12503 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12504 && TREE_CODE (arg) == REAL_CST
12505 && !TREE_OVERFLOW (arg))
12507 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* sin/cos are defined for all finite reals, so only NaN/Inf bail.  */
12509 if (!real_isnan (ra) && !real_isinf (ra))
12511 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12512 tree result_s, result_c;
12516 mpfr_inits2 (prec, m, ms, mc, NULL);
12517 mpfr_from_real (m, ra, GMP_RNDN);
/* Clear flags so do_mpfr_ckconv sees only mpfr_sin_cos's flags.  */
12518 mpfr_clear_flags ();
12519 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12520 result_s = do_mpfr_ckconv (ms, type, inexact);
12521 result_c = do_mpfr_ckconv (mc, type, inexact);
12522 mpfr_clears (m, ms, mc, NULL);
/* Both conversions must succeed before folding anything.  */
12523 if (result_s && result_c)
12525 /* If we are to return in a complex value do so.  */
/* NOTE: real part is COS, imaginary part is SIN (cexpi layout).  */
12526 if (!arg_sinp && !arg_cosp)
12527 return build_complex (build_complex_type (type),
12528 result_c, result_s);
12530 /* Dereference the sin/cos pointer arguments.  */
12531 arg_sinp = build_fold_indirect_ref (arg_sinp);
12532 arg_cosp = build_fold_indirect_ref (arg_cosp);
12533 /* Proceed if valid pointer type were passed in.  */
12534 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12535 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12537 /* Set the values.  */
12538 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
/* Mark the stores as side effects so they are not dropped.  */
12540 TREE_SIDE_EFFECTS (result_s) = 1;
12541 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12543 TREE_SIDE_EFFECTS (result_c) = 1;
12544 /* Combine the assignments into a compound expr.  */
12545 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12546 result_s, result_c));
12554 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12555 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12556 two-argument mpfr order N Bessel function FUNC on them and return
12557 the resulting value as a tree with type TYPE. The mpfr precision
12558 is set to the precision of TYPE. We assume that function FUNC
12559 returns zero if the result could be calculated exactly within the
12560 requested precision. */
12562 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12563 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12564 const REAL_VALUE_TYPE *min, bool inclusive)
12566 tree result = NULL_TREE;
12571 /* To proceed, MPFR must exactly represent the target floating point
12572 format, which only happens when the target base equals two. */
12573 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12574 && host_integerp (arg1, 0)
12575 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12577 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12578 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12581 && !real_isnan (ra) && !real_isinf (ra)
12582 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12584 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12588 mpfr_init2 (m, prec);
12589 mpfr_from_real (m, ra, GMP_RNDN);
12590 mpfr_clear_flags ();
12591 inexact = func (m, n, m, GMP_RNDN);
12592 result = do_mpfr_ckconv (m, type, inexact);