1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 #include "coretypes.h"
31 #include "tree-gimple.h"
34 #include "hard-reg-set.h"
37 #include "insn-config.h"
43 #include "typeclass.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
53 #include "diagnostic.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
59 /* Define the names of the builtin function types and codes. */
60 const char *const built_in_class_names[4]
61 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* built_in_names is populated by X-macro expansion: DEF_BUILTIN stringizes
   each builtin's enum identifier (#X) and builtins.def supplies one
   DEF_BUILTIN invocation per builtin, yielding one name per slot up to
   END_BUILTINS.  NOTE(review): the initializer's opening/closing braces and
   the #undef DEF_BUILTIN are not visible in this excerpt.  */
63 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
64 const char * built_in_names[(int) END_BUILTINS] =
66 #include "builtins.def"
/* Both arrays below are indexed by enum built_in_function (hence the
   END_BUILTINS bound).  Being file-scope objects they are zero-initialized,
   i.e. every element starts as NULL_TREE.  */
70 /* Setup an array of _DECL trees, make sure each element is
71 initialized to NULL_TREE. */
72 tree built_in_decls[(int) END_BUILTINS];
73 /* Declarations used when constructing the builtin implicitly in the compiler.
74 It may be NULL_TREE when this is invalid (for instance runtime is not
75 required to implement the function call in all cases). */
76 tree implicit_built_in_decls[(int) END_BUILTINS];
/* Forward declarations for the static helpers defined later in this file.
   NOTE(review): this excerpt omits some original lines; in particular the
   #endif lines matching the #if blocks below (e.g. the HAVE_untyped_call
   guard and the MPFR_VERSION guard) are not visible here.  */
78 static const char *c_getstr (tree);
79 static rtx c_readstr (const char *, enum machine_mode);
80 static int target_char_cast (tree, char *);
81 static rtx get_memory_rtx (tree, tree);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx result_vector (int, rtx);
87 static void expand_builtin_update_setjmp_buf (rtx);
88 static void expand_builtin_prefetch (tree);
89 static rtx expand_builtin_apply_args (void);
90 static rtx expand_builtin_apply_args_1 (void);
91 static rtx expand_builtin_apply (rtx, rtx, rtx);
92 static void expand_builtin_return (rtx);
93 static enum type_class type_to_class (tree);
94 static rtx expand_builtin_classify_type (tree);
95 static void expand_errno_check (tree, rtx);
96 static rtx expand_builtin_mathfn (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
98 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
99 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
100 static rtx expand_builtin_sincos (tree);
101 static rtx expand_builtin_cexpi (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
103 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_args_info (tree);
105 static rtx expand_builtin_next_arg (void);
106 static rtx expand_builtin_va_start (tree);
107 static rtx expand_builtin_va_end (tree);
108 static rtx expand_builtin_va_copy (tree);
109 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
113 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
114 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
121 enum machine_mode, int);
122 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
123 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
124 enum machine_mode, int);
125 static rtx expand_builtin_bcopy (tree, int);
126 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, rtx, bool);
144 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
146 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
147 static tree stabilize_va_list (tree, int);
148 static rtx expand_builtin_expect (tree, rtx);
149 static tree fold_builtin_constant_p (tree);
150 static tree fold_builtin_expect (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (tree);
153 static tree fold_builtin_inf (tree, int);
154 static tree fold_builtin_nan (tree, tree, int);
155 static tree rewrite_call_expr (tree, int, tree, int, ...);
156 static bool validate_arg (tree, enum tree_code code);
157 static bool integer_valued_real_p (tree);
158 static tree fold_trunc_transparent_mathfn (tree, tree);
159 static bool readonly_data_expr (tree);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_sqrt (tree, tree);
163 static tree fold_builtin_cbrt (tree, tree);
164 static tree fold_builtin_pow (tree, tree, tree, tree);
165 static tree fold_builtin_powi (tree, tree, tree, tree);
166 static tree fold_builtin_cos (tree, tree, tree);
167 static tree fold_builtin_cosh (tree, tree, tree);
168 static tree fold_builtin_tan (tree, tree);
169 static tree fold_builtin_trunc (tree, tree);
170 static tree fold_builtin_floor (tree, tree);
171 static tree fold_builtin_ceil (tree, tree);
172 static tree fold_builtin_round (tree, tree);
173 static tree fold_builtin_int_roundingfn (tree, tree);
174 static tree fold_builtin_bitop (tree, tree);
175 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
176 static tree fold_builtin_strchr (tree, tree, tree);
177 static tree fold_builtin_memchr (tree, tree, tree, tree);
178 static tree fold_builtin_memcmp (tree, tree, tree);
179 static tree fold_builtin_strcmp (tree, tree);
180 static tree fold_builtin_strncmp (tree, tree, tree);
181 static tree fold_builtin_signbit (tree, tree);
182 static tree fold_builtin_copysign (tree, tree, tree, tree);
183 static tree fold_builtin_isascii (tree);
184 static tree fold_builtin_toascii (tree);
185 static tree fold_builtin_isdigit (tree);
186 static tree fold_builtin_fabs (tree, tree);
187 static tree fold_builtin_abs (tree, tree);
/* NOTE(review): the continuation line closing this declaration (original
   line 189) is missing from the excerpt.  */
188 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
190 static tree fold_builtin_n (tree, tree *, int, bool);
191 static tree fold_builtin_0 (tree, bool);
192 static tree fold_builtin_1 (tree, tree, bool);
193 static tree fold_builtin_2 (tree, tree, tree, bool);
194 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
195 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
196 static tree fold_builtin_varargs (tree, tree, bool);
198 static tree fold_builtin_strpbrk (tree, tree, tree);
199 static tree fold_builtin_strstr (tree, tree, tree);
200 static tree fold_builtin_strrchr (tree, tree, tree);
201 static tree fold_builtin_strcat (tree, tree);
202 static tree fold_builtin_strncat (tree, tree, tree);
203 static tree fold_builtin_strspn (tree, tree);
204 static tree fold_builtin_strcspn (tree, tree);
205 static tree fold_builtin_sprintf (tree, tree, tree, int);
207 static rtx expand_builtin_object_size (tree);
208 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
209 enum built_in_function);
210 static void maybe_emit_chk_warning (tree, enum built_in_function);
211 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
212 static tree fold_builtin_object_size (tree, tree);
213 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
214 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
215 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
216 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
217 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
218 enum built_in_function);
219 static bool init_target_chars (void);
/* Cached target-charset representations of a few characters and format
   fragments; presumably filled in by init_target_chars (declared just
   above) -- TODO confirm, the initializer code is outside this excerpt.  */
221 static unsigned HOST_WIDE_INT target_newline;
222 static unsigned HOST_WIDE_INT target_percent;
223 static unsigned HOST_WIDE_INT target_c;
224 static unsigned HOST_WIDE_INT target_s;
225 static char target_percent_c[3];
226 static char target_percent_s[3];
227 static char target_percent_s_newline[4];
/* Helpers that evaluate math builtins at compile time using the MPFR
   multiple-precision library; each takes the function pointer of the MPFR
   routine to apply.  */
228 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_arg2 (tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_arg3 (tree, tree, tree, tree,
233 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
234 static tree do_mpfr_sincos (tree, tree, tree);
235 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
236 static tree do_mpfr_bessel_n (tree, tree, tree,
237 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
238 const REAL_VALUE_TYPE *, bool);
239 static tree do_mpfr_remquo (tree, tree, tree);
240 static tree do_mpfr_lgamma_r (tree, tree, tree);
243 /* This array records the insn_code of insns to implement the signbit
   function, indexed by machine mode.  NOTE(review): the original comment's
   closing line is missing from this excerpt; wording completed here.  */
245 enum insn_code signbit_optab[NUM_MACHINE_MODES];
248 /* Return true if NODE should be considered for inline expansion regardless
249 of the optimization level. This means whenever a function is invoked with
250 its "internal" name, which normally contains the prefix "__builtin". */
252 static bool called_as_built_in (tree node)
/* NOTE(review): the function body is incomplete in this excerpt -- the
   opening/closing braces and the return statements that follow each
   strncmp test are not visible.  */
254 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
/* Compiler-internal builtin names carry the "__builtin_" prefix.  */
255 if (strncmp (name, "__builtin_", 10) == 0)
/* The "__sync_" atomic builtins are likewise recognized by prefix.  */
257 if (strncmp (name, "__sync_", 7) == 0)
262 /* Return the alignment in bits of EXP, a pointer valued expression.
263 But don't return more than MAX_ALIGN no matter what.
264 The alignment returned is, by default, the alignment of the thing that
265 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
267 Otherwise, look at the expression to see if we can do better, i.e., if the
268 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this definition is incomplete in the excerpt -- the return
   type line, braces, loop header around the switch, several case labels,
   and early returns are elided.  Only the visible lines are annotated.  */
271 get_pointer_alignment (tree exp, unsigned int max_align)
273 unsigned int align, inner;
275 /* We rely on TER to compute accurate alignment information. */
276 if (!(optimize && flag_tree_ter))
279 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
/* Start from the declared alignment of the pointed-to type, clamped.  */
282 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
283 align = MIN (align, max_align);
287 switch (TREE_CODE (exp))
291 case NON_LVALUE_EXPR:
/* Strip the conversion and re-derive alignment from the new type.  */
292 exp = TREE_OPERAND (exp, 0);
293 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
296 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
297 align = MIN (inner, max_align);
300 case POINTER_PLUS_EXPR:
301 /* If sum of pointer + int, restrict our maximum alignment to that
302 imposed by the integer. If not, we can't do any better than
304 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
/* Halve max_align until the constant addend is a multiple of it.  */
307 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
308 & (max_align / BITS_PER_UNIT - 1))
312 exp = TREE_OPERAND (exp, 0);
316 /* See what we are pointing at and look at its alignment. */
317 exp = TREE_OPERAND (exp, 0);
319 if (handled_component_p (exp))
321 HOST_WIDE_INT bitsize, bitpos;
323 enum machine_mode mode;
324 int unsignedp, volatilep;
326 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
327 &mode, &unsignedp, &volatilep, true);
/* (bitpos & -bitpos) isolates the lowest set bit, i.e. the largest
   power-of-two alignment the bit offset still guarantees.  */
329 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
330 if (offset && TREE_CODE (offset) == PLUS_EXPR
331 && host_integerp (TREE_OPERAND (offset, 1), 1))
333 /* Any overflow in calculating offset_bits won't change
336 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
340 inner = MIN (inner, (offset_bits & -offset_bits));
341 offset = TREE_OPERAND (offset, 0);
343 if (offset && TREE_CODE (offset) == MULT_EXPR
344 && host_integerp (TREE_OPERAND (offset, 1), 1))
346 /* Any overflow in calculating offset_factor won't change
348 unsigned offset_factor
349 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
353 inner = MIN (inner, (offset_factor & -offset_factor));
/* A variable offset may misalign by anything down to a byte.  */
356 inner = MIN (inner, BITS_PER_UNIT);
358 if (TREE_CODE (exp) == FUNCTION_DECL)
359 align = FUNCTION_BOUNDARY;
360 else if (DECL_P (exp))
361 align = MIN (inner, DECL_ALIGN (exp));
362 #ifdef CONSTANT_ALIGNMENT
363 else if (CONSTANT_CLASS_P (exp))
364 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
366 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
367 || TREE_CODE (exp) == INDIRECT_REF)
368 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
370 align = MIN (align, inner);
371 return MIN (align, max_align);
379 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
380 way, because it could contain a zero byte in the middle.
381 TREE_STRING_LENGTH is the size of the character array, not the string.
383 ONLY_VALUE should be nonzero if the result is not going to be emitted
384 into the instruction stream and zero if it is going to be expanded.
385 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
386 is returned, otherwise NULL, since
387 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
388 evaluate the side-effects.
390 The value returned is of type `ssizetype'.
392 Unfortunately, string_constant can't access the values of const char
393 arrays with initializers, so neither can we do so here. */
/* NOTE(review): incomplete in this excerpt -- the return type line, braces,
   several local declarations and early returns are elided.  */
396 c_strlen (tree src, int only_value)
399 HOST_WIDE_INT offset;
/* A COND_EXPR is handled by recursing on both arms; usable only when
   both arms yield the same constant length.  */
404 if (TREE_CODE (src) == COND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
409 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
410 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
411 if (tree_int_cst_equal (len1, len2))
415 if (TREE_CODE (src) == COMPOUND_EXPR
416 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
417 return c_strlen (TREE_OPERAND (src, 1), only_value);
419 src = string_constant (src, &offset_node);
/* max is the index of the last array element (possible NUL position).  */
423 max = TREE_STRING_LENGTH (src) - 1;
424 ptr = TREE_STRING_POINTER (src);
426 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
428 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
429 compute the offset to the following null if we don't know where to
430 start searching for it. */
433 for (i = 0; i < max; i++)
437 /* We don't know the starting offset, but we do know that the string
438 has no internal zero bytes. We can assume that the offset falls
439 within the bounds of the string; otherwise, the programmer deserves
440 what he gets. Subtract the offset from the length of the string,
441 and return that. This would perhaps not be valid if we were dealing
442 with named arrays in addition to literal string constants. */
444 return size_diffop (size_int (max), offset_node);
447 /* We have a known offset into the string. Start searching there for
448 a null character if we can represent it as a single HOST_WIDE_INT. */
449 if (offset_node == 0)
451 else if (! host_integerp (offset_node, 0))
454 offset = tree_low_cst (offset_node, 0);
456 /* If the offset is known to be out of bounds, warn, and call strlen at
458 if (offset < 0 || offset > max)
460 warning (0, "offset outside bounds of constant string");
464 /* Use strlen to search for the first zero byte. Since any strings
465 constructed with build_string will have nulls appended, we win even
466 if we get handed something like (char[4])"abcd".
468 Since OFFSET is our starting index into the string, no further
469 calculation is needed. */
470 return ssize_int (strlen (ptr + offset));
473 /* Return a char pointer for a C string if it is a string constant
474 or sum of string constant and integer constant. */
/* NOTE(review): the defining line of c_getstr itself (declared above as
   `static const char *c_getstr (tree)') is missing from this excerpt,
   along with its braces and NULL-returning branches.  */
481 src = string_constant (src, &offset_node);
485 if (offset_node == 0)
486 return TREE_STRING_POINTER (src);
/* Reject non-constant or out-of-bounds offsets before indexing.  */
487 else if (!host_integerp (offset_node, 1)
488 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
491 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
494 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
495 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* NOTE(review): incomplete in this excerpt -- return type line, braces,
   locals (c[], i, j, ch) and the initial clearing of c[] are elided.  */
498 c_readstr (const char *str, enum machine_mode mode)
504 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
/* Place each source byte at the bit position the target would read it
   from, honoring both byte and word endianness.  */
509 for (i = 0; i < GET_MODE_SIZE (mode); i++)
512 if (WORDS_BIG_ENDIAN)
513 j = GET_MODE_SIZE (mode) - i - 1;
514 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
515 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
516 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
518 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
521 ch = (unsigned char) str[i];
522 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
524 return immed_double_const (c[0], c[1], mode);
527 /* Cast a target constant CST to target CHAR and if that value fits into
528 host char type, return zero and put that value into variable pointed to by
/* NOTE(review): comment continuation, return type line, braces, and the
   nonzero-return / *p store at the end are elided from this excerpt.  */
532 target_char_cast (tree cst, char *p)
534 unsigned HOST_WIDE_INT val, hostval;
536 if (!host_integerp (cst, 1)
537 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
540 val = tree_low_cst (cst, 1);
/* Truncate to the target's char width.  */
541 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
542 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* Then truncate to the host's char width for comparison.  */
545 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
546 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
555 /* Similar to save_expr, but assumes that arbitrary code is not executed
556 in between the multiple evaluations. In particular, we assume that a
557 non-addressable local variable will not be modified. */
/* NOTE(review): return type line, braces, and the return for the
   stable-variable case are elided from this excerpt.  */
560 builtin_save_expr (tree exp)
/* Non-addressable parms and non-static locals cannot change behind our
   back under the assumption above, so they need no SAVE_EXPR.  */
562 if (TREE_ADDRESSABLE (exp) == 0
563 && (TREE_CODE (exp) == PARM_DECL
564 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
567 return save_expr (exp);
570 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
571 times to get the address of either a higher stack frame, or a return
572 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): incomplete in this excerpt -- return type line, braces,
   several #else/#endif lines, the loop counter declaration, and the final
   return are elided.  */
575 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
579 #ifdef INITIAL_FRAME_ADDRESS_RTX
580 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
584 /* For a zero count with __builtin_return_address, we don't care what
585 frame address we return, because target-specific definitions will
586 override us. Therefore frame pointer elimination is OK, and using
587 the soft frame pointer is OK.
589 For a nonzero count, or a zero count with __builtin_frame_address,
590 we require a stable offset from the current frame pointer to the
591 previous one, so we must use the hard frame pointer, and
592 we must disable frame pointer elimination. */
593 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
594 tem = frame_pointer_rtx;
597 tem = hard_frame_pointer_rtx;
599 /* Tell reload not to eliminate the frame pointer. */
600 current_function_accesses_prior_frames = 1;
604 /* Some machines need special handling before we can access
605 arbitrary frames. For example, on the SPARC, we must first flush
606 all register windows to the stack. */
607 #ifdef SETUP_FRAME_ADDRESSES
609 SETUP_FRAME_ADDRESSES ();
612 /* On the SPARC, the return address is not in the frame, it is in a
613 register. There is no way to access it off of the current frame
614 pointer, but it can be accessed off the previous frame pointer by
615 reading the value from the register window save area. */
616 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
617 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
621 /* Scan back COUNT frames to the specified frame. */
622 for (i = 0; i < count; i++)
624 /* Assume the dynamic chain pointer is in the word that the
625 frame address points to, unless otherwise specified. */
626 #ifdef DYNAMIC_CHAIN_ADDRESS
627 tem = DYNAMIC_CHAIN_ADDRESS (tem);
/* Dereference one link of the dynamic chain.  */
629 tem = memory_address (Pmode, tem);
630 tem = gen_frame_mem (Pmode, tem);
631 tem = copy_to_reg (tem);
634 /* For __builtin_frame_address, return what we've got. But, on
635 the SPARC for example, we may have to add a bias. */
636 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
637 #ifdef FRAME_ADDR_RTX
638 return FRAME_ADDR_RTX (tem);
643 /* For __builtin_return_address, get the return address from that frame. */
644 #ifdef RETURN_ADDR_RTX
645 tem = RETURN_ADDR_RTX (count, tem);
/* Default: the return address lives one word past the frame address.  */
647 tem = memory_address (Pmode,
648 plus_constant (tem, GET_MODE_SIZE (Pmode)));
649 tem = gen_frame_mem (Pmode, tem);
654 /* Alias set used for setjmp buffer.  Lazily allocated: -1 means "not yet
   created"; the setjmp/longjmp expanders below call new_alias_set () on
   first use.  */
655 static HOST_WIDE_INT setjmp_alias_set = -1;
657 /* Construct the leading half of a __builtin_setjmp call. Control will
658 return to RECEIVER_LABEL. This is also called directly by the SJLJ
659 exception handling code. */
/* NOTE(review): incomplete in this excerpt -- the return type line, braces,
   and the declarations of mem/stack_save are elided.  */
662 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
664 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
668 if (setjmp_alias_set == -1)
669 setjmp_alias_set = new_alias_set ();
671 buf_addr = convert_memory_address (Pmode, buf_addr);
673 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
675 /* We store the frame pointer and the address of receiver_label in
676 the buffer and use the rest of it for the stack save area, which
677 is machine-dependent. */
/* Slot 0: the frame value chosen by the target.  */
679 mem = gen_rtx_MEM (Pmode, buf_addr);
680 set_mem_alias_set (mem, setjmp_alias_set);
681 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* Slot 1: the receiver label.  NOTE(review): the line below ends in a
   comma, making the following set_mem_alias_set part of the same
   comma-expression statement.  Behavior is identical to a semicolon, but
   this looks like a typo worth fixing upstream.  */
683 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
684 set_mem_alias_set (mem, setjmp_alias_set);
686 emit_move_insn (validize_mem (mem),
687 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
/* Slot 2 onward: machine-dependent stack save area.  */
689 stack_save = gen_rtx_MEM (sa_mode,
690 plus_constant (buf_addr,
691 2 * GET_MODE_SIZE (Pmode)));
692 set_mem_alias_set (stack_save, setjmp_alias_set);
693 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
695 /* If there is further processing to do, do it. */
696 #ifdef HAVE_builtin_setjmp_setup
697 if (HAVE_builtin_setjmp_setup)
698 emit_insn (gen_builtin_setjmp_setup (buf_addr));
701 /* Tell optimize_save_area_alloca that extra work is going to
702 need to go on during alloca. */
703 current_function_calls_setjmp = 1;
705 /* We have a nonlocal label. */
706 current_function_has_nonlocal_label = 1;
709 /* Construct the trailing part of a __builtin_setjmp call. This is
710 also called directly by the SJLJ exception handling code. */
/* NOTE(review): incomplete in this excerpt -- return type line, braces,
   #else/#endif lines and the local i declaration are elided.  */
713 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
715 /* Clobber the FP when we get here, so we have to make sure it's
716 marked as used by this function. */
717 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
719 /* Mark the static chain as clobbered here so life information
720 doesn't get messed up for it. */
721 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
723 /* Now put in the code to restore the frame pointer, and argument
724 pointer, if needed. */
725 #ifdef HAVE_nonlocal_goto
726 if (! HAVE_nonlocal_goto)
729 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
730 /* This might change the hard frame pointer in ways that aren't
731 apparent to early optimization passes, so force a clobber. */
732 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
735 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
736 if (fixed_regs[ARG_POINTER_REGNUM])
738 #ifdef ELIMINABLE_REGS
/* If the argument pointer can be eliminated in favor of the frame
   pointer, we don't need to restore it; scan the elimination table.  */
740 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
742 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
743 if (elim_regs[i].from == ARG_POINTER_REGNUM
744 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
747 if (i == ARRAY_SIZE (elim_regs))
750 /* Now restore our arg pointer from the address at which it
751 was saved in our stack frame. */
752 emit_move_insn (virtual_incoming_args_rtx,
753 copy_to_reg (get_arg_pointer_save_area (cfun)))
758 #ifdef HAVE_builtin_setjmp_receiver
759 if (HAVE_builtin_setjmp_receiver)
760 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
763 #ifdef HAVE_nonlocal_goto_receiver
764 if (HAVE_nonlocal_goto_receiver)
765 emit_insn (gen_nonlocal_goto_receiver ());
770 /* We must not allow the code we just generated to be reordered by
771 scheduling. Specifically, the update of the frame pointer must
772 happen immediately, not later. */
773 emit_insn (gen_blockage ());
776 /* __builtin_longjmp is passed a pointer to an array of five words (not
777 all will be used on all machines). It operates similarly to the C
778 library function of the same name, but is more efficient. Much of
779 the code below is copied from the handling of non-local gotos. */
/* NOTE(review): incomplete in this excerpt -- return type line, braces,
   #else/#endif lines, and parts of the trailing insn-scanning loop
   (JUMP_P test, break statements) are elided.  */
782 expand_builtin_longjmp (rtx buf_addr, rtx value)
784 rtx fp, lab, stack, insn, last;
785 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
787 if (setjmp_alias_set == -1)
788 setjmp_alias_set = new_alias_set ();
790 buf_addr = convert_memory_address (Pmode, buf_addr);
792 buf_addr = force_reg (Pmode, buf_addr);
794 /* We used to store value in static_chain_rtx, but that fails if pointers
795 are smaller than integers. We instead require that the user must pass
796 a second argument of 1, because that is what builtin_setjmp will
797 return. This also makes EH slightly more efficient, since we are no
798 longer copying around a value that we don't care about. */
799 gcc_assert (value == const1_rtx);
801 last = get_last_insn ();
802 #ifdef HAVE_builtin_longjmp
803 if (HAVE_builtin_longjmp)
804 emit_insn (gen_builtin_longjmp (buf_addr));
/* Recover the three slots laid down by expand_builtin_setjmp_setup:
   frame pointer, receiver label, and stack save area.  */
808 fp = gen_rtx_MEM (Pmode, buf_addr);
809 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
810 GET_MODE_SIZE (Pmode)));
812 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
813 2 * GET_MODE_SIZE (Pmode)));
814 set_mem_alias_set (fp, setjmp_alias_set);
815 set_mem_alias_set (lab, setjmp_alias_set);
816 set_mem_alias_set (stack, setjmp_alias_set);
818 /* Pick up FP, label, and SP from the block and jump. This code is
819 from expand_goto in stmt.c; see there for detailed comments. */
820 #ifdef HAVE_nonlocal_goto
821 if (HAVE_nonlocal_goto)
822 /* We have to pass a value to the nonlocal_goto pattern that will
823 get copied into the static_chain pointer, but it does not matter
824 what that value is, because builtin_setjmp does not use it. */
825 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
829 lab = copy_to_reg (lab);
/* Clobber all memory and the frame so nothing is cached across the jump.  */
831 emit_insn (gen_rtx_CLOBBER (VOIDmode,
832 gen_rtx_MEM (BLKmode,
833 gen_rtx_SCRATCH (VOIDmode))));
834 emit_insn (gen_rtx_CLOBBER (VOIDmode,
835 gen_rtx_MEM (BLKmode,
836 hard_frame_pointer_rtx)));
838 emit_move_insn (hard_frame_pointer_rtx, fp);
839 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
841 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
842 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
843 emit_indirect_jump (lab);
847 /* Search backwards and mark the jump insn as a non-local goto.
848 Note that this precludes the use of __builtin_longjmp to a
849 __builtin_setjmp target in the same function. However, we've
850 already cautioned the user that these functions are for
851 internal exception handling use only. */
852 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
854 gcc_assert (insn != last);
858 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
/* A call in between (e.g. from the builtin_longjmp pattern) ends the
   scan; NOTE(review): its break body is elided from the excerpt.  */
862 else if (CALL_P (insn))
867 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
868 and the address of the save area. */
/* NOTE(review): incomplete in this excerpt -- return type line, braces,
   the NULL return on invalid arglist, #else/#endif lines and the
   insn-scanning loop's JUMP_P test/breaks are elided.  */
871 expand_builtin_nonlocal_goto (tree exp)
873 tree t_label, t_save_area;
874 rtx r_label, r_save_area, r_fp, r_sp, insn;
876 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
879 t_label = CALL_EXPR_ARG (exp, 0);
880 t_save_area = CALL_EXPR_ARG (exp, 1);
882 r_label = expand_normal (t_label);
883 r_label = convert_memory_address (Pmode, r_label);
884 r_save_area = expand_normal (t_save_area);
885 r_save_area = convert_memory_address (Pmode, r_save_area);
/* Save area layout: word 0 is the frame pointer, the rest is the
   target-mode stack save area.  */
886 r_fp = gen_rtx_MEM (Pmode, r_save_area);
887 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
888 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
890 current_function_has_nonlocal_goto = 1;
892 #ifdef HAVE_nonlocal_goto
893 /* ??? We no longer need to pass the static chain value, afaik. */
894 if (HAVE_nonlocal_goto)
895 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
899 r_label = copy_to_reg (r_label);
/* Clobber all memory and the frame so nothing is cached across the jump.  */
901 emit_insn (gen_rtx_CLOBBER (VOIDmode,
902 gen_rtx_MEM (BLKmode,
903 gen_rtx_SCRATCH (VOIDmode))));
905 emit_insn (gen_rtx_CLOBBER (VOIDmode,
906 gen_rtx_MEM (BLKmode,
907 hard_frame_pointer_rtx)));
909 /* Restore frame pointer for containing function.
910 This sets the actual hard register used for the frame pointer
911 to the location of the function's incoming static chain info.
912 The non-local goto handler will then adjust it to contain the
913 proper value and reload the argument pointer, if needed. */
914 emit_move_insn (hard_frame_pointer_rtx, r_fp);
915 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
917 /* USE of hard_frame_pointer_rtx added for consistency;
918 not clear if really needed. */
919 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
920 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
921 emit_indirect_jump (r_label);
924 /* Search backwards to the jump insn and mark it as a
926 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
930 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
931 const0_rtx, REG_NOTES (insn));
934 else if (CALL_P (insn))
941 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
942 (not all will be used on all machines) that was passed to __builtin_setjmp.
943 It updates the stack pointer in that block to correspond to the current
/* NOTE(review): comment continuation, return type line, braces, the
   stack_save declaration, #else/#endif lines and the HAVE_setjmp guard
   around gen_setjmp are elided from this excerpt.  */
947 expand_builtin_update_setjmp_buf (rtx buf_addr)
949 enum machine_mode sa_mode = Pmode;
/* Prefer the mode the save_stack_nonlocal pattern wants, else the
   target's declared save-area mode; Pmode is the fallback.  */
953 #ifdef HAVE_save_stack_nonlocal
954 if (HAVE_save_stack_nonlocal)
955 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
957 #ifdef STACK_SAVEAREA_MODE
958 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack save area is the third slot of the setjmp buffer.  */
962 = gen_rtx_MEM (sa_mode,
965 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
969 emit_insn (gen_setjmp ());
972 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
975 /* Expand a call to __builtin_prefetch. For a target that does not support
976 data prefetch, evaluate the memory address argument in case it has side
/* NOTE(review): comment continuation ("effects"), return type line, braces,
   the op0/op1/op2 declarations, fallback assignments after the error
   branches, and the closing emit for the side-effect case are elided
   from this excerpt.  */
980 expand_builtin_prefetch (tree exp)
982 tree arg0, arg1, arg2;
986 if (!validate_arglist (exp, POINTER_TYPE, 0))
989 arg0 = CALL_EXPR_ARG (exp, 0);
991 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
992 zero (read) and argument 2 (locality) defaults to 3 (high degree of
994 nargs = call_expr_nargs (exp);
996 arg1 = CALL_EXPR_ARG (exp, 1);
998 arg1 = integer_zero_node;
1000 arg2 = CALL_EXPR_ARG (exp, 2);
1002 arg2 = build_int_cst (NULL_TREE, 3);
1004 /* Argument 0 is an address. */
1005 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1007 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1008 if (TREE_CODE (arg1) != INTEGER_CST)
1010 error ("second argument to %<__builtin_prefetch%> must be a constant");
1011 arg1 = integer_zero_node;
1013 op1 = expand_normal (arg1);
1014 /* Argument 1 must be either zero or one. */
1015 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1017 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1022 /* Argument 2 (locality) must be a compile-time constant int. */
1023 if (TREE_CODE (arg2) != INTEGER_CST)
1025 error ("third argument to %<__builtin_prefetch%> must be a constant");
1026 arg2 = integer_zero_node;
1028 op2 = expand_normal (arg2);
1029 /* Argument 2 must be 0, 1, 2, or 3. */
1030 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1032 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1036 #ifdef HAVE_prefetch
/* Coerce the address into whatever the prefetch pattern accepts.  */
1039 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1041 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1042 || (GET_MODE (op0) != Pmode))
1044 op0 = convert_memory_address (Pmode, op0);
1045 op0 = force_reg (Pmode, op0);
1047 emit_insn (gen_prefetch (op0, op1, op2));
1051 /* Don't do anything with direct references to volatile memory, but
1052 generate code to handle other side effects. */
1053 if (!MEM_P (op0) && side_effects_p (op0))
1057 /* Get a MEM rtx for expression EXP which is the address of an operand
1058 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1059 the maximum length of the block of memory that might be accessed or
1063 get_memory_rtx (tree exp, tree len)
/* Expand the address and wrap it in a BLKmode MEM; attributes are
   refined below from the tree expression.  */
1065 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1066 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1068 /* Get an expression we can use to find the attributes to assign to MEM.
1069 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1070 we can. First remove any nops. */
1071 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1072 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1073 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1074 exp = TREE_OPERAND (exp, 0);
1076 if (TREE_CODE (exp) == ADDR_EXPR)
1077 exp = TREE_OPERAND (exp, 0);
1078 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1079 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1083 /* Honor attributes derived from exp, except for the alias set
1084 (as builtin stringops may alias with anything) and the size
1085 (as stringops may access multiple array elements). */
1088 set_mem_attributes (mem, exp, 0);
1090 /* Allow the string and memory builtins to overflow from one
1091 field into another, see http://gcc.gnu.org/PR23561.
1092 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1093 memory accessed by the string or memory builtin will fit
1094 within the field. */
1095 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1097 tree mem_expr = MEM_EXPR (mem);
/* OFFSET/LENGTH stay -1 ("unknown") unless proven constant below.  */
1098 HOST_WIDE_INT offset = -1, length = -1;
1101 while (TREE_CODE (inner) == ARRAY_REF
1102 || TREE_CODE (inner) == NOP_EXPR
1103 || TREE_CODE (inner) == CONVERT_EXPR
1104 || TREE_CODE (inner) == NON_LVALUE_EXPR
1105 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1106 || TREE_CODE (inner) == SAVE_EXPR)
1107 inner = TREE_OPERAND (inner, 0);
1109 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1111 if (MEM_OFFSET (mem)
1112 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1113 offset = INTVAL (MEM_OFFSET (mem));
1115 if (offset >= 0 && len && host_integerp (len, 0))
1116 length = tree_low_cst (len, 0);
/* Walk outward through nested COMPONENT_REFs, checking at each level
   whether the access [offset, offset+length) fits in the field.  */
1118 while (TREE_CODE (inner) == COMPONENT_REF)
1120 tree field = TREE_OPERAND (inner, 1);
1121 gcc_assert (! DECL_BIT_FIELD (field));
1122 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1123 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1126 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1127 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1130 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1131 /* If we can prove the memory starting at XEXP (mem, 0)
1132 and ending at XEXP (mem, 0) + LENGTH will fit into
1133 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1136 && offset + length <= size)
1141 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1142 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1143 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1151 mem_expr = TREE_OPERAND (mem_expr, 0);
1152 inner = TREE_OPERAND (inner, 0);
/* If no enclosing level was safe, drop the MEM_EXPR (and offset)
   entirely rather than keep a too-narrow COMPONENT_REF.  */
1155 if (mem_expr == NULL)
1157 if (mem_expr != MEM_EXPR (mem))
1159 set_mem_expr (mem, mem_expr);
1160 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and touch multiple elements, so clear
   the alias set and the recorded size (see comment above).  */
1163 set_mem_alias_set (mem, 0);
1164 set_mem_size (mem, NULL_RTX);
1170 /* Built-in functions to perform an untyped call and return. */
/* The three tables below are indexed by hard register number
   (FIRST_PSEUDO_REGISTER entries) and are filled in lazily by
   apply_args_size / apply_result_size.  */
1172 /* For each register that may be used for calling a function, this
1173 gives a mode used to copy the register's value. VOIDmode indicates
1174 the register is not used for calling a function. If the machine
1175 has register windows, this gives only the outbound registers.
1176 INCOMING_REGNO gives the corresponding inbound register. */
1177 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1179 /* For each register that may be used for returning values, this gives
1180 a mode used to copy the register's value. VOIDmode indicates the
1181 register is not used for returning values. If the machine has
1182 register windows, this gives only the outbound registers.
1183 INCOMING_REGNO gives the corresponding inbound register. */
1184 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1186 /* For each register that may be used for calling a function, this
1187 gives the offset of that register into the block returned by
1188 __builtin_apply_args. 0 indicates that the register is not
1189 used for calling a function. */
1190 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1192 /* Return the size required for the block returned by __builtin_apply_args,
1193 and initialize apply_args_mode. */
1196 apply_args_size (void)
/* SIZE is computed once and cached across calls (-1 means "not yet
   computed").  */
1198 static int size = -1;
1201 enum machine_mode mode;
1203 /* The values computed by this function never change. */
1206 /* The first value is the incoming arg-pointer. */
1207 size = GET_MODE_SIZE (Pmode);
1209 /* The second value is the structure value address unless this is
1210 passed as an "invisible" first argument. */
1211 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1212 size += GET_MODE_SIZE (Pmode);
1214 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1215 if (FUNCTION_ARG_REGNO_P (regno))
1217 mode = reg_raw_mode[regno];
1219 gcc_assert (mode != VOIDmode);
/* Round SIZE up to the mode's alignment before recording this
   register's slot, then advance past it.  */
1221 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1222 if (size % align != 0)
1223 size = CEIL (size, align) * align;
1224 apply_args_reg_offset[regno] = size;
1225 size += GET_MODE_SIZE (mode);
1226 apply_args_mode[regno] = mode;
/* Registers not used for argument passing get VOIDmode / offset 0.  */
1230 apply_args_mode[regno] = VOIDmode;
1231 apply_args_reg_offset[regno] = 0;
1237 /* Return the size required for the block returned by __builtin_apply,
1238 and initialize apply_result_mode. */
1241 apply_result_size (void)
/* Cached like apply_args_size; -1 means "not yet computed".  */
1243 static int size = -1;
1245 enum machine_mode mode;
1247 /* The values computed by this function never change. */
1252 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1253 if (FUNCTION_VALUE_REGNO_P (regno))
1255 mode = reg_raw_mode[regno];
1257 gcc_assert (mode != VOIDmode);
/* Align, then reserve room for this return register's value.  */
1259 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1260 if (size % align != 0)
1261 size = CEIL (size, align) * align;
1262 size += GET_MODE_SIZE (mode);
1263 apply_result_mode[regno] = mode;
1266 apply_result_mode[regno] = VOIDmode;
1268 /* Allow targets that use untyped_call and untyped_return to override
1269 the size so that machine-specific information can be stored here. */
1270 #ifdef APPLY_RESULT_SIZE
1271 size = APPLY_RESULT_SIZE;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1278 /* Create a vector describing the result block RESULT. If SAVEP is true,
1279 the result block is used to save the values; otherwise it is used to
1280 restore the values. */
1283 result_vector (int savep, rtx result)
1285 int regno, size, align, nelts;
1286 enum machine_mode mode;
/* Worst case: one SET per hard register.  */
1288 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1292 if ((mode = apply_result_mode[regno]) != VOIDmode)
1294 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1295 if (size % align != 0)
1296 size = CEIL (size, align) * align;
/* When restoring, the target register is the incoming counterpart
   of the outbound register (register windows).  */
1297 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1298 mem = adjust_address (result, mode, size);
1299 savevec[nelts++] = (savep
1300 ? gen_rtx_SET (VOIDmode, mem, reg)
1301 : gen_rtx_SET (VOIDmode, reg, mem));
1302 size += GET_MODE_SIZE (mode);
1304 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1306 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1308 /* Save the state required to perform an untyped call with the same
1309 arguments as were passed to the current function. */
1312 expand_builtin_apply_args_1 (void)
1315 int size, align, regno;
1316 enum machine_mode mode;
1317 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1319 /* Create a block where the arg-pointer, structure value address,
1320 and argument registers can be saved. */
1321 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1323 /* Walk past the arg-pointer and structure value address. */
1324 size = GET_MODE_SIZE (Pmode);
1325 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1326 size += GET_MODE_SIZE (Pmode);
1328 /* Save each register used in calling a function to the block. */
1329 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1330 if ((mode = apply_args_mode[regno]) != VOIDmode)
/* Layout mirrors apply_args_size: align, store at the recorded
   offset, advance.  */
1332 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1333 if (size % align != 0)
1334 size = CEIL (size, align) * align;
1336 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1338 emit_move_insn (adjust_address (registers, mode, size), tem);
1339 size += GET_MODE_SIZE (mode);
1342 /* Save the arg pointer to the block. */
1343 tem = copy_to_reg (virtual_incoming_args_rtx);
1344 #ifdef STACK_GROWS_DOWNWARD
1345 /* We need the pointer as the caller actually passed them to us, not
1346 as we might have pretended they were passed. Make sure it's a valid
1347 operand, as emit_move_insn isn't expected to handle a PLUS. */
1349 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1352 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1354 size = GET_MODE_SIZE (Pmode);
1356 /* Save the structure value address unless this is passed as an
1357 "invisible" first argument. */
1358 if (struct_incoming_value)
1360 emit_move_insn (adjust_address (registers, Pmode, size),
1361 copy_to_reg (struct_incoming_value));
1362 size += GET_MODE_SIZE (Pmode);
1365 /* Return the address of the block. */
1366 return copy_addr_to_reg (XEXP (registers, 0));
1369 /* __builtin_apply_args returns block of memory allocated on
1370 the stack into which is stored the arg pointer, structure
1371 value address, static chain, and all the registers that might
1372 possibly be used in performing a function call. The code is
1373 moved to the start of the function so the incoming values are
1377 expand_builtin_apply_args (void)
1379 /* Don't do __builtin_apply_args more than once in a function.
1380 Save the result of the first call and reuse it. */
1381 if (apply_args_value != 0)
1382 return apply_args_value;
1384 /* When this function is called, it means that registers must be
1385 saved on entry to this function. So we migrate the
1386 call to the first insn of this function. */
1391 temp = expand_builtin_apply_args_1 ();
/* Cache the result so subsequent uses in this function reuse it.  */
1395 apply_args_value = temp;
1397 /* Put the insns after the NOTE that starts the function.
1398 If this is inside a start_sequence, make the outer-level insn
1399 chain current, so the code is placed at the start of the
1401 push_topmost_sequence ();
1402 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1403 pop_topmost_sequence ();
1408 /* Perform an untyped call and save the state required to perform an
1409 untyped return of whatever value was returned by the given function. */
1412 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1414 int size, align, regno;
1415 enum machine_mode mode;
1416 rtx incoming_args, result, reg, dest, src, call_insn;
1417 rtx old_stack_level = 0;
1418 rtx call_fusage = 0;
1419 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1421 arguments = convert_memory_address (Pmode, arguments);
1423 /* Create a block where the return registers can be saved. */
1424 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1426 /* Fetch the arg pointer from the ARGUMENTS block. */
1427 incoming_args = gen_reg_rtx (Pmode);
1428 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1429 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the argument block starts ARGSIZE below
   the saved arg pointer.  */
1430 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1431 incoming_args, 0, OPTAB_LIB_WIDEN);
1434 /* Push a new argument block and copy the arguments. Do not allow
1435 the (potential) memcpy call below to interfere with our stack
1437 do_pending_stack_adjust ();
1440 /* Save the stack with nonlocal if available. */
1441 #ifdef HAVE_save_stack_nonlocal
1442 if (HAVE_save_stack_nonlocal)
1443 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX)
1446 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1448 /* Allocate a block of memory onto the stack and copy the memory
1449 arguments to the outgoing arguments address. */
1450 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1451 dest = virtual_outgoing_args_rtx;
1452 #ifndef STACK_GROWS_DOWNWARD
1453 if (GET_CODE (argsize) == CONST_INT)
1454 dest = plus_constant (dest, -INTVAL (argsize));
1456 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1458 dest = gen_rtx_MEM (BLKmode, dest);
1459 set_mem_align (dest, PARM_BOUNDARY);
1460 src = gen_rtx_MEM (BLKmode, incoming_args);
1461 set_mem_align (src, PARM_BOUNDARY);
1462 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1464 /* Refer to the argument block. */
1466 arguments = gen_rtx_MEM (BLKmode, arguments);
1467 set_mem_align (arguments, PARM_BOUNDARY);
1469 /* Walk past the arg-pointer and structure value address. */
1470 size = GET_MODE_SIZE (Pmode);
1472 size += GET_MODE_SIZE (Pmode);
1474 /* Restore each of the registers previously saved. Make USE insns
1475 for each of these registers for use in making the call. */
1476 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1477 if ((mode = apply_args_mode[regno]) != VOIDmode)
/* Same layout as apply_args_size/expand_builtin_apply_args_1.  */
1479 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1480 if (size % align != 0)
1481 size = CEIL (size, align) * align;
1482 reg = gen_rtx_REG (mode, regno);
1483 emit_move_insn (reg, adjust_address (arguments, mode, size));
1484 use_reg (&call_fusage, reg);
1485 size += GET_MODE_SIZE (mode);
1488 /* Restore the structure value address unless this is passed as an
1489 "invisible" first argument. */
1490 size = GET_MODE_SIZE (Pmode);
1493 rtx value = gen_reg_rtx (Pmode);
1494 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1495 emit_move_insn (struct_value, value);
1496 if (REG_P (struct_value))
1497 use_reg (&call_fusage, struct_value);
1498 size += GET_MODE_SIZE (Pmode);
1501 /* All arguments and registers used for the call are set up by now! */
1502 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1504 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1505 and we don't want to load it into a register as an optimization,
1506 because prepare_call_address already did it if it should be done. */
1507 if (GET_CODE (function) != SYMBOL_REF)
1508 function = memory_address (FUNCTION_MODE, function);
1510 /* Generate the actual call instruction and save the return value. */
1511 #ifdef HAVE_untyped_call
1512 if (HAVE_untyped_call)
1513 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1514 result, result_vector (1, result)));
1517 #ifdef HAVE_call_value
1518 if (HAVE_call_value)
1522 /* Locate the unique return register. It is not possible to
1523 express a call that sets more than one return register using
1524 call_value; use untyped_call for that. In fact, untyped_call
1525 only needs to save the return registers in the given block. */
1526 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1527 if ((mode = apply_result_mode[regno]) != VOIDmode)
1529 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1531 valreg = gen_rtx_REG (mode, regno);
1534 emit_call_insn (GEN_CALL_VALUE (valreg,
1535 gen_rtx_MEM (FUNCTION_MODE, function),
1536 const0_rtx, NULL_RTX, const0_rtx));
1538 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1544 /* Find the CALL insn we just emitted, and attach the register usage
1546 call_insn = last_call_insn ();
1547 add_function_usage_to (call_insn, call_fusage);
1549 /* Restore the stack. */
1550 #ifdef HAVE_save_stack_nonlocal
1551 if (HAVE_save_stack_nonlocal)
1552 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1555 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1559 /* Return the address of the result block. */
1560 result = copy_addr_to_reg (XEXP (result, 0));
1561 return convert_memory_address (ptr_mode, result);
1564 /* Perform an untyped return. */
1567 expand_builtin_return (rtx result)
1569 int size, align, regno;
1570 enum machine_mode mode;
1572 rtx call_fusage = 0;
1574 result = convert_memory_address (Pmode, result);
/* Ensure apply_result_mode[] is initialized before we consult it.  */
1576 apply_result_size ();
1577 result = gen_rtx_MEM (BLKmode, result);
1579 #ifdef HAVE_untyped_return
1580 if (HAVE_untyped_return)
1582 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1588 /* Restore the return value and note that each value is used. */
1590 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1591 if ((mode = apply_result_mode[regno]) != VOIDmode)
1593 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1594 if (size % align != 0)
1595 size = CEIL (size, align) * align;
1596 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1597 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate a USE for each restored register so they are not
   considered dead before the return.  */
1599 push_to_sequence (call_fusage);
1600 emit_insn (gen_rtx_USE (VOIDmode, reg));
1601 call_fusage = get_insns ();
1603 size += GET_MODE_SIZE (mode);
1606 /* Put the USE insns before the return. */
1607 emit_insn (call_fusage);
1609 /* Return whatever values was restored by jumping directly to the end
1611 expand_naked_return ();
1614 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code onto the libgcc `enum type_class' value that
   __builtin_classify_type reports.  */
1616 static enum type_class
1617 type_to_class (tree type)
1619 switch (TREE_CODE (type))
1621 case VOID_TYPE: return void_type_class;
1622 case INTEGER_TYPE: return integer_type_class;
1623 case ENUMERAL_TYPE: return enumeral_type_class;
1624 case BOOLEAN_TYPE: return boolean_type_class;
1625 case POINTER_TYPE: return pointer_type_class;
1626 case REFERENCE_TYPE: return reference_type_class;
1627 case OFFSET_TYPE: return offset_type_class;
1628 case REAL_TYPE: return real_type_class;
1629 case COMPLEX_TYPE: return complex_type_class;
1630 case FUNCTION_TYPE: return function_type_class;
1631 case METHOD_TYPE: return method_type_class;
1632 case RECORD_TYPE: return record_type_class;
1634 case QUAL_UNION_TYPE: return union_type_class;
/* Character arrays (TYPE_STRING_FLAG) classify as strings, all
   other arrays as arrays.  */
1635 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1636 ? string_type_class : array_type_class);
1637 case LANG_TYPE: return lang_type_class;
1638 default: return no_type_class;
1642 /* Expand a call EXP to __builtin_classify_type. */
1645 expand_builtin_classify_type (tree exp)
/* With an argument, classify its type; with no arguments the result
   is no_type_class.  */
1647 if (call_expr_nargs (exp))
1648 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1649 return GEN_INT (no_type_class);
1652 /* This helper macro, meant to be used in mathfn_built_in below,
1653 determines which among a set of three builtin math functions is
1654 appropriate for a given type mode. The `F' and `L' cases are
1655 automatically generated from the `double' case. */
/* Both macros assign the local variables fcode/fcodef/fcodel declared
   in mathfn_built_in, then break out of the switch.  */
1656 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1657 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1658 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1659 fcodel = BUILT_IN_MATHFN##L ; break;
1660 /* Similar to above, but appends _R after any F/L suffix. */
1661 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1663 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1664 fcodel = BUILT_IN_MATHFN##L_R ; break;
1666 /* Return mathematic function equivalent to FN but operating directly
1667 on TYPE, if available. If we can't do the conversion, return zero. */
1669 mathfn_built_in (tree type, enum built_in_function fn)
/* FCODE/FCODEF/FCODEL receive the double/float/long double variants
   of FN via the CASE_MATHFN* macros; the right one is chosen by
   TYPE's main variant at the end.  */
1671 enum built_in_function fcode, fcodef, fcodel;
1675 CASE_MATHFN (BUILT_IN_ACOS)
1676 CASE_MATHFN (BUILT_IN_ACOSH)
1677 CASE_MATHFN (BUILT_IN_ASIN)
1678 CASE_MATHFN (BUILT_IN_ASINH)
1679 CASE_MATHFN (BUILT_IN_ATAN)
1680 CASE_MATHFN (BUILT_IN_ATAN2)
1681 CASE_MATHFN (BUILT_IN_ATANH)
1682 CASE_MATHFN (BUILT_IN_CBRT)
1683 CASE_MATHFN (BUILT_IN_CEIL)
1684 CASE_MATHFN (BUILT_IN_CEXPI)
1685 CASE_MATHFN (BUILT_IN_COPYSIGN)
1686 CASE_MATHFN (BUILT_IN_COS)
1687 CASE_MATHFN (BUILT_IN_COSH)
1688 CASE_MATHFN (BUILT_IN_DREM)
1689 CASE_MATHFN (BUILT_IN_ERF)
1690 CASE_MATHFN (BUILT_IN_ERFC)
1691 CASE_MATHFN (BUILT_IN_EXP)
1692 CASE_MATHFN (BUILT_IN_EXP10)
1693 CASE_MATHFN (BUILT_IN_EXP2)
1694 CASE_MATHFN (BUILT_IN_EXPM1)
1695 CASE_MATHFN (BUILT_IN_FABS)
1696 CASE_MATHFN (BUILT_IN_FDIM)
1697 CASE_MATHFN (BUILT_IN_FLOOR)
1698 CASE_MATHFN (BUILT_IN_FMA)
1699 CASE_MATHFN (BUILT_IN_FMAX)
1700 CASE_MATHFN (BUILT_IN_FMIN)
1701 CASE_MATHFN (BUILT_IN_FMOD)
1702 CASE_MATHFN (BUILT_IN_FREXP)
1703 CASE_MATHFN (BUILT_IN_GAMMA)
1704 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1705 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1706 CASE_MATHFN (BUILT_IN_HYPOT)
1707 CASE_MATHFN (BUILT_IN_ILOGB)
1708 CASE_MATHFN (BUILT_IN_INF)
1709 CASE_MATHFN (BUILT_IN_ISINF)
1710 CASE_MATHFN (BUILT_IN_J0)
1711 CASE_MATHFN (BUILT_IN_J1)
1712 CASE_MATHFN (BUILT_IN_JN)
1713 CASE_MATHFN (BUILT_IN_LCEIL)
1714 CASE_MATHFN (BUILT_IN_LDEXP)
1715 CASE_MATHFN (BUILT_IN_LFLOOR)
1716 CASE_MATHFN (BUILT_IN_LGAMMA)
1717 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1718 CASE_MATHFN (BUILT_IN_LLCEIL)
1719 CASE_MATHFN (BUILT_IN_LLFLOOR)
1720 CASE_MATHFN (BUILT_IN_LLRINT)
1721 CASE_MATHFN (BUILT_IN_LLROUND)
1722 CASE_MATHFN (BUILT_IN_LOG)
1723 CASE_MATHFN (BUILT_IN_LOG10)
1724 CASE_MATHFN (BUILT_IN_LOG1P)
1725 CASE_MATHFN (BUILT_IN_LOG2)
1726 CASE_MATHFN (BUILT_IN_LOGB)
1727 CASE_MATHFN (BUILT_IN_LRINT)
1728 CASE_MATHFN (BUILT_IN_LROUND)
1729 CASE_MATHFN (BUILT_IN_MODF)
1730 CASE_MATHFN (BUILT_IN_NAN)
1731 CASE_MATHFN (BUILT_IN_NANS)
1732 CASE_MATHFN (BUILT_IN_NEARBYINT)
1733 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1734 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1735 CASE_MATHFN (BUILT_IN_POW)
1736 CASE_MATHFN (BUILT_IN_POWI)
1737 CASE_MATHFN (BUILT_IN_POW10)
1738 CASE_MATHFN (BUILT_IN_REMAINDER)
1739 CASE_MATHFN (BUILT_IN_REMQUO)
1740 CASE_MATHFN (BUILT_IN_RINT)
1741 CASE_MATHFN (BUILT_IN_ROUND)
1742 CASE_MATHFN (BUILT_IN_SCALB)
1743 CASE_MATHFN (BUILT_IN_SCALBLN)
1744 CASE_MATHFN (BUILT_IN_SCALBN)
1745 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1746 CASE_MATHFN (BUILT_IN_SIN)
1747 CASE_MATHFN (BUILT_IN_SINCOS)
1748 CASE_MATHFN (BUILT_IN_SINH)
1749 CASE_MATHFN (BUILT_IN_SQRT)
1750 CASE_MATHFN (BUILT_IN_TAN)
1751 CASE_MATHFN (BUILT_IN_TANH)
1752 CASE_MATHFN (BUILT_IN_TGAMMA)
1753 CASE_MATHFN (BUILT_IN_TRUNC)
1754 CASE_MATHFN (BUILT_IN_Y0)
1755 CASE_MATHFN (BUILT_IN_Y1)
1756 CASE_MATHFN (BUILT_IN_YN)
/* Select the implicit builtin decl matching TYPE; fall through for
   unsupported types.  */
1762 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1763 return implicit_built_in_decls[fcode];
1764 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1765 return implicit_built_in_decls[fcodef];
1766 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1767 return implicit_built_in_decls[fcodel];
1772 /* If errno must be maintained, expand the RTL to check if the result,
1773 TARGET, of a built-in function call, EXP, is NaN, and if so set
1777 expand_errno_check (tree exp, rtx target)
1779 rtx lab = gen_label_rtx ();
1781 /* Test the result; if it is NaN, set errno=EDOM because
1782 the argument was not in the domain. */
/* A self-comparison with EQ is false only for NaN, so this jump
   skips the errno store for non-NaN results.  */
1783 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1787 /* If this built-in doesn't throw an exception, set errno directly. */
1788 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1790 #ifdef GEN_ERRNO_RTX
1791 rtx errno_rtx = GEN_ERRNO_RTX;
/* Without a target-provided errno rtx, fall back to the symbol
   "errno" in word_mode.  */
1794 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1796 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1802 /* We can't set errno=EDOM directly; let the library call do it.
1803 Pop the arguments right away in case the call gets deleted. */
1805 expand_call (exp, target, 0);
1810 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1811 Return NULL_RTX if a normal call should be emitted rather than expanding
1812 the function in-line. EXP is the expression that is a call to the builtin
1813 function; if convenient, the result should be placed in TARGET.
1814 SUBTARGET may be used as the target for computing one of EXP's operands. */
1817 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1819 optab builtin_optab;
1820 rtx op0, insns, before_call;
1821 tree fndecl = get_callee_fndecl (exp);
1822 enum machine_mode mode;
1823 bool errno_set = false;
1826 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1829 arg = CALL_EXPR_ARG (exp, 0);
/* Pick the optab for this builtin and note whether expansion must
   preserve errno semantics.  */
1831 switch (DECL_FUNCTION_CODE (fndecl))
1833 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets EDOM for negative arguments; skip the errno check
   when the argument is provably non-negative.  */
1834 errno_set = ! tree_expr_nonnegative_p (arg);
1835 builtin_optab = sqrt_optab;
1837 CASE_FLT_FN (BUILT_IN_EXP):
1838 errno_set = true; builtin_optab = exp_optab; break;
1839 CASE_FLT_FN (BUILT_IN_EXP10):
1840 CASE_FLT_FN (BUILT_IN_POW10):
1841 errno_set = true; builtin_optab = exp10_optab; break;
1842 CASE_FLT_FN (BUILT_IN_EXP2):
1843 errno_set = true; builtin_optab = exp2_optab; break;
1844 CASE_FLT_FN (BUILT_IN_EXPM1):
1845 errno_set = true; builtin_optab = expm1_optab; break;
1846 CASE_FLT_FN (BUILT_IN_LOGB):
1847 errno_set = true; builtin_optab = logb_optab; break;
1848 CASE_FLT_FN (BUILT_IN_LOG):
1849 errno_set = true; builtin_optab = log_optab; break;
1850 CASE_FLT_FN (BUILT_IN_LOG10):
1851 errno_set = true; builtin_optab = log10_optab; break;
1852 CASE_FLT_FN (BUILT_IN_LOG2):
1853 errno_set = true; builtin_optab = log2_optab; break;
1854 CASE_FLT_FN (BUILT_IN_LOG1P):
1855 errno_set = true; builtin_optab = log1p_optab; break;
1856 CASE_FLT_FN (BUILT_IN_ASIN):
1857 builtin_optab = asin_optab; break;
1858 CASE_FLT_FN (BUILT_IN_ACOS):
1859 builtin_optab = acos_optab; break;
1860 CASE_FLT_FN (BUILT_IN_TAN):
1861 builtin_optab = tan_optab; break;
1862 CASE_FLT_FN (BUILT_IN_ATAN):
1863 builtin_optab = atan_optab; break;
1864 CASE_FLT_FN (BUILT_IN_FLOOR):
1865 builtin_optab = floor_optab; break;
1866 CASE_FLT_FN (BUILT_IN_CEIL):
1867 builtin_optab = ceil_optab; break;
1868 CASE_FLT_FN (BUILT_IN_TRUNC):
1869 builtin_optab = btrunc_optab; break;
1870 CASE_FLT_FN (BUILT_IN_ROUND):
1871 builtin_optab = round_optab; break;
1872 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1873 builtin_optab = nearbyint_optab;
1874 if (flag_trapping_math)
1876 /* Else fallthrough and expand as rint. */
1877 CASE_FLT_FN (BUILT_IN_RINT):
1878 builtin_optab = rint_optab; break;
1883 /* Make a suitable register to place result in. */
1884 mode = TYPE_MODE (TREE_TYPE (exp));
/* errno handling is only needed when -fmath-errno is in effect and
   the mode honors NaNs.  */
1886 if (! flag_errno_math || ! HONOR_NANS (mode))
1889 /* Before working hard, check whether the instruction is available. */
1890 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1892 target = gen_reg_rtx (mode);
1894 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1895 need to expand the argument again. This way, we will not perform
1896 side-effects more the once. */
1897 narg = builtin_save_expr (arg);
1901 exp = build_call_expr (fndecl, 1, arg);
1904 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1908 /* Compute into TARGET.
1909 Set TARGET to wherever the result comes back. */
1910 target = expand_unop (mode, builtin_optab, op0, target, 0);
1915 expand_errno_check (exp, target);
1917 /* Output the entire sequence. */
1918 insns = get_insns ();
1924 /* If we were unable to expand via the builtin, stop the sequence
1925 (without outputting the insns) and call to the library function
1926 with the stabilized argument list. */
1930 before_call = get_last_insn ();
1932 target = expand_call (exp, target, target == const0_rtx);
1934 /* If this is a sqrt operation and we don't care about errno, try to
1935 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1936 This allows the semantics of the libcall to be visible to the RTL
1938 if (builtin_optab == sqrt_optab && !errno_set)
1940 /* Search backwards through the insns emitted by expand_call looking
1941 for the instruction with the REG_RETVAL note. */
1942 rtx last = get_last_insn ();
1943 while (last != before_call)
1945 if (find_reg_note (last, REG_RETVAL, NULL))
1947 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1948 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1949 two elements, i.e. symbol_ref(sqrt) and the operand. */
1951 && GET_CODE (note) == EXPR_LIST
1952 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1953 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1954 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1956 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1957 /* Check operand is a register with expected mode. */
1960 && GET_MODE (operand) == mode)
1962 /* Replace the REG_EQUAL note with a SQRT rtx. */
1963 rtx equiv = gen_rtx_SQRT (mode, operand);
1964 set_unique_reg_note (last, REG_EQUAL, equiv);
1969 last = PREV_INSN (last);
1976 /* Expand a call to the builtin binary math functions (pow and atan2).
1977 Return NULL_RTX if a normal call should be emitted rather than expanding the
1978 function in-line. EXP is the expression that is a call to the builtin
1979 function; if convenient, the result should be placed in TARGET.
1980 SUBTARGET may be used as the target for computing one of EXP's
1984 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1986 optab builtin_optab;
1987 rtx op0, op1, insns;
/* The second operand is a real except for ldexp/scalbn/scalbln,
   which take an integer exponent (adjusted below).  */
1988 int op1_type = REAL_TYPE;
1989 tree fndecl = get_callee_fndecl (exp);
1990 tree arg0, arg1, narg;
1991 enum machine_mode mode;
1992 bool errno_set = true;
1995 switch (DECL_FUNCTION_CODE (fndecl))
1997 CASE_FLT_FN (BUILT_IN_SCALBN):
1998 CASE_FLT_FN (BUILT_IN_SCALBLN):
1999 CASE_FLT_FN (BUILT_IN_LDEXP):
2000 op1_type = INTEGER_TYPE;
2005 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2008 arg0 = CALL_EXPR_ARG (exp, 0);
2009 arg1 = CALL_EXPR_ARG (exp, 1);
/* Choose the optab; scalb/scalbn are only expandable when the
   target's float radix is 2.  */
2011 switch (DECL_FUNCTION_CODE (fndecl))
2013 CASE_FLT_FN (BUILT_IN_POW):
2014 builtin_optab = pow_optab; break;
2015 CASE_FLT_FN (BUILT_IN_ATAN2):
2016 builtin_optab = atan2_optab; break;
2017 CASE_FLT_FN (BUILT_IN_SCALB):
2018 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2020 builtin_optab = scalb_optab; break;
2021 CASE_FLT_FN (BUILT_IN_SCALBN):
2022 CASE_FLT_FN (BUILT_IN_SCALBLN):
2023 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2025 /* Fall through... */
2026 CASE_FLT_FN (BUILT_IN_LDEXP):
2027 builtin_optab = ldexp_optab; break;
2028 CASE_FLT_FN (BUILT_IN_FMOD):
2029 builtin_optab = fmod_optab; break;
2030 CASE_FLT_FN (BUILT_IN_REMAINDER):
2031 CASE_FLT_FN (BUILT_IN_DREM):
2032 builtin_optab = remainder_optab; break;
2037 /* Make a suitable register to place result in. */
2038 mode = TYPE_MODE (TREE_TYPE (exp));
2040 /* Before working hard, check whether the instruction is available. */
2041 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2044 target = gen_reg_rtx (mode);
2046 if (! flag_errno_math || ! HONOR_NANS (mode))
2049 /* Always stabilize the argument list. */
2050 narg = builtin_save_expr (arg1);
2056 narg = builtin_save_expr (arg0);
2064 exp = build_call_expr (fndecl, 2, arg0, arg1);
2066 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2067 op1 = expand_normal (arg1);
2071 /* Compute into TARGET.
2072 Set TARGET to wherever the result comes back. */
2073 target = expand_binop (mode, builtin_optab, op0, op1,
2074 target, 0, OPTAB_DIRECT);
2076 /* If we were unable to expand via the builtin, stop the sequence
2077 (without outputting the insns) and call to the library function
2078 with the stabilized argument list. */
2082 return expand_call (exp, target, target == const0_rtx);
2086 expand_errno_check (exp, target);
2088 /* Output the entire sequence. */
2089 insns = get_insns ();
2096 /* Expand a call to the builtin sin and cos math functions.
2097 Return NULL_RTX if a normal call should be emitted rather than expanding the
2098 function in-line. EXP is the expression that is a call to the builtin
2099 function; if convenient, the result should be placed in TARGET.
2100 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps (braces, breaks, returns not shown).  */
2104 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2106 optab builtin_optab;
2108 tree fndecl = get_callee_fndecl (exp);
2109 enum machine_mode mode;
2112 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2115 arg = CALL_EXPR_ARG (exp, 0);
/* Prefer the combined sincos optab for both sin and cos; fall back to
   the single-result optabs below if it is unavailable.  */
2117 switch (DECL_FUNCTION_CODE (fndecl))
2119 CASE_FLT_FN (BUILT_IN_SIN):
2120 CASE_FLT_FN (BUILT_IN_COS):
2121 builtin_optab = sincos_optab; break;
2126 /* Make a suitable register to place result in. */
2127 mode = TYPE_MODE (TREE_TYPE (exp));
2129 /* Check if sincos insn is available, otherwise fallback
2130 to sin or cos insn. */
2131 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2132 switch (DECL_FUNCTION_CODE (fndecl))
2134 CASE_FLT_FN (BUILT_IN_SIN):
2135 builtin_optab = sin_optab; break;
2136 CASE_FLT_FN (BUILT_IN_COS):
2137 builtin_optab = cos_optab; break;
2142 /* Before working hard, check whether the instruction is available. */
2143 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2145 target = gen_reg_rtx (mode);
2147 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2148 need to expand the argument again. This way, we will not perform
2149 side-effects more than once. */
2150 narg = save_expr (arg)
2154 exp = build_call_expr (fndecl, 1, arg);
2157 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2161 /* Compute into TARGET.
2162 Set TARGET to wherever the result comes back. */
/* sincos yields two values; which output operand receives TARGET selects
   whether the sin or the cos result is kept.  */
2163 if (builtin_optab == sincos_optab)
2167 switch (DECL_FUNCTION_CODE (fndecl))
2169 CASE_FLT_FN (BUILT_IN_SIN):
2170 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2172 CASE_FLT_FN (BUILT_IN_COS):
2173 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2178 gcc_assert (result);
2182 target = expand_unop (mode, builtin_optab, op0, target, 0);
2187 /* Output the entire sequence. */
2188 insns = get_insns ();
2194 /* If we were unable to expand via the builtin, stop the sequence
2195 (without outputting the insns) and call to the library function
2196 with the stabilized argument list. */
2200 target = expand_call (exp, target, target == const0_rtx);
2205 /* Expand a call to one of the builtin math functions that operate on
2206 floating point argument and output an integer result (ilogb, isinf,
2208 Return 0 if a normal call should be emitted rather than expanding the
2209 function in-line. EXP is the expression that is a call to the builtin
2210 function; if convenient, the result should be placed in TARGET.
2211 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps.  */
2214 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2216 optab builtin_optab;
2217 enum insn_code icode;
2219 tree fndecl = get_callee_fndecl (exp);
2220 enum machine_mode mode;
2221 bool errno_set = false;
2224 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2227 arg = CALL_EXPR_ARG (exp, 0);
2229 switch (DECL_FUNCTION_CODE (fndecl))
2231 CASE_FLT_FN (BUILT_IN_ILOGB):
/* ilogb can be required to set errno to EDOM, which we cannot model.  */
2232 errno_set = true; builtin_optab = ilogb_optab; break;
2233 CASE_FLT_FN (BUILT_IN_ISINF):
2234 builtin_optab = isinf_optab; break;
2239 /* There's no easy way to detect the case we need to set EDOM. */
2240 if (flag_errno_math && errno_set)
2243 /* Optab mode depends on the mode of the input argument. */
2244 mode = TYPE_MODE (TREE_TYPE (arg));
2246 icode = builtin_optab->handlers[(int) mode].insn_code;
2248 /* Before working hard, check whether the instruction is available. */
2249 if (icode != CODE_FOR_nothing)
2251 /* Make a suitable register to place result in. */
2253 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2254 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
/* Result register must satisfy the insn's output predicate.  */
2256 gcc_assert (insn_data[icode].operand[0].predicate
2257 (target, GET_MODE (target)));
2259 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2260 need to expand the argument again. This way, we will not perform
2261 side-effects more than once. */
2262 narg = builtin_save_expr (arg);
2266 exp = build_call_expr (fndecl, 1, arg);
2269 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* Convert the operand into the mode the optab insn expects.  */
2271 if (mode != GET_MODE (op0))
2272 op0 = convert_to_mode (mode, op0, 0);
2274 /* Compute into TARGET.
2275 Set TARGET to wherever the result comes back. */
2276 emit_unop_insn (icode, target, op0, UNKNOWN);
/* Fallback: emit a normal library call.  */
2280 target = expand_call (exp, target, target == const0_rtx);
2285 /* Expand a call to the builtin sincos math function.
2286 Return NULL_RTX if a normal call should be emitted rather than expanding the
2287 function in-line. EXP is the expression that is a call to the builtin
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps.  */
2291 expand_builtin_sincos (tree exp)
2293 rtx op0, op1, op2, target1, target2;
2294 enum machine_mode mode;
2295 tree arg, sinp, cosp;
/* sincos (x, double *sinp, double *cosp) -- one REAL plus two pointers.  */
2298 if (!validate_arglist (exp, REAL_TYPE,
2299 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2302 arg = CALL_EXPR_ARG (exp, 0);
2303 sinp = CALL_EXPR_ARG (exp, 1);
2304 cosp = CALL_EXPR_ARG (exp, 2);
2306 /* Make a suitable register to place result in. */
2307 mode = TYPE_MODE (TREE_TYPE (arg));
2309 /* Check if sincos insn is available, otherwise emit the call. */
2310 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2313 target1 = gen_reg_rtx (mode);
2314 target2 = gen_reg_rtx (mode);
2316 op0 = expand_normal (arg);
/* op1/op2 are the dereferenced output locations *sinp and *cosp.  */
2317 op1 = expand_normal (build_fold_indirect_ref (sinp));
2318 op2 = expand_normal (build_fold_indirect_ref (cosp));
2320 /* Compute into target1 and target2.
2321 Set TARGET to wherever the result comes back. */
2322 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2323 gcc_assert (result);
2325 /* Move target1 and target2 to the memory locations indicated
2327 emit_move_insn (op1, target1);
2328 emit_move_insn (op2, target2);
2333 /* Expand a call to the internal cexpi builtin to the sincos math function.
2334 EXP is the expression that is a call to the builtin function; if convenient,
2335 the result should be placed in TARGET. SUBTARGET may be used as the target
2336 for computing one of EXP's operands. */
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps.  Three strategies are visible below: (1) sincos optab,
   (2) a call to the sincos library function, (3) a call to cexp.  */
2339 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2341 tree fndecl = get_callee_fndecl (exp);
2343 enum machine_mode mode;
2346 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2349 arg = CALL_EXPR_ARG (exp, 0);
2350 type = TREE_TYPE (arg);
2351 mode = TYPE_MODE (TREE_TYPE (arg));
2353 /* Try expanding via a sincos optab, fall back to emitting a libcall
2354 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2355 is only generated from sincos, cexp or if we have either of them. */
2356 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2358 op1 = gen_reg_rtx (mode);
2359 op2 = gen_reg_rtx (mode);
2361 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2363 /* Compute into op1 and op2. */
2364 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2366 else if (TARGET_HAS_SINCOS)
2368 tree call, fn = NULL_TREE;
/* Pick the sincos variant matching the cexpi precision.  */
2372 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2373 fn = built_in_decls[BUILT_IN_SINCOSF];
2374 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2375 fn = built_in_decls[BUILT_IN_SINCOS];
2376 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2377 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Stack temporaries receive the two results; their addresses are passed
   to sincos as pointer arguments.  */
2381 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2382 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2383 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2384 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2385 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2386 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2388 /* Make sure not to fold the sincos call again. */
2389 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2390 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2391 call, 3, arg, top1, top2));
2395 tree call, fn = NULL_TREE, narg;
2396 tree ctype = build_complex_type (type);
2398 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2399 fn = built_in_decls[BUILT_IN_CEXPF];
2400 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2401 fn = built_in_decls[BUILT_IN_CEXP];
2402 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2403 fn = built_in_decls[BUILT_IN_CEXPL];
2407 /* If we don't have a decl for cexp create one. This is the
2408 friendliest fallback if the user calls __builtin_cexpi
2409 without full target C99 function support. */
2410 if (fn == NULL_TREE)
2413 const char *name = NULL;
/* NOTE(review): the assignments to NAME are elided here (lines 2416,
   2418, 2420 of the original are missing).  */
2415 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2417 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2419 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2422 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2423 fn = build_fn_decl (name, fntype);
/* cexpi (x) == cexp (0 + x*i): build the pure-imaginary argument.  */
2426 narg = fold_build2 (COMPLEX_EXPR, ctype,
2427 build_real (type, dconst0), arg);
2429 /* Make sure not to fold the cexp call again. */
2430 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2431 return expand_expr (build_call_nary (ctype, call, 1, narg),
2432 target, VOIDmode, EXPAND_NORMAL);
2435 /* Now build the proper return type. */
/* Combine the two real results into a COMPLEX_EXPR (cos real, sin imag).  */
2436 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2437 make_tree (TREE_TYPE (arg), op2),
2438 make_tree (TREE_TYPE (arg), op1)),
2439 target, VOIDmode, EXPAND_NORMAL);
2442 /* Expand a call to one of the builtin rounding functions gcc defines
2443 as an extension (lfloor and lceil). As these are gcc extensions we
2444 do not need to worry about setting errno to EDOM.
2445 If expanding via optab fails, lower expression to (int)(floor(x)).
2446 EXP is the expression that is a call to the builtin function;
2447 if convenient, the result should be placed in TARGET. SUBTARGET may
2448 be used as the target for computing one of EXP's operands. */
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps (including the NAME string assignments in the switch).  */
2451 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2453 convert_optab builtin_optab;
2454 rtx op0, insns, tmp;
2455 tree fndecl = get_callee_fndecl (exp);
2456 enum built_in_function fallback_fn;
2457 tree fallback_fndecl;
2458 enum machine_mode mode;
2461 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2464 arg = CALL_EXPR_ARG (exp, 0);
/* Choose the direct conversion optab and the FP rounding builtin used
   as fallback (result is then fixed to integer via expand_fix).  */
2466 switch (DECL_FUNCTION_CODE (fndecl))
2468 CASE_FLT_FN (BUILT_IN_LCEIL):
2469 CASE_FLT_FN (BUILT_IN_LLCEIL):
2470 builtin_optab = lceil_optab;
2471 fallback_fn = BUILT_IN_CEIL;
2474 CASE_FLT_FN (BUILT_IN_LFLOOR):
2475 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2476 builtin_optab = lfloor_optab;
2477 fallback_fn = BUILT_IN_FLOOR;
2484 /* Make a suitable register to place result in. */
2485 mode = TYPE_MODE (TREE_TYPE (exp));
2487 target = gen_reg_rtx (mode);
2489 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2490 need to expand the argument again. This way, we will not perform
2491 side-effects more than once. */
2492 narg = builtin_save_expr (arg);
2496 exp = build_call_expr (fndecl, 1, arg);
2499 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2503 /* Compute into TARGET. */
2504 if (expand_sfix_optab (target, op0, builtin_optab))
2506 /* Output the entire sequence. */
2507 insns = get_insns ();
2513 /* If we were unable to expand via the builtin, stop the sequence
2514 (without outputting the insns). */
2517 /* Fall back to floating point rounding optab. */
2518 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2520 /* For non-C99 targets we may end up without a fallback fndecl here
2521 if the user called __builtin_lfloor directly. In this case emit
2522 a call to the floor/ceil variants nevertheless. This should result
2523 in the best user experience for not full C99 targets. */
2524 if (fallback_fndecl == NULL_TREE)
2527 const char *name = NULL;
/* Map each lceil/lfloor variant onto the matching libm function name
   (assignments to NAME are elided from this listing).  */
2529 switch (DECL_FUNCTION_CODE (fndecl))
2531 case BUILT_IN_LCEIL:
2532 case BUILT_IN_LLCEIL:
2535 case BUILT_IN_LCEILF:
2536 case BUILT_IN_LLCEILF:
2539 case BUILT_IN_LCEILL:
2540 case BUILT_IN_LLCEILL:
2543 case BUILT_IN_LFLOOR:
2544 case BUILT_IN_LLFLOOR:
2547 case BUILT_IN_LFLOORF:
2548 case BUILT_IN_LLFLOORF:
2551 case BUILT_IN_LFLOORL:
2552 case BUILT_IN_LLFLOORL:
2559 fntype = build_function_type_list (TREE_TYPE (arg),
2560 TREE_TYPE (arg), NULL_TREE);
2561 fallback_fndecl = build_fn_decl (name, fntype);
/* Expand the FP rounding call, then truncate to integer.  */
2564 exp = build_call_expr (fallback_fndecl, 1, arg);
2566 tmp = expand_normal (exp);
2568 /* Truncate the result of floating point optab to integer
2569 via expand_fix (). */
2570 target = gen_reg_rtx (mode);
2571 expand_fix (target, tmp, 0);
2576 /* Expand a call to one of the builtin math functions doing integer
2578 Return 0 if a normal call should be emitted rather than expanding the
2579 function in-line. EXP is the expression that is a call to the builtin
2580 function; if convenient, the result should be placed in TARGET.
2581 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps.  Handles lrint/llrint and lround/llround.  */
2584 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2586 convert_optab builtin_optab;
2588 tree fndecl = get_callee_fndecl (exp);
2590 enum machine_mode mode;
2592 /* There's no easy way to detect the case we need to set EDOM. */
2593 if (flag_errno_math)
2596 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2599 arg = CALL_EXPR_ARG (exp, 0);
2601 switch (DECL_FUNCTION_CODE (fndecl))
2603 CASE_FLT_FN (BUILT_IN_LRINT):
2604 CASE_FLT_FN (BUILT_IN_LLRINT):
2605 builtin_optab = lrint_optab; break;
2606 CASE_FLT_FN (BUILT_IN_LROUND):
2607 CASE_FLT_FN (BUILT_IN_LLROUND):
2608 builtin_optab = lround_optab; break;
2613 /* Make a suitable register to place result in. */
2614 mode = TYPE_MODE (TREE_TYPE (exp));
2616 target = gen_reg_rtx (mode);
2618 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2619 need to expand the argument again. This way, we will not perform
2620 side-effects more than once. */
2621 narg = builtin_save_expr (arg);
2625 exp = build_call_expr (fndecl, 1, arg);
2628 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2632 if (expand_sfix_optab (target, op0, builtin_optab))
2634 /* Output the entire sequence. */
2635 insns = get_insns ();
2641 /* If we were unable to expand via the builtin, stop the sequence
2642 (without outputting the insns) and call to the library function
2643 with the stabilized argument list. */
2646 target = expand_call (exp, target, target == const0_rtx);
2651 /* To evaluate powi(x,n), the floating point value x raised to the
2652 constant integer exponent n, we use a hybrid algorithm that
2653 combines the "window method" with look-up tables. For an
2654 introduction to exponentiation algorithms and "addition chains",
2655 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2656 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2657 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2658 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2660 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2661 multiplications to inline before calling the system library's pow
2662 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2663 so this default never requires calling pow, powf or powl. */
2665 #ifndef POWI_MAX_MULTS
2666 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2669 /* The size of the "optimal power tree" lookup table. All
2670 exponents less than this value are simply looked up in the
2671 powi_table below. This threshold is also used to size the
2672 cache of pseudo registers that hold intermediate results. */
2673 #define POWI_TABLE_SIZE 256
2675 /* The size, in bits of the window, used in the "window method"
2676 exponentiation algorithm. This is equivalent to a radix of
2677 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2678 #define POWI_WINDOW_SIZE 3
2680 /* The following table is an efficient representation of an
2681 "optimal power tree". For each value, i, the corresponding
2682 value, j, in the table states than an optimal evaluation
2683 sequence for calculating pow(x,i) can be found by evaluating
2684 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2685 100 integers is given in Knuth's "Seminumerical algorithms". */
/* NOTE(review): the closing "};" of this initializer (original line
   2721) is elided from the listing below.  */
2687 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2689 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2690 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2691 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2692 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2693 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2694 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2695 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2696 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2697 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2698 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2699 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2700 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2701 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2702 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2703 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2704 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2705 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2706 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2707 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2708 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2709 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2710 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2711 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2712 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2713 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2714 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2715 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2716 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2717 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2718 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2719 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2720 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2724 /* Return the number of multiplications required to calculate
2725 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2726 subroutine of powi_cost. CACHE is an array indicating
2727 which exponents have already been calculated. */
/* NOTE(review): elided listing -- the early-return on a cache hit and
   the cache update (original lines 2734-2736) are missing here.  */
2730 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2732 /* If we've already calculated this exponent, then this evaluation
2733 doesn't require any additional multiplications. */
/* Recurrence mirrors powi_table: cost(i) = cost(j) + cost(i-j) + 1.  */
2738 return powi_lookup_cost (n - powi_table[n], cache)
2739 + powi_lookup_cost (powi_table[n], cache) + 1;
2742 /* Return the number of multiplications required to calculate
2743 powi(x,n) for an arbitrary x, given the exponent N. This
2744 function needs to be kept in sync with expand_powi below. */
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps (e.g. the n == 0 early return and RESULT init).  */
2747 powi_cost (HOST_WIDE_INT n)
2749 bool cache[POWI_TABLE_SIZE];
2750 unsigned HOST_WIDE_INT digit;
2751 unsigned HOST_WIDE_INT val;
2757 /* Ignore the reciprocal when calculating the cost. */
2758 val = (n < 0) ? -n : n;
2760 /* Initialize the exponent cache. */
2761 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel POWI_WINDOW_SIZE bits at a time until the
   remaining exponent fits in the lookup table.  */
2766 while (val >= POWI_TABLE_SIZE)
2770 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2771 result += powi_lookup_cost (digit, cache)
2772 + POWI_WINDOW_SIZE + 1;
2773 val >>= POWI_WINDOW_SIZE;
2782 return result + powi_lookup_cost (val, cache);
2785 /* Recursive subroutine of expand_powi. This function takes the array,
2786 CACHE, of already calculated exponents and an exponent N and returns
2787 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
/* NOTE(review): elided listing -- cache-hit return and the branch
   structure between the three cases are missing here.  */
2790 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2792 unsigned HOST_WIDE_INT digit;
/* Small exponents: split per the optimal power tree.  */
2796 if (n < POWI_TABLE_SIZE)
2801 target = gen_reg_rtx (mode);
2804 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2805 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Odd large exponents: strip the low window of bits.  */
2809 target = gen_reg_rtx (mode);
2810 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2811 op0 = expand_powi_1 (mode, n - digit, cache);
2812 op1 = expand_powi_1 (mode, digit, cache);
/* Even large exponents: square x**(n/2).  */
2816 target = gen_reg_rtx (mode);
2817 op0 = expand_powi_1 (mode, n >> 1, cache);
2821 result = expand_mult (mode, op0, op1, target, 0);
2822 if (result != target)
2823 emit_move_insn (target, result);
2827 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2828 floating point operand in mode MODE, and N is the exponent. This
2829 function needs to be kept in sync with powi_cost above. */
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps (e.g. the n == 0 guard before returning 1.0).  */
2832 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2834 unsigned HOST_WIDE_INT val;
2835 rtx cache[POWI_TABLE_SIZE];
/* x**0 == 1.0 regardless of x.  */
2839 return CONST1_RTX (mode);
2841 val = (n < 0) ? -n : n;
2843 memset (cache, 0, sizeof (cache));
2846 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2848 /* If the original exponent was negative, reciprocate the result. */
2850 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2851 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2856 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2857 a normal call should be emitted rather than expanding the function
2858 in-line. EXP is the expression that is a call to the builtin
2859 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps.  Three special cases are visible: integer exponents
   (expand_powi), half-integer exponents (sqrt), third-integer exponents
   (cbrt); otherwise falls through to the generic binary-mathfn path.  */
2862 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2866 tree type = TREE_TYPE (exp);
2867 REAL_VALUE_TYPE cint, c, c2;
2870 enum machine_mode mode = TYPE_MODE (type);
2872 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2875 arg0 = CALL_EXPR_ARG (exp, 0);
2876 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponent: nothing special to do here.  */
2878 if (TREE_CODE (arg1) != REAL_CST
2879 || TREE_OVERFLOW (arg1))
2880 return expand_builtin_mathfn_2 (exp, target, subtarget);
2882 /* Handle constant exponents. */
2884 /* For integer valued exponents we can expand to an optimal multiplication
2885 sequence using expand_powi. */
2886 c = TREE_REAL_CST (arg1);
2887 n = real_to_integer (&c);
2888 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* -1..2 are always cheap; larger exponents only under
   -funsafe-math-optimizations and within the multiplication budget.  */
2889 if (real_identical (&c, &cint)
2890 && ((n >= -1 && n <= 2)
2891 || (flag_unsafe_math_optimizations
2893 && powi_cost (n) <= POWI_MAX_MULTS)))
2895 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2898 op = force_reg (mode, op);
2899 op = expand_powi (op, mode, n);
/* Stabilize ARG0: it is expanded more than once below.  */
2904 narg0 = builtin_save_expr (arg0);
2906 /* If the exponent is not integer valued, check if it is half of an integer.
2907 In this case we can expand to sqrt (x) * x**(n/2). */
2908 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2909 if (fn != NULL_TREE)
2911 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2912 n = real_to_integer (&c2);
2913 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2914 if (real_identical (&c2, &cint)
2915 && ((flag_unsafe_math_optimizations
2917 && powi_cost (n/2) <= POWI_MAX_MULTS)
2920 tree call_expr = build_call_expr (fn, 1, narg0);
2921 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2924 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2925 op2 = force_reg (mode, op2);
2926 op2 = expand_powi (op2, mode, abs (n / 2));
2927 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2928 0, OPTAB_LIB_WIDEN);
2929 /* If the original exponent was negative, reciprocate the
2932 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2933 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2939 /* Try if the exponent is a third of an integer. In this case
2940 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2941 different from pow (x, 1./3.) due to rounding and behavior
2942 with negative x we need to constrain this transformation to
2943 unsafe math and positive x or finite math. */
2944 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2946 && flag_unsafe_math_optimizations
2947 && (tree_expr_nonnegative_p (arg0)
2948 || !HONOR_NANS (mode)))
/* Round 3*c to the nearest integer N, then verify N/3 reproduces C
   exactly in this mode before committing to the transformation.  */
2950 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2951 real_round (&c2, mode, &c2);
2952 n = real_to_integer (&c2);
2953 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2954 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2955 real_convert (&c2, mode, &c2);
2956 if (real_identical (&c2, &c)
2958 && powi_cost (n/3) <= POWI_MAX_MULTS)
2961 tree call_expr = build_call_expr (fn, 1,narg0);
2962 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* cbrt(x)**2 when n mod 3 == 2.  */
2963 if (abs (n) % 3 == 2)
2964 op = expand_simple_binop (mode, MULT, op, op, op,
2965 0, OPTAB_LIB_WIDEN);
2968 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2969 op2 = force_reg (mode, op2);
2970 op2 = expand_powi (op2, mode, abs (n / 3));
2971 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2972 0, OPTAB_LIB_WIDEN);
2973 /* If the original exponent was negative, reciprocate the
2976 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2977 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2983 /* Fall back to optab expansion. */
2984 return expand_builtin_mathfn_2 (exp, target, subtarget);
2987 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2988 a normal call should be emitted rather than expanding the function
2989 in-line. EXP is the expression that is a call to the builtin
2990 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps.  */
2993 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2997 enum machine_mode mode;
2998 enum machine_mode mode2;
3000 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3003 arg0 = CALL_EXPR_ARG (exp, 0);
3004 arg1 = CALL_EXPR_ARG (exp, 1);
3005 mode = TYPE_MODE (TREE_TYPE (exp));
3007 /* Handle constant power. */
3009 if (TREE_CODE (arg1) == INTEGER_CST
3010 && !TREE_OVERFLOW (arg1))
3012 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3014 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3015 Otherwise, check the number of multiplications required. */
/* HIGH word must be a sign extension of LOW for N to be representable
   in a HOST_WIDE_INT.  */
3016 if ((TREE_INT_CST_HIGH (arg1) == 0
3017 || TREE_INT_CST_HIGH (arg1) == -1)
3018 && ((n >= -1 && n <= 2)
3020 && powi_cost (n) <= POWI_MAX_MULTS)))
3022 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3023 op0 = force_reg (mode, op0);
3024 return expand_powi (op0, mode, n);
3028 /* Emit a libcall to libgcc. */
3030 /* Mode of the 2nd argument must match that of an int. */
3031 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3033 if (target == NULL_RTX)
3034 target = gen_reg_rtx (mode);
/* Coerce both operands into the modes the libcall ABI expects.  */
3036 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3037 if (GET_MODE (op0) != mode)
3038 op0 = convert_to_mode (mode, op0, 0);
3039 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3040 if (GET_MODE (op1) != mode2)
3041 op1 = convert_to_mode (mode2, op1, 0);
3043 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3044 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3045 op0, mode, op1, mode2);
3050 /* Expand expression EXP which is a call to the strlen builtin. Return
3051 NULL_RTX if we failed the caller should emit a normal call, otherwise
3052 try to get the result in TARGET, if convenient. */
/* NOTE(review): elided listing -- lines are missing wherever the embedded
   numbering jumps.  */
3055 expand_builtin_strlen (tree exp, rtx target,
3056 enum machine_mode target_mode)
3058 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3064 tree src = CALL_EXPR_ARG (exp, 0);
3065 rtx result, src_reg, char_rtx, before_strlen;
3066 enum machine_mode insn_mode = target_mode, char_mode;
3067 enum insn_code icode = CODE_FOR_nothing;
3070 /* If the length can be computed at compile-time, return it. */
3071 len = c_strlen (src, 0);
3073 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3075 /* If the length can be computed at compile-time and is constant
3076 integer, but there are side-effects in src, evaluate
3077 src for side-effects, then return len.
3078 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3079 can be optimized into: i++; x = 3; */
3080 len = c_strlen (src, 1);
3081 if (len && TREE_CODE (len) == INTEGER_CST)
3083 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3084 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3087 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3089 /* If SRC is not a pointer type, don't do this operation inline. */
3093 /* Bail out if we can't compute strlen in the right mode. */
/* Walk wider and wider integer modes looking for a strlen insn.  */
3094 while (insn_mode != VOIDmode)
3096 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3097 if (icode != CODE_FOR_nothing)
3100 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3102 if (insn_mode == VOIDmode)
3105 /* Make a place to write the result of the instruction. */
3109 && GET_MODE (result) == insn_mode
3110 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3111 result = gen_reg_rtx (insn_mode);
3113 /* Make a place to hold the source address. We will not expand
3114 the actual source until we are sure that the expansion will
3115 not fail -- there are trees that cannot be expanded twice. */
3116 src_reg = gen_reg_rtx (Pmode);
3118 /* Mark the beginning of the strlen sequence so we can emit the
3119 source operand later. */
3120 before_strlen = get_last_insn ();
3122 char_rtx = const0_rtx;
3123 char_mode = insn_data[(int) icode].operand[2].mode;
3124 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3126 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3128 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3129 char_rtx, GEN_INT (align));
3134 /* Now that we are assured of success, expand the source. */
3136 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3138 emit_move_insn (src_reg, pat);
/* Emit the deferred source expansion just before the strlen insn.  */
3143 emit_insn_after (pat, before_strlen);
3145 emit_insn_before (pat, get_insns ());
3147 /* Return the value in the proper mode for this function. */
3148 if (GET_MODE (result) == target_mode)
3150 else if (target != 0)
3151 convert_move (target, result, 0);
3153 target = convert_to_mode (target_mode, result, 0);
3159 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3160 caller should emit a normal call, otherwise try to get the result
3161 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* NOTE(review): elided listing.  Expansion here only succeeds when the
   call can be folded to a tree by fold_builtin_strstr.  */
3164 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3166 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3168 tree type = TREE_TYPE (exp);
3169 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3170 CALL_EXPR_ARG (exp, 1), type);
3172 return expand_expr (result, target, mode, EXPAND_NORMAL);
3177 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3178 caller should emit a normal call, otherwise try to get the result
3179 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* NOTE(review): elided listing.  Succeeds only when fold_builtin_strchr
   can fold the call to a tree.  */
3182 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3184 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3186 tree type = TREE_TYPE (exp);
3187 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3188 CALL_EXPR_ARG (exp, 1), type);
3190 return expand_expr (result, target, mode, EXPAND_NORMAL);
3192 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3197 /* Expand a call to the strrchr builtin.  Return NULL_RTX if we failed the
3198 caller should emit a normal call, otherwise try to get the result
3199 in TARGET, if convenient (and in mode MODE if that's convenient).  */
/* Mirrors expand_builtin_strchr: tree-level fold or fall back to a call.  */
3202 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3204 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3206 tree type = TREE_TYPE (exp);
3207 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3208 CALL_EXPR_ARG (exp, 1), type);
/* NOTE(review): "if (result)" guard appears elided in this extract.  */
3210 return expand_expr (result, target, mode, EXPAND_NORMAL);
3215 /* Expand a call to the strpbrk builtin.  Return NULL_RTX if we failed the
3216 caller should emit a normal call, otherwise try to get the result
3217 in TARGET, if convenient (and in mode MODE if that's convenient).  */
/* Mirrors expand_builtin_strstr: tree-level fold or fall back to a call.  */
3220 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3222 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3224 tree type = TREE_TYPE (exp);
3225 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3226 CALL_EXPR_ARG (exp, 1), type);
/* NOTE(review): "if (result)" guard appears elided in this extract.  */
3228 return expand_expr (result, target, mode, EXPAND_NORMAL);
3233 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3234 bytes from constant string DATA + OFFSET and return it as target
/* DATA is the NUL-terminated source string captured by the caller; the
   assertion guarantees the requested mode-sized chunk lies entirely within
   the string including its terminator, so c_readstr never reads past it.  */
3238 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3239 enum machine_mode mode)
3241 const char *str = (const char *) data;
3243 gcc_assert (offset >= 0
3244 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3245 <= strlen (str) + 1));
3247 return c_readstr (str + offset, mode);
3250 /* Expand a call EXP to the memcpy builtin.
3251 Return NULL_RTX if we failed, the caller should emit a normal call,
3252 otherwise try to get the result in TARGET, if convenient (and in
3253 mode MODE if that's convenient).  */
/* Strategy, in order: (1) tree-level fold of the whole call; (2) if the
   source is a string constant short enough for store_by_pieces, emit the
   stores directly without ever loading the string; (3) otherwise a generic
   block move via emit_block_move_hints, seeded with profile-derived
   alignment/size hints.  */
3256 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3258 tree fndecl = get_callee_fndecl (exp);
3260 if (!validate_arglist (exp,
3261 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3265 tree dest = CALL_EXPR_ARG (exp, 0);
3266 tree src = CALL_EXPR_ARG (exp, 1);
3267 tree len = CALL_EXPR_ARG (exp, 2);
3268 const char *src_str;
3269 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3270 unsigned int dest_align
3271 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3272 rtx dest_mem, src_mem, dest_addr, len_rtx;
3273 tree result = fold_builtin_memory_op (dest, src, len,
3274 TREE_TYPE (TREE_TYPE (fndecl)),
3276 HOST_WIDE_INT expected_size = -1;
3277 unsigned int expected_align = 0;
/* If the fold succeeded, expand any side-effect prefix (COMPOUND_EXPRs)
   for effect only, then expand the final value.
   NOTE(review): the enclosing "if (result)" guard appears elided here.  */
3281 while (TREE_CODE (result) == COMPOUND_EXPR)
3283 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3285 result = TREE_OPERAND (result, 1);
3287 return expand_expr (result, target, mode, EXPAND_NORMAL);
3290 /* If DEST is not a pointer type, call the normal function.  */
3291 if (dest_align == 0)
3294 /* If either SRC is not a pointer type, don't do this
3295 operation in-line.  */
/* Value-profile feedback can raise the expected alignment / size hints.  */
3299 stringop_block_profile (exp, &expected_align, &expected_size);
3300 if (expected_align < dest_align)
3301 expected_align = dest_align;
3302 dest_mem = get_memory_rtx (dest, len);
3303 set_mem_align (dest_mem, dest_align);
3304 len_rtx = expand_normal (len);
3305 src_str = c_getstr (src);
3307 /* If SRC is a string constant and block move would be done
3308 by pieces, we can avoid loading the string from memory
3309 and only stored the computed constants.  */
3311 && GET_CODE (len_rtx) == CONST_INT
3312 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3313 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3314 (void *) src_str, dest_align))
3316 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3317 builtin_memcpy_read_str,
3318 (void *) src_str, dest_align, 0);
/* memcpy returns DEST, so hand back the (ptr_mode) destination address.  */
3319 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3320 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3324 src_mem = get_memory_rtx (src, len);
3325 set_mem_align (src_mem, src_align);
3327 /* Copy word part most expediently.  */
3328 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3329 CALL_EXPR_TAILCALL (exp)
3330 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3331 expected_align, expected_size);
/* NOTE(review): a guard (likely "if (dest_addr == 0)") appears elided
   before recomputing dest_addr from the MEM's address.  */
3335 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3336 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3342 /* Expand a call EXP to the mempcpy builtin.
3343 Return NULL_RTX if we failed; the caller should emit a normal call,
3344 otherwise try to get the result in TARGET, if convenient (and in
3345 mode MODE if that's convenient).  If ENDP is 0 return the
3346 destination pointer, if ENDP is 1 return the end pointer ala
3347 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* Thin wrapper: validates the (ptr, ptr, size) argument list and forwards
   to expand_builtin_mempcpy_args with endp == 1 (mempcpy semantics).  */
3351 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3353 if (!validate_arglist (exp,
3354 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3358 tree dest = CALL_EXPR_ARG (exp, 0);
3359 tree src = CALL_EXPR_ARG (exp, 1);
3360 tree len = CALL_EXPR_ARG (exp, 2);
3361 return expand_builtin_mempcpy_args (dest, src, len,
3363 target, mode, /*endp=*/ 1);
3367 /* Helper function to do the actual work for expand_builtin_mempcpy.  The
3368 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3369 so that this can also be called without constructing an actual CALL_EXPR.
3370 TYPE is the return type of the call.  The other arguments and return value
3371 are the same as for expand_builtin_mempcpy.  */
3374 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3375 rtx target, enum machine_mode mode, int endp)
3377 /* If return value is ignored, transform mempcpy into memcpy.  */
3378 if (target == const0_rtx)
3380 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* NOTE(review): a "if (!fn) return NULL_RTX;"-style guard appears elided
   before building the memcpy call.  */
3385 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3386 target, mode, EXPAND_NORMAL);
3390 const char *src_str;
3391 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3392 unsigned int dest_align
3393 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3394 rtx dest_mem, src_mem, len_rtx;
3395 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
/* If the fold succeeded, expand side-effect prefix then the value.  */
3399 while (TREE_CODE (result) == COMPOUND_EXPR)
3401 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3403 result = TREE_OPERAND (result, 1);
3405 return expand_expr (result, target, mode, EXPAND_NORMAL);
3408 /* If either SRC or DEST is not a pointer type, don't do this
3409 operation in-line.  */
3410 if (dest_align == 0 || src_align == 0)
3413 /* If LEN is not constant, call the normal function.  */
3414 if (! host_integerp (len, 1))
3417 len_rtx = expand_normal (len);
3418 src_str = c_getstr (src);
3420 /* If SRC is a string constant and block move would be done
3421 by pieces, we can avoid loading the string from memory
3422 and only stored the computed constants.  */
3424 && GET_CODE (len_rtx) == CONST_INT
3425 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3426 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3427 (void *) src_str, dest_align))
3429 dest_mem = get_memory_rtx (dest, len);
3430 set_mem_align (dest_mem, dest_align);
/* ENDP is forwarded so store_by_pieces returns start/end/end-1 pointer.  */
3431 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3432 builtin_memcpy_read_str,
3433 (void *) src_str, dest_align, endp);
3434 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3435 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Second inline path: constant length, no string constant — move the
   bytes piecewise when the target says that is profitable.  */
3439 if (GET_CODE (len_rtx) == CONST_INT
3440 && can_move_by_pieces (INTVAL (len_rtx),
3441 MIN (dest_align, src_align)))
3443 dest_mem = get_memory_rtx (dest, len);
3444 set_mem_align (dest_mem, dest_align);
3445 src_mem = get_memory_rtx (src, len);
3446 set_mem_align (src_mem, src_align);
3447 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3448 MIN (dest_align, src_align), endp);
3449 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3450 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3458 /* Expand expression EXP, which is a call to the memmove builtin.  Return
3459 NULL_RTX if we failed; the caller should emit a normal call.  */
/* Thin wrapper: validates arguments and forwards to
   expand_builtin_memmove_args.  */
3462 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3464 if (!validate_arglist (exp,
3465 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3469 tree dest = CALL_EXPR_ARG (exp, 0);
3470 tree src = CALL_EXPR_ARG (exp, 1);
3471 tree len = CALL_EXPR_ARG (exp, 2);
3472 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3473 target, mode, ignore);
3477 /* Helper function to do the actual work for expand_builtin_memmove.  The
3478 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3479 so that this can also be called without constructing an actual CALL_EXPR.
3480 TYPE is the return type of the call.  The other arguments and return value
3481 are the same as for expand_builtin_memmove.  */
/* memmove must tolerate overlap, so no by-pieces inlining is attempted
   here: either the tree-level fold succeeds (endp==3 encodes memmove
   semantics) or we punt to a normal call.  */
3484 expand_builtin_memmove_args (tree dest, tree src, tree len,
3485 tree type, rtx target, enum machine_mode mode,
3488 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3492 STRIP_TYPE_NOPS (result);
3493 while (TREE_CODE (result) == COMPOUND_EXPR)
3495 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3497 result = TREE_OPERAND (result, 1);
3499 return expand_expr (result, target, mode, EXPAND_NORMAL);
3502 /* Otherwise, call the normal function.  */
3506 /* Expand expression EXP, which is a call to the bcopy builtin.  Return
3507 NULL_RTX if we failed the caller should emit a normal call.  */
/* bcopy(src, dst, n): note the swapped pointer order relative to memmove,
   which is why SRC is argument 0 and DEST argument 1 below.  The result is
   always discarded (const0_rtx target), matching bcopy's void return.  */
3510 expand_builtin_bcopy (tree exp, int ignore)
3512 tree type = TREE_TYPE (exp);
3513 tree src, dest, size;
3515 if (!validate_arglist (exp,
3516 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3519 src = CALL_EXPR_ARG (exp, 0);
3520 dest = CALL_EXPR_ARG (exp, 1);
3521 size = CALL_EXPR_ARG (exp, 2);
3523 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3524 This is done this way so that if it isn't expanded inline, we fall
3525 back to calling bcopy instead of memmove.  */
3526 return expand_builtin_memmove_args (dest, src,
3527 fold_convert (sizetype, size),
3528 type, const0_rtx, VOIDmode,
/* Fallback stubs for targets with no movstr pattern (the enclosing
   "#ifndef HAVE_movstr" appears elided from this extract).  */
3533 # define HAVE_movstr 0
3534 # define CODE_FOR_movstr CODE_FOR_nothing
3537 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3538 we failed, the caller should emit a normal call, otherwise try to
3539 get the result in TARGET, if convenient.  If ENDP is 0 return the
3540 destination pointer, if ENDP is 1 return the end pointer ala
3541 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3545 expand_movstr (tree dest, tree src, rtx target, int endp)
3551 const struct insn_data * data;
3556 dest_mem = get_memory_rtx (dest, NULL);
3557 src_mem = get_memory_rtx (src, NULL);
/* For endp != 0 the destination address doubles as the returned value,
   so force it into a register and rewrite DEST_MEM to use it.
   NOTE(review): the branch structure around endp appears elided here.  */
3560 target = force_reg (Pmode, XEXP (dest_mem, 0));
3561 dest_mem = replace_equiv_address (dest_mem, target);
3562 end = gen_reg_rtx (Pmode);
3566 if (target == 0 || target == const0_rtx)
3568 end = gen_reg_rtx (Pmode);
3576 data = insn_data + CODE_FOR_movstr;
/* Operand 0 of the movstr pattern may be narrower than Pmode; adapt.  */
3578 if (data->operand[0].mode != VOIDmode)
3579 end = gen_lowpart (data->operand[0].mode, end);
3581 insn = data->genfun (end, dest_mem, src_mem);
3587 /* movstr is supposed to set end to the address of the NUL
3588 terminator.  If the caller requested a mempcpy-like return value,
3590 if (endp == 1 && target != const0_rtx)
3592 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3593 emit_move_insn (target, force_operand (tem, NULL_RTX));
3599 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3600 NULL_RTX if we failed the caller should emit a normal call, otherwise
3601 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* Thin wrapper: validates (ptr, ptr) arguments and forwards to
   expand_builtin_strcpy_args.  */
3605 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3607 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3609 tree dest = CALL_EXPR_ARG (exp, 0);
3610 tree src = CALL_EXPR_ARG (exp, 1);
3611 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3616 /* Helper function to do the actual work for expand_builtin_strcpy.  The
3617 arguments to the builtin_strcpy call DEST and SRC are broken out
3618 so that this can also be called without constructing an actual CALL_EXPR.
3619 The other arguments and return value are the same as for
3620 expand_builtin_strcpy.  */
/* Try the tree-level fold first; failing that, attempt the target's movstr
   pattern with endp == 0 (strcpy returns the destination pointer).  */
3623 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3624 rtx target, enum machine_mode mode)
3626 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
/* NOTE(review): "if (result)" guard appears elided in this extract.  */
3628 return expand_expr (result, target, mode, EXPAND_NORMAL);
3629 return expand_movstr (dest, src, target, /*endp=*/0);
3633 /* Expand a call EXP to the stpcpy builtin.
3634 Return NULL_RTX if we failed the caller should emit a normal call,
3635 otherwise try to get the result in TARGET, if convenient (and in
3636 mode MODE if that's convenient).  */
3639 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3643 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3646 dst = CALL_EXPR_ARG (exp, 0);
3647 src = CALL_EXPR_ARG (exp, 1);
3649 /* If return value is ignored, transform stpcpy into strcpy.  */
3650 if (target == const0_rtx)
3652 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
/* NOTE(review): a "if (!fn) ..." guard appears elided here.  */
3656 return expand_expr (build_call_expr (fn, 2, dst, src),
3657 target, mode, EXPAND_NORMAL);
3664 /* Ensure we get an actual string whose length can be evaluated at
3665 compile-time, not an expression containing a string.  This is
3666 because the latter will potentially produce pessimized code
3667 when used to produce the return value.  */
3668 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3669 return expand_movstr (dst, src, target, /*endp=*/2);
/* Known source length: stpcpy(d, s) == mempcpy(d, s, strlen(s)+1) - 1,
   which endp == 2 encodes.  */
3671 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3672 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3673 target, mode, /*endp=*/2);
/* If mempcpy expansion failed but the length is a compile-time constant,
   fall back to a strcpy expansion and reconstruct the end pointer as
   DST + LEN by hand.  */
3678 if (TREE_CODE (len) == INTEGER_CST)
3680 rtx len_rtx = expand_normal (len);
3682 if (GET_CODE (len_rtx) == CONST_INT)
3684 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3685 dst, src, target, mode);
3691 if (mode != VOIDmode)
3692 target = gen_reg_rtx (mode);
3694 target = gen_reg_rtx (GET_MODE (ret));
3696 if (GET_MODE (target) != GET_MODE (ret))
3697 ret = gen_lowpart (GET_MODE (target), ret);
3699 ret = plus_constant (ret, INTVAL (len_rtx));
3700 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3708 return expand_movstr (dst, src, target, /*endp=*/2);
3712 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3713 bytes from constant string DATA + OFFSET and return it as target
/* Unlike builtin_memcpy_read_str this may be asked for bytes beyond the
   string: past the terminator it supplies zeros, implementing strncpy's
   mandated zero padding.
   NOTE(review): the "return const0_rtx;" for the padding case appears
   elided from this extract.  */
3717 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3718 enum machine_mode mode)
3720 const char *str = (const char *) data;
3722 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3725 return c_readstr (str + offset, mode);
3728 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3729 NULL_RTX if we failed the caller should emit a normal call.  */
3732 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3734 tree fndecl = get_callee_fndecl (exp);
3736 if (validate_arglist (exp,
3737 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3739 tree dest = CALL_EXPR_ARG (exp, 0);
3740 tree src = CALL_EXPR_ARG (exp, 1);
3741 tree len = CALL_EXPR_ARG (exp, 2);
3742 tree slen = c_strlen (src, 1);
3743 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
/* Fold succeeded: expand side-effect prefix, then the value.  */
3747 while (TREE_CODE (result) == COMPOUND_EXPR)
3749 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3751 result = TREE_OPERAND (result, 1);
3753 return expand_expr (result, target, mode, EXPAND_NORMAL);
3756 /* We must be passed a constant len and src parameter.  */
3757 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* SLEN becomes strlen(src) + 1, the number of bytes actually copied
   from SRC before zero padding starts.  */
3760 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3762 /* We're required to pad with trailing zeros if the requested
3763 len is greater than strlen(s2)+1.  In that case try to
3764 use store_by_pieces, if it fails, punt.  */
3765 if (tree_int_cst_lt (slen, len))
3767 unsigned int dest_align
3768 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3769 const char *p = c_getstr (src);
3772 if (!p || dest_align == 0 || !host_integerp (len, 1)
3773 || !can_store_by_pieces (tree_low_cst (len, 1),
3774 builtin_strncpy_read_str,
3775 (void *) p, dest_align))
/* builtin_strncpy_read_str supplies zeros past the terminator, so one
   store_by_pieces covers both the copy and the padding.  */
3778 dest_mem = get_memory_rtx (dest, len);
3779 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3780 builtin_strncpy_read_str,
3781 (void *) p, dest_align, 0);
3782 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3783 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3790 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3791 bytes from constant string DATA + OFFSET and return it as target
/* DATA points at the single fill byte; OFFSET is irrelevant because every
   position receives the same value.  A mode-sized buffer filled with that
   byte is materialized on the stack and converted to an RTX constant.  */
3795 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3796 enum machine_mode mode)
3798 const char *c = (const char *) data;
3799 char *p = alloca (GET_MODE_SIZE (mode));
3801 memset (p, *c, GET_MODE_SIZE (mode));
3803 return c_readstr (p, mode);
3806 /* Callback routine for store_by_pieces.  Return the RTL of a register
3807 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3808 char value given in the RTL register data.  For example, if mode is
3809 4 bytes wide, return the RTL for 0x01010101*data.  */
/* Used when the fill value is only known at run time: replicate the byte
   across the mode by multiplying it with the 0x0101...01 coefficient.  */
3812 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3813 enum machine_mode mode)
3819 size = GET_MODE_SIZE (mode);
/* NOTE(review): the size == 1 early-return and the alloca of P appear
   elided from this extract.  */
3824 memset (p, 1, size);
3825 coeff = c_readstr (p, mode);
3827 target = convert_to_mode (mode, (rtx) data, 1);
3828 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3829 return force_reg (mode, target);
3832 /* Expand expression EXP, which is a call to the memset builtin.  Return
3833 NULL_RTX if we failed the caller should emit a normal call, otherwise
3834 try to get the result in TARGET, if convenient (and in mode MODE if that's
/* Thin wrapper: validates (ptr, int, size) arguments and forwards to
   expand_builtin_memset_args; EXP is passed along for profile hints and
   for rebuilding the call on failure.  */
3838 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3840 if (!validate_arglist (exp,
3841 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3845 tree dest = CALL_EXPR_ARG (exp, 0);
3846 tree val = CALL_EXPR_ARG (exp, 1);
3847 tree len = CALL_EXPR_ARG (exp, 2);
3848 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3852 /* Helper function to do the actual work for expand_builtin_memset.  The
3853 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3854 so that this can also be called without constructing an actual CALL_EXPR.
3855 The other arguments and return value are the same as for
3856 expand_builtin_memset.  */
/* Expansion strategy: handle len == 0 trivially; otherwise try
   store_by_pieces (constant or runtime fill byte), then the target's
   setmem pattern, then a generic clear/storage path, and finally rebuild
   the original memset/bzero call.  */
3859 expand_builtin_memset_args (tree dest, tree val, tree len,
3860 rtx target, enum machine_mode mode, tree orig_exp)
3863 enum built_in_function fcode;
3865 unsigned int dest_align;
3866 rtx dest_mem, dest_addr, len_rtx;
3867 HOST_WIDE_INT expected_size = -1;
3868 unsigned int expected_align = 0;
3870 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3872 /* If DEST is not a pointer type, don't do this operation in-line.  */
3873 if (dest_align == 0)
3876 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3877 if (expected_align < dest_align)
3878 expected_align = dest_align;
3880 /* If the LEN parameter is zero, return DEST.  */
3881 if (integer_zerop (len))
3883 /* Evaluate and ignore VAL in case it has side-effects.  */
3884 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3885 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3888 /* Stabilize the arguments in case we fail.  */
3889 dest = builtin_save_expr (dest);
3890 val = builtin_save_expr (val);
3891 len = builtin_save_expr (len);
3893 len_rtx = expand_normal (len);
3894 dest_mem = get_memory_rtx (dest, len);
/* Run-time fill value: narrow it to unsigned char, then either replicate
   it with builtin_memset_gen_str for store_by_pieces, or hand it to the
   target's setmem expander.  */
3896 if (TREE_CODE (val) != INTEGER_CST)
3900 val_rtx = expand_normal (val);
3901 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3904 /* Assume that we can memset by pieces if we can store
3905 * the coefficients by pieces (in the required modes).
3906 * We can't pass builtin_memset_gen_str as that emits RTL.  */
3908 if (host_integerp (len, 1)
3909 && !(optimize_size && tree_low_cst (len, 1) > 1)
3910 && can_store_by_pieces (tree_low_cst (len, 1),
3911 builtin_memset_read_str, &c, dest_align))
3913 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3915 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3916 builtin_memset_gen_str, val_rtx, dest_align, 0);
3918 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3919 dest_align, expected_align,
3923 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3924 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Compile-time fill value: reduce it to a target char C.  */
3928 if (target_char_cast (val, &c))
/* NOTE(review): a nonzero-C branch header appears elided here; the
   following paths handle it via store_by_pieces or setmem.  */
3933 if (host_integerp (len, 1)
3934 && !(optimize_size && tree_low_cst (len, 1) > 1)
3935 && can_store_by_pieces (tree_low_cst (len, 1),
3936 builtin_memset_read_str, &c, dest_align))
3937 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3938 builtin_memset_read_str, &c, dest_align, 0);
3939 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3940 dest_align, expected_align,
3944 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3945 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* C == 0: generic block-clear path with profile hints.  */
3949 set_mem_align (dest_mem, dest_align);
3950 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3951 CALL_EXPR_TAILCALL (orig_exp)
3952 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3953 expected_align, expected_size);
3957 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3958 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* All inline attempts failed: re-emit the original builtin as an
   ordinary (possibly tail) call, preserving memset vs. bzero.  */
3964 fndecl = get_callee_fndecl (orig_exp);
3965 fcode = DECL_FUNCTION_CODE (fndecl);
3966 if (fcode == BUILT_IN_MEMSET)
3967 fn = build_call_expr (fndecl, 3, dest, val, len);
3968 else if (fcode == BUILT_IN_BZERO)
3969 fn = build_call_expr (fndecl, 2, dest, len);
3972 if (TREE_CODE (fn) == CALL_EXPR)
3973 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3974 return expand_call (fn, target, target == const0_rtx);
3977 /* Expand expression EXP, which is a call to the bzero builtin.  Return
3978 NULL_RTX if we failed the caller should emit a normal call.  */
/* bzero(p, n) is expanded as memset(p, 0, n) with the result discarded
   (const0_rtx target, matching bzero's void return).  */
3981 expand_builtin_bzero (tree exp)
3985 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3988 dest = CALL_EXPR_ARG (exp, 0);
3989 size = CALL_EXPR_ARG (exp, 1);
3991 /* New argument list transforming bzero(ptr x, int y) to
3992 memset(ptr x, int 0, size_t y).  This is done this way
3993 so that if it isn't expanded inline, we fallback to
3994 calling bzero instead of memset.  */
3996 return expand_builtin_memset_args (dest, integer_zero_node,
3997 fold_convert (sizetype, size),
3998 const0_rtx, VOIDmode, exp);
4001 /* Expand a call to the memchr builtin.  Return NULL_RTX if we failed the
4002 caller should emit a normal call, otherwise try to get the result
4003 in TARGET, if convenient (and in mode MODE if that's convenient).  */
/* Fold-only expansion, like strstr/strchr above.  */
4006 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4008 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4009 INTEGER_TYPE, VOID_TYPE))
4011 tree type = TREE_TYPE (exp);
4012 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4013 CALL_EXPR_ARG (exp, 1),
4014 CALL_EXPR_ARG (exp, 2), type);
/* NOTE(review): "if (result)" guard appears elided in this extract.  */
4016 return expand_expr (result, target, mode, EXPAND_NORMAL);
4021 /* Expand expression EXP, which is a call to the memcmp built-in function.
4022 Return NULL_RTX if we failed and the
4023 caller should emit a normal call, otherwise try to get the result in
4024 TARGET, if convenient (and in mode MODE, if that's convenient).  */
/* Try: (1) tree-level fold; (2) the target's cmpmem/cmpstrn insn; if that
   insn fails at expand time, (3) a direct memcmp libcall built from the
   already-computed operand RTXes.  */
4027 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4029 if (!validate_arglist (exp,
4030 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4034 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4035 CALL_EXPR_ARG (exp, 1),
4036 CALL_EXPR_ARG (exp, 2));
4038 return expand_expr (result, target, mode, EXPAND_NORMAL);
4041 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4043 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4046 tree arg1 = CALL_EXPR_ARG (exp, 0);
4047 tree arg2 = CALL_EXPR_ARG (exp, 1);
4048 tree len = CALL_EXPR_ARG (exp, 2);
4051 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4053 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4054 enum machine_mode insn_mode;
/* Prefer cmpmemsi; fall back to cmpstrnsi for the result mode.  */
4056 #ifdef HAVE_cmpmemsi
4058 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4061 #ifdef HAVE_cmpstrnsi
4063 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4068 /* If we don't have POINTER_TYPE, call the function.  */
4069 if (arg1_align == 0 || arg2_align == 0)
4072 /* Make a place to write the result of the instruction.  */
4075 && REG_P (result) && GET_MODE (result) == insn_mode
4076 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4077 result = gen_reg_rtx (insn_mode);
4079 arg1_rtx = get_memory_rtx (arg1, len);
4080 arg2_rtx = get_memory_rtx (arg2, len);
4081 arg3_rtx = expand_normal (len);
4083 /* Set MEM_SIZE as appropriate.  */
4084 if (GET_CODE (arg3_rtx) == CONST_INT)
4086 set_mem_size (arg1_rtx, arg3_rtx);
4087 set_mem_size (arg2_rtx, arg3_rtx);
4090 #ifdef HAVE_cmpmemsi
4092 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4093 GEN_INT (MIN (arg1_align, arg2_align)));
4096 #ifdef HAVE_cmpstrnsi
4098 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4099 GEN_INT (MIN (arg1_align, arg2_align)));
/* Insn generation failed: emit a memcmp libcall on the same operands.  */
4107 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4108 TYPE_MODE (integer_type_node), 3,
4109 XEXP (arg1_rtx, 0), Pmode,
4110 XEXP (arg2_rtx, 0), Pmode,
4111 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4112 TYPE_UNSIGNED (sizetype)),
4113 TYPE_MODE (sizetype));
4115 /* Return the value in the proper mode for this function.  */
4116 mode = TYPE_MODE (TREE_TYPE (exp));
4117 if (GET_MODE (result) == mode)
4119 else if (target != 0)
4121 convert_move (target, result, 0);
4125 return convert_to_mode (mode, result, 0);
4132 /* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
4133 if we failed the caller should emit a normal call, otherwise try to get
4134 the result in TARGET, if convenient.  */
/* Try: (1) tree-level fold; (2) the cmpstrsi insn; (3) cmpstrnsi with a
   length bound derived from whichever argument's strlen is computable;
   (4) re-emit the call on the stabilized arguments.  */
4137 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4139 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4143 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4144 CALL_EXPR_ARG (exp, 1));
4146 return expand_expr (result, target, mode, EXPAND_NORMAL);
4149 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4150 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4151 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4153 rtx arg1_rtx, arg2_rtx;
4154 rtx result, insn = NULL_RTX;
4156 tree arg1 = CALL_EXPR_ARG (exp, 0);
4157 tree arg2 = CALL_EXPR_ARG (exp, 1);
4160 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4162 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4164 /* If we don't have POINTER_TYPE, call the function.  */
4165 if (arg1_align == 0 || arg2_align == 0)
4168 /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
4169 arg1 = builtin_save_expr (arg1);
4170 arg2 = builtin_save_expr (arg2);
4172 arg1_rtx = get_memory_rtx (arg1, NULL);
4173 arg2_rtx = get_memory_rtx (arg2, NULL);
4175 #ifdef HAVE_cmpstrsi
4176 /* Try to call cmpstrsi.  */
4179 enum machine_mode insn_mode
4180 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4182 /* Make a place to write the result of the instruction.  */
4185 && REG_P (result) && GET_MODE (result) == insn_mode
4186 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4187 result = gen_reg_rtx (insn_mode);
4189 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4190 GEN_INT (MIN (arg1_align, arg2_align)));
4193 #ifdef HAVE_cmpstrnsi
4194 /* Try to determine at least one length and call cmpstrnsi.  */
4195 if (!insn && HAVE_cmpstrnsi)
4200 enum machine_mode insn_mode
4201 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* LEN below is strlen(arg)+1 for whichever string has a computable
   length — enough to cover the NUL so cmpstrn matches strcmp.  */
4202 tree len1 = c_strlen (arg1, 1);
4203 tree len2 = c_strlen (arg2, 1);
4206 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4208 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4210 /* If we don't have a constant length for the first, use the length
4211 of the second, if we know it.  We don't require a constant for
4212 this case; some cost analysis could be done if both are available
4213 but neither is constant.  For now, assume they're equally cheap,
4214 unless one has side effects.  If both strings have constant lengths,
4221 else if (TREE_SIDE_EFFECTS (len1))
4223 else if (TREE_SIDE_EFFECTS (len2))
4225 else if (TREE_CODE (len1) != INTEGER_CST)
4227 else if (TREE_CODE (len2) != INTEGER_CST)
4229 else if (tree_int_cst_lt (len1, len2))
4234 /* If both arguments have side effects, we cannot optimize.  */
4235 if (!len || TREE_SIDE_EFFECTS (len))
4238 arg3_rtx = expand_normal (len);
4240 /* Make a place to write the result of the instruction.  */
4243 && REG_P (result) && GET_MODE (result) == insn_mode
4244 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4245 result = gen_reg_rtx (insn_mode);
4247 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4248 GEN_INT (MIN (arg1_align, arg2_align)));
4256 /* Return the value in the proper mode for this function.  */
4257 mode = TYPE_MODE (TREE_TYPE (exp));
4258 if (GET_MODE (result) == mode)
4261 return convert_to_mode (mode, result, 0);
4262 convert_move (target, result, 0);
4266 /* Expand the library call ourselves using a stabilized argument
4267 list to avoid re-evaluating the function's arguments twice.  */
4268 #ifdef HAVE_cmpstrnsi
4271 fndecl = get_callee_fndecl (exp);
4272 fn = build_call_expr (fndecl, 2, arg1, arg2);
4273 if (TREE_CODE (fn) == CALL_EXPR)
4274 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4275 return expand_call (fn, target, target == const0_rtx);
4281 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
4282 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4283 the result in TARGET, if convenient.  */
/* Like expand_builtin_strcmp, but the third argument bounds the compare:
   the effective length passed to cmpstrnsi is MIN(strlen(s)+1, arg3).  */
4286 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4288 if (!validate_arglist (exp,
4289 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4293 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4294 CALL_EXPR_ARG (exp, 1),
4295 CALL_EXPR_ARG (exp, 2));
4297 return expand_expr (result, target, mode, EXPAND_NORMAL);
4300 /* If c_strlen can determine an expression for one of the string
4301 lengths, and it doesn't have side effects, then emit cmpstrnsi
4302 using length MIN(strlen(string)+1, arg3).  */
4303 #ifdef HAVE_cmpstrnsi
4306 tree len, len1, len2;
4307 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4310 tree arg1 = CALL_EXPR_ARG (exp, 0);
4311 tree arg2 = CALL_EXPR_ARG (exp, 1);
4312 tree arg3 = CALL_EXPR_ARG (exp, 2);
4315 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4317 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4318 enum machine_mode insn_mode
4319 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4321 len1 = c_strlen (arg1, 1);
4322 len2 = c_strlen (arg2, 1);
4325 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4327 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4329 /* If we don't have a constant length for the first, use the length
4330 of the second, if we know it.  We don't require a constant for
4331 this case; some cost analysis could be done if both are available
4332 but neither is constant.  For now, assume they're equally cheap,
4333 unless one has side effects.  If both strings have constant lengths,
4340 else if (TREE_SIDE_EFFECTS (len1))
4342 else if (TREE_SIDE_EFFECTS (len2))
4344 else if (TREE_CODE (len1) != INTEGER_CST)
4346 else if (TREE_CODE (len2) != INTEGER_CST)
4348 else if (tree_int_cst_lt (len1, len2))
4353 /* If both arguments have side effects, we cannot optimize.  */
4354 if (!len || TREE_SIDE_EFFECTS (len))
4357 /* The actual new length parameter is MIN(len,arg3).  */
4358 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4359 fold_convert (TREE_TYPE (len), arg3));
4361 /* If we don't have POINTER_TYPE, call the function.  */
4362 if (arg1_align == 0 || arg2_align == 0)
4365 /* Make a place to write the result of the instruction.  */
4368 && REG_P (result) && GET_MODE (result) == insn_mode
4369 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4370 result = gen_reg_rtx (insn_mode);
4372 /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
4373 arg1 = builtin_save_expr (arg1);
4374 arg2 = builtin_save_expr (arg2);
4375 len = builtin_save_expr (len);
4377 arg1_rtx = get_memory_rtx (arg1, len);
4378 arg2_rtx = get_memory_rtx (arg2, len);
4379 arg3_rtx = expand_normal (len);
4380 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4381 GEN_INT (MIN (arg1_align, arg2_align)));
4386 /* Return the value in the proper mode for this function.  */
4387 mode = TYPE_MODE (TREE_TYPE (exp));
4388 if (GET_MODE (result) == mode)
4391 return convert_to_mode (mode, result, 0);
4392 convert_move (target, result, 0);
4396 /* Expand the library call ourselves using a stabilized argument
4397 list to avoid re-evaluating the function's arguments twice.  */
4398 fndecl = get_callee_fndecl (exp);
4399 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4400 if (TREE_CODE (fn) == CALL_EXPR)
4401 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4402 return expand_call (fn, target, target == const0_rtx);
4408 /* Expand expression EXP, which is a call to the strcat builtin.
4409 Return NULL_RTX if we failed; the caller should emit a normal call,
4410 otherwise try to get the result in TARGET, if convenient. */
/* Expand EXP, a call to the strcat builtin, into RTL.  If the source
   is the empty string, the call reduces to returning DST; otherwise
   it attempts to expand as strcpy into (dst p+ strlen (dst)).
   NOTE(review): interior lines are missing from this extract —
   confirm the emitted-sequence handling against the full file.  */
4413 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4415 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4419 tree dst = CALL_EXPR_ARG (exp, 0);
4420 tree src = CALL_EXPR_ARG (exp, 1);
4421 const char *p = c_getstr (src);
4423 /* If the string length is zero, return the dst parameter. */
4424 if (p && *p == '\0')
4425 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4429 /* See if we can store by pieces into (dst + strlen(dst)). */
4430 tree newsrc, newdst,
4431 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4434 /* Stabilize the argument list. */
4435 newsrc = builtin_save_expr (src);
4436 dst = builtin_save_expr (dst);
4440 /* Create strlen (dst). */
4441 newdst = build_call_expr (strlen_fn, 1, dst);
4442 /* Create (dst p+ strlen (dst)). */
4444 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4445 newdst = builtin_save_expr (newdst);
4447 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
/* strcpy expansion failed: discard the partially emitted sequence.  */
4449 end_sequence (); /* Stop sequence. */
4453 /* Output the entire sequence. */
4454 insns = get_insns ();
4458 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4465 /* Expand expression EXP, which is a call to the strncat builtin.
4466 Return NULL_RTX if we failed; the caller should emit a normal call,
4467 otherwise try to get the result in TARGET, if convenient. */
/* Expand EXP, a call to the strncat builtin.  Delegates entirely to
   the tree-level folder; when folding succeeds the folded result is
   expanded, otherwise (not visible in this extract) a normal call is
   presumably emitted by the caller.  */
4470 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4472 if (validate_arglist (exp,
4473 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4475 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4476 CALL_EXPR_ARG (exp, 1),
4477 CALL_EXPR_ARG (exp, 2));
4479 return expand_expr (result, target, mode, EXPAND_NORMAL);
4484 /* Expand expression EXP, which is a call to the strspn builtin.
4485 Return NULL_RTX if we failed; the caller should emit a normal call,
4486 otherwise try to get the result in TARGET, if convenient. */
/* Expand EXP, a call to the strspn builtin, by folding it at the tree
   level and expanding the folded result if folding succeeded.  */
4489 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4491 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4493 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4494 CALL_EXPR_ARG (exp, 1));
4496 return expand_expr (result, target, mode, EXPAND_NORMAL);
4501 /* Expand expression EXP, which is a call to the strcspn builtin.
4502 Return NULL_RTX if we failed; the caller should emit a normal call,
4503 otherwise try to get the result in TARGET, if convenient. */
/* Expand EXP, a call to the strcspn builtin, by folding it at the
   tree level and expanding the folded result if folding succeeded.  */
4506 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4508 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4510 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4511 CALL_EXPR_ARG (exp, 1));
4513 return expand_expr (result, target, mode, EXPAND_NORMAL);
4518 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4519 if that's convenient. */
/* Expand a call to __builtin_saveregs.  The target hook does the real
   work; the resulting insns are moved to the start of the function
   (registers must be saved on entry), and the value is cached in
   saveregs_value so repeated calls expand only once.  */
4522 expand_builtin_saveregs (void)
4526 /* Don't do __builtin_saveregs more than once in a function.
4527 Save the result of the first call and reuse it. */
4528 if (saveregs_value != 0)
4529 return saveregs_value;
4531 /* When this function is called, it means that registers must be
4532 saved on entry to this function. So we migrate the call to the
4533 first insn of this function. */
4537 /* Do whatever the machine needs done in this case. */
4538 val = targetm.calls.expand_builtin_saveregs ();
4543 saveregs_value = val;
4545 /* Put the insns after the NOTE that starts the function. If this
4546 is inside a start_sequence, make the outer-level insn chain current, so
4547 the code is placed at the start of the function. */
4548 push_topmost_sequence ();
4549 emit_insn_after (seq, entry_of_function ());
4550 pop_topmost_sequence ();
4555 /* __builtin_args_info (N) returns word N of the arg space info
4556 for the current function. The number and meanings of words
4557 is controlled by the definition of CUMULATIVE_ARGS. */
4560 expand_builtin_args_info (tree exp)
4562 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4563 int *word_ptr = (int *) ¤t_function_args_info;
4565 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4567 if (call_expr_nargs (exp) != 0)
4569 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4570 error ("argument of %<__builtin_args_info%> must be constant");
4573 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4575 if (wordnum < 0 || wordnum >= nwords)
4576 error ("argument of %<__builtin_args_info%> out of range");
4578 return GEN_INT (word_ptr[wordnum]);
4582 error ("missing argument in %<__builtin_args_info%>");
4587 /* Expand a call to __builtin_next_arg. */
/* Expand a call to __builtin_next_arg: the address just past the last
   named argument, computed as internal arg pointer + arg offset.  */
4590 expand_builtin_next_arg (void)
4592 /* Checking arguments is already done in fold_builtin_next_arg
4593 that must be called before this function. */
4594 return expand_binop (ptr_mode, add_optab,
4595 current_function_internal_arg_pointer,
4596 current_function_arg_offset_rtx,
4597 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4600 /* Make it easier for the backends by protecting the valist argument
4601 from multiple evaluations. */
/* Protect VALIST from multiple evaluation and normalize it for the
   backends.  For array-type va_list, produce a pointer to the element
   type (decaying an actual array if needed); otherwise, when an
   lvalue is required, take the address, stabilize, and re-deref.
   NOTE(review): interior lines are missing from this extract.  */
4604 stabilize_va_list (tree valist, int needs_lvalue)
4606 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4608 if (TREE_SIDE_EFFECTS (valist))
4609 valist = save_expr (valist);
4611 /* For this case, the backends will be expecting a pointer to
4612 TREE_TYPE (va_list_type_node), but it's possible we've
4613 actually been given an array (an actual va_list_type_node).
4615 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4617 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4618 valist = build_fold_addr_expr_with_type (valist, p1);
4627 if (! TREE_SIDE_EFFECTS (valist))
4630 pt = build_pointer_type (va_list_type_node);
4631 valist = fold_build1 (ADDR_EXPR, pt, valist);
/* Force re-evaluation protection via save_expr below.  */
4632 TREE_SIDE_EFFECTS (valist) = 1;
4635 if (TREE_SIDE_EFFECTS (valist))
4636 valist = save_expr (valist);
4637 valist = build_fold_indirect_ref (valist);
4643 /* The "standard" definition of va_list is void*. */
/* Default target hook: the "standard" va_list type is plain void*.  */
4646 std_build_builtin_va_list (void)
4648 return ptr_type_node;
4651 /* The "standard" implementation of va_start: just assign `nextarg' to
/* The "standard" implementation of va_start: store NEXTARG into the
   memory location designated by VALIST.  */
4655 std_expand_builtin_va_start (tree valist, rtx nextarg)
4657 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4658 convert_move (va_r, nextarg, 0);
4661 /* Expand EXP, a call to __builtin_va_start. */
/* Expand EXP, a call to __builtin_va_start: validate the argument
   count, compute the next-arg address, stabilize the va_list operand,
   and dispatch to the target-specific or standard expander.  */
4664 expand_builtin_va_start (tree exp)
4669 if (call_expr_nargs (exp) < 2)
4671 error ("too few arguments to function %<va_start%>");
4675 if (fold_builtin_next_arg (exp, true))
4678 nextarg = expand_builtin_next_arg ();
4679 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
/* Targets may override the standard va_start expansion.  */
4681 #ifdef EXPAND_BUILTIN_VA_START
4682 EXPAND_BUILTIN_VA_START (valist, nextarg);
4684 std_expand_builtin_va_start (valist, nextarg);
4690 /* The "standard" implementation of va_arg: read the value from the
4691 current (padded) address and increment by the (padded) size. */
/* The "standard" implementation of va_arg for args-grow-up targets:
   read TYPE from the current (padded) va_list address, then advance
   the pointer by the (padded) size.  Generated statements go to PRE_P
   / POST_P.  NOTE(review): interior lines are missing from this
   extract — verify against the full file before relying on details.  */
4694 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4696 tree addr, t, type_size, rounded_size, valist_tmp;
4697 unsigned HOST_WIDE_INT align, boundary;
4700 #ifdef ARGS_GROW_DOWNWARD
4701 /* All of the alignment and movement below is for args-grow-up machines.
4702 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4703 implement their own specialized gimplify_va_arg_expr routines. */
4707 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
/* Arguments passed by reference are fetched as a pointer and
   dereferenced at the end.  */
4709 type = build_pointer_type (type);
4711 align = PARM_BOUNDARY / BITS_PER_UNIT;
4712 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4714 /* Hoist the valist value into a temporary for the moment. */
4715 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4717 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4718 requires greater alignment, we must perform dynamic alignment. */
4719 if (boundary > align
4720 && !integer_zerop (TYPE_SIZE (type)))
/* Round the pointer up: ap = (ap + boundary - 1) & -boundary.  */
4722 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4723 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4724 valist_tmp, size_int (boundary - 1)));
4725 gimplify_and_add (t, pre_p);
4727 t = fold_convert (sizetype, valist_tmp);
4728 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4729 fold_convert (TREE_TYPE (valist),
4730 fold_build2 (BIT_AND_EXPR, sizetype, t,
4731 size_int (-boundary))));
4732 gimplify_and_add (t, pre_p);
4737 /* If the actual alignment is less than the alignment of the type,
4738 adjust the type accordingly so that we don't assume strict alignment
4739 when dereferencing the pointer. */
4740 boundary *= BITS_PER_UNIT;
4741 if (boundary < TYPE_ALIGN (type))
4743 type = build_variant_type_copy (type);
4744 TYPE_ALIGN (type) = boundary;
4747 /* Compute the rounded size of the type. */
4748 type_size = size_in_bytes (type);
4749 rounded_size = round_up (type_size, align);
4751 /* Reduce rounded_size so it's sharable with the postqueue. */
4752 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4756 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4758 /* Small args are padded downward. */
4759 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4760 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4761 size_binop (MINUS_EXPR, rounded_size, type_size));
4762 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4765 /* Compute new value for AP. */
4766 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4767 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4768 gimplify_and_add (t, pre_p);
4770 addr = fold_convert (build_pointer_type (type), addr);
/* For pass-by-reference, one extra dereference fetches the object.  */
4773 addr = build_va_arg_indirect_ref (addr);
4775 return build_va_arg_indirect_ref (addr);
4778 /* Build an indirect-ref expression over the given TREE, which represents a
4779 piece of a va_arg() expansion. */
/* Build an INDIRECT_REF over ADDR for use in a va_arg expansion,
   telling mudflap not to instrument it.  */
4781 build_va_arg_indirect_ref (tree addr)
4783 addr = build_fold_indirect_ref (addr);
4785 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4791 /* Return a dummy expression of type TYPE in order to keep going after an
/* Return a dummy expression of TYPE (a dereference of a null pointer
   constant) so compilation can continue after an error.  */
4795 dummy_object (tree type)
4797 tree t = build_int_cst (build_pointer_type (type), 0);
4798 return build1 (INDIRECT_REF, type, t);
4801 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4802 builtin function, but a very special sort of operator. */
/* Gimplify a VA_ARG_EXPR: type-check the va_list operand, diagnose
   types that undergo default argument promotion (replacing the
   expression with a trap + dummy object), stabilize the va_list, and
   hand off to the target's gimplify_va_arg_expr hook.  NOTE(review):
   interior lines are missing from this extract.  */
4804 enum gimplify_status
4805 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4807 tree promoted_type, want_va_type, have_va_type;
4808 tree valist = TREE_OPERAND (*expr_p, 0);
4809 tree type = TREE_TYPE (*expr_p);
4812 /* Verify that valist is of the proper type. */
4813 want_va_type = va_list_type_node;
4814 have_va_type = TREE_TYPE (valist);
4816 if (have_va_type == error_mark_node)
4819 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4821 /* If va_list is an array type, the argument may have decayed
4822 to a pointer type, e.g. by being passed to another function.
4823 In that case, unwrap both types so that we can compare the
4824 underlying records. */
4825 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4826 || POINTER_TYPE_P (have_va_type))
4828 want_va_type = TREE_TYPE (want_va_type);
4829 have_va_type = TREE_TYPE (have_va_type);
4833 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4835 error ("first argument to %<va_arg%> not of type %<va_list%>");
4839 /* Generate a diagnostic for requesting data of a type that cannot
4840 be passed through `...' due to type promotion at the call site. */
4841 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* Only emit the extended hint once per compilation.  */
4844 static bool gave_help;
4846 /* Unfortunately, this is merely undefined, rather than a constraint
4847 violation, so we cannot make this an error. If this call is never
4848 executed, the program is still strictly conforming. */
4849 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4850 type, promoted_type);
4854 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4855 promoted_type, type);
4858 /* We can, however, treat "undefined" any way we please.
4859 Call abort to encourage the user to fix the program. */
4860 inform ("if this code is reached, the program will abort");
4861 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4862 append_to_statement_list (t, pre_p);
4864 /* This is dead code, but go ahead and finish so that the
4865 mode of the result comes out right. */
4866 *expr_p = dummy_object (type);
4871 /* Make it easier for the backends by protecting the valist argument
4872 from multiple evaluations. */
4873 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4875 /* For this case, the backends will be expecting a pointer to
4876 TREE_TYPE (va_list_type_node), but it's possible we've
4877 actually been given an array (an actual va_list_type_node).
4879 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4881 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4882 valist = build_fold_addr_expr_with_type (valist, p1);
4884 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4887 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4889 if (!targetm.gimplify_va_arg_expr)
4890 /* FIXME:Once most targets are converted we should merely
4891 assert this is non-null. */
4894 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4899 /* Expand EXP, a call to __builtin_va_end. */
/* Expand EXP, a call to __builtin_va_end.  va_end itself is a no-op
   here; the argument is evaluated only for its side effects.  */
4902 expand_builtin_va_end (tree exp)
4904 tree valist = CALL_EXPR_ARG (exp, 0);
4906 /* Evaluate for side effects, if needed. I hate macros that don't
4908 if (TREE_SIDE_EFFECTS (valist))
4909 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4914 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4915 builtin rather than just as an assignment in stdarg.h because of the
4916 nastiness of array-type va_list types. */
/* Expand EXP, a call to __builtin_va_copy.  Implemented as a builtin
   (not plain assignment in stdarg.h) because array-type va_lists must
   be copied with a block move rather than a scalar store.  */
4919 expand_builtin_va_copy (tree exp)
4923 dst = CALL_EXPR_ARG (exp, 0);
4924 src = CALL_EXPR_ARG (exp, 1);
/* dst needs an lvalue; src only needs an rvalue.  */
4926 dst = stabilize_va_list (dst, 1);
4927 src = stabilize_va_list (src, 0);
4929 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
/* Scalar va_list: a simple assignment suffices.  */
4931 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4932 TREE_SIDE_EFFECTS (t) = 1;
4933 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4937 rtx dstb, srcb, size;
4939 /* Evaluate to pointers. */
4940 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4941 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4942 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4943 VOIDmode, EXPAND_NORMAL);
4945 dstb = convert_memory_address (Pmode, dstb);
4946 srcb = convert_memory_address (Pmode, srcb);
4948 /* "Dereference" to BLKmode memories. */
4949 dstb = gen_rtx_MEM (BLKmode, dstb);
4950 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4951 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4952 srcb = gen_rtx_MEM (BLKmode, srcb);
4953 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4954 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
/* Copy the whole va_list object.  */
4957 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4963 /* Expand a call to one of the builtin functions __builtin_frame_address or
4964 __builtin_return_address. */
/* Expand a call to __builtin_frame_address or __builtin_return_address
   (distinguished via FNDECL's function code).  The single argument is
   the number of frames to scan up the stack; invalid or unsupported
   arguments produce a diagnostic.  */
4967 expand_builtin_frame_address (tree fndecl, tree exp)
4969 /* The argument must be a nonnegative integer constant.
4970 It counts the number of frames to scan up the stack.
4971 The value is the return address saved in that frame. */
4972 if (call_expr_nargs (exp) == 0)
4973 /* Warning about missing arg was already issued. */
4975 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4977 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4978 error ("invalid argument to %<__builtin_frame_address%>");
4980 error ("invalid argument to %<__builtin_return_address%>");
4986 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4987 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4989 /* Some ports cannot access arbitrary stack frames. */
4992 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4993 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4995 warning (0, "unsupported argument to %<__builtin_return_address%>");
4999 /* For __builtin_frame_address, return what we've got. */
5000 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Non-constant addresses are copied into a pseudo register.  */
5004 && ! CONSTANT_P (tem))
5005 tem = copy_to_mode_reg (Pmode, tem);
5010 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5011 we failed and the caller should emit a normal call, otherwise try to get
5012 the result in TARGET, if convenient. */
/* Expand EXP, a call to the alloca builtin: allocate the requested
   number of bytes on the stack and return the pointer in ptr_mode.
   Under mudflap instrumentation this expansion is suppressed so the
   call can be intercepted at runtime.  */
5015 expand_builtin_alloca (tree exp, rtx target)
5020 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5021 should always expand to function calls. These can be intercepted
5026 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5029 /* Compute the argument. */
5030 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5032 /* Allocate the desired space. */
5033 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5034 result = convert_memory_address (ptr_mode, result);
5039 /* Expand a call to a bswap builtin with argument ARG0. MODE
5040 is the mode to expand with. */
/* Expand EXP, a call to a bswap builtin, using the target's bswap
   optab in the mode of the argument.  */
5043 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5045 enum machine_mode mode;
5049 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5052 arg = CALL_EXPR_ARG (exp, 0);
5053 mode = TYPE_MODE (TREE_TYPE (arg));
5054 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5056 target = expand_unop (mode, bswap_optab, op0, target, 1);
/* expand_unop must succeed for bswap; there is no fallback here.  */
5058 gcc_assert (target);
5060 return convert_to_mode (mode, target, 0);
5063 /* Expand a call to a unary builtin in EXP.
5064 Return NULL_RTX if a normal call should be emitted rather than expanding the
5065 function in-line. If convenient, the result should be placed in TARGET.
5066 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* Expand EXP, a call to a unary builtin, via OP_OPTAB.  The result is
   computed in the argument's mode and converted to TARGET_MODE.
   SUBTARGET may be used when computing the operand.  */
5069 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5070 rtx subtarget, optab op_optab)
5074 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5077 /* Compute the argument. */
5078 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5079 VOIDmode, EXPAND_NORMAL);
5080 /* Compute op, into TARGET if possible.
5081 Set TARGET to wherever the result comes back. */
5082 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5083 op_optab, op0, target, 1);
5084 gcc_assert (target);
5086 return convert_to_mode (target_mode, target, 0);
5089 /* If the string passed to fputs is a constant and is one character
5090 long, we attempt to transform this call into __builtin_fputc(). */
/* Expand EXP, a call to fputs (UNLOCKED selects the _unlocked form),
   by folding at the tree level — e.g. a one-character constant string
   becomes fputc — and expanding the folded result.  */
5093 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5095 /* Verify the arguments in the original call. */
5096 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5098 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5099 CALL_EXPR_ARG (exp, 1),
/* target == const0_rtx means the return value is unused.  */
5100 (target == const0_rtx),
5101 unlocked, NULL_TREE);
5103 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5108 /* Expand a call to __builtin_expect. We just return our argument
5109 as the builtin_expect semantic should've been already executed by
5110 tree branch prediction pass. */
/* Expand EXP, a call to __builtin_expect: just evaluate and return
   the first argument — the branch-prediction hint has already been
   consumed by the tree-level prediction pass.  */
5113 expand_builtin_expect (tree exp, rtx target)
5117 if (call_expr_nargs (exp) < 2)
5119 arg = CALL_EXPR_ARG (exp, 0);
5120 c = CALL_EXPR_ARG (exp, 1);
5122 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5123 /* When guessing was done, the hints should be already stripped away. */
5124 gcc_assert (!flag_guess_branch_prob
5125 || optimize == 0 || errorcount || sorrycount);
/* Expand a call to __builtin_trap: use the target's trap insn when
   available, otherwise fall back to calling abort.  */
5130 expand_builtin_trap (void)
5134 emit_insn (gen_trap ());
5137 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5141 /* Expand EXP, a call to fabs, fabsf or fabsl.
5142 Return NULL_RTX if a normal call should be emitted rather than expanding
5143 the function inline. If convenient, the result should be placed
5144 in TARGET. SUBTARGET may be used as the target for computing
/* Expand EXP, a call to fabs/fabsf/fabsl, via expand_abs in the
   argument's mode.  */
5148 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5150 enum machine_mode mode;
5154 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5157 arg = CALL_EXPR_ARG (exp, 0);
5158 mode = TYPE_MODE (TREE_TYPE (arg));
5159 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5160 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5163 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5164 Return NULL if a normal call should be emitted rather than expanding the
5165 function inline. If convenient, the result should be placed in TARGET.
5166 SUBTARGET may be used as the target for computing the operand. */
/* Expand EXP, a call to copysign/copysignf/copysignl, via
   expand_copysign on the two expanded operands.  */
5169 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5174 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5177 arg = CALL_EXPR_ARG (exp, 0);
5178 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5180 arg = CALL_EXPR_ARG (exp, 1);
5181 op1 = expand_normal (arg);
5183 return expand_copysign (op0, op1, target);
5186 /* Create a new constant string literal and return a char* pointer to it.
5187 The STRING_CST value is the LEN characters at STR. */
/* Create a new constant string literal of LEN characters at STR and
   return a char* pointer to it: build the STRING_CST, give it a
   const-char array type of the right bounds, mark it constant/static,
   then take its address and cast to char*.  */
5189 build_string_literal (int len, const char *str)
5191 tree t, elem, index, type;
5193 t = build_string (len, str);
5194 elem = build_type_variant (char_type_node, 1, 0);
5195 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5196 type = build_array_type (elem, index);
5197 TREE_TYPE (t) = type;
5198 TREE_CONSTANT (t) = 1;
5199 TREE_INVARIANT (t) = 1;
5200 TREE_READONLY (t) = 1;
5201 TREE_STATIC (t) = 1;
5203 type = build_pointer_type (type);
5204 t = build1 (ADDR_EXPR, type, t);
5206 type = build_pointer_type (elem);
/* Decay &array to a plain char* for callers.  */
5207 t = build1 (NOP_EXPR, type, t);
5211 /* Expand EXP, a call to printf or printf_unlocked.
5212 Return NULL_RTX if a normal call should be emitted rather than transforming
5213 the function inline. If convenient, the result should be placed in
5214 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
/* Expand EXP, a call to printf or printf_unlocked, by rewriting
   simple constant formats into putchar/puts calls: "%s\n" -> puts,
   "%c" -> putchar, "c" -> putchar ('c'), "string\n" -> puts.  Only
   applies when the return value is unused.  NOTE(review): interior
   lines are missing from this extract.  */
5217 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5220 /* If we're using an unlocked function, assume the other unlocked
5221 functions exist explicitly. */
5222 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5223 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5224 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5225 : implicit_built_in_decls[BUILT_IN_PUTS];
5226 const char *fmt_str;
5229 int nargs = call_expr_nargs (exp);
5231 /* If the return value is used, don't do the transformation. */
5232 if (target != const0_rtx)
5235 /* Verify the required arguments in the original call. */
5238 fmt = CALL_EXPR_ARG (exp, 0);
5239 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5242 /* Check whether the format is a literal string constant. */
5243 fmt_str = c_getstr (fmt);
5244 if (fmt_str == NULL)
/* target_percent etc. hold the format chars in the target charset.  */
5247 if (!init_target_chars ())
5250 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5251 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5254 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5257 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5259 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5260 else if (strcmp (fmt_str, target_percent_c) == 0)
5263 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5266 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5270 /* We can't handle anything else with % args or %% ... yet. */
5271 if (strchr (fmt_str, target_percent))
5277 /* If the format specifier was "", printf does nothing. */
5278 if (fmt_str[0] == '\0')
5280 /* If the format specifier has length of 1, call putchar. */
5281 if (fmt_str[1] == '\0')
5283 /* Given printf("c"), (where c is any one character,)
5284 convert "c"[0] to an int and pass that to the replacement
5286 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5288 fn = build_call_expr (fn_putchar, 1, arg);
5292 /* If the format specifier was "string\n", call puts("string"). */
5293 size_t len = strlen (fmt_str);
5294 if ((unsigned char)fmt_str[len - 1] == target_newline)
5296 /* Create a NUL-terminated string that's one char shorter
5297 than the original, stripping off the trailing '\n'. */
5298 char *newstr = alloca (len);
5299 memcpy (newstr, fmt_str, len - 1);
5300 newstr[len - 1] = 0;
5302 arg = build_string_literal (len, newstr);
5303 fn = build_call_expr (fn_puts, 1, arg);
5306 /* We'd like to arrange to call fputs(string,stdout) here,
5307 but we need stdout and don't have a way to get it yet. */
5314 if (TREE_CODE (fn) == CALL_EXPR)
/* Preserve the tail-call flag from the original printf call.  */
5315 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5316 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5319 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5320 Return NULL_RTX if a normal call should be emitted rather than transforming
5321 the function inline. If convenient, the result should be placed in
5322 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
/* Expand EXP, a call to fprintf or fprintf_unlocked, by rewriting
   simple constant formats into fputc/fputs calls: "%s" -> fputs,
   "%c" -> fputc, plain string -> fputs.  Only applies when the return
   value is unused.  NOTE(review): interior lines are missing from
   this extract.  */
5325 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5328 /* If we're using an unlocked function, assume the other unlocked
5329 functions exist explicitly. */
5330 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5331 : implicit_built_in_decls[BUILT_IN_FPUTC];
5332 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5333 : implicit_built_in_decls[BUILT_IN_FPUTS];
5334 const char *fmt_str;
5337 int nargs = call_expr_nargs (exp);
5339 /* If the return value is used, don't do the transformation. */
5340 if (target != const0_rtx)
5343 /* Verify the required arguments in the original call. */
5346 fp = CALL_EXPR_ARG (exp, 0);
5347 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5349 fmt = CALL_EXPR_ARG (exp, 1);
5350 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5353 /* Check whether the format is a literal string constant. */
5354 fmt_str = c_getstr (fmt);
5355 if (fmt_str == NULL)
5358 if (!init_target_chars ())
5361 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5362 if (strcmp (fmt_str, target_percent_s) == 0)
5365 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5367 arg = CALL_EXPR_ARG (exp, 2);
5369 fn = build_call_expr (fn_fputs, 2, arg, fp);
5371 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5372 else if (strcmp (fmt_str, target_percent_c) == 0)
5375 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5377 arg = CALL_EXPR_ARG (exp, 2);
5379 fn = build_call_expr (fn_fputc, 2, arg, fp);
5383 /* We can't handle anything else with % args or %% ... yet. */
5384 if (strchr (fmt_str, target_percent))
5390 /* If the format specifier was "", fprintf does nothing. */
5391 if (fmt_str[0] == '\0')
5393 /* Evaluate and ignore FILE* argument for side-effects. */
5394 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL)
5398 /* When "string" doesn't contain %, replace all cases of
5399 fprintf(stream,string) with fputs(string,stream). The fputs
5400 builtin will take care of special cases like length == 1. */
5402 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5407 if (TREE_CODE (fn) == CALL_EXPR)
/* Preserve the tail-call flag from the original fprintf call.  */
5408 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5409 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5412 /* Expand a call EXP to sprintf. Return NULL_RTX if
5413 a normal call should be emitted rather than expanding the function
5414 inline. If convenient, the result should be placed in TARGET with
5418 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5421 const char *fmt_str;
5422 int nargs = call_expr_nargs (exp);
5424 /* Verify the required arguments in the original call. */
5427 dest = CALL_EXPR_ARG (exp, 0);
5428 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5430 fmt = CALL_EXPR_ARG (exp, 0);
5431 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5434 /* Check whether the format is a literal string constant. */
5435 fmt_str = c_getstr (fmt);
5436 if (fmt_str == NULL)
5439 if (!init_target_chars ())
5442 /* If the format doesn't contain % args or %%, use strcpy. */
5443 if (strchr (fmt_str, target_percent) == 0)
5445 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5448 if ((nargs > 2) || ! fn)
5450 expand_expr (build_call_expr (fn, 2, dest, fmt),
5451 const0_rtx, VOIDmode, EXPAND_NORMAL);
5452 if (target == const0_rtx)
5454 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5455 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5457 /* If the format is "%s", use strcpy if the result isn't used. */
5458 else if (strcmp (fmt_str, target_percent_s) == 0)
5461 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5467 arg = CALL_EXPR_ARG (exp, 2);
5468 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5471 if (target != const0_rtx)
5473 len = c_strlen (arg, 1);
5474 if (! len || TREE_CODE (len) != INTEGER_CST)
5480 expand_expr (build_call_expr (fn, 2, dest, arg),
5481 const0_rtx, VOIDmode, EXPAND_NORMAL);
5483 if (target == const0_rtx)
5485 return expand_expr (len, target, mode, EXPAND_NORMAL);
5491 /* Expand a call to either the entry or exit function profiler. */
/* Expand a call to the entry (EXITP false) or exit (EXITP true)
   function profiler: a library call passing this function's address
   and its return address.  */
5494 expand_builtin_profile_func (bool exitp)
5498 this = DECL_RTL (current_function_decl);
5499 gcc_assert (MEM_P (this));
/* Strip the MEM wrapper to get the function's symbol address.  */
5500 this = XEXP (this, 0);
5503 which = profile_function_exit_libfunc;
5505 which = profile_function_entry_libfunc;
5507 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5508 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5515 /* Expand a call to __builtin___clear_cache. */
/* Expand a call to __builtin___clear_cache.  Three target cases:
   no clear_cache insn but CLEAR_INSN_CACHE defined -> expand as a
   call to libgcc's __clear_cache; neither defined -> no-op; a
   clear_cache insn available -> emit it directly on the two pointer
   operands.  Must never expand to a library call when the insn
   exists, to avoid infinite recursion through libgcc.  */
5518 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5520 #ifndef HAVE_clear_cache
5521 #ifdef CLEAR_INSN_CACHE
5522 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5523 does something. Just do the default expansion to a call to
5527 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5528 does nothing. There is no need to call it. Do nothing. */
5530 #endif /* CLEAR_INSN_CACHE */
5532 /* We have a "clear_cache" insn, and it will handle everything. */
5534 rtx begin_rtx, end_rtx;
5535 enum insn_code icode;
5537 /* We must not expand to a library call. If we did, any
5538 fallback library function in libgcc that might contain a call to
5539 __builtin___clear_cache() would recurse infinitely. */
5540 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5542 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5546 if (HAVE_clear_cache)
5548 icode = CODE_FOR_clear_cache;
5550 begin = CALL_EXPR_ARG (exp, 0);
5551 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5552 begin_rtx = convert_memory_address (Pmode, begin_rtx);
/* Force operands into forms the insn's predicates accept.  */
5553 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5554 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5556 end = CALL_EXPR_ARG (exp, 1);
5557 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5558 end_rtx = convert_memory_address (Pmode, end_rtx);
5559 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5560 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5562 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5565 #endif /* HAVE_clear_cache */
5568 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5571 round_trampoline_addr (rtx tramp)
5573 rtx temp, addend, mask;
5575 /* If we don't need too much alignment, we'll have been guaranteed
5576 proper alignment by get_trampoline_type. */
5577 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
/* (early return for the aligned case is elided in this chunk).  */
5580 /* Round address up to desired boundary. */
/* Classic round-up: (tramp + (align_bytes - 1)) & -align_bytes, done
   with two expand_simple_binop calls into a fresh Pmode register.  */
5581 temp = gen_reg_rtx (Pmode);
5582 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5583 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5585 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5586 temp, 0, OPTAB_LIB_WIDEN);
5587 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5588 temp, 0, OPTAB_LIB_WIDEN);
/* Expand a call to __builtin_init_trampoline: fill in the trampoline at
   arg0 so that it calls arg1 (the nested function) with arg2 as the
   static chain.  */
5594 expand_builtin_init_trampoline (tree exp)
5596 tree t_tramp, t_func, t_chain;
5597 rtx r_tramp, r_func, r_chain;
5598 #ifdef TRAMPOLINE_TEMPLATE
/* The three arguments must all be pointers.  */
5602 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5603 POINTER_TYPE, VOID_TYPE))
5606 t_tramp = CALL_EXPR_ARG (exp, 0);
5607 t_func = CALL_EXPR_ARG (exp, 1);
5608 t_chain = CALL_EXPR_ARG (exp, 2);
5610 r_tramp = expand_normal (t_tramp);
5611 r_func = expand_normal (t_func);
5612 r_chain = expand_normal (t_chain);
5614 /* Generate insns to initialize the trampoline. */
5615 r_tramp = round_trampoline_addr (r_tramp);
/* If the target provides a static template, block-copy it into the
   trampoline storage first; INITIALIZE_TRAMPOLINE then patches in the
   function address and static chain.  */
5616 #ifdef TRAMPOLINE_TEMPLATE
5617 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5618 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5619 emit_block_move (blktramp, assemble_trampoline_template (),
5620 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
/* Record that this translation unit created at least one trampoline
   (used elsewhere, e.g. for executable-stack marking).  */
5622 trampolines_created = 1;
5623 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address to TRAMPOLINE_ALIGNMENT and apply any target-specific address
   adjustment before it is used as a function pointer.  */
5629 expand_builtin_adjust_trampoline (tree exp)
5633 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5636 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5637 tramp = round_trampoline_addr (tramp);
5638 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5639 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5645 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5646 function. The function first checks whether the back end provides
5647 an insn to implement signbit for the respective mode. If not, it
5648 checks whether the floating point format of the value is such that
5649 the sign bit can be extracted. If that is not the case, the
5650 function returns NULL_RTX to indicate that a normal call should be
5651 emitted rather than expanding the function in-line. EXP is the
5652 expression that is a call to the builtin function; if convenient,
5653 the result should be placed in TARGET. */
5655 expand_builtin_signbit (tree exp, rtx target)
5657 const struct real_format *fmt;
5658 enum machine_mode fmode, imode, rmode;
5659 HOST_WIDE_INT hi, lo;
5662 enum insn_code signbit_insn_code;
5665 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
/* FMODE: mode of the float argument; RMODE: mode of the int result.  */
5668 arg = CALL_EXPR_ARG (exp, 0);
5669 fmode = TYPE_MODE (TREE_TYPE (arg));
5670 rmode = TYPE_MODE (TREE_TYPE (exp));
5671 fmt = REAL_MODE_FORMAT (fmode);
5673 arg = builtin_save_expr (arg);
5675 /* Expand the argument yielding a RTX expression. */
5676 temp = expand_normal (arg);
5678 /* Check if the back end provides an insn that handles signbit for the
5680 signbit_insn_code = signbit_optab [(int) fmode];
5681 if (signbit_insn_code != CODE_FOR_nothing)
5683 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5684 emit_unop_insn (signbit_insn_code, target, temp, UNKNOWN)
5688 /* For floating point formats without a sign bit, implement signbit
5690 bitpos = fmt->signbit_ro;
5693 /* But we can't do this if the format supports signed zero. */
5694 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
/* Fallback when there is no extractable sign bit: signbit(x) as x < 0.  */
5697 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5698 build_real (TREE_TYPE (arg), dconst0));
5699 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
/* Narrow floats: view the whole value as one integer mode; bail out
   (presumably, via elided lines) if no integer mode of that size exists.  */
5702 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5704 imode = int_mode_for_mode (fmode);
5705 if (imode == BLKmode)
5707 temp = gen_lowpart (imode, temp);
5712 /* Handle targets with different FP word orders. */
/* Wide floats: isolate the single word that contains the sign bit and
   rebase BITPOS to be relative to that word.  */
5713 if (FLOAT_WORDS_BIG_ENDIAN)
5714 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5716 word = bitpos / BITS_PER_WORD;
5717 temp = operand_subword_force (temp, word, fmode);
5718 bitpos = bitpos % BITS_PER_WORD;
5721 /* Force the intermediate word_mode (or narrower) result into a
5722 register. This avoids attempting to create paradoxical SUBREGs
5723 of floating point modes below. */
5724 temp = force_reg (imode, temp);
5726 /* If the bitpos is within the "result mode" lowpart, the operation
5727 can be implement with a single bitwise AND. Otherwise, we need
5728 a right shift and an AND. */
5730 if (bitpos < GET_MODE_BITSIZE (rmode))
/* Build the double-word mask 1 << bitpos split into LO/HI halves for
   immed_double_const.  */
5732 if (bitpos < HOST_BITS_PER_WIDE_INT)
5735 lo = (HOST_WIDE_INT) 1 << bitpos;
5739 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5744 temp = gen_lowpart (rmode, temp);
5745 temp = expand_binop (rmode, and_optab, temp,
5746 immed_double_const (lo, hi, rmode),
5747 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5751 /* Perform a logical right shift to place the signbit in the least
5752 significant bit, then truncate the result to the desired mode
5753 and mask just this bit. */
5754 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5755 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5756 temp = gen_lowpart (rmode, temp);
5757 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5758 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5764 /* Expand fork or exec calls. TARGET is the desired target of the
5765 call. EXP is the call. FN is the
5766 identificator of the actual function. IGNORE is nonzero if the
5767 value is to be ignored. */
5770 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5775 /* If we are not profiling, just call the function. */
5776 if (!profile_arc_flag)
5779 /* Otherwise call the wrapper. This should be equivalent for the rest of
5780 compiler, so the code does not diverge, and the wrapper may run the
5781 code necessary for keeping the profiling sane. */
/* Map each builtin to the matching libgcov __gcov_* wrapper, which
   flushes/forwards profile data around fork/exec.  */
5783 switch (DECL_FUNCTION_CODE (fn))
5786 id = get_identifier ("__gcov_fork");
5789 case BUILT_IN_EXECL:
5790 id = get_identifier ("__gcov_execl");
5793 case BUILT_IN_EXECV:
5794 id = get_identifier ("__gcov_execv");
5797 case BUILT_IN_EXECLP:
5798 id = get_identifier ("__gcov_execlp");
5801 case BUILT_IN_EXECLE:
5802 id = get_identifier ("__gcov_execle");
5805 case BUILT_IN_EXECVP:
5806 id = get_identifier ("__gcov_execvp");
5809 case BUILT_IN_EXECVE:
5810 id = get_identifier ("__gcov_execve");
/* Build an extern declaration for the wrapper with the same type as
   the original function, then rewrite the call to use it.  */
5817 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5818 DECL_EXTERNAL (decl) = 1;
5819 TREE_PUBLIC (decl) = 1;
5820 DECL_ARTIFICIAL (decl) = 1;
5821 TREE_NOTHROW (decl) = 1;
5822 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5823 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5824 call = rewrite_call_expr (exp, 0, decl, 0);
5825 return expand_call (call, target, ignore);
5830 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5831 the pointer in these functions is void*, the tree optimizers may remove
5832 casts. The mode computed in expand_builtin isn't reliable either, due
5833 to __sync_bool_compare_and_swap.
5835 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5836 group of builtins. This gives us log2 of the mode size. */
5838 static inline enum machine_mode
5839 get_builtin_sync_mode (int fcode_diff)
5841 /* The size is not negotiable, so ask not to get BLKmode in return
5842 if the target indicates that a smaller size would be better. */
/* 1 << fcode_diff bytes, i.e. BITS_PER_UNIT << fcode_diff bits, as an
   integer mode; the final 0 means "exact size required".  */
5843 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5846 /* Expand the memory expression LOC and return the appropriate memory operand
5847 for the builtin_sync operations. */
5850 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5854 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5856 /* Note that we explicitly do not want any alias information for this
5857 memory, so that we kill all other live memories. Otherwise we don't
5858 satisfy the full barrier semantics of the intrinsic. */
5859 mem = validize_mem (gen_rtx_MEM (mode, addr));
/* Take whatever alignment the pointer expression can prove, mark the
   MEM with the barrier alias set, and make it volatile so it is never
   moved or deleted.  */
5861 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5862 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5863 MEM_VOLATILE_P (mem) = 1;
5868 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5869 EXP is the CALL_EXPR. CODE is the rtx code
5870 that corresponds to the arithmetic or logical operation from the name;
5871 an exception here is that NOT actually means NAND. TARGET is an optional
5872 place for us to store the results; AFTER is true if this is the
5873 fetch_and_xxx form. IGNORE is true if we don't actually care about
5874 the result of the operation at all. */
5877 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5878 enum rtx_code code, bool after,
5879 rtx target, bool ignore)
5882 enum machine_mode old_mode;
5884 /* Expand the operands. */
5885 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5887 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5888 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5889 of CONST_INTs, where we know the old_mode only from the call argument. */
5890 old_mode = GET_MODE (val);
5891 if (old_mode == VOIDmode)
5892 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5893 val = convert_modes (mode, old_mode, val, 1);
/* When the result is ignored, emit the bare atomic operation; otherwise
   the fetch variant that also produces the old/new value into TARGET.  */
5896 return expand_sync_operation (mem, val, code);
5898 return expand_sync_fetch_operation (mem, val, code, after, target);
5901 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5902 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5903 true if this is the boolean form. TARGET is a place for us to store the
5904 results; this is NOT optional if IS_BOOL is true. */
5907 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5908 bool is_bool, rtx target)
5910 rtx old_val, new_val, mem;
5911 enum machine_mode old_mode;
5913 /* Expand the operands. */
5914 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
/* Expected (old) value — arg 1.  */
5917 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5918 mode, EXPAND_NORMAL);
5919 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5920 of CONST_INTs, where we know the old_mode only from the call argument. */
5921 old_mode = GET_MODE (old_val);
5922 if (old_mode == VOIDmode)
5923 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5924 old_val = convert_modes (mode, old_mode, old_val, 1);
/* Replacement (new) value — arg 2; same promoted-CONST_INT fixup.  */
5926 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5927 mode, EXPAND_NORMAL);
5928 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5929 of CONST_INTs, where we know the old_mode only from the call argument. */
5930 old_mode = GET_MODE (new_val);
5931 if (old_mode == VOIDmode)
5932 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5933 new_val = convert_modes (mode, old_mode, new_val, 1);
/* Dispatch on IS_BOOL (the controlling if is elided in this chunk):
   boolean form returns success/failure, val form returns the old value.  */
5936 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5938 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5941 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5942 general form is actually an atomic exchange, and some targets only
5943 support a reduced form with the second argument being a constant 1.
5944 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5948 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5952 enum machine_mode old_mode;
5954 /* Expand the operands. */
5955 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5956 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5957 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5958 of CONST_INTs, where we know the old_mode only from the call argument. */
5959 old_mode = GET_MODE (val);
5960 if (old_mode == VOIDmode)
5961 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5962 val = convert_modes (mode, old_mode, val, 1);
/* Emit the atomic exchange; the previous memory value goes to TARGET.  */
5964 return expand_sync_lock_test_and_set (mem, val, target);
5967 /* Expand the __sync_synchronize intrinsic. */
5970 expand_builtin_synchronize (void)
/* Prefer the target's explicit memory_barrier insn when available.  */
5974 #ifdef HAVE_memory_barrier
5975 if (HAVE_memory_barrier)
5977 emit_insn (gen_memory_barrier ());
5982 /* If no explicit memory barrier instruction is available, create an
5983 empty asm stmt with a memory clobber. */
/* asm volatile ("" ::: "memory") — a compiler-level barrier that stops
   memory accesses from being reordered across this point.  */
5984 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5985 tree_cons (NULL, build_string (6, "memory"), NULL));
5986 ASM_VOLATILE_P (x) = 1;
5987 expand_asm_expr (x);
5990 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5993 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5995 enum insn_code icode;
/* A release stores zero into the lock word.  */
5997 rtx val = const0_rtx;
5999 /* Expand the operands. */
6000 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6002 /* If there is an explicit operation in the md file, use it. */
6003 icode = sync_lock_release[mode];
6004 if (icode != CODE_FOR_nothing)
6006 if (!insn_data[icode].operand[1].predicate (val, mode))
6007 val = force_reg (mode, val);
6009 insn = GEN_FCN (icode) (mem, val);
6017 /* Otherwise we can implement this operation by emitting a barrier
6018 followed by a store of zero. */
6019 expand_builtin_synchronize ();
6020 emit_move_insn (mem, val);
6023 /* Expand an expression EXP that calls a built-in function,
6024 with result going to TARGET if that's convenient
6025 (and in mode MODE if that's convenient).
6026 SUBTARGET may be used as the target for computing one of EXP's operands.
6027 IGNORE is nonzero if the value is to be ignored. */
6030 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6033 tree fndecl = get_callee_fndecl (exp);
6034 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6035 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6037 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6038 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6040 /* When not optimizing, generate calls to library functions for a certain
6043 && !called_as_built_in (fndecl)
6044 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6045 && fcode != BUILT_IN_ALLOCA)
6046 return expand_call (exp, target, ignore);
6048 /* The built-in function expanders test for target == const0_rtx
6049 to determine whether the function's result will be ignored. */
6051 target = const0_rtx;
6053 /* If the result of a pure or const built-in function is ignored, and
6054 none of its arguments are volatile, we can avoid expanding the
6055 built-in call and just evaluate the arguments for side-effects. */
6056 if (target == const0_rtx
6057 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
6059 bool volatilep = false;
6061 call_expr_arg_iterator iter;
6063 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6064 if (TREE_THIS_VOLATILE (arg))
6072 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6073 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6080 CASE_FLT_FN (BUILT_IN_FABS):
6081 target = expand_builtin_fabs (exp, target, subtarget);
6086 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6087 target = expand_builtin_copysign (exp, target, subtarget);
6092 /* Just do a normal library call if we were unable to fold
6094 CASE_FLT_FN (BUILT_IN_CABS):
6097 CASE_FLT_FN (BUILT_IN_EXP):
6098 CASE_FLT_FN (BUILT_IN_EXP10):
6099 CASE_FLT_FN (BUILT_IN_POW10):
6100 CASE_FLT_FN (BUILT_IN_EXP2):
6101 CASE_FLT_FN (BUILT_IN_EXPM1):
6102 CASE_FLT_FN (BUILT_IN_LOGB):
6103 CASE_FLT_FN (BUILT_IN_LOG):
6104 CASE_FLT_FN (BUILT_IN_LOG10):
6105 CASE_FLT_FN (BUILT_IN_LOG2):
6106 CASE_FLT_FN (BUILT_IN_LOG1P):
6107 CASE_FLT_FN (BUILT_IN_TAN):
6108 CASE_FLT_FN (BUILT_IN_ASIN):
6109 CASE_FLT_FN (BUILT_IN_ACOS):
6110 CASE_FLT_FN (BUILT_IN_ATAN):
6111 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6112 because of possible accuracy problems. */
6113 if (! flag_unsafe_math_optimizations)
6115 CASE_FLT_FN (BUILT_IN_SQRT):
6116 CASE_FLT_FN (BUILT_IN_FLOOR):
6117 CASE_FLT_FN (BUILT_IN_CEIL):
6118 CASE_FLT_FN (BUILT_IN_TRUNC):
6119 CASE_FLT_FN (BUILT_IN_ROUND):
6120 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6121 CASE_FLT_FN (BUILT_IN_RINT):
6122 target = expand_builtin_mathfn (exp, target, subtarget);
6127 CASE_FLT_FN (BUILT_IN_ILOGB):
6128 if (! flag_unsafe_math_optimizations)
6130 CASE_FLT_FN (BUILT_IN_ISINF):
6131 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6136 CASE_FLT_FN (BUILT_IN_LCEIL):
6137 CASE_FLT_FN (BUILT_IN_LLCEIL):
6138 CASE_FLT_FN (BUILT_IN_LFLOOR):
6139 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6140 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6145 CASE_FLT_FN (BUILT_IN_LRINT):
6146 CASE_FLT_FN (BUILT_IN_LLRINT):
6147 CASE_FLT_FN (BUILT_IN_LROUND):
6148 CASE_FLT_FN (BUILT_IN_LLROUND):
6149 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6154 CASE_FLT_FN (BUILT_IN_POW):
6155 target = expand_builtin_pow (exp, target, subtarget);
6160 CASE_FLT_FN (BUILT_IN_POWI):
6161 target = expand_builtin_powi (exp, target, subtarget);
6166 CASE_FLT_FN (BUILT_IN_ATAN2):
6167 CASE_FLT_FN (BUILT_IN_LDEXP):
6168 CASE_FLT_FN (BUILT_IN_SCALB):
6169 CASE_FLT_FN (BUILT_IN_SCALBN):
6170 CASE_FLT_FN (BUILT_IN_SCALBLN):
6171 if (! flag_unsafe_math_optimizations)
6174 CASE_FLT_FN (BUILT_IN_FMOD):
6175 CASE_FLT_FN (BUILT_IN_REMAINDER):
6176 CASE_FLT_FN (BUILT_IN_DREM):
6177 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6182 CASE_FLT_FN (BUILT_IN_CEXPI):
6183 target = expand_builtin_cexpi (exp, target, subtarget);
6184 gcc_assert (target);
6187 CASE_FLT_FN (BUILT_IN_SIN):
6188 CASE_FLT_FN (BUILT_IN_COS):
6189 if (! flag_unsafe_math_optimizations)
6191 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6196 CASE_FLT_FN (BUILT_IN_SINCOS):
6197 if (! flag_unsafe_math_optimizations)
6199 target = expand_builtin_sincos (exp);
6204 case BUILT_IN_APPLY_ARGS:
6205 return expand_builtin_apply_args ();
6207 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6208 FUNCTION with a copy of the parameters described by
6209 ARGUMENTS, and ARGSIZE. It returns a block of memory
6210 allocated on the stack into which is stored all the registers
6211 that might possibly be used for returning the result of a
6212 function. ARGUMENTS is the value returned by
6213 __builtin_apply_args. ARGSIZE is the number of bytes of
6214 arguments that must be copied. ??? How should this value be
6215 computed? We'll also need a safe worst case value for varargs
6217 case BUILT_IN_APPLY:
6218 if (!validate_arglist (exp, POINTER_TYPE,
6219 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6220 && !validate_arglist (exp, REFERENCE_TYPE,
6221 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6227 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6228 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6229 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6231 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6234 /* __builtin_return (RESULT) causes the function to return the
6235 value described by RESULT. RESULT is address of the block of
6236 memory returned by __builtin_apply. */
6237 case BUILT_IN_RETURN:
6238 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6239 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6242 case BUILT_IN_SAVEREGS:
6243 return expand_builtin_saveregs ();
6245 case BUILT_IN_ARGS_INFO:
6246 return expand_builtin_args_info (exp);
6248 /* Return the address of the first anonymous stack arg. */
6249 case BUILT_IN_NEXT_ARG:
6250 if (fold_builtin_next_arg (exp, false))
6252 return expand_builtin_next_arg ();
6254 case BUILT_IN_CLEAR_CACHE:
6255 target = expand_builtin___clear_cache (exp);
6260 case BUILT_IN_CLASSIFY_TYPE:
6261 return expand_builtin_classify_type (exp);
6263 case BUILT_IN_CONSTANT_P:
6266 case BUILT_IN_FRAME_ADDRESS:
6267 case BUILT_IN_RETURN_ADDRESS:
6268 return expand_builtin_frame_address (fndecl, exp);
6270 /* Returns the address of the area where the structure is returned.
6272 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6273 if (call_expr_nargs (exp) != 0
6274 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6275 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6278 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6280 case BUILT_IN_ALLOCA:
6281 target = expand_builtin_alloca (exp, target);
6286 case BUILT_IN_STACK_SAVE:
6287 return expand_stack_save ();
6289 case BUILT_IN_STACK_RESTORE:
6290 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6293 case BUILT_IN_BSWAP32:
6294 case BUILT_IN_BSWAP64:
6295 target = expand_builtin_bswap (exp, target, subtarget);
6301 CASE_INT_FN (BUILT_IN_FFS):
6302 case BUILT_IN_FFSIMAX:
6303 target = expand_builtin_unop (target_mode, exp, target,
6304 subtarget, ffs_optab);
6309 CASE_INT_FN (BUILT_IN_CLZ):
6310 case BUILT_IN_CLZIMAX:
6311 target = expand_builtin_unop (target_mode, exp, target,
6312 subtarget, clz_optab);
6317 CASE_INT_FN (BUILT_IN_CTZ):
6318 case BUILT_IN_CTZIMAX:
6319 target = expand_builtin_unop (target_mode, exp, target,
6320 subtarget, ctz_optab);
6325 CASE_INT_FN (BUILT_IN_POPCOUNT):
6326 case BUILT_IN_POPCOUNTIMAX:
6327 target = expand_builtin_unop (target_mode, exp, target,
6328 subtarget, popcount_optab);
6333 CASE_INT_FN (BUILT_IN_PARITY):
6334 case BUILT_IN_PARITYIMAX:
6335 target = expand_builtin_unop (target_mode, exp, target,
6336 subtarget, parity_optab);
6341 case BUILT_IN_STRLEN:
6342 target = expand_builtin_strlen (exp, target, target_mode);
6347 case BUILT_IN_STRCPY:
6348 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6353 case BUILT_IN_STRNCPY:
6354 target = expand_builtin_strncpy (exp, target, mode);
6359 case BUILT_IN_STPCPY:
6360 target = expand_builtin_stpcpy (exp, target, mode);
6365 case BUILT_IN_STRCAT:
6366 target = expand_builtin_strcat (fndecl, exp, target, mode);
6371 case BUILT_IN_STRNCAT:
6372 target = expand_builtin_strncat (exp, target, mode);
6377 case BUILT_IN_STRSPN:
6378 target = expand_builtin_strspn (exp, target, mode);
6383 case BUILT_IN_STRCSPN:
6384 target = expand_builtin_strcspn (exp, target, mode);
6389 case BUILT_IN_STRSTR:
6390 target = expand_builtin_strstr (exp, target, mode);
6395 case BUILT_IN_STRPBRK:
6396 target = expand_builtin_strpbrk (exp, target, mode);
6401 case BUILT_IN_INDEX:
6402 case BUILT_IN_STRCHR:
6403 target = expand_builtin_strchr (exp, target, mode);
6408 case BUILT_IN_RINDEX:
6409 case BUILT_IN_STRRCHR:
6410 target = expand_builtin_strrchr (exp, target, mode);
6415 case BUILT_IN_MEMCPY:
6416 target = expand_builtin_memcpy (exp, target, mode);
6421 case BUILT_IN_MEMPCPY:
6422 target = expand_builtin_mempcpy (exp, target, mode);
6427 case BUILT_IN_MEMMOVE:
6428 target = expand_builtin_memmove (exp, target, mode, ignore);
6433 case BUILT_IN_BCOPY:
6434 target = expand_builtin_bcopy (exp, ignore);
6439 case BUILT_IN_MEMSET:
6440 target = expand_builtin_memset (exp, target, mode);
6445 case BUILT_IN_BZERO:
6446 target = expand_builtin_bzero (exp);
6451 case BUILT_IN_STRCMP:
6452 target = expand_builtin_strcmp (exp, target, mode);
6457 case BUILT_IN_STRNCMP:
6458 target = expand_builtin_strncmp (exp, target, mode);
6463 case BUILT_IN_MEMCHR:
6464 target = expand_builtin_memchr (exp, target, mode);
6470 case BUILT_IN_MEMCMP:
6471 target = expand_builtin_memcmp (exp, target, mode);
6476 case BUILT_IN_SETJMP:
6477 /* This should have been lowered to the builtins below. */
6480 case BUILT_IN_SETJMP_SETUP:
6481 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6482 and the receiver label. */
6483 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6485 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6486 VOIDmode, EXPAND_NORMAL);
6487 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6488 rtx label_r = label_rtx (label);
6490 /* This is copied from the handling of non-local gotos. */
6491 expand_builtin_setjmp_setup (buf_addr, label_r);
6492 nonlocal_goto_handler_labels
6493 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6494 nonlocal_goto_handler_labels);
6495 /* ??? Do not let expand_label treat us as such since we would
6496 not want to be both on the list of non-local labels and on
6497 the list of forced labels. */
6498 FORCED_LABEL (label) = 0;
6503 case BUILT_IN_SETJMP_DISPATCHER:
6504 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6505 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6507 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6508 rtx label_r = label_rtx (label);
6510 /* Remove the dispatcher label from the list of non-local labels
6511 since the receiver labels have been added to it above. */
6512 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6517 case BUILT_IN_SETJMP_RECEIVER:
6518 /* __builtin_setjmp_receiver is passed the receiver label. */
6519 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6521 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6522 rtx label_r = label_rtx (label);
6524 expand_builtin_setjmp_receiver (label_r);
6529 /* __builtin_longjmp is passed a pointer to an array of five words.
6530 It's similar to the C library longjmp function but works with
6531 __builtin_setjmp above. */
6532 case BUILT_IN_LONGJMP:
6533 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6535 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6536 VOIDmode, EXPAND_NORMAL);
6537 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6539 if (value != const1_rtx)
6541 error ("%<__builtin_longjmp%> second argument must be 1");
6545 expand_builtin_longjmp (buf_addr, value);
6550 case BUILT_IN_NONLOCAL_GOTO:
6551 target = expand_builtin_nonlocal_goto (exp);
6556 /* This updates the setjmp buffer that is its argument with the value
6557 of the current stack pointer. */
6558 case BUILT_IN_UPDATE_SETJMP_BUF:
6559 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6562 = expand_normal (CALL_EXPR_ARG (exp, 0));
6564 expand_builtin_update_setjmp_buf (buf_addr);
6570 expand_builtin_trap ();
6573 case BUILT_IN_PRINTF:
6574 target = expand_builtin_printf (exp, target, mode, false);
6579 case BUILT_IN_PRINTF_UNLOCKED:
6580 target = expand_builtin_printf (exp, target, mode, true);
6585 case BUILT_IN_FPUTS:
6586 target = expand_builtin_fputs (exp, target, false);
6590 case BUILT_IN_FPUTS_UNLOCKED:
6591 target = expand_builtin_fputs (exp, target, true);
6596 case BUILT_IN_FPRINTF:
6597 target = expand_builtin_fprintf (exp, target, mode, false);
6602 case BUILT_IN_FPRINTF_UNLOCKED:
6603 target = expand_builtin_fprintf (exp, target, mode, true);
6608 case BUILT_IN_SPRINTF:
6609 target = expand_builtin_sprintf (exp, target, mode);
6614 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6615 case BUILT_IN_SIGNBITD32:
6616 case BUILT_IN_SIGNBITD64:
6617 case BUILT_IN_SIGNBITD128:
6618 target = expand_builtin_signbit (exp, target);
6623 /* Various hooks for the DWARF 2 __throw routine. */
6624 case BUILT_IN_UNWIND_INIT:
6625 expand_builtin_unwind_init ();
6627 case BUILT_IN_DWARF_CFA:
6628 return virtual_cfa_rtx;
6629 #ifdef DWARF2_UNWIND_INFO
6630 case BUILT_IN_DWARF_SP_COLUMN:
6631 return expand_builtin_dwarf_sp_column ();
6632 case BUILT_IN_INIT_DWARF_REG_SIZES:
6633 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6636 case BUILT_IN_FROB_RETURN_ADDR:
6637 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6638 case BUILT_IN_EXTRACT_RETURN_ADDR:
6639 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6640 case BUILT_IN_EH_RETURN:
6641 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6642 CALL_EXPR_ARG (exp, 1));
6644 #ifdef EH_RETURN_DATA_REGNO
6645 case BUILT_IN_EH_RETURN_DATA_REGNO:
6646 return expand_builtin_eh_return_data_regno (exp);
6648 case BUILT_IN_EXTEND_POINTER:
6649 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6651 case BUILT_IN_VA_START:
6652 case BUILT_IN_STDARG_START:
6653 return expand_builtin_va_start (exp);
6654 case BUILT_IN_VA_END:
6655 return expand_builtin_va_end (exp);
6656 case BUILT_IN_VA_COPY:
6657 return expand_builtin_va_copy (exp);
6658 case BUILT_IN_EXPECT:
6659 return expand_builtin_expect (exp, target);
6660 case BUILT_IN_PREFETCH:
6661 expand_builtin_prefetch (exp);
6664 case BUILT_IN_PROFILE_FUNC_ENTER:
6665 return expand_builtin_profile_func (false);
6666 case BUILT_IN_PROFILE_FUNC_EXIT:
6667 return expand_builtin_profile_func (true);
6669 case BUILT_IN_INIT_TRAMPOLINE:
6670 return expand_builtin_init_trampoline (exp);
6671 case BUILT_IN_ADJUST_TRAMPOLINE:
6672 return expand_builtin_adjust_trampoline (exp);
6675 case BUILT_IN_EXECL:
6676 case BUILT_IN_EXECV:
6677 case BUILT_IN_EXECLP:
6678 case BUILT_IN_EXECLE:
6679 case BUILT_IN_EXECVP:
6680 case BUILT_IN_EXECVE:
6681 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6686 case BUILT_IN_FETCH_AND_ADD_1:
6687 case BUILT_IN_FETCH_AND_ADD_2:
6688 case BUILT_IN_FETCH_AND_ADD_4:
6689 case BUILT_IN_FETCH_AND_ADD_8:
6690 case BUILT_IN_FETCH_AND_ADD_16:
6691 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6692 target = expand_builtin_sync_operation (mode, exp, PLUS,
6693 false, target, ignore);
6698 case BUILT_IN_FETCH_AND_SUB_1:
6699 case BUILT_IN_FETCH_AND_SUB_2:
6700 case BUILT_IN_FETCH_AND_SUB_4:
6701 case BUILT_IN_FETCH_AND_SUB_8:
6702 case BUILT_IN_FETCH_AND_SUB_16:
6703 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6704 target = expand_builtin_sync_operation (mode, exp, MINUS,
6705 false, target, ignore);
6710 case BUILT_IN_FETCH_AND_OR_1:
6711 case BUILT_IN_FETCH_AND_OR_2:
6712 case BUILT_IN_FETCH_AND_OR_4:
6713 case BUILT_IN_FETCH_AND_OR_8:
6714 case BUILT_IN_FETCH_AND_OR_16:
6715 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6716 target = expand_builtin_sync_operation (mode, exp, IOR,
6717 false, target, ignore);
6722 case BUILT_IN_FETCH_AND_AND_1:
6723 case BUILT_IN_FETCH_AND_AND_2:
6724 case BUILT_IN_FETCH_AND_AND_4:
6725 case BUILT_IN_FETCH_AND_AND_8:
6726 case BUILT_IN_FETCH_AND_AND_16:
6727 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6728 target = expand_builtin_sync_operation (mode, exp, AND,
6729 false, target, ignore);
6734 case BUILT_IN_FETCH_AND_XOR_1:
6735 case BUILT_IN_FETCH_AND_XOR_2:
6736 case BUILT_IN_FETCH_AND_XOR_4:
6737 case BUILT_IN_FETCH_AND_XOR_8:
6738 case BUILT_IN_FETCH_AND_XOR_16:
6739 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6740 target = expand_builtin_sync_operation (mode, exp, XOR,
6741 false, target, ignore);
6746 case BUILT_IN_FETCH_AND_NAND_1:
6747 case BUILT_IN_FETCH_AND_NAND_2:
6748 case BUILT_IN_FETCH_AND_NAND_4:
6749 case BUILT_IN_FETCH_AND_NAND_8:
6750 case BUILT_IN_FETCH_AND_NAND_16:
6751 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6752 target = expand_builtin_sync_operation (mode, exp, NOT,
6753 false, target, ignore);
6758 case BUILT_IN_ADD_AND_FETCH_1:
6759 case BUILT_IN_ADD_AND_FETCH_2:
6760 case BUILT_IN_ADD_AND_FETCH_4:
6761 case BUILT_IN_ADD_AND_FETCH_8:
6762 case BUILT_IN_ADD_AND_FETCH_16:
6763 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6764 target = expand_builtin_sync_operation (mode, exp, PLUS,
6765 true, target, ignore);
6770 case BUILT_IN_SUB_AND_FETCH_1:
6771 case BUILT_IN_SUB_AND_FETCH_2:
6772 case BUILT_IN_SUB_AND_FETCH_4:
6773 case BUILT_IN_SUB_AND_FETCH_8:
6774 case BUILT_IN_SUB_AND_FETCH_16:
6775 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6776 target = expand_builtin_sync_operation (mode, exp, MINUS,
6777 true, target, ignore);
6782 case BUILT_IN_OR_AND_FETCH_1:
6783 case BUILT_IN_OR_AND_FETCH_2:
6784 case BUILT_IN_OR_AND_FETCH_4:
6785 case BUILT_IN_OR_AND_FETCH_8:
6786 case BUILT_IN_OR_AND_FETCH_16:
6787 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6788 target = expand_builtin_sync_operation (mode, exp, IOR,
6789 true, target, ignore);
6794 case BUILT_IN_AND_AND_FETCH_1:
6795 case BUILT_IN_AND_AND_FETCH_2:
6796 case BUILT_IN_AND_AND_FETCH_4:
6797 case BUILT_IN_AND_AND_FETCH_8:
6798 case BUILT_IN_AND_AND_FETCH_16:
6799 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6800 target = expand_builtin_sync_operation (mode, exp, AND,
6801 true, target, ignore);
6806 case BUILT_IN_XOR_AND_FETCH_1:
6807 case BUILT_IN_XOR_AND_FETCH_2:
6808 case BUILT_IN_XOR_AND_FETCH_4:
6809 case BUILT_IN_XOR_AND_FETCH_8:
6810 case BUILT_IN_XOR_AND_FETCH_16:
6811 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6812 target = expand_builtin_sync_operation (mode, exp, XOR,
6813 true, target, ignore);
6818 case BUILT_IN_NAND_AND_FETCH_1:
6819 case BUILT_IN_NAND_AND_FETCH_2:
6820 case BUILT_IN_NAND_AND_FETCH_4:
6821 case BUILT_IN_NAND_AND_FETCH_8:
6822 case BUILT_IN_NAND_AND_FETCH_16:
6823 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6824 target = expand_builtin_sync_operation (mode, exp, NOT,
6825 true, target, ignore);
6830 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6831 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6832 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6833 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6834 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6835 if (mode == VOIDmode)
6836 mode = TYPE_MODE (boolean_type_node);
6837 if (!target || !register_operand (target, mode))
6838 target = gen_reg_rtx (mode);
6840 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6841 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6846 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6847 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6848 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6849 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6850 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6851 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6852 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6857 case BUILT_IN_LOCK_TEST_AND_SET_1:
6858 case BUILT_IN_LOCK_TEST_AND_SET_2:
6859 case BUILT_IN_LOCK_TEST_AND_SET_4:
6860 case BUILT_IN_LOCK_TEST_AND_SET_8:
6861 case BUILT_IN_LOCK_TEST_AND_SET_16:
6862 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6863 target = expand_builtin_lock_test_and_set (mode, exp, target);
6868 case BUILT_IN_LOCK_RELEASE_1:
6869 case BUILT_IN_LOCK_RELEASE_2:
6870 case BUILT_IN_LOCK_RELEASE_4:
6871 case BUILT_IN_LOCK_RELEASE_8:
6872 case BUILT_IN_LOCK_RELEASE_16:
6873 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6874 expand_builtin_lock_release (mode, exp);
6877 case BUILT_IN_SYNCHRONIZE:
6878 expand_builtin_synchronize ();
6881 case BUILT_IN_OBJECT_SIZE:
6882 return expand_builtin_object_size (exp);
6884 case BUILT_IN_MEMCPY_CHK:
6885 case BUILT_IN_MEMPCPY_CHK:
6886 case BUILT_IN_MEMMOVE_CHK:
6887 case BUILT_IN_MEMSET_CHK:
6888 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6893 case BUILT_IN_STRCPY_CHK:
6894 case BUILT_IN_STPCPY_CHK:
6895 case BUILT_IN_STRNCPY_CHK:
6896 case BUILT_IN_STRCAT_CHK:
6897 case BUILT_IN_STRNCAT_CHK:
6898 case BUILT_IN_SNPRINTF_CHK:
6899 case BUILT_IN_VSNPRINTF_CHK:
6900 maybe_emit_chk_warning (exp, fcode);
6903 case BUILT_IN_SPRINTF_CHK:
6904 case BUILT_IN_VSPRINTF_CHK:
6905 maybe_emit_sprintf_chk_warning (exp, fcode);
6908 default: /* just do library call, if unknown builtin */
6912 /* The switch statement above can drop through to cause the function
6913 to be called normally. */
6914 return expand_call (exp, target, ignore);
6917 /* Determine whether a tree node represents a call to a built-in
6918 function. If the tree T is a call to a built-in function with
6919 the right number of arguments of the appropriate types, return
6920 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6921 Otherwise the return value is END_BUILTINS. */
6923 enum built_in_function
6924 builtin_mathfn_code (tree t)
6926 tree fndecl, arg, parmlist;
6927 tree argtype, parmtype;
6928 call_expr_arg_iterator iter;
/* Only direct calls (function operand is an ADDR_EXPR) can be
   recognized as builtin calls. */
6930 if (TREE_CODE (t) != CALL_EXPR
6931 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6932 return END_BUILTINS;
/* Reject anything that is not a builtin FUNCTION_DECL; machine-dependent
   (BUILT_IN_MD) builtins are deliberately excluded as well.  */
6934 fndecl = get_callee_fndecl (t);
6935 if (fndecl == NULL_TREE
6936 || TREE_CODE (fndecl) != FUNCTION_DECL
6937 || ! DECL_BUILT_IN (fndecl)
6938 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6939 return END_BUILTINS;
/* Walk the declared parameter types and the actual call arguments in
   lockstep, checking that each argument's type category (float,
   complex-float, pointer, integral) matches the parameter's.  */
6941 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6942 init_call_expr_arg_iterator (t, &iter);
6943 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6945 /* If a function doesn't take a variable number of arguments,
6946 the last element in the list will have type `void'. */
6947 parmtype = TREE_VALUE (parmlist);
6948 if (VOID_TYPE_P (parmtype))
/* Reached the `void' terminator: the call matches only if no
   arguments remain. */
6950 if (more_call_expr_args_p (&iter))
6951 return END_BUILTINS;
6952 return DECL_FUNCTION_CODE (fndecl);
/* Too few arguments for this parameter list. */
6955 if (! more_call_expr_args_p (&iter))
6956 return END_BUILTINS;
6958 arg = next_call_expr_arg (&iter);
6959 argtype = TREE_TYPE (arg);
6961 if (SCALAR_FLOAT_TYPE_P (parmtype))
6963 if (! SCALAR_FLOAT_TYPE_P (argtype))
6964 return END_BUILTINS;
6966 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6968 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6969 return END_BUILTINS;
6971 else if (POINTER_TYPE_P (parmtype))
6973 if (! POINTER_TYPE_P (argtype))
6974 return END_BUILTINS;
6976 else if (INTEGRAL_TYPE_P (parmtype))
6978 if (! INTEGRAL_TYPE_P (argtype))
6979 return END_BUILTINS;
/* Parameter type in none of the recognized categories. */
6982 return END_BUILTINS;
6985 /* Variable-length argument list. */
6986 return DECL_FUNCTION_CODE (fndecl);
6989 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6990 evaluate to a constant. */
6993 fold_builtin_constant_p (tree arg)
6995 /* We return 1 for a numeric type that's known to be a constant
6996 value at compile-time or for an aggregate type that's a
6997 literal constant. */
7000 /* If we know this is a constant, emit the constant of one. */
7001 if (CONSTANT_CLASS_P (arg)
7002 || (TREE_CODE (arg) == CONSTRUCTOR
7003 && TREE_CONSTANT (arg)))
7004 return integer_one_node;
/* The address of a string literal -- either &"str" or &"str"[0]
   (ARRAY_REF with a zero index) -- is also a compile-time constant.  */
7005 if (TREE_CODE (arg) == ADDR_EXPR)
7007 tree op = TREE_OPERAND (arg, 0);
7008 if (TREE_CODE (op) == STRING_CST
7009 || (TREE_CODE (op) == ARRAY_REF
7010 && integer_zerop (TREE_OPERAND (op, 1))
7011 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7012 return integer_one_node;
7015 /* If this expression has side effects, show we don't know it to be a
7016 constant. Likewise if it's a pointer or aggregate type since in
7017 those case we only want literals, since those are only optimized
7018 when generating RTL, not later.
7019 And finally, if we are compiling an initializer, not code, we
7020 need to return a definite result now; there's not going to be any
7021 more optimization done. */
7022 if (TREE_SIDE_EFFECTS (arg)
7023 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7024 || POINTER_TYPE_P (TREE_TYPE (arg))
7026 || folding_initializer)
7027 return integer_zero_node;
/* NOTE(review): the "still undecided" tail of this function (returning
   no fold so later passes can retry) is omitted from this listing.  */
7032 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
7033 comparison against the argument will fold to a constant. In practice,
7034 this means a true constant or the address of a non-weak symbol. */
7037 fold_builtin_expect (tree arg)
7041 /* If the argument isn't invariant, then there's nothing we can do. */
7042 if (!TREE_INVARIANT (arg))
7045 /* If we're looking at an address of a weak decl, then do not fold. */
/* Strip COMPONENT_REFs/ARRAY_REFs down to the base decl: a weak
   symbol's address is not a compile-time constant (it may resolve to
   null at link time), so comparisons against it must not fold.  */
7048 if (TREE_CODE (inner) == ADDR_EXPR)
7052 inner = TREE_OPERAND (inner, 0);
7054 while (TREE_CODE (inner) == COMPONENT_REF
7055 || TREE_CODE (inner) == ARRAY_REF);
7056 if (DECL_P (inner) && DECL_WEAK (inner))
7060 /* Otherwise, ARG already has the proper type for the return value. */
/* NOTE(review): the early-return statements for the guard conditions
   above are omitted from this listing (line numbers jump).  */
7064 /* Fold a call to __builtin_classify_type with argument ARG. */
7067 fold_builtin_classify_type (tree arg)
/* First return covers the no-argument call (classified as
   no_type_class); its guard condition is omitted from this listing.  */
7070 return build_int_cst (NULL_TREE, no_type_class);
7072 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7075 /* Fold a call to __builtin_strlen with argument ARG. */
7078 fold_builtin_strlen (tree arg)
7080 if (!validate_arg (arg, POINTER_TYPE))
/* c_strlen computes the length only when ARG points at a known
   string constant; otherwise no fold is possible. */
7084 tree len = c_strlen (arg, 0);
7088 /* Convert from the internal "sizetype" type to "size_t". */
7090 len = fold_convert (size_type_node, len);
7098 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7101 fold_builtin_inf (tree type, int warn)
7103 REAL_VALUE_TYPE real;
7105 /* __builtin_inff is intended to be usable to define INFINITY on all
7106 targets. If an infinity is not available, INFINITY expands "to a
7107 positive constant of type float that overflows at translation
7108 time", footnote "In this case, using INFINITY will violate the
7109 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7110 Thus we pedwarn to ensure this constraint violation is
7112 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7113 pedwarn ("target format does not support infinity");
/* Build the infinity constant in TYPE; the real_inf initialization
   line is omitted from this listing. */
7116 return build_real (type, real);
7119 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG.
   QUIET selects a quiet (non-signaling) vs. signaling NaN. */
7122 fold_builtin_nan (tree arg, tree type, int quiet)
7124 REAL_VALUE_TYPE real;
7127 if (!validate_arg (arg, POINTER_TYPE))
/* The fold only succeeds when ARG is a string constant we can read
   at compile time (it encodes the NaN payload). */
7129 str = c_getstr (arg);
7133 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7136 return build_real (type, real);
7139 /* Return true if the floating point expression T has an integer value.
7140 We also allow +Inf, -Inf and NaN to be considered integer values. */
7143 integer_valued_real_p (tree t)
7145 switch (TREE_CODE (t))
7152 case NON_LVALUE_EXPR:
7153 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Compound-like node: the value is the second operand. */
7158 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
/* Binary arithmetic is integer-valued iff both operands are. */
7165 return integer_valued_real_p (TREE_OPERAND (t, 0))
7166 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* Conditional: both selectable arms must be integer-valued. */
7169 return integer_valued_real_p (TREE_OPERAND (t, 1))
7170 && integer_valued_real_p (TREE_OPERAND (t, 2));
7173 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* Conversion: from an integer type it is trivially integer-valued;
   from a real type, recurse into the converted operand. */
7177 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7178 if (TREE_CODE (type) == INTEGER_TYPE)
7180 if (TREE_CODE (type) == REAL_TYPE)
7181 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Calls to rounding builtins always produce integer values. */
7186 switch (builtin_mathfn_code (t))
7188 CASE_FLT_FN (BUILT_IN_CEIL):
7189 CASE_FLT_FN (BUILT_IN_FLOOR):
7190 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7191 CASE_FLT_FN (BUILT_IN_RINT):
7192 CASE_FLT_FN (BUILT_IN_ROUND):
7193 CASE_FLT_FN (BUILT_IN_TRUNC):
7196 CASE_FLT_FN (BUILT_IN_FMIN):
7197 CASE_FLT_FN (BUILT_IN_FMAX):
/* fmin/fmax of two integer values is itself an integer value. */
7198 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7199 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7212 /* FNDECL is assumed to be a builtin where truncation can be propagated
7213 across (for instance floor((double)f) == (double)floorf (f).
7214 Do the transformation for a call with argument ARG. */
7217 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7219 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7221 if (!validate_arg (arg, REAL_TYPE))
7224 /* Integer rounding functions are idempotent. */
7225 if (fcode == builtin_mathfn_code (arg))
7228 /* If argument is already integer valued, and we don't need to worry
7229 about setting errno, there's no need to perform rounding. */
7230 if (! flag_errno_math && integer_valued_real_p (arg))
/* Narrowing transform: when the argument is a widened narrower float,
   call the narrower-precision variant of the function instead and
   widen the result (e.g. floor((double)f) -> (double)floorf(f)).  */
7235 tree arg0 = strip_float_extensions (arg);
7236 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7237 tree newtype = TREE_TYPE (arg0);
7240 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7241 && (decl = mathfn_built_in (newtype, fcode)))
7242 return fold_convert (ftype,
7243 build_call_expr (decl, 1,
7244 fold_convert (newtype, arg0)));
7249 /* FNDECL is assumed to be builtin which can narrow the FP type of
7250 the argument, for instance lround((double)f) -> lroundf (f).
7251 Do the transformation for a call with argument ARG. */
7254 fold_fixed_mathfn (tree fndecl, tree arg)
7256 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7258 if (!validate_arg (arg, REAL_TYPE))
7261 /* If argument is already integer valued, and we don't need to worry
7262 about setting errno, there's no need to perform rounding. */
7263 if (! flag_errno_math && integer_valued_real_p (arg))
7264 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Narrow lround((double)f) etc. to the float variant when the
   argument is really a widened narrower float. */
7268 tree ftype = TREE_TYPE (arg);
7269 tree arg0 = strip_float_extensions (arg);
7270 tree newtype = TREE_TYPE (arg0);
7273 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7274 && (decl = mathfn_built_in (newtype, fcode)))
7275 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7278 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7279 sizeof (long long) == sizeof (long). */
7280 if (TYPE_PRECISION (long_long_integer_type_node)
7281 == TYPE_PRECISION (long_integer_type_node))
7283 tree newfn = NULL_TREE;
7286 CASE_FLT_FN (BUILT_IN_LLCEIL):
7287 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7290 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7291 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7294 CASE_FLT_FN (BUILT_IN_LLROUND):
7295 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7298 CASE_FLT_FN (BUILT_IN_LLRINT):
7299 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Call the long variant and convert to the original long long
   return type (same width, so the value is unchanged). */
7308 tree newcall = build_call_expr(newfn, 1, arg);
7309 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7316 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7317 return type. Return NULL_TREE if no simplification can be made. */
7320 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7324 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7325 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7328 /* Calculate the result when the argument is a constant. */
7329 if (TREE_CODE (arg) == COMPLEX_CST
7330 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7334 if (TREE_CODE (arg) == COMPLEX_EXPR)
7336 tree real = TREE_OPERAND (arg, 0);
7337 tree imag = TREE_OPERAND (arg, 1);
7339 /* If either part is zero, cabs is fabs of the other. */
7340 if (real_zerop (real))
7341 return fold_build1 (ABS_EXPR, type, imag)
;
7342 if (real_zerop (imag))
7343 return fold_build1 (ABS_EXPR, type, real);
7345 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7346 if (flag_unsafe_math_optimizations
7347 && operand_equal_p (real, imag, OEP_PURE_SAME))
7349 const REAL_VALUE_TYPE sqrt2_trunc
7350 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7352 return fold_build2 (MULT_EXPR, type,
7353 fold_build1 (ABS_EXPR, type, real),
7354 build_real (type, sqrt2_trunc));
7358 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7359 if (TREE_CODE (arg) == NEGATE_EXPR
7360 || TREE_CODE (arg) == CONJ_EXPR)
7361 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7363 /* Don't do this when optimizing for size. */
/* Open-code cabs(z) as sqrt(re*re + im*im); correct only under
   -funsafe-math-optimizations since it ignores intermediate
   overflow/underflow that a library cabs would avoid.  */
7364 if (flag_unsafe_math_optimizations
7365 && optimize && !optimize_size)
7367 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7369 if (sqrtfn != NULL_TREE)
7371 tree rpart, ipart, result;
/* Save the argument so it is evaluated only once despite being
   referenced by both REALPART and IMAGPART below. */
7373 arg = builtin_save_expr (arg);
7375 rpart = fold_build1 (REALPART_EXPR, type, arg);
7376 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7378 rpart = builtin_save_expr (rpart);
7379 ipart = builtin_save_expr (ipart);
7381 result = fold_build2 (PLUS_EXPR, type,
7382 fold_build2 (MULT_EXPR, type,
7384 fold_build2 (MULT_EXPR, type,
7387 return build_call_expr (sqrtfn, 1, result);
7394 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7395 Return NULL_TREE if no simplification can be made. */
7398 fold_builtin_sqrt (tree arg, tree type)
7401 enum built_in_function fcode;
7404 if (!validate_arg (arg, REAL_TYPE))
7407 /* Calculate the result when the argument is a constant. */
7408 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7411 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7412 fcode = builtin_mathfn_code (arg);
7413 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7415 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7416 arg = fold_build2 (MULT_EXPR, type,
7417 CALL_EXPR_ARG (arg, 0),
7418 build_real (type, dconsthalf));
7419 return build_call_expr (expfn, 1, arg);
7422 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7423 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7425 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7429 tree arg0 = CALL_EXPR_ARG (arg, 0);
7431 /* The inner root was either sqrt or cbrt. */
7432 REAL_VALUE_TYPE dconstroot =
7433 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7435 /* Adjust for the outer root. */
/* Halve the exponent: decrementing REAL_EXP divides the value by 2,
   turning 1/N into 1/(2*N).  */
7436 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7437 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7438 tree_root = build_real (type, dconstroot);
7439 return build_call_expr (powfn, 2, arg0, tree_root);
7443 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7444 if (flag_unsafe_math_optimizations
7445 && (fcode == BUILT_IN_POW
7446 || fcode == BUILT_IN_POWF
7447 || fcode == BUILT_IN_POWL))
7449 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7450 tree arg0 = CALL_EXPR_ARG (arg, 0);
7451 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* |x| keeps the transform valid when x may be negative (pow of a
   negative base with fractional exponent would be NaN). */
7453 if (!tree_expr_nonnegative_p (arg0))
7454 arg0 = build1 (ABS_EXPR, type, arg0);
7455 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7456 build_real (type, dconsthalf));
7457 return build_call_expr (powfn, 2, arg0, narg1);
7463 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7464 Return NULL_TREE if no simplification can be made. */
7467 fold_builtin_cbrt (tree arg, tree type)
7469 const enum built_in_function fcode = builtin_mathfn_code (arg);
7472 if (!validate_arg (arg, REAL_TYPE))
7475 /* Calculate the result when the argument is a constant. */
7476 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7479 if (flag_unsafe_math_optimizations)
7481 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7482 if (BUILTIN_EXPONENT_P (fcode))
7484 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7485 const REAL_VALUE_TYPE third_trunc =
7486 real_value_truncate (TYPE_MODE (type), dconstthird);
7487 arg = fold_build2 (MULT_EXPR, type,
7488 CALL_EXPR_ARG (arg, 0),
7489 build_real (type, third_trunc));
7490 return build_call_expr (expfn, 1, arg);
7493 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7494 if (BUILTIN_SQRT_P (fcode))
7496 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7500 tree arg0 = CALL_EXPR_ARG (arg, 0);
7502 REAL_VALUE_TYPE dconstroot = dconstthird;
/* Halving 1/3 (exponent decrement divides by 2) yields 1/6. */
7504 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7505 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7506 tree_root = build_real (type, dconstroot);
7507 return build_call_expr (powfn, 2, arg0, tree_root);
7511 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7512 if (BUILTIN_CBRT_P (fcode))
7514 tree arg0 = CALL_EXPR_ARG (arg, 0);
7515 if (tree_expr_nonnegative_p (arg0))
7517 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7522 REAL_VALUE_TYPE dconstroot;
/* (1/3) * (1/3) = 1/9, computed in extended precision then
   truncated to the target mode. */
7524 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7525 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7526 tree_root = build_real (type, dconstroot);
7527 return build_call_expr (powfn, 2, arg0, tree_root);
7532 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7533 if (fcode == BUILT_IN_POW
7534 || fcode == BUILT_IN_POWF
7535 || fcode == BUILT_IN_POWL)
7537 tree arg00 = CALL_EXPR_ARG (arg, 0);
7538 tree arg01 = CALL_EXPR_ARG (arg, 1);
7539 if (tree_expr_nonnegative_p (arg00))
7541 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7542 const REAL_VALUE_TYPE dconstroot
7543 = real_value_truncate (TYPE_MODE (type), dconstthird);
7544 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7545 build_real (type, dconstroot));
7546 return build_call_expr (powfn, 2, arg00, narg01);
7553 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7554 TYPE is the type of the return value. Return NULL_TREE if no
7555 simplification can be made. */
7558 fold_builtin_cos (tree arg, tree type, tree fndecl)
7562 if (!validate_arg (arg, REAL_TYPE))
7565 /* Calculate the result when the argument is a constant. */
7566 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7569 /* Optimize cos(-x) into cos (x). */
/* cos is an even function, so sign operations on the argument can be
   stripped; fold_strip_sign_ops returns non-null only if it changed
   something. */
7570 if ((narg = fold_strip_sign_ops (arg)))
7571 return build_call_expr (fndecl, 1, narg);
7576 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7577 Return NULL_TREE if no simplification can be made. */
7580 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7582 if (validate_arg (arg, REAL_TYPE))
7586 /* Calculate the result when the argument is a constant. */
7587 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7590 /* Optimize cosh(-x) into cosh (x). */
/* cosh is an even function, so sign operations can be stripped. */
7591 if ((narg = fold_strip_sign_ops (arg)))
7592 return build_call_expr (fndecl, 1, narg);
7598 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7599 Return NULL_TREE if no simplification can be made. */
7602 fold_builtin_tan (tree arg, tree type)
7604 enum built_in_function fcode;
7607 if (!validate_arg (arg, REAL_TYPE))
7610 /* Calculate the result when the argument is a constant. */
7611 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7614 /* Optimize tan(atan(x)) = x. */
/* Only valid under -funsafe-math-optimizations: tan(atan(x)) == x
   exactly in the reals, but not necessarily after rounding. */
7615 fcode = builtin_mathfn_code (arg);
7616 if (flag_unsafe_math_optimizations
7617 && (fcode == BUILT_IN_ATAN
7618 || fcode == BUILT_IN_ATANF
7619 || fcode == BUILT_IN_ATANL))
7620 return CALL_EXPR_ARG (arg, 0);
7625 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7626 NULL_TREE if no simplification can be made.  ARG0 is the angle,
7627 ARG1/ARG2 point to where sin/cos results are stored. */
7629 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7634 if (!validate_arg (arg0, REAL_TYPE)
7635 || !validate_arg (arg1, POINTER_TYPE)
7636 || !validate_arg (arg2, POINTER_TYPE))
7639 type = TREE_TYPE (arg0);
7641 /* Calculate the result when the argument is a constant. */
7642 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7645 /* Canonicalize sincos to cexpi. */
7646 if (!TARGET_C99_FUNCTIONS)
7648 fn = mathfn_built_in (type, BUILT_IN_CEXPI)
;
/* cexpi(x) = cos(x) + i*sin(x); save the call so both the real and
   imaginary projections below evaluate it exactly once. */
7652 call = build_call_expr (fn, 1, arg0);
7653 call = builtin_save_expr (call);
/* Emit *arg1 = imagpart (sin) then *arg2 = realpart (cos) as a
   COMPOUND_EXPR. */
7655 return build2 (COMPOUND_EXPR, type,
7656 build2 (MODIFY_EXPR, void_type_node,
7657 build_fold_indirect_ref (arg1),
7658 build1 (IMAGPART_EXPR, type, call)),
7659 build2 (MODIFY_EXPR, void_type_node,
7660 build_fold_indirect_ref (arg2),
7661 build1 (REALPART_EXPR, type, call)));
7664 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7665 NULL_TREE if no simplification can be made. */
7668 fold_builtin_cexp (tree arg0, tree type)
7671 tree realp, imagp, ifn;
7673 if (!validate_arg (arg0, COMPLEX_TYPE))
/* RTYPE is the underlying real (element) type of the complex arg. */
7676 rtype = TREE_TYPE (TREE_TYPE (arg0));
7678 /* In case we can figure out the real part of arg0 and it is constant zero
7680 if (!TARGET_C99_FUNCTIONS)
7682 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
/* cexp(0 + i*y) == cexpi(y), so a provably-zero real part lets us
   call cexpi on the imaginary part directly. */
7686 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7687 && real_zerop (realp))
7689 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7690 return build_call_expr (ifn, 1, narg);
7693 /* In case we can easily decompose real and imaginary parts split cexp
7694 to exp (r) * cexpi (i). */
7695 if (flag_unsafe_math_optimizations
7698 tree rfn, rcall, icall;
7700 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7704 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
/* Save both calls: each feeds two MULT_EXPR operands below. */
7708 icall = build_call_expr (ifn, 1, imagp);
7709 icall = builtin_save_expr (icall);
7710 rcall = build_call_expr (rfn, 1, realp);
7711 rcall = builtin_save_expr (rcall);
7712 return build2 (COMPLEX_EXPR, type,
7713 build2 (MULT_EXPR, rtype,
7715 build1 (REALPART_EXPR, rtype, icall)),
7716 build2 (MULT_EXPR, rtype,
7718 build1 (IMAGPART_EXPR, rtype, icall)));
7724 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7725 Return NULL_TREE if no simplification can be made. */
7728 fold_builtin_trunc (tree fndecl, tree arg)
7730 if (!validate_arg (arg, REAL_TYPE))
7733 /* Optimize trunc of constant value. */
7734 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7736 REAL_VALUE_TYPE r, x;
7737 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7739 x = TREE_REAL_CST (arg);
7740 real_trunc (&r, TYPE_MODE (type), &x);
7741 return build_real (type, r);
/* Otherwise fall back to the generic truncation-propagation fold
   (e.g. trunc((double)f) -> (double)truncf(f)). */
7744 return fold_trunc_transparent_mathfn (fndecl, arg);
7747 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7748 Return NULL_TREE if no simplification can be made. */
7751 fold_builtin_floor (tree fndecl, tree arg)
7753 if (!validate_arg (arg, REAL_TYPE))
7756 /* Optimize floor of constant value. */
7757 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7761 x = TREE_REAL_CST (arg);
/* Do not fold a NaN constant when errno semantics are required. */
7762 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7764 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7767 real_floor (&r, TYPE_MODE (type), &x);
7768 return build_real (type, r);
7772 /* Fold floor (x) where x is nonnegative to trunc (x). */
/* For x >= 0, floor and trunc agree (both round toward zero/down). */
7773 if (tree_expr_nonnegative_p (arg))
7775 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7777 return build_call_expr (truncfn, 1, arg);
7780 return fold_trunc_transparent_mathfn (fndecl, arg);
7783 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7784 Return NULL_TREE if no simplification can be made. */
7787 fold_builtin_ceil (tree fndecl, tree arg)
7789 if (!validate_arg (arg, REAL_TYPE))
7792 /* Optimize ceil of constant value. */
7793 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7797 x = TREE_REAL_CST (arg);
/* Do not fold a NaN constant when errno semantics are required. */
7798 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7800 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7803 real_ceil (&r, TYPE_MODE (type), &x);
7804 return build_real (type, r);
7808 return fold_trunc_transparent_mathfn (fndecl, arg);
7811 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7812 Return NULL_TREE if no simplification can be made. */
7815 fold_builtin_round (tree fndecl, tree arg)
7817 if (!validate_arg (arg, REAL_TYPE))
7820 /* Optimize round of constant value. */
7821 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7825 x = TREE_REAL_CST (arg);
/* Do not fold a NaN constant when errno semantics are required. */
7826 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7828 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7831 real_round (&r, TYPE_MODE (type), &x);
7832 return build_real (type, r);
7836 return fold_trunc_transparent_mathfn (fndecl, arg);
7839 /* Fold function call to builtin lround, lroundf or lroundl (or the
7840 corresponding long long versions) and other rounding functions. ARG
7841 is the argument to the call. Return NULL_TREE if no simplification
7845 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7847 if (!validate_arg (arg, REAL_TYPE))
7850 /* Optimize lround of constant value. */
7851 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7853 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Only finite values can be converted to an integer constant; Inf
   and NaN would invoke undefined library behavior. */
7855 if (real_isfinite (&x))
7857 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7858 tree ftype = TREE_TYPE (arg);
7859 unsigned HOST_WIDE_INT lo2;
7860 HOST_WIDE_INT hi, lo;
/* Round according to which family of functions this is. */
7863 switch (DECL_FUNCTION_CODE (fndecl))
7865 CASE_FLT_FN (BUILT_IN_LFLOOR):
7866 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7867 real_floor (&r, TYPE_MODE (ftype), &x);
7870 CASE_FLT_FN (BUILT_IN_LCEIL):
7871 CASE_FLT_FN (BUILT_IN_LLCEIL):
7872 real_ceil (&r, TYPE_MODE (ftype), &x);
7875 CASE_FLT_FN (BUILT_IN_LROUND):
7876 CASE_FLT_FN (BUILT_IN_LLROUND):
7877 real_round (&r, TYPE_MODE (ftype), &x);
/* Fold only if the rounded value fits the integer return type. */
7884 REAL_VALUE_TO_INT (&lo, &hi, r);
7885 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7886 return build_int_cst_wide (itype, lo2, hi);
7890 switch (DECL_FUNCTION_CODE (fndecl))
7892 CASE_FLT_FN (BUILT_IN_LFLOOR):
7893 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7894 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7895 if (tree_expr_nonnegative_p (arg))
7896 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7902 return fold_fixed_mathfn (fndecl, arg);
7905 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7906 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7907 the argument to the call. Return NULL_TREE if no simplification can
7911 fold_builtin_bitop (tree fndecl, tree arg)
7913 if (!validate_arg (arg, INTEGER_TYPE))
7916 /* Optimize for constant argument. */
7917 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7919 HOST_WIDE_INT hi, width, result;
7920 unsigned HOST_WIDE_INT lo;
7923 type = TREE_TYPE (arg);
7924 width = TYPE_PRECISION (type);
/* The constant is held as a lo/hi pair of HOST_WIDE_INTs. */
7925 lo = TREE_INT_CST_LOW (arg);
7927 /* Clear all the bits that are beyond the type's precision. */
7928 if (width > HOST_BITS_PER_WIDE_INT)
7930 hi = TREE_INT_CST_HIGH (arg);
7931 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7932 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7937 if (width < HOST_BITS_PER_WIDE_INT)
7938 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7941 switch (DECL_FUNCTION_CODE (fndecl))
7943 CASE_INT_FN (BUILT_IN_FFS):
/* ffs: 1-based index of least-significant set bit; lo & -lo
   isolates that bit so exact_log2 gives its position. */
7945 result = exact_log2 (lo & -lo) + 1;
7947 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7952 CASE_INT_FN (BUILT_IN_CLZ):
7954 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7956 result = width - floor_log2 (lo) - 1;
/* clz(0) is target-defined; only fold if the target defines it. */
7957 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7961 CASE_INT_FN (BUILT_IN_CTZ):
7963 result = exact_log2 (lo & -lo);
7965 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7966 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7970 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* Kernighan's trick: x &= x - 1 clears the lowest set bit. */
7973 result++, lo &= lo - 1;
7975 result++, hi &= hi - 1;
7978 CASE_INT_FN (BUILT_IN_PARITY):
7981 result++, lo &= lo - 1;
7983 result++, hi &= hi - 1;
7991 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7997 /* Fold function call to builtin_bswap and the long and long long
7998 variants. Return NULL_TREE if no simplification can be made. */
8000 fold_builtin_bswap (tree fndecl, tree arg)
8002 if (! validate_arg (arg, INTEGER_TYPE))
8005 /* Optimize constant value. */
8006 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8008 HOST_WIDE_INT hi, width, r_hi = 0;
8009 unsigned HOST_WIDE_INT lo, r_lo = 0;
8012 type = TREE_TYPE (arg);
8013 width = TYPE_PRECISION (type);
8014 lo = TREE_INT_CST_LOW (arg);
8015 hi = TREE_INT_CST_HIGH (arg);
8017 switch (DECL_FUNCTION_CODE (fndecl))
8019 case BUILT_IN_BSWAP32:
8020 case BUILT_IN_BSWAP64:
/* Move each byte at offset S to the mirrored offset D, picking it
   out of the lo or hi word as appropriate.  */
8024 for (s = 0; s < width; s += 8)
8026 int d = width - s - 8;
8027 unsigned HOST_WIDE_INT byte;
8029 if (s < HOST_BITS_PER_WIDE_INT)
8030 byte = (lo >> s) & 0xff;
8032 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8034 if (d < HOST_BITS_PER_WIDE_INT)
8037 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
/* Build the swapped constant; use the wide form only when the
   result needs more than one HOST_WIDE_INT.  */
8047 if (width < HOST_BITS_PER_WIDE_INT)
8048 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8050 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8056 /* Return true if EXPR is the real constant contained in VALUE.
   A COMPLEX_CST also matches when its real part equals VALUE and its
   imaginary part is zero. */
8059 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8063 return ((TREE_CODE (expr) == REAL_CST
8064 && !TREE_OVERFLOW (expr)
8065 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8066 || (TREE_CODE (expr) == COMPLEX_CST
8067 && real_dconstp (TREE_REALPART (expr), value)
8068 && real_zerop (TREE_IMAGPART (expr))));
8071 /* A subroutine of fold_builtin to fold the various logarithmic
8072 functions. Return NULL_TREE if no simplification can be made.
8073 FUNC is the corresponding MPFR logarithm function. */
/* FNDECL is the declaration of the logN builtin being folded; ARG is its
   single argument.  The folded result, if any, has the return type of
   FNDECL.  */
8076 fold_builtin_logarithm (tree fndecl, tree arg,
8077 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8079 if (validate_arg (arg, REAL_TYPE))
8081 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8083 const enum built_in_function fcode = builtin_mathfn_code (arg);
8085 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8086 instead we'll look for 'e' truncated to MODE. So only do
8087 this if flag_unsafe_math_optimizations is set. */
8088 if (flag_unsafe_math_optimizations && func == mpfr_log)
8090 const REAL_VALUE_TYPE e_truncated =
8091 real_value_truncate (TYPE_MODE (type), dconste);
8092 if (real_dconstp (arg, &e_truncated))
8093 return build_real (type, dconst1);
8096 /* Calculate the result when the argument is a constant. */
/* The &dconst0 lower bound rejects arguments for which logN would be
   outside MPFR's exactly-representable domain.  */
8097 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8100 /* Special case, optimize logN(expN(x)) = x. */
8101 if (flag_unsafe_math_optimizations
8102 && ((func == mpfr_log
8103 && (fcode == BUILT_IN_EXP
8104 || fcode == BUILT_IN_EXPF
8105 || fcode == BUILT_IN_EXPL))
8106 || (func == mpfr_log2
8107 && (fcode == BUILT_IN_EXP2
8108 || fcode == BUILT_IN_EXP2F
8109 || fcode == BUILT_IN_EXP2L))
8110 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8111 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8113 /* Optimize logN(func()) for various exponential functions. We
8114 want to determine the value "x" and the power "exponent" in
8115 order to transform logN(x**exponent) into exponent*logN(x). */
8116 if (flag_unsafe_math_optimizations)
8118 tree exponent = 0, x = 0;
8122 CASE_FLT_FN (BUILT_IN_EXP):
8123 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8124 x = build_real (type,
8125 real_value_truncate (TYPE_MODE (type), dconste));
8126 exponent = CALL_EXPR_ARG (arg, 0);
8128 CASE_FLT_FN (BUILT_IN_EXP2):
8129 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8130 x = build_real (type, dconst2);
8131 exponent = CALL_EXPR_ARG (arg, 0);
8133 CASE_FLT_FN (BUILT_IN_EXP10):
8134 CASE_FLT_FN (BUILT_IN_POW10):
8135 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8136 x = build_real (type, dconst10);
8137 exponent = CALL_EXPR_ARG (arg, 0);
8139 CASE_FLT_FN (BUILT_IN_SQRT):
8140 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8141 x = CALL_EXPR_ARG (arg, 0);
8142 exponent = build_real (type, dconsthalf);
8144 CASE_FLT_FN (BUILT_IN_CBRT):
8145 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8146 x = CALL_EXPR_ARG (arg, 0);
8147 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8150 CASE_FLT_FN (BUILT_IN_POW):
8151 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8152 x = CALL_EXPR_ARG (arg, 0);
8153 exponent = CALL_EXPR_ARG (arg, 1);
8159 /* Now perform the optimization. */
/* Build exponent * logN(x), reusing FNDECL so the correct logN flavor
   (log/log2/log10, any float width) is called.  */
8162 tree logfn = build_call_expr (fndecl, 1, x);
8163 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8171 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8172 NULL_TREE if no simplification can be made. */
/* FNDECL is the hypot builtin's declaration, ARG0/ARG1 its two real
   arguments, and TYPE the call's result type.  */
8175 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8177 tree res, narg0, narg1;
8179 if (!validate_arg (arg0, REAL_TYPE)
8180 || !validate_arg (arg1, REAL_TYPE))
8183 /* Calculate the result when the argument is a constant. */
8184 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8187 /* If either argument to hypot has a negate or abs, strip that off.
8188 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
/* fold_strip_sign_ops returns NULL_TREE when nothing was stripped, so
   fall back to the original argument in that case.  */
8189 narg0 = fold_strip_sign_ops (arg0);
8190 narg1 = fold_strip_sign_ops (arg1);
8193 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8194 narg1 ? narg1 : arg1);
8197 /* If either argument is zero, hypot is fabs of the other. */
8198 if (real_zerop (arg0))
8199 return fold_build1 (ABS_EXPR, type, arg1);
8200 else if (real_zerop (arg1))
8201 return fold_build1 (ABS_EXPR, type, arg0);
8203 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8204 if (flag_unsafe_math_optimizations
8205 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8207 const REAL_VALUE_TYPE sqrt2_trunc
8208 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8209 return fold_build2 (MULT_EXPR, type,
8210 fold_build1 (ABS_EXPR, type, arg0),
8211 build_real (type, sqrt2_trunc));
8218 /* Fold a builtin function call to pow, powf, or powl. Return
8219 NULL_TREE if no simplification can be made. */
/* FNDECL is the pow builtin's declaration, ARG0/ARG1 the base and
   exponent, and TYPE the call's result type.  */
8221 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8225 if (!validate_arg (arg0, REAL_TYPE)
8226 || !validate_arg (arg1, REAL_TYPE))
8229 /* Calculate the result when the argument is a constant. */
8230 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8233 /* Optimize pow(1.0,y) = 1.0. */
8234 if (real_onep (arg0))
8235 return omit_one_operand (type, build_real (type, dconst1), arg1);
8237 if (TREE_CODE (arg1) == REAL_CST
8238 && !TREE_OVERFLOW (arg1))
8240 REAL_VALUE_TYPE cint;
8244 c = TREE_REAL_CST (arg1);
8246 /* Optimize pow(x,0.0) = 1.0. */
8247 if (REAL_VALUES_EQUAL (c, dconst0))
8248 return omit_one_operand (type, build_real (type, dconst1),
8251 /* Optimize pow(x,1.0) = x. */
8252 if (REAL_VALUES_EQUAL (c, dconst1))
8255 /* Optimize pow(x,-1.0) = 1.0/x. */
8256 if (REAL_VALUES_EQUAL (c, dconstm1))
8257 return fold_build2 (RDIV_EXPR, type,
8258 build_real (type, dconst1), arg0);
8260 /* Optimize pow(x,0.5) = sqrt(x). */
8261 if (flag_unsafe_math_optimizations
8262 && REAL_VALUES_EQUAL (c, dconsthalf))
8264 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8266 if (sqrtfn != NULL_TREE)
8267 return build_call_expr (sqrtfn, 1, arg0);
8270 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8271 if (flag_unsafe_math_optimizations)
8273 const REAL_VALUE_TYPE dconstroot
8274 = real_value_truncate (TYPE_MODE (type), dconstthird);
8276 if (REAL_VALUES_EQUAL (c, dconstroot))
8278 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8279 if (cbrtfn != NULL_TREE)
8280 return build_call_expr (cbrtfn, 1, arg0);
8284 /* Check for an integer exponent. */
/* Round-trip the exponent through an integer; if the value survives
   unchanged it is an exact integral power.  */
8285 n = real_to_integer (&c);
8286 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8287 if (real_identical (&c, &cint))
8289 /* Attempt to evaluate pow at compile-time. */
8290 if (TREE_CODE (arg0) == REAL_CST
8291 && !TREE_OVERFLOW (arg0))
8296 x = TREE_REAL_CST (arg0);
8297 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
/* Keep an inexact compile-time result only when unsafe math
   optimizations permit rounding differences.  */
8298 if (flag_unsafe_math_optimizations || !inexact)
8299 return build_real (type, x);
8302 /* Strip sign ops from even integer powers. */
/* For even n, pow(-x,n) == pow(x,n), so sign-changing wrappers on
   the base can be discarded.  */
8303 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8305 tree narg0 = fold_strip_sign_ops (arg0);
8307 return build_call_expr (fndecl, 2, narg0, arg1);
8312 if (flag_unsafe_math_optimizations)
8314 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8316 /* Optimize pow(expN(x),y) = expN(x*y). */
8317 if (BUILTIN_EXPONENT_P (fcode))
8319 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8320 tree arg = CALL_EXPR_ARG (arg0, 0);
8321 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8322 return build_call_expr (expfn, 1, arg);
8325 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8326 if (BUILTIN_SQRT_P (fcode))
8328 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8329 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8330 build_real (type, dconsthalf));
8331 return build_call_expr (fndecl, 2, narg0, narg1);
8334 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8335 if (BUILTIN_CBRT_P (fcode))
8337 tree arg = CALL_EXPR_ARG (arg0, 0);
8338 if (tree_expr_nonnegative_p (arg))
8340 const REAL_VALUE_TYPE dconstroot
8341 = real_value_truncate (TYPE_MODE (type), dconstthird);
8342 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8343 build_real (type, dconstroot));
8344 return build_call_expr (fndecl, 2, arg, narg1);
8348 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8349 if (fcode == BUILT_IN_POW
8350 || fcode == BUILT_IN_POWF
8351 || fcode == BUILT_IN_POWL)
8353 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8354 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8355 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8356 return build_call_expr (fndecl, 2, arg00, narg1);
8363 /* Fold a builtin function call to powi, powif, or powil with arguments
8364 ARG0 and ARG1.  TYPE is the call's result type.
   Return NULL_TREE if no simplification can be made. */
8366 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8367 tree arg0, tree arg1, tree type)
/* Unlike pow, the exponent of powi is an integer, so integral-exponent
   checks below use the host integer value directly.  */
8369 if (!validate_arg (arg0, REAL_TYPE)
8370 || !validate_arg (arg1, INTEGER_TYPE))
8373 /* Optimize pow(1.0,y) = 1.0. */
8374 if (real_onep (arg0))
8375 return omit_one_operand (type, build_real (type, dconst1), arg1);
8377 if (host_integerp (arg1, 0))
8379 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8381 /* Evaluate powi at compile-time. */
8382 if (TREE_CODE (arg0) == REAL_CST
8383 && !TREE_OVERFLOW (arg0))
8386 x = TREE_REAL_CST (arg0);
8387 real_powi (&x, TYPE_MODE (type), &x, c);
8388 return build_real (type, x);
8391 /* Optimize pow(x,0) = 1.0. */
8393 return omit_one_operand (type, build_real (type, dconst1),
8396 /* Optimize pow(x,1) = x. */
8400 /* Optimize pow(x,-1) = 1.0/x. */
8402 return fold_build2 (RDIV_EXPR, type,
8403 build_real (type, dconst1), arg0);
8409 /* A subroutine of fold_builtin to fold the various exponent
8410 functions. Return NULL_TREE if no simplification can be made.
8411 FUNC is the corresponding MPFR exponent function. */
/* FNDECL is the expN builtin's declaration and ARG its single real
   argument; the result takes FNDECL's return type.  */
8414 fold_builtin_exponent (tree fndecl, tree arg,
8415 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8417 if (validate_arg (arg, REAL_TYPE))
8419 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8422 /* Calculate the result when the argument is a constant. */
8423 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8426 /* Optimize expN(logN(x)) = x. */
/* Only matching base pairs fold: exp/log, exp2/log2, exp10/log10.  */
8427 if (flag_unsafe_math_optimizations)
8429 const enum built_in_function fcode = builtin_mathfn_code (arg);
8431 if ((func == mpfr_exp
8432 && (fcode == BUILT_IN_LOG
8433 || fcode == BUILT_IN_LOGF
8434 || fcode == BUILT_IN_LOGL))
8435 || (func == mpfr_exp2
8436 && (fcode == BUILT_IN_LOG2
8437 || fcode == BUILT_IN_LOG2F
8438 || fcode == BUILT_IN_LOG2L))
8439 || (func == mpfr_exp10
8440 && (fcode == BUILT_IN_LOG10
8441 || fcode == BUILT_IN_LOG10F
8442 || fcode == BUILT_IN_LOG10L)))
8443 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8450 /* Return true if VAR is a VAR_DECL or a component thereof.
   Strips handled component references (array/field accesses, etc.)
   down to the base object and tests whether that base is an
   SSA variable or declaration.  */
8453 var_decl_component_p (tree var)
8456 while (handled_component_p (inner))
8457 inner = TREE_OPERAND (inner, 0);
8458 return SSA_VAR_P (inner);
8461 /* Fold function call to builtin memset. Return
8462 NULL_TREE if no simplification can be made.
   DEST, C and LEN are the call's arguments; TYPE is its result type.
   If IGNORE is true the call's value is unused, so the folded form
   need not produce DEST as a value.  */
8465 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8468 unsigned HOST_WIDE_INT length, cval;
8470 if (! validate_arg (dest, POINTER_TYPE)
8471 || ! validate_arg (c, INTEGER_TYPE)
8472 || ! validate_arg (len, INTEGER_TYPE))
8475 if (! host_integerp (len, 1))
8478 /* If the LEN parameter is zero, return DEST. */
8479 if (integer_zerop (len))
8480 return omit_one_operand (type, dest, c)
8482 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
/* Only an ADDR_EXPR destination lets us see the underlying object and
   turn the memset into a plain store.  */
8487 if (TREE_CODE (var) != ADDR_EXPR)
8490 var = TREE_OPERAND (var, 0);
8491 if (TREE_THIS_VOLATILE (var))
8494 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8495 && !POINTER_TYPE_P (TREE_TYPE (var)))
8498 if (! var_decl_component_p (var))
/* The store must cover the object exactly and the destination must be
   sufficiently aligned for the object's mode.  */
8501 length = tree_low_cst (len, 1);
8502 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8503 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8507 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8510 if (integer_zerop (c))
8514 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8517 cval = tree_low_cst (c, 1);
/* Replicate the byte into the high half; the 31+1 split presumably
   keeps each shift count below 32 for narrow HOST_WIDE_INT — confirm
   against the elided replication steps above.  */
8521 cval |= (cval << 31) << 1;
8524 ret = build_int_cst_type (TREE_TYPE (var), cval);
8525 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8529 return omit_one_operand (type, dest, ret);
8532 /* Fold function call to builtin bzero. Return
8533 NULL_TREE if no simplification can be made.
   DEST and SIZE are the call's arguments; IGNORE is true when the
   call's value is unused.  */
8536 fold_builtin_bzero (tree dest, tree size, bool ignore)
8538 if (! validate_arg (dest, POINTER_TYPE)
8539 || ! validate_arg (size, INTEGER_TYPE))
8545 /* New argument list transforming bzero(ptr x, int y) to
8546 memset(ptr x, int 0, size_t y). This is done this way
8547 so that if it isn't expanded inline, we fallback to
8548 calling bzero instead of memset. */
8550 return fold_builtin_memset (dest, integer_zero_node,
8551 fold_convert (sizetype, size),
8552 void_type_node, ignore);
8555 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8556 NULL_TREE if no simplification can be made.
8557 If ENDP is 0, return DEST (like memcpy).
8558 If ENDP is 1, return DEST+LEN (like mempcpy).
8559 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8560 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (like memmove).  IGNORE is true when the call's value is unused.  */
8564 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8566 tree destvar, srcvar, expr;
8568 if (! validate_arg (dest, POINTER_TYPE)
8569 || ! validate_arg (src, POINTER_TYPE)
8570 || ! validate_arg (len, INTEGER_TYPE))
8573 /* If the LEN parameter is zero, return DEST. */
8574 if (integer_zerop (len))
8575 return omit_one_operand (type, dest, src);
8577 /* If SRC and DEST are the same (and not volatile), return
8578 DEST{,+LEN,+LEN-1}. */
8579 if (operand_equal_p (src, dest, 0))
8583 tree srctype, desttype;
8586 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8587 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8589 /* Both DEST and SRC must be pointer types.
8590 ??? This is what old code did. Is the testing for pointer types
8593 If either SRC is readonly or length is 1, we can use memcpy. */
/* memmove with provably non-overlapping (readonly) source, or a copy no
   larger than the shared alignment, degrades safely to memcpy.  */
8594 if (dest_align && src_align
8595 && (readonly_data_expr (src)
8596 || (host_integerp (len, 1)
8597 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8598 tree_low_cst (len, 1)))))
8600 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8603 return build_call_expr (fn, 3, dest, src, len);
8608 if (!host_integerp (len, 0))
8611 This logic loses for arguments like (type *)malloc (sizeof (type)),
8612 since we strip the casts of up to VOID return value from malloc.
8613 Perhaps we ought to inherit type from non-VOID argument here? */
8616 srctype = TREE_TYPE (TREE_TYPE (src));
8617 desttype = TREE_TYPE (TREE_TYPE (dest));
/* Only fold to a single assignment when both pointed-to types have a
   constant size exactly equal to LEN.  */
8618 if (!srctype || !desttype
8619 || !TYPE_SIZE_UNIT (srctype)
8620 || !TYPE_SIZE_UNIT (desttype)
8621 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8622 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8623 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8624 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8627 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8628 < (int) TYPE_ALIGN (desttype)
8629 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8630 < (int) TYPE_ALIGN (srctype)))
8634 dest = builtin_save_expr (dest);
8636 srcvar = build_fold_indirect_ref (src);
8637 if (TREE_THIS_VOLATILE (srcvar))
8639 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8641 /* With memcpy, it is possible to bypass aliasing rules, so without
8642 this check i. e. execute/20060930-2.c would be misoptimized, because
8643 it uses conflicting alias set to hold argument for the memcpy call.
8644 This check is probably unnecessary with -fno-strict-aliasing.
8645 Similarly for destvar. See also PR29286. */
8646 if (!var_decl_component_p (srcvar)
8647 /* Accept: memcpy (*char_var, "test", 1); that simplify
8649 || is_gimple_min_invariant (srcvar)
8650 || readonly_data_expr (src))
8653 destvar = build_fold_indirect_ref (dest);
8654 if (TREE_THIS_VOLATILE (destvar))
8656 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8658 if (!var_decl_component_p (destvar))
/* Choose how to bridge src and dest types: direct assignment for
   identical/compatible types, a value conversion for scalar types,
   otherwise a VIEW_CONVERT_EXPR reinterpretation.  */
8661 if (srctype == desttype
8662 || (gimple_in_ssa_p (cfun)
8663 && useless_type_conversion_p (desttype, srctype)))
8665 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8666 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8667 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8668 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8669 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8671 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8672 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8678 if (endp == 0 || endp == 3)
8679 return omit_one_operand (type, dest, expr);
/* For ENDP==2 (stpcpy-like) the returned pointer is DEST+LEN-1.  */
8685 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8688 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8689 dest = fold_convert (type, dest);
8691 dest = omit_one_operand (type, dest, expr);
8695 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8696 If LEN is not NULL, it represents the length of the string to be
8697 copied. Return NULL_TREE if no simplification can be made.
   FNDECL is the strcpy builtin's declaration; its return type is the
   type of the folded result.  */
8700 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8704 if (!validate_arg (dest, POINTER_TYPE)
8705 || !validate_arg (src, POINTER_TYPE))
8708 /* If SRC and DEST are the same (and not volatile), return DEST. */
8709 if (operand_equal_p (src, dest, 0))
8710 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8715 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* Determine the source length ourselves when the caller didn't; bail
   out if it is unknown or has side effects.  */
8721 len = c_strlen (src, 1);
8722 if (! len || TREE_SIDE_EFFECTS (len))
/* Copy LEN + 1 bytes so the NUL terminator is included.  */
8726 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8727 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8728 build_call_expr (fn, 3, dest, src, len));
8731 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8732 If SLEN is not NULL, it represents the length of the source string.
8733 Return NULL_TREE if no simplification can be made.
   FNDECL is the strncpy builtin's declaration.  */
8736 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8740 if (!validate_arg (dest, POINTER_TYPE)
8741 || !validate_arg (src, POINTER_TYPE)
8742 || !validate_arg (len, INTEGER_TYPE))
8745 /* If the LEN parameter is zero, return DEST. */
8746 if (integer_zerop (len))
8747 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8749 /* We can't compare slen with len as constants below if len is not a
8751 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8755 slen = c_strlen (src, 1);
8757 /* Now, we must be passed a constant src ptr parameter. */
8758 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* Account for the NUL terminator in the source length.  */
8761 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8763 /* We do not support simplification of this case, though we do
8764 support it when expanding trees into RTL. */
8765 /* FIXME: generate a call to __builtin_memset. */
/* slen < len means strncpy must zero-pad the tail — not modeled here.  */
8766 if (tree_int_cst_lt (slen, len))
8769 /* OK transform into builtin memcpy. */
8770 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8773 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8774 build_call_expr (fn, 3, dest, src, len));
8777 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8778 arguments to the call, and TYPE is its return type.
8779 Return NULL_TREE if no simplification can be made. */
8782 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8784 if (!validate_arg (arg1, POINTER_TYPE)
8785 || !validate_arg (arg2, INTEGER_TYPE)
8786 || !validate_arg (len, INTEGER_TYPE))
8792 if (TREE_CODE (arg2) != INTEGER_CST
8793 || !host_integerp (len, 1))
/* Fold only when the haystack is a known string constant and LEN does
   not read past its NUL terminator.  */
8796 p1 = c_getstr (arg1);
8797 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0
/* Convert ARG2 to the target character set before searching.  */
8803 if (target_char_cast (arg2, &c))
8806 r = memchr (p1, c, tree_low_cst (len, 1));
8809 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: return ARG1 advanced by the match offset, in TYPE.  */
8811 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8813 return fold_convert (type, tem);
8819 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8820 Return NULL_TREE if no simplification can be made.
   LEN is the number of bytes to compare.  */
8823 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8825 const char *p1, *p2;
8827 if (!validate_arg (arg1, POINTER_TYPE)
8828 || !validate_arg (arg2, POINTER_TYPE)
8829 || !validate_arg (len, INTEGER_TYPE))
8832 /* If the LEN parameter is zero, return zero. */
8833 if (integer_zerop (len))
8834 return omit_two_operands (integer_type_node, integer_zero_node,
8837 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8838 if (operand_equal_p (arg1, arg2, 0))
8839 return omit_one_operand (integer_type_node, integer_zero_node, len);
8841 p1 = c_getstr (arg1);
8842 p2 = c_getstr (arg2);
8844 /* If all arguments are constant, and the value of len is not greater
8845 than the lengths of arg1 and arg2, evaluate at compile-time. */
8846 if (host_integerp (len, 1) && p1 && p2
8847 && compare_tree_int (len, strlen (p1) + 1) <= 0
8848 && compare_tree_int (len, strlen (p2) + 1) <= 0)
/* Normalize the host memcmp result to -1/0/1 so the folded constant
   does not depend on the host library.  */
8850 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8853 return integer_one_node;
8855 return integer_minus_one_node;
8857 return integer_zero_node;
8860 /* If len parameter is one, return an expression corresponding to
8861 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8862 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8864 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8865 tree cst_uchar_ptr_node
8866 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8868 tree ind1 = fold_convert (integer_type_node,
8869 build1 (INDIRECT_REF, cst_uchar_node,
8870 fold_convert (cst_uchar_ptr_node,
8872 tree ind2 = fold_convert (integer_type_node,
8873 build1 (INDIRECT_REF, cst_uchar_node,
8874 fold_convert (cst_uchar_ptr_node,
8876 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8882 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8883 Return NULL_TREE if no simplification can be made. */
8886 fold_builtin_strcmp (tree arg1, tree arg2)
8888 const char *p1, *p2;
8890 if (!validate_arg (arg1, POINTER_TYPE)
8891 || !validate_arg (arg2, POINTER_TYPE))
8894 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8895 if (operand_equal_p (arg1, arg2, 0))
8896 return integer_zero_node;
8898 p1 = c_getstr (arg1);
8899 p2 = c_getstr (arg2);
/* Both operands are string constants: compute the answer now,
   normalized to -1/0/1 independent of the host strcmp.  */
8903 const int i = strcmp (p1, p2);
8905 return integer_minus_one_node;
8907 return integer_one_node;
8909 return integer_zero_node;
8912 /* If the second arg is "", return *(const unsigned char*)arg1. */
8913 if (p2 && *p2 == '\0')
8915 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8916 tree cst_uchar_ptr_node
8917 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8919 return fold_convert (integer_type_node,
8920 build1 (INDIRECT_REF, cst_uchar_node,
8921 fold_convert (cst_uchar_ptr_node,
8925 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8926 if (p1 && *p1 == '\0')
8928 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8929 tree cst_uchar_ptr_node
8930 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8932 tree temp = fold_convert (integer_type_node,
8933 build1 (INDIRECT_REF, cst_uchar_node,
8934 fold_convert (cst_uchar_ptr_node,
8936 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8942 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8943 Return NULL_TREE if no simplification can be made. */
8946 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8948 const char *p1, *p2;
8950 if (!validate_arg (arg1, POINTER_TYPE)
8951 || !validate_arg (arg2, POINTER_TYPE)
8952 || !validate_arg (len, INTEGER_TYPE))
8955 /* If the LEN parameter is zero, return zero. */
8956 if (integer_zerop (len))
8957 return omit_two_operands (integer_type_node, integer_zero_node,
8960 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8961 if (operand_equal_p (arg1, arg2, 0))
8962 return omit_one_operand (integer_type_node, integer_zero_node, len);
8964 p1 = c_getstr (arg1);
8965 p2 = c_getstr (arg2);
/* Both strings and the length are constant: evaluate now, normalized
   to -1/0/1 independent of the host strncmp.  */
8967 if (host_integerp (len, 1) && p1 && p2)
8969 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8971 return integer_one_node;
8973 return integer_minus_one_node;
8975 return integer_zero_node;
8978 /* If the second arg is "", and the length is greater than zero,
8979 return *(const unsigned char*)arg1. */
8980 if (p2 && *p2 == '\0'
8981 && TREE_CODE (len) == INTEGER_CST
8982 && tree_int_cst_sgn (len) == 1)
8984 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8985 tree cst_uchar_ptr_node
8986 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8988 return fold_convert (integer_type_node,
8989 build1 (INDIRECT_REF, cst_uchar_node,
8990 fold_convert (cst_uchar_ptr_node,
8994 /* If the first arg is "", and the length is greater than zero,
8995 return -*(const unsigned char*)arg2. */
8996 if (p1 && *p1 == '\0'
8997 && TREE_CODE (len) == INTEGER_CST
8998 && tree_int_cst_sgn (len) == 1)
9000 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9001 tree cst_uchar_ptr_node
9002 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9004 tree temp = fold_convert (integer_type_node,
9005 build1 (INDIRECT_REF, cst_uchar_node,
9006 fold_convert (cst_uchar_ptr_node,
9008 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9011 /* If len parameter is one, return an expression corresponding to
9012 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9013 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9015 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9016 tree cst_uchar_ptr_node
9017 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9019 tree ind1 = fold_convert (integer_type_node,
9020 build1 (INDIRECT_REF, cst_uchar_node,
9021 fold_convert (cst_uchar_ptr_node,
9023 tree ind2 = fold_convert (integer_type_node,
9024 build1 (INDIRECT_REF, cst_uchar_node,
9025 fold_convert (cst_uchar_ptr_node,
9027 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9033 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9034 ARG. Return NULL_TREE if no simplification can be made.
   TYPE is the call's (integer) result type.  */
9037 fold_builtin_signbit (tree arg, tree type)
9041 if (!validate_arg (arg, REAL_TYPE))
9044 /* If ARG is a compile-time constant, determine the result. */
9045 if (TREE_CODE (arg) == REAL_CST
9046 && !TREE_OVERFLOW (arg))
9050 c = TREE_REAL_CST (arg);
9051 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9052 return fold_convert (type, temp);
9055 /* If ARG is non-negative, the result is always zero. */
9056 if (tree_expr_nonnegative_p (arg))
9057 return omit_one_operand (type, integer_zero_node, arg);
9059 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
/* With signed zeros, -0.0 < 0.0 is false yet signbit(-0.0) is set, so
   the comparison form is only valid without them.  */
9060 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9061 return fold_build2 (LT_EXPR, type, arg,
9062 build_real (TREE_TYPE (arg), dconst0));
9067 /* Fold function call to builtin copysign, copysignf or copysignl with
9068 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
   be made.  FNDECL is the builtin's declaration and TYPE the result
   type.  */
9072 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9076 if (!validate_arg (arg1, REAL_TYPE)
9077 || !validate_arg (arg2, REAL_TYPE))
9080 /* copysign(X,X) is X. */
9081 if (operand_equal_p (arg1, arg2, 0))
9082 return fold_convert (type, arg1);
9084 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9085 if (TREE_CODE (arg1) == REAL_CST
9086 && TREE_CODE (arg2) == REAL_CST
9087 && !TREE_OVERFLOW (arg1)
9088 && !TREE_OVERFLOW (arg2))
9090 REAL_VALUE_TYPE c1, c2;
9092 c1 = TREE_REAL_CST (arg1);
9093 c2 = TREE_REAL_CST (arg2);
9094 /* c1.sign := c2.sign. */
9095 real_copysign (&c1, &c2);
9096 return build_real (type, c1);
9099 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9100 Remember to evaluate Y for side-effects. */
9101 if (tree_expr_nonnegative_p (arg2))
9102 return omit_one_operand (type,
9103 fold_build1 (ABS_EXPR, type, arg1),
9106 /* Strip sign changing operations for the first argument. */
/* copysign discards ARG1's sign anyway, so negations/fabs wrapped
   around it are redundant.  */
9107 tem = fold_strip_sign_ops (arg1);
9109 return build_call_expr (fndecl, 2, tem, arg2);
9114 /* Fold a call to builtin isascii with argument ARG.
   Always folds; returns an integer expression testing that no bits
   above the low 7 are set.  */
9117 fold_builtin_isascii (tree arg)
9119 if (!validate_arg (arg, INTEGER_TYPE))
9123 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9124 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9125 build_int_cst (NULL_TREE,
9126 ~ (unsigned HOST_WIDE_INT) 0x7f));
9127 return fold_build2 (EQ_EXPR, integer_type_node,
9128 arg, integer_zero_node);
9132 /* Fold a call to builtin toascii with argument ARG.
   Always folds to a mask of the low 7 bits.  */
9135 fold_builtin_toascii (tree arg)
9137 if (!validate_arg (arg, INTEGER_TYPE))
9140 /* Transform toascii(c) -> (c & 0x7f). */
9141 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9142 build_int_cst (NULL_TREE, 0x7f));
9145 /* Fold a call to builtin isdigit with argument ARG.
   Folds to an unsigned range check against the target's '0'..'9'.  */
9148 fold_builtin_isdigit (tree arg)
9150 if (!validate_arg (arg, INTEGER_TYPE))
9154 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9155 /* According to the C standard, isdigit is unaffected by locale.
9156 However, it definitely is affected by the target character set. */
9157 unsigned HOST_WIDE_INT target_digit0
9158 = lang_hooks.to_target_charset ('0');
/* A zero return from to_target_charset means the mapping failed.  */
9160 if (target_digit0 == 0)
9163 arg = fold_convert (unsigned_type_node, arg);
9164 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9165 build_int_cst (unsigned_type_node, target_digit0));
9166 return fold_build2 (LE_EXPR, integer_type_node, arg,
9167 build_int_cst (unsigned_type_node, 9));
9171 /* Fold a call to fabs, fabsf or fabsl with argument ARG.
   TYPE is the call's result type.  Constant arguments are folded
   immediately; otherwise an ABS_EXPR is built.  */
9174 fold_builtin_fabs (tree arg, tree type)
9176 if (!validate_arg (arg, REAL_TYPE))
9179 arg = fold_convert (type, arg);
9180 if (TREE_CODE (arg) == REAL_CST)
9181 return fold_abs_const (arg, type);
9182 return fold_build1 (ABS_EXPR, type, arg);
9185 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.
   TYPE is the call's result type.  Integer-constant arguments are
   folded immediately; otherwise an ABS_EXPR is built.  */
9188 fold_builtin_abs (tree arg, tree type)
9190 if (!validate_arg (arg, INTEGER_TYPE))
9193 arg = fold_convert (type, arg);
9194 if (TREE_CODE (arg) == INTEGER_CST)
9195 return fold_abs_const (arg, type);
9196 return fold_build1 (ABS_EXPR, type, arg);
9199 /* Fold a call to builtin fmin or fmax.
   ARG0 and ARG1 are the real arguments, TYPE the result type, and MAX
   selects fmax (true) or fmin (false).  Return NULL_TREE if no
   simplification can be made.  */
9202 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9204 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9206 /* Calculate the result when the argument is a constant. */
9207 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9212 /* If either argument is NaN, return the other one. Avoid the
9213 transformation if we get (and honor) a signalling NaN. Using
9214 omit_one_operand() ensures we create a non-lvalue. */
9215 if (TREE_CODE (arg0) == REAL_CST
9216 && real_isnan (&TREE_REAL_CST (arg0))
9217 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9218 || ! TREE_REAL_CST (arg0).signalling))
9219 return omit_one_operand (type, arg1, arg0);
9220 if (TREE_CODE (arg1) == REAL_CST
9221 && real_isnan (&TREE_REAL_CST (arg1))
9222 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9223 || ! TREE_REAL_CST (arg1).signalling))
9224 return omit_one_operand (type, arg0, arg1);
9226 /* Transform fmin/fmax(x,x) -> x. */
9227 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9228 return omit_one_operand (type, arg0, arg1);
9230 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9231 functions to return the numeric arg if the other one is NaN.
9232 These tree codes don't honor that, so only transform if
9233 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9234 handled, so we don't have to worry about it either. */
9235 if (flag_finite_math_only)
9236 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9237 fold_convert (type, arg0),
9238 fold_convert (type, arg1));
9243 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).
   TYPE is the real result type; the fold requires an atan2 builtin of
   that type to exist.  */
9246 fold_builtin_carg (tree arg, tree type)
9248 if (validate_arg (arg, COMPLEX_TYPE))
9250 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* Save ARG so its side effects run once even though it is used for
   both the real and imaginary parts.  */
9254 tree new_arg = builtin_save_expr (arg);
9255 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9256 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9257 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9264 /* Fold a call to builtin logb/ilogb.
   ARG is the real argument; RETTYPE is the call's result type (real
   for logb, integer for ilogb).  Only compile-time constant arguments
   are folded; return NULL_TREE otherwise.  */
9267 fold_builtin_logb (tree arg, tree rettype)
9269 if (! validate_arg (arg, REAL_TYPE))
9274 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9276 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9282 /* If arg is Inf or NaN and we're logb, return it. */
9283 if (TREE_CODE (rettype) == REAL_TYPE)
9284 return fold_convert (rettype, arg);
9285 /* Fall through... */
9287 /* Zero may set errno and/or raise an exception for logb, also
9288 for ilogb we don't know FP_ILOGB0. */
9291 /* For normal numbers, proceed iff radix == 2. In GCC,
9292 normalized significands are in the range [0.5, 1.0). We
9293 want the exponent as if they were [1.0, 2.0) so get the
9294 exponent and subtract 1. */
9295 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9296 return fold_convert (rettype, build_int_cst (NULL_TREE,
9297 REAL_EXP (value)-1));
9305 /* Fold a call to builtin significand, if radix == 2.
   ARG is the real argument and RETTYPE the result type.  Only
   compile-time constant arguments are folded.  */
9308 fold_builtin_significand (tree arg, tree rettype)
9310 if (! validate_arg (arg, REAL_TYPE))
9315 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9317 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9324 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9325 return fold_convert (rettype, arg);
9327 /* For normal numbers, proceed iff radix == 2. */
9328 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9330 REAL_VALUE_TYPE result = *value;
9331 /* In GCC, normalized significands are in the range [0.5,
9332 1.0). We want them to be [1.0, 2.0) so set the
   exponent to 1.  */
9334 SET_REAL_EXP (&result, 1);
9335 return build_real (rettype, result);
9344 /* Fold a call to builtin frexp, we can assume the base is 2. */
/* NOTE(review): return type, local declarations of FRAC/EXP, braces and
   the switch over VALUE->cl are elided in this view. */
9347 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9349 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
/* Only fold constant, non-overflowing first arguments. */
9354 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9357 arg1 = build_fold_indirect_ref (arg1);
9359 /* Proceed if a valid pointer type was passed in. */
9360 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9362 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9368 /* For +-0, return (*exp = 0, +-0). */
9369 exp = integer_zero_node;
9374 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9375 return omit_one_operand (rettype, arg0, arg1);
9378 /* Since the frexp function always expects base 2, and in
9379 GCC normalized significands are already in the range
9380 [0.5, 1.0), we have exactly what frexp wants. */
9381 REAL_VALUE_TYPE frac_rvt = *value;
9382 SET_REAL_EXP (&frac_rvt, 0);
9383 frac = build_real (rettype, frac_rvt);
9384 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9391 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9392 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
/* Mark the store so later passes don't discard it. */
9393 TREE_SIDE_EFFECTS (arg1) = 1;
9394 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9400 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9401 then we can assume the base is two. If it's false, then we have to
9402 check the mode of the TYPE parameter in certain cases. */
/* NOTE(review): return type, braces and the NULL_TREE fall-through are
   elided in this view. */
9405 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9407 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9412 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9413 if (real_zerop (arg0) || integer_zerop (arg1)
9414 || (TREE_CODE (arg0) == REAL_CST
9415 && !real_isfinite (&TREE_REAL_CST (arg0))))
9416 return omit_one_operand (type, arg0, arg1);
9418 /* If both arguments are constant, then try to evaluate it. */
9419 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9420 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9421 && host_integerp (arg1, 0))
9423 /* Bound the maximum adjustment to twice the range of the
9424 mode's valid exponents. Use abs to ensure the range is
9425 positive as a sanity check. */
9426 const long max_exp_adj = 2 *
9427 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9428 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9430 /* Get the user-requested adjustment. */
9431 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9433 /* The requested adjustment must be inside this range. This
9434 is a preliminary cap to avoid things like overflow, we
9435 may still fail to compute the result for other reasons. */
9436 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9438 REAL_VALUE_TYPE initial_result;
9440 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9442 /* Ensure we didn't overflow. */
9443 if (! real_isinf (&initial_result))
9445 const REAL_VALUE_TYPE trunc_result
9446 = real_value_truncate (TYPE_MODE (type), initial_result);
9448 /* Only proceed if the target mode can hold the
   result exactly (comment continuation elided in this view). */
9450 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9451 return build_real (type, trunc_result);
9460 /* Fold a call to builtin modf. */
/* NOTE(review): return type, braces and the switch over VALUE->cl are
   elided in this view. */
9463 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9465 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9470 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9473 arg1 = build_fold_indirect_ref (arg1);
9475 /* Proceed if a valid pointer type was passed in. */
9476 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9478 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9479 REAL_VALUE_TYPE trunc, frac;
9485 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9486 trunc = frac = *value;
9489 /* For +-Inf, return (*arg1 = arg0, +-0). */
/* Preserve the sign on the zero fractional part. */
9491 frac.sign = value->sign;
9495 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9496 real_trunc (&trunc, VOIDmode, value);
9497 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9498 /* If the original number was negative and already
9499 integral, then the fractional part is -0.0. */
9500 if (value->sign && frac.cl == rvc_zero)
9501 frac.sign = value->sign;
9505 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9506 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9507 build_real (rettype, trunc))
9508 TREE_SIDE_EFFECTS (arg1) = 1;
9509 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9510 build_real (rettype, frac));
9516 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9517 ARG is the argument for the call. */
/* NOTE(review): return type, declaration of R, braces, break;s and the
   default case of the switch are elided in this view. */
9520 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9522 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9525 if (!validate_arg (arg, REAL_TYPE))
9527 error ("non-floating-point argument to function %qs",
9528 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9529 return error_mark_node;
9532 switch (builtin_index)
9534 case BUILT_IN_ISINF:
/* If the mode has no infinities the answer is statically 0. */
9535 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9536 return omit_one_operand (type, integer_zero_node, arg);
9538 if (TREE_CODE (arg) == REAL_CST)
9540 r = TREE_REAL_CST (arg);
9541 if (real_isinf (&r))
9542 return real_compare (GT_EXPR, &r, &dconst0)
9543 ? integer_one_node : integer_minus_one_node;
9545 return integer_zero_node;
9550 case BUILT_IN_FINITE:
/* If neither NaNs nor infinities exist, everything is finite. */
9551 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9552 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9553 return omit_one_operand (type, integer_one_node, arg);
9555 if (TREE_CODE (arg) == REAL_CST)
9557 r = TREE_REAL_CST (arg);
9558 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9563 case BUILT_IN_ISNAN:
9564 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9565 return omit_one_operand (type, integer_zero_node, arg);
9567 if (TREE_CODE (arg) == REAL_CST)
9569 r = TREE_REAL_CST (arg);
9570 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* Non-constant isnan: x != x, via an UNORDERED self-compare.  Save ARG
   because it is used twice. */
9573 arg = builtin_save_expr (arg);
9574 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9581 /* Fold a call to an unordered comparison function such as
9582 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9583 being called and ARG0 and ARG1 are the arguments for the call.
9584 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9585 the opposite of the desired result. UNORDERED_CODE is used
9586 for modes that can hold NaNs and ORDERED_CODE is used for
   modes that cannot (comment continuation elided in this view). */
/* NOTE(review): return type, declarations of TYPE0/TYPE1, braces and the
   real-type selection for CMP_TYPE are elided in this view. */
9590 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9591 enum tree_code unordered_code,
9592 enum tree_code ordered_code)
9594 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9595 enum tree_code code;
9597 enum tree_code code0, code1;
9598 tree cmp_type = NULL_TREE;
9600 type0 = TREE_TYPE (arg0);
9601 type1 = TREE_TYPE (arg1);
9603 code0 = TREE_CODE (type0);
9604 code1 = TREE_CODE (type1);
9606 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9607 /* Choose the wider of two real types. */
9608 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9610 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9612 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
/* Neither argument is floating point: diagnose the misuse. */
9616 error ("non-floating-point argument to function %qs",
9617 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9618 return error_mark_node;
9621 arg0 = fold_convert (cmp_type, arg0);
9622 arg1 = fold_convert (cmp_type, arg1);
9624 if (unordered_code == UNORDERED_EXPR)
9626 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9627 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9628 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
/* Build !(cmp) using the inverse comparison code chosen for the mode. */
9631 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9633 return fold_build1 (TRUTH_NOT_EXPR, type,
9634 fold_build2 (code, type, arg0, arg1));
9637 /* Fold a call to built-in function FNDECL with 0 arguments.
9638 IGNORE is true if the result of the function call is ignored. This
9639 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): return type, the switch header, break;s and the final
   return NULL_TREE are elided in this view. */
9642 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9644 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9645 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9648 CASE_FLT_FN (BUILT_IN_INF):
9649 case BUILT_IN_INFD32:
9650 case BUILT_IN_INFD64:
9651 case BUILT_IN_INFD128:
9652 return fold_builtin_inf (type, true);
9654 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9655 return fold_builtin_inf (type, false);
9657 case BUILT_IN_CLASSIFY_TYPE:
9658 return fold_builtin_classify_type (NULL_TREE);
9666 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9667 IGNORE is true if the result of the function call is ignored. This
9668 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): return type, the switch header, break;s between cases and
   the final return NULL_TREE are elided in this view.  Each case either
   returns a folded tree or falls out to return NULL_TREE. */
9671 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9673 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9674 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9678 case BUILT_IN_CONSTANT_P:
9680 tree val = fold_builtin_constant_p (arg0);
9682 /* Gimplification will pull the CALL_EXPR for the builtin out of
9683 an if condition. When not optimizing, we'll not CSE it back.
9684 To avoid link error types of regressions, return false now. */
9685 if (!val && !optimize)
9686 val = integer_zero_node;
9691 case BUILT_IN_CLASSIFY_TYPE:
9692 return fold_builtin_classify_type (arg0);
9694 case BUILT_IN_STRLEN:
9695 return fold_builtin_strlen (arg0);
9697 CASE_FLT_FN (BUILT_IN_FABS):
9698 return fold_builtin_fabs (arg0, type);
9702 case BUILT_IN_LLABS:
9703 case BUILT_IN_IMAXABS:
9704 return fold_builtin_abs (arg0, type);
/* Complex-valued builtins. */
9706 CASE_FLT_FN (BUILT_IN_CONJ):
9707 if (validate_arg (arg0, COMPLEX_TYPE))
9708 return fold_build1 (CONJ_EXPR, type, arg0);
9711 CASE_FLT_FN (BUILT_IN_CREAL):
9712 if (validate_arg (arg0, COMPLEX_TYPE))
9713 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9716 CASE_FLT_FN (BUILT_IN_CIMAG):
9717 if (validate_arg (arg0, COMPLEX_TYPE))
9718 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9721 CASE_FLT_FN (BUILT_IN_CCOS):
9722 CASE_FLT_FN (BUILT_IN_CCOSH):
9723 /* These functions are "even", i.e. f(x) == f(-x). */
9724 if (validate_arg (arg0, COMPLEX_TYPE))
9726 tree narg = fold_strip_sign_ops (arg0);
9728 return build_call_expr (fndecl, 1, narg);
9732 CASE_FLT_FN (BUILT_IN_CABS):
9733 return fold_builtin_cabs (arg0, type, fndecl);
9735 CASE_FLT_FN (BUILT_IN_CARG):
9736 return fold_builtin_carg (arg0, type);
/* Real-valued math builtins; constant arguments are evaluated with MPFR
   via do_mpfr_arg1, with optional domain bounds. */
9738 CASE_FLT_FN (BUILT_IN_SQRT):
9739 return fold_builtin_sqrt (arg0, type);
9741 CASE_FLT_FN (BUILT_IN_CBRT):
9742 return fold_builtin_cbrt (arg0, type);
9744 CASE_FLT_FN (BUILT_IN_ASIN):
9745 if (validate_arg (arg0, REAL_TYPE))
9746 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9747 &dconstm1, &dconst1, true);
9750 CASE_FLT_FN (BUILT_IN_ACOS):
9751 if (validate_arg (arg0, REAL_TYPE))
9752 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9753 &dconstm1, &dconst1, true);
9756 CASE_FLT_FN (BUILT_IN_ATAN):
9757 if (validate_arg (arg0, REAL_TYPE))
9758 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9761 CASE_FLT_FN (BUILT_IN_ASINH):
9762 if (validate_arg (arg0, REAL_TYPE))
9763 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9766 CASE_FLT_FN (BUILT_IN_ACOSH):
9767 if (validate_arg (arg0, REAL_TYPE))
9768 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9769 &dconst1, NULL, true);
9772 CASE_FLT_FN (BUILT_IN_ATANH):
9773 if (validate_arg (arg0, REAL_TYPE))
9774 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9775 &dconstm1, &dconst1, false);
9778 CASE_FLT_FN (BUILT_IN_SIN):
9779 if (validate_arg (arg0, REAL_TYPE))
9780 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9783 CASE_FLT_FN (BUILT_IN_COS):
9784 return fold_builtin_cos (arg0, type, fndecl);
9787 CASE_FLT_FN (BUILT_IN_TAN):
9788 return fold_builtin_tan (arg0, type);
9790 CASE_FLT_FN (BUILT_IN_CEXP):
9791 return fold_builtin_cexp (arg0, type);
9793 CASE_FLT_FN (BUILT_IN_CEXPI):
9794 if (validate_arg (arg0, REAL_TYPE))
9795 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9798 CASE_FLT_FN (BUILT_IN_SINH):
9799 if (validate_arg (arg0, REAL_TYPE))
9800 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9803 CASE_FLT_FN (BUILT_IN_COSH):
9804 return fold_builtin_cosh (arg0, type, fndecl);
9806 CASE_FLT_FN (BUILT_IN_TANH):
9807 if (validate_arg (arg0, REAL_TYPE))
9808 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9811 CASE_FLT_FN (BUILT_IN_ERF):
9812 if (validate_arg (arg0, REAL_TYPE))
9813 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9816 CASE_FLT_FN (BUILT_IN_ERFC):
9817 if (validate_arg (arg0, REAL_TYPE))
9818 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9821 CASE_FLT_FN (BUILT_IN_TGAMMA):
9822 if (validate_arg (arg0, REAL_TYPE))
9823 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9826 CASE_FLT_FN (BUILT_IN_EXP):
9827 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9829 CASE_FLT_FN (BUILT_IN_EXP2):
9830 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9832 CASE_FLT_FN (BUILT_IN_EXP10):
9833 CASE_FLT_FN (BUILT_IN_POW10):
9834 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9836 CASE_FLT_FN (BUILT_IN_EXPM1):
9837 if (validate_arg (arg0, REAL_TYPE))
9838 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9841 CASE_FLT_FN (BUILT_IN_LOG):
9842 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9844 CASE_FLT_FN (BUILT_IN_LOG2):
9845 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9847 CASE_FLT_FN (BUILT_IN_LOG10):
9848 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9850 CASE_FLT_FN (BUILT_IN_LOG1P):
9851 if (validate_arg (arg0, REAL_TYPE))
9852 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9853 &dconstm1, NULL, false);
/* Bessel functions require MPFR >= 2.3.0. */
9856 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9857 CASE_FLT_FN (BUILT_IN_J0):
9858 if (validate_arg (arg0, REAL_TYPE))
9859 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9863 CASE_FLT_FN (BUILT_IN_J1):
9864 if (validate_arg (arg0, REAL_TYPE))
9865 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9869 CASE_FLT_FN (BUILT_IN_Y0):
9870 if (validate_arg (arg0, REAL_TYPE))
9871 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9872 &dconst0, NULL, false);
9875 CASE_FLT_FN (BUILT_IN_Y1):
9876 if (validate_arg (arg0, REAL_TYPE))
9877 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9878 &dconst0, NULL, false);
9882 CASE_FLT_FN (BUILT_IN_NAN):
9883 case BUILT_IN_NAND32:
9884 case BUILT_IN_NAND64:
9885 case BUILT_IN_NAND128:
9886 return fold_builtin_nan (arg0, type, true);
9888 CASE_FLT_FN (BUILT_IN_NANS):
9889 return fold_builtin_nan (arg0, type, false);
/* Rounding builtins. */
9891 CASE_FLT_FN (BUILT_IN_FLOOR):
9892 return fold_builtin_floor (fndecl, arg0);
9894 CASE_FLT_FN (BUILT_IN_CEIL):
9895 return fold_builtin_ceil (fndecl, arg0);
9897 CASE_FLT_FN (BUILT_IN_TRUNC):
9898 return fold_builtin_trunc (fndecl, arg0);
9900 CASE_FLT_FN (BUILT_IN_ROUND):
9901 return fold_builtin_round (fndecl, arg0);
9903 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9904 CASE_FLT_FN (BUILT_IN_RINT):
9905 return fold_trunc_transparent_mathfn (fndecl, arg0);
9907 CASE_FLT_FN (BUILT_IN_LCEIL):
9908 CASE_FLT_FN (BUILT_IN_LLCEIL):
9909 CASE_FLT_FN (BUILT_IN_LFLOOR):
9910 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9911 CASE_FLT_FN (BUILT_IN_LROUND):
9912 CASE_FLT_FN (BUILT_IN_LLROUND):
9913 return fold_builtin_int_roundingfn (fndecl, arg0);
9915 CASE_FLT_FN (BUILT_IN_LRINT):
9916 CASE_FLT_FN (BUILT_IN_LLRINT):
9917 return fold_fixed_mathfn (fndecl, arg0);
/* Integer bit-twiddling builtins. */
9919 case BUILT_IN_BSWAP32:
9920 case BUILT_IN_BSWAP64:
9921 return fold_builtin_bswap (fndecl, arg0);
9923 CASE_INT_FN (BUILT_IN_FFS):
9924 CASE_INT_FN (BUILT_IN_CLZ):
9925 CASE_INT_FN (BUILT_IN_CTZ):
9926 CASE_INT_FN (BUILT_IN_POPCOUNT):
9927 CASE_INT_FN (BUILT_IN_PARITY):
9928 return fold_builtin_bitop (fndecl, arg0);
9930 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9931 return fold_builtin_signbit (arg0, type);
9933 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9934 return fold_builtin_significand (arg0, type);
9936 CASE_FLT_FN (BUILT_IN_ILOGB):
9937 CASE_FLT_FN (BUILT_IN_LOGB):
9938 return fold_builtin_logb (arg0, type);
/* Character classification builtins. */
9940 case BUILT_IN_ISASCII:
9941 return fold_builtin_isascii (arg0);
9943 case BUILT_IN_TOASCII:
9944 return fold_builtin_toascii (arg0);
9946 case BUILT_IN_ISDIGIT:
9947 return fold_builtin_isdigit (arg0);
/* FP classification builtins, including decimal-float variants. */
9949 CASE_FLT_FN (BUILT_IN_FINITE):
9950 case BUILT_IN_FINITED32:
9951 case BUILT_IN_FINITED64:
9952 case BUILT_IN_FINITED128:
9953 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9955 CASE_FLT_FN (BUILT_IN_ISINF):
9956 case BUILT_IN_ISINFD32:
9957 case BUILT_IN_ISINFD64:
9958 case BUILT_IN_ISINFD128:
9959 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9961 CASE_FLT_FN (BUILT_IN_ISNAN):
9962 case BUILT_IN_ISNAND32:
9963 case BUILT_IN_ISNAND64:
9964 case BUILT_IN_ISNAND128:
9965 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9967 case BUILT_IN_PRINTF:
9968 case BUILT_IN_PRINTF_UNLOCKED:
9969 case BUILT_IN_VPRINTF:
9970 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9980 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9981 IGNORE is true if the result of the function call is ignored. This
9982 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): return type, the switch header, break;s between cases and
   the final return NULL_TREE are elided in this view. */
9985 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
9987 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9988 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* Bessel jn/yn need MPFR >= 2.3.0. */
9992 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9993 CASE_FLT_FN (BUILT_IN_JN):
9994 if (validate_arg (arg0, INTEGER_TYPE)
9995 && validate_arg (arg1, REAL_TYPE))
9996 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9999 CASE_FLT_FN (BUILT_IN_YN):
10000 if (validate_arg (arg0, INTEGER_TYPE)
10001 && validate_arg (arg1, REAL_TYPE))
10002 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10006 CASE_FLT_FN (BUILT_IN_DREM):
10007 CASE_FLT_FN (BUILT_IN_REMAINDER):
10008 if (validate_arg (arg0, REAL_TYPE)
10009 && validate_arg(arg1, REAL_TYPE))
10010 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10013 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10014 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10015 if (validate_arg (arg0, REAL_TYPE)
10016 && validate_arg(arg1, POINTER_TYPE))
10017 return do_mpfr_lgamma_r (arg0, arg1, type);
10021 CASE_FLT_FN (BUILT_IN_ATAN2):
10022 if (validate_arg (arg0, REAL_TYPE)
10023 && validate_arg(arg1, REAL_TYPE))
10024 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10027 CASE_FLT_FN (BUILT_IN_FDIM):
10028 if (validate_arg (arg0, REAL_TYPE)
10029 && validate_arg(arg1, REAL_TYPE))
10030 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10033 CASE_FLT_FN (BUILT_IN_HYPOT):
10034 return fold_builtin_hypot (fndecl, arg0, arg1, type);
/* ldexp knows base 2; scalbn/scalbln must check the mode's radix. */
10036 CASE_FLT_FN (BUILT_IN_LDEXP):
10037 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10038 CASE_FLT_FN (BUILT_IN_SCALBN):
10039 CASE_FLT_FN (BUILT_IN_SCALBLN):
10040 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10042 CASE_FLT_FN (BUILT_IN_FREXP):
10043 return fold_builtin_frexp (arg0, arg1, type);
10045 CASE_FLT_FN (BUILT_IN_MODF):
10046 return fold_builtin_modf (arg0, arg1, type);
/* String/memory builtins. */
10048 case BUILT_IN_BZERO:
10049 return fold_builtin_bzero (arg0, arg1, ignore);
10051 case BUILT_IN_FPUTS:
10052 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10054 case BUILT_IN_FPUTS_UNLOCKED:
10055 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10057 case BUILT_IN_STRSTR:
10058 return fold_builtin_strstr (arg0, arg1, type);
10060 case BUILT_IN_STRCAT:
10061 return fold_builtin_strcat (arg0, arg1);
10063 case BUILT_IN_STRSPN:
10064 return fold_builtin_strspn (arg0, arg1);
10066 case BUILT_IN_STRCSPN:
10067 return fold_builtin_strcspn (arg0, arg1);
10069 case BUILT_IN_STRCHR:
10070 case BUILT_IN_INDEX:
10071 return fold_builtin_strchr (arg0, arg1, type);
10073 case BUILT_IN_STRRCHR:
10074 case BUILT_IN_RINDEX:
10075 return fold_builtin_strrchr (arg0, arg1, type);
10077 case BUILT_IN_STRCPY:
10078 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10080 case BUILT_IN_STRCMP:
10081 return fold_builtin_strcmp (arg0, arg1);
10083 case BUILT_IN_STRPBRK:
10084 return fold_builtin_strpbrk (arg0, arg1, type);
10086 case BUILT_IN_EXPECT:
10087 return fold_builtin_expect (arg0);
10089 CASE_FLT_FN (BUILT_IN_POW):
10090 return fold_builtin_pow (fndecl, arg0, arg1, type);
10092 CASE_FLT_FN (BUILT_IN_POWI):
10093 return fold_builtin_powi (fndecl, arg0, arg1, type);
10095 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10096 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10098 CASE_FLT_FN (BUILT_IN_FMIN):
10099 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10101 CASE_FLT_FN (BUILT_IN_FMAX):
10102 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
/* Unordered comparisons: the codes passed give the OPPOSITE of the
   desired result (see fold_builtin_unordered_cmp). */
10104 case BUILT_IN_ISGREATER:
10105 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10106 case BUILT_IN_ISGREATEREQUAL:
10107 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10108 case BUILT_IN_ISLESS:
10109 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10110 case BUILT_IN_ISLESSEQUAL:
10111 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10112 case BUILT_IN_ISLESSGREATER:
10113 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10114 case BUILT_IN_ISUNORDERED:
10115 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10118 /* We do the folding for va_start in the expander. */
10119 case BUILT_IN_VA_START:
10122 case BUILT_IN_SPRINTF:
10123 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10125 case BUILT_IN_OBJECT_SIZE:
10126 return fold_builtin_object_size (arg0, arg1);
10128 case BUILT_IN_PRINTF:
10129 case BUILT_IN_PRINTF_UNLOCKED:
10130 case BUILT_IN_VPRINTF:
10131 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10133 case BUILT_IN_PRINTF_CHK:
10134 case BUILT_IN_VPRINTF_CHK:
/* The _chk forms take a flag first; only fold when it is a plain
   side-effect-free integer. */
10135 if (!validate_arg (arg0, INTEGER_TYPE)
10136 || TREE_SIDE_EFFECTS (arg0))
10139 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10142 case BUILT_IN_FPRINTF:
10143 case BUILT_IN_FPRINTF_UNLOCKED:
10144 case BUILT_IN_VFPRINTF:
10145 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10154 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10155 and ARG2. IGNORE is true if the result of the function call is ignored.
10156 This function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): return type, the switch header, break;s and the final
   return NULL_TREE are elided in this view. */
10159 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10161 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10162 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10166 CASE_FLT_FN (BUILT_IN_SINCOS):
10167 return fold_builtin_sincos (arg0, arg1, arg2);
10169 CASE_FLT_FN (BUILT_IN_FMA):
10170 if (validate_arg (arg0, REAL_TYPE)
10171 && validate_arg(arg1, REAL_TYPE)
10172 && validate_arg(arg2, REAL_TYPE))
10173 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10176 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10177 CASE_FLT_FN (BUILT_IN_REMQUO):
10178 if (validate_arg (arg0, REAL_TYPE)
10179 && validate_arg(arg1, REAL_TYPE)
10180 && validate_arg(arg2, POINTER_TYPE))
10181 return do_mpfr_remquo (arg0, arg1, arg2);
10185 case BUILT_IN_MEMSET:
10186 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
/* bcopy(src, dst, n): note the swapped argument order vs memmove. */
10188 case BUILT_IN_BCOPY:
10189 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10191 case BUILT_IN_MEMCPY:
10192 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10194 case BUILT_IN_MEMPCPY:
10195 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10197 case BUILT_IN_MEMMOVE:
10198 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10200 case BUILT_IN_STRNCAT:
10201 return fold_builtin_strncat (arg0, arg1, arg2);
10203 case BUILT_IN_STRNCPY:
10204 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10206 case BUILT_IN_STRNCMP:
10207 return fold_builtin_strncmp (arg0, arg1, arg2);
10209 case BUILT_IN_MEMCHR:
10210 return fold_builtin_memchr (arg0, arg1, arg2, type);
10212 case BUILT_IN_BCMP:
10213 case BUILT_IN_MEMCMP:
10214 return fold_builtin_memcmp (arg0, arg1, arg2);;
10216 case BUILT_IN_SPRINTF:
10217 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10219 case BUILT_IN_STRCPY_CHK:
10220 case BUILT_IN_STPCPY_CHK:
10221 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10224 case BUILT_IN_STRCAT_CHK:
10225 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10227 case BUILT_IN_PRINTF_CHK:
10228 case BUILT_IN_VPRINTF_CHK:
/* Only fold when the leading flag argument is a side-effect-free int. */
10229 if (!validate_arg (arg0, INTEGER_TYPE)
10230 || TREE_SIDE_EFFECTS (arg0))
10233 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10236 case BUILT_IN_FPRINTF:
10237 case BUILT_IN_FPRINTF_UNLOCKED:
10238 case BUILT_IN_VFPRINTF:
10239 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10241 case BUILT_IN_FPRINTF_CHK:
10242 case BUILT_IN_VFPRINTF_CHK:
/* Here the flag is the SECOND argument (after the stream). */
10243 if (!validate_arg (arg1, INTEGER_TYPE)
10244 || TREE_SIDE_EFFECTS (arg1))
10247 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10256 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10257 ARG2, and ARG3. IGNORE is true if the result of the function call is
10258 ignored. This function returns NULL_TREE if no simplification was
   possible (comment continuation elided in this view). */
10262 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10265 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10269 case BUILT_IN_MEMCPY_CHK:
10270 case BUILT_IN_MEMPCPY_CHK:
10271 case BUILT_IN_MEMMOVE_CHK:
10272 case BUILT_IN_MEMSET_CHK:
10273 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10275 DECL_FUNCTION_CODE (fndecl));
10277 case BUILT_IN_STRNCPY_CHK:
10278 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10280 case BUILT_IN_STRNCAT_CHK:
10281 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10283 case BUILT_IN_FPRINTF_CHK:
10284 case BUILT_IN_VFPRINTF_CHK:
/* As in fold_builtin_3: the flag argument follows the stream and must be
   a side-effect-free integer for folding to be valid. */
10285 if (!validate_arg (arg1, INTEGER_TYPE)
10286 || TREE_SIDE_EFFECTS (arg1))
10289 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10299 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10300 arguments, where NARGS <= 4. IGNORE is true if the result of the
10301 function call is ignored. This function returns NULL_TREE if no
10302 simplification was possible. Note that this only folds builtins with
10303 fixed argument patterns. Foldings that do varargs-to-varargs
10304 transformations, or that match calls with more than 4 arguments,
10305 need to be handled with fold_builtin_varargs instead. */
10307 #define MAX_ARGS_TO_FOLD_BUILTIN 4
/* NOTE(review): return type, the switch over NARGS and the final return
   are elided in this view. */
10310 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10312 tree ret = NULL_TREE;
10316 ret = fold_builtin_0 (fndecl, ignore);
10319 ret = fold_builtin_1 (fndecl, args[0], ignore);
10322 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10325 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10328 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
/* Wrap the folded result in a NOP and suppress warnings so the caller's
   "statement with no effect" style diagnostics don't fire on it. */
10336 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10337 TREE_NO_WARNING (ret) = 1;
10343 /* Builtins with folding operations that operate on "..." arguments
10344 need special handling; we need to store the arguments in a convenient
10345 data structure before attempting any folding. Fortunately there are
10346 only a few builtins that fall into this category. FNDECL is the
10347 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10348 result of the function call is ignored. */
/* NOTE(review): return type, the switch header/default and the final
   return are elided in this view. */
10351 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10353 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10354 tree ret = NULL_TREE;
10358 case BUILT_IN_SPRINTF_CHK:
10359 case BUILT_IN_VSPRINTF_CHK:
10360 ret = fold_builtin_sprintf_chk (exp, fcode);
10363 case BUILT_IN_SNPRINTF_CHK:
10364 case BUILT_IN_VSNPRINTF_CHK:
10365 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
/* As in fold_builtin_n: wrap in a no-warning NOP. */
10372 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10373 TREE_NO_WARNING (ret) = 1;
10379 /* A wrapper function for builtin folding that prevents warnings for
10380 "statement without effect" and the like, caused by removing the
10381 call node earlier than the warning is generated. */
/* NOTE(review): return type, braces and the final return are elided in
   this view. */
10384 fold_call_expr (tree exp, bool ignore)
10386 tree ret = NULL_TREE;
10387 tree fndecl = get_callee_fndecl (exp);
10389 && TREE_CODE (fndecl) == FUNCTION_DECL
10390 && DECL_BUILT_IN (fndecl))
10392 /* FIXME: Don't use a list in this interface. */
/* Machine-specific builtins are delegated to the target hook. */
10393 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10394 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10397 int nargs = call_expr_nargs (exp);
10398 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10400 tree *args = CALL_EXPR_ARGP (exp);
10401 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10404 ret = fold_builtin_varargs (fndecl, exp, ignore);
10407 /* Propagate location information from original call to
10408 expansion of builtin. Otherwise things like
10409 maybe_emit_chk_warning, that operate on the expansion
10410 of a builtin, will use the wrong location information. */
10411 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10413 tree realret = ret;
/* Look through the no-warning NOP added by fold_builtin_n. */
10414 if (TREE_CODE (ret) == NOP_EXPR)
10415 realret = TREE_OPERAND (ret, 0);
10416 if (CAN_HAVE_LOCATION_P (realret)
10417 && !EXPR_HAS_LOCATION (realret))
10418 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10427 /* Conveniently construct a function call expression. FNDECL names the
10428 function to be called and ARGLIST is a TREE_LIST of arguments. */
/* NOTE(review): return type, braces and the declaration of I are elided
   in this view. */
10431 build_function_call_expr (tree fndecl, tree arglist)
10433 tree fntype = TREE_TYPE (fndecl);
10434 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10435 int n = list_length (arglist);
/* Flatten the TREE_LIST into a stack-allocated array for the array-based
   call builder. */
10436 tree *argarray = (tree *) alloca (n * sizeof (tree));
10439 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10440 argarray[i] = TREE_VALUE (arglist);
10441 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10444 /* Conveniently construct a function call expression. FNDECL names the
10445 function to be called, N is the number of arguments, and the "..."
10446 parameters are the argument expressions. */
/* NOTE(review): return type, va_start/va_end and braces are elided in
   this view. */
10449 build_call_expr (tree fndecl, int n, ...)
10452 tree fntype = TREE_TYPE (fndecl);
10453 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10454 tree *argarray = (tree *) alloca (n * sizeof (tree));
/* Collect the N variadic tree arguments into the array. */
10458 for (i = 0; i < n; i++)
10459 argarray[i] = va_arg (ap, tree);
10461 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10464 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10465 N arguments are passed in the array ARGARRAY. */
/* NOTE(review): return type, the remaining parameter lines, braces and
   some intermediate returns are elided in this view. */
10468 fold_builtin_call_array (tree type,
10473 tree ret = NULL_TREE;
10477 if (TREE_CODE (fn) == ADDR_EXPR)
10479 tree fndecl = TREE_OPERAND (fn, 0);
10480 if (TREE_CODE (fndecl) == FUNCTION_DECL
10481 && DECL_BUILT_IN (fndecl))
10483 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
/* The target hook still takes a TREE_LIST; build one back-to-front. */
10485 tree arglist = NULL_TREE;
10486 for (i = n - 1; i >= 0; i--)
10487 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10488 ret = targetm.fold_builtin (fndecl, arglist, false);
10492 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10494 /* First try the transformations that don't require consing up
   a CALL_EXPR (comment continuation elided in this view). */
10496 ret = fold_builtin_n (fndecl, argarray, n, false);
10501 /* If we got this far, we need to build an exp. */
10502 exp = build_call_array (type, fn, n, argarray);
10503 ret = fold_builtin_varargs (fndecl, exp, false);
10504 return ret ? ret : exp;
10508 return build_call_array (type, fn, n, argarray);
10511 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10512 along with N new arguments specified as the "..." parameters. SKIP
10513 is the number of arguments in EXP to be omitted. This function is used
10514 to do varargs-to-varargs transformations. */
10517 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10519 int oldnargs = call_expr_nargs (exp);
10520 int nargs = oldnargs - skip + n;
10521 tree fntype = TREE_TYPE (fndecl);
10522 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* The N new arguments come first, followed by the surviving tail of
EXP's arguments (those after the first SKIP). */
10530 buffer = alloca (nargs * sizeof (tree));
10532 for (i = 0; i < n; i++)
10533 buffer[i] = va_arg (ap, tree);
10535 for (j = skip; j < oldnargs; j++, i++)
10536 buffer[i] = CALL_EXPR_ARG (exp, j);
/* No new arguments: point directly into EXP's existing argument
vector instead of copying. */
10539 buffer = CALL_EXPR_ARGP (exp) + skip;
10541 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10544 /* Validate a single argument ARG against a tree code CODE representing
the expected type class. POINTER_TYPE accepts any pointer type;
any other CODE must match the argument's type code exactly. */
10548 validate_arg (tree arg, enum tree_code code)
10552 else if (code == POINTER_TYPE)
/* Any pointer-like type satisfies a POINTER_TYPE specifier. */
10553 return POINTER_TYPE_P (TREE_TYPE (arg));
10554 return code == TREE_CODE (TREE_TYPE (arg));
10557 /* This function validates the types of a function call argument list
10558 against a specified list of tree_codes. If the last specifier is a 0,
10559 that represents an ellipses, otherwise the last specifier must be a
VOID_TYPE (see the callers below, e.g. expand_builtin_object_size). */
10563 validate_arglist (tree callexpr, ...)
10565 enum tree_code code;
10568 call_expr_arg_iterator iter;
10571 va_start (ap, callexpr);
10572 init_call_expr_arg_iterator (callexpr, &iter);
/* Walk the variadic specifier list in parallel with the call's
actual arguments. */
10576 code = va_arg (ap, enum tree_code);
10580 /* This signifies an ellipses, any further arguments are all ok. */
10584 /* This signifies an endlink, if no arguments remain, return
10585 true, otherwise return false. */
10586 res = !more_call_expr_args_p (&iter);
10589 /* If no parameters remain or the parameter's code does not
10590 match the specified code, return false. Otherwise continue
10591 checking any remaining arguments. */
10592 arg = next_call_expr_arg (&iter);
10593 if (!validate_arg (arg, code))
10600 /* We need gotos here since we can only have one VA_CLOSE in a
function. */
10608 /* Default target-specific builtin expander that does nothing.
All parameters are intentionally unused; targets with machine-specific
builtins override this hook. */
10611 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10612 rtx target ATTRIBUTE_UNUSED,
10613 rtx subtarget ATTRIBUTE_UNUSED,
10614 enum machine_mode mode ATTRIBUTE_UNUSED,
10615 int ignore ATTRIBUTE_UNUSED)
10620 /* Returns true if EXP represents data that would potentially reside
10621 in a readonly section. */
10624 readonly_data_expr (tree exp)
/* Only address expressions can point at named data. */
10628 if (TREE_CODE (exp) != ADDR_EXPR)
10631 exp = get_base_address (TREE_OPERAND (exp, 0));
10635 /* Make sure we call decl_readonly_section only for trees it
10636 can handle (since it returns true for everything it doesn't
know). */
10638 if (TREE_CODE (exp) == STRING_CST
10639 || TREE_CODE (exp) == CONSTRUCTOR
10640 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10641 return decl_readonly_section (exp, 0);
10646 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10647 to the call, and TYPE is its return type.
10649 Return NULL_TREE if no simplification was possible, otherwise return the
10650 simplified form of the call as a tree.
10652 The simplified form may be a constant or other expression which
10653 computes the same value, but in a more efficient manner (including
10654 calls to other builtin functions).
10656 The call may contain arguments which need to be evaluated, but
10657 which are not useful to determine the result of the call. In
10658 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10659 COMPOUND_EXPR will be an argument which must be evaluated.
10660 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10661 COMPOUND_EXPR in the chain will contain the tree for the simplified
10662 form of the builtin function call. */
10665 fold_builtin_strstr (tree s1, tree s2, tree type)
10667 if (!validate_arg (s1, POINTER_TYPE)
10668 || !validate_arg (s2, POINTER_TYPE))
10673 const char *p1, *p2;
10675 p2 = c_getstr (s2);
10679 p1 = c_getstr (s1);
/* Both strings are compile-time constants: fold using the host
strstr. */
10682 const char *r = strstr (p1, p2);
/* Needle not found: the result is a null pointer of S1's type. */
10686 return build_int_cst (TREE_TYPE (s1), 0);
10688 /* Return an offset into the constant string argument. */
10689 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10690 s1, size_int (r - p1));
10691 return fold_convert (type, tem);
10694 /* The argument is const char *, and the result is char *, so we need
10695 a type conversion here to avoid a warning. */
10697 return fold_convert (type, s1);
/* Single-character needle: lower strstr to strchr when the
implicit strchr decl is available. */
10702 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10706 /* New argument list transforming strstr(s1, s2) to
10707 strchr(s1, s2[0]). */
10708 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10712 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10713 the call, and TYPE is its return type.
10715 Return NULL_TREE if no simplification was possible, otherwise return the
10716 simplified form of the call as a tree.
10718 The simplified form may be a constant or other expression which
10719 computes the same value, but in a more efficient manner (including
10720 calls to other builtin functions).
10722 The call may contain arguments which need to be evaluated, but
10723 which are not useful to determine the result of the call. In
10724 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10725 COMPOUND_EXPR will be an argument which must be evaluated.
10726 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10727 COMPOUND_EXPR in the chain will contain the tree for the simplified
10728 form of the builtin function call. */
10731 fold_builtin_strchr (tree s1, tree s2, tree type)
10733 if (!validate_arg (s1, POINTER_TYPE)
10734 || !validate_arg (s2, INTEGER_TYPE))
/* Only a compile-time-constant search character can be folded. */
10740 if (TREE_CODE (s2) != INTEGER_CST)
10743 p1 = c_getstr (s1);
/* target_char_cast maps the constant to a host character; a nonzero
return presumably signals failure (handling elided in this excerpt). */
10750 if (target_char_cast (s2, &c))
10753 r = strchr (p1, c)
10756 return build_int_cst (TREE_TYPE (s1), 0);
10758 /* Return an offset into the constant string argument. */
10759 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10760 s1, size_int (r - p1));
10761 return fold_convert (type, tem);
10767 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10768 the call, and TYPE is its return type.
10770 Return NULL_TREE if no simplification was possible, otherwise return the
10771 simplified form of the call as a tree.
10773 The simplified form may be a constant or other expression which
10774 computes the same value, but in a more efficient manner (including
10775 calls to other builtin functions).
10777 The call may contain arguments which need to be evaluated, but
10778 which are not useful to determine the result of the call. In
10779 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10780 COMPOUND_EXPR will be an argument which must be evaluated.
10781 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10782 COMPOUND_EXPR in the chain will contain the tree for the simplified
10783 form of the builtin function call. */
10786 fold_builtin_strrchr (tree s1, tree s2, tree type)
10788 if (!validate_arg (s1, POINTER_TYPE)
10789 || !validate_arg (s2, INTEGER_TYPE))
/* Only a compile-time-constant search character can be folded. */
10796 if (TREE_CODE (s2) != INTEGER_CST)
10799 p1 = c_getstr (s1);
10806 if (target_char_cast (s2, &c))
/* Constant string: evaluate with the host strrchr. */
10809 r = strrchr (p1, c);
10812 return build_int_cst (TREE_TYPE (s1), 0);
10814 /* Return an offset into the constant string argument. */
10815 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10816 s1, size_int (r - p1));
10817 return fold_convert (type, tem);
/* Non-constant string: only the '\0' search case can be simplified,
since the first and last occurrence of '\0' coincide. */
10820 if (! integer_zerop (s2))
10823 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10827 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10828 return build_call_expr (fn, 2, s1, s2);
10832 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10833 to the call, and TYPE is its return type.
10835 Return NULL_TREE if no simplification was possible, otherwise return the
10836 simplified form of the call as a tree.
10838 The simplified form may be a constant or other expression which
10839 computes the same value, but in a more efficient manner (including
10840 calls to other builtin functions).
10842 The call may contain arguments which need to be evaluated, but
10843 which are not useful to determine the result of the call. In
10844 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10845 COMPOUND_EXPR will be an argument which must be evaluated.
10846 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10847 COMPOUND_EXPR in the chain will contain the tree for the simplified
10848 form of the builtin function call. */
10851 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10853 if (!validate_arg (s1, POINTER_TYPE)
10854 || !validate_arg (s2, POINTER_TYPE))
10859 const char *p1, *p2;
10861 p2 = c_getstr (s2);
10865 p1 = c_getstr (s1);
/* Both strings constant: evaluate with the host strpbrk. */
10868 const char *r = strpbrk (p1, p2);
/* No match: the result is a null pointer of S1's type. */
10872 return build_int_cst (TREE_TYPE (s1), 0);
10874 /* Return an offset into the constant string argument. */
10875 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10876 s1, size_int (r - p1));
10877 return fold_convert (type, tem);
10881 /* strpbrk(x, "") == NULL.
10882 Evaluate and ignore s1 in case it had side-effects. */
10883 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10886 return NULL_TREE; /* Really call strpbrk. */
/* Single-character accept set: lower strpbrk to strchr. */
10888 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10892 /* New argument list transforming strpbrk(s1, s2) to
10893 strchr(s1, s2[0]). */
10894 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10898 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
to the call.
10901 Return NULL_TREE if no simplification was possible, otherwise return the
10902 simplified form of the call as a tree.
10904 The simplified form may be a constant or other expression which
10905 computes the same value, but in a more efficient manner (including
10906 calls to other builtin functions).
10908 The call may contain arguments which need to be evaluated, but
10909 which are not useful to determine the result of the call. In
10910 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10911 COMPOUND_EXPR will be an argument which must be evaluated.
10912 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10913 COMPOUND_EXPR in the chain will contain the tree for the simplified
10914 form of the builtin function call. */
10917 fold_builtin_strcat (tree dst, tree src)
10919 if (!validate_arg (dst, POINTER_TYPE)
10920 || !validate_arg (src, POINTER_TYPE))
/* P is non-NULL only when SRC is a constant string. */
10924 const char *p = c_getstr (src);
10926 /* If the string length is zero, return the dst parameter. */
10927 if (p && *p == '\0')
10934 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10935 arguments to the call.
10937 Return NULL_TREE if no simplification was possible, otherwise return the
10938 simplified form of the call as a tree.
10940 The simplified form may be a constant or other expression which
10941 computes the same value, but in a more efficient manner (including
10942 calls to other builtin functions).
10944 The call may contain arguments which need to be evaluated, but
10945 which are not useful to determine the result of the call. In
10946 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10947 COMPOUND_EXPR will be an argument which must be evaluated.
10948 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10949 COMPOUND_EXPR in the chain will contain the tree for the simplified
10950 form of the builtin function call. */
10953 fold_builtin_strncat (tree dst, tree src, tree len)
10955 if (!validate_arg (dst, POINTER_TYPE)
10956 || !validate_arg (src, POINTER_TYPE)
10957 || !validate_arg (len, INTEGER_TYPE))
/* P is non-NULL only when SRC is a constant string. */
10961 const char *p = c_getstr (src);
10963 /* If the requested length is zero, or the src parameter string
10964 length is zero, return the dst parameter. */
10965 if (integer_zerop (len) || (p && *p == '\0'))
10966 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10968 /* If the requested len is greater than or equal to the string
10969 length, call strcat. */
10970 if (TREE_CODE (len) == INTEGER_CST && p
10971 && compare_tree_int (len, strlen (p)) >= 0)
10973 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10975 /* If the replacement _DECL isn't initialized, don't do the
transformation. */
10980 return build_call_expr (fn, 2, dst, src);
10986 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
to the call.
10989 Return NULL_TREE if no simplification was possible, otherwise return the
10990 simplified form of the call as a tree.
10992 The simplified form may be a constant or other expression which
10993 computes the same value, but in a more efficient manner (including
10994 calls to other builtin functions).
10996 The call may contain arguments which need to be evaluated, but
10997 which are not useful to determine the result of the call. In
10998 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10999 COMPOUND_EXPR will be an argument which must be evaluated.
11000 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11001 COMPOUND_EXPR in the chain will contain the tree for the simplified
11002 form of the builtin function call. */
11005 fold_builtin_strspn (tree s1, tree s2)
11007 if (!validate_arg (s1, POINTER_TYPE)
11008 || !validate_arg (s2, POINTER_TYPE))
11012 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11014 /* If both arguments are constants, evaluate at compile-time. */
11017 const size_t r = strspn (p1, p2);
11018 return size_int (r);
11021 /* If either argument is "", the result is 0. */
11022 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11023 /* Evaluate and ignore both arguments in case either one has
side-effects. */
11025 return omit_two_operands (integer_type_node, integer_zero_node,
s1, s2);
11031 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
to the call.
11034 Return NULL_TREE if no simplification was possible, otherwise return the
11035 simplified form of the call as a tree.
11037 The simplified form may be a constant or other expression which
11038 computes the same value, but in a more efficient manner (including
11039 calls to other builtin functions).
11041 The call may contain arguments which need to be evaluated, but
11042 which are not useful to determine the result of the call. In
11043 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11044 COMPOUND_EXPR will be an argument which must be evaluated.
11045 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11046 COMPOUND_EXPR in the chain will contain the tree for the simplified
11047 form of the builtin function call. */
11050 fold_builtin_strcspn (tree s1, tree s2)
11052 if (!validate_arg (s1, POINTER_TYPE)
11053 || !validate_arg (s2, POINTER_TYPE))
11057 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11059 /* If both arguments are constants, evaluate at compile-time. */
11062 const size_t r = strcspn (p1, p2);
11063 return size_int (r);
11066 /* If the first argument is "", the result is 0. */
11067 if (p1 && *p1 == '\0')
11069 /* Evaluate and ignore argument s2 in case it has
11071 return omit_one_operand (integer_type_node,
11072 integer_zero_node, s2);
11075 /* If the second argument is "", return __builtin_strlen(s1). */
11076 if (p2 && *p2 == '\0')
11078 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11080 /* If the replacement _DECL isn't initialized, don't do the
transformation. */
11085 return build_call_expr (fn, 1, s1);
11091 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11092 to the call. IGNORE is true if the value returned
11093 by the builtin will be ignored. UNLOCKED is true if this is
11094 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11095 the known length of the string. Return NULL_TREE if no simplification
was possible. */
11099 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11101 /* If we're using an unlocked function, assume the other unlocked
11102 functions exist explicitly. */
11103 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11104 : implicit_built_in_decls[BUILT_IN_FPUTC];
11105 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11106 : implicit_built_in_decls[BUILT_IN_FWRITE];
11108 /* If the return value is used, don't do the transformation. */
11112 /* Verify the arguments in the original call. */
11113 if (!validate_arg (arg0, POINTER_TYPE)
11114 || !validate_arg (arg1, POINTER_TYPE))
11118 len = c_strlen (arg0, 0);
11120 /* Get the length of the string passed to fputs. If the length
11121 can't be determined, punt. */
11123 || TREE_CODE (len) != INTEGER_CST)
/* Dispatch on the constant string length: 0, 1, or longer. */
11126 switch (compare_tree_int (len, 1))
11128 case -1: /* length is 0, delete the call entirely. */
11129 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11131 case 0: /* length is 1, call fputc. */
11133 const char *p = c_getstr (arg0);
11138 return build_call_expr (fn_fputc, 2,
11139 build_int_cst (NULL_TREE, p[0]), arg1);
11145 case 1: /* length is greater than 1, call fwrite. */
11147 /* If optimizing for size keep fputs. */
11150 /* New argument list transforming fputs(string, stream) to
11151 fwrite(string, 1, len, stream). */
11153 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11158 gcc_unreachable ();
11163 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11164 produced. False otherwise. This is done so that we don't output the error
11165 or warning twice or three times. */
11167 fold_builtin_next_arg (tree exp, bool va_start_p)
11169 tree fntype = TREE_TYPE (current_function_decl);
11170 int nargs = call_expr_nargs (exp);
/* va_start is only meaningful in a function declared with "..."
in its parameter list. */
11173 if (TYPE_ARG_TYPES (fntype) == 0
11174 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11175 == void_type_node))
11177 error ("%<va_start%> used in function with fixed args");
11183 if (va_start_p && (nargs != 2))
11185 error ("wrong number of arguments to function %<va_start%>");
11188 arg = CALL_EXPR_ARG (exp, 1);
11190 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11191 when we checked the arguments and if needed issued a warning. */
11196 /* Evidently an out of date version of <stdarg.h>; can't validate
11197 va_start's second argument, but can still work as intended. */
11198 warning (0, "%<__builtin_next_arg%> called without an argument");
11201 else if (nargs > 1)
11203 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11206 arg = CALL_EXPR_ARG (exp, 0);
11209 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11210 or __builtin_next_arg (0) the first time we see it, after checking
11211 the arguments and if needed issuing a warning. */
11212 if (!integer_zerop (arg))
11214 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11216 /* Strip off all nops for the sake of the comparison. This
11217 is not quite the same as STRIP_NOPS. It does more.
11218 We must also strip off INDIRECT_EXPR for C++ reference
parameters. */
11220 while (TREE_CODE (arg) == NOP_EXPR
11221 || TREE_CODE (arg) == CONVERT_EXPR
11222 || TREE_CODE (arg) == NON_LVALUE_EXPR
11223 || TREE_CODE (arg) == INDIRECT_REF)
11224 arg = TREE_OPERAND (arg, 0);
11225 if (arg != last_parm)
11227 /* FIXME: Sometimes the tree optimizers leave us with something
other than the last argument even though the user wrote the
last argument. We only warn here, so wrong code may still be
generated because of it. */
11232 warning (0, "second parameter of %<va_start%> not last named argument");
11234 /* We want to verify the second parameter just once before the tree
11235 optimizers are run and then avoid keeping it in the tree,
11236 as otherwise we could warn even for correct code like:
11237 void foo (int i, ...)
11238 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11240 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11242 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11248 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11249 ORIG may be null if this is a 2-argument call. We don't attempt to
11250 simplify calls with more than 3 arguments.
11252 Return NULL_TREE if no simplification was possible, otherwise return the
11253 simplified form of the call as a tree. If IGNORED is true, it means that
11254 the caller does not use the returned value of the function. */
11257 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11260 const char *fmt_str = NULL;
11262 /* Verify the required arguments in the original call. We deal with two
11263 types of sprintf() calls: 'sprintf (str, fmt)' and
11264 'sprintf (dest, "%s", orig)'. */
11265 if (!validate_arg (dest, POINTER_TYPE)
11266 || !validate_arg (fmt, POINTER_TYPE))
11268 if (orig && !validate_arg (orig, POINTER_TYPE))
11271 /* Check whether the format is a literal string constant. */
11272 fmt_str = c_getstr (fmt);
11273 if (fmt_str == NULL)
11277 retval = NULL_TREE;
11279 if (!init_target_chars ())
11282 /* If the format doesn't contain % args or %%, use strcpy. */
11283 if (strchr (fmt_str, target_percent) == NULL)
11285 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11290 /* Don't optimize sprintf (buf, "abc", ptr++). */
11294 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11295 'format' is known to contain no % formats. */
11296 call = build_call_expr (fn, 2, dest, fmt);
/* The sprintf result is the number of characters written, which
for a %-free format is just its length. */
11298 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11301 /* If the format is "%s", use strcpy if the result isn't used. */
11302 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11305 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11310 /* Don't crash on sprintf (str1, "%s"). */
11314 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11317 retval = c_strlen (orig, 1);
11318 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11321 call = build_call_expr (fn, 2, dest, orig);
/* If both the replacement call and a constant return value were
produced, chain them with a COMPOUND_EXPR. */
11324 if (call && retval)
11326 retval = fold_convert
11327 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11329 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11335 /* Expand a call EXP to __builtin_object_size. */
11338 expand_builtin_object_size (tree exp)
11341 int object_size_type;
11342 tree fndecl = get_callee_fndecl (exp);
11343 location_t locus = EXPR_LOCATION (exp);
/* Misuse is diagnosed and compiled into a trap. */
11345 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11347 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11349 expand_builtin_trap ();
11353 ost = CALL_EXPR_ARG (exp, 1);
/* The object-size type selector must be a constant in [0, 3]. */
11356 if (TREE_CODE (ost) != INTEGER_CST
11357 || tree_int_cst_sgn (ost) < 0
11358 || compare_tree_int (ost, 3) > 0)
11360 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11362 expand_builtin_trap ();
11366 object_size_type = tree_low_cst (ost, 0);
/* Unknown size: -1 for types 0 and 1, 0 for types 2 and 3
(see also fold_builtin_object_size). */
11368 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11371 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11372 FCODE is the BUILT_IN_* to use.
11373 Return NULL_RTX if we failed; the caller should emit a normal call,
11374 otherwise try to get the result in TARGET, if convenient (and in
11375 mode MODE if that's convenient). */
11378 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11379 enum built_in_function fcode)
11381 tree dest, src, len, size;
/* For memset the second argument is the fill byte, not a pointer. */
11383 if (!validate_arglist (exp,
11385 fcode == BUILT_IN_MEMSET_CHK
11386 ? INTEGER_TYPE : POINTER_TYPE,
11387 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11390 dest = CALL_EXPR_ARG (exp, 0);
11391 src = CALL_EXPR_ARG (exp, 1);
11392 len = CALL_EXPR_ARG (exp, 2);
11393 size = CALL_EXPR_ARG (exp, 3);
/* SIZE (the object size) must be a known constant to do anything. */
11395 if (! host_integerp (size, 1))
11398 if (host_integerp (len, 1) || integer_all_onesp (size))
/* A constant LEN larger than a known SIZE always overflows. */
11402 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11404 location_t locus = EXPR_LOCATION (exp);
11405 warning (0, "%Hcall to %D will always overflow destination buffer",
11406 &locus, get_callee_fndecl (exp));
11411 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11412 mem{cpy,pcpy,move,set} is available. */
11415 case BUILT_IN_MEMCPY_CHK:
11416 fn = built_in_decls[BUILT_IN_MEMCPY];
11418 case BUILT_IN_MEMPCPY_CHK:
11419 fn = built_in_decls[BUILT_IN_MEMPCPY];
11421 case BUILT_IN_MEMMOVE_CHK:
11422 fn = built_in_decls[BUILT_IN_MEMMOVE];
11424 case BUILT_IN_MEMSET_CHK:
11425 fn = built_in_decls[BUILT_IN_MEMSET];
/* Replace the checked call by the unchecked variant, preserving
the tail-call flag. */
11434 fn = build_call_expr (fn, 3, dest, src, len);
11435 if (TREE_CODE (fn) == CALL_EXPR)
11436 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11437 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11439 else if (fcode == BUILT_IN_MEMSET_CHK)
11443 unsigned int dest_align
11444 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11446 /* If DEST is not a pointer type, call the normal function. */
11447 if (dest_align == 0)
11450 /* If SRC and DEST are the same (and not volatile), do nothing. */
11451 if (operand_equal_p (src, dest, 0))
11455 if (fcode != BUILT_IN_MEMPCPY_CHK)
11457 /* Evaluate and ignore LEN in case it has side-effects. */
11458 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11459 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN. */
11462 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11463 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11466 /* __memmove_chk special case. */
11467 if (fcode == BUILT_IN_MEMMOVE_CHK)
11469 unsigned int src_align
11470 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11472 if (src_align == 0)
11475 /* If src is categorized for a readonly section we can use
11476 normal __memcpy_chk. */
11477 if (readonly_data_expr (src))
11479 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11482 fn = build_call_expr (fn, 4, dest, src, len, size);
11483 if (TREE_CODE (fn) == CALL_EXPR)
11484 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11485 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11492 /* Emit warning if a buffer overflow is detected at compile time. */
11495 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
/* Pick out the length-like and object-size arguments, whose
positions differ per builtin. */
11503 case BUILT_IN_STRCPY_CHK:
11504 case BUILT_IN_STPCPY_CHK:
11505 /* For __strcat_chk the warning will be emitted only if overflowing
11506 by at least strlen (dest) + 1 bytes. */
11507 case BUILT_IN_STRCAT_CHK:
11508 len = CALL_EXPR_ARG (exp, 1);
11509 size = CALL_EXPR_ARG (exp, 2);
11512 case BUILT_IN_STRNCAT_CHK:
11513 case BUILT_IN_STRNCPY_CHK:
11514 len = CALL_EXPR_ARG (exp, 2);
11515 size = CALL_EXPR_ARG (exp, 3);
11517 case BUILT_IN_SNPRINTF_CHK:
11518 case BUILT_IN_VSNPRINTF_CHK:
11519 len = CALL_EXPR_ARG (exp, 1);
11520 size = CALL_EXPR_ARG (exp, 3);
11523 gcc_unreachable ();
/* SIZE of (size_t) -1 means the object size is unknown. */
11529 if (! host_integerp (size, 1) || integer_all_onesp (size))
11534 len = c_strlen (len, 1);
11535 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11538 else if (fcode == BUILT_IN_STRNCAT_CHK)
11540 tree src = CALL_EXPR_ARG (exp, 1);
11541 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11543 src = c_strlen (src, 1);
11544 if (! src || ! host_integerp (src, 1))
/* Source length unknown: overflow is possible but not certain. */
11546 locus = EXPR_LOCATION (exp);
11547 warning (0, "%Hcall to %D might overflow destination buffer",
11548 &locus, get_callee_fndecl (exp));
11551 else if (tree_int_cst_lt (src, size))
11554 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
/* Length provably exceeds the object size: certain overflow. */
11557 locus = EXPR_LOCATION (exp);
11558 warning (0, "%Hcall to %D will always overflow destination buffer",
11559 &locus, get_callee_fndecl (exp));
11562 /* Emit warning if a buffer overflow is detected at compile time
11563 in __sprintf_chk/__vsprintf_chk calls. */
11566 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11568 tree dest, size, len, fmt, flag;
11569 const char *fmt_str;
11570 int nargs = call_expr_nargs (exp);
11572 /* Verify the required arguments in the original call. */
11576 dest = CALL_EXPR_ARG (exp, 0);
11577 flag = CALL_EXPR_ARG (exp, 1);
11578 size = CALL_EXPR_ARG (exp, 2);
11579 fmt = CALL_EXPR_ARG (exp, 3);
/* SIZE of (size_t) -1 means the object size is unknown. */
11581 if (! host_integerp (size, 1) || integer_all_onesp (size))
11584 /* Check whether the format is a literal string constant. */
11585 fmt_str = c_getstr (fmt);
11586 if (fmt_str == NULL)
11589 if (!init_target_chars ())
11592 /* If the format doesn't contain % args or %%, we know its size. */
11593 if (strchr (fmt_str, target_percent) == 0)
11594 len = build_int_cstu (size_type_node, strlen (fmt_str));
11595 /* If the format is "%s" and first ... argument is a string literal,
we know its size too. */
11597 else if (fcode == BUILT_IN_SPRINTF_CHK
11598 && strcmp (fmt_str, target_percent_s) == 0)
11604 arg = CALL_EXPR_ARG (exp, 4);
11605 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11608 len = c_strlen (arg, 1);
11609 if (!len || ! host_integerp (len, 1))
/* Output length >= object size (LEN excludes the NUL): certain
overflow. */
11615 if (! tree_int_cst_lt (len, size))
11617 location_t locus = EXPR_LOCATION (exp);
11618 warning (0, "%Hcall to %D will always overflow destination buffer",
11619 &locus, get_callee_fndecl (exp));
11623 /* Fold a call to __builtin_object_size with arguments PTR and OST,
returning the computed size or NULL_TREE if it is not yet known. */
11627 fold_builtin_object_size (tree ptr, tree ost)
11629 tree ret = NULL_TREE;
11630 int object_size_type;
11632 if (!validate_arg (ptr, POINTER_TYPE)
11633 || !validate_arg (ost, INTEGER_TYPE))
/* The object-size type selector must be a constant in [0, 3]. */
11638 if (TREE_CODE (ost) != INTEGER_CST
11639 || tree_int_cst_sgn (ost) < 0
11640 || compare_tree_int (ost, 3) > 0)
11643 object_size_type = tree_low_cst (ost, 0);
11645 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11646 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11647 and (size_t) 0 for types 2 and 3. */
11648 if (TREE_SIDE_EFFECTS (ptr))
11649 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11651 if (TREE_CODE (ptr) == ADDR_EXPR)
11652 ret = build_int_cstu (size_type_node,
11653 compute_builtin_object_size (ptr, object_size_type));
11655 else if (TREE_CODE (ptr) == SSA_NAME)
11657 unsigned HOST_WIDE_INT bytes;
11659 /* If object size is not known yet, delay folding until
11660 later. Maybe subsequent passes will help determining
it. */
11662 bytes = compute_builtin_object_size (ptr, object_size_type);
11663 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11665 ret = build_int_cstu (size_type_node, bytes);
/* Make sure the computed constant fits size_t on the target. */
11670 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11671 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11672 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11679 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11680 DEST, SRC, LEN, and SIZE are the arguments to the call.
11681 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11682 code of the builtin. If MAXLEN is not NULL, it is maximum length
11683 passed as third argument. */
11686 fold_builtin_memory_chk (tree fndecl,
11687 tree dest, tree src, tree len, tree size,
11688 tree maxlen, bool ignore,
11689 enum built_in_function fcode)
/* For memset the second argument is the fill byte, not a pointer. */
11693 if (!validate_arg (dest, POINTER_TYPE)
11694 || !validate_arg (src,
11695 (fcode == BUILT_IN_MEMSET_CHK
11696 ? INTEGER_TYPE : POINTER_TYPE))
11697 || !validate_arg (len, INTEGER_TYPE)
11698 || !validate_arg (size, INTEGER_TYPE))
11701 /* If SRC and DEST are the same (and not volatile), return DEST
11702 (resp. DEST+LEN for __mempcpy_chk). */
11703 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11705 if (fcode != BUILT_IN_MEMPCPY_CHK)
11706 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11709 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11710 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
/* SIZE must be a known constant to justify dropping the check. */
11714 if (! host_integerp (size, 1))
11717 if (! integer_all_onesp (size))
11719 if (! host_integerp (len, 1))
11721 /* If LEN is not constant, try MAXLEN too.
11722 For MAXLEN only allow optimizing into non-_ocs function
11723 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11724 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11726 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11728 /* (void) __mempcpy_chk () can be optimized into
11729 (void) __memcpy_chk (). */
11730 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11734 return build_call_expr (fn, 4, dest, src, len, size);
/* The check cannot be proven redundant: keep the _chk call. */
11742 if (tree_int_cst_lt (size, maxlen))
11747 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11748 mem{cpy,pcpy,move,set} is available. */
11751 case BUILT_IN_MEMCPY_CHK:
11752 fn = built_in_decls[BUILT_IN_MEMCPY];
11754 case BUILT_IN_MEMPCPY_CHK:
11755 fn = built_in_decls[BUILT_IN_MEMPCPY];
11757 case BUILT_IN_MEMMOVE_CHK:
11758 fn = built_in_decls[BUILT_IN_MEMMOVE];
11760 case BUILT_IN_MEMSET_CHK:
11761 fn = built_in_decls[BUILT_IN_MEMSET];
/* Replace the checked builtin by its unchecked counterpart. */
11770 return build_call_expr (fn, 3, dest, src, len);
11773 /* Fold a call to the __st[rp]cpy_chk builtin.
11774 DEST, SRC, and SIZE are the arguments to the call.
11775 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11776 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11777 strings passed as second argument. */
11780 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11781 tree maxlen, bool ignore,
11782 enum built_in_function fcode)
11786 if (!validate_arg (dest, POINTER_TYPE)
11787 || !validate_arg (src, POINTER_TYPE)
11788 || !validate_arg (size, INTEGER_TYPE))
11791 /* If SRC and DEST are the same (and not volatile), return DEST. */
11792 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11793 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* SIZE must be a compile-time constant to reason about the object bound.  */
11795 if (! host_integerp (size, 1))
/* An all-ones SIZE means the object size is unknown.  */
11798 if (! integer_all_onesp (size))
11800 len = c_strlen (src, 1);
11801 if (! len || ! host_integerp (len, 1))
11803 /* If LEN is not constant, try MAXLEN too.
11804 For MAXLEN only allow optimizing into non-_ocs function
11805 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11806 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11808 if (fcode == BUILT_IN_STPCPY_CHK)
11813 /* If return value of __stpcpy_chk is ignored,
11814 optimize into __strcpy_chk. */
11815 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11819 return build_call_expr (fn, 3, dest, src, size);
11822 if (! len || TREE_SIDE_EFFECTS (len))
11825 /* If c_strlen returned something, but not a constant,
11826 transform __strcpy_chk into __memcpy_chk. */
11827 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* Copy LEN + 1 bytes so the terminating NUL is included.  */
11831 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11832 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11833 build_call_expr (fn, 4,
11834 dest, src, len, size));
11840 if (! tree_int_cst_lt (maxlen, size))
11844 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11845 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11846 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11850 return build_call_expr (fn, 2, dest, src);
11853 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11854 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11855 length passed as third argument. */
11858 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11863 if (!validate_arg (dest, POINTER_TYPE)
11864 || !validate_arg (src, POINTER_TYPE)
11865 || !validate_arg (len, INTEGER_TYPE)
11866 || !validate_arg (size, INTEGER_TYPE))
/* SIZE must be a compile-time constant to reason about the object bound.  */
11869 if (! host_integerp (size, 1))
/* An all-ones SIZE means the object size is unknown.  */
11872 if (! integer_all_onesp (size))
11874 if (! host_integerp (len, 1))
11876 /* If LEN is not constant, try MAXLEN too.
11877 For MAXLEN only allow optimizing into non-_ocs function
11878 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11879 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* Keep the checking variant when the length may exceed the object size.  */
11885 if (tree_int_cst_lt (size, maxlen))
11889 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11890 fn = built_in_decls[BUILT_IN_STRNCPY];
11894 return build_call_expr (fn, 3, dest, src, len);
11897 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11898 are the arguments to the call. */
11901 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11906 if (!validate_arg (dest, POINTER_TYPE)
11907 || !validate_arg (src, POINTER_TYPE)
11908 || !validate_arg (size, INTEGER_TYPE))
11911 p = c_getstr (src);
11912 /* If the SRC parameter is "", return DEST. */
11913 if (p && *p == '\0')
11914 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only fold when SIZE is the all-ones "unknown object size" constant;
   with a real bound we cannot prove the concatenation is safe.  */
11916 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11919 /* If __builtin_strcat_chk is used, assume strcat is available. */
11920 fn = built_in_decls[BUILT_IN_STRCAT];
11924 return build_call_expr (fn, 2, dest, src);
11927 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11931 fold_builtin_strncat_chk (tree fndecl,
11932 tree dest, tree src, tree len, tree size)
11937 if (!validate_arg (dest, POINTER_TYPE)
11938 || !validate_arg (src, POINTER_TYPE)
11939 || !validate_arg (size, INTEGER_TYPE)
11940 || !validate_arg (size, INTEGER_TYPE))
11943 p = c_getstr (src);
11944 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11945 if (p && *p == '\0')
11946 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11947 else if (integer_zerop (len))
11948 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
11950 if (! host_integerp (size, 1))
11953 if (! integer_all_onesp (size))
11955 tree src_len = c_strlen (src, 1);
11957 && host_integerp (src_len, 1)
11958 && host_integerp (len, 1)
11959 && ! tree_int_cst_lt (len, src_len))
11961 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11962 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11966 return build_call_expr (fn, 3, dest, src, size);
11971 /* If __builtin_strncat_chk is used, assume strncat is available. */
11972 fn = built_in_decls[BUILT_IN_STRNCAT];
11976 return build_call_expr (fn, 3, dest, src, len);
11979 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
11980 a normal call should be emitted rather than expanding the function
11981 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
11984 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
11986 tree dest, size, len, fn, fmt, flag;
11987 const char *fmt_str;
11988 int nargs = call_expr_nargs (exp);
11990 /* Verify the required arguments in the original call. */
11993 dest = CALL_EXPR_ARG (exp, 0)
11994 if (!validate_arg (dest, POINTER_TYPE))
11996 flag = CALL_EXPR_ARG (exp, 1);
11997 if (!validate_arg (flag, INTEGER_TYPE))
11999 size = CALL_EXPR_ARG (exp, 2);
12000 if (!validate_arg (size, INTEGER_TYPE))
12002 fmt = CALL_EXPR_ARG (exp, 3);
12003 if (!validate_arg (fmt, POINTER_TYPE))
12006 if (! host_integerp (size, 1))
/* Make sure the %-related characters are known in the target charset.  */
12011 if (!init_target_chars ())
12014 /* Check whether the format is a literal string constant. */
12015 fmt_str = c_getstr (fmt);
12016 if (fmt_str != NULL)
12018 /* If the format doesn't contain % args or %%, we know the size. */
12019 if (strchr (fmt_str, target_percent) == 0)
12021 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12022 len = build_int_cstu (size_type_node, strlen (fmt_str));
12024 /* If the format is "%s" and first ... argument is a string literal,
12025 we know the size too. */
12026 else if (fcode == BUILT_IN_SPRINTF_CHK
12027 && strcmp (fmt_str, target_percent_s) == 0)
12033 arg = CALL_EXPR_ARG (exp, 4);
12034 if (validate_arg (arg, POINTER_TYPE))
12036 len = c_strlen (arg, 1);
12037 if (! len || ! host_integerp (len, 1))
/* With a known object size, only fold when the output length
   is provably smaller than SIZE.  */
12044 if (! integer_all_onesp (size))
12046 if (! len || ! tree_int_cst_lt (len, size))
12050 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12051 or if format doesn't contain % chars or is "%s". */
12052 if (! integer_zerop (flag))
12054 if (fmt_str == NULL)
12056 if (strchr (fmt_str, target_percent) != NULL
12057 && strcmp (fmt_str, target_percent_s))
12061 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12062 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12063 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
/* Drop FLAG and SIZE: rebuild as {,v}sprintf (dest, fmt, ...).  */
12067 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12070 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12071 a normal call should be emitted rather than expanding the function
12072 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12073 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12074 passed as second argument. */
12077 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12078 enum built_in_function fcode)
12080 tree dest, size, len, fn, fmt, flag;
12081 const char *fmt_str;
12083 /* Verify the required arguments in the original call. */
12084 if (call_expr_nargs (exp) < 5)
12086 dest = CALL_EXPR_ARG (exp, 0);
12087 if (!validate_arg (dest, POINTER_TYPE))
12089 len = CALL_EXPR_ARG (exp, 1);
12090 if (!validate_arg (len, INTEGER_TYPE))
12092 flag = CALL_EXPR_ARG (exp, 2);
12093 if (!validate_arg (flag, INTEGER_TYPE))
12095 size = CALL_EXPR_ARG (exp, 3);
12096 if (!validate_arg (size, INTEGER_TYPE))
12098 fmt = CALL_EXPR_ARG (exp, 4);
12099 if (!validate_arg (fmt, POINTER_TYPE))
/* SIZE must be a compile-time constant to reason about the bound.  */
12102 if (! host_integerp (size, 1))
/* An all-ones SIZE means the object size is unknown.  */
12105 if (! integer_all_onesp (size))
12107 if (! host_integerp (len, 1))
12109 /* If LEN is not constant, try MAXLEN too.
12110 For MAXLEN only allow optimizing into non-_ocs function
12111 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12112 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* Keep the checking variant when the length may exceed the object size.  */
12118 if (tree_int_cst_lt (size, maxlen))
12122 if (!init_target_chars ())
12125 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12126 or if format doesn't contain % chars or is "%s". */
12127 if (! integer_zerop (flag))
12129 fmt_str = c_getstr (fmt);
12130 if (fmt_str == NULL)
12132 if (strchr (fmt_str, target_percent) != NULL
12133 && strcmp (fmt_str, target_percent_s))
12137 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12139 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12140 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
/* Drop FLAG and SIZE: rebuild as {,v}snprintf (dest, len, fmt, ...).  */
12144 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12147 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12148 FMT and ARG are the arguments to the call; we don't fold cases with
12149 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12151 Return NULL_TREE if no simplification was possible, otherwise return the
12152 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12153 code of the function to be simplified. */
12156 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12157 enum built_in_function fcode)
12159 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12160 const char *fmt_str = NULL;
12162 /* If the return value is used, don't do the transformation. */
12166 /* Verify the required arguments in the original call. */
12167 if (!validate_arg (fmt, POINTER_TYPE))
12170 /* Check whether the format is a literal string constant. */
12171 fmt_str = c_getstr (fmt);
12172 if (fmt_str == NULL)
12175 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12177 /* If we're using an unlocked function, assume the other
12178 unlocked functions exist explicitly. */
12179 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12180 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12184 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12185 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12188 if (!init_target_chars ())
/* Handle formats that are exactly "%s" or contain no '%' at all.  */
12191 if (strcmp (fmt_str, target_percent_s) == 0
12192 || strchr (fmt_str, target_percent) == NULL)
12196 if (strcmp (fmt_str, target_percent_s) == 0)
/* The va_list variants pass the arguments indirectly; we cannot
   inspect a single pointer argument for them.  */
12198 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12201 if (!arg || !validate_arg (arg, POINTER_TYPE))
12204 str = c_getstr (arg);
12210 /* The format specifier doesn't contain any '%' characters. */
12211 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12217 /* If the string was "", printf does nothing. */
12218 if (str[0] == '\0')
12219 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12221 /* If the string has length of 1, call putchar. */
12222 if (str[1] == '\0')
12224 /* Given printf("c"), (where c is any one character,)
12225 convert "c"[0] to an int and pass that to the replacement
12227 newarg = build_int_cst (NULL_TREE, str[0]);
12229 call = build_call_expr (fn_putchar, 1, newarg);
12233 /* If the string was "string\n", call puts("string"). */
12234 size_t len = strlen (str);
12235 if ((unsigned char)str[len - 1] == target_newline)
12237 /* Create a NUL-terminated string that's one char shorter
12238 than the original, stripping off the trailing '\n'. */
12239 char *newstr = alloca (len);
12240 memcpy (newstr, str, len - 1);
12241 newstr[len - 1] = 0;
12243 newarg = build_string_literal (len, newstr);
12245 call = build_call_expr (fn_puts, 1, newarg);
12248 /* We'd like to arrange to call fputs(string,stdout) here,
12249 but we need stdout and don't have a way to get it yet. */
12254 /* The other optimizations can be done only on the non-va_list variants. */
12255 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12258 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12259 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12261 if (!arg || !validate_arg (arg, POINTER_TYPE))
12264 call = build_call_expr (fn_puts, 1, arg);
12267 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12268 else if (strcmp (fmt_str, target_percent_c) == 0)
12270 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12273 call = build_call_expr (fn_putchar, 1, arg);
/* Coerce the replacement call to the original return type.  */
12279 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12282 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12283 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12284 more than 3 arguments, and ARG may be null in the 2-argument case.
12286 Return NULL_TREE if no simplification was possible, otherwise return the
12287 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12288 code of the function to be simplified. */
12291 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12292 enum built_in_function fcode)
12294 tree fn_fputc, fn_fputs, call = NULL_TREE;
12295 const char *fmt_str = NULL;
12297 /* If the return value is used, don't do the transformation. */
12301 /* Verify the required arguments in the original call. */
12302 if (!validate_arg (fp, POINTER_TYPE))
12304 if (!validate_arg (fmt, POINTER_TYPE))
12307 /* Check whether the format is a literal string constant. */
12308 fmt_str = c_getstr (fmt);
12309 if (fmt_str == NULL)
12312 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12314 /* If we're using an unlocked function, assume the other
12315 unlocked functions exist explicitly. */
12316 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12317 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12321 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12322 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12325 if (!init_target_chars ())
12328 /* If the format doesn't contain % args or %%, use strcpy. */
12329 if (strchr (fmt_str, target_percent) == NULL)
12331 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12335 /* If the format specifier was "", fprintf does nothing. */
12336 if (fmt_str[0] == '\0')
12338 /* If FP has side-effects, just wait until gimplification is
12340 if (TREE_SIDE_EFFECTS (fp))
12343 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12346 /* When "string" doesn't contain %, replace all cases of
12347 fprintf (fp, string) with fputs (string, fp). The fputs
12348 builtin will take care of special cases like length == 1. */
12350 call = build_call_expr (fn_fputs, 2, fmt, fp);
12353 /* The other optimizations can be done only on the non-va_list variants. */
12354 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12357 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12358 else if (strcmp (fmt_str, target_percent_s) == 0)
12360 if (!arg || !validate_arg (arg, POINTER_TYPE))
12363 call = build_call_expr (fn_fputs, 2, arg, fp);
12366 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12367 else if (strcmp (fmt_str, target_percent_c) == 0)
12369 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12372 call = build_call_expr (fn_fputc, 2, arg, fp);
/* Coerce the replacement call to the original return type.  */
12377 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12380 /* Initialize format string characters in the target charset. */
12383 init_target_chars (void)
/* Map the host characters we care about ('\n', '%', 'c', 's') into the
   target execution character set.  */
12388 target_newline = lang_hooks.to_target_charset ('\n');
12389 target_percent = lang_hooks.to_target_charset ('%');
12390 target_c = lang_hooks.to_target_charset ('c');
12391 target_s = lang_hooks.to_target_charset ('s');
/* A zero mapping means the character has no target equivalent; the
   printf-folding optimizations must then be skipped.  */
12392 if (target_newline == 0 || target_percent == 0 || target_c == 0
/* Pre-build the "%c", "%s" and "%s\n" comparison strings.  */
12396 target_percent_c[0] = target_percent;
12397 target_percent_c[1] = target_c;
12398 target_percent_c[2] = '\0';
12400 target_percent_s[0] = target_percent;
12401 target_percent_s[1] = target_s;
12402 target_percent_s[2] = '\0';
12404 target_percent_s_newline[0] = target_percent;
12405 target_percent_s_newline[1] = target_s;
12406 target_percent_s_newline[2] = target_newline;
12407 target_percent_s_newline[3] = '\0';
12414 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12415 and no overflow/underflow occurred. INEXACT is true if M was not
12416 exactly calculated. TYPE is the tree type for the result. This
12417 function assumes that you cleared the MPFR flags and then
12418 calculated M to see if anything subsequently set a flag prior to
12419 entering this function. Return NULL_TREE if any checks fail. */
12422 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12424 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12425 overflow/underflow occurred. If -frounding-math, proceed iff the
12426 result of calling FUNC was exact. */
12427 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12428 && (!flag_rounding_math || !inexact))
12430 REAL_VALUE_TYPE rr;
12432 real_from_mpfr (&rr, m, type, GMP_RNDN);
12433 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12434 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12435 but the mpft_t is not, then we underflowed in the
12437 if (real_isfinite (&rr)
12438 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12440 REAL_VALUE_TYPE rmode;
/* Round-trip through the target mode and require an exact match,
   so folding never changes the value the runtime would compute.  */
12442 real_convert (&rmode, TYPE_MODE (type), &rr);
12443 /* Proceed iff the specified mode can hold the value. */
12444 if (real_identical (&rmode, &rr))
12445 return build_real (type, rmode);
12451 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12452 FUNC on it and return the resulting value as a tree with type TYPE.
12453 If MIN and/or MAX are not NULL, then the supplied ARG must be
12454 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12455 acceptable values, otherwise they are not. The mpfr precision is
12456 set to the precision of TYPE. We assume that function FUNC returns
12457 zero if the result could be calculated exactly within the requested
12461 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12462 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12465 tree result = NULL_TREE;
12469 /* To proceed, MPFR must exactly represent the target floating point
12470 format, which only happens when the target base equals two. */
12471 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12472 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12474 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
/* Reject NaN/Inf and enforce the optional MIN/MAX domain bounds.  */
12476 if (real_isfinite (ra)
12477 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12478 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12480 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Compute FUNC at the target precision; the flags cleared here are
   inspected by do_mpfr_ckconv to validate the result.  */
12484 mpfr_init2 (m, prec);
12485 mpfr_from_real (m, ra, GMP_RNDN);
12486 mpfr_clear_flags ();
12487 inexact = func (m, m, GMP_RNDN);
12488 result = do_mpfr_ckconv (m, type, inexact);
12496 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12497 FUNC on it and return the resulting value as a tree with type TYPE.
12498 The mpfr precision is set to the precision of TYPE. We assume that
12499 function FUNC returns zero if the result could be calculated
12500 exactly within the requested precision. */
12503 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12504 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12506 tree result = NULL_TREE;
12511 /* To proceed, MPFR must exactly represent the target floating point
12512 format, which only happens when the target base equals two. */
12513 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12514 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12515 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12517 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12518 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
/* Reject NaN/Inf operands.  */
12520 if (real_isfinite (ra1) && real_isfinite (ra2))
12522 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Compute FUNC(ra1, ra2) at the target precision, then validate the
   result (flags + exact mode round-trip) in do_mpfr_ckconv.  */
12526 mpfr_inits2 (prec, m1, m2, NULL);
12527 mpfr_from_real (m1, ra1, GMP_RNDN);
12528 mpfr_from_real (m2, ra2, GMP_RNDN);
12529 mpfr_clear_flags ();
12530 inexact = func (m1, m1, m2, GMP_RNDN);
12531 result = do_mpfr_ckconv (m1, type, inexact);
12532 mpfr_clears (m1, m2, NULL);
12539 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12540 FUNC on it and return the resulting value as a tree with type TYPE.
12541 The mpfr precision is set to the precision of TYPE. We assume that
12542 function FUNC returns zero if the result could be calculated
12543 exactly within the requested precision. */
12546 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12547 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12549 tree result = NULL_TREE;
12555 /* To proceed, MPFR must exactly represent the target floating point
12556 format, which only happens when the target base equals two. */
12557 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12558 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12559 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12560 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12562 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12563 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12564 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
/* Reject NaN/Inf operands.  */
12566 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12568 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Compute FUNC(ra1, ra2, ra3) at the target precision, then validate
   the result in do_mpfr_ckconv.  */
12572 mpfr_inits2 (prec, m1, m2, m3, NULL);
12573 mpfr_from_real (m1, ra1, GMP_RNDN);
12574 mpfr_from_real (m2, ra2, GMP_RNDN);
12575 mpfr_from_real (m3, ra3, GMP_RNDN);
12576 mpfr_clear_flags ();
12577 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12578 result = do_mpfr_ckconv (m1, type, inexact);
12579 mpfr_clears (m1, m2, m3, NULL);
12586 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12587 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12588 If ARG_SINP and ARG_COSP are NULL then the result is returned
12589 as a complex value.
12590 The type is taken from the type of ARG and is used for setting the
12591 precision of the calculation and results. */
12594 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12596 tree const type = TREE_TYPE (arg);
12597 tree result = NULL_TREE;
12601 /* To proceed, MPFR must exactly represent the target floating point
12602 format, which only happens when the target base equals two. */
12603 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12604 && TREE_CODE (arg) == REAL_CST
12605 && !TREE_OVERFLOW (arg))
12607 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12609 if (real_isfinite (ra))
12611 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12612 tree result_s, result_c;
/* Compute sine and cosine in one call; both results must convert
   cleanly back to the target type for the fold to proceed.  */
12616 mpfr_inits2 (prec, m, ms, mc, NULL);
12617 mpfr_from_real (m, ra, GMP_RNDN);
12618 mpfr_clear_flags ();
12619 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12620 result_s = do_mpfr_ckconv (ms, type, inexact);
12621 result_c = do_mpfr_ckconv (mc, type, inexact);
12622 mpfr_clears (m, ms, mc, NULL);
12623 if (result_s && result_c)
12625 /* If we are to return in a complex value do so. */
12626 if (!arg_sinp && !arg_cosp)
12627 return build_complex (build_complex_type (type),
12628 result_c, result_s);
12630 /* Dereference the sin/cos pointer arguments. */
12631 arg_sinp = build_fold_indirect_ref (arg_sinp);
12632 arg_cosp = build_fold_indirect_ref (arg_cosp);
12633 /* Proceed if valid pointer type were passed in. */
12634 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12635 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12637 /* Set the values. */
12638 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12640 TREE_SIDE_EFFECTS (result_s) = 1;
12641 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12643 TREE_SIDE_EFFECTS (result_c) = 1;
12644 /* Combine the assignments into a compound expr. */
12645 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12646 result_s, result_c));
12654 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12655 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12656 two-argument mpfr order N Bessel function FUNC on them and return
12657 the resulting value as a tree with type TYPE. The mpfr precision
12658 is set to the precision of TYPE. We assume that function FUNC
12659 returns zero if the result could be calculated exactly within the
12660 requested precision. */
12662 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12663 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12664 const REAL_VALUE_TYPE *min, bool inclusive)
12666 tree result = NULL_TREE;
12671 /* To proceed, MPFR must exactly represent the target floating point
12672 format, which only happens when the target base equals two. */
12673 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12674 && host_integerp (arg1, 0)
12675 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12677 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12678 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
/* Reject NaN/Inf and enforce the optional MIN domain bound.  */
12681 && real_isfinite (ra)
12682 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12684 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* Compute FUNC(n, ra) at the target precision, then validate the
   result in do_mpfr_ckconv.  */
12688 mpfr_init2 (m, prec);
12689 mpfr_from_real (m, ra, GMP_RNDN);
12690 mpfr_clear_flags ();
12691 inexact = func (m, n, m, GMP_RNDN);
12692 result = do_mpfr_ckconv (m, type, inexact);
12700 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12701 the pointer *(ARG_QUO) and return the result. The type is taken
12702 from the type of ARG0 and is used for setting the precision of the
12703 calculation and results. */
12706 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12708 tree const type = TREE_TYPE (arg0);
12709 tree result = NULL_TREE;
12714 /* To proceed, MPFR must exactly represent the target floating point
12715 format, which only happens when the target base equals two. */
12716 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12717 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12718 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12720 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12721 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12723 if (real_isfinite (ra0) && real_isfinite (ra1))
12725 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
/* mpfr_remquo yields the remainder in m0 and the low quotient bits
   in integer_quo.  */
12730 mpfr_inits2 (prec, m0, m1, NULL);
12731 mpfr_from_real (m0, ra0, GMP_RNDN);
12732 mpfr_from_real (m1, ra1, GMP_RNDN);
12733 mpfr_clear_flags ();
12734 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12735 /* Remquo is independent of the rounding mode, so pass
12736 inexact=0 to do_mpfr_ckconv(). */
12737 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12738 mpfr_clears (m0, m1, NULL);
12741 /* MPFR calculates quo in the host's long so it may
12742 return more bits in quo than the target int can hold
12743 if sizeof(host long) > sizeof(target int). This can
12744 happen even for native compilers in LP64 mode. In
12745 these cases, modulo the quo value with the largest
12746 number that the target int can hold while leaving one
12747 bit for the sign. */
12748 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12749 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12751 /* Dereference the quo pointer argument. */
12752 arg_quo = build_fold_indirect_ref (arg_quo);
12753 /* Proceed iff a valid pointer type was passed in. */
12754 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12756 /* Set the value. */
12757 tree result_quo = fold_build2 (MODIFY_EXPR,
12758 TREE_TYPE (arg_quo), arg_quo,
12759 build_int_cst (NULL, integer_quo))
12760 TREE_SIDE_EFFECTS (result_quo) = 1;
12761 /* Combine the quo assignment with the rem. */
12762 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12763 result_quo, result_rem));
12771 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12772 resulting value as a tree with type TYPE. The mpfr precision is
12773 set to the precision of TYPE. We assume that this mpfr function
12774 returns zero if the result could be calculated exactly within the
12775 requested precision. In addition, the integer pointer represented
12776 by ARG_SG will be dereferenced and set to the appropriate signgam
12780 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12782 tree result = NULL_TREE;
12786 /* To proceed, MPFR must exactly represent the target floating point
12787 format, which only happens when the target base equals two. Also
12788 verify ARG is a constant and that ARG_SG is an int pointer. */
12789 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12790 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12791 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12792 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12794 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12796 /* In addition to NaN and Inf, the argument cannot be zero or a
12797 negative integer. */
12798 if (real_isfinite (ra)
12799 && ra->cl != rvc_zero
12800 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
12802 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12807 mpfr_init2 (m, prec);
12808 mpfr_from_real (m, ra, GMP_RNDN);
12809 mpfr_clear_flags ();
12810 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
12811 result_lg = do_mpfr_ckconv (m, type, inexact);
12817 /* Dereference the arg_sg pointer argument. */
12818 arg_sg = build_fold_indirect_ref (arg_sg);
12819 /* Assign the signgam value into *arg_sg. */
12820 result_sg = fold_build2 (MODIFY_EXPR,
12821 TREE_TYPE (arg_sg), arg_sg,
12822 build_int_cst (NULL, sg));
12823 TREE_SIDE_EFFECTS (result_sg) = 1;
12824 /* Combine the signgam assignment with the lgamma result. */
12825 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12826 result_sg, result_lg));