1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 #include "coretypes.h"
31 #include "tree-gimple.h"
34 #include "hard-reg-set.h"
37 #include "insn-config.h"
43 #include "typeclass.h"
48 #include "langhooks.h"
49 #include "basic-block.h"
50 #include "tree-mudflap.h"
51 #include "tree-flow.h"
52 #include "value-prof.h"
/* Default: pad varargs in the direction of target endianness unless the
   target overrides it.  NOTE(review): the matching #endif is elided from
   this excerpt.  */
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* X-macro: expand DEF_BUILTIN to the stringified enumerator name so that
   including builtins.def fills built_in_names in enum order.
   NOTE(review): the surrounding array braces and the #undef are elided
   from this excerpt.  */
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
/* Forward declarations for the file-local helpers below.  Grouped by role:
   string-constant utilities, builtin expanders (RTL generation), and
   builtin folders (tree simplification).  */

/* String/memory-constant helpers.  */
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
/* __builtin_apply machinery.  */
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
/* Expanders: turn a builtin CALL_EXPR into RTL.  */
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_alloca (tree, rtx);
140 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
141 static rtx expand_builtin_frame_address (tree, tree);
142 static rtx expand_builtin_fputs (tree, rtx, bool);
143 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
145 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
146 static tree stabilize_va_list (tree, int);
147 static rtx expand_builtin_expect (tree, rtx);
/* Folders: simplify a builtin call at the tree level when possible.  */
148 static tree fold_builtin_constant_p (tree);
149 static tree fold_builtin_expect (tree);
150 static tree fold_builtin_classify_type (tree);
151 static tree fold_builtin_strlen (tree);
152 static tree fold_builtin_inf (tree, int);
153 static tree fold_builtin_nan (tree, tree, int);
154 static tree rewrite_call_expr (tree, int, tree, int, ...);
155 static bool validate_arg (tree, enum tree_code code);
156 static bool integer_valued_real_p (tree);
157 static tree fold_trunc_transparent_mathfn (tree, tree);
158 static bool readonly_data_expr (tree);
159 static rtx expand_builtin_fabs (tree, rtx, rtx);
160 static rtx expand_builtin_signbit (tree, rtx);
161 static tree fold_builtin_sqrt (tree, tree);
162 static tree fold_builtin_cbrt (tree, tree);
163 static tree fold_builtin_pow (tree, tree, tree, tree);
164 static tree fold_builtin_powi (tree, tree, tree, tree);
165 static tree fold_builtin_cos (tree, tree, tree);
166 static tree fold_builtin_cosh (tree, tree, tree);
167 static tree fold_builtin_tan (tree, tree);
168 static tree fold_builtin_trunc (tree, tree);
169 static tree fold_builtin_floor (tree, tree);
170 static tree fold_builtin_ceil (tree, tree);
171 static tree fold_builtin_round (tree, tree);
172 static tree fold_builtin_int_roundingfn (tree, tree);
173 static tree fold_builtin_bitop (tree, tree);
174 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
175 static tree fold_builtin_strchr (tree, tree, tree);
176 static tree fold_builtin_memchr (tree, tree, tree, tree);
177 static tree fold_builtin_memcmp (tree, tree, tree);
178 static tree fold_builtin_strcmp (tree, tree);
179 static tree fold_builtin_strncmp (tree, tree, tree);
180 static tree fold_builtin_signbit (tree, tree);
181 static tree fold_builtin_copysign (tree, tree, tree, tree);
182 static tree fold_builtin_isascii (tree);
183 static tree fold_builtin_toascii (tree);
184 static tree fold_builtin_isdigit (tree);
185 static tree fold_builtin_fabs (tree, tree);
186 static tree fold_builtin_abs (tree, tree);
187 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
/* Arity-dispatched folding entry points.  */
189 static tree fold_builtin_n (tree, tree *, int, bool);
190 static tree fold_builtin_0 (tree, bool);
191 static tree fold_builtin_1 (tree, tree, bool);
192 static tree fold_builtin_2 (tree, tree, tree, bool);
193 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
194 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
195 static tree fold_builtin_varargs (tree, tree, bool);
197 static tree fold_builtin_strpbrk (tree, tree, tree);
198 static tree fold_builtin_strstr (tree, tree, tree);
199 static tree fold_builtin_strrchr (tree, tree, tree);
200 static tree fold_builtin_strcat (tree, tree);
201 static tree fold_builtin_strncat (tree, tree, tree);
202 static tree fold_builtin_strspn (tree, tree);
203 static tree fold_builtin_strcspn (tree, tree);
204 static tree fold_builtin_sprintf (tree, tree, tree, int);
/* Object-size checking (_chk) variants.  */
206 static rtx expand_builtin_object_size (tree);
207 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
208 enum built_in_function);
209 static void maybe_emit_chk_warning (tree, enum built_in_function);
210 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
211 static tree fold_builtin_object_size (tree, tree);
212 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
213 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
214 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
215 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
216 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
217 enum built_in_function);
218 static bool init_target_chars (void);
/* Target character codes for '\n', '%', 'c', 's' and common format
   fragments; filled in lazily by init_target_chars.  */
220 static unsigned HOST_WIDE_INT target_newline;
221 static unsigned HOST_WIDE_INT target_percent;
222 static unsigned HOST_WIDE_INT target_c;
223 static unsigned HOST_WIDE_INT target_s;
224 static char target_percent_c[3];
225 static char target_percent_s[3];
226 static char target_percent_s_newline[4];
/* MPFR-backed constant folding of math builtins.  */
227 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
228 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
229 static tree do_mpfr_arg2 (tree, tree, tree,
230 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
231 static tree do_mpfr_arg3 (tree, tree, tree, tree,
232 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
233 static tree do_mpfr_sincos (tree, tree, tree);
235 /* Return true if NODE should be considered for inline expansion regardless
236 of the optimization level. This means whenever a function is invoked with
237 its "internal" name, which normally contains the prefix "__builtin". */
/* NOTE(review): body is elided in this excerpt — the return statements for
   the two prefix matches (and the fallthrough) are missing.  Matches either
   the "__builtin_" or the "__sync_" name prefix.  */
239 static bool called_as_built_in (tree node)
241 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
242 if (strncmp (name, "__builtin_", 10) == 0)
244 if (strncmp (name, "__sync_", 7) == 0)
249 /* Return the alignment in bits of EXP, a pointer valued expression.
250 But don't return more than MAX_ALIGN no matter what.
251 The alignment returned is, by default, the alignment of the thing that
252 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
254 Otherwise, look at the expression to see if we can do better, i.e., if the
255 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): several lines (braces, some case labels, loop bodies) are
   elided in this excerpt; comments below describe only what is visible.  */
258 get_pointer_alignment (tree exp, unsigned int max_align)
260 unsigned int align, inner;
262 /* We rely on TER to compute accurate alignment information. */
263 if (!(optimize && flag_tree_ter))
266 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
/* Start from the declared alignment of the pointed-to type, capped.  */
269 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
270 align = MIN (align, max_align);
274 switch (TREE_CODE (exp))
278 case NON_LVALUE_EXPR:
/* Conversions: peel the wrapper and recompute from the new pointee type.  */
279 exp = TREE_OPERAND (exp, 0);
280 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
283 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
284 align = MIN (inner, max_align);
288 /* If sum of pointer + int, restrict our maximum alignment to that
289 imposed by the integer. If not, we can't do any better than
291 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
/* Halve max_align until it divides the constant addend, i.e. keep only
   the alignment the known offset cannot disturb.  */
294 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
295 & (max_align / BITS_PER_UNIT - 1))
299 exp = TREE_OPERAND (exp, 0);
303 /* See what we are pointing at and look at its alignment. */
304 exp = TREE_OPERAND (exp, 0);
306 if (handled_component_p (exp))
308 HOST_WIDE_INT bitsize, bitpos;
310 enum machine_mode mode;
311 int unsignedp, volatilep;
313 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
314 &mode, &unsignedp, &volatilep, true);
/* bitpos & -bitpos isolates the lowest set bit: the largest power-of-two
   alignment consistent with the constant bit offset.  */
316 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
317 if (offset && TREE_CODE (offset) == PLUS_EXPR
318 && host_integerp (TREE_OPERAND (offset, 1), 1))
320 /* Any overflow in calculating offset_bits won't change
323 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
327 inner = MIN (inner, (offset_bits & -offset_bits));
328 offset = TREE_OPERAND (offset, 0);
330 if (offset && TREE_CODE (offset) == MULT_EXPR
331 && host_integerp (TREE_OPERAND (offset, 1), 1))
333 /* Any overflow in calculating offset_factor won't change
335 unsigned offset_factor
336 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
340 inner = MIN (inner, (offset_factor & -offset_factor))
343 inner = MIN (inner, BITS_PER_UNIT);
345 if (TREE_CODE (exp) == FUNCTION_DECL)
346 align = FUNCTION_BOUNDARY;
347 else if (DECL_P (exp))
348 align = MIN (inner, DECL_ALIGN (exp));
349 #ifdef CONSTANT_ALIGNMENT
350 else if (CONSTANT_CLASS_P (exp))
351 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
353 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
354 || TREE_CODE (exp) == INDIRECT_REF)
355 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
357 align = MIN (align, inner);
358 return MIN (align, max_align);
366 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
367 way, because it could contain a zero byte in the middle.
368 TREE_STRING_LENGTH is the size of the character array, not the string.
370 ONLY_VALUE should be nonzero if the result is not going to be emitted
371 into the instruction stream and zero if it is going to be expanded.
372 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
373 is returned, otherwise NULL, since
374 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
375 evaluate the side-effects.
377 The value returned is of type `ssizetype'.
379 Unfortunately, string_constant can't access the values of const char
380 arrays with initializers, so neither can we do so here. */
/* NOTE(review): braces and some early-return lines are elided from this
   excerpt.  */
383 c_strlen (tree src, int only_value)
386 HOST_WIDE_INT offset;
/* COND_EXPR: fold only if both arms have the same known length and the
   condition is side-effect free (or we only need the value).  */
391 if (TREE_CODE (src) == COND_EXPR
392 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
396 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
397 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
398 if (tree_int_cst_equal (len1, len2))
402 if (TREE_CODE (src) == COMPOUND_EXPR
403 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
404 return c_strlen (TREE_OPERAND (src, 1), only_value);
406 src = string_constant (src, &offset_node);
410 max = TREE_STRING_LENGTH (src) - 1;
411 ptr = TREE_STRING_POINTER (src);
413 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
415 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
416 compute the offset to the following null if we don't know where to
417 start searching for it. */
420 for (i = 0; i < max; i++)
424 /* We don't know the starting offset, but we do know that the string
425 has no internal zero bytes. We can assume that the offset falls
426 within the bounds of the string; otherwise, the programmer deserves
427 what he gets. Subtract the offset from the length of the string,
428 and return that. This would perhaps not be valid if we were dealing
429 with named arrays in addition to literal string constants. */
431 return size_diffop (size_int (max), offset_node);
434 /* We have a known offset into the string. Start searching there for
435 a null character if we can represent it as a single HOST_WIDE_INT. */
436 if (offset_node == 0)
438 else if (! host_integerp (offset_node, 0))
441 offset = tree_low_cst (offset_node, 0);
443 /* If the offset is known to be out of bounds, warn, and call strlen at
445 if (offset < 0 || offset > max)
447 warning (0, "offset outside bounds of constant string");
451 /* Use strlen to search for the first zero byte. Since any strings
452 constructed with build_string will have nulls appended, we win even
453 if we get handed something like (char[4])"abcd".
455 Since OFFSET is our starting index into the string, no further
456 calculation is needed. */
457 return ssize_int (strlen (ptr + offset));
460 /* Return a char pointer for a C string if it is a string constant
461 or sum of string constant and integer constant. */
/* NOTE(review): the function signature line and failure returns are elided
   in this excerpt.  Returns NULL (elided branches) when SRC is not a
   constant string or the offset is out of range.  */
468 src = string_constant (src, &offset_node);
472 if (offset_node == 0)
473 return TREE_STRING_POINTER (src);
474 else if (!host_integerp (offset_node, 1)
475 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
478 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
481 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
482 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* NOTE(review): declarations of c[], ch, i, j and the zero-initialization
   are elided from this excerpt.  */
485 c_readstr (const char *str, enum machine_mode mode)
491 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
496 for (i = 0; i < GET_MODE_SIZE (mode); i++)
/* Compute the bit position J for byte I, honoring target word and
   byte endianness (which may differ for multi-word modes).  */
499 if (WORDS_BIG_ENDIAN)
500 j = GET_MODE_SIZE (mode) - i - 1;
501 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
502 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
503 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
505 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
508 ch = (unsigned char) str[i];
509 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
511 return immed_double_const (c[0], c[1], mode);
514 /* Cast a target constant CST to target CHAR and if that value fits into
515 host char type, return zero and put that value into variable pointed to by
/* NOTE(review): the trailing lines (hostval assignment, the fits/doesn't-fit
   check, *p store and returns) are elided from this excerpt.  */
519 target_char_cast (tree cst, char *p)
521 unsigned HOST_WIDE_INT val, hostval;
523 if (!host_integerp (cst, 1)
524 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
/* Truncate to the target's char width.  */
527 val = tree_low_cst (cst, 1);
528 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
529 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* Truncate to the host's char width for comparison.  */
532 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
533 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
542 /* Similar to save_expr, but assumes that arbitrary code is not executed
543 in between the multiple evaluations. In particular, we assume that a
544 non-addressable local variable will not be modified. */
/* NOTE(review): the return for the safe-to-reuse case is elided; only the
   fallback save_expr is visible.  PARM_DECLs and non-static, non-addressable
   VAR_DECLs need no temporary under the stated assumption.  */
547 builtin_save_expr (tree exp)
549 if (TREE_ADDRESSABLE (exp) == 0
550 && (TREE_CODE (exp) == PARM_DECL
551 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
554 return save_expr (exp);
557 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
558 times to get the address of either a higher stack frame, or a return
559 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): the non-INITIAL_FRAME_ADDRESS_RTX branch, several #else/#endif
   lines, and the final return are elided from this excerpt.  */
562 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
566 #ifdef INITIAL_FRAME_ADDRESS_RTX
567 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
571 /* For a zero count with __builtin_return_address, we don't care what
572 frame address we return, because target-specific definitions will
573 override us. Therefore frame pointer elimination is OK, and using
574 the soft frame pointer is OK.
576 For a nonzero count, or a zero count with __builtin_frame_address,
577 we require a stable offset from the current frame pointer to the
578 previous one, so we must use the hard frame pointer, and
579 we must disable frame pointer elimination. */
580 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
581 tem = frame_pointer_rtx;
584 tem = hard_frame_pointer_rtx;
586 /* Tell reload not to eliminate the frame pointer. */
587 current_function_accesses_prior_frames = 1;
591 /* Some machines need special handling before we can access
592 arbitrary frames. For example, on the SPARC, we must first flush
593 all register windows to the stack. */
594 #ifdef SETUP_FRAME_ADDRESSES
596 SETUP_FRAME_ADDRESSES ();
599 /* On the SPARC, the return address is not in the frame, it is in a
600 register. There is no way to access it off of the current frame
601 pointer, but it can be accessed off the previous frame pointer by
602 reading the value from the register window save area. */
603 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
604 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
608 /* Scan back COUNT frames to the specified frame. */
609 for (i = 0; i < count; i++)
611 /* Assume the dynamic chain pointer is in the word that the
612 frame address points to, unless otherwise specified. */
613 #ifdef DYNAMIC_CHAIN_ADDRESS
614 tem = DYNAMIC_CHAIN_ADDRESS (tem);
616 tem = memory_address (Pmode, tem);
617 tem = gen_frame_mem (Pmode, tem);
618 tem = copy_to_reg (tem);
621 /* For __builtin_frame_address, return what we've got. But, on
622 the SPARC for example, we may have to add a bias. */
623 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
624 #ifdef FRAME_ADDR_RTX
625 return FRAME_ADDR_RTX (tem);
630 /* For __builtin_return_address, get the return address from that frame. */
631 #ifdef RETURN_ADDR_RTX
632 tem = RETURN_ADDR_RTX (count, tem);
634 tem = memory_address (Pmode,
635 plus_constant (tem, GET_MODE_SIZE (Pmode)));
636 tem = gen_frame_mem (Pmode, tem);
641 /* Alias set used for setjmp buffer. */
/* -1 means "not yet allocated"; lazily created by the setjmp/longjmp
   expanders via new_alias_set.  */
642 static HOST_WIDE_INT setjmp_alias_set = -1;
644 /* Construct the leading half of a __builtin_setjmp call. Control will
645 return to RECEIVER_LABEL. This is also called directly by the SJLJ
646 exception handling code. */
/* NOTE(review): local declarations and the closing #endif/brace are elided
   from this excerpt.  Buffer layout: word 0 = frame pointer, word 1 =
   receiver label, rest = machine-dependent stack save area.  */
649 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
651 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
655 if (setjmp_alias_set == -1)
656 setjmp_alias_set = new_alias_set ();
658 buf_addr = convert_memory_address (Pmode, buf_addr);
660 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
662 /* We store the frame pointer and the address of receiver_label in
663 the buffer and use the rest of it for the stack save area, which
664 is machine-dependent. */
666 mem = gen_rtx_MEM (Pmode, buf_addr);
667 set_mem_alias_set (mem, setjmp_alias_set);
668 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* NOTE(review): line below ends in a comma operator rather than a
   semicolon — behavior is identical, but it reads like a typo; worth
   normalizing in a real edit.  */
670 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
671 set_mem_alias_set (mem, setjmp_alias_set);
673 emit_move_insn (validize_mem (mem),
674 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
676 stack_save = gen_rtx_MEM (sa_mode,
677 plus_constant (buf_addr,
678 2 * GET_MODE_SIZE (Pmode)));
679 set_mem_alias_set (stack_save, setjmp_alias_set);
680 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
682 /* If there is further processing to do, do it. */
683 #ifdef HAVE_builtin_setjmp_setup
684 if (HAVE_builtin_setjmp_setup)
685 emit_insn (gen_builtin_setjmp_setup (buf_addr));
688 /* Tell optimize_save_area_alloca that extra work is going to
689 need to go on during alloca. */
690 current_function_calls_setjmp = 1;
692 /* Set this so all the registers get saved in our frame; we need to be
693 able to copy the saved values for any registers from frames we unwind. */
694 current_function_has_nonlocal_label = 1;
697 /* Construct the trailing part of a __builtin_setjmp call. This is
698 also called directly by the SJLJ exception handling code. */
/* NOTE(review): several braces, #else/#endif lines and local declarations
   are elided from this excerpt.  */
701 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
703 /* Clobber the FP when we get here, so we have to make sure it's
704 marked as used by this function. */
705 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
707 /* Mark the static chain as clobbered here so life information
708 doesn't get messed up for it. */
709 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
711 /* Now put in the code to restore the frame pointer, and argument
712 pointer, if needed. */
713 #ifdef HAVE_nonlocal_goto
714 if (! HAVE_nonlocal_goto)
717 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
718 /* This might change the hard frame pointer in ways that aren't
719 apparent to early optimization passes, so force a clobber. */
720 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
723 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
724 if (fixed_regs[ARG_POINTER_REGNUM])
726 #ifdef ELIMINABLE_REGS
/* If the arg pointer can be eliminated to the frame pointer, reload will
   restore it for us and no explicit restore is needed.  */
728 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
730 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
731 if (elim_regs[i].from == ARG_POINTER_REGNUM
732 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
735 if (i == ARRAY_SIZE (elim_regs))
738 /* Now restore our arg pointer from the address at which it
739 was saved in our stack frame. */
740 emit_move_insn (virtual_incoming_args_rtx,
741 copy_to_reg (get_arg_pointer_save_area (cfun)));
746 #ifdef HAVE_builtin_setjmp_receiver
747 if (HAVE_builtin_setjmp_receiver)
748 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
751 #ifdef HAVE_nonlocal_goto_receiver
752 if (HAVE_nonlocal_goto_receiver)
753 emit_insn (gen_nonlocal_goto_receiver ());
758 /* @@@ This is a kludge. Not all machine descriptions define a blockage
759 insn, but we must not allow the code we just generated to be reordered
760 by scheduling. Specifically, the update of the frame pointer must
761 happen immediately, not later. So emit an ASM_INPUT to act as blockage
763 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
766 /* __builtin_longjmp is passed a pointer to an array of five words (not
767 all will be used on all machines). It operates similarly to the C
768 library function of the same name, but is more efficient. Much of
769 the code below is copied from the handling of non-local gotos. */
/* NOTE(review): braces, #else/#endif lines and parts of the insn-scan loop
   are elided from this excerpt.  */
772 expand_builtin_longjmp (rtx buf_addr, rtx value)
774 rtx fp, lab, stack, insn, last;
775 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
777 if (setjmp_alias_set == -1)
778 setjmp_alias_set = new_alias_set ();
780 buf_addr = convert_memory_address (Pmode, buf_addr);
782 buf_addr = force_reg (Pmode, buf_addr);
784 /* We used to store value in static_chain_rtx, but that fails if pointers
785 are smaller than integers. We instead require that the user must pass
786 a second argument of 1, because that is what builtin_setjmp will
787 return. This also makes EH slightly more efficient, since we are no
788 longer copying around a value that we don't care about. */
789 gcc_assert (value == const1_rtx);
791 last = get_last_insn ();
792 #ifdef HAVE_builtin_longjmp
793 if (HAVE_builtin_longjmp)
794 emit_insn (gen_builtin_longjmp (buf_addr));
/* Fallback path: pull FP, label and stack pointer out of the setjmp
   buffer (same layout expand_builtin_setjmp_setup wrote).  */
798 fp = gen_rtx_MEM (Pmode, buf_addr);
799 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
800 GET_MODE_SIZE (Pmode)));
802 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
803 2 * GET_MODE_SIZE (Pmode)));
804 set_mem_alias_set (fp, setjmp_alias_set);
805 set_mem_alias_set (lab, setjmp_alias_set);
806 set_mem_alias_set (stack, setjmp_alias_set);
808 /* Pick up FP, label, and SP from the block and jump. This code is
809 from expand_goto in stmt.c; see there for detailed comments. */
810 #ifdef HAVE_nonlocal_goto
811 if (HAVE_nonlocal_goto)
812 /* We have to pass a value to the nonlocal_goto pattern that will
813 get copied into the static_chain pointer, but it does not matter
814 what that value is, because builtin_setjmp does not use it. */
815 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
819 lab = copy_to_reg (lab);
/* Clobber all memory and the frame pointer so the scheduler cannot move
   loads/stores across the frame switch.  */
821 emit_insn (gen_rtx_CLOBBER (VOIDmode,
822 gen_rtx_MEM (BLKmode,
823 gen_rtx_SCRATCH (VOIDmode))));
824 emit_insn (gen_rtx_CLOBBER (VOIDmode,
825 gen_rtx_MEM (BLKmode,
826 hard_frame_pointer_rtx)));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
832 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
848 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
852 else if (CALL_P (insn))
857 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
858 and the address of the save area. */
/* NOTE(review): braces, #else/#endif lines and the loop tail are elided
   from this excerpt.  Mirrors expand_builtin_longjmp's fallback path.  */
861 expand_builtin_nonlocal_goto (tree exp)
863 tree t_label, t_save_area;
864 rtx r_label, r_save_area, r_fp, r_sp, insn;
866 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
869 t_label = CALL_EXPR_ARG (exp, 0);
870 t_save_area = CALL_EXPR_ARG (exp, 1);
872 r_label = expand_normal (t_label);
873 r_label = convert_memory_address (Pmode, r_label);
874 r_save_area = expand_normal (t_save_area);
875 r_save_area = convert_memory_address (Pmode, r_save_area);
/* Save area layout: word 0 = frame pointer, following words = stack
   save area.  */
876 r_fp = gen_rtx_MEM (Pmode, r_save_area);
877 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
878 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
880 current_function_has_nonlocal_goto = 1;
882 #ifdef HAVE_nonlocal_goto
883 /* ??? We no longer need to pass the static chain value, afaik. */
884 if (HAVE_nonlocal_goto)
885 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
889 r_label = copy_to_reg (r_label);
891 emit_insn (gen_rtx_CLOBBER (VOIDmode,
892 gen_rtx_MEM (BLKmode,
893 gen_rtx_SCRATCH (VOIDmode))));
895 emit_insn (gen_rtx_CLOBBER (VOIDmode,
896 gen_rtx_MEM (BLKmode,
897 hard_frame_pointer_rtx)));
899 /* Restore frame pointer for containing function.
900 This sets the actual hard register used for the frame pointer
901 to the location of the function's incoming static chain info.
902 The non-local goto handler will then adjust it to contain the
903 proper value and reload the argument pointer, if needed. */
904 emit_move_insn (hard_frame_pointer_rtx, r_fp)
905 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
907 /* USE of hard_frame_pointer_rtx added for consistency;
908 not clear if really needed. */
909 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
910 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
911 emit_indirect_jump (r_label);
914 /* Search backwards to the jump insn and mark it as a
916 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
920 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
921 const0_rtx, REG_NOTES (insn));
924 else if (CALL_P (insn))
931 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
932 (not all will be used on all machines) that was passed to __builtin_setjmp.
933 It updates the stack pointer in that block to correspond to the current
/* NOTE(review): #else/#endif lines, the stack_save declaration, and the
   HAVE_setjmp guard around gen_setjmp are elided from this excerpt.  */
937 expand_builtin_update_setjmp_buf (rtx buf_addr)
939 enum machine_mode sa_mode = Pmode;
943 #ifdef HAVE_save_stack_nonlocal
944 if (HAVE_save_stack_nonlocal)
945 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
947 #ifdef STACK_SAVEAREA_MODE
948 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The save area lives two pointer-words into the setjmp buffer, matching
   expand_builtin_setjmp_setup's layout.  */
952 = gen_rtx_MEM (sa_mode,
955 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
959 emit_insn (gen_setjmp ());
962 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
965 /* Expand a call to __builtin_prefetch. For a target that does not support
966 data prefetch, evaluate the memory address argument in case it has side
/* NOTE(review): braces, several warning-reset lines and the non-HAVE_prefetch
   fallthrough are elided from this excerpt.  */
970 expand_builtin_prefetch (tree exp)
972 tree arg0, arg1, arg2;
976 if (!validate_arglist (exp, POINTER_TYPE, 0))
979 arg0 = CALL_EXPR_ARG (exp, 0);
981 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
982 zero (read) and argument 2 (locality) defaults to 3 (high degree of
984 nargs = call_expr_nargs (exp);
986 arg1 = CALL_EXPR_ARG (exp, 1);
988 arg1 = integer_zero_node;
990 arg2 = CALL_EXPR_ARG (exp, 2);
992 arg2 = build_int_cst (NULL_TREE, 3);
994 /* Argument 0 is an address. */
995 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
997 /* Argument 1 (read/write flag) must be a compile-time constant int. */
998 if (TREE_CODE (arg1) != INTEGER_CST)
1000 error ("second argument to %<__builtin_prefetch%> must be a constant");
1001 arg1 = integer_zero_node;
1003 op1 = expand_normal (arg1);
1004 /* Argument 1 must be either zero or one. */
1005 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1007 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1012 /* Argument 2 (locality) must be a compile-time constant int. */
1013 if (TREE_CODE (arg2) != INTEGER_CST)
1015 error ("third argument to %<__builtin_prefetch%> must be a constant")
1016 arg2 = integer_zero_node;
1018 op2 = expand_normal (arg2);
1019 /* Argument 2 must be 0, 1, 2, or 3. */
1020 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1022 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1026 #ifdef HAVE_prefetch
/* Force the address into a form the prefetch pattern accepts.  */
1029 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1031 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1032 || (GET_MODE (op0) != Pmode))
1034 op0 = convert_memory_address (Pmode, op0);
1035 op0 = force_reg (Pmode, op0);
1037 emit_insn (gen_prefetch (op0, op1, op2));
1041 /* Don't do anything with direct references to volatile memory, but
1042 generate code to handle other side effects. */
1043 if (!MEM_P (op0) && side_effects_p (op0))
1047 /* Get a MEM rtx for expression EXP which is the address of an operand
1048 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1049 the maximum length of the block of memory that might be accessed or
1053 get_memory_rtx (tree exp, tree len)
/* Expand the address and wrap it in a BLKmode MEM; attributes are
   refined below.  */
1055 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1056 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1058 /* Get an expression we can use to find the attributes to assign to MEM.
1059 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1060 we can. First remove any nops. */
1061 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1062 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1063 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1064 exp = TREE_OPERAND (exp, 0);
1066 if (TREE_CODE (exp) == ADDR_EXPR)
1067 exp = TREE_OPERAND (exp, 0);
1068 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1069 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1073 /* Honor attributes derived from exp, except for the alias set
1074 (as builtin stringops may alias with anything) and the size
1075 (as stringops may access multiple array elements). */
1078 set_mem_attributes (mem, exp, 0);
1080 /* Allow the string and memory builtins to overflow from one
1081 field into another, see http://gcc.gnu.org/PR23561.
1082 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1083 memory accessed by the string or memory builtin will fit
1084 within the field. */
1085 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1087 tree mem_expr = MEM_EXPR (mem);
/* OFFSET/LENGTH stay -1 when unknown; only non-negative values are
   used in the containment proof below.  */
1088 HOST_WIDE_INT offset = -1, length = -1;
/* Strip array indexing and conversions to reach the innermost
   COMPONENT_REF.  */
1091 while (TREE_CODE (inner) == ARRAY_REF
1092 || TREE_CODE (inner) == NOP_EXPR
1093 || TREE_CODE (inner) == CONVERT_EXPR
1094 || TREE_CODE (inner) == NON_LVALUE_EXPR
1095 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1096 || TREE_CODE (inner) == SAVE_EXPR)
1097 inner = TREE_OPERAND (inner, 0);
1099 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1101 if (MEM_OFFSET (mem)
1102 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1103 offset = INTVAL (MEM_OFFSET (mem));
1105 if (offset >= 0 && len && host_integerp (len, 0))
1106 length = tree_low_cst (len, 0);
/* Walk outward through nested COMPONENT_REFs, keeping MEM_EXPR in
   sync; each iteration tries to prove the access fits in the
   current field.  */
1108 while (TREE_CODE (inner) == COMPONENT_REF)
1110 tree field = TREE_OPERAND (inner, 1);
1111 gcc_assert (! DECL_BIT_FIELD (field));
1112 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1113 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1116 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1117 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1120 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1121 /* If we can prove the memory starting at XEXP (mem, 0)
1122 and ending at XEXP (mem, 0) + LENGTH will fit into
1123 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1126 && offset + length <= size)
1131 && host_integerp (DECL_FIELD_OFFSET (field), 0))
/* Translate the field-relative offset to one relative to the
   enclosing record before moving a level outward.  */
1132 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1133 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1141 mem_expr = TREE_OPERAND (mem_expr, 0);
1142 inner = TREE_OPERAND (inner, 0);
/* Containment could not be proven: drop the expr/offset
   attributes rather than keep a misleading COMPONENT_REF.  */
1145 if (mem_expr == NULL)
1147 if (mem_expr != MEM_EXPR (mem))
1149 set_mem_expr (mem, mem_expr);
1150 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and touch multiple elements, so use
   alias set 0 and no size.  */
1153 set_mem_alias_set (mem, 0);
1154 set_mem_size (mem, NULL_RTX);
1160 /* Built-in functions to perform an untyped call and return. */
/* The three tables below are filled in lazily by apply_args_size and
   apply_result_size and never change after that.  */
1162 /* For each register that may be used for calling a function, this
1163 gives a mode used to copy the register's value. VOIDmode indicates
1164 the register is not used for calling a function. If the machine
1165 has register windows, this gives only the outbound registers.
1166 INCOMING_REGNO gives the corresponding inbound register. */
1167 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1169 /* For each register that may be used for returning values, this gives
1170 a mode used to copy the register's value. VOIDmode indicates the
1171 register is not used for returning values. If the machine has
1172 register windows, this gives only the outbound registers.
1173 INCOMING_REGNO gives the corresponding inbound register. */
1174 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1176 /* For each register that may be used for calling a function, this
1177 gives the offset of that register into the block returned by
1178 __builtin_apply_args. 0 indicates that the register is not
1179 used for calling a function. */
1180 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1182 /* Return the size required for the block returned by __builtin_apply_args,
1183 and initialize apply_args_mode. */
1186 apply_args_size (void)
/* -1 means "not yet computed"; the result is memoized here across
   calls.  */
1188 static int size = -1;
1191 enum machine_mode mode;
1193 /* The values computed by this function never change. */
1196 /* The first value is the incoming arg-pointer. */
1197 size = GET_MODE_SIZE (Pmode);
1199 /* The second value is the structure value address unless this is
1200 passed as an "invisible" first argument. */
1201 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1202 size += GET_MODE_SIZE (Pmode);
/* Assign each argument register a naturally-aligned slot in the
   block and record its mode and offset.  */
1204 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1205 if (FUNCTION_ARG_REGNO_P (regno))
1207 mode = reg_raw_mode[regno];
1209 gcc_assert (mode != VOIDmode);
/* Round SIZE up to this register's alignment before placing it.  */
1211 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1212 if (size % align != 0)
1213 size = CEIL (size, align) * align;
1214 apply_args_reg_offset[regno] = size;
1215 size += GET_MODE_SIZE (mode);
1216 apply_args_mode[regno] = mode;
/* Registers not used for argument passing get VOIDmode / offset 0.  */
1220 apply_args_mode[regno] = VOIDmode;
1221 apply_args_reg_offset[regno] = 0;
1227 /* Return the size required for the block returned by __builtin_apply,
1228 and initialize apply_result_mode. */
1231 apply_result_size (void)
/* Memoized: -1 means "not yet computed".  */
1233 static int size = -1;
1235 enum machine_mode mode;
1237 /* The values computed by this function never change. */
/* Lay out a naturally-aligned slot for every possible value-return
   register, mirroring apply_args_size above.  */
1242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1243 if (FUNCTION_VALUE_REGNO_P (regno))
1245 mode = reg_raw_mode[regno];
1247 gcc_assert (mode != VOIDmode);
1249 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1250 if (size % align != 0)
1251 size = CEIL (size, align) * align;
1252 size += GET_MODE_SIZE (mode);
1253 apply_result_mode[regno] = mode;
1256 apply_result_mode[regno] = VOIDmode;
1258 /* Allow targets that use untyped_call and untyped_return to override
1259 the size so that machine-specific information can be stored here. */
1260 #ifdef APPLY_RESULT_SIZE
1261 size = APPLY_RESULT_SIZE;
1267 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1268 /* Create a vector describing the result block RESULT. If SAVEP is true,
1269 the result block is used to save the values; otherwise it is used to
1270 restore the values. */
1273 result_vector (int savep, rtx result)
1275 int regno, size, align, nelts;
1276 enum machine_mode mode;
/* Scratch array for the SETs; sized for the worst case of every hard
   register participating.  */
1278 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
/* Walk the value-return registers using the same offset layout that
   apply_result_size computed.  */
1281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1282 if ((mode = apply_result_mode[regno]) != VOIDmode)
1284 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1285 if (size % align != 0)
1286 size = CEIL (size, align) * align;
/* When restoring, copy into the inbound register on register-window
   machines.  */
1287 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1288 mem = adjust_address (result, mode, size);
1289 savevec[nelts++] = (savep
1290 ? gen_rtx_SET (VOIDmode, mem, reg)
1291 : gen_rtx_SET (VOIDmode, reg, mem))
1292 size += GET_MODE_SIZE (mode);
1294 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1296 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1298 /* Save the state required to perform an untyped call with the same
1299 arguments as were passed to the current function. */
1302 expand_builtin_apply_args_1 (void)
1305 int size, align, regno;
1306 enum machine_mode mode;
1307 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1309 /* Create a block where the arg-pointer, structure value address,
1310 and argument registers can be saved. */
1311 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1313 /* Walk past the arg-pointer and structure value address. */
1314 size = GET_MODE_SIZE (Pmode);
1315 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1316 size += GET_MODE_SIZE (Pmode);
1318 /* Save each register used in calling a function to the block. */
1319 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1320 if ((mode = apply_args_mode[regno]) != VOIDmode)
/* Same slot layout as apply_args_size: align, then store.  */
1322 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1323 if (size % align != 0)
1324 size = CEIL (size, align) * align;
1326 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1328 emit_move_insn (adjust_address (registers, mode, size), tem);
1329 size += GET_MODE_SIZE (mode);
1332 /* Save the arg pointer to the block. */
1333 tem = copy_to_reg (virtual_incoming_args_rtx);
1334 #ifdef STACK_GROWS_DOWNWARD
1335 /* We need the pointer as the caller actually passed them to us, not
1336 as we might have pretended they were passed. Make sure it's a valid
1337 operand, as emit_move_insn isn't expected to handle a PLUS. */
1339 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1342 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1344 size = GET_MODE_SIZE (Pmode);
1346 /* Save the structure value address unless this is passed as an
1347 "invisible" first argument. */
1348 if (struct_incoming_value)
1350 emit_move_insn (adjust_address (registers, Pmode, size),
1351 copy_to_reg (struct_incoming_value));
1352 size += GET_MODE_SIZE (Pmode);
1355 /* Return the address of the block. */
1356 return copy_addr_to_reg (XEXP (registers, 0));
1359 /* __builtin_apply_args returns block of memory allocated on
1360 the stack into which is stored the arg pointer, structure
1361 value address, static chain, and all the registers that might
1362 possibly be used in performing a function call. The code is
1363 moved to the start of the function so the incoming values are
1367 expand_builtin_apply_args (void)
1369 /* Don't do __builtin_apply_args more than once in a function.
1370 Save the result of the first call and reuse it. */
1371 if (apply_args_value != 0)
1372 return apply_args_value;
1374 /* When this function is called, it means that registers must be
1375 saved on entry to this function. So we migrate the
1376 call to the first insn of this function. */
/* Build the save sequence out-of-line so it can be relocated.  */
1381 temp = expand_builtin_apply_args_1 ();
/* Cache the result for any later __builtin_apply_args in this
   function.  */
1385 apply_args_value = temp;
1387 /* Put the insns after the NOTE that starts the function.
1388 If this is inside a start_sequence, make the outer-level insn
1389 chain current, so the code is placed at the start of the
1391 push_topmost_sequence ();
1392 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1393 pop_topmost_sequence ();
1398 /* Perform an untyped call and save the state required to perform an
1399 untyped return of whatever value was returned by the given function. */
1402 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1404 int size, align, regno;
1405 enum machine_mode mode;
1406 rtx incoming_args, result, reg, dest, src, call_insn;
1407 rtx old_stack_level = 0;
1408 rtx call_fusage = 0;
1409 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1411 arguments = convert_memory_address (Pmode, arguments);
1413 /* Create a block where the return registers can be saved. */
1414 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1416 /* Fetch the arg pointer from the ARGUMENTS block. */
1417 incoming_args = gen_reg_rtx (Pmode);
1418 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1419 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved pointer is past the arguments;
   step back over them.  */
1420 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1421 incoming_args, 0, OPTAB_LIB_WIDEN);
1424 /* Push a new argument block and copy the arguments. Do not allow
1425 the (potential) memcpy call below to interfere with our stack
1427 do_pending_stack_adjust ();
1430 /* Save the stack with nonlocal if available. */
1431 #ifdef HAVE_save_stack_nonlocal
1432 if (HAVE_save_stack_nonlocal)
1433 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1436 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1438 /* Allocate a block of memory onto the stack and copy the memory
1439 arguments to the outgoing arguments address. */
1440 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1441 dest = virtual_outgoing_args_rtx;
1442 #ifndef STACK_GROWS_DOWNWARD
1443 if (GET_CODE (argsize) == CONST_INT)
1444 dest = plus_constant (dest, -INTVAL (argsize));
1446 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1448 dest = gen_rtx_MEM (BLKmode, dest);
1449 set_mem_align (dest, PARM_BOUNDARY);
1450 src = gen_rtx_MEM (BLKmode, incoming_args);
1451 set_mem_align (src, PARM_BOUNDARY);
/* Copy the caller-recorded argument block onto our fresh outgoing
   argument area (may expand to a library memcpy call).  */
1452 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1454 /* Refer to the argument block. */
1456 arguments = gen_rtx_MEM (BLKmode, arguments);
1457 set_mem_align (arguments, PARM_BOUNDARY);
1459 /* Walk past the arg-pointer and structure value address. */
1460 size = GET_MODE_SIZE (Pmode);
1462 size += GET_MODE_SIZE (Pmode);
1464 /* Restore each of the registers previously saved. Make USE insns
1465 for each of these registers for use in making the call. */
1466 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1467 if ((mode = apply_args_mode[regno]) != VOIDmode)
1469 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1470 if (size % align != 0)
1471 size = CEIL (size, align) * align;
1472 reg = gen_rtx_REG (mode, regno);
1473 emit_move_insn (reg, adjust_address (arguments, mode, size));
1474 use_reg (&call_fusage, reg);
1475 size += GET_MODE_SIZE (mode);
1478 /* Restore the structure value address unless this is passed as an
1479 "invisible" first argument. */
1480 size = GET_MODE_SIZE (Pmode);
1483 rtx value = gen_reg_rtx (Pmode);
1484 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1485 emit_move_insn (struct_value, value);
1486 if (REG_P (struct_value))
1487 use_reg (&call_fusage, struct_value);
1488 size += GET_MODE_SIZE (Pmode);
1491 /* All arguments and registers used for the call are set up by now! */
1492 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1494 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1495 and we don't want to load it into a register as an optimization,
1496 because prepare_call_address already did it if it should be done. */
1497 if (GET_CODE (function) != SYMBOL_REF)
1498 function = memory_address (FUNCTION_MODE, function);
1500 /* Generate the actual call instruction and save the return value. */
1501 #ifdef HAVE_untyped_call
1502 if (HAVE_untyped_call)
1503 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1504 result, result_vector (1, result)));
1507 #ifdef HAVE_call_value
1508 if (HAVE_call_value)
1512 /* Locate the unique return register. It is not possible to
1513 express a call that sets more than one return register using
1514 call_value; use untyped_call for that. In fact, untyped_call
1515 only needs to save the return registers in the given block. */
1516 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1517 if ((mode = apply_result_mode[regno]) != VOIDmode)
1519 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1521 valreg = gen_rtx_REG (mode, regno);
1524 emit_call_insn (GEN_CALL_VALUE (valreg,
1525 gen_rtx_MEM (FUNCTION_MODE, function),
1526 const0_rtx, NULL_RTX, const0_rtx));
1528 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1534 /* Find the CALL insn we just emitted, and attach the register usage
1536 call_insn = last_call_insn ();
1537 add_function_usage_to (call_insn, call_fusage);
1539 /* Restore the stack. */
1540 #ifdef HAVE_save_stack_nonlocal
1541 if (HAVE_save_stack_nonlocal)
1542 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1545 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1549 /* Return the address of the result block. */
1550 result = copy_addr_to_reg (XEXP (result, 0));
1551 return convert_memory_address (ptr_mode, result);
1554 /* Perform an untyped return. */
1557 expand_builtin_return (rtx result)
1559 int size, align, regno;
1560 enum machine_mode mode;
1562 rtx call_fusage = 0;
1564 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1566 apply_result_size ();
1567 result = gen_rtx_MEM (BLKmode, result);
1569 #ifdef HAVE_untyped_return
1570 if (HAVE_untyped_return)
1572 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1578 /* Restore the return value and note that each value is used. */
1580 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1581 if ((mode = apply_result_mode[regno]) != VOIDmode)
1583 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1584 if (size % align != 0)
1585 size = CEIL (size, align) * align;
1586 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1587 emit_move_insn (reg, adjust_address (result, mode, size));
/* Collect the USE of each restored register in a side sequence so
   they all end up just before the return.  */
1589 push_to_sequence (call_fusage);
1590 emit_insn (gen_rtx_USE (VOIDmode, reg));
1591 call_fusage = get_insns ();
1593 size += GET_MODE_SIZE (mode);
1596 /* Put the USE insns before the return. */
1597 emit_insn (call_fusage);
1599 /* Return whatever values was restored by jumping directly to the end
1601 expand_naked_return ();
1604 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a TREE_CODE onto the libgcc type_class enumeration; codes with
   no mapping fall through to no_type_class.  */
1606 static enum type_class
1607 type_to_class (tree type)
1609 switch (TREE_CODE (type))
1611 case VOID_TYPE: return void_type_class;
1612 case INTEGER_TYPE: return integer_type_class;
1613 case ENUMERAL_TYPE: return enumeral_type_class;
1614 case BOOLEAN_TYPE: return boolean_type_class;
1615 case POINTER_TYPE: return pointer_type_class;
1616 case REFERENCE_TYPE: return reference_type_class;
1617 case OFFSET_TYPE: return offset_type_class;
1618 case REAL_TYPE: return real_type_class;
1619 case COMPLEX_TYPE: return complex_type_class;
1620 case FUNCTION_TYPE: return function_type_class;
1621 case METHOD_TYPE: return method_type_class;
1622 case RECORD_TYPE: return record_type_class;
1624 case QUAL_UNION_TYPE: return union_type_class;
/* Arrays of chars flagged as strings classify separately.  */
1625 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1626 ? string_type_class : array_type_class);
1627 case LANG_TYPE: return lang_type_class;
1628 default: return no_type_class;
1632 /* Expand a call EXP to __builtin_classify_type. */
1635 expand_builtin_classify_type (tree exp)
1637 if (call_expr_nargs (exp))
1638 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1639 return GEN_INT (no_type_class);
1642 /* This helper macro, meant to be used in mathfn_built_in below,
1643 determines which among a set of three builtin math functions is
1644 appropriate for a given type mode. The `F' and `L' cases are
1645 automatically generated from the `double' case. */
1646 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1647 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1648 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1649 fcodel = BUILT_IN_MATHFN##L ; break;
1651 /* Return mathematic function equivalent to FN but operating directly
1652 on TYPE, if available. If we can't do the conversion, return zero. */
1654 mathfn_built_in (tree type, enum built_in_function fn)
/* fcode/fcodef/fcodel are set to the double/float/long-double members
   of FN's family by the CASE_MATHFN expansions below.  */
1656 enum built_in_function fcode, fcodef, fcodel;
1660 CASE_MATHFN (BUILT_IN_ACOS)
1661 CASE_MATHFN (BUILT_IN_ACOSH)
1662 CASE_MATHFN (BUILT_IN_ASIN)
1663 CASE_MATHFN (BUILT_IN_ASINH)
1664 CASE_MATHFN (BUILT_IN_ATAN)
1665 CASE_MATHFN (BUILT_IN_ATAN2)
1666 CASE_MATHFN (BUILT_IN_ATANH)
1667 CASE_MATHFN (BUILT_IN_CBRT)
1668 CASE_MATHFN (BUILT_IN_CEIL)
1669 CASE_MATHFN (BUILT_IN_CEXPI)
1670 CASE_MATHFN (BUILT_IN_COPYSIGN)
1671 CASE_MATHFN (BUILT_IN_COS)
1672 CASE_MATHFN (BUILT_IN_COSH)
1673 CASE_MATHFN (BUILT_IN_DREM)
1674 CASE_MATHFN (BUILT_IN_ERF)
1675 CASE_MATHFN (BUILT_IN_ERFC)
1676 CASE_MATHFN (BUILT_IN_EXP)
1677 CASE_MATHFN (BUILT_IN_EXP10)
1678 CASE_MATHFN (BUILT_IN_EXP2)
1679 CASE_MATHFN (BUILT_IN_EXPM1)
1680 CASE_MATHFN (BUILT_IN_FABS)
1681 CASE_MATHFN (BUILT_IN_FDIM)
1682 CASE_MATHFN (BUILT_IN_FLOOR)
1683 CASE_MATHFN (BUILT_IN_FMA)
1684 CASE_MATHFN (BUILT_IN_FMAX)
1685 CASE_MATHFN (BUILT_IN_FMIN)
1686 CASE_MATHFN (BUILT_IN_FMOD)
1687 CASE_MATHFN (BUILT_IN_FREXP)
1688 CASE_MATHFN (BUILT_IN_GAMMA)
1689 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1690 CASE_MATHFN (BUILT_IN_HYPOT)
1691 CASE_MATHFN (BUILT_IN_ILOGB)
1692 CASE_MATHFN (BUILT_IN_INF)
1693 CASE_MATHFN (BUILT_IN_ISINF)
1694 CASE_MATHFN (BUILT_IN_J0)
1695 CASE_MATHFN (BUILT_IN_J1)
1696 CASE_MATHFN (BUILT_IN_JN)
1697 CASE_MATHFN (BUILT_IN_LCEIL)
1698 CASE_MATHFN (BUILT_IN_LDEXP)
1699 CASE_MATHFN (BUILT_IN_LFLOOR)
1700 CASE_MATHFN (BUILT_IN_LGAMMA)
1701 CASE_MATHFN (BUILT_IN_LLCEIL)
1702 CASE_MATHFN (BUILT_IN_LLFLOOR)
1703 CASE_MATHFN (BUILT_IN_LLRINT)
1704 CASE_MATHFN (BUILT_IN_LLROUND)
1705 CASE_MATHFN (BUILT_IN_LOG)
1706 CASE_MATHFN (BUILT_IN_LOG10)
1707 CASE_MATHFN (BUILT_IN_LOG1P)
1708 CASE_MATHFN (BUILT_IN_LOG2)
1709 CASE_MATHFN (BUILT_IN_LOGB)
1710 CASE_MATHFN (BUILT_IN_LRINT)
1711 CASE_MATHFN (BUILT_IN_LROUND)
1712 CASE_MATHFN (BUILT_IN_MODF)
1713 CASE_MATHFN (BUILT_IN_NAN)
1714 CASE_MATHFN (BUILT_IN_NANS)
1715 CASE_MATHFN (BUILT_IN_NEARBYINT)
1716 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1717 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1718 CASE_MATHFN (BUILT_IN_POW)
1719 CASE_MATHFN (BUILT_IN_POWI)
1720 CASE_MATHFN (BUILT_IN_POW10)
1721 CASE_MATHFN (BUILT_IN_REMAINDER)
1722 CASE_MATHFN (BUILT_IN_REMQUO)
1723 CASE_MATHFN (BUILT_IN_RINT)
1724 CASE_MATHFN (BUILT_IN_ROUND)
1725 CASE_MATHFN (BUILT_IN_SCALB)
1726 CASE_MATHFN (BUILT_IN_SCALBLN)
1727 CASE_MATHFN (BUILT_IN_SCALBN)
1728 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1729 CASE_MATHFN (BUILT_IN_SIN)
1730 CASE_MATHFN (BUILT_IN_SINCOS)
1731 CASE_MATHFN (BUILT_IN_SINH)
1732 CASE_MATHFN (BUILT_IN_SQRT)
1733 CASE_MATHFN (BUILT_IN_TAN)
1734 CASE_MATHFN (BUILT_IN_TANH)
1735 CASE_MATHFN (BUILT_IN_TGAMMA)
1736 CASE_MATHFN (BUILT_IN_TRUNC)
1737 CASE_MATHFN (BUILT_IN_Y0)
1738 CASE_MATHFN (BUILT_IN_Y1)
1739 CASE_MATHFN (BUILT_IN_YN)
/* Pick the family member matching TYPE; only the implicitly-available
   decl is returned, which may be NULL_TREE.  */
1745 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1746 return implicit_built_in_decls[fcode];
1747 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1748 return implicit_built_in_decls[fcodef];
1749 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1750 return implicit_built_in_decls[fcodel];
1755 /* If errno must be maintained, expand the RTL to check if the result,
1756 TARGET, of a built-in function call, EXP, is NaN, and if so set
1760 expand_errno_check (tree exp, rtx target)
1762 rtx lab = gen_label_rtx ();
1764 /* Test the result; if it is NaN, set errno=EDOM because
1765 the argument was not in the domain. */
/* TARGET == TARGET is false only for a NaN, so this jumps over the
   errno store for any ordinary result.  */
1766 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1770 /* If this built-in doesn't throw an exception, set errno directly. */
1771 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1773 #ifdef GEN_ERRNO_RTX
1774 rtx errno_rtx = GEN_ERRNO_RTX;
/* Fallback when the target supplies no errno location: address the
   C library's "errno" symbol directly.  */
1777 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1779 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1785 /* We can't set errno=EDOM directly; let the library call do it.
1786 Pop the arguments right away in case the call gets deleted. */
1788 expand_call (exp, target, 0);
1793 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1794 Return NULL_RTX if a normal call should be emitted rather than expanding
1795 the function in-line. EXP is the expression that is a call to the builtin
1796 function; if convenient, the result should be placed in TARGET.
1797 SUBTARGET may be used as the target for computing one of EXP's operands. */
1800 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1802 optab builtin_optab;
1803 rtx op0, insns, before_call;
1804 tree fndecl = get_callee_fndecl (exp);
1805 enum machine_mode mode;
1806 bool errno_set = false;
1809 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1812 arg = CALL_EXPR_ARG (exp, 0);
/* Select the optab for this builtin and note whether the C library
   function may set errno (so we must emit the errno check).  */
1814 switch (DECL_FUNCTION_CODE (fndecl))
1816 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt of a provably non-negative argument cannot set errno.  */
1817 errno_set = ! tree_expr_nonnegative_p (arg);
1818 builtin_optab = sqrt_optab;
1820 CASE_FLT_FN (BUILT_IN_EXP):
1821 errno_set = true; builtin_optab = exp_optab; break;
1822 CASE_FLT_FN (BUILT_IN_EXP10):
1823 CASE_FLT_FN (BUILT_IN_POW10):
1824 errno_set = true; builtin_optab = exp10_optab; break;
1825 CASE_FLT_FN (BUILT_IN_EXP2):
1826 errno_set = true; builtin_optab = exp2_optab; break;
1827 CASE_FLT_FN (BUILT_IN_EXPM1):
1828 errno_set = true; builtin_optab = expm1_optab; break;
1829 CASE_FLT_FN (BUILT_IN_LOGB):
1830 errno_set = true; builtin_optab = logb_optab; break;
1831 CASE_FLT_FN (BUILT_IN_LOG):
1832 errno_set = true; builtin_optab = log_optab; break;
1833 CASE_FLT_FN (BUILT_IN_LOG10):
1834 errno_set = true; builtin_optab = log10_optab; break;
1835 CASE_FLT_FN (BUILT_IN_LOG2):
1836 errno_set = true; builtin_optab = log2_optab; break;
1837 CASE_FLT_FN (BUILT_IN_LOG1P):
1838 errno_set = true; builtin_optab = log1p_optab; break;
1839 CASE_FLT_FN (BUILT_IN_ASIN):
1840 builtin_optab = asin_optab; break;
1841 CASE_FLT_FN (BUILT_IN_ACOS):
1842 builtin_optab = acos_optab; break;
1843 CASE_FLT_FN (BUILT_IN_TAN):
1844 builtin_optab = tan_optab; break;
1845 CASE_FLT_FN (BUILT_IN_ATAN):
1846 builtin_optab = atan_optab; break;
1847 CASE_FLT_FN (BUILT_IN_FLOOR):
1848 builtin_optab = floor_optab; break;
1849 CASE_FLT_FN (BUILT_IN_CEIL):
1850 builtin_optab = ceil_optab; break;
1851 CASE_FLT_FN (BUILT_IN_TRUNC):
1852 builtin_optab = btrunc_optab; break;
1853 CASE_FLT_FN (BUILT_IN_ROUND):
1854 builtin_optab = round_optab; break;
1855 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1856 builtin_optab = nearbyint_optab;
1857 if (flag_trapping_math)
1859 /* Else fallthrough and expand as rint. */
1860 CASE_FLT_FN (BUILT_IN_RINT):
1861 builtin_optab = rint_optab; break;
1866 /* Make a suitable register to place result in. */
1867 mode = TYPE_MODE (TREE_TYPE (exp));
/* errno handling can be skipped when -fno-math-errno is in effect or
   the mode cannot represent a NaN.  */
1869 if (! flag_errno_math || ! HONOR_NANS (mode))
1872 /* Before working hard, check whether the instruction is available. */
1873 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1875 target = gen_reg_rtx (mode);
1877 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1878 need to expand the argument again. This way, we will not perform
1879 side-effects more the once. */
1880 narg = builtin_save_expr (arg);
/* Rebuild the call on the stabilized argument so a fallback
   library call sees the same operand.  */
1884 exp = build_call_expr (fndecl, 1, arg);
1887 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1891 /* Compute into TARGET.
1892 Set TARGET to wherever the result comes back. */
1893 target = expand_unop (mode, builtin_optab, op0, target, 0);
1898 expand_errno_check (exp, target);
1900 /* Output the entire sequence. */
1901 insns = get_insns ();
1907 /* If we were unable to expand via the builtin, stop the sequence
1908 (without outputting the insns) and call to the library function
1909 with the stabilized argument list. */
1913 before_call = get_last_insn ();
1915 target = expand_call (exp, target, target == const0_rtx);
1917 /* If this is a sqrt operation and we don't care about errno, try to
1918 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1919 This allows the semantics of the libcall to be visible to the RTL
1921 if (builtin_optab == sqrt_optab && !errno_set)
1923 /* Search backwards through the insns emitted by expand_call looking
1924 for the instruction with the REG_RETVAL note. */
1925 rtx last = get_last_insn ();
1926 while (last != before_call)
1928 if (find_reg_note (last, REG_RETVAL, NULL))
1930 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1931 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1932 two elements, i.e. symbol_ref(sqrt) and the operand. */
1934 && GET_CODE (note) == EXPR_LIST
1935 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1936 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1937 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1939 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1940 /* Check operand is a register with expected mode. */
1943 && GET_MODE (operand) == mode)
1945 /* Replace the REG_EQUAL note with a SQRT rtx. */
1946 rtx equiv = gen_rtx_SQRT (mode, operand);
1947 set_unique_reg_note (last, REG_EQUAL, equiv);
1952 last = PREV_INSN (last);
1959 /* Expand a call to the builtin binary math functions (pow and atan2).
1960 Return NULL_RTX if a normal call should be emitted rather than expanding the
1961 function in-line. EXP is the expression that is a call to the builtin
1962 function; if convenient, the result should be placed in TARGET.
1963 SUBTARGET may be used as the target for computing one of EXP's
1967 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1969 optab builtin_optab;
1970 rtx op0, op1, insns;
1971 int op1_type = REAL_TYPE;
1972 tree fndecl = get_callee_fndecl (exp);
1973 tree arg0, arg1, narg;
1974 enum machine_mode mode;
1975 bool errno_set = true;
/* ldexp-family builtins take an integer, not real, second operand;
   adjust what validate_arglist expects before checking.  */
1978 switch (DECL_FUNCTION_CODE (fndecl))
1980 CASE_FLT_FN (BUILT_IN_SCALBN):
1981 CASE_FLT_FN (BUILT_IN_SCALBLN):
1982 CASE_FLT_FN (BUILT_IN_LDEXP):
1983 op1_type = INTEGER_TYPE;
1988 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1991 arg0 = CALL_EXPR_ARG (exp, 0);
1992 arg1 = CALL_EXPR_ARG (exp, 1);
1994 switch (DECL_FUNCTION_CODE (fndecl))
1996 CASE_FLT_FN (BUILT_IN_POW):
1997 builtin_optab = pow_optab; break;
1998 CASE_FLT_FN (BUILT_IN_ATAN2):
1999 builtin_optab = atan2_optab; break;
2000 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb/scalbn only map onto the optab when FLT_RADIX is 2.  */
2001 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2003 builtin_optab = scalb_optab; break;
2004 CASE_FLT_FN (BUILT_IN_SCALBN):
2005 CASE_FLT_FN (BUILT_IN_SCALBLN):
2006 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2008 /* Fall through... */
2009 CASE_FLT_FN (BUILT_IN_LDEXP):
2010 builtin_optab = ldexp_optab; break;
2011 CASE_FLT_FN (BUILT_IN_FMOD):
2012 builtin_optab = fmod_optab; break;
2013 CASE_FLT_FN (BUILT_IN_REMAINDER):
2014 CASE_FLT_FN (BUILT_IN_DREM):
2015 builtin_optab = remainder_optab; break;
2020 /* Make a suitable register to place result in. */
2021 mode = TYPE_MODE (TREE_TYPE (exp));
2023 /* Before working hard, check whether the instruction is available. */
2024 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2027 target = gen_reg_rtx (mode);
/* Skip the errno check under -fno-math-errno or when the mode has
   no NaNs to test for.  */
2029 if (! flag_errno_math || ! HONOR_NANS (mode))
2032 /* Always stabilize the argument list. */
2033 narg = builtin_save_expr (arg1);
2039 narg = builtin_save_expr (arg0);
/* Rebuild the call on the stabilized arguments so the fallback
   library call evaluates side effects exactly once.  */
2047 exp = build_call_expr (fndecl, 2, arg0, arg1);
2049 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2050 op1 = expand_normal (arg1);
2054 /* Compute into TARGET.
2055 Set TARGET to wherever the result comes back. */
2056 target = expand_binop (mode, builtin_optab, op0, op1,
2057 target, 0, OPTAB_DIRECT);
2059 /* If we were unable to expand via the builtin, stop the sequence
2060 (without outputting the insns) and call to the library function
2061 with the stabilized argument list. */
2065 return expand_call (exp, target, target == const0_rtx);
2069 expand_errno_check (exp, target);
2071 /* Output the entire sequence. */
2072 insns = get_insns ();
2079 /* Expand a call to the builtin sin and cos math functions.
2080 Return NULL_RTX if a normal call should be emitted rather than expanding the
2081 function in-line. EXP is the expression that is a call to the builtin
2082 function; if convenient, the result should be placed in TARGET.
2083 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): line-numbered listing with elided lines; gaps in the
   numbering are missing source.  */
2087 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2089 optab builtin_optab;
2091 tree fndecl = get_callee_fndecl (exp);
2092 enum machine_mode mode;
2095 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2098 arg = CALL_EXPR_ARG (exp, 0);
/* Both sin and cos first try the combined sincos optab.  */
2100 switch (DECL_FUNCTION_CODE (fndecl))
2102 CASE_FLT_FN (BUILT_IN_SIN):
2103 CASE_FLT_FN (BUILT_IN_COS):
2104 builtin_optab = sincos_optab; break;
2109 /* Make a suitable register to place result in. */
2110 mode = TYPE_MODE (TREE_TYPE (exp));
2112 /* Check if sincos insn is available, otherwise fallback
2113 to sin or cos insn. */
2114 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2115 switch (DECL_FUNCTION_CODE (fndecl))
2117 CASE_FLT_FN (BUILT_IN_SIN):
2118 builtin_optab = sin_optab; break;
2119 CASE_FLT_FN (BUILT_IN_COS):
2120 builtin_optab = cos_optab; break;
2125 /* Before working hard, check whether the instruction is available. */
2126 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2128 target = gen_reg_rtx (mode);
2130 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2131 need to expand the argument again. This way, we will not perform
2132 side-effects more than once. */
2133 narg = save_expr (arg);
/* Rebuild the call with the saved argument for the library fallback.  */
2137 exp = build_call_expr (fndecl, 1, arg);
2140 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2144 /* Compute into TARGET.
2145 Set TARGET to wherever the result comes back. */
2146 if (builtin_optab == sincos_optab)
/* sincos produces two values; request only the one we need.  Note the
   operand positions differ: sin is the second output, cos the first.  */
2150 switch (DECL_FUNCTION_CODE (fndecl))
2152 CASE_FLT_FN (BUILT_IN_SIN):
2153 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2155 CASE_FLT_FN (BUILT_IN_COS):
2156 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2161 gcc_assert (result);
/* Plain single-output sin/cos insn.  */
2165 target = expand_unop (mode, builtin_optab, op0, target, 0);
2170 /* Output the entire sequence. */
2171 insns = get_insns ();
2177 /* If we were unable to expand via the builtin, stop the sequence
2178 (without outputting the insns) and call to the library function
2179 with the stabilized argument list. */
2183 target = expand_call (exp, target, target == const0_rtx);
2188 /* Expand a call to one of the builtin math functions that operate on
2189 floating point argument and output an integer result (ilogb, isinf,
2191 Return 0 if a normal call should be emitted rather than expanding the
2192 function in-line. EXP is the expression that is a call to the builtin
2193 function; if convenient, the result should be placed in TARGET.
2194 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): line-numbered listing with elided lines; gaps in the
   numbering are missing source.  */
2197 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2199 optab builtin_optab;
2200 enum insn_code icode;
2202 tree fndecl = get_callee_fndecl (exp);
2203 enum machine_mode mode;
2204 bool errno_set = false;
2207 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2210 arg = CALL_EXPR_ARG (exp, 0);
2212 switch (DECL_FUNCTION_CODE (fndecl))
2214 CASE_FLT_FN (BUILT_IN_ILOGB):
/* ilogb can set errno (EDOM for 0/NaN/Inf), so refuse inline
   expansion under -fmath-errno below.  */
2215 errno_set = true; builtin_optab = ilogb_optab; break;
2216 CASE_FLT_FN (BUILT_IN_ISINF):
2217 builtin_optab = isinf_optab; break;
2222 /* There's no easy way to detect the case we need to set EDOM. */
2223 if (flag_errno_math && errno_set)
2226 /* Optab mode depends on the mode of the input argument. */
2227 mode = TYPE_MODE (TREE_TYPE (arg));
2229 icode = builtin_optab->handlers[(int) mode].insn_code;
2231 /* Before working hard, check whether the instruction is available. */
2232 if (icode != CODE_FOR_nothing)
2234 /* Make a suitable register to place result in. */
2236 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2237 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)))
2239 gcc_assert (insn_data[icode].operand[0].predicate
2240 (target, GET_MODE (target)));
2242 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2243 need to expand the argument again. This way, we will not perform
2244 side-effects more than once. */
2245 narg = builtin_save_expr (arg);
/* Rebuild the call with the saved argument for the fallback path.  */
2249 exp = build_call_expr (fndecl, 1, arg);
2252 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* Input may have been promoted; force it into the optab's mode.  */
2254 if (mode != GET_MODE (op0))
2255 op0 = convert_to_mode (mode, op0, 0);
2257 /* Compute into TARGET.
2258 Set TARGET to wherever the result comes back. */
2259 emit_unop_insn (icode, target, op0, UNKNOWN);
/* Fallback: emit a normal library call.  */
2263 target = expand_call (exp, target, target == const0_rtx);
2268 /* Expand a call to the builtin sincos math function.
2269 Return NULL_RTX if a normal call should be emitted rather than expanding the
2270 function in-line. EXP is the expression that is a call to the builtin
/* NOTE(review): line-numbered listing with elided lines; gaps in the
   numbering are missing source.  */
2274 expand_builtin_sincos (tree exp)
2276 rtx op0, op1, op2, target1, target2;
2277 enum machine_mode mode;
2278 tree arg, sinp, cosp;
/* sincos (x, &sin_result, &cos_result): one real, two pointer args.  */
2281 if (!validate_arglist (exp, REAL_TYPE,
2282 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2285 arg = CALL_EXPR_ARG (exp, 0);
2286 sinp = CALL_EXPR_ARG (exp, 1);
2287 cosp = CALL_EXPR_ARG (exp, 2);
2289 /* Make a suitable register to place result in. */
2290 mode = TYPE_MODE (TREE_TYPE (arg));
2292 /* Check if sincos insn is available, otherwise emit the call. */
2293 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2296 target1 = gen_reg_rtx (mode);
2297 target2 = gen_reg_rtx (mode);
2299 op0 = expand_normal (arg);
/* op1/op2 are the MEMs the output pointers designate.  */
2300 op1 = expand_normal (build_fold_indirect_ref (sinp));
2301 op2 = expand_normal (build_fold_indirect_ref (cosp));
2303 /* Compute into target1 and target2.
2304 Set TARGET to wherever the result comes back. */
2305 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2306 gcc_assert (result);
2308 /* Move target1 and target2 to the memory locations indicated
2310 emit_move_insn (op1, target1);
2311 emit_move_insn (op2, target2);
2316 /* Expand a call to the internal cexpi builtin to the sincos math function.
2317 EXP is the expression that is a call to the builtin function; if convenient,
2318 the result should be placed in TARGET. SUBTARGET may be used as the target
2319 for computing one of EXP's operands. */
/* NOTE(review): line-numbered listing with elided lines; gaps in the
   numbering are missing source.  Three strategies are tried in order:
   sincos optab insn, sincos libcall, then a cexp libcall.  */
2322 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2324 tree fndecl = get_callee_fndecl (exp);
2326 enum machine_mode mode;
2329 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2332 arg = CALL_EXPR_ARG (exp, 0);
2333 type = TREE_TYPE (arg);
2334 mode = TYPE_MODE (TREE_TYPE (arg));
2336 /* Try expanding via a sincos optab, fall back to emitting a libcall
2337 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2338 is only generated from sincos, cexp or if we have either of them. */
2339 if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2341 op1 = gen_reg_rtx (mode);
2342 op2 = gen_reg_rtx (mode);
2344 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2346 /* Compute into op1 and op2. */
2347 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2349 else if (TARGET_HAS_SINCOS)
2351 tree call, fn = NULL_TREE;
/* Pick the sincos variant matching the cexpi precision.  */
2355 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2356 fn = built_in_decls[BUILT_IN_SINCOSF];
2357 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2358 fn = built_in_decls[BUILT_IN_SINCOS];
2359 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2360 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Stack temporaries receive the sin and cos results; their addresses
   are passed to sincos as trees built from the pointer regs.  */
2364 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2365 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2366 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2367 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2368 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2369 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2371 /* Make sure not to fold the sincos call again. */
2372 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2373 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2374 call, 3, arg, top1, top2));
2378 tree call, fn = NULL_TREE, narg;
2379 tree ctype = build_complex_type (type);
/* Last resort: compute cexp (0 + arg*i) via the cexp family.  */
2381 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2382 fn = built_in_decls[BUILT_IN_CEXPF];
2383 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2384 fn = built_in_decls[BUILT_IN_CEXP];
2385 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2386 fn = built_in_decls[BUILT_IN_CEXPL];
2390 /* If we don't have a decl for cexp create one. This is the
2391 friendliest fallback if the user calls __builtin_cexpi
2392 without full target C99 function support. */
2393 if (fn == NULL_TREE)
2396 const char *name = NULL;
/* Elided lines presumably assign "cexpf"/"cexp"/"cexpl" -- confirm.  */
2398 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2400 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2402 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2405 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2406 fn = build_fn_decl (name, fntype);
/* Build the complex argument 0 + arg*i.  */
2409 narg = fold_build2 (COMPLEX_EXPR, ctype,
2410 build_real (type, dconst0), arg);
2412 /* Make sure not to fold the cexp call again. */
2413 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2414 return expand_expr (build_call_nary (ctype, call, 1, narg),
2415 target, VOIDmode, EXPAND_NORMAL);
2418 /* Now build the proper return type: a complex value cos + sin*i
2419 from the two scalar results. */
2419 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2420 make_tree (TREE_TYPE (arg), op2),
2421 make_tree (TREE_TYPE (arg), op1)),
2422 target, VOIDmode, EXPAND_NORMAL);
2425 /* Expand a call to one of the builtin rounding functions gcc defines
2426 as an extension (lfloor and lceil). As these are gcc extensions we
2427 do not need to worry about setting errno to EDOM.
2428 If expanding via optab fails, lower expression to (int)(floor(x)).
2429 EXP is the expression that is a call to the builtin function;
2430 if convenient, the result should be placed in TARGET. SUBTARGET may
2431 be used as the target for computing one of EXP's operands. */
/* NOTE(review): line-numbered listing with elided lines; gaps in the
   numbering are missing source.  */
2434 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2436 convert_optab builtin_optab;
2437 rtx op0, insns, tmp;
2438 tree fndecl = get_callee_fndecl (exp);
2439 enum built_in_function fallback_fn;
2440 tree fallback_fndecl;
2441 enum machine_mode mode;
2444 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2447 arg = CALL_EXPR_ARG (exp, 0);
/* Select the conversion optab and the FP rounding builtin to fall
   back on ((long)ceil(x) / (long)floor(x)).  */
2449 switch (DECL_FUNCTION_CODE (fndecl))
2451 CASE_FLT_FN (BUILT_IN_LCEIL):
2452 CASE_FLT_FN (BUILT_IN_LLCEIL):
2453 builtin_optab = lceil_optab;
2454 fallback_fn = BUILT_IN_CEIL;
2457 CASE_FLT_FN (BUILT_IN_LFLOOR):
2458 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2459 builtin_optab = lfloor_optab;
2460 fallback_fn = BUILT_IN_FLOOR;
2467 /* Make a suitable register to place result in. */
2468 mode = TYPE_MODE (TREE_TYPE (exp));
2470 target = gen_reg_rtx (mode);
2472 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2473 need to expand the argument again. This way, we will not perform
2474 side-effects more than once. */
2475 narg = builtin_save_expr (arg);
2479 exp = build_call_expr (fndecl, 1, arg);
2482 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2486 /* Compute into TARGET. */
2487 if (expand_sfix_optab (target, op0, builtin_optab))
2489 /* Output the entire sequence. */
2490 insns = get_insns ();
2496 /* If we were unable to expand via the builtin, stop the sequence
2497 (without outputting the insns). */
2500 /* Fall back to floating point rounding optab. */
2501 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2503 /* For non-C99 targets we may end up without a fallback fndecl here
2504 if the user called __builtin_lfloor directly. In this case emit
2505 a call to the floor/ceil variants nevertheless. This should result
2506 in the best user experience for not full C99 targets. */
2507 if (fallback_fndecl == NULL_TREE)
2510 const char *name = NULL;
/* Map each lceil/lfloor variant to the matching libm name; the
   assignments ("ceil", "ceilf", ... "floorl") are elided here.  */
2512 switch (DECL_FUNCTION_CODE (fndecl))
2514 case BUILT_IN_LCEIL:
2515 case BUILT_IN_LLCEIL:
2518 case BUILT_IN_LCEILF:
2519 case BUILT_IN_LLCEILF:
2522 case BUILT_IN_LCEILL:
2523 case BUILT_IN_LLCEILL:
2526 case BUILT_IN_LFLOOR:
2527 case BUILT_IN_LLFLOOR:
2530 case BUILT_IN_LFLOORF:
2531 case BUILT_IN_LLFLOORF:
2534 case BUILT_IN_LFLOORL:
2535 case BUILT_IN_LLFLOORL:
2542 fntype = build_function_type_list (TREE_TYPE (arg),
2543 TREE_TYPE (arg), NULL_TREE);
2544 fallback_fndecl = build_fn_decl (name, fntype);
/* Expand the FP rounding call, then convert to integer.  */
2547 exp = build_call_expr (fallback_fndecl, 1, arg);
2549 tmp = expand_normal (exp);
2551 /* Truncate the result of floating point optab to integer
2552 via expand_fix (). */
2553 target = gen_reg_rtx (mode);
2554 expand_fix (target, tmp, 0);
2559 /* Expand a call to one of the builtin math functions doing integer
2561 Return 0 if a normal call should be emitted rather than expanding the
2562 function in-line. EXP is the expression that is a call to the builtin
2563 function; if convenient, the result should be placed in TARGET.
2564 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* Handles lrint/llrint and lround/llround.  NOTE(review): line-numbered
   listing with elided lines; gaps in the numbering are missing source.  */
2567 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2569 convert_optab builtin_optab;
2571 tree fndecl = get_callee_fndecl (exp);
2573 enum machine_mode mode;
2575 /* There's no easy way to detect the case we need to set EDOM. */
2576 if (flag_errno_math)
2579 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2582 arg = CALL_EXPR_ARG (exp, 0);
2584 switch (DECL_FUNCTION_CODE (fndecl))
2586 CASE_FLT_FN (BUILT_IN_LRINT):
2587 CASE_FLT_FN (BUILT_IN_LLRINT):
2588 builtin_optab = lrint_optab; break;
2589 CASE_FLT_FN (BUILT_IN_LROUND):
2590 CASE_FLT_FN (BUILT_IN_LLROUND):
2591 builtin_optab = lround_optab; break;
2596 /* Make a suitable register to place result in. */
2597 mode = TYPE_MODE (TREE_TYPE (exp));
2599 target = gen_reg_rtx (mode);
2601 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2602 need to expand the argument again. This way, we will not perform
2603 side-effects more than once. */
2604 narg = builtin_save_expr (arg);
2608 exp = build_call_expr (fndecl, 1, arg);
2611 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* Try the FP-to-integer conversion optab.  */
2615 if (expand_sfix_optab (target, op0, builtin_optab))
2617 /* Output the entire sequence. */
2618 insns = get_insns ();
2624 /* If we were unable to expand via the builtin, stop the sequence
2625 (without outputting the insns) and call to the library function
2626 with the stabilized argument list. */
2629 target = expand_call (exp, target, target == const0_rtx);
2634 /* To evaluate powi(x,n), the floating point value x raised to the
2635 constant integer exponent n, we use a hybrid algorithm that
2636 combines the "window method" with look-up tables. For an
2637 introduction to exponentiation algorithms and "addition chains",
2638 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2639 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2640 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2641 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2643 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2644 multiplications to inline before calling the system library's pow
2645 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2646 so this default never requires calling pow, powf or powl. */
2648 #ifndef POWI_MAX_MULTS
2649 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2652 /* The size of the "optimal power tree" lookup table. All
2653 exponents less than this value are simply looked up in the
2654 powi_table below. This threshold is also used to size the
2655 cache of pseudo registers that hold intermediate results. */
2656 #define POWI_TABLE_SIZE 256
2658 /* The size, in bits of the window, used in the "window method"
2659 exponentiation algorithm. This is equivalent to a radix of
2660 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2661 #define POWI_WINDOW_SIZE 3
2663 /* The following table is an efficient representation of an
2664 "optimal power tree". For each value, i, the corresponding
2665 value, j, in the table states that an optimal evaluation
2666 sequence for calculating pow(x,i) can be found by evaluating
2667 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2668 100 integers is given in Knuth's "Seminumerical algorithms". */
2670 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2672 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2673 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2674 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2675 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2676 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2677 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2678 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2679 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2680 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2681 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2682 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2683 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2684 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2685 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2686 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2687 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2688 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2689 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2690 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2691 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2692 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2693 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2694 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2695 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2696 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2697 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2698 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2699 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2700 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2701 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2702 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2703 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2707 /* Return the number of multiplications required to calculate
2708 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2709 subroutine of powi_cost. CACHE is an array indicating
2710 which exponents have already been calculated. */
/* NOTE(review): elided lines presumably return 0 for cached/trivial
   exponents and mark cache[n] before recursing -- confirm.  */
2713 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2715 /* If we've already calculated this exponent, then this evaluation
2716 doesn't require any additional multiplications. */
/* Recurse on the optimal split from powi_table; +1 for the final
   multiply combining the two partial powers.  */
2721 return powi_lookup_cost (n - powi_table[n], cache)
2722 + powi_lookup_cost (powi_table[n], cache) + 1;
2725 /* Return the number of multiplications required to calculate
2726 powi(x,n) for an arbitrary x, given the exponent N. This
2727 function needs to be kept in sync with expand_powi below. */
2730 powi_cost (HOST_WIDE_INT n)
2732 bool cache[POWI_TABLE_SIZE];
2733 unsigned HOST_WIDE_INT digit;
2734 unsigned HOST_WIDE_INT val;
2740 /* Ignore the reciprocal when calculating the cost. */
2741 val = (n < 0) ? -n : n;
2743 /* Initialize the exponent cache. */
2744 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel off POWI_WINDOW_SIZE-bit digits until the
   remaining exponent fits in the lookup table.  */
2749 while (val >= POWI_TABLE_SIZE)
2753 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2754 result += powi_lookup_cost (digit, cache)
2755 + POWI_WINDOW_SIZE + 1;
2756 val >>= POWI_WINDOW_SIZE;
/* Add the table-driven cost of the remaining small exponent.  */
2765 return result + powi_lookup_cost (val, cache);
2768 /* Recursive subroutine of expand_powi. This function takes the array,
2769 CACHE, of already calculated exponents and an exponent N and returns
2770 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
/* NOTE(review): listing elides the cache-hit early return and the
   branch structure around the three cases below.  */
2773 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2775 unsigned HOST_WIDE_INT digit;
/* Small exponent: split per the optimal power tree.  */
2779 if (n < POWI_TABLE_SIZE)
2784 target = gen_reg_rtx (mode);
2787 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2788 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Odd window digit: x**n = x**(n-digit) * x**digit.  */
2792 target = gen_reg_rtx (mode);
2793 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2794 op0 = expand_powi_1 (mode, n - digit, cache);
2795 op1 = expand_powi_1 (mode, digit, cache);
/* Even exponent: square x**(n/2).  */
2799 target = gen_reg_rtx (mode);
2800 op0 = expand_powi_1 (mode, n >> 1, cache);
2804 result = expand_mult (mode, op0, op1, target, 0);
2805 if (result != target)
2806 emit_move_insn (target, result);
2810 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2811 floating point operand in mode MODE, and N is the exponent. This
2812 function needs to be kept in sync with powi_cost above. */
2815 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2817 unsigned HOST_WIDE_INT val;
/* cache[i] holds the rtx for x**i once computed; cache[1] is x.  */
2818 rtx cache[POWI_TABLE_SIZE];
/* x**0 == 1 (exact even for NaN under -funsafe-math).  */
2822 return CONST1_RTX (mode);
2824 val = (n < 0) ? -n : n;
2826 memset (cache, 0, sizeof (cache));
2829 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2831 /* If the original exponent was negative, reciprocate the result. */
2833 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2834 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2839 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2840 a normal call should be emitted rather than expanding the function
2841 in-line. EXP is the expression that is a call to the builtin
2842 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): line-numbered listing with elided lines.  Strategy:
   integer exponents -> expand_powi multiplication chains; half-integer
   exponents -> sqrt(x) * x**(n/2); third-of-integer exponents ->
   cbrt-based expansion; otherwise defer to expand_builtin_mathfn_2.  */
2845 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2849 tree type = TREE_TYPE (exp);
2850 REAL_VALUE_TYPE cint, c, c2;
2853 enum machine_mode mode = TYPE_MODE (type);
2855 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2858 arg0 = CALL_EXPR_ARG (exp, 0);
2859 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponent: use the generic two-arg optab path.  */
2861 if (TREE_CODE (arg1) != REAL_CST
2862 || TREE_OVERFLOW (arg1))
2863 return expand_builtin_mathfn_2 (exp, target, subtarget);
2865 /* Handle constant exponents. */
2867 /* For integer valued exponents we can expand to an optimal multiplication
2868 sequence using expand_powi. */
2869 c = TREE_REAL_CST (arg1);
2870 n = real_to_integer (&c);
2871 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* n in [-1,2] is always exact; larger |n| only under
   -funsafe-math-optimizations and within the multiplication budget.  */
2872 if (real_identical (&c, &cint)
2873 && ((n >= -1 && n <= 2)
2874 || (flag_unsafe_math_optimizations
2876 && powi_cost (n) <= POWI_MAX_MULTS)))
2878 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2881 op = force_reg (mode, op);
2882 op = expand_powi (op, mode, n);
/* Stabilize arg0: it is expanded more than once below.  */
2887 narg0 = builtin_save_expr (arg0);
2889 /* If the exponent is not integer valued, check if it is half of an integer.
2890 In this case we can expand to sqrt (x) * x**(n/2). */
2891 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2892 if (fn != NULL_TREE)
2894 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2895 n = real_to_integer (&c2);
2896 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2897 if (real_identical (&c2, &cint)
2898 && ((flag_unsafe_math_optimizations
2900 && powi_cost (n/2) <= POWI_MAX_MULTS)
2903 tree call_expr = build_call_expr (fn, 1, narg0);
2904 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2907 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2908 op2 = force_reg (mode, op2);
2909 op2 = expand_powi (op2, mode, abs (n / 2));
2910 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2911 0, OPTAB_LIB_WIDEN);
2912 /* If the original exponent was negative, reciprocate the
2915 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2916 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2922 /* Try if the exponent is a third of an integer. In this case
2923 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2924 different from pow (x, 1./3.) due to rounding and behavior
2925 with negative x we need to constrain this transformation to
2926 unsafe math and positive x or finite math. */
2927 fn = mathfn_built_in (type, BUILT_IN_CBRT)
2929 && flag_unsafe_math_optimizations
2930 && (tree_expr_nonnegative_p (arg0)
2931 || !HONOR_NANS (mode)))
/* Round 3*c to the nearest integer and verify c == n/3 exactly
   in this mode before committing to the cbrt expansion.  */
2933 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2934 real_round (&c2, mode, &c2);
2935 n = real_to_integer (&c2);
2936 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2937 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2938 real_convert (&c2, mode, &c2);
2939 if (real_identical (&c2, &c)
2941 && powi_cost (n/3) <= POWI_MAX_MULTS)
2944 tree call_expr = build_call_expr (fn, 1,narg0);
2945 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* n % 3 == 2 needs cbrt(x)**2: square the cbrt result.  */
2946 if (abs (n) % 3 == 2)
2947 op = expand_simple_binop (mode, MULT, op, op, op,
2948 0, OPTAB_LIB_WIDEN);
2951 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2952 op2 = force_reg (mode, op2);
2953 op2 = expand_powi (op2, mode, abs (n / 3));
2954 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2955 0, OPTAB_LIB_WIDEN);
2956 /* If the original exponent was negative, reciprocate the
2959 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2960 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2966 /* Fall back to optab expansion. */
2967 return expand_builtin_mathfn_2 (exp, target, subtarget);
2970 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2971 a normal call should be emitted rather than expanding the function
2972 in-line. EXP is the expression that is a call to the builtin
2973 function; if convenient, the result should be placed in TARGET. */
/* NOTE(review): line-numbered listing with elided lines.  */
2976 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2980 enum machine_mode mode;
2981 enum machine_mode mode2;
2983 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2986 arg0 = CALL_EXPR_ARG (exp, 0);
2987 arg1 = CALL_EXPR_ARG (exp, 1);
2988 mode = TYPE_MODE (TREE_TYPE (exp));
2990 /* Handle constant power. */
2992 if (TREE_CODE (arg1) == INTEGER_CST
2993 && !TREE_OVERFLOW (arg1))
2995 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
2997 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2998 Otherwise, check the number of multiplications required. */
/* The HIGH-word test ensures the exponent fits a HOST_WIDE_INT.  */
2999 if ((TREE_INT_CST_HIGH (arg1) == 0
3000 || TREE_INT_CST_HIGH (arg1) == -1)
3001 && ((n >= -1 && n <= 2)
3003 && powi_cost (n) <= POWI_MAX_MULTS)))
3005 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3006 op0 = force_reg (mode, op0);
3007 return expand_powi (op0, mode, n);
3011 /* Emit a libcall to libgcc. */
3013 /* Mode of the 2nd argument must match that of an int. */
3014 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3016 if (target == NULL_RTX)
3017 target = gen_reg_rtx (mode);
3019 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3020 if (GET_MODE (op0) != mode)
3021 op0 = convert_to_mode (mode, op0, 0);
3022 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3023 if (GET_MODE (op1) != mode2)
3024 op1 = convert_to_mode (mode2, op1, 0);
/* __powisf2/__powidf2/... from libgcc, via the powi optab's libfunc.  */
3026 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
3027 target, LCT_CONST_MAKE_BLOCK, mode, 2,
3028 op0, mode, op1, mode2);
3033 /* Expand expression EXP which is a call to the strlen builtin. Return
3034 NULL_RTX if we failed the caller should emit a normal call, otherwise
3035 try to get the result in TARGET, if convenient. */
/* NOTE(review): line-numbered listing with elided lines; gaps in the
   numbering are missing source.  */
3038 expand_builtin_strlen (tree exp, rtx target,
3039 enum machine_mode target_mode)
3041 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3047 tree src = CALL_EXPR_ARG (exp, 0);
3048 rtx result, src_reg, char_rtx, before_strlen;
3049 enum machine_mode insn_mode = target_mode, char_mode;
3050 enum insn_code icode = CODE_FOR_nothing;
3053 /* If the length can be computed at compile-time, return it. */
3054 len = c_strlen (src, 0);
3056 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3058 /* If the length can be computed at compile-time and is constant
3059 integer, but there are side-effects in src, evaluate
3060 src for side-effects, then return len.
3061 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3062 can be optimized into: i++; x = 3; */
3063 len = c_strlen (src, 1);
3064 if (len && TREE_CODE (len) == INTEGER_CST)
3066 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3067 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3070 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3072 /* If SRC is not a pointer type, don't do this operation inline. */
3076 /* Bail out if we can't compute strlen in the right mode. */
/* Walk wider integer modes until the target has a strlen insn.  */
3077 while (insn_mode != VOIDmode)
3079 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
3080 if (icode != CODE_FOR_nothing)
3083 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3085 if (insn_mode == VOIDmode)
3088 /* Make a place to write the result of the instruction. */
3092 && GET_MODE (result) == insn_mode
3093 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3094 result = gen_reg_rtx (insn_mode);
3096 /* Make a place to hold the source address. We will not expand
3097 the actual source until we are sure that the expansion will
3098 not fail -- there are trees that cannot be expanded twice. */
3099 src_reg = gen_reg_rtx (Pmode);
3101 /* Mark the beginning of the strlen sequence so we can emit the
3102 source operand later. */
3103 before_strlen = get_last_insn ();
/* Operand 2 of the strlen pattern is the (zero) search character.  */
3105 char_rtx = const0_rtx;
3106 char_mode = insn_data[(int) icode].operand[2].mode;
3107 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3109 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3111 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3112 char_rtx, GEN_INT (align));
3117 /* Now that we are assured of success, expand the source. */
3119 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3121 emit_move_insn (src_reg, pat);
/* Splice the source-address computation in before the strlen insn.  */
3126 emit_insn_after (pat, before_strlen);
3128 emit_insn_before (pat, get_insns ());
3130 /* Return the value in the proper mode for this function. */
3131 if (GET_MODE (result) == target_mode)
3133 else if (target != 0)
3134 convert_move (target, result, 0);
3136 target = convert_to_mode (target_mode, result, 0);
3142 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3143 caller should emit a normal call, otherwise try to get the result
3144 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Delegates all the work to the tree-level folder; only folded
   results are expanded inline.  */
3147 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3149 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3151 tree type = TREE_TYPE (exp);
3152 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3153 CALL_EXPR_ARG (exp, 1), type);
3155 return expand_expr (result, target, mode, EXPAND_NORMAL);
3160 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3161 caller should emit a normal call, otherwise try to get the result
3162 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Like strstr above: expand only when the folder simplifies the call.  */
3165 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3167 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3169 tree type = TREE_TYPE (exp);
3170 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3171 CALL_EXPR_ARG (exp, 1), type);
3173 return expand_expr (result, target, mode, EXPAND_NORMAL);
3175 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3180 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3181 caller should emit a normal call, otherwise try to get the result
3182 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Same fold-then-expand pattern as strstr/strchr.  */
3185 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3187 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3189 tree type = TREE_TYPE (exp);
3190 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3191 CALL_EXPR_ARG (exp, 1), type);
3193 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; dropped lines must come from the original file.  */
3198 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3199 caller should emit a normal call, otherwise try to get the result
3200 in TARGET, if convenient (and in mode MODE if that's convenient). */
3203 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3205 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3207 tree type = TREE_TYPE (exp);
/* Constant-fold strpbrk where possible and expand the folded result.  */
3208 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3209 CALL_EXPR_ARG (exp, 1), type);
3211 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* NOTE(review): fragmentary extract -- the "rtx" return-type line and the
   braces were dropped by the extraction.  */
3216 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3217 bytes from constant string DATA + OFFSET and return it as target
3221 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3222 enum machine_mode mode)
3224 const char *str = (const char *) data;
/* The caller guarantees the read stays within the string plus its NUL;
   assert it rather than silently reading out of bounds.  */
3226 gcc_assert (offset >= 0
3227 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3228 <= strlen (str) + 1));
3230 return c_readstr (str + offset, mode);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; several guard lines (e.g. "if (result)", "if (src_str",
   returns and braces) were dropped and must be restored from the original
   file before editing.  */
3233 /* Expand a call EXP to the memcpy builtin.
3234 Return NULL_RTX if we failed, the caller should emit a normal call,
3235 otherwise try to get the result in TARGET, if convenient (and in
3236 mode MODE if that's convenient). */
3239 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3241 tree fndecl = get_callee_fndecl (exp);
3243 if (!validate_arglist (exp,
3244 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3248 tree dest = CALL_EXPR_ARG (exp, 0);
3249 tree src = CALL_EXPR_ARG (exp, 1);
3250 tree len = CALL_EXPR_ARG (exp, 2);
3251 const char *src_str;
3252 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3253 unsigned int dest_align
3254 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3255 rtx dest_mem, src_mem, dest_addr, len_rtx;
/* First try the tree-level folder; a non-null RESULT means the whole
   call simplified and we just expand that instead.  */
3256 tree result = fold_builtin_memory_op (dest, src, len,
3257 TREE_TYPE (TREE_TYPE (fndecl)),
3259 HOST_WIDE_INT expected_size = -1;
3260 unsigned int expected_align = 0;
/* Peel off side-effect operands of a COMPOUND_EXPR folding result,
   expanding each for its effect only.  */
3264 while (TREE_CODE (result) == COMPOUND_EXPR)
3266 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3268 result = TREE_OPERAND (result, 1);
3270 return expand_expr (result, target, mode, EXPAND_NORMAL);
3273 /* If DEST is not a pointer type, call the normal function. */
3274 if (dest_align == 0)
3277 /* If either SRC is not a pointer type, don't do this
3278 operation in-line. */
/* Use value-profiling data, if any, to refine alignment/size hints.  */
3282 stringop_block_profile (exp, &expected_align, &expected_size);
3283 if (expected_align < dest_align)
3284 expected_align = dest_align;
3285 dest_mem = get_memory_rtx (dest, len);
3286 set_mem_align (dest_mem, dest_align);
3287 len_rtx = expand_normal (len);
3288 src_str = c_getstr (src);
3290 /* If SRC is a string constant and block move would be done
3291 by pieces, we can avoid loading the string from memory
3292 and only stored the computed constants. */
3294 && GET_CODE (len_rtx) == CONST_INT
3295 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3296 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3297 (void *) src_str, dest_align))
3299 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3300 builtin_memcpy_read_str,
3301 (void *) src_str, dest_align, 0);
3302 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3303 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3307 src_mem = get_memory_rtx (src, len);
3308 set_mem_align (src_mem, src_align);
3310 /* Copy word part most expediently. */
3311 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3312 CALL_EXPR_TAILCALL (exp)
3313 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3314 expected_align, expected_size);
3318 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3319 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; braces and the "return NULL_RTX;" path were dropped.  */
3325 /* Expand a call EXP to the mempcpy builtin.
3326 Return NULL_RTX if we failed; the caller should emit a normal call,
3327 otherwise try to get the result in TARGET, if convenient (and in
3328 mode MODE if that's convenient). If ENDP is 0 return the
3329 destination pointer, if ENDP is 1 return the end pointer ala
3330 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3334 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3336 if (!validate_arglist (exp,
3337 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3341 tree dest = CALL_EXPR_ARG (exp, 0);
3342 tree src = CALL_EXPR_ARG (exp, 1);
3343 tree len = CALL_EXPR_ARG (exp, 2);
/* Delegate to the argument-based helper; endp==1 gives the classic
   mempcpy "pointer past the last byte copied" return value.  */
3344 return expand_builtin_mempcpy_args (dest, src, len,
3346 target, mode, /*endp=*/ 1);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; guard lines ("if (fn == NULL_TREE)", "if (src_str",
   braces, returns) were dropped and must be restored from the original
   file before editing.  */
3350 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3351 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3352 so that this can also be called without constructing an actual CALL_EXPR.
3353 TYPE is the return type of the call. The other arguments and return value
3354 are the same as for expand_builtin_mempcpy. */
3357 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3358 rtx target, enum machine_mode mode, int endp)
3360 /* If return value is ignored, transform mempcpy into memcpy. */
3361 if (target == const0_rtx)
3363 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3368 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3369 target, mode, EXPAND_NORMAL);
3373 const char *src_str;
3374 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3375 unsigned int dest_align
3376 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3377 rtx dest_mem, src_mem, len_rtx;
/* Try tree-level folding first, same pattern as expand_builtin_memcpy.  */
3378 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3382 while (TREE_CODE (result) == COMPOUND_EXPR)
3384 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3386 result = TREE_OPERAND (result, 1);
3388 return expand_expr (result, target, mode, EXPAND_NORMAL);
3391 /* If either SRC or DEST is not a pointer type, don't do this
3392 operation in-line. */
3393 if (dest_align == 0 || src_align == 0)
3396 /* If LEN is not constant, call the normal function. */
3397 if (! host_integerp (len, 1))
3400 len_rtx = expand_normal (len);
3401 src_str = c_getstr (src);
3403 /* If SRC is a string constant and block move would be done
3404 by pieces, we can avoid loading the string from memory
3405 and only stored the computed constants. */
3407 && GET_CODE (len_rtx) == CONST_INT
3408 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3409 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3410 (void *) src_str, dest_align))
3412 dest_mem = get_memory_rtx (dest, len);
3413 set_mem_align (dest_mem, dest_align);
3414 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3415 builtin_memcpy_read_str,
3416 (void *) src_str, dest_align, endp);
3417 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3418 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise try a piecewise move when the constant length allows it.  */
3422 if (GET_CODE (len_rtx) == CONST_INT
3423 && can_move_by_pieces (INTVAL (len_rtx),
3424 MIN (dest_align, src_align)))
3426 dest_mem = get_memory_rtx (dest, len);
3427 set_mem_align (dest_mem, dest_align);
3428 src_mem = get_memory_rtx (src, len);
3429 set_mem_align (src_mem, src_align);
3430 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3431 MIN (dest_align, src_align), endp);
3432 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3433 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; dropped lines must come from the original file.  */
3441 /* Expand expression EXP, which is a call to the memmove builtin. Return
3442 NULL_RTX if we failed; the caller should emit a normal call. */
3445 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3447 if (!validate_arglist (exp,
3448 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3452 tree dest = CALL_EXPR_ARG (exp, 0);
3453 tree src = CALL_EXPR_ARG (exp, 1);
3454 tree len = CALL_EXPR_ARG (exp, 2);
/* All real work happens in the argument-based helper below.  */
3455 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3456 target, mode, ignore);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; the trailing "return NULL_RTX;" and braces were
   dropped.  */
3460 /* Helper function to do the actual work for expand_builtin_memmove. The
3461 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3462 so that this can also be called without constructing an actual CALL_EXPR.
3463 TYPE is the return type of the call. The other arguments and return value
3464 are the same as for expand_builtin_memmove. */
3467 expand_builtin_memmove_args (tree dest, tree src, tree len,
3468 tree type, rtx target, enum machine_mode mode,
/* endp==3 marks a memmove-style (possibly overlapping) copy for the
   folder.  */
3471 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3475 while (TREE_CODE (result) == COMPOUND_EXPR)
3477 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3479 result = TREE_OPERAND (result, 1);
3481 return expand_expr (result, target, mode, EXPAND_NORMAL);
3484 /* Otherwise, call the normal function. */
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; dropped lines must come from the original file.  */
3488 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3489 NULL_RTX if we failed the caller should emit a normal call. */
3492 expand_builtin_bcopy (tree exp, int ignore)
3494 tree type = TREE_TYPE (exp);
3495 tree src, dest, size;
3497 if (!validate_arglist (exp,
3498 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* bcopy's argument order is (src, dest, size) -- the reverse of
   memmove's -- hence the swap below.  */
3501 src = CALL_EXPR_ARG (exp, 0);
3502 dest = CALL_EXPR_ARG (exp, 1);
3503 size = CALL_EXPR_ARG (exp, 2);
3505 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3506 This is done this way so that if it isn't expanded inline, we fall
3507 back to calling bcopy instead of memmove. */
3508 return expand_builtin_memmove_args (dest, src,
3509 fold_convert (sizetype, size),
3510 type, const0_rtx, VOIDmode,
/* NOTE(review): fragmentary extract -- the enclosing "#ifndef HAVE_movstr"
   and "#endif", variable declarations, and several control-flow lines were
   dropped; restore from the original file before editing.  */
3515 # define HAVE_movstr 0
3516 # define CODE_FOR_movstr CODE_FOR_nothing
3519 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3520 we failed, the caller should emit a normal call, otherwise try to
3521 get the result in TARGET, if convenient. If ENDP is 0 return the
3522 destination pointer, if ENDP is 1 return the end pointer ala
3523 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3527 expand_movstr (tree dest, tree src, rtx target, int endp)
3533 const struct insn_data * data;
3538 dest_mem = get_memory_rtx (dest, NULL);
3539 src_mem = get_memory_rtx (src, NULL);
3542 target = force_reg (Pmode, XEXP (dest_mem, 0));
3543 dest_mem = replace_equiv_address (dest_mem, target);
3544 end = gen_reg_rtx (Pmode);
3548 if (target == 0 || target == const0_rtx)
3550 end = gen_reg_rtx (Pmode);
/* Look up the target's movstr pattern and adapt END's mode to what the
   pattern's first operand expects.  */
3558 data = insn_data + CODE_FOR_movstr;
3560 if (data->operand[0].mode != VOIDmode)
3561 end = gen_lowpart (data->operand[0].mode, end);
3563 insn = data->genfun (end, dest_mem, src_mem);
3569 /* movstr is supposed to set end to the address of the NUL
3570 terminator. If the caller requested a mempcpy-like return value,
3572 if (endp == 1 && target != const0_rtx)
3574 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3575 emit_move_insn (target, force_operand (tem, NULL_RTX));
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; dropped lines must come from the original file.  */
3581 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3582 NULL_RTX if we failed the caller should emit a normal call, otherwise
3583 try to get the result in TARGET, if convenient (and in mode MODE if that's
3587 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3589 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3591 tree dest = CALL_EXPR_ARG (exp, 0);
3592 tree src = CALL_EXPR_ARG (exp, 1);
/* Thin wrapper: unpack the two arguments and delegate.  */
3593 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
/* NOTE(review): fragmentary extract -- the "if (result)" guard between the
   fold and the expand was dropped by the extraction.  */
3598 /* Helper function to do the actual work for expand_builtin_strcpy. The
3599 arguments to the builtin_strcpy call DEST and SRC are broken out
3600 so that this can also be called without constructing an actual CALL_EXPR.
3601 The other arguments and return value are the same as for
3602 expand_builtin_strcpy. */
3605 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3606 rtx target, enum machine_mode mode)
3608 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3610 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* Fold failed: fall back to the target's movstr pattern, returning the
   destination pointer (endp==0, strcpy semantics).  */
3611 return expand_movstr (dest, src, target, /*endp=*/0);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; declarations, braces and several returns were dropped
   and must be restored from the original file before editing.  */
3615 /* Expand a call EXP to the stpcpy builtin.
3616 Return NULL_RTX if we failed the caller should emit a normal call,
3617 otherwise try to get the result in TARGET, if convenient (and in
3618 mode MODE if that's convenient). */
3621 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3625 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3628 dst = CALL_EXPR_ARG (exp, 0);
3629 src = CALL_EXPR_ARG (exp, 1);
3631 /* If return value is ignored, transform stpcpy into strcpy. */
3632 if (target == const0_rtx)
3634 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3638 return expand_expr (build_call_expr (fn, 2, dst, src),
3639 target, mode, EXPAND_NORMAL);
3646 /* Ensure we get an actual string whose length can be evaluated at
3647 compile-time, not an expression containing a string. This is
3648 because the latter will potentially produce pessimized code
3649 when used to produce the return value. */
3650 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3651 return expand_movstr (dst, src, target, /*endp=*/2);
/* Known source length: copy LEN+1 bytes via mempcpy machinery; endp==2
   yields the stpcpy return (pointer to the copied NUL).  */
3653 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3654 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3655 target, mode, /*endp=*/2);
3660 if (TREE_CODE (len) == INTEGER_CST)
3662 rtx len_rtx = expand_normal (len);
3664 if (GET_CODE (len_rtx) == CONST_INT
3666 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3667 dst, src, target, mode);
3673 if (mode != VOIDmode)
3674 target = gen_reg_rtx (mode);
3676 target = gen_reg_rtx (GET_MODE (ret));
3678 if (GET_MODE (target) != GET_MODE (ret))
3679 ret = gen_lowpart (GET_MODE (target), ret);
/* Compute dst + len as the stpcpy result from the strcpy expansion.  */
3681 ret = plus_constant (ret, INTVAL (len_rtx));
3682 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3690 return expand_movstr (dst, src, target, /*endp=*/2);
/* NOTE(review): fragmentary extract -- the "rtx" return-type line, braces
   and the "return const0_rtx;" zero-padding path were dropped.  */
3694 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3695 bytes from constant string DATA + OFFSET and return it as target
3699 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3700 enum machine_mode mode)
3702 const char *str = (const char *) data;
/* Past the string's NUL, strncpy pads with zeros (handled on the dropped
   line following this test).  */
3704 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3707 return c_readstr (str + offset, mode);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; guards, braces and returns were dropped and must be
   restored from the original file before editing.  */
3710 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3711 NULL_RTX if we failed the caller should emit a normal call. */
3714 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3716 tree fndecl = get_callee_fndecl (exp);
3718 if (validate_arglist (exp,
3719 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3721 tree dest = CALL_EXPR_ARG (exp, 0);
3722 tree src = CALL_EXPR_ARG (exp, 1);
3723 tree len = CALL_EXPR_ARG (exp, 2);
3724 tree slen = c_strlen (src, 1);
/* Try tree-level folding first; expand COMPOUND_EXPR side effects, then
   the simplified result.  */
3725 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3729 while (TREE_CODE (result) == COMPOUND_EXPR)
3731 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3733 result = TREE_OPERAND (result, 1);
3735 return expand_expr (result, target, mode, EXPAND_NORMAL);
3738 /* We must be passed a constant len and src parameter. */
3739 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3742 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3744 /* We're required to pad with trailing zeros if the requested
3745 len is greater than strlen(s2)+1. In that case try to
3746 use store_by_pieces, if it fails, punt. */
3747 if (tree_int_cst_lt (slen, len))
3749 unsigned int dest_align
3750 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3751 const char *p = c_getstr (src);
3754 if (!p || dest_align == 0 || !host_integerp (len, 1)
3755 || !can_store_by_pieces (tree_low_cst (len, 1),
3756 builtin_strncpy_read_str,
3757 (void *) p, dest_align))
3760 dest_mem = get_memory_rtx (dest, len);
3761 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3762 builtin_strncpy_read_str,
3763 (void *) p, dest_align, 0);
3764 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3765 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* NOTE(review): fragmentary extract -- the "rtx" return-type line and
   braces were dropped.  */
3772 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3773 bytes from constant string DATA + OFFSET and return it as target
3777 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3778 enum machine_mode mode)
3780 const char *c = (const char *) data;
/* Build a mode-sized buffer filled with the single fill byte *C, then
   read it back as an RTX constant.  */
3781 char *p = alloca (GET_MODE_SIZE (mode));
3783 memset (p, *c, GET_MODE_SIZE (mode));
3785 return c_readstr (p, mode);
/* NOTE(review): fragmentary extract -- declarations of SIZE/P/COEFF/TARGET
   and braces were dropped by the extraction.  */
3788 /* Callback routine for store_by_pieces. Return the RTL of a register
3789 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3790 char value given in the RTL register data. For example, if mode is
3791 4 bytes wide, return the RTL for 0x01010101*data. */
3794 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3795 enum machine_mode mode)
3801 size = GET_MODE_SIZE (mode);
/* COEFF is the 0x0101...01 replication constant; multiplying the byte
   value by it broadcasts the byte into every byte of the mode.  */
3806 memset (p, 1, size);
3807 coeff = c_readstr (p, mode);
3809 target = convert_to_mode (mode, (rtx) data, 1);
3810 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3811 return force_reg (mode, target);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; dropped lines must come from the original file.  */
3814 /* Expand expression EXP, which is a call to the memset builtin. Return
3815 NULL_RTX if we failed the caller should emit a normal call, otherwise
3816 try to get the result in TARGET, if convenient (and in mode MODE if that's
3820 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3822 if (!validate_arglist (exp,
3823 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3827 tree dest = CALL_EXPR_ARG (exp, 0);
3828 tree val = CALL_EXPR_ARG (exp, 1);
3829 tree len = CALL_EXPR_ARG (exp, 2);
/* Thin wrapper: unpack arguments and delegate; EXP is passed along for
   tail-call and profile information.  */
3830 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; variable declarations, braces, goto labels and the
   "do_libcall:" fallback framing were dropped and must be restored from
   the original file before editing.  */
3834 /* Helper function to do the actual work for expand_builtin_memset. The
3835 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3836 so that this can also be called without constructing an actual CALL_EXPR.
3837 The other arguments and return value are the same as for
3838 expand_builtin_memset. */
3841 expand_builtin_memset_args (tree dest, tree val, tree len,
3842 rtx target, enum machine_mode mode, tree orig_exp)
3845 enum built_in_function fcode;
3847 unsigned int dest_align;
3848 rtx dest_mem, dest_addr, len_rtx;
3849 HOST_WIDE_INT expected_size = -1;
3850 unsigned int expected_align = 0;
3852 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3854 /* If DEST is not a pointer type, don't do this operation in-line. */
3855 if (dest_align == 0)
/* Refine hints from value-profile data, if present.  */
3858 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3859 if (expected_align < dest_align)
3860 expected_align = dest_align;
3862 /* If the LEN parameter is zero, return DEST. */
3863 if (integer_zerop (len))
3865 /* Evaluate and ignore VAL in case it has side-effects. */
3866 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3867 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3870 /* Stabilize the arguments in case we fail. */
3871 dest = builtin_save_expr (dest);
3872 val = builtin_save_expr (val);
3873 len = builtin_save_expr (len);
3875 len_rtx = expand_normal (len);
3876 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: broadcast it to a register per mode via
   builtin_memset_gen_str, or fall back to the setmem pattern.  */
3878 if (TREE_CODE (val) != INTEGER_CST)
3882 val_rtx = expand_normal (val);
3883 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3886 /* Assume that we can memset by pieces if we can store
3887 * the coefficients by pieces (in the required modes).
3888 * We can't pass builtin_memset_gen_str as that emits RTL. */
3890 if (host_integerp (len, 1)
3891 && !(optimize_size && tree_low_cst (len, 1) > 1)
3892 && can_store_by_pieces (tree_low_cst (len, 1),
3893 builtin_memset_read_str, &c, dest_align))
3895 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3897 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3898 builtin_memset_gen_str, val_rtx, dest_align, 0);
3900 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3901 dest_align, expected_align,
3905 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3906 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value: narrow it to a host char, then store by pieces
   or via the setmem pattern.  */
3910 if (target_char_cast (val, &c))
3915 if (host_integerp (len, 1)
3916 && !(optimize_size && tree_low_cst (len, 1) > 1)
3917 && can_store_by_pieces (tree_low_cst (len, 1),
3918 builtin_memset_read_str, &c, dest_align))
3919 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3920 builtin_memset_read_str, &c, dest_align, 0);
3921 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3922 dest_align, expected_align,
3926 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3927 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Generic path: clear storage with alignment/size hints.  */
3931 set_mem_align (dest_mem, dest_align);
3932 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3933 CALL_EXPR_TAILCALL (orig_exp)
3934 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3935 expected_align, expected_size);
3939 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3940 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* Inline expansion failed: rebuild the memset/bzero call and expand it
   as a library call, preserving the tail-call flag.  */
3946 fndecl = get_callee_fndecl (orig_exp);
3947 fcode = DECL_FUNCTION_CODE (fndecl);
3948 if (fcode == BUILT_IN_MEMSET)
3949 fn = build_call_expr (fndecl, 3, dest, val, len);
3950 else if (fcode == BUILT_IN_BZERO)
3951 fn = build_call_expr (fndecl, 2, dest, len);
3954 if (TREE_CODE (fn) == CALL_EXPR)
3955 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3956 return expand_call (fn, target, target == const0_rtx);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; dropped lines must come from the original file.  */
3959 /* Expand expression EXP, which is a call to the bzero builtin. Return
3960 NULL_RTX if we failed the caller should emit a normal call. */
3963 expand_builtin_bzero (tree exp)
3967 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3970 dest = CALL_EXPR_ARG (exp, 0);
3971 size = CALL_EXPR_ARG (exp, 1);
3973 /* New argument list transforming bzero(ptr x, int y) to
3974 memset(ptr x, int 0, size_t y). This is done this way
3975 so that if it isn't expanded inline, we fallback to
3976 calling bzero instead of memset. */
/* const0_rtx as TARGET marks the return value as ignored.  */
3978 return expand_builtin_memset_args (dest, integer_zero_node,
3979 fold_convert (sizetype, size),
3980 const0_rtx, VOIDmode, exp);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; dropped lines must come from the original file.  */
3983 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
3984 caller should emit a normal call, otherwise try to get the result
3985 in TARGET, if convenient (and in mode MODE if that's convenient). */
3988 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
3990 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
3991 INTEGER_TYPE, VOID_TYPE))
3993 tree type = TREE_TYPE (exp);
/* Constant-fold memchr where possible and expand the folded result.  */
3994 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
3995 CALL_EXPR_ARG (exp, 1),
3996 CALL_EXPR_ARG (exp, 2), type);
3998 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; declarations, "#else/#endif" framing, braces and guard
   lines were dropped and must be restored from the original file before
   editing.  */
4003 /* Expand expression EXP, which is a call to the memcmp built-in function.
4004 Return NULL_RTX if we failed and the
4005 caller should emit a normal call, otherwise try to get the result in
4006 TARGET, if convenient (and in mode MODE, if that's convenient). */
4009 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4011 if (!validate_arglist (exp,
4012 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4016 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4017 CALL_EXPR_ARG (exp, 1),
4018 CALL_EXPR_ARG (exp, 2));
4020 return expand_expr (result, target, mode, EXPAND_NORMAL);
4023 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4025 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4028 tree arg1 = CALL_EXPR_ARG (exp, 0);
4029 tree arg2 = CALL_EXPR_ARG (exp, 1);
4030 tree len = CALL_EXPR_ARG (exp, 2);
4033 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4035 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4036 enum machine_mode insn_mode;
/* Pick the comparison pattern: cmpmemsi if the port has it, otherwise
   cmpstrnsi.  */
4038 #ifdef HAVE_cmpmemsi
4040 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4043 #ifdef HAVE_cmpstrnsi
4045 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4050 /* If we don't have POINTER_TYPE, call the function. */
4051 if (arg1_align == 0 || arg2_align == 0)
4054 /* Make a place to write the result of the instruction. */
4057 && REG_P (result) && GET_MODE (result) == insn_mode
4058 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4059 result = gen_reg_rtx (insn_mode);
4061 arg1_rtx = get_memory_rtx (arg1, len);
4062 arg2_rtx = get_memory_rtx (arg2, len);
4063 arg3_rtx = expand_normal (len);
4065 /* Set MEM_SIZE as appropriate. */
4066 if (GET_CODE (arg3_rtx) == CONST_INT)
4068 set_mem_size (arg1_rtx, arg3_rtx);
4069 set_mem_size (arg2_rtx, arg3_rtx);
4072 #ifdef HAVE_cmpmemsi
4074 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4075 GEN_INT (MIN (arg1_align, arg2_align)));
4078 #ifdef HAVE_cmpstrnsi
4080 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4081 GEN_INT (MIN (arg1_align, arg2_align)));
/* Pattern generation failed: emit the memcmp library call directly.  */
4089 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
4090 TYPE_MODE (integer_type_node), 3,
4091 XEXP (arg1_rtx, 0), Pmode,
4092 XEXP (arg2_rtx, 0), Pmode,
4093 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4094 TYPE_UNSIGNED (sizetype)),
4095 TYPE_MODE (sizetype));
4097 /* Return the value in the proper mode for this function. */
4098 mode = TYPE_MODE (TREE_TYPE (exp));
4099 if (GET_MODE (result) == mode)
4101 else if (target != 0)
4103 convert_move (target, result, 0);
4107 return convert_to_mode (mode, result, 0);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; declarations, braces, "#endif" framing and several
   branches (e.g. the len1/len2 selection chain bodies) were dropped and
   must be restored from the original file before editing.  */
4114 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4115 if we failed the caller should emit a normal call, otherwise try to get
4116 the result in TARGET, if convenient. */
4119 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4121 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4125 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4126 CALL_EXPR_ARG (exp, 1));
4128 return expand_expr (result, target, mode, EXPAND_NORMAL);
4131 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4132 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4133 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4135 rtx arg1_rtx, arg2_rtx;
4136 rtx result, insn = NULL_RTX;
4138 tree arg1 = CALL_EXPR_ARG (exp, 0);
4139 tree arg2 = CALL_EXPR_ARG (exp, 1);
4142 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4144 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4146 /* If we don't have POINTER_TYPE, call the function. */
4147 if (arg1_align == 0 || arg2_align == 0)
4150 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4151 arg1 = builtin_save_expr (arg1);
4152 arg2 = builtin_save_expr (arg2);
4154 arg1_rtx = get_memory_rtx (arg1, NULL);
4155 arg2_rtx = get_memory_rtx (arg2, NULL);
4157 #ifdef HAVE_cmpstrsi
4158 /* Try to call cmpstrsi. */
4161 enum machine_mode insn_mode
4162 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4164 /* Make a place to write the result of the instruction. */
4167 && REG_P (result) && GET_MODE (result) == insn_mode
4168 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4169 result = gen_reg_rtx (insn_mode);
4171 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4172 GEN_INT (MIN (arg1_align, arg2_align)));
4175 #ifdef HAVE_cmpstrnsi
4176 /* Try to determine at least one length and call cmpstrnsi. */
4177 if (!insn && HAVE_cmpstrnsi)
4182 enum machine_mode insn_mode
4183 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4184 tree len1 = c_strlen (arg1, 1);
4185 tree len2 = c_strlen (arg2, 1);
4188 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4190 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4192 /* If we don't have a constant length for the first, use the length
4193 of the second, if we know it. We don't require a constant for
4194 this case; some cost analysis could be done if both are available
4195 but neither is constant. For now, assume they're equally cheap,
4196 unless one has side effects. If both strings have constant lengths,
4203 else if (TREE_SIDE_EFFECTS (len1))
4205 else if (TREE_SIDE_EFFECTS (len2))
4207 else if (TREE_CODE (len1) != INTEGER_CST)
4209 else if (TREE_CODE (len2) != INTEGER_CST)
4211 else if (tree_int_cst_lt (len1, len2))
4216 /* If both arguments have side effects, we cannot optimize. */
4217 if (!len || TREE_SIDE_EFFECTS (len))
4220 arg3_rtx = expand_normal (len);
4222 /* Make a place to write the result of the instruction. */
4225 && REG_P (result) && GET_MODE (result) == insn_mode
4226 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4227 result = gen_reg_rtx (insn_mode);
4229 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4230 GEN_INT (MIN (arg1_align, arg2_align)));
4238 /* Return the value in the proper mode for this function. */
4239 mode = TYPE_MODE (TREE_TYPE (exp));
4240 if (GET_MODE (result) == mode)
4243 return convert_to_mode (mode, result, 0);
4244 convert_move (target, result, 0);
4248 /* Expand the library call ourselves using a stabilized argument
4249 list to avoid re-evaluating the function's arguments twice. */
4250 #ifdef HAVE_cmpstrnsi
4253 fndecl = get_callee_fndecl (exp);
4254 fn = build_call_expr (fndecl, 2, arg1, arg2);
4255 if (TREE_CODE (fn) == CALL_EXPR)
4256 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4257 return expand_call (fn, target, target == const0_rtx);
/* NOTE(review): fragmentary extract -- embedded original line numbers are
   non-contiguous; declarations, braces, the len1/len2 selection-chain
   bodies and "#endif" framing were dropped and must be restored from the
   original file before editing.  */
4263 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4264 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4265 the result in TARGET, if convenient. */
4268 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4270 if (!validate_arglist (exp,
4271 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4275 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4276 CALL_EXPR_ARG (exp, 1),
4277 CALL_EXPR_ARG (exp, 2));
4279 return expand_expr (result, target, mode, EXPAND_NORMAL);
4282 /* If c_strlen can determine an expression for one of the string
4283 lengths, and it doesn't have side effects, then emit cmpstrnsi
4284 using length MIN(strlen(string)+1, arg3). */
4285 #ifdef HAVE_cmpstrnsi
4288 tree len, len1, len2;
4289 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4292 tree arg1 = CALL_EXPR_ARG (exp, 0);
4293 tree arg2 = CALL_EXPR_ARG (exp, 1);
4294 tree arg3 = CALL_EXPR_ARG (exp, 2);
4297 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4299 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4300 enum machine_mode insn_mode
4301 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4303 len1 = c_strlen (arg1, 1);
4304 len2 = c_strlen (arg2, 1);
4307 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4309 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4311 /* If we don't have a constant length for the first, use the length
4312 of the second, if we know it. We don't require a constant for
4313 this case; some cost analysis could be done if both are available
4314 but neither is constant. For now, assume they're equally cheap,
4315 unless one has side effects. If both strings have constant lengths,
4322 else if (TREE_SIDE_EFFECTS (len1))
4324 else if (TREE_SIDE_EFFECTS (len2))
4326 else if (TREE_CODE (len1) != INTEGER_CST)
4328 else if (TREE_CODE (len2) != INTEGER_CST)
4330 else if (tree_int_cst_lt (len1, len2))
4335 /* If both arguments have side effects, we cannot optimize. */
4336 if (!len || TREE_SIDE_EFFECTS (len))
4339 /* The actual new length parameter is MIN(len,arg3). */
4340 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4341 fold_convert (TREE_TYPE (len), arg3));
4343 /* If we don't have POINTER_TYPE, call the function. */
4344 if (arg1_align == 0 || arg2_align == 0)
4347 /* Make a place to write the result of the instruction. */
4350 && REG_P (result) && GET_MODE (result) == insn_mode
4351 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4352 result = gen_reg_rtx (insn_mode);
4354 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4355 arg1 = builtin_save_expr (arg1);
4356 arg2 = builtin_save_expr (arg2);
4357 len = builtin_save_expr (len);
4359 arg1_rtx = get_memory_rtx (arg1, len);
4360 arg2_rtx = get_memory_rtx (arg2, len);
4361 arg3_rtx = expand_normal (len);
4362 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4363 GEN_INT (MIN (arg1_align, arg2_align)));
4368 /* Return the value in the proper mode for this function. */
4369 mode = TYPE_MODE (TREE_TYPE (exp));
4370 if (GET_MODE (result) == mode)
4373 return convert_to_mode (mode, result, 0);
4374 convert_move (target, result, 0);
4378 /* Expand the library call ourselves using a stabilized argument
4379 list to avoid re-evaluating the function's arguments twice. */
4380 fndecl = get_callee_fndecl (exp);
4381 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4382 if (TREE_CODE (fn) == CALL_EXPR)
4383 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4384 return expand_call (fn, target, target == const0_rtx);
4390 /* Expand expression EXP, which is a call to the strcat builtin.
4391 Return NULL_RTX if we failed the caller should emit a normal call,
4392 otherwise try to get the result in TARGET, if convenient. */
/* NOTE(review): this extract appears to have source lines elided (the
   embedded original line numbers jump); annotated as-is.  */
4395 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
/* strcat takes exactly (char *, const char *); punt to a library call
   for any other argument list.  */
4397 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4401 tree dst = CALL_EXPR_ARG (exp, 0);
4402 tree src = CALL_EXPR_ARG (exp, 1);
/* c_getstr yields the constant string SRC points to, or NULL if SRC
   is not a string literal.  */
4403 const char *p = c_getstr (src);
4405 /* If the string length is zero, strcat is a no-op: return the dst
4406 parameter unchanged. */
4406 if (p && *p == '\0')
4407 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4411 /* See if we can store by pieces into (dst + strlen(dst)). */
4412 tree newsrc, newdst,
4413 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4416 /* Stabilize the argument list so DST and SRC are evaluated once. */
4417 newsrc = builtin_save_expr (src);
4418 dst = builtin_save_expr (dst);
4422 /* Create strlen (dst). */
4423 newdst = build_call_expr (strlen_fn, 1, dst);
4424 /* Create (dst + (cast) strlen (dst)). */
4425 newdst = fold_convert (TREE_TYPE (dst), newdst);
4426 newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
/* Save the computed end-of-string address so the strcpy expansion
   does not re-evaluate it.  */
4428 newdst = builtin_save_expr (newdst);
/* Try to expand strcpy (dst + strlen (dst), src) inline; on failure
   discard the pending insn sequence and let the caller emit a call.  */
4430 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4432 end_sequence (); /* Stop sequence. */
4436 /* Output the entire sequence. */
4437 insns = get_insns ();
/* strcat returns its first argument.  */
4441 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4448 /* Expand expression EXP, which is a call to the strncat builtin.
4449 Return NULL_RTX if we failed the caller should emit a normal call,
4450 otherwise try to get the result in TARGET, if convenient. */
4453 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
/* Only handle the exact (char *, const char *, size_t) argument list.  */
4455 if (validate_arglist (exp,
4456 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Delegate to the tree-level folder; it returns NULL_TREE when no
   simplification applies, in which case a normal call is emitted.  */
4458 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4459 CALL_EXPR_ARG (exp, 1),
4460 CALL_EXPR_ARG (exp, 2));
4462 return expand_expr (result, target, mode, EXPAND_NORMAL);
4467 /* Expand expression EXP, which is a call to the strspn builtin.
4468 Return NULL_RTX if we failed the caller should emit a normal call,
4469 otherwise try to get the result in TARGET, if convenient. */
4472 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4474 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Delegate to the tree-level folder; NULL_TREE means no
   simplification, so the caller falls back to a library call.  */
4476 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4477 CALL_EXPR_ARG (exp, 1));
4479 return expand_expr (result, target, mode, EXPAND_NORMAL);
4484 /* Expand expression EXP, which is a call to the strcspn builtin.
4485 Return NULL_RTX if we failed the caller should emit a normal call,
4486 otherwise try to get the result in TARGET, if convenient. */
4489 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4491 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Same pattern as strspn above: fold at the tree level if possible.  */
4493 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4494 CALL_EXPR_ARG (exp, 1));
4496 return expand_expr (result, target, mode, EXPAND_NORMAL);
4501 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4502 if that's convenient. */
4505 expand_builtin_saveregs (void)
4509 /* Don't do __builtin_saveregs more than once in a function.
4510 Save the result of the first call and reuse it. */
4511 if (saveregs_value != 0)
4512 return saveregs_value;
4514 /* When this function is called, it means that registers must be
4515 saved on entry to this function. So we migrate the call to the
4516 first insn of this function. */
/* NOTE(review): the start_sequence () call that pairs with the
   get_insns/end_sequence below appears to be elided from this extract.  */
4520 /* Do whatever the machine needs done in this case. */
4521 val = targetm.calls.expand_builtin_saveregs ();
/* Cache the value for any subsequent __builtin_saveregs call.  */
4526 saveregs_value = val;
4528 /* Put the insns after the NOTE that starts the function. If this
4529 is inside a start_sequence, make the outer-level insn chain current, so
4530 the code is placed at the start of the function. */
4531 push_topmost_sequence ();
4532 emit_insn_after (seq, entry_of_function ());
4533 pop_topmost_sequence ();
4538 /* __builtin_args_info (N) returns word N of the arg space info
4539 for the current function. The number and meanings of words
4540 is controlled by the definition of CUMULATIVE_ARGS. */
4543 expand_builtin_args_info (tree exp)
4545 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4546 int *word_ptr = (int *) ¤t_function_args_info;
4548 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4550 if (call_expr_nargs (exp) != 0)
4552 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4553 error ("argument of %<__builtin_args_info%> must be constant");
4556 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4558 if (wordnum < 0 || wordnum >= nwords)
4559 error ("argument of %<__builtin_args_info%> out of range");
4561 return GEN_INT (word_ptr[wordnum]);
4565 error ("missing argument in %<__builtin_args_info%>");
4570 /* Expand a call to __builtin_next_arg.  Returns the address of the
4571 first anonymous stack argument, as an rtx.  */
4573 expand_builtin_next_arg (void)
4575 /* Checking arguments is already done in fold_builtin_next_arg
4576 that must be called before this function. */
/* Compute internal_arg_pointer + arg_offset in Pmode.  */
4577 return expand_binop (Pmode, add_optab,
4578 current_function_internal_arg_pointer,
4579 current_function_arg_offset_rtx,
4580 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4583 /* Make it easier for the backends by protecting the valist argument
4584 from multiple evaluations.  If NEEDS_LVALUE, the caller will write
4585 through the result (assumption from the parameter name; the lines
4586 using needs_lvalue appear to be elided from this extract).  */
4587 stabilize_va_list (tree valist, int needs_lvalue)
/* Array-type va_list (e.g. x86-64): pass a pointer to the element.  */
4589 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4591 if (TREE_SIDE_EFFECTS (valist))
4592 valist = save_expr (valist);
4594 /* For this case, the backends will be expecting a pointer to
4595 TREE_TYPE (va_list_type_node), but it's possible we've
4596 actually been given an array (an actual va_list_type_node).
4598 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4600 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4601 valist = build_fold_addr_expr_with_type (valist, p1);
/* Scalar va_list: take the address, stabilize it, then deref so the
   underlying object is named exactly once.  */
4610 if (! TREE_SIDE_EFFECTS (valist))
4613 pt = build_pointer_type (va_list_type_node);
4614 valist = fold_build1 (ADDR_EXPR, pt, valist);
/* Force re-evaluation protection even for a "clean" address.  */
4615 TREE_SIDE_EFFECTS (valist) = 1;
4618 if (TREE_SIDE_EFFECTS (valist))
4619 valist = save_expr (valist);
4620 valist = build_fold_indirect_ref (valist);
4626 /* The "standard" definition of va_list is void*.  Used as the default
4627 for targetm.build_builtin_va_list.  */
4629 std_build_builtin_va_list (void)
4631 return ptr_type_node;
4634 /* The "standard" implementation of va_start: just assign `nextarg' to
4635 the variable VALIST.  */
4638 std_expand_builtin_va_start (tree valist, rtx nextarg)
/* Build "valist = (void *) nextarg" as a tree and expand it for its
   side effect only (result discarded via const0_rtx).  */
4642 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
4643 make_tree (ptr_type_node, nextarg));
4644 TREE_SIDE_EFFECTS (t) = 1;
4646 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4649 /* Expand EXP, a call to __builtin_va_start.  Diagnoses a missing
4650 second argument, then hands off to the target hook or the standard
4651 implementation.  */
4652 expand_builtin_va_start (tree exp)
4657 if (call_expr_nargs (exp) < 2)
4659 error ("too few arguments to function %<va_start%>");
/* fold_builtin_next_arg diagnoses a bad last-named-parameter arg;
   nonzero means an error was already reported.  */
4663 if (fold_builtin_next_arg (exp, true))
4666 nextarg = expand_builtin_next_arg ();
4667 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
/* Targets may override the whole expansion via this macro.  */
4669 #ifdef EXPAND_BUILTIN_VA_START
4670 EXPAND_BUILTIN_VA_START (valist, nextarg);
4672 std_expand_builtin_va_start (valist, nextarg);
4678 /* The "standard" implementation of va_arg: read the value from the
4679 current (padded) address and increment by the (padded) size. */
4682 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4684 tree addr, t, type_size, rounded_size, valist_tmp;
4685 unsigned HOST_WIDE_INT align, boundary;
4688 #ifdef ARGS_GROW_DOWNWARD
4689 /* All of the alignment and movement below is for args-grow-up machines.
4690 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4691 implement their own specialized gimplify_va_arg_expr routines. */
/* Arguments passed by invisible reference: fetch a pointer instead.  */
4695 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4697 type = build_pointer_type (type);
4699 align = PARM_BOUNDARY / BITS_PER_UNIT;
4700 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4702 /* Hoist the valist value into a temporary for the moment. */
4703 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4705 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4706 requires greater alignment, we must perform dynamic alignment. */
4707 if (boundary > align
4708 && !integer_zerop (TYPE_SIZE (type)))
/* Round valist up: valist = (valist + boundary-1) & -boundary.  */
4710 t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
4711 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4712 build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
4713 gimplify_and_add (t, pre_p);
4715 t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
4716 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4717 build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
4718 gimplify_and_add (t, pre_p);
4723 /* If the actual alignment is less than the alignment of the type,
4724 adjust the type accordingly so that we don't assume strict alignment
4725 when dereferencing the pointer. */
4726 boundary *= BITS_PER_UNIT;
4727 if (boundary < TYPE_ALIGN (type))
4729 type = build_variant_type_copy (type);
4730 TYPE_ALIGN (type) = boundary;
4733 /* Compute the rounded size of the type. */
4734 type_size = size_in_bytes (type);
4735 rounded_size = round_up (type_size, align);
4737 /* Reduce rounded_size so it's sharable with the postqueue. */
4738 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4742 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4744 /* Small args are padded downward: point at the end of the slot
4745 minus the object size, but only when the arg fits in one slot. */
4745 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4746 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4747 size_binop (MINUS_EXPR, rounded_size, type_size));
4748 t = fold_convert (TREE_TYPE (addr), t);
4749 addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
4752 /* Compute new value for AP: advance past the consumed slot. */
4753 t = fold_convert (TREE_TYPE (valist), rounded_size);
4754 t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
4755 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4756 gimplify_and_add (t, pre_p);
4758 addr = fold_convert (build_pointer_type (type), addr);
/* One extra dereference for pass-by-reference arguments.  */
4761 addr = build_va_arg_indirect_ref (addr);
4763 return build_va_arg_indirect_ref (addr);
4766 /* Build an indirect-ref expression over the given TREE, which represents a
4767 piece of a va_arg() expansion. */
4769 build_va_arg_indirect_ref (tree addr)
4771 addr = build_fold_indirect_ref (addr);
/* Register the resulting INDIRECT_REF with mudflap as exempt.  */
4773 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4779 /* Return a dummy expression of type TYPE in order to keep going after an
4780 error: a dereference of a null pointer constant of that type, which is
4781 never executed (used below after the trap call is emitted).  */
4783 dummy_object (tree type)
4785 tree t = build_int_cst (build_pointer_type (type), 0);
4786 return build1 (INDIRECT_REF, type, t);
4789 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4790 builtin function, but a very special sort of operator. */
4792 enum gimplify_status
4793 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4795 tree promoted_type, want_va_type, have_va_type;
4796 tree valist = TREE_OPERAND (*expr_p, 0);
4797 tree type = TREE_TYPE (*expr_p);
4800 /* Verify that valist is of the proper type. */
4801 want_va_type = va_list_type_node;
4802 have_va_type = TREE_TYPE (valist);
/* An earlier error poisons the operand; give up silently.  */
4804 if (have_va_type == error_mark_node)
4807 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4809 /* If va_list is an array type, the argument may have decayed
4810 to a pointer type, e.g. by being passed to another function.
4811 In that case, unwrap both types so that we can compare the
4812 underlying records. */
4813 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4814 || POINTER_TYPE_P (have_va_type))
4816 want_va_type = TREE_TYPE (want_va_type);
4817 have_va_type = TREE_TYPE (have_va_type);
4821 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4823 error ("first argument to %<va_arg%> not of type %<va_list%>");
4827 /* Generate a diagnostic for requesting data of a type that cannot
4828 be passed through `...' due to type promotion at the call site. */
4829 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
/* gave_help: emit the "(so you should pass ...)" hint only once per
   compilation.  */
4832 static bool gave_help;
4834 /* Unfortunately, this is merely undefined, rather than a constraint
4835 violation, so we cannot make this an error. If this call is never
4836 executed, the program is still strictly conforming. */
4837 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4838 type, promoted_type);
4842 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4843 promoted_type, type);
4846 /* We can, however, treat "undefined" any way we please.
4847 Call abort to encourage the user to fix the program. */
4848 inform ("if this code is reached, the program will abort");
4849 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4850 append_to_statement_list (t, pre_p);
4852 /* This is dead code, but go ahead and finish so that the
4853 mode of the result comes out right. */
4854 *expr_p = dummy_object (type);
4859 /* Make it easier for the backends by protecting the valist argument
4860 from multiple evaluations. */
4861 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4863 /* For this case, the backends will be expecting a pointer to
4864 TREE_TYPE (va_list_type_node), but it's possible we've
4865 actually been given an array (an actual va_list_type_node).
4867 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4869 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4870 valist = build_fold_addr_expr_with_type (valist, p1);
/* Array case needs only an rvalue pointer; scalar case needs an
   lvalue because the target hook will advance it.  */
4872 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4875 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4877 if (!targetm.gimplify_va_arg_expr)
4878 /* FIXME:Once most targets are converted we should merely
4879 assert this is non-null. */
4882 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4887 /* Expand EXP, a call to __builtin_va_end.  va_end itself is a no-op
4888 here; only the argument's side effects matter.  */
4890 expand_builtin_va_end (tree exp)
4892 tree valist = CALL_EXPR_ARG (exp, 0);
4894 /* Evaluate for side effects, if needed. I hate macros that don't
4895 do that.  */
4896 if (TREE_SIDE_EFFECTS (valist))
4897 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4902 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4903 builtin rather than just as an assignment in stdarg.h because of the
4904 nastiness of array-type va_list types. */
4907 expand_builtin_va_copy (tree exp)
4911 dst = CALL_EXPR_ARG (exp, 0);
4912 src = CALL_EXPR_ARG (exp, 1);
/* DST must be an lvalue (1); SRC is only read (0).  */
4914 dst = stabilize_va_list (dst, 1);
4915 src = stabilize_va_list (src, 0);
/* Scalar va_list: a plain assignment suffices.  */
4917 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4919 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4920 TREE_SIDE_EFFECTS (t) = 1;
4921 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array va_list: copy the whole object with a block move.  */
4925 rtx dstb, srcb, size;
4927 /* Evaluate to pointers. */
4928 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4929 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4930 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4931 VOIDmode, EXPAND_NORMAL);
4933 dstb = convert_memory_address (Pmode, dstb);
4934 srcb = convert_memory_address (Pmode, srcb);
4936 /* "Dereference" to BLKmode memories, attaching alias set and
4937 alignment so the block move can be optimized. */
4937 dstb = gen_rtx_MEM (BLKmode, dstb);
4938 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4939 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4940 srcb = gen_rtx_MEM (BLKmode, srcb);
4941 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4942 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4945 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4951 /* Expand a call to one of the builtin functions __builtin_frame_address or
4952 __builtin_return_address. */
4955 expand_builtin_frame_address (tree fndecl, tree exp)
4957 /* The argument must be a nonnegative integer constant.
4958 It counts the number of frames to scan up the stack.
4959 The value is the return address saved in that frame. */
4960 if (call_expr_nargs (exp) == 0)
4961 /* Warning about missing arg was already issued. */
/* Reject non-constant or negative counts, with a diagnostic naming
   whichever builtin was called.  */
4963 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4965 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4966 error ("invalid argument to %<__builtin_frame_address%>");
4968 error ("invalid argument to %<__builtin_return_address%>");
/* Shared worker handles both builtins, keyed by function code.  */
4974 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4975 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4977 /* Some ports cannot access arbitrary stack frames; warn and
4978 continue (a NULL result from the worker, presumably). */
4980 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4981 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4983 warning (0, "unsupported argument to %<__builtin_return_address%>");
4987 /* For __builtin_frame_address, return what we've got. */
4988 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
/* Return address path: force the value into a register unless it is
   already constant or register-allocated.  */
4992 && ! CONSTANT_P (tem))
4993 tem = copy_to_mode_reg (Pmode, tem);
4998 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4999 we failed and the caller should emit a normal call, otherwise try to get
5000 the result in TARGET, if convenient. */
5003 expand_builtin_alloca (tree exp, rtx target)
5008 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5009 should always expand to function calls. These can be intercepted
5010 by the mudflap runtime. */
5014 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5017 /* Compute the argument. */
5018 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5020 /* Allocate the desired space. */
5021 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
/* Stack pointers live in Pmode; the C-level result is ptr_mode.  */
5022 result = convert_memory_address (ptr_mode, result);
5027 /* Expand a call to a bswap builtin with argument ARG0. MODE
5028 is the mode to expand with. */
5031 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5033 enum machine_mode mode;
5037 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5040 arg = CALL_EXPR_ARG (exp, 0);
5041 mode = TYPE_MODE (TREE_TYPE (arg));
5042 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* expand_unop handles the byte-swap via bswap_optab, possibly
   through a libcall; a NULL result is not expected here.  */
5044 target = expand_unop (mode, bswap_optab, op0, target, 1);
5046 gcc_assert (target);
5048 return convert_to_mode (mode, target, 0);
5051 /* Expand a call to a unary builtin in EXP.
5052 Return NULL_RTX if a normal call should be emitted rather than expanding the
5053 function in-line. If convenient, the result should be placed in TARGET.
5054 SUBTARGET may be used as the target for computing one of EXP's operands. */
5057 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5058 rtx subtarget, optab op_optab)
5062 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5065 /* Compute the argument. */
5066 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5067 VOIDmode, EXPAND_NORMAL);
5068 /* Compute op, into TARGET if possible.
5069 Set TARGET to wherever the result comes back. */
5070 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5071 op_optab, op0, target, 1);
5072 gcc_assert (target);
/* The builtin's return type may be wider/narrower than the operand;
   convert to the caller-requested mode.  */
5074 return convert_to_mode (target_mode, target, 0);
5077 /* If the string passed to fputs is a constant and is one character
5078 long, we attempt to transform this call into __builtin_fputc(). */
5081 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5083 /* Verify the arguments in the original call. */
5084 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* (target == const0_rtx) tells the folder the return value is
   unused, enabling more aggressive rewrites.  */
5086 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5087 CALL_EXPR_ARG (exp, 1),
5088 (target == const0_rtx),
5089 unlocked, NULL_TREE);
5091 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5096 /* Expand a call to __builtin_expect. We just return our argument
5097 as the builtin_expect semantic should've been already executed by
5098 tree branch prediction pass. */
5101 expand_builtin_expect (tree exp, rtx target)
5105 if (call_expr_nargs (exp) < 2)
5107 arg = CALL_EXPR_ARG (exp, 0);
/* C is the expected value; unused at RTL time (hint consumed earlier).  */
5108 c = CALL_EXPR_ARG (exp, 1);
5110 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5111 /* When guessing was done, the hints should be already stripped away. */
5112 gcc_assert (!flag_guess_branch_prob);
/* Emit code that aborts the program: the target's trap insn when one
   exists, otherwise a call to abort ().  */
5117 expand_builtin_trap (void)
5121 emit_insn (gen_trap ());
5124 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5128 /* Expand EXP, a call to fabs, fabsf or fabsl.
5129 Return NULL_RTX if a normal call should be emitted rather than expanding
5130 the function inline. If convenient, the result should be placed
5131 in TARGET. SUBTARGET may be used as the target for computing
5132 the operand. */
5135 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5137 enum machine_mode mode;
5141 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5144 arg = CALL_EXPR_ARG (exp, 0);
5145 mode = TYPE_MODE (TREE_TYPE (arg));
5146 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* safe_from_p guards against TARGET overlapping the operand.  */
5147 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5150 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5151 Return NULL is a normal call should be emitted rather than expanding the
5152 function inline. If convenient, the result should be placed in TARGET.
5153 SUBTARGET may be used as the target for computing the operand. */
5156 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5161 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
/* op0 carries the magnitude, op1 the sign.  */
5164 arg = CALL_EXPR_ARG (exp, 0);
5165 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5167 arg = CALL_EXPR_ARG (exp, 1);
5168 op1 = expand_normal (arg);
5170 return expand_copysign (op0, op1, target);
5173 /* Create a new constant string literal and return a char* pointer to it.
5174 The STRING_CST value is the LEN characters at STR. */
5176 build_string_literal (int len, const char *str)
5178 tree t, elem, index, type;
5180 t = build_string (len, str);
/* Element type is "const char"; index range is [0, len-1].  */
5181 elem = build_type_variant (char_type_node, 1, 0);
5182 index = build_index_type (build_int_cst (NULL_TREE, len - 1));
5183 type = build_array_type (elem, index);
5184 TREE_TYPE (t) = type;
5185 TREE_CONSTANT (t) = 1;
5186 TREE_INVARIANT (t) = 1;
5187 TREE_READONLY (t) = 1;
5188 TREE_STATIC (t) = 1;
/* Take the array's address, then convert the "const char (*)[N]"
   pointer to plain "const char *".  */
5190 type = build_pointer_type (type);
5191 t = build1 (ADDR_EXPR, type, t);
5193 type = build_pointer_type (elem);
5194 t = build1 (NOP_EXPR, type, t);
5198 /* Expand EXP, a call to printf or printf_unlocked.
5199 Return NULL_RTX if a normal call should be emitted rather than transforming
5200 the function inline. If convenient, the result should be placed in
5201 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5202 call. */
5204 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5205 bool unlocked)
5207 /* If we're using an unlocked function, assume the other unlocked
5208 functions exist explicitly. */
5209 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5210 : implicit_built_in_decls[BUILT_IN_PUTCHAR]
5211 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5212 : implicit_built_in_decls[BUILT_IN_PUTS];
5213 const char *fmt_str;
5216 int nargs = call_expr_nargs (exp);
5218 /* If the return value is used, don't do the transformation:
5219 putchar/puts return values differ from printf's. */
5219 if (target != const0_rtx)
5222 /* Verify the required arguments in the original call. */
5225 fmt = CALL_EXPR_ARG (exp, 0);
5226 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5229 /* Check whether the format is a literal string constant. */
5230 fmt_str = c_getstr (fmt);
5231 if (fmt_str == NULL)
/* target_* chars encode the execution charset, which may differ
   from the host charset.  */
5234 if (!init_target_chars ())
5237 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5238 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5241 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5244 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5246 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5247 else if (strcmp (fmt_str, target_percent_c) == 0)
5250 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5253 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5257 /* We can't handle anything else with % args or %% ... yet. */
5258 if (strchr (fmt_str, target_percent))
5264 /* If the format specifier was "", printf does nothing. */
5265 if (fmt_str[0] == '\0')
5267 /* If the format specifier has length of 1, call putchar. */
5268 if (fmt_str[1] == '\0')
5270 /* Given printf("c"), (where c is any one character,)
5271 convert "c"[0] to an int and pass that to the replacement
5272 function. */
5273 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5275 fn = build_call_expr (fn_putchar, 1, arg);
5279 /* If the format specifier was "string\n", call puts("string"). */
5280 size_t len = strlen (fmt_str);
5281 if ((unsigned char)fmt_str[len - 1] == target_newline)
5283 /* Create a NUL-terminated string that's one char shorter
5284 than the original, stripping off the trailing '\n'. */
5285 char *newstr = alloca (len);
5286 memcpy (newstr, fmt_str, len - 1);
5287 newstr[len - 1] = 0;
/* LEN here includes the replacing NUL terminator.  */
5288 arg = build_string_literal (len, newstr);
5290 fn = build_call_expr (fn_puts, 1, arg);
5293 /* We'd like to arrange to call fputs(string,stdout) here,
5294 but we need stdout and don't have a way to get it yet. */
/* Preserve tail-call status on the replacement call.  */
5301 if (TREE_CODE (fn) == CALL_EXPR)
5302 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5303 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5306 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5307 Return NULL_RTX if a normal call should be emitted rather than transforming
5308 the function inline. If convenient, the result should be placed in
5309 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5310 call. */
5312 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5313 bool unlocked)
5315 /* If we're using an unlocked function, assume the other unlocked
5316 functions exist explicitly. */
5317 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5318 : implicit_built_in_decls[BUILT_IN_FPUTC];
5319 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5320 : implicit_built_in_decls[BUILT_IN_FPUTS];
5321 const char *fmt_str;
5324 int nargs = call_expr_nargs (exp);
5326 /* If the return value is used, don't do the transformation:
5327 fputs/fputc return values differ from fprintf's. */
5327 if (target != const0_rtx)
5330 /* Verify the required arguments in the original call. */
5333 fp = CALL_EXPR_ARG (exp, 0);
5334 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5336 fmt = CALL_EXPR_ARG (exp, 1);
5337 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5340 /* Check whether the format is a literal string constant. */
5341 fmt_str = c_getstr (fmt);
5342 if (fmt_str == NULL)
5345 if (!init_target_chars ())
5348 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5349 if (strcmp (fmt_str, target_percent_s) == 0)
5352 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5354 arg = CALL_EXPR_ARG (exp, 2);
/* Note the argument order swap: fputs takes (string, stream).  */
5356 fn = build_call_expr (fn_fputs, 2, arg, fp);
5358 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5359 else if (strcmp (fmt_str, target_percent_c) == 0)
5362 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5364 arg = CALL_EXPR_ARG (exp, 2);
5366 fn = build_call_expr (fn_fputc, 2, arg, fp);
5370 /* We can't handle anything else with % args or %% ... yet. */
5371 if (strchr (fmt_str, target_percent))
5377 /* If the format specifier was "", fprintf does nothing. */
5378 if (fmt_str[0] == '\0')
5380 /* Evaluate and ignore FILE* argument for side-effects. */
5381 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5385 /* When "string" doesn't contain %, replace all cases of
5386 fprintf(stream,string) with fputs(string,stream). The fputs
5387 builtin will take care of special cases like length == 1. */
5389 fn = build_call_expr (fn_fputs, 2, fmt, fp);
/* Preserve tail-call status on the replacement call.  */
5394 if (TREE_CODE (fn) == CALL_EXPR)
5395 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5396 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5399 /* Expand a call EXP to sprintf. Return NULL_RTX if
5400 a normal call should be emitted rather than expanding the function
5401 inline. If convenient, the result should be placed in TARGET with
5405 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5408 const char *fmt_str;
5409 int nargs = call_expr_nargs (exp);
5411 /* Verify the required arguments in the original call. */
5414 dest = CALL_EXPR_ARG (exp, 0);
5415 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5417 fmt = CALL_EXPR_ARG (exp, 0);
5418 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5421 /* Check whether the format is a literal string constant. */
5422 fmt_str = c_getstr (fmt);
5423 if (fmt_str == NULL)
5426 if (!init_target_chars ())
5429 /* If the format doesn't contain % args or %%, use strcpy. */
5430 if (strchr (fmt_str, target_percent) == 0)
5432 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5435 if ((nargs > 2) || ! fn)
5437 expand_expr (build_call_expr (fn, 2, dest, fmt),
5438 const0_rtx, VOIDmode, EXPAND_NORMAL);
5439 if (target == const0_rtx)
5441 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5442 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5444 /* If the format is "%s", use strcpy if the result isn't used. */
5445 else if (strcmp (fmt_str, target_percent_s) == 0)
5448 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5454 arg = CALL_EXPR_ARG (exp, 2);
5455 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5458 if (target != const0_rtx)
5460 len = c_strlen (arg, 1);
5461 if (! len || TREE_CODE (len) != INTEGER_CST)
5467 expand_expr (build_call_expr (fn, 2, dest, arg),
5468 const0_rtx, VOIDmode, EXPAND_NORMAL);
5470 if (target == const0_rtx)
5472 return expand_expr (len, target, mode, EXPAND_NORMAL);
5478 /* Expand a call to either the entry or exit function profiler:
5479 EXITP selects __cyg_profile_func_exit vs. _enter.  */
5481 expand_builtin_profile_func (bool exitp)
/* "this" is the address of the current function (C, so the name is
   legal); both profiler hooks take (this_fn, call_site).  */
5485 this = DECL_RTL (current_function_decl);
5486 gcc_assert (MEM_P (this));
5487 this = XEXP (this, 0);
5490 which = profile_function_exit_libfunc;
5492 which = profile_function_entry_libfunc;
5494 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5495 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5496 0),
5505 round_trampoline_addr (rtx tramp)
5507 rtx temp, addend, mask;
5509 /* If we don't need too much alignment, we'll have been guaranteed
5510 proper alignment by get_trampoline_type. */
5511 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5514 /* Round address up to desired boundary. */
5515 temp = gen_reg_rtx (Pmode);
5516 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5517 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5519 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5520 temp, 0, OPTAB_LIB_WIDEN);
5521 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5522 temp, 0, OPTAB_LIB_WIDEN);
/* Expand a call to __builtin_init_trampoline: copy the target's
   trampoline template (if any) to the stack slot and fill in the
   nested function address and static chain.  */
5528 expand_builtin_init_trampoline (tree exp)
5530 tree t_tramp, t_func, t_chain;
5531 rtx r_tramp, r_func, r_chain;
5532 #ifdef TRAMPOLINE_TEMPLATE
5536 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5537 POINTER_TYPE, VOID_TYPE))
5540 t_tramp = CALL_EXPR_ARG (exp, 0);
5541 t_func = CALL_EXPR_ARG (exp, 1);
5542 t_chain = CALL_EXPR_ARG (exp, 2);
5544 r_tramp = expand_normal (t_tramp);
5545 r_func = expand_normal (t_func);
5546 r_chain = expand_normal (t_chain);
5548 /* Generate insns to initialize the trampoline. */
5549 r_tramp = round_trampoline_addr (r_tramp);
5550 #ifdef TRAMPOLINE_TEMPLATE
5551 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5552 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5553 emit_block_move (blktramp, assemble_trampoline_template (),
5554 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
/* Flag so the output machinery emits any needed template section.  */
5556 trampolines_created = 1;
5557 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address to TRAMPOLINE_ALIGNMENT and apply any target-specific
   address adjustment (e.g. mode bits on some architectures).  */
5563 expand_builtin_adjust_trampoline (tree exp)
5567 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5570 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5571 tramp = round_trampoline_addr (tramp);
5572 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5573 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5579 /* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
5580 signbitd64, or signbitd128 function.
5581 Return NULL_RTX if a normal call should be emitted rather than expanding
5582 the function in-line. EXP is the expression that is a call to the builtin
5583 function; if convenient, the result should be placed in TARGET. */
5586 expand_builtin_signbit (tree exp, rtx target)
5588 const struct real_format *fmt;
5589 enum machine_mode fmode, imode, rmode;
5590 HOST_WIDE_INT hi, lo;
5595 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
/* FMODE is the mode of the float argument, RMODE the integer mode
   of the call's result.  */
5598 arg = CALL_EXPR_ARG (exp, 0);
5599 fmode = TYPE_MODE (TREE_TYPE (arg));
5600 rmode = TYPE_MODE (TREE_TYPE (exp));
5601 fmt = REAL_MODE_FORMAT (fmode);
5603 /* For floating point formats without a sign bit, implement signbit
5605 bitpos = fmt->signbit_ro;
5608 /* But we can't do this if the format supports signed zero. */
5609 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
/* Fall back to comparing against 0.0: valid only when there is no
   signed zero to distinguish.  */
5612 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5613 build_real (TREE_TYPE (arg), dconst0));
5614 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5617 temp = expand_normal (arg);
5618 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
/* The float fits in one word: view it as the corresponding integer
   mode (bail out via the elided path if there is none).  */
5620 imode = int_mode_for_mode (fmode);
5621 if (imode == BLKmode)
5623 temp = gen_lowpart (imode, temp);
5628 /* Handle targets with different FP word orders. */
/* Multi-word float: pick the word that holds the sign bit, then
   reduce BITPOS to a within-word position.  */
5629 if (FLOAT_WORDS_BIG_ENDIAN)
5630 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5632 word = bitpos / BITS_PER_WORD;
5633 temp = operand_subword_force (temp, word, fmode);
5634 bitpos = bitpos % BITS_PER_WORD;
5637 /* Force the intermediate word_mode (or narrower) result into a
5638 register. This avoids attempting to create paradoxical SUBREGs
5639 of floating point modes below. */
5640 temp = force_reg (imode, temp);
5642 /* If the bitpos is within the "result mode" lowpart, the operation
5643 can be implement with a single bitwise AND. Otherwise, we need
5644 a right shift and an AND. */
5646 if (bitpos < GET_MODE_BITSIZE (rmode))
/* Build the single-bit mask as a (lo, hi) HOST_WIDE_INT pair so it
   works even when bitpos exceeds HOST_BITS_PER_WIDE_INT.  */
5648 if (bitpos < HOST_BITS_PER_WIDE_INT)
5651 lo = (HOST_WIDE_INT) 1 << bitpos;
5655 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5660 temp = gen_lowpart (rmode, temp);
5661 temp = expand_binop (rmode, and_optab, temp,
5662 immed_double_const (lo, hi, rmode),
5663 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5667 /* Perform a logical right shift to place the signbit in the least
5668 significant bit, then truncate the result to the desired mode
5669 and mask just this bit. */
5670 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5671 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5672 temp = gen_lowpart (rmode, temp);
5673 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5674 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5680 /* Expand fork or exec calls. TARGET is the desired target of the
5681 call. EXP is the call. FN is the
5682 identificator of the actual function. IGNORE is nonzero if the
5683 value is to be ignored. */
5686 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5691 /* If we are not profiling, just call the function. */
5692 if (!profile_arc_flag)
5695 /* Otherwise call the wrapper. This should be equivalent for the rest of
5696 compiler, so the code does not diverge, and the wrapper may run the
5697 code necessary for keeping the profiling sane. */
/* Map each fork/exec builtin to its libgcov wrapper, which flushes
   profile counters around the underlying call.  */
5699 switch (DECL_FUNCTION_CODE (fn))
5702 id = get_identifier ("__gcov_fork");
5705 case BUILT_IN_EXECL:
5706 id = get_identifier ("__gcov_execl");
5709 case BUILT_IN_EXECV:
5710 id = get_identifier ("__gcov_execv");
5713 case BUILT_IN_EXECLP:
5714 id = get_identifier ("__gcov_execlp");
5717 case BUILT_IN_EXECLE:
5718 id = get_identifier ("__gcov_execle");
5721 case BUILT_IN_EXECVP:
5722 id = get_identifier ("__gcov_execvp");
5725 case BUILT_IN_EXECVE:
5726 id = get_identifier ("__gcov_execve");
/* Synthesize an external decl for the wrapper with the same type as
   the original function, then redirect the call to it.  */
5733 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5734 DECL_EXTERNAL (decl) = 1;
5735 TREE_PUBLIC (decl) = 1;
5736 DECL_ARTIFICIAL (decl) = 1;
5737 TREE_NOTHROW (decl) = 1;
5738 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5739 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5740 call = rewrite_call_expr (exp, 0, decl, 0);
5741 return expand_call (call, target, ignore);
5746 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5747 the pointer in these functions is void*, the tree optimizers may remove
5748 casts. The mode computed in expand_builtin isn't reliable either, due
5749 to __sync_bool_compare_and_swap.
5751 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5752 group of builtins. This gives us log2 of the mode size. */
5754 static inline enum machine_mode
5755 get_builtin_sync_mode (int fcode_diff)
5757 /* The size is not negotiable, so ask not to get BLKmode in return
5758 if the target indicates that a smaller size would be better. */
/* BITS_PER_UNIT << fcode_diff == 8 * 2^fcode_diff bits, i.e. the _1,
   _2, _4, _8, _16 builtin variants map to 1..16-byte integer modes.  */
5759 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5762 /* Expand the memory expression LOC and return the appropriate memory operand
5763 for the builtin_sync operations. */
5766 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5770 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM)
5772 /* Note that we explicitly do not want any alias information for this
5773 memory, so that we kill all other live memories. Otherwise we don't
5774 satisfy the full barrier semantics of the intrinsic. */
5775 mem = validize_mem (gen_rtx_MEM (mode, addr));
/* Mark the operand with the best alignment derivable from LOC, the
   barrier alias set, and volatility so it is never optimized away.  */
5777 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5778 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5779 MEM_VOLATILE_P (mem) = 1;
5784 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5785 EXP is the CALL_EXPR. CODE is the rtx code
5786 that corresponds to the arithmetic or logical operation from the name;
5787 an exception here is that NOT actually means NAND. TARGET is an optional
5788 place for us to store the results; AFTER is true if this is the
5789 fetch_and_xxx form. IGNORE is true if we don't actually care about
5790 the result of the operation at all. */
5793 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5794 enum rtx_code code, bool after,
5795 rtx target, bool ignore)
5798 enum machine_mode old_mode;
5800 /* Expand the operands. */
5801 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5803 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5804 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5805 of CONST_INTs, where we know the old_mode only from the call argument. */
5806 old_mode = GET_MODE (val);
5807 if (old_mode == VOIDmode)
5808 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5809 val = convert_modes (mode, old_mode, val, 1);
/* When the result is unused, expand the plain atomic op; otherwise
   expand the fetching form (AFTER selects op-then-fetch vs
   fetch-then-op).  */
5812 return expand_sync_operation (mem, val, code);
5814 return expand_sync_fetch_operation (mem, val, code, after, target);
5817 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5818 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5819 true if this is the boolean form. TARGET is a place for us to store the
5820 results; this is NOT optional if IS_BOOL is true. */
5823 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5824 bool is_bool, rtx target)
5826 rtx old_val, new_val, mem;
5827 enum machine_mode old_mode;
5829 /* Expand the operands. */
5830 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5833 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5834 mode, EXPAND_NORMAL);
5835 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5836 of CONST_INTs, where we know the old_mode only from the call argument. */
5837 old_mode = GET_MODE (old_val);
5838 if (old_mode == VOIDmode)
5839 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5840 old_val = convert_modes (mode, old_mode, old_val, 1);
/* Same promotion fix-up for the replacement value (arg 2); OLD_MODE
   is reused as a scratch here, not the mode of OLD_VAL.  */
5842 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5843 mode, EXPAND_NORMAL);
5844 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5845 of CONST_INTs, where we know the old_mode only from the call argument. */
5846 old_mode = GET_MODE (new_val);
5847 if (old_mode == VOIDmode)
5848 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5849 new_val = convert_modes (mode, old_mode, new_val, 1);
/* Boolean form returns success/failure; value form returns the
   previous memory contents.  */
5852 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5854 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5857 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5858 general form is actually an atomic exchange, and some targets only
5859 support a reduced form with the second argument being a constant 1.
5860 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5864 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5868 enum machine_mode old_mode;
5870 /* Expand the operands. */
5871 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5872 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5873 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5874 of CONST_INTs, where we know the old_mode only from the call argument. */
5875 old_mode = GET_MODE (val);
5876 if (old_mode == VOIDmode)
5877 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5878 val = convert_modes (mode, old_mode, val, 1);
5880 return expand_sync_lock_test_and_set (mem, val, target);
5883 /* Expand the __sync_synchronize intrinsic. */
5886 expand_builtin_synchronize (void)
5890 #ifdef HAVE_memory_barrier
/* Prefer the target's dedicated memory_barrier insn when it exists.  */
5891 if (HAVE_memory_barrier)
5893 emit_insn (gen_memory_barrier ());
5898 /* If no explicit memory barrier instruction is available, create an
5899 empty asm stmt with a memory clobber. */
/* The "memory" clobber makes this a compiler-level barrier only;
   it emits no machine instruction.  */
5900 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5901 tree_cons (NULL, build_string (6, "memory"), NULL));
5902 ASM_VOLATILE_P (x) = 1;
5903 expand_asm_expr (x);
5906 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5909 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5911 enum insn_code icode;
5913 rtx val = const0_rtx;
5915 /* Expand the operands. */
5916 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5918 /* If there is an explicit operation in the md file, use it. */
5919 icode = sync_lock_release[mode];
5920 if (icode != CODE_FOR_nothing)
/* The md pattern may not accept const0_rtx directly; force it into
   a register if the operand predicate rejects it.  */
5922 if (!insn_data[icode].operand[1].predicate (val, mode))
5923 val = force_reg (mode, val);
5925 insn = GEN_FCN (icode) (mem, val);
5933 /* Otherwise we can implement this operation by emitting a barrier
5934 followed by a store of zero. */
5935 expand_builtin_synchronize ();
5936 emit_move_insn (mem, val);
5939 /* Expand an expression EXP that calls a built-in function,
5940 with result going to TARGET if that's convenient
5941 (and in mode MODE if that's convenient).
5942 SUBTARGET may be used as the target for computing one of EXP's operands.
5943 IGNORE is nonzero if the value is to be ignored. */
5946 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5949 tree fndecl = get_callee_fndecl (exp);
5950 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5951 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
/* Machine-specific builtins are delegated wholesale to the target.  */
5953 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5954 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5956 /* When not optimizing, generate calls to library functions for a certain
5959 && !called_as_built_in (fndecl)
5960 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5961 && fcode != BUILT_IN_ALLOCA)
5962 return expand_call (exp, target, ignore);
5964 /* The built-in function expanders test for target == const0_rtx
5965 to determine whether the function's result will be ignored. */
5967 target = const0_rtx;
5969 /* If the result of a pure or const built-in function is ignored, and
5970 none of its arguments are volatile, we can avoid expanding the
5971 built-in call and just evaluate the arguments for side-effects. */
5972 if (target == const0_rtx
5973 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5975 bool volatilep = false;
5977 call_expr_arg_iterator iter;
5979 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5980 if (TREE_THIS_VOLATILE (arg))
5988 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5989 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* --- Floating-point math builtins --- */
5996 CASE_FLT_FN (BUILT_IN_FABS):
5997 target = expand_builtin_fabs (exp, target, subtarget);
6002 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6003 target = expand_builtin_copysign (exp, target, subtarget);
6008 /* Just do a normal library call if we were unable to fold
6010 CASE_FLT_FN (BUILT_IN_CABS):
6013 CASE_FLT_FN (BUILT_IN_EXP):
6014 CASE_FLT_FN (BUILT_IN_EXP10):
6015 CASE_FLT_FN (BUILT_IN_POW10):
6016 CASE_FLT_FN (BUILT_IN_EXP2):
6017 CASE_FLT_FN (BUILT_IN_EXPM1):
6018 CASE_FLT_FN (BUILT_IN_LOGB):
6019 CASE_FLT_FN (BUILT_IN_LOG):
6020 CASE_FLT_FN (BUILT_IN_LOG10):
6021 CASE_FLT_FN (BUILT_IN_LOG2):
6022 CASE_FLT_FN (BUILT_IN_LOG1P):
6023 CASE_FLT_FN (BUILT_IN_TAN):
6024 CASE_FLT_FN (BUILT_IN_ASIN):
6025 CASE_FLT_FN (BUILT_IN_ACOS):
6026 CASE_FLT_FN (BUILT_IN_ATAN):
6027 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6028 because of possible accuracy problems. */
6029 if (! flag_unsafe_math_optimizations)
6031 CASE_FLT_FN (BUILT_IN_SQRT):
6032 CASE_FLT_FN (BUILT_IN_FLOOR):
6033 CASE_FLT_FN (BUILT_IN_CEIL):
6034 CASE_FLT_FN (BUILT_IN_TRUNC):
6035 CASE_FLT_FN (BUILT_IN_ROUND):
6036 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6037 CASE_FLT_FN (BUILT_IN_RINT):
6038 target = expand_builtin_mathfn (exp, target, subtarget);
6043 CASE_FLT_FN (BUILT_IN_ILOGB):
6044 if (! flag_unsafe_math_optimizations)
6046 CASE_FLT_FN (BUILT_IN_ISINF):
6047 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6052 CASE_FLT_FN (BUILT_IN_LCEIL):
6053 CASE_FLT_FN (BUILT_IN_LLCEIL):
6054 CASE_FLT_FN (BUILT_IN_LFLOOR):
6055 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6056 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6061 CASE_FLT_FN (BUILT_IN_LRINT):
6062 CASE_FLT_FN (BUILT_IN_LLRINT):
6063 CASE_FLT_FN (BUILT_IN_LROUND):
6064 CASE_FLT_FN (BUILT_IN_LLROUND):
6065 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6070 CASE_FLT_FN (BUILT_IN_POW):
6071 target = expand_builtin_pow (exp, target, subtarget);
6076 CASE_FLT_FN (BUILT_IN_POWI):
6077 target = expand_builtin_powi (exp, target, subtarget);
6082 CASE_FLT_FN (BUILT_IN_ATAN2):
6083 CASE_FLT_FN (BUILT_IN_LDEXP):
6084 CASE_FLT_FN (BUILT_IN_SCALB):
6085 CASE_FLT_FN (BUILT_IN_SCALBN):
6086 CASE_FLT_FN (BUILT_IN_SCALBLN):
6087 if (! flag_unsafe_math_optimizations)
6090 CASE_FLT_FN (BUILT_IN_FMOD):
6091 CASE_FLT_FN (BUILT_IN_REMAINDER):
6092 CASE_FLT_FN (BUILT_IN_DREM):
6093 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6098 CASE_FLT_FN (BUILT_IN_CEXPI):
6099 target = expand_builtin_cexpi (exp, target, subtarget);
6100 gcc_assert (target);
6103 CASE_FLT_FN (BUILT_IN_SIN):
6104 CASE_FLT_FN (BUILT_IN_COS):
6105 if (! flag_unsafe_math_optimizations)
6107 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6112 CASE_FLT_FN (BUILT_IN_SINCOS):
6113 if (! flag_unsafe_math_optimizations)
6115 target = expand_builtin_sincos (exp);
/* --- __builtin_apply family --- */
6120 case BUILT_IN_APPLY_ARGS:
6121 return expand_builtin_apply_args ();
6123 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6124 FUNCTION with a copy of the parameters described by
6125 ARGUMENTS, and ARGSIZE. It returns a block of memory
6126 allocated on the stack into which is stored all the registers
6127 that might possibly be used for returning the result of a
6128 function. ARGUMENTS is the value returned by
6129 __builtin_apply_args. ARGSIZE is the number of bytes of
6130 arguments that must be copied. ??? How should this value be
6131 computed? We'll also need a safe worst case value for varargs
6133 case BUILT_IN_APPLY:
6134 if (!validate_arglist (exp, POINTER_TYPE,
6135 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6136 && !validate_arglist (exp, REFERENCE_TYPE,
6137 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6143 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6144 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6145 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6147 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6150 /* __builtin_return (RESULT) causes the function to return the
6151 value described by RESULT. RESULT is address of the block of
6152 memory returned by __builtin_apply. */
6153 case BUILT_IN_RETURN:
6154 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6155 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6158 case BUILT_IN_SAVEREGS:
6159 return expand_builtin_saveregs ();
6161 case BUILT_IN_ARGS_INFO:
6162 return expand_builtin_args_info (exp);
6164 /* Return the address of the first anonymous stack arg. */
6165 case BUILT_IN_NEXT_ARG:
6166 if (fold_builtin_next_arg (exp, false))
6168 return expand_builtin_next_arg ();
6170 case BUILT_IN_CLASSIFY_TYPE:
6171 return expand_builtin_classify_type (exp);
6173 case BUILT_IN_CONSTANT_P:
6176 case BUILT_IN_FRAME_ADDRESS:
6177 case BUILT_IN_RETURN_ADDRESS:
6178 return expand_builtin_frame_address (fndecl, exp);
6180 /* Returns the address of the area where the structure is returned.
6182 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6183 if (call_expr_nargs (exp) != 0
6184 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6185 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6188 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6190 case BUILT_IN_ALLOCA:
6191 target = expand_builtin_alloca (exp, target);
6196 case BUILT_IN_STACK_SAVE:
6197 return expand_stack_save ();
6199 case BUILT_IN_STACK_RESTORE:
6200 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6203 case BUILT_IN_BSWAP32:
6204 case BUILT_IN_BSWAP64:
6205 target = expand_builtin_bswap (exp, target, subtarget);
/* --- Bit-counting builtins, each mapped to its optab --- */
6211 CASE_INT_FN (BUILT_IN_FFS):
6212 case BUILT_IN_FFSIMAX:
6213 target = expand_builtin_unop (target_mode, exp, target,
6214 subtarget, ffs_optab);
6219 CASE_INT_FN (BUILT_IN_CLZ):
6220 case BUILT_IN_CLZIMAX:
6221 target = expand_builtin_unop (target_mode, exp, target,
6222 subtarget, clz_optab);
6227 CASE_INT_FN (BUILT_IN_CTZ):
6228 case BUILT_IN_CTZIMAX:
6229 target = expand_builtin_unop (target_mode, exp, target,
6230 subtarget, ctz_optab);
6235 CASE_INT_FN (BUILT_IN_POPCOUNT):
6236 case BUILT_IN_POPCOUNTIMAX:
6237 target = expand_builtin_unop (target_mode, exp, target,
6238 subtarget, popcount_optab);
6243 CASE_INT_FN (BUILT_IN_PARITY):
6244 case BUILT_IN_PARITYIMAX:
6245 target = expand_builtin_unop (target_mode, exp, target,
6246 subtarget, parity_optab);
/* --- String and memory builtins --- */
6251 case BUILT_IN_STRLEN:
6252 target = expand_builtin_strlen (exp, target, target_mode);
6257 case BUILT_IN_STRCPY:
6258 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6263 case BUILT_IN_STRNCPY:
6264 target = expand_builtin_strncpy (exp, target, mode);
6269 case BUILT_IN_STPCPY:
6270 target = expand_builtin_stpcpy (exp, target, mode);
6275 case BUILT_IN_STRCAT:
6276 target = expand_builtin_strcat (fndecl, exp, target, mode);
6281 case BUILT_IN_STRNCAT:
6282 target = expand_builtin_strncat (exp, target, mode);
6287 case BUILT_IN_STRSPN:
6288 target = expand_builtin_strspn (exp, target, mode);
6293 case BUILT_IN_STRCSPN:
6294 target = expand_builtin_strcspn (exp, target, mode);
6299 case BUILT_IN_STRSTR:
6300 target = expand_builtin_strstr (exp, target, mode);
6305 case BUILT_IN_STRPBRK:
6306 target = expand_builtin_strpbrk (exp, target, mode);
6311 case BUILT_IN_INDEX:
6312 case BUILT_IN_STRCHR:
6313 target = expand_builtin_strchr (exp, target, mode);
6318 case BUILT_IN_RINDEX:
6319 case BUILT_IN_STRRCHR:
6320 target = expand_builtin_strrchr (exp, target, mode);
6325 case BUILT_IN_MEMCPY:
6326 target = expand_builtin_memcpy (exp, target, mode);
6331 case BUILT_IN_MEMPCPY:
6332 target = expand_builtin_mempcpy (exp, target, mode);
6337 case BUILT_IN_MEMMOVE:
6338 target = expand_builtin_memmove (exp, target, mode, ignore);
6343 case BUILT_IN_BCOPY:
6344 target = expand_builtin_bcopy (exp, ignore);
6349 case BUILT_IN_MEMSET:
6350 target = expand_builtin_memset (exp, target, mode);
6355 case BUILT_IN_BZERO:
6356 target = expand_builtin_bzero (exp);
6361 case BUILT_IN_STRCMP:
6362 target = expand_builtin_strcmp (exp, target, mode);
6367 case BUILT_IN_STRNCMP:
6368 target = expand_builtin_strncmp (exp, target, mode);
6373 case BUILT_IN_MEMCHR:
6374 target = expand_builtin_memchr (exp, target, mode);
6380 case BUILT_IN_MEMCMP:
6381 target = expand_builtin_memcmp (exp, target, mode);
/* --- setjmp/longjmp lowering builtins --- */
6386 case BUILT_IN_SETJMP:
6387 /* This should have been lowered to the builtins below. */
6390 case BUILT_IN_SETJMP_SETUP:
6391 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6392 and the receiver label. */
6393 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6395 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6396 VOIDmode, EXPAND_NORMAL);
6397 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6398 rtx label_r = label_rtx (label);
6400 /* This is copied from the handling of non-local gotos. */
6401 expand_builtin_setjmp_setup (buf_addr, label_r);
6402 nonlocal_goto_handler_labels
6403 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6404 nonlocal_goto_handler_labels);
6405 /* ??? Do not let expand_label treat us as such since we would
6406 not want to be both on the list of non-local labels and on
6407 the list of forced labels. */
6408 FORCED_LABEL (label) = 0;
6413 case BUILT_IN_SETJMP_DISPATCHER:
6414 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6415 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6417 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6418 rtx label_r = label_rtx (label);
6420 /* Remove the dispatcher label from the list of non-local labels
6421 since the receiver labels have been added to it above. */
6422 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6427 case BUILT_IN_SETJMP_RECEIVER:
6428 /* __builtin_setjmp_receiver is passed the receiver label. */
6429 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6431 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6432 rtx label_r = label_rtx (label);
6434 expand_builtin_setjmp_receiver (label_r);
6439 /* __builtin_longjmp is passed a pointer to an array of five words.
6440 It's similar to the C library longjmp function but works with
6441 __builtin_setjmp above. */
6442 case BUILT_IN_LONGJMP:
6443 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6445 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6446 VOIDmode, EXPAND_NORMAL);
6447 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6449 if (value != const1_rtx)
6451 error ("%<__builtin_longjmp%> second argument must be 1");
6455 expand_builtin_longjmp (buf_addr, value);
6460 case BUILT_IN_NONLOCAL_GOTO:
6461 target = expand_builtin_nonlocal_goto (exp);
6466 /* This updates the setjmp buffer that is its argument with the value
6467 of the current stack pointer. */
6468 case BUILT_IN_UPDATE_SETJMP_BUF:
6469 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6472 = expand_normal (CALL_EXPR_ARG (exp, 0));
6474 expand_builtin_update_setjmp_buf (buf_addr);
6480 expand_builtin_trap ();
/* --- Formatted-output builtins --- */
6483 case BUILT_IN_PRINTF:
6484 target = expand_builtin_printf (exp, target, mode, false);
6489 case BUILT_IN_PRINTF_UNLOCKED:
6490 target = expand_builtin_printf (exp, target, mode, true);
6495 case BUILT_IN_FPUTS:
6496 target = expand_builtin_fputs (exp, target, false);
6500 case BUILT_IN_FPUTS_UNLOCKED:
6501 target = expand_builtin_fputs (exp, target, true);
6506 case BUILT_IN_FPRINTF:
6507 target = expand_builtin_fprintf (exp, target, mode, false);
6512 case BUILT_IN_FPRINTF_UNLOCKED:
6513 target = expand_builtin_fprintf (exp, target, mode, true);
6518 case BUILT_IN_SPRINTF:
6519 target = expand_builtin_sprintf (exp, target, mode);
6524 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6525 case BUILT_IN_SIGNBITD32:
6526 case BUILT_IN_SIGNBITD64:
6527 case BUILT_IN_SIGNBITD128:
6528 target = expand_builtin_signbit (exp, target);
6533 /* Various hooks for the DWARF 2 __throw routine. */
6534 case BUILT_IN_UNWIND_INIT:
6535 expand_builtin_unwind_init ();
6537 case BUILT_IN_DWARF_CFA:
6538 return virtual_cfa_rtx;
6539 #ifdef DWARF2_UNWIND_INFO
6540 case BUILT_IN_DWARF_SP_COLUMN:
6541 return expand_builtin_dwarf_sp_column ();
6542 case BUILT_IN_INIT_DWARF_REG_SIZES:
6543 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6546 case BUILT_IN_FROB_RETURN_ADDR:
6547 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6548 case BUILT_IN_EXTRACT_RETURN_ADDR:
6549 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6550 case BUILT_IN_EH_RETURN:
6551 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6552 CALL_EXPR_ARG (exp, 1));
6554 #ifdef EH_RETURN_DATA_REGNO
6555 case BUILT_IN_EH_RETURN_DATA_REGNO:
6556 return expand_builtin_eh_return_data_regno (exp);
6558 case BUILT_IN_EXTEND_POINTER:
6559 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
/* --- varargs builtins --- */
6561 case BUILT_IN_VA_START:
6562 case BUILT_IN_STDARG_START:
6563 return expand_builtin_va_start (exp);
6564 case BUILT_IN_VA_END:
6565 return expand_builtin_va_end (exp);
6566 case BUILT_IN_VA_COPY:
6567 return expand_builtin_va_copy (exp);
6568 case BUILT_IN_EXPECT:
6569 return expand_builtin_expect (exp, target);
6570 case BUILT_IN_PREFETCH:
6571 expand_builtin_prefetch (exp);
6574 case BUILT_IN_PROFILE_FUNC_ENTER:
6575 return expand_builtin_profile_func (false);
6576 case BUILT_IN_PROFILE_FUNC_EXIT:
6577 return expand_builtin_profile_func (true);
6579 case BUILT_IN_INIT_TRAMPOLINE:
6580 return expand_builtin_init_trampoline (exp);
6581 case BUILT_IN_ADJUST_TRAMPOLINE:
6582 return expand_builtin_adjust_trampoline (exp);
6585 case BUILT_IN_EXECL:
6586 case BUILT_IN_EXECV:
6587 case BUILT_IN_EXECLP:
6588 case BUILT_IN_EXECLE:
6589 case BUILT_IN_EXECVP:
6590 case BUILT_IN_EXECVE:
6591 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
/* --- __sync atomic builtins: fcode - FOO_1 encodes log2 of the
   operand size, decoded by get_builtin_sync_mode --- */
6596 case BUILT_IN_FETCH_AND_ADD_1:
6597 case BUILT_IN_FETCH_AND_ADD_2:
6598 case BUILT_IN_FETCH_AND_ADD_4:
6599 case BUILT_IN_FETCH_AND_ADD_8:
6600 case BUILT_IN_FETCH_AND_ADD_16:
6601 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6602 target = expand_builtin_sync_operation (mode, exp, PLUS,
6603 false, target, ignore);
6608 case BUILT_IN_FETCH_AND_SUB_1:
6609 case BUILT_IN_FETCH_AND_SUB_2:
6610 case BUILT_IN_FETCH_AND_SUB_4:
6611 case BUILT_IN_FETCH_AND_SUB_8:
6612 case BUILT_IN_FETCH_AND_SUB_16:
6613 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6614 target = expand_builtin_sync_operation (mode, exp, MINUS,
6615 false, target, ignore);
6620 case BUILT_IN_FETCH_AND_OR_1:
6621 case BUILT_IN_FETCH_AND_OR_2:
6622 case BUILT_IN_FETCH_AND_OR_4:
6623 case BUILT_IN_FETCH_AND_OR_8:
6624 case BUILT_IN_FETCH_AND_OR_16:
6625 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6626 target = expand_builtin_sync_operation (mode, exp, IOR,
6627 false, target, ignore);
6632 case BUILT_IN_FETCH_AND_AND_1:
6633 case BUILT_IN_FETCH_AND_AND_2:
6634 case BUILT_IN_FETCH_AND_AND_4:
6635 case BUILT_IN_FETCH_AND_AND_8:
6636 case BUILT_IN_FETCH_AND_AND_16:
6637 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6638 target = expand_builtin_sync_operation (mode, exp, AND,
6639 false, target, ignore);
6644 case BUILT_IN_FETCH_AND_XOR_1:
6645 case BUILT_IN_FETCH_AND_XOR_2:
6646 case BUILT_IN_FETCH_AND_XOR_4:
6647 case BUILT_IN_FETCH_AND_XOR_8:
6648 case BUILT_IN_FETCH_AND_XOR_16:
6649 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6650 target = expand_builtin_sync_operation (mode, exp, XOR,
6651 false, target, ignore);
6656 case BUILT_IN_FETCH_AND_NAND_1:
6657 case BUILT_IN_FETCH_AND_NAND_2:
6658 case BUILT_IN_FETCH_AND_NAND_4:
6659 case BUILT_IN_FETCH_AND_NAND_8:
6660 case BUILT_IN_FETCH_AND_NAND_16:
/* NOT here means NAND, per expand_builtin_sync_operation's contract.  */
6661 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6662 target = expand_builtin_sync_operation (mode, exp, NOT,
6663 false, target, ignore);
6668 case BUILT_IN_ADD_AND_FETCH_1:
6669 case BUILT_IN_ADD_AND_FETCH_2:
6670 case BUILT_IN_ADD_AND_FETCH_4:
6671 case BUILT_IN_ADD_AND_FETCH_8:
6672 case BUILT_IN_ADD_AND_FETCH_16:
6673 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6674 target = expand_builtin_sync_operation (mode, exp, PLUS,
6675 true, target, ignore);
6680 case BUILT_IN_SUB_AND_FETCH_1:
6681 case BUILT_IN_SUB_AND_FETCH_2:
6682 case BUILT_IN_SUB_AND_FETCH_4:
6683 case BUILT_IN_SUB_AND_FETCH_8:
6684 case BUILT_IN_SUB_AND_FETCH_16:
6685 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6686 target = expand_builtin_sync_operation (mode, exp, MINUS,
6687 true, target, ignore);
6692 case BUILT_IN_OR_AND_FETCH_1:
6693 case BUILT_IN_OR_AND_FETCH_2:
6694 case BUILT_IN_OR_AND_FETCH_4:
6695 case BUILT_IN_OR_AND_FETCH_8:
6696 case BUILT_IN_OR_AND_FETCH_16:
6697 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6698 target = expand_builtin_sync_operation (mode, exp, IOR,
6699 true, target, ignore);
6704 case BUILT_IN_AND_AND_FETCH_1:
6705 case BUILT_IN_AND_AND_FETCH_2:
6706 case BUILT_IN_AND_AND_FETCH_4:
6707 case BUILT_IN_AND_AND_FETCH_8:
6708 case BUILT_IN_AND_AND_FETCH_16:
6709 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6710 target = expand_builtin_sync_operation (mode, exp, AND,
6711 true, target, ignore);
6716 case BUILT_IN_XOR_AND_FETCH_1:
6717 case BUILT_IN_XOR_AND_FETCH_2:
6718 case BUILT_IN_XOR_AND_FETCH_4:
6719 case BUILT_IN_XOR_AND_FETCH_8:
6720 case BUILT_IN_XOR_AND_FETCH_16:
6721 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6722 target = expand_builtin_sync_operation (mode, exp, XOR,
6723 true, target, ignore);
6728 case BUILT_IN_NAND_AND_FETCH_1:
6729 case BUILT_IN_NAND_AND_FETCH_2:
6730 case BUILT_IN_NAND_AND_FETCH_4:
6731 case BUILT_IN_NAND_AND_FETCH_8:
6732 case BUILT_IN_NAND_AND_FETCH_16:
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6734 target = expand_builtin_sync_operation (mode, exp, NOT,
6735 true, target, ignore);
6740 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6741 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6742 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6743 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6744 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
/* The boolean CAS needs a register target for its result before MODE
   is repurposed for the memory operand's mode below.  */
6745 if (mode == VOIDmode)
6746 mode = TYPE_MODE (boolean_type_node);
6747 if (!target || !register_operand (target, mode))
6748 target = gen_reg_rtx (mode);
6750 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6751 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6756 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6757 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6758 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6759 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6760 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6761 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6762 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6767 case BUILT_IN_LOCK_TEST_AND_SET_1:
6768 case BUILT_IN_LOCK_TEST_AND_SET_2:
6769 case BUILT_IN_LOCK_TEST_AND_SET_4:
6770 case BUILT_IN_LOCK_TEST_AND_SET_8:
6771 case BUILT_IN_LOCK_TEST_AND_SET_16:
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6773 target = expand_builtin_lock_test_and_set (mode, exp, target);
6778 case BUILT_IN_LOCK_RELEASE_1:
6779 case BUILT_IN_LOCK_RELEASE_2:
6780 case BUILT_IN_LOCK_RELEASE_4:
6781 case BUILT_IN_LOCK_RELEASE_8:
6782 case BUILT_IN_LOCK_RELEASE_16:
6783 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6784 expand_builtin_lock_release (mode, exp);
6787 case BUILT_IN_SYNCHRONIZE:
6788 expand_builtin_synchronize ();
/* --- _FORTIFY_SOURCE object-size checking builtins --- */
6791 case BUILT_IN_OBJECT_SIZE:
6792 return expand_builtin_object_size (exp);
6794 case BUILT_IN_MEMCPY_CHK:
6795 case BUILT_IN_MEMPCPY_CHK:
6796 case BUILT_IN_MEMMOVE_CHK:
6797 case BUILT_IN_MEMSET_CHK:
6798 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6803 case BUILT_IN_STRCPY_CHK:
6804 case BUILT_IN_STPCPY_CHK:
6805 case BUILT_IN_STRNCPY_CHK:
6806 case BUILT_IN_STRCAT_CHK:
6807 case BUILT_IN_STRNCAT_CHK:
6808 case BUILT_IN_SNPRINTF_CHK:
6809 case BUILT_IN_VSNPRINTF_CHK:
/* These _chk variants are not expanded inline here; only emit a
   diagnostic when the check is provably violated.  */
6810 maybe_emit_chk_warning (exp, fcode);
6813 case BUILT_IN_SPRINTF_CHK:
6814 case BUILT_IN_VSPRINTF_CHK:
6815 maybe_emit_sprintf_chk_warning (exp, fcode);
6818 default: /* just do library call, if unknown builtin */
6822 /* The switch statement above can drop through to cause the function
6823 to be called normally. */
6824 return expand_call (exp, target, ignore);
6827 /* Determine whether a tree node represents a call to a built-in
6828 function. If the tree T is a call to a built-in function with
6829 the right number of arguments of the appropriate types, return
6830 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6831 Otherwise the return value is END_BUILTINS. */
/* Classify T: if it is a CALL_EXPR to a non-machine-dependent built-in
   whose actual argument types match the declared parameter types, return
   its DECL_FUNCTION_CODE; otherwise return END_BUILTINS.
   NOTE(review): several structural lines (braces, some statements) are
   missing from this extraction — the code below is kept byte-identical.  */
6833 enum built_in_function
6834 builtin_mathfn_code (tree t)
6836 tree fndecl, arg, parmlist;
6837 tree argtype, parmtype;
6838 call_expr_arg_iterator iter;
/* Must be a direct call through the address of a function decl.  */
6840 if (TREE_CODE (t) != CALL_EXPR
6841 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6842 return END_BUILTINS;
/* Reject non-built-ins and target-specific (BUILT_IN_MD) built-ins.  */
6844 fndecl = get_callee_fndecl (t);
6845 if (fndecl == NULL_TREE
6846 || TREE_CODE (fndecl) != FUNCTION_DECL
6847 || ! DECL_BUILT_IN (fndecl)
6848 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6849 return END_BUILTINS;
/* Walk the declared parameter list in parallel with the actual
   call arguments, checking type-class compatibility pairwise.  */
6851 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6852 init_call_expr_arg_iterator (t, &iter);
6853 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6855 /* If a function doesn't take a variable number of arguments,
6856 the last element in the list will have type `void'.  */
6857 parmtype = TREE_VALUE (parmlist);
6858 if (VOID_TYPE_P (parmtype))
/* End of parameter list: any leftover actual argument is an error.  */
6860 if (more_call_expr_args_p (&iter))
6861 return END_BUILTINS;
6862 return DECL_FUNCTION_CODE (fndecl);
/* Fewer actual arguments than declared parameters.  */
6865 if (! more_call_expr_args_p (&iter))
6866 return END_BUILTINS;
6868 arg = next_call_expr_arg (&iter);
6869 argtype = TREE_TYPE (arg);
/* Argument and parameter must belong to the same broad type class.  */
6871 if (SCALAR_FLOAT_TYPE_P (parmtype))
6873 if (! SCALAR_FLOAT_TYPE_P (argtype))
6874 return END_BUILTINS;
6876 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6878 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6879 return END_BUILTINS;
6881 else if (POINTER_TYPE_P (parmtype))
6883 if (! POINTER_TYPE_P (argtype))
6884 return END_BUILTINS;
6886 else if (INTEGRAL_TYPE_P (parmtype))
6888 if (! INTEGRAL_TYPE_P (argtype))
6889 return END_BUILTINS;
/* Unrecognized parameter type class.  */
6892 return END_BUILTINS;
6895 /* Variable-length argument list.  */
6896 return DECL_FUNCTION_CODE (fndecl);
6899 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6900 evaluate to a constant. */
/* Fold __builtin_constant_p (ARG): return integer_one_node when ARG is
   provably a compile-time constant, integer_zero_node when it provably
   is not (or when no further optimization will run), and otherwise
   leave the call unfolded (fall-through path not visible here).  */
6903 fold_builtin_constant_p (tree arg)
6905 /* We return 1 for a numeric type that's known to be a constant
6906 value at compile-time or for an aggregate type that's a
6907 literal constant.  */
6910 /* If we know this is a constant, emit the constant of one.  */
6911 if (CONSTANT_CLASS_P (arg)
6912 || (TREE_CODE (arg) == CONSTRUCTOR
6913 && TREE_CONSTANT (arg)))
6914 return integer_one_node;
/* The address of a string literal (or of its element 0) is constant.  */
6915 if (TREE_CODE (arg) == ADDR_EXPR)
6917 tree op = TREE_OPERAND (arg, 0);
6918 if (TREE_CODE (op) == STRING_CST
6919 || (TREE_CODE (op) == ARRAY_REF
6920 && integer_zerop (TREE_OPERAND (op, 1))
6921 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6922 return integer_one_node;
6925 /* If this expression has side effects, show we don't know it to be a
6926 constant.  Likewise if it's a pointer or aggregate type since in
6927 those case we only want literals, since those are only optimized
6928 when generating RTL, not later.
6929 And finally, if we are compiling an initializer, not code, we
6930 need to return a definite result now; there's not going to be any
6931 more optimization done.  */
6932 if (TREE_SIDE_EFFECTS (arg)
6933 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6934 || POINTER_TYPE_P (TREE_TYPE (arg))
6936 || folding_initializer)
6937 return integer_zero_node;
6942 /* Fold a call to __builtin_expect with argument ARG, if we expect that a
6943 comparison against the argument will fold to a constant. In practice,
6944 this means a true constant or the address of a non-weak symbol. */
/* Fold __builtin_expect (ARG, ...) to ARG itself when comparing ARG
   against a constant would fold: ARG must be invariant and must not be
   the address of a weak symbol (whose value is not known at link time).  */
6947 fold_builtin_expect (tree arg)
6951 /* If the argument isn't invariant, then there's nothing we can do.  */
6952 if (!TREE_INVARIANT (arg))
6955 /* If we're looking at an address of a weak decl, then do not fold.  */
6958 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip COMPONENT_REF/ARRAY_REF wrappers to reach the underlying decl.  */
6962 inner = TREE_OPERAND (inner, 0);
6964 while (TREE_CODE (inner) == COMPONENT_REF
6965 || TREE_CODE (inner) == ARRAY_REF);
6966 if (DECL_P (inner) && DECL_WEAK (inner))
6970 /* Otherwise, ARG already has the proper type for the return value.  */
6974 /* Fold a call to __builtin_classify_type with argument ARG. */
/* Fold __builtin_classify_type (ARG) to the integer class of ARG's type;
   a missing argument (not visible here, but implied by the first return)
   yields no_type_class.  */
6977 fold_builtin_classify_type (tree arg)
6980 return build_int_cst (NULL_TREE, no_type_class);
6982 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6985 /* Fold a call to __builtin_strlen with argument ARG. */
/* Fold __builtin_strlen (ARG) when ARG's length is computable at
   compile time via c_strlen; the result is converted to size_t.  */
6988 fold_builtin_strlen (tree arg)
6990 if (!validate_arg (arg, POINTER_TYPE))
6994 tree len = c_strlen (arg, 0);
6998 /* Convert from the internal "sizetype" type to "size_t".  */
7000 len = fold_convert (size_type_node, len);
7008 /* Fold a call to __builtin_inf or __builtin_huge_val. */
/* Build a real constant of TYPE representing infinity, for
   __builtin_inf / __builtin_huge_val.  WARN requests the C99-mandated
   diagnostic when TYPE's format has no infinities.  */
7011 fold_builtin_inf (tree type, int warn)
7013 REAL_VALUE_TYPE real;
7015 /* __builtin_inff is intended to be usable to define INFINITY on all
7016 targets.  If an infinity is not available, INFINITY expands "to a
7017 positive constant of type float that overflows at translation
7018 time", footnote "In this case, using INFINITY will violate the
7019 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7020 Thus we pedwarn to ensure this constraint violation is
7022 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7023 pedwarn ("target format does not support infinity");
/* NOTE(review): the call that fills REAL (real_inf in upstream GCC) is
   missing from this extraction.  */
7026 return build_real (type, real);
7029 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
/* Fold __builtin_nan / __builtin_nans (ARG): ARG must be a string
   constant naming the NaN payload; QUIET selects quiet vs. signaling.
   Returns the folded REAL_CST, or fails when the string is not
   constant or not a valid NaN spec for TYPE's mode.  */
7032 fold_builtin_nan (tree arg, tree type, int quiet)
7034 REAL_VALUE_TYPE real;
7037 if (!validate_arg (arg, POINTER_TYPE))
7039 str = c_getstr (arg);
7043 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7046 return build_real (type, real);
7049 /* Return true if the floating point expression T has an integer value.
7050 We also allow +Inf, -Inf and NaN to be considered integer values. */
/* Return true if the floating-point expression T provably has an
   integer value (with +Inf/-Inf/NaN treated as integer-valued),
   by structural recursion on T's tree code.  */
7053 integer_valued_real_p (tree t)
7055 switch (TREE_CODE (t))
7062 case NON_LVALUE_EXPR:
7063 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* For a GIMPLE assignment-like node, classify the right-hand side.  */
7068 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
/* Binary arithmetic is integer-valued when both operands are.  */
7075 return integer_valued_real_p (TREE_OPERAND (t, 0))
7076 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* A conditional is integer-valued when both selected arms are.  */
7079 return integer_valued_real_p (TREE_OPERAND (t, 1))
7080 && integer_valued_real_p (TREE_OPERAND (t, 2));
/* A literal constant: ask the real-number layer directly.  */
7083 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* A conversion: from an integer type the result is trivially integral;
   from a narrower real type, recurse on the converted operand.  */
7087 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7088 if (TREE_CODE (type) == INTEGER_TYPE)
7090 if (TREE_CODE (type) == REAL_TYPE)
7091 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* Calls to rounding built-ins always yield integral values; fmin/fmax
   do when both of their arguments do.  */
7096 switch (builtin_mathfn_code (t))
7098 CASE_FLT_FN (BUILT_IN_CEIL):
7099 CASE_FLT_FN (BUILT_IN_FLOOR):
7100 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7101 CASE_FLT_FN (BUILT_IN_RINT):
7102 CASE_FLT_FN (BUILT_IN_ROUND):
7103 CASE_FLT_FN (BUILT_IN_TRUNC):
7106 CASE_FLT_FN (BUILT_IN_FMIN):
7107 CASE_FLT_FN (BUILT_IN_FMAX):
7108 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7109 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7122 /* FNDECL is assumed to be a builtin where truncation can be propagated
7123 across (for instance floor((double)f) == (double)floorf (f).
7124 Do the transformation for a call with argument ARG. */
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Fold the call with argument ARG: drop idempotent double rounding,
   drop the rounding entirely for already-integral arguments (when
   errno need not be set), and narrow the call to a lower-precision
   variant when ARG is only a widened narrower value.  */
7127 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7129 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7131 if (!validate_arg (arg, REAL_TYPE))
7134 /* Integer rounding functions are idempotent.  */
7135 if (fcode == builtin_mathfn_code (arg))
7138 /* If argument is already integer valued, and we don't need to worry
7139 about setting errno, there's no need to perform rounding.  */
7140 if (! flag_errno_math && integer_valued_real_p (arg))
/* Try the narrower function, e.g. floor -> floorf, when ARG is a
   float extension and a lower-precision built-in exists.  */
7145 tree arg0 = strip_float_extensions (arg);
7146 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7147 tree newtype = TREE_TYPE (arg0);
7150 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7151 && (decl = mathfn_built_in (newtype, fcode)))
7152 return fold_convert (ftype,
7153 build_call_expr (decl, 1,
7154 fold_convert (newtype, arg0)));
7159 /* FNDECL is assumed to be builtin which can narrow the FP type of
7160 the argument, for instance lround((double)f) -> lroundf (f).
7161 Do the transformation for a call with argument ARG. */
/* FNDECL is assumed to be a builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Also canonicalizes llround-family calls to the lround family when
   long and long long have the same precision (LP64).  */
7164 fold_fixed_mathfn (tree fndecl, tree arg)
7166 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7168 if (!validate_arg (arg, REAL_TYPE))
7171 /* If argument is already integer valued, and we don't need to worry
7172 about setting errno, there's no need to perform rounding.  */
7173 if (! flag_errno_math && integer_valued_real_p (arg))
7174 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Narrow the call when ARG is merely a widened narrower float.  */
7178 tree ftype = TREE_TYPE (arg);
7179 tree arg0 = strip_float_extensions (arg);
7180 tree newtype = TREE_TYPE (arg0);
7183 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7184 && (decl = mathfn_built_in (newtype, fcode)))
7185 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7188 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7189 sizeof (long long) == sizeof (long).  */
7190 if (TYPE_PRECISION (long_long_integer_type_node)
7191 == TYPE_PRECISION (long_integer_type_node))
7193 tree newfn = NULL_TREE;
/* Map each ll* rounding built-in to its l* counterpart.  */
7196 CASE_FLT_FN (BUILT_IN_LLCEIL):
7197 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7200 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7201 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7204 CASE_FLT_FN (BUILT_IN_LLROUND):
7205 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7208 CASE_FLT_FN (BUILT_IN_LLRINT):
7209 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
/* Call the narrower function and convert back to the ll* return type.  */
7218 tree newcall = build_call_expr(newfn, 1, arg);
7219 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7226 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7227 return type. Return NULL_TREE if no simplification can be made. */
/* Fold cabs/cabsf/cabsl (ARG) of return TYPE.  Handles: constant
   arguments via MPFR; COMPLEX_EXPRs with a zero part; cabs(x+xi);
   sign-stripping cabs(-z)/cabs(conj(z)); and, with unsafe math and not
   optimizing for size, open-coding as sqrt(r*r + i*i).  */
7230 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7234 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7235 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7238 /* Calculate the result when the argument is a constant.  */
7239 if (TREE_CODE (arg) == COMPLEX_CST
7240 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7244 if (TREE_CODE (arg) == COMPLEX_EXPR)
7246 tree real = TREE_OPERAND (arg, 0);
7247 tree imag = TREE_OPERAND (arg, 1);
7249 /* If either part is zero, cabs is fabs of the other.  */
7250 if (real_zerop (real))
7251 return fold_build1 (ABS_EXPR, type, imag);
7252 if (real_zerop (imag))
7253 return fold_build1 (ABS_EXPR, type, real);
7255 /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
7256 if (flag_unsafe_math_optimizations
7257 && operand_equal_p (real, imag, OEP_PURE_SAME))
7259 const REAL_VALUE_TYPE sqrt2_trunc
7260 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
7262 return fold_build2 (MULT_EXPR, type,
7263 fold_build1 (ABS_EXPR, type, real),
7264 build_real (type, sqrt2_trunc));
7268 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
7269 if (TREE_CODE (arg) == NEGATE_EXPR
7270 || TREE_CODE (arg) == CONJ_EXPR)
7271 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7273 /* Don't do this when optimizing for size.  */
7274 if (flag_unsafe_math_optimizations
7275 && optimize && !optimize_size)
7277 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7279 if (sqrtfn != NULL_TREE)
7281 tree rpart, ipart, result;
/* Save the argument and its parts so they are evaluated only once.  */
7283 arg = builtin_save_expr (arg);
7285 rpart = fold_build1 (REALPART_EXPR, type, arg);
7286 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7288 rpart = builtin_save_expr (rpart);
7289 ipart = builtin_save_expr (ipart);
/* sqrt (rpart*rpart + ipart*ipart).  */
7291 result = fold_build2 (PLUS_EXPR, type,
7292 fold_build2 (MULT_EXPR, type,
7294 fold_build2 (MULT_EXPR, type,
7297 return build_call_expr (sqrtfn, 1, result);
7304 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7305 Return NULL_TREE if no simplification can be made. */
/* Fold sqrt/sqrtf/sqrtl (ARG) of return TYPE: constant-fold via MPFR,
   and under -funsafe-math-optimizations rewrite sqrt(expN(x)),
   sqrt(sqrt/cbrt(x)) and sqrt(pow(x,y)) into cheaper equivalents.  */
7308 fold_builtin_sqrt (tree arg, tree type)
7311 enum built_in_function fcode;
7314 if (!validate_arg (arg, REAL_TYPE))
7317 /* Calculate the result when the argument is a constant.  */
7318 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7321 /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
7322 fcode = builtin_mathfn_code (arg);
7323 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7325 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7326 arg = fold_build2 (MULT_EXPR, type,
7327 CALL_EXPR_ARG (arg, 0),
7328 build_real (type, dconsthalf));
7329 return build_call_expr (expfn, 1, arg);
7332 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
7333 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7335 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7339 tree arg0 = CALL_EXPR_ARG (arg, 0);
7341 /* The inner root was either sqrt or cbrt.  */
7342 REAL_VALUE_TYPE dconstroot =
7343 BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;
7345 /* Adjust for the outer root: halve the exponent by decrementing
7346 the binary exponent of the REAL_VALUE_TYPE.  */
7346 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7347 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7348 tree_root = build_real (type, dconstroot);
7349 return build_call_expr (powfn, 2, arg0, tree_root);
7353 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
7354 if (flag_unsafe_math_optimizations
7355 && (fcode == BUILT_IN_POW
7356 || fcode == BUILT_IN_POWF
7357 || fcode == BUILT_IN_POWL))
7359 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7360 tree arg0 = CALL_EXPR_ARG (arg, 0);
7361 tree arg1 = CALL_EXPR_ARG (arg, 1);
/* |x| is needed because pow of a negative base with non-integral
   exponent would otherwise change the domain.  */
7363 if (!tree_expr_nonnegative_p (arg0))
7364 arg0 = build1 (ABS_EXPR, type, arg0);
7365 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7366 build_real (type, dconsthalf));
7367 return build_call_expr (powfn, 2, arg0, narg1);
7373 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7374 Return NULL_TREE if no simplification can be made. */
/* Fold cbrt/cbrtf/cbrtl (ARG) of return TYPE: constant-fold via MPFR,
   and under -funsafe-math-optimizations rewrite cbrt(expN(x)),
   cbrt(sqrt(x)), cbrt(cbrt(x)) and cbrt(pow(x,y)) into pow/exp forms.  */
7377 fold_builtin_cbrt (tree arg, tree type)
7379 const enum built_in_function fcode = builtin_mathfn_code (arg);
7382 if (!validate_arg (arg, REAL_TYPE))
7385 /* Calculate the result when the argument is a constant.  */
7386 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7389 if (flag_unsafe_math_optimizations)
7391 /* Optimize cbrt(expN(x)) -> expN(x/3).  */
7392 if (BUILTIN_EXPONENT_P (fcode))
7394 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7395 const REAL_VALUE_TYPE third_trunc =
7396 real_value_truncate (TYPE_MODE (type), dconstthird);
7397 arg = fold_build2 (MULT_EXPR, type,
7398 CALL_EXPR_ARG (arg, 0),
7399 build_real (type, third_trunc));
7400 return build_call_expr (expfn, 1, arg);
7403 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
7404 if (BUILTIN_SQRT_P (fcode))
7406 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7410 tree arg0 = CALL_EXPR_ARG (arg, 0);
/* 1/6 = (1/3) / 2: halve 1/3 by decrementing its binary exponent.  */
7412 REAL_VALUE_TYPE dconstroot = dconstthird;
7414 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7415 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7416 tree_root = build_real (type, dconstroot);
7417 return build_call_expr (powfn, 2, arg0, tree_root);
7421 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
7422 if (BUILTIN_CBRT_P (fcode))
7424 tree arg0 = CALL_EXPR_ARG (arg, 0);
7425 if (tree_expr_nonnegative_p (arg0))
7427 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7432 REAL_VALUE_TYPE dconstroot;
/* 1/9 = (1/3) * (1/3).  */
7434 real_arithmetic (&dconstroot, MULT_EXPR, &dconstthird, &dconstthird);
7435 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7436 tree_root = build_real (type, dconstroot);
7437 return build_call_expr (powfn, 2, arg0, tree_root);
7442 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
7443 if (fcode == BUILT_IN_POW
7444 || fcode == BUILT_IN_POWF
7445 || fcode == BUILT_IN_POWL)
7447 tree arg00 = CALL_EXPR_ARG (arg, 0);
7448 tree arg01 = CALL_EXPR_ARG (arg, 1);
7449 if (tree_expr_nonnegative_p (arg00))
7451 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7452 const REAL_VALUE_TYPE dconstroot
7453 = real_value_truncate (TYPE_MODE (type), dconstthird);
7454 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7455 build_real (type, dconstroot));
7456 return build_call_expr (powfn, 2, arg00, narg01);
7463 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7464 TYPE is the type of the return value. Return NULL_TREE if no
7465 simplification can be made. */
/* Fold cos/cosf/cosl (ARG) of return TYPE: constant-fold via MPFR and
   exploit evenness by stripping sign operations (cos(-x) -> cos(x)).  */
7468 fold_builtin_cos (tree arg, tree type, tree fndecl)
7472 if (!validate_arg (arg, REAL_TYPE))
7475 /* Calculate the result when the argument is a constant.  */
7476 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7479 /* Optimize cos(-x) into cos (x).  */
7480 if ((narg = fold_strip_sign_ops (arg)))
7481 return build_call_expr (fndecl, 1, narg);
7486 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7487 Return NULL_TREE if no simplification can be made. */
/* Fold cosh/coshf/coshl (ARG): same strategy as fold_builtin_cos —
   MPFR constant folding plus evenness (cosh(-x) -> cosh(x)).  */
7490 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7492 if (validate_arg (arg, REAL_TYPE))
7496 /* Calculate the result when the argument is a constant.  */
7497 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7500 /* Optimize cosh(-x) into cosh (x).  */
7501 if ((narg = fold_strip_sign_ops (arg)))
7502 return build_call_expr (fndecl, 1, narg);
7508 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7509 Return NULL_TREE if no simplification can be made. */
/* Fold tan/tanf/tanl (ARG): constant-fold via MPFR, and under
   -funsafe-math-optimizations cancel the inverse pair tan(atan(x)) -> x.  */
7512 fold_builtin_tan (tree arg, tree type)
7514 enum built_in_function fcode;
7517 if (!validate_arg (arg, REAL_TYPE))
7520 /* Calculate the result when the argument is a constant.  */
7521 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7524 /* Optimize tan(atan(x)) = x.  */
7525 fcode = builtin_mathfn_code (arg);
7526 if (flag_unsafe_math_optimizations
7527 && (fcode == BUILT_IN_ATAN
7528 || fcode == BUILT_IN_ATANF
7529 || fcode == BUILT_IN_ATANL))
7530 return CALL_EXPR_ARG (arg, 0);
7535 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7536 NULL_TREE if no simplification can be made. */
/* Fold sincos (ARG0, ARG1, ARG2) where ARG1/ARG2 point at the sin/cos
   result slots: constant-fold via MPFR, otherwise canonicalize to a
   single cexpi call whose imag/real parts are stored through the
   pointers.  */
7539 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7544 if (!validate_arg (arg0, REAL_TYPE)
7545 || !validate_arg (arg1, POINTER_TYPE)
7546 || !validate_arg (arg2, POINTER_TYPE))
7549 type = TREE_TYPE (arg0);
7551 /* Calculate the result when the argument is a constant.  */
7552 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7555 /* Canonicalize sincos to cexpi.  */
7556 if (!TARGET_C99_FUNCTIONS)
7558 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
/* Save the cexpi result so it is computed only once, then store
   imag -> *arg1 (sin) and real -> *arg2 (cos).  */
7562 call = build_call_expr (fn, 1, arg0);
7563 call = builtin_save_expr (call);
7565 return build2 (COMPOUND_EXPR, type,
7566 build2 (MODIFY_EXPR, void_type_node,
7567 build_fold_indirect_ref (arg1),
7568 build1 (IMAGPART_EXPR, type, call)),
7569 build2 (MODIFY_EXPR, void_type_node,
7570 build_fold_indirect_ref (arg2),
7571 build1 (REALPART_EXPR, type, call)));
7574 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7575 NULL_TREE if no simplification can be made. */
/* Fold cexp (ARG0) of complex return TYPE: when the real part is a
   constant zero rewrite to cexpi(imag), and under unsafe math split
   cexp(r + i*I) into exp(r) * cexpi(i).  */
7578 fold_builtin_cexp (tree arg0, tree type)
7581 tree realp, imagp, ifn;
7583 if (!validate_arg (arg0, COMPLEX_TYPE))
/* RTYPE is the scalar (real) component type of the complex argument.  */
7586 rtype = TREE_TYPE (TREE_TYPE (arg0));
7588 /* In case we can figure out the real part of arg0 and it is constant zero
7590 if (!TARGET_C99_FUNCTIONS)
7592 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7596 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7597 && real_zerop (realp))
7599 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7600 return build_call_expr (ifn, 1, narg);
7603 /* In case we can easily decompose real and imaginary parts split cexp
7604 to exp (r) * cexpi (i).  */
7605 if (flag_unsafe_math_optimizations
7608 tree rfn, rcall, icall;
7610 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7614 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
/* Save both calls so each is evaluated exactly once.  */
7618 icall = build_call_expr (ifn, 1, imagp);
7619 icall = builtin_save_expr (icall);
7620 rcall = build_call_expr (rfn, 1, realp);
7621 rcall = builtin_save_expr (rcall);
7622 return build2 (COMPLEX_EXPR, type,
7623 build2 (MULT_EXPR, rtype,
7625 build1 (REALPART_EXPR, rtype, icall)),
7626 build2 (MULT_EXPR, rtype,
7628 build1 (IMAGPART_EXPR, rtype, icall)));
7634 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7635 Return NULL_TREE if no simplification can be made. */
/* Fold trunc/truncf/truncl (ARG): fold constant arguments with
   real_trunc, otherwise defer to the generic truncation-transparent
   handling.  */
7638 fold_builtin_trunc (tree fndecl, tree arg)
7640 if (!validate_arg (arg, REAL_TYPE))
7643 /* Optimize trunc of constant value.  */
7644 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7646 REAL_VALUE_TYPE r, x;
7647 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7649 x = TREE_REAL_CST (arg);
7650 real_trunc (&r, TYPE_MODE (type), &x);
7651 return build_real (type, r);
7654 return fold_trunc_transparent_mathfn (fndecl, arg);
7657 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7658 Return NULL_TREE if no simplification can be made. */
/* Fold floor/floorf/floorl (ARG): fold non-NaN constants with
   real_floor (NaN constants are left alone when errno-math is on),
   rewrite floor of a nonnegative value to trunc, then defer to the
   generic truncation-transparent handling.  */
7661 fold_builtin_floor (tree fndecl, tree arg)
7663 if (!validate_arg (arg, REAL_TYPE))
7666 /* Optimize floor of constant value.  */
7667 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7671 x = TREE_REAL_CST (arg);
7672 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7674 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7677 real_floor (&r, TYPE_MODE (type), &x);
7678 return build_real (type, r);
7682 /* Fold floor (x) where x is nonnegative to trunc (x).  */
7683 if (tree_expr_nonnegative_p (arg))
7685 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7687 return build_call_expr (truncfn, 1, arg);
7690 return fold_trunc_transparent_mathfn (fndecl, arg);
7693 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7694 Return NULL_TREE if no simplification can be made. */
/* Fold ceil/ceilf/ceill (ARG): fold non-NaN constants with real_ceil,
   then defer to the generic truncation-transparent handling.  */
7697 fold_builtin_ceil (tree fndecl, tree arg)
7699 if (!validate_arg (arg, REAL_TYPE))
7702 /* Optimize ceil of constant value.  */
7703 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7707 x = TREE_REAL_CST (arg);
7708 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7710 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7713 real_ceil (&r, TYPE_MODE (type), &x);
7714 return build_real (type, r);
7718 return fold_trunc_transparent_mathfn (fndecl, arg);
7721 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7722 Return NULL_TREE if no simplification can be made. */
/* Fold round/roundf/roundl (ARG): fold non-NaN constants with
   real_round, then defer to the generic truncation-transparent
   handling.  */
7725 fold_builtin_round (tree fndecl, tree arg)
7727 if (!validate_arg (arg, REAL_TYPE))
7730 /* Optimize round of constant value.  */
7731 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7735 x = TREE_REAL_CST (arg);
7736 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7738 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7741 real_round (&r, TYPE_MODE (type), &x);
7742 return build_real (type, r);
7746 return fold_trunc_transparent_mathfn (fndecl, arg);
7749 /* Fold function call to builtin lround, lroundf or lroundl (or the
7750 corresponding long long versions) and other rounding functions. ARG
7751 is the argument to the call. Return NULL_TREE if no simplification
/* Fold the l*/ll* round-to-integer built-ins (lfloor/lceil/lround and
   the long long variants) with argument ARG: fold finite constants to
   an integer constant when the value fits the result type, rewrite
   lfloor of a nonnegative value to a plain truncation, then defer to
   fold_fixed_mathfn for narrowing/canonicalization.  */
7755 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7757 if (!validate_arg (arg, REAL_TYPE))
7760 /* Optimize lround of constant value.  */
7761 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7763 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7765 if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
7767 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7768 tree ftype = TREE_TYPE (arg);
7769 unsigned HOST_WIDE_INT lo2;
7770 HOST_WIDE_INT hi, lo;
/* Round X according to which built-in this is.  */
7773 switch (DECL_FUNCTION_CODE (fndecl))
7775 CASE_FLT_FN (BUILT_IN_LFLOOR):
7776 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7777 real_floor (&r, TYPE_MODE (ftype), &x);
7780 CASE_FLT_FN (BUILT_IN_LCEIL):
7781 CASE_FLT_FN (BUILT_IN_LLCEIL):
7782 real_ceil (&r, TYPE_MODE (ftype), &x);
7785 CASE_FLT_FN (BUILT_IN_LROUND):
7786 CASE_FLT_FN (BUILT_IN_LLROUND):
7787 real_round (&r, TYPE_MODE (ftype), &x);
/* Only fold when the integer value fits ITYPE without overflow.  */
7794 REAL_VALUE_TO_INT (&lo, &hi, r);
7795 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7796 return build_int_cst_wide (itype, lo2, hi);
7800 switch (DECL_FUNCTION_CODE (fndecl))
7802 CASE_FLT_FN (BUILT_IN_LFLOOR):
7803 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7804 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
7805 if (tree_expr_nonnegative_p (arg))
7806 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7812 return fold_fixed_mathfn (fndecl, arg);
7815 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7816 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7817 the argument to the call. Return NULL_TREE if no simplification can
/* Fold ffs/clz/ctz/popcount/parity (and l/ll variants) of a constant
   integer ARG by computing the bit operation on the two-word
   (lo, hi) HOST_WIDE_INT representation.  */
7821 fold_builtin_bitop (tree fndecl, tree arg)
7823 if (!validate_arg (arg, INTEGER_TYPE))
7826 /* Optimize for constant argument.  */
7827 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7829 HOST_WIDE_INT hi, width, result;
7830 unsigned HOST_WIDE_INT lo;
7833 type = TREE_TYPE (arg);
7834 width = TYPE_PRECISION (type);
7835 lo = TREE_INT_CST_LOW (arg);
7837 /* Clear all the bits that are beyond the type's precision.  */
7838 if (width > HOST_BITS_PER_WIDE_INT)
7840 hi = TREE_INT_CST_HIGH (arg);
7841 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7842 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7847 if (width < HOST_BITS_PER_WIDE_INT)
7848 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7851 switch (DECL_FUNCTION_CODE (fndecl))
7853 CASE_INT_FN (BUILT_IN_FFS):
/* ffs: 1-based index of the lowest set bit; `x & -x' isolates it.  */
7855 result = exact_log2 (lo & -lo) + 1;
7857 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7862 CASE_INT_FN (BUILT_IN_CLZ):
/* clz: leading zero count, from whichever word holds the top set bit;
   a zero input uses the target's CLZ_DEFINED_VALUE_AT_ZERO, if any.  */
7864 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7866 result = width - floor_log2 (lo) - 1;
7867 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7871 CASE_INT_FN (BUILT_IN_CTZ):
7873 result = exact_log2 (lo & -lo);
7875 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7876 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7880 CASE_INT_FN (BUILT_IN_POPCOUNT):
/* popcount/parity: `x &= x - 1' clears the lowest set bit each pass.  */
7883 result++, lo &= lo - 1;
7885 result++, hi &= hi - 1;
7888 CASE_INT_FN (BUILT_IN_PARITY):
7891 result++, lo &= lo - 1;
7893 result++, hi &= hi - 1;
7901 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7907 /* Fold function call to builtin_bswap and the long and long long
7908 variants. Return NULL_TREE if no simplification can be made. */
/* Fold __builtin_bswap32/__builtin_bswap64 of a constant integer ARG
   by moving each byte from shift position S to its mirrored position
   D across the (lo, hi) two-word representation.  */
7910 fold_builtin_bswap (tree fndecl, tree arg)
7912 if (! validate_arg (arg, INTEGER_TYPE))
7915 /* Optimize constant value.  */
7916 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7918 HOST_WIDE_INT hi, width, r_hi = 0;
7919 unsigned HOST_WIDE_INT lo, r_lo = 0;
7922 type = TREE_TYPE (arg);
7923 width = TYPE_PRECISION (type);
7924 lo = TREE_INT_CST_LOW (arg);
7925 hi = TREE_INT_CST_HIGH (arg);
7927 switch (DECL_FUNCTION_CODE (fndecl))
7929 case BUILT_IN_BSWAP32:
7930 case BUILT_IN_BSWAP64:
/* For each byte, extract it from the source word and deposit it at
   the mirrored offset in the result words.  */
7934 for (s = 0; s < width; s += 8)
7936 int d = width - s - 8;
7937 unsigned HOST_WIDE_INT byte;
7939 if (s < HOST_BITS_PER_WIDE_INT)
7940 byte = (lo >> s) & 0xff;
7942 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7944 if (d < HOST_BITS_PER_WIDE_INT)
7947 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
/* A narrow result fits in a single word; otherwise build both words.  */
7957 if (width < HOST_BITS_PER_WIDE_INT)
7958 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7960 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7966 /* Return true if EXPR is the real constant contained in VALUE. */
/* Return true if EXPR is the real constant VALUE, either directly as a
   non-overflowed REAL_CST or as a COMPLEX_CST whose real part equals
   VALUE and whose imaginary part is zero.  */
7969 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
7973 return ((TREE_CODE (expr) == REAL_CST
7974 && !TREE_OVERFLOW (expr)
7975 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
7976 || (TREE_CODE (expr) == COMPLEX_CST
7977 && real_dconstp (TREE_REALPART (expr), value)
7978 && real_zerop (TREE_IMAGPART (expr))));
7981 /* A subroutine of fold_builtin to fold the various logarithmic
7982 functions. Return NULL_TREE if no simplification can me made.
7983 FUNC is the corresponding MPFR logarithm function. */
/* Fold a log/log2/log10-family call FNDECL (ARG); FUNC is the matching
   MPFR evaluator used both for constant folding and to identify which
   logarithm base this is.  Under unsafe math also folds log(e) = 1,
   cancels logN(expN(x)) = x, and turns logN(x**e) into e*logN(x).  */
7986 fold_builtin_logarithm (tree fndecl, tree arg,
7987 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7989 if (validate_arg (arg, REAL_TYPE))
7991 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7993 const enum built_in_function fcode = builtin_mathfn_code (arg);
7995 /* Optimize log(e) = 1.0.  We're never passed an exact 'e',
7996 instead we'll look for 'e' truncated to MODE.  So only do
7997 this if flag_unsafe_math_optimizations is set.  */
7998 if (flag_unsafe_math_optimizations && func == mpfr_log)
8000 const REAL_VALUE_TYPE e_truncated =
8001 real_value_truncate (TYPE_MODE (type), dconste);
8002 if (real_dconstp (arg, &e_truncated))
8003 return build_real (type, dconst1);
8006 /* Calculate the result when the argument is a constant.  */
8007 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8010 /* Special case, optimize logN(expN(x)) = x.  */
8011 if (flag_unsafe_math_optimizations
8012 && ((func == mpfr_log
8013 && (fcode == BUILT_IN_EXP
8014 || fcode == BUILT_IN_EXPF
8015 || fcode == BUILT_IN_EXPL))
8016 || (func == mpfr_log2
8017 && (fcode == BUILT_IN_EXP2
8018 || fcode == BUILT_IN_EXP2F
8019 || fcode == BUILT_IN_EXP2L))
8020 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8021 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8023 /* Optimize logN(func()) for various exponential functions.  We
8024 want to determine the value "x" and the power "exponent" in
8025 order to transform logN(x**exponent) into exponent*logN(x).  */
8026 if (flag_unsafe_math_optimizations)
8028 tree exponent = 0, x = 0;
/* Decompose the inner call into base X and power EXPONENT.  */
8032 CASE_FLT_FN (BUILT_IN_EXP):
8033 /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
8034 x = build_real (type,
8035 real_value_truncate (TYPE_MODE (type), dconste));
8036 exponent = CALL_EXPR_ARG (arg, 0);
8038 CASE_FLT_FN (BUILT_IN_EXP2):
8039 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
8040 x = build_real (type, dconst2);
8041 exponent = CALL_EXPR_ARG (arg, 0);
8043 CASE_FLT_FN (BUILT_IN_EXP10):
8044 CASE_FLT_FN (BUILT_IN_POW10):
8045 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
8046 x = build_real (type, dconst10);
8047 exponent = CALL_EXPR_ARG (arg, 0);
8049 CASE_FLT_FN (BUILT_IN_SQRT):
8050 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
8051 x = CALL_EXPR_ARG (arg, 0);
8052 exponent = build_real (type, dconsthalf);
8054 CASE_FLT_FN (BUILT_IN_CBRT):
8055 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
8056 x = CALL_EXPR_ARG (arg, 0);
8057 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8060 CASE_FLT_FN (BUILT_IN_POW):
8061 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
8062 x = CALL_EXPR_ARG (arg, 0);
8063 exponent = CALL_EXPR_ARG (arg, 1);
8069 /* Now perform the optimization.  */
8072 tree logfn = build_call_expr (fndecl, 1, x);
8073 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8081 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8082 NULL_TREE if no simplification can be made. */
/* Fold hypot/hypotf/hypotl (ARG0, ARG1) of return TYPE: constant-fold
   via MPFR, strip sign operations off either argument, reduce to fabs
   when one argument is zero, and fold hypot(x,x) to fabs(x)*sqrt(2)
   under unsafe math.  */
8085 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8087 tree res, narg0, narg1;
8089 if (!validate_arg (arg0, REAL_TYPE)
8090 || !validate_arg (arg1, REAL_TYPE))
8093 /* Calculate the result when the argument is a constant.  */
8094 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8097 /* If either argument to hypot has a negate or abs, strip that off.
8098 E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
8099 narg0 = fold_strip_sign_ops (arg0);
8100 narg1 = fold_strip_sign_ops (arg1);
/* Rebuild the call with whichever arguments were simplified.  */
8103 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8104 narg1 ? narg1 : arg1);
8107 /* If either argument is zero, hypot is fabs of the other.  */
8108 if (real_zerop (arg0))
8109 return fold_build1 (ABS_EXPR, type, arg1);
8110 else if (real_zerop (arg1))
8111 return fold_build1 (ABS_EXPR, type, arg0);
8113 /* hypot(x,x) -> fabs(x)*sqrt(2).  */
8114 if (flag_unsafe_math_optimizations
8115 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8117 const REAL_VALUE_TYPE sqrt2_trunc
8118 = real_value_truncate (TYPE_MODE (type), dconstsqrt2);
8119 return fold_build2 (MULT_EXPR, type,
8120 fold_build1 (ABS_EXPR, type, arg0),
8121 build_real (type, sqrt2_trunc));
8128 /* Fold a builtin function call to pow, powf, or powl. Return
8129 NULL_TREE if no simplification can be made. */
8131 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8135 if (!validate_arg (arg0, REAL_TYPE)
8136 || !validate_arg (arg1, REAL_TYPE))
8139 /* Calculate the result when the argument is a constant. */
8140 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8143 /* Optimize pow(1.0,y) = 1.0. */
8144 if (real_onep (arg0))
8145 return omit_one_operand (type, build_real (type, dconst1), arg1);
/* The remaining folds require a constant, non-overflowed exponent.  */
8147 if (TREE_CODE (arg1) == REAL_CST
8148 && !TREE_OVERFLOW (arg1))
8150 REAL_VALUE_TYPE cint;
8154 c = TREE_REAL_CST (arg1);
8156 /* Optimize pow(x,0.0) = 1.0. */
8157 if (REAL_VALUES_EQUAL (c, dconst0))
8158 return omit_one_operand (type, build_real (type, dconst1),
8161 /* Optimize pow(x,1.0) = x. */
8162 if (REAL_VALUES_EQUAL (c, dconst1))
8165 /* Optimize pow(x,-1.0) = 1.0/x. */
8166 if (REAL_VALUES_EQUAL (c, dconstm1))
8167 return fold_build2 (RDIV_EXPR, type,
8168 build_real (type, dconst1), arg0);
8170 /* Optimize pow(x,0.5) = sqrt(x). */
8171 if (flag_unsafe_math_optimizations
8172 && REAL_VALUES_EQUAL (c, dconsthalf))
8174 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8176 if (sqrtfn != NULL_TREE)
8177 return build_call_expr (sqrtfn, 1, arg0);
8180 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8181 if (flag_unsafe_math_optimizations)
8183 const REAL_VALUE_TYPE dconstroot
8184 = real_value_truncate (TYPE_MODE (type), dconstthird);
8186 if (REAL_VALUES_EQUAL (c, dconstroot))
8188 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8189 if (cbrtfn != NULL_TREE)
8190 return build_call_expr (cbrtfn, 1, arg0);
8194 /* Check for an integer exponent. */
8195 n = real_to_integer (&c);
8196 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* The exponent is treated as an exact integer only if converting it
   to an integer and back reproduces it identically.  */
8197 if (real_identical (&c, &cint))
8199 /* Attempt to evaluate pow at compile-time. */
8200 if (TREE_CODE (arg0) == REAL_CST
8201 && !TREE_OVERFLOW (arg0))
8206 x = TREE_REAL_CST (arg0);
8207 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
/* An inexact compile-time result is only usable under
   -funsafe-math-optimizations.  */
8208 if (flag_unsafe_math_optimizations || !inexact)
8209 return build_real (type, x);
8212 /* Strip sign ops from even integer powers. */
/* Valid because an even power discards the sign of its base.  */
8213 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8215 tree narg0 = fold_strip_sign_ops (arg0);
8217 return build_call_expr (fndecl, 2, narg0, arg1);
/* Folds below rewrite pow of another math builtin; all are unsafe
   transformations (rounding/special-value behavior may change).  */
8222 if (flag_unsafe_math_optimizations)
8224 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8226 /* Optimize pow(expN(x),y) = expN(x*y). */
8227 if (BUILTIN_EXPONENT_P (fcode))
8229 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8230 tree arg = CALL_EXPR_ARG (arg0, 0);
8231 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8232 return build_call_expr (expfn, 1, arg);
8235 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8236 if (BUILTIN_SQRT_P (fcode))
8238 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8239 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8240 build_real (type, dconsthalf));
8241 return build_call_expr (fndecl, 2, narg0, narg1);
8244 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8245 if (BUILTIN_CBRT_P (fcode))
8247 tree arg = CALL_EXPR_ARG (arg0, 0);
8248 if (tree_expr_nonnegative_p (arg))
8250 const REAL_VALUE_TYPE dconstroot
8251 = real_value_truncate (TYPE_MODE (type), dconstthird);
8252 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8253 build_real (type, dconstroot));
8254 return build_call_expr (fndecl, 2, arg, narg1);
8258 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8259 if (fcode == BUILT_IN_POW
8260 || fcode == BUILT_IN_POWF
8261 || fcode == BUILT_IN_POWL)
8263 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8264 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8265 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8266 return build_call_expr (fndecl, 2, arg00, narg1);
8273 /* Fold a builtin function call to powi, powif, or powil with arguments
8274 ARG0 (real base) and ARG1 (integer exponent); TYPE is the result type.
Return NULL_TREE if no simplification can be made. */
8276 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8277 tree arg0, tree arg1, tree type)
8279 if (!validate_arg (arg0, REAL_TYPE)
8280 || !validate_arg (arg1, INTEGER_TYPE))
8283 /* Optimize powi(1.0,y) = 1.0. */
8284 if (real_onep (arg0))
8285 return omit_one_operand (type, build_real (type, dconst1), arg1);
/* Folds below need the exponent to fit a signed HOST_WIDE_INT.  */
8287 if (host_integerp (arg1, 0))
8289 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8291 /* Evaluate powi at compile-time. */
8292 if (TREE_CODE (arg0) == REAL_CST
8293 && !TREE_OVERFLOW (arg0))
8296 x = TREE_REAL_CST (arg0);
8297 real_powi (&x, TYPE_MODE (type), &x, c);
8298 return build_real (type, x);
8301 /* Optimize powi(x,0) = 1.0. */
8303 return omit_one_operand (type, build_real (type, dconst1),
8306 /* Optimize powi(x,1) = x. */
8310 /* Optimize powi(x,-1) = 1.0/x. */
8312 return fold_build2 (RDIV_EXPR, type,
8313 build_real (type, dconst1), arg0);
8319 /* A subroutine of fold_builtin to fold the various exponent
8320 functions. Return NULL_TREE if no simplification can be made.
8321 FUNC is the corresponding MPFR exponent function; it also identifies
which of exp/exp2/exp10 is being folded (see the fcode checks below). */
8324 fold_builtin_exponent (tree fndecl, tree arg,
8325 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8327 if (validate_arg (arg, REAL_TYPE))
8329 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8332 /* Calculate the result when the argument is a constant. */
8333 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8336 /* Optimize expN(logN(x)) = x. */
/* Unsafe: the round trip is only an identity in exact arithmetic.  */
8337 if (flag_unsafe_math_optimizations)
8339 const enum built_in_function fcode = builtin_mathfn_code (arg);
/* Match the log builtin to the same base as FUNC's exponential.  */
8341 if ((func == mpfr_exp
8342 && (fcode == BUILT_IN_LOG
8343 || fcode == BUILT_IN_LOGF
8344 || fcode == BUILT_IN_LOGL))
8345 || (func == mpfr_exp2
8346 && (fcode == BUILT_IN_LOG2
8347 || fcode == BUILT_IN_LOG2F
8348 || fcode == BUILT_IN_LOG2L))
8349 || (func == mpfr_exp10
8350 && (fcode == BUILT_IN_LOG10
8351 || fcode == BUILT_IN_LOG10F
8352 || fcode == BUILT_IN_LOG10L)))
8353 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8360 /* Return true if VAR is a VAR_DECL or a component thereof. */
8363 var_decl_component_p (tree var)
/* Walk down component/array references to the base object, then test
   whether that base is an SSA variable (SSA_VAR_P).  */
8366 while (handled_component_p (inner))
8367 inner = TREE_OPERAND (inner, 0);
8368 return SSA_VAR_P (inner);
8371 /* Fold function call to builtin memset. DEST, C and LEN are the
8372 arguments; TYPE is the call's result type; IGNORE is true when the
result value is unused. Return NULL_TREE if no simplification can
be made. */
8375 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8378 unsigned HOST_WIDE_INT length, cval;
8380 if (! validate_arg (dest, POINTER_TYPE)
8381 || ! validate_arg (c, INTEGER_TYPE)
8382 || ! validate_arg (len, INTEGER_TYPE))
8385 if (! host_integerp (len, 1))
8388 /* If the LEN parameter is zero, return DEST. */
8389 if (integer_zerop (len))
8390 return omit_one_operand (type, dest, c)
8392 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
/* From here on, attempt to turn the memset into a single scalar
   store to the pointed-to object.  */
8397 if (TREE_CODE (var) != ADDR_EXPR)
8400 var = TREE_OPERAND (var, 0);
8401 if (TREE_THIS_VOLATILE (var))
8404 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8405 && !POINTER_TYPE_P (TREE_TYPE (var)))
8408 if (! var_decl_component_p (var))
/* The store is only valid if LEN covers the whole object and DEST is
   sufficiently aligned for its type.  */
8411 length = tree_low_cst (len, 1);
8412 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8413 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8417 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8420 if (integer_zerop (c))
8424 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8427 cval = tree_low_cst (c, 1);
/* NOTE(review): the split shift below presumably replicates the byte
   value into the high half while avoiding a shift count equal to the
   type width (undefined) on 32-bit HOST_WIDE_INT hosts -- the byte
   replication steps just above are not visible here; confirm.  */
8431 cval |= (cval << 31) << 1;
8434 ret = build_int_cst_type (TREE_TYPE (var), cval);
8435 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8439 return omit_one_operand (type, dest, ret);
8442 /* Fold function call to builtin bzero. DEST and SIZE are its
8443 arguments; IGNORE is true when the result value is unused.
Return NULL_TREE if no simplification can be made. */
8446 fold_builtin_bzero (tree dest, tree size, bool ignore)
8448 if (! validate_arg (dest, POINTER_TYPE)
8449 || ! validate_arg (size, INTEGER_TYPE))
8455 /* New argument list transforming bzero(ptr x, int y) to
8456 memset(ptr x, int 0, size_t y). This is done this way
8457 so that if it isn't expanded inline, we fallback to
8458 calling bzero instead of memset. */
8460 return fold_builtin_memset (dest, integer_zero_node,
8461 fold_convert (sizetype, size),
8462 void_type_node, ignore);
8465 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8466 NULL_TREE if no simplification can be made.
8467 If ENDP is 0, return DEST (like memcpy).
8468 If ENDP is 1, return DEST+LEN (like mempcpy).
8469 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8470 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
(like memmove). */
8474 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8476 tree destvar, srcvar, expr;
8478 if (! validate_arg (dest, POINTER_TYPE)
8479 || ! validate_arg (src, POINTER_TYPE)
8480 || ! validate_arg (len, INTEGER_TYPE))
8483 /* If the LEN parameter is zero, return DEST. */
8484 if (integer_zerop (len))
8485 return omit_one_operand (type, dest, src);
8487 /* If SRC and DEST are the same (and not volatile), return
8488 DEST{,+LEN,+LEN-1}. */
8489 if (operand_equal_p (src, dest, 0))
8493 tree srctype, desttype;
8496 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8497 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8499 /* Both DEST and SRC must be pointer types.
8500 ??? This is what old code did. Is the testing for pointer types
8503 If either SRC is readonly or length is 1, we can use memcpy. */
/* This branch converts a memmove into memcpy when overlap is provably
   impossible (readonly source) or the copy is alignment-covered.  */
8504 if (dest_align && src_align
8505 && (readonly_data_expr (src)
8506 || (host_integerp (len, 1)
8507 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8508 tree_low_cst (len, 1)))))
8510 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8513 return build_call_expr (fn, 3, dest, src, len);
8518 if (!host_integerp (len, 0))
8521 This logic lose for arguments like (type *)malloc (sizeof (type)),
8522 since we strip the casts of up to VOID return value from malloc.
8523 Perhaps we ought to inherit type from non-VOID argument here? */
/* Try to replace the whole copy by a single scalar assignment: both
   pointed-to types must have a constant size exactly equal to LEN.  */
8526 srctype = TREE_TYPE (TREE_TYPE (src));
8527 desttype = TREE_TYPE (TREE_TYPE (dest));
8528 if (!srctype || !desttype
8529 || !TYPE_SIZE_UNIT (srctype)
8530 || !TYPE_SIZE_UNIT (desttype)
8531 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8532 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8533 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8534 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8537 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8538 < (int) TYPE_ALIGN (desttype)
8539 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8540 < (int) TYPE_ALIGN (srctype)))
8544 dest = builtin_save_expr (dest);
8546 srcvar = build_fold_indirect_ref (src);
8547 if (TREE_THIS_VOLATILE (srcvar))
8549 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8551 /* With memcpy, it is possible to bypass aliasing rules, so without
8552 this check i. e. execute/20060930-2.c would be misoptimized, because
8553 it use conflicting alias set to hold argument for the memcpy call.
8554 This check is probably unnecesary with -fno-strict-aliasing.
8555 Similarly for destvar. See also PR29286. */
8556 if (!var_decl_component_p (srcvar)
8557 /* Accept: memcpy (*char_var, "test", 1); that simplify
8559 || is_gimple_min_invariant (srcvar)
8560 || readonly_data_expr (src))
8563 destvar = build_fold_indirect_ref (dest);
8564 if (TREE_THIS_VOLATILE (destvar))
8566 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8568 if (!var_decl_component_p (destvar))
/* Pick a conversion of the source value that matches DESTVAR's type:
   none when the types already agree, an ordinary conversion for
   scalar (integral/pointer) types, else a VIEW_CONVERT_EXPR.  */
8571 if (srctype == desttype
8572 || (gimple_in_ssa_p (cfun)
8573 && tree_ssa_useless_type_conversion_1 (desttype, srctype)))
8575 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8576 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8577 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8578 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8579 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8581 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8582 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8588 if (endp == 0 || endp == 3)
8589 return omit_one_operand (type, dest, expr);
/* For mempcpy/stpcpy-style results, return DEST advanced by LEN
   (LEN-1 for ENDP == 2, adjusted above/below).  */
8595 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8598 len = fold_convert (TREE_TYPE (dest), len);
8599 dest = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
8600 dest = fold_convert (type, dest);
8602 dest = omit_one_operand (type, dest, expr);
8606 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8607 If LEN is not NULL, it represents the length of the string to be
8608 copied. Return NULL_TREE if no simplification can be made. */
8611 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8615 if (!validate_arg (dest, POINTER_TYPE)
8616 || !validate_arg (src, POINTER_TYPE))
8619 /* If SRC and DEST are the same (and not volatile), return DEST. */
8620 if (operand_equal_p (src, dest, 0))
8621 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* Otherwise transform strcpy into memcpy of strlen(SRC) + 1 bytes,
   when the source length is known at compile time.  */
8626 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8632 len = c_strlen (src, 1);
8633 if (! len || TREE_SIDE_EFFECTS (len))
8637 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8638 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8639 build_call_expr (fn, 3, dest, src, len));
8642 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8643 If SLEN is not NULL, it represents the length of the source string.
8644 Return NULL_TREE if no simplification can be made. */
8647 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8651 if (!validate_arg (dest, POINTER_TYPE)
8652 || !validate_arg (src, POINTER_TYPE)
8653 || !validate_arg (len, INTEGER_TYPE))
8656 /* If the LEN parameter is zero, return DEST. */
8657 if (integer_zerop (len))
8658 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8660 /* We can't compare slen with len as constants below if len is not a
8662 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8666 slen = c_strlen (src, 1);
8668 /* Now, we must be passed a constant src ptr parameter. */
8669 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* SLEN becomes the number of bytes strncpy actually copies from SRC,
   including the terminating NUL.  */
8672 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8674 /* We do not support simplification of this case, though we do
8675 support it when expanding trees into RTL. */
8676 /* FIXME: generate a call to __builtin_memset. */
/* SLEN < LEN means strncpy must zero-pad the remainder, which plain
   memcpy does not do -- so bail out here.  */
8677 if (tree_int_cst_lt (slen, len))
8680 /* OK transform into builtin memcpy. */
8681 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8684 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8685 build_call_expr (fn, 3, dest, src, len));
8688 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8689 arguments to the call, and TYPE is its return type.
8690 Return NULL_TREE if no simplification can be made. */
8693 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8695 if (!validate_arg (arg1, POINTER_TYPE)
8696 || !validate_arg (arg2, INTEGER_TYPE)
8697 || !validate_arg (len, INTEGER_TYPE))
8703 if (TREE_CODE (arg2) != INTEGER_CST
8704 || !host_integerp (len, 1))
/* Only fold when the haystack is a constant string and LEN does not
   read past its terminating NUL.  */
8707 p1 = c_getstr (arg1);
8708 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
/* target_char_cast converts ARG2 to a host char; it fails (nonzero)
   when the value cannot be represented.  */
8714 if (target_char_cast (arg2, &c))
8717 r = memchr (p1, c, tree_low_cst (len, 1));
8720 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: return ARG1 advanced by the offset of the match.  */
8722 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (arg1), arg1,
8723 build_int_cst (TREE_TYPE (arg1), r - p1));
8724 return fold_convert (type, tem);
8730 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2
8731 and length LEN. Return NULL_TREE if no simplification can be made. */
8734 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8736 const char *p1, *p2;
8738 if (!validate_arg (arg1, POINTER_TYPE)
8739 || !validate_arg (arg2, POINTER_TYPE)
8740 || !validate_arg (len, INTEGER_TYPE))
8743 /* If the LEN parameter is zero, return zero. */
8744 if (integer_zerop (len))
8745 return omit_two_operands (integer_type_node, integer_zero_node,
8748 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8749 if (operand_equal_p (arg1, arg2, 0))
8750 return omit_one_operand (integer_type_node, integer_zero_node, len);
8752 p1 = c_getstr (arg1);
8753 p2 = c_getstr (arg2);
8755 /* If all arguments are constant, and the value of len is not greater
8756 than the lengths of arg1 and arg2, evaluate at compile-time. */
8757 if (host_integerp (len, 1) && p1 && p2
8758 && compare_tree_int (len, strlen (p1) + 1) <= 0
8759 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8761 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
/* Normalize the host memcmp result to -1/0/1.  */
8764 return integer_one_node;
8766 return integer_minus_one_node;
8768 return integer_zero_node;
8771 /* If len parameter is one, return an expression corresponding to
8772 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8773 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8775 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8776 tree cst_uchar_ptr_node
8777 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8779 tree ind1 = fold_convert (integer_type_node,
8780 build1 (INDIRECT_REF, cst_uchar_node,
8781 fold_convert (cst_uchar_ptr_node,
8783 tree ind2 = fold_convert (integer_type_node,
8784 build1 (INDIRECT_REF, cst_uchar_node,
8785 fold_convert (cst_uchar_ptr_node,
8787 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8793 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8794 Return NULL_TREE if no simplification can be made. */
8797 fold_builtin_strcmp (tree arg1, tree arg2)
8799 const char *p1, *p2;
8801 if (!validate_arg (arg1, POINTER_TYPE)
8802 || !validate_arg (arg2, POINTER_TYPE))
8805 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8806 if (operand_equal_p (arg1, arg2, 0))
8807 return integer_zero_node;
/* c_getstr yields the constant string behind a pointer, or NULL.  */
8809 p1 = c_getstr (arg1);
8810 p2 = c_getstr (arg2);
/* Both strings constant: evaluate on the host, normalized to -1/0/1.  */
8814 const int i = strcmp (p1, p2);
8816 return integer_minus_one_node;
8818 return integer_one_node;
8820 return integer_zero_node;
8823 /* If the second arg is "", return *(const unsigned char*)arg1. */
8824 if (p2 && *p2 == '\0')
8826 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8827 tree cst_uchar_ptr_node
8828 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8830 return fold_convert (integer_type_node,
8831 build1 (INDIRECT_REF, cst_uchar_node,
8832 fold_convert (cst_uchar_ptr_node,
8836 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8837 if (p1 && *p1 == '\0')
8839 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8840 tree cst_uchar_ptr_node
8841 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8843 tree temp = fold_convert (integer_type_node,
8844 build1 (INDIRECT_REF, cst_uchar_node,
8845 fold_convert (cst_uchar_ptr_node,
8847 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8853 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8854 Return NULL_TREE if no simplification can be made. */
8857 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
8859 const char *p1, *p2;
8861 if (!validate_arg (arg1, POINTER_TYPE)
8862 || !validate_arg (arg2, POINTER_TYPE)
8863 || !validate_arg (len, INTEGER_TYPE))
8866 /* If the LEN parameter is zero, return zero. */
8867 if (integer_zerop (len))
8868 return omit_two_operands (integer_type_node, integer_zero_node,
8871 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8872 if (operand_equal_p (arg1, arg2, 0))
8873 return omit_one_operand (integer_type_node, integer_zero_node, len);
8875 p1 = c_getstr (arg1);
8876 p2 = c_getstr (arg2);
/* Both strings and LEN constant: evaluate on the host, normalized to
   -1/0/1.  */
8878 if (host_integerp (len, 1) && p1 && p2)
8880 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8882 return integer_one_node;
8884 return integer_minus_one_node;
8886 return integer_zero_node;
8889 /* If the second arg is "", and the length is greater than zero,
8890 return *(const unsigned char*)arg1. */
8891 if (p2 && *p2 == '\0'
8892 && TREE_CODE (len) == INTEGER_CST
8893 && tree_int_cst_sgn (len) == 1)
8895 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8896 tree cst_uchar_ptr_node
8897 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8899 return fold_convert (integer_type_node,
8900 build1 (INDIRECT_REF, cst_uchar_node,
8901 fold_convert (cst_uchar_ptr_node,
8905 /* If the first arg is "", and the length is greater than zero,
8906 return -*(const unsigned char*)arg2. */
8907 if (p1 && *p1 == '\0'
8908 && TREE_CODE (len) == INTEGER_CST
8909 && tree_int_cst_sgn (len) == 1)
8911 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8912 tree cst_uchar_ptr_node
8913 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8915 tree temp = fold_convert (integer_type_node,
8916 build1 (INDIRECT_REF, cst_uchar_node,
8917 fold_convert (cst_uchar_ptr_node,
8919 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
8922 /* If len parameter is one, return an expression corresponding to
8923 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8924 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8926 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8927 tree cst_uchar_ptr_node
8928 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8930 tree ind1 = fold_convert (integer_type_node,
8931 build1 (INDIRECT_REF, cst_uchar_node,
8932 fold_convert (cst_uchar_ptr_node,
8934 tree ind2 = fold_convert (integer_type_node,
8935 build1 (INDIRECT_REF, cst_uchar_node,
8936 fold_convert (cst_uchar_ptr_node,
8938 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8944 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8945 ARG. TYPE is the call's result type. Return NULL_TREE if no
simplification can be made. */
8948 fold_builtin_signbit (tree arg, tree type)
8952 if (!validate_arg (arg, REAL_TYPE))
8955 /* If ARG is a compile-time constant, determine the result. */
8956 if (TREE_CODE (arg) == REAL_CST
8957 && !TREE_OVERFLOW (arg))
8961 c = TREE_REAL_CST (arg);
8962 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8963 return fold_convert (type, temp);
8966 /* If ARG is non-negative, the result is always zero. */
8967 if (tree_expr_nonnegative_p (arg))
8968 return omit_one_operand (type, integer_zero_node, arg);
8970 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
/* With signed zeros, -0.0 < 0.0 is false but signbit(-0.0) is set, so
   the comparison rewrite is only valid without them.  */
8971 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8972 return fold_build2 (LT_EXPR, type, arg,
8973 build_real (TREE_TYPE (arg), dconst0));
8978 /* Fold function call to builtin copysign, copysignf or copysignl with
8979 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
be made. */
8983 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
8987 if (!validate_arg (arg1, REAL_TYPE)
8988 || !validate_arg (arg2, REAL_TYPE))
8991 /* copysign(X,X) is X. */
8992 if (operand_equal_p (arg1, arg2, 0))
8993 return fold_convert (type, arg1);
8995 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8996 if (TREE_CODE (arg1) == REAL_CST
8997 && TREE_CODE (arg2) == REAL_CST
8998 && !TREE_OVERFLOW (arg1)
8999 && !TREE_OVERFLOW (arg2))
9001 REAL_VALUE_TYPE c1, c2;
9003 c1 = TREE_REAL_CST (arg1);
9004 c2 = TREE_REAL_CST (arg2);
9005 /* c1.sign := c2.sign. */
9006 real_copysign (&c1, &c2);
9007 return build_real (type, c1);
9010 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9011 Remember to evaluate Y for side-effects. */
9012 if (tree_expr_nonnegative_p (arg2))
9013 return omit_one_operand (type,
9014 fold_build1 (ABS_EXPR, type, arg1),
9017 /* Strip sign changing operations for the first argument. */
/* ARG1's sign is discarded anyway, so negate/abs wrappers on it are
   irrelevant; fold_strip_sign_ops returns NULL_TREE when nothing
   changed.  */
9018 tem = fold_strip_sign_ops (arg1);
9020 return build_call_expr (fndecl, 2, tem, arg2);
9025 /* Fold a call to builtin isascii with argument ARG. Returns the folded
expression, or NULL_TREE if no simplification can be made. */
9028 fold_builtin_isascii (tree arg)
9030 if (!validate_arg (arg, INTEGER_TYPE))
9034 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9035 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9036 build_int_cst (NULL_TREE,
9037 ~ (unsigned HOST_WIDE_INT) 0x7f));
9038 return fold_build2 (EQ_EXPR, integer_type_node,
9039 arg, integer_zero_node);
9043 /* Fold a call to builtin toascii with argument ARG. Returns the folded
expression, or NULL_TREE if no simplification can be made. */
9046 fold_builtin_toascii (tree arg)
9048 if (!validate_arg (arg, INTEGER_TYPE))
9051 /* Transform toascii(c) -> (c & 0x7f). */
9052 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9053 build_int_cst (NULL_TREE, 0x7f));
9056 /* Fold a call to builtin isdigit with argument ARG. Returns the folded
expression, or NULL_TREE if no simplification can be made. */
9059 fold_builtin_isdigit (tree arg)
9061 if (!validate_arg (arg, INTEGER_TYPE))
9065 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9066 /* According to the C standard, isdigit is unaffected by locale.
9067 However, it definitely is affected by the target character set. */
9068 unsigned HOST_WIDE_INT target_digit0
9069 = lang_hooks.to_target_charset ('0');
/* to_target_charset reporting 0 means the conversion failed; give up
   rather than fold with a bogus digit base.  */
9071 if (target_digit0 == 0)
/* The unsigned subtraction makes both "c < '0'" and "c > '9'" show up
   as a value greater than 9, so one comparison suffices.  */
9074 arg = fold_convert (unsigned_type_node, arg);
9075 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9076 build_int_cst (unsigned_type_node, target_digit0));
9077 return fold_build2 (LE_EXPR, integer_type_node, arg,
9078 build_int_cst (unsigned_type_node, 9));
9082 /* Fold a call to fabs, fabsf or fabsl with argument ARG; TYPE is the
result type. Constant arguments are folded outright, others become an
ABS_EXPR. */
9085 fold_builtin_fabs (tree arg, tree type)
9087 if (!validate_arg (arg, REAL_TYPE))
9090 arg = fold_convert (type, arg);
9091 if (TREE_CODE (arg) == REAL_CST)
9092 return fold_abs_const (arg, type);
9093 return fold_build1 (ABS_EXPR, type, arg);
9096 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG; TYPE is
the result type. Constant arguments are folded outright, others become
an ABS_EXPR. */
9099 fold_builtin_abs (tree arg, tree type)
9101 if (!validate_arg (arg, INTEGER_TYPE))
9104 arg = fold_convert (type, arg);
9105 if (TREE_CODE (arg) == INTEGER_CST)
9106 return fold_abs_const (arg, type);
9107 return fold_build1 (ABS_EXPR, type, arg);
9110 /* Fold a call to builtin fmin or fmax with arguments ARG0 and ARG1;
TYPE is the result type and MAX selects fmax over fmin. Returns the
folded expression, or NULL_TREE if no simplification can be made. */
9113 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9115 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9117 /* Calculate the result when the argument is a constant. */
9118 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9123 /* If either argument is NaN, return the other one. Avoid the
9124 transformation if we get (and honor) a signalling NaN. Using
9125 omit_one_operand() ensures we create a non-lvalue. */
9126 if (TREE_CODE (arg0) == REAL_CST
9127 && real_isnan (&TREE_REAL_CST (arg0))
9128 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9129 || ! TREE_REAL_CST (arg0).signalling))
9130 return omit_one_operand (type, arg1, arg0);
9131 if (TREE_CODE (arg1) == REAL_CST
9132 && real_isnan (&TREE_REAL_CST (arg1))
9133 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9134 || ! TREE_REAL_CST (arg1).signalling))
9135 return omit_one_operand (type, arg0, arg1);
9137 /* Transform fmin/fmax(x,x) -> x. */
9138 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9139 return omit_one_operand (type, arg0, arg1);
9141 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9142 functions to return the numeric arg if the other one is NaN.
9143 These tree codes don't honor that, so only transform if
9144 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9145 handled, so we don't have to worry about it either. */
9146 if (flag_finite_math_only)
9147 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9148 fold_convert (type, arg0),
9149 fold_convert (type, arg1));
9154 /* Fold a call to builtin carg(a+bi) -> atan2(b,a); TYPE is the (real)
result type. Returns NULL_TREE when no atan2 builtin is available or
the argument is not complex. */
9157 fold_builtin_carg (tree arg, tree type)
9159 if (validate_arg (arg, COMPLEX_TYPE))
9161 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
/* Save ARG so the real and imaginary extractions do not evaluate it
   twice.  */
9165 tree new_arg = builtin_save_expr (arg);
9166 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9167 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9168 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9175 /* Fold a call to builtin logb/ilogb with argument ARG; RETTYPE is the
result type (real for logb, integer for ilogb). Only constant
arguments are folded. Returns NULL_TREE otherwise. */
9178 fold_builtin_logb (tree arg, tree rettype)
9180 if (! validate_arg (arg, REAL_TYPE))
9185 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9187 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9193 /* If arg is Inf or NaN and we're logb, return it. */
/* RETTYPE distinguishes logb (real result) from ilogb (integer).  */
9194 if (TREE_CODE (rettype) == REAL_TYPE)
9195 return fold_convert (rettype, arg);
9196 /* Fall through... */
9198 /* Zero may set errno and/or raise an exception for logb, also
9199 for ilogb we don't know FP_ILOGB0. */
9202 /* For normal numbers, proceed iff radix == 2. In GCC,
9203 normalized significands are in the range [0.5, 1.0). We
9204 want the exponent as if they were [1.0, 2.0) so get the
9205 exponent and subtract 1. */
9206 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9207 return fold_convert (rettype, build_int_cst (NULL_TREE,
9208 REAL_EXP (value)-1));
9216 /* Fold a call to builtin significand with argument ARG, if radix == 2;
RETTYPE is the result type. Only constant arguments are folded.
Returns NULL_TREE otherwise. */
9219 fold_builtin_significand (tree arg, tree rettype)
9221 if (! validate_arg (arg, REAL_TYPE))
9226 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9228 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9235 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9236 return fold_convert (rettype, arg);
9238 /* For normal numbers, proceed iff radix == 2. */
9239 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9241 REAL_VALUE_TYPE result = *value;
9242 /* In GCC, normalized significands are in the range [0.5,
9243 1.0). We want them to be [1.0, 2.0) so set the
exponent to 1 and rebuild the constant. */
9245 SET_REAL_EXP (&result, 1);
9246 return build_real (rettype, result);
9255 /* Fold a call to builtin frexp with arguments ARG0 (value) and ARG1
(int* exponent out-parameter); RETTYPE is the result type. We can
assume the base is 2. Only constant ARG0 is folded; the result is a
COMPOUND_EXPR storing the exponent through ARG1 and yielding the
fraction. Returns NULL_TREE otherwise. */
9258 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9260 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9265 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9268 arg1 = build_fold_indirect_ref (arg1);
9270 /* Proceed if a valid pointer type was passed in. */
9271 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9273 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9279 /* For +-0, return (*exp = 0, +-0). */
9280 exp = integer_zero_node;
9285 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9286 return omit_one_operand (rettype, arg0, arg1);
9289 /* Since the frexp function always expects base 2, and in
9290 GCC normalized significands are already in the range
9291 [0.5, 1.0), we have exactly what frexp wants. */
9292 REAL_VALUE_TYPE frac_rvt = *value;
9293 SET_REAL_EXP (&frac_rvt, 0);
9294 frac = build_real (rettype, frac_rvt);
9295 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9302 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9303 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
/* Mark the store so later folding does not drop it.  */
9304 TREE_SIDE_EFFECTS (arg1) = 1;
9305 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9311 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9312 then we can assume the base is two. If it's false, then we have to
9313 check the mode of the TYPE parameter in certain cases. */
9316 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9318 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9323 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9324 if (real_zerop (arg0) || integer_zerop (arg1)
9325 || (TREE_CODE (arg0) == REAL_CST
9326 && (real_isnan (&TREE_REAL_CST (arg0))
9327 || real_isinf (&TREE_REAL_CST (arg0)))))
9328 return omit_one_operand (type, arg0, arg1);
9330 /* If both arguments are constant, then try to evaluate it. */
/* For scalbn/scalbln (LDEXP false) this is only valid when the type's
   radix is 2, since those functions scale by the radix.  */
9331 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9332 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9333 && host_integerp (arg1, 0))
9335 /* Bound the maximum adjustment to twice the range of the
9336 mode's valid exponents. Use abs to ensure the range is
9337 positive as a sanity check. */
9338 const long max_exp_adj = 2 *
9339 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9340 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9342 /* Get the user-requested adjustment. */
9343 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9345 /* The requested adjustment must be inside this range. This
9346 is a preliminary cap to avoid things like overflow, we
9347 may still fail to compute the result for other reasons. */
9348 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9350 REAL_VALUE_TYPE initial_result;
9352 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9354 /* Ensure we didn't overflow. */
9355 if (! real_isinf (&initial_result))
9357 const REAL_VALUE_TYPE trunc_result
9358 = real_value_truncate (TYPE_MODE (type), initial_result);
9360 /* Only proceed if the target mode can hold the
result exactly; otherwise folding would change the value. */
9362 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9363 return build_real (type, trunc_result);
9372 /* Fold a call to builtin modf. */
/* Constant-folds modf(arg0, arg1): returns (*arg1 = trunc(arg0), frac).
   NOTE(review): gappy listing — the rvc_nan/rvc_inf/rvc_normal switch
   labels, braces and break;s between the cases below are missing.  */
9375 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9377 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9382 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9385 arg1 = build_fold_indirect_ref (arg1);
9387 /* Proceed if a valid pointer type was passed in. */
9388 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9390 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9391 REAL_VALUE_TYPE trunc, frac;
9397 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9398 trunc = frac = *value;
9401 /* For +-Inf, return (*arg1 = arg0, +-0). */
9403 frac.sign = value->sign;
9407 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9408 real_trunc (&trunc, VOIDmode, value);
9409 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9410 /* If the original number was negative and already
9411 integral, then the fractional part is -0.0. */
9412 if (value->sign && frac.cl == rvc_zero)
9413 frac.sign = value->sign;
9417 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9418 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9419 build_real (rettype, trunc))
/* NOTE(review): trailing ");" of the fold_build2 call appears split by the
   listing; left as extracted.  */;
9420 TREE_SIDE_EFFECTS (arg1) = 1;
9421 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9422 build_real (rettype, frac));
9428 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9429 ARG is the argument for the call. */
/* Folds the classification to a constant when ARG is a REAL_CST, or to 0/1
   when the mode honors neither NaNs nor infinities; isnan otherwise folds
   to an UNORDERED_EXPR self-comparison.  */
9432 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9434 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9437 if (!validate_arg (arg, REAL_TYPE))
9439 error ("non-floating-point argument to function %qs",
9440 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9441 return error_mark_node;
9444 switch (builtin_index)
9446 case BUILT_IN_ISINF:
9447 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9448 return omit_one_operand (type, integer_zero_node, arg);
9450 if (TREE_CODE (arg) == REAL_CST)
9452 r = TREE_REAL_CST (arg);
/* isinf returns +1 for +Inf and -1 for -Inf (glibc convention).  */
9453 if (real_isinf (&r))
9454 return real_compare (GT_EXPR, &r, &dconst0)
9455 ? integer_one_node : integer_minus_one_node;
9457 return integer_zero_node;
9462 case BUILT_IN_FINITE:
9463 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9464 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9465 return omit_one_operand (type, integer_one_node, arg);
9467 if (TREE_CODE (arg) == REAL_CST)
9469 r = TREE_REAL_CST (arg);
9470 return real_isinf (&r) || real_isnan (&r)
9471 ? integer_zero_node : integer_one_node;
9476 case BUILT_IN_ISNAN:
9477 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9478 return omit_one_operand (type, integer_zero_node, arg);
9480 if (TREE_CODE (arg) == REAL_CST)
9482 r = TREE_REAL_CST (arg);
9483 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* x != x is true exactly for NaN; UNORDERED_EXPR expresses that safely.
   builtin_save_expr guards against double evaluation of ARG.  */
9486 arg = builtin_save_expr (arg);
9487 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9494 /* Fold a call to an unordered comparison function such as
9495 __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
9496 being called and ARG0 and ARG1 are the arguments for the call.
9497 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9498 the opposite of the desired result.  UNORDERED_CODE is used
9499 for modes that can hold NaNs and ORDERED_CODE is used for
/* NOTE(review): final comment line (9500, "the rest") missing from listing.  */
9503 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9504 enum tree_code unordered_code,
9505 enum tree_code ordered_code)
9507 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9508 enum tree_code code;
9510 enum tree_code code0, code1;
9511 tree cmp_type = NULL_TREE;
9513 type0 = TREE_TYPE (arg0);
9514 type1 = TREE_TYPE (arg1);
9516 code0 = TREE_CODE (type0);
9517 code1 = TREE_CODE (type1);
/* Pick a common comparison type per the usual arithmetic conversions:
   wider real wins; real wins over integer; otherwise it's an error.  */
9519 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9520 /* Choose the wider of two real types. */
9521 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9523 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9525 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9529 error ("non-floating-point argument to function %qs",
9530 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
9531 return error_mark_node;
9534 arg0 = fold_convert (cmp_type, arg0);
9535 arg1 = fold_convert (cmp_type, arg1);
9537 if (unordered_code == UNORDERED_EXPR)
9539 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9540 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9541 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
/* The codes encode the OPPOSITE of the desired result, hence the NOT.  */
9544 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9546 return fold_build1 (TRUTH_NOT_EXPR, type,
9547 fold_build2 (code, type, arg0, arg1));
9550 /* Fold a call to built-in function FNDECL with 0 arguments.
9551 IGNORE is true if the result of the function call is ignored.  This
9552 function returns NULL_TREE if no simplification was possible. */
9555 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9557 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9558 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* NOTE(review): "switch (fcode)" line is missing from this gappy listing.  */
9561 CASE_FLT_FN (BUILT_IN_INF):
9562 case BUILT_IN_INFD32:
9563 case BUILT_IN_INFD64:
9564 case BUILT_IN_INFD128:
9565 return fold_builtin_inf (type, true);
9567 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9568 return fold_builtin_inf (type, false);
9570 case BUILT_IN_CLASSIFY_TYPE:
9571 return fold_builtin_classify_type (NULL_TREE);
9579 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9580 IGNORE is true if the result of the function call is ignored.  This
9581 function returns NULL_TREE if no simplification was possible. */
/* Single dispatcher: each case delegates to a specialized fold_builtin_* /
   do_mpfr_* helper; NULL_TREE falls through when nothing applies.
   NOTE(review): gappy listing — "switch (fcode)", break;s and the default
   case are not visible here.  */
9584 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9586 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9587 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9591 case BUILT_IN_CONSTANT_P:
9593 tree val = fold_builtin_constant_p (arg0);
9595 /* Gimplification will pull the CALL_EXPR for the builtin out of
9596 an if condition.  When not optimizing, we'll not CSE it back.
9597 To avoid link error types of regressions, return false now. */
9598 if (!val && !optimize)
9599 val = integer_zero_node;
9604 case BUILT_IN_CLASSIFY_TYPE:
9605 return fold_builtin_classify_type (arg0);
9607 case BUILT_IN_STRLEN:
9608 return fold_builtin_strlen (arg0);
9610 CASE_FLT_FN (BUILT_IN_FABS):
9611 return fold_builtin_fabs (arg0, type);
9615 case BUILT_IN_LLABS:
9616 case BUILT_IN_IMAXABS:
9617 return fold_builtin_abs (arg0, type);
9619 CASE_FLT_FN (BUILT_IN_CONJ):
9620 if (validate_arg (arg0, COMPLEX_TYPE))
9621 return fold_build1 (CONJ_EXPR, type, arg0);
9624 CASE_FLT_FN (BUILT_IN_CREAL):
9625 if (validate_arg (arg0, COMPLEX_TYPE))
/* NOTE(review): stray double semicolon ";;" at end of next line.  */
9626 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
9629 CASE_FLT_FN (BUILT_IN_CIMAG):
9630 if (validate_arg (arg0, COMPLEX_TYPE))
9631 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9634 CASE_FLT_FN (BUILT_IN_CCOS):
9635 CASE_FLT_FN (BUILT_IN_CCOSH):
9636 /* These functions are "even", i.e. f(x) == f(-x). */
9637 if (validate_arg (arg0, COMPLEX_TYPE))
9639 tree narg = fold_strip_sign_ops (arg0);
9641 return build_call_expr (fndecl, 1, narg);
9645 CASE_FLT_FN (BUILT_IN_CABS):
9646 return fold_builtin_cabs (arg0, type, fndecl);
9648 CASE_FLT_FN (BUILT_IN_CARG):
9649 return fold_builtin_carg (arg0, type);
9651 CASE_FLT_FN (BUILT_IN_SQRT):
9652 return fold_builtin_sqrt (arg0, type);
9654 CASE_FLT_FN (BUILT_IN_CBRT):
9655 return fold_builtin_cbrt (arg0, type);
/* The do_mpfr_arg1 calls below evaluate at compile time via MPFR; the two
   REAL_VALUE_TYPE pointers bound the valid domain, the final flag selects
   whether the bounds are inclusive.  */
9657 CASE_FLT_FN (BUILT_IN_ASIN):
9658 if (validate_arg (arg0, REAL_TYPE))
9659 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9660 &dconstm1, &dconst1, true);
9663 CASE_FLT_FN (BUILT_IN_ACOS):
9664 if (validate_arg (arg0, REAL_TYPE))
9665 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9666 &dconstm1, &dconst1, true);
9669 CASE_FLT_FN (BUILT_IN_ATAN):
9670 if (validate_arg (arg0, REAL_TYPE))
9671 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9674 CASE_FLT_FN (BUILT_IN_ASINH):
9675 if (validate_arg (arg0, REAL_TYPE))
9676 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9679 CASE_FLT_FN (BUILT_IN_ACOSH):
9680 if (validate_arg (arg0, REAL_TYPE))
9681 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9682 &dconst1, NULL, true);
9685 CASE_FLT_FN (BUILT_IN_ATANH):
9686 if (validate_arg (arg0, REAL_TYPE))
9687 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9688 &dconstm1, &dconst1, false);
9691 CASE_FLT_FN (BUILT_IN_SIN):
9692 if (validate_arg (arg0, REAL_TYPE))
9693 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9696 CASE_FLT_FN (BUILT_IN_COS):
9697 return fold_builtin_cos (arg0, type, fndecl);
9700 CASE_FLT_FN (BUILT_IN_TAN):
9701 return fold_builtin_tan (arg0, type);
9703 CASE_FLT_FN (BUILT_IN_CEXP):
9704 return fold_builtin_cexp (arg0, type);
9706 CASE_FLT_FN (BUILT_IN_CEXPI):
9707 if (validate_arg (arg0, REAL_TYPE))
9708 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9711 CASE_FLT_FN (BUILT_IN_SINH):
9712 if (validate_arg (arg0, REAL_TYPE))
9713 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9716 CASE_FLT_FN (BUILT_IN_COSH):
9717 return fold_builtin_cosh (arg0, type, fndecl);
9719 CASE_FLT_FN (BUILT_IN_TANH):
9720 if (validate_arg (arg0, REAL_TYPE))
9721 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9724 CASE_FLT_FN (BUILT_IN_ERF):
9725 if (validate_arg (arg0, REAL_TYPE))
9726 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9729 CASE_FLT_FN (BUILT_IN_ERFC):
9730 if (validate_arg (arg0, REAL_TYPE))
9731 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9734 CASE_FLT_FN (BUILT_IN_TGAMMA):
9735 if (validate_arg (arg0, REAL_TYPE))
9736 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9739 CASE_FLT_FN (BUILT_IN_EXP):
9740 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
9742 CASE_FLT_FN (BUILT_IN_EXP2):
9743 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
9745 CASE_FLT_FN (BUILT_IN_EXP10):
9746 CASE_FLT_FN (BUILT_IN_POW10):
9747 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
9749 CASE_FLT_FN (BUILT_IN_EXPM1):
9750 if (validate_arg (arg0, REAL_TYPE))
9751 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9754 CASE_FLT_FN (BUILT_IN_LOG):
9755 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
9757 CASE_FLT_FN (BUILT_IN_LOG2):
9758 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
9760 CASE_FLT_FN (BUILT_IN_LOG10):
9761 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
9763 CASE_FLT_FN (BUILT_IN_LOG1P):
9764 if (validate_arg (arg0, REAL_TYPE))
9765 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9766 &dconstm1, NULL, false);
9769 CASE_FLT_FN (BUILT_IN_NAN):
9770 case BUILT_IN_NAND32:
9771 case BUILT_IN_NAND64:
9772 case BUILT_IN_NAND128:
9773 return fold_builtin_nan (arg0, type, true);
9775 CASE_FLT_FN (BUILT_IN_NANS):
9776 return fold_builtin_nan (arg0, type, false);
9778 CASE_FLT_FN (BUILT_IN_FLOOR):
9779 return fold_builtin_floor (fndecl, arg0);
9781 CASE_FLT_FN (BUILT_IN_CEIL):
9782 return fold_builtin_ceil (fndecl, arg0);
9784 CASE_FLT_FN (BUILT_IN_TRUNC):
9785 return fold_builtin_trunc (fndecl, arg0);
9787 CASE_FLT_FN (BUILT_IN_ROUND):
9788 return fold_builtin_round (fndecl, arg0);
9790 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9791 CASE_FLT_FN (BUILT_IN_RINT):
9792 return fold_trunc_transparent_mathfn (fndecl, arg0);
9794 CASE_FLT_FN (BUILT_IN_LCEIL):
9795 CASE_FLT_FN (BUILT_IN_LLCEIL):
9796 CASE_FLT_FN (BUILT_IN_LFLOOR):
9797 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9798 CASE_FLT_FN (BUILT_IN_LROUND):
9799 CASE_FLT_FN (BUILT_IN_LLROUND):
9800 return fold_builtin_int_roundingfn (fndecl, arg0);
9802 CASE_FLT_FN (BUILT_IN_LRINT):
9803 CASE_FLT_FN (BUILT_IN_LLRINT):
9804 return fold_fixed_mathfn (fndecl, arg0);
9806 case BUILT_IN_BSWAP32:
9807 case BUILT_IN_BSWAP64:
9808 return fold_builtin_bswap (fndecl, arg0);
9810 CASE_INT_FN (BUILT_IN_FFS):
9811 CASE_INT_FN (BUILT_IN_CLZ):
9812 CASE_INT_FN (BUILT_IN_CTZ):
9813 CASE_INT_FN (BUILT_IN_POPCOUNT):
9814 CASE_INT_FN (BUILT_IN_PARITY):
9815 return fold_builtin_bitop (fndecl, arg0);
9817 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9818 return fold_builtin_signbit (arg0, type);
9820 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9821 return fold_builtin_significand (arg0, type);
9823 CASE_FLT_FN (BUILT_IN_ILOGB):
9824 CASE_FLT_FN (BUILT_IN_LOGB):
9825 return fold_builtin_logb (arg0, type);
9827 case BUILT_IN_ISASCII:
9828 return fold_builtin_isascii (arg0);
9830 case BUILT_IN_TOASCII:
9831 return fold_builtin_toascii (arg0);
9833 case BUILT_IN_ISDIGIT:
9834 return fold_builtin_isdigit (arg0);
9836 CASE_FLT_FN (BUILT_IN_FINITE):
9837 case BUILT_IN_FINITED32:
9838 case BUILT_IN_FINITED64:
9839 case BUILT_IN_FINITED128:
9840 return fold_builtin_classify (fndecl, arg0, BUILT_IN_FINITE);
9842 CASE_FLT_FN (BUILT_IN_ISINF):
9843 case BUILT_IN_ISINFD32:
9844 case BUILT_IN_ISINFD64:
9845 case BUILT_IN_ISINFD128:
9846 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
9848 CASE_FLT_FN (BUILT_IN_ISNAN):
9849 case BUILT_IN_ISNAND32:
9850 case BUILT_IN_ISNAND64:
9851 case BUILT_IN_ISNAND128:
9852 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
9854 case BUILT_IN_PRINTF:
9855 case BUILT_IN_PRINTF_UNLOCKED:
9856 case BUILT_IN_VPRINTF:
9857 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
9867 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9868 IGNORE is true if the result of the function call is ignored.  This
9869 function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): gappy listing — "switch (fcode)", break;s and the default/
   closing braces are not visible here.  */
9872 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
9874 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9875 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9880 CASE_FLT_FN (BUILT_IN_ATAN2):
9881 if (validate_arg (arg0, REAL_TYPE)
9882 && validate_arg(arg1, REAL_TYPE))
9883 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9886 CASE_FLT_FN (BUILT_IN_FDIM):
9887 if (validate_arg (arg0, REAL_TYPE)
9888 && validate_arg(arg1, REAL_TYPE))
9889 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9892 CASE_FLT_FN (BUILT_IN_HYPOT):
9893 return fold_builtin_hypot (fndecl, arg0, arg1, type);
9895 CASE_FLT_FN (BUILT_IN_LDEXP):
9896 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
9897 CASE_FLT_FN (BUILT_IN_SCALBN):
9898 CASE_FLT_FN (BUILT_IN_SCALBLN):
9899 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
9901 CASE_FLT_FN (BUILT_IN_FREXP):
9902 return fold_builtin_frexp (arg0, arg1, type);
9904 CASE_FLT_FN (BUILT_IN_MODF):
9905 return fold_builtin_modf (arg0, arg1, type);
9907 case BUILT_IN_BZERO:
9908 return fold_builtin_bzero (arg0, arg1, ignore);
9910 case BUILT_IN_FPUTS:
9911 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
9913 case BUILT_IN_FPUTS_UNLOCKED:
9914 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
9916 case BUILT_IN_STRSTR:
9917 return fold_builtin_strstr (arg0, arg1, type);
9919 case BUILT_IN_STRCAT:
9920 return fold_builtin_strcat (arg0, arg1);
9922 case BUILT_IN_STRSPN:
9923 return fold_builtin_strspn (arg0, arg1);
9925 case BUILT_IN_STRCSPN:
9926 return fold_builtin_strcspn (arg0, arg1);
9928 case BUILT_IN_STRCHR:
9929 case BUILT_IN_INDEX:
9930 return fold_builtin_strchr (arg0, arg1, type);
9932 case BUILT_IN_STRRCHR:
9933 case BUILT_IN_RINDEX:
9934 return fold_builtin_strrchr (arg0, arg1, type);
9936 case BUILT_IN_STRCPY:
9937 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
9939 case BUILT_IN_STRCMP:
9940 return fold_builtin_strcmp (arg0, arg1);
9942 case BUILT_IN_STRPBRK:
9943 return fold_builtin_strpbrk (arg0, arg1, type);
9945 case BUILT_IN_EXPECT:
9946 return fold_builtin_expect (arg0);
9948 CASE_FLT_FN (BUILT_IN_POW):
9949 return fold_builtin_pow (fndecl, arg0, arg1, type);
9951 CASE_FLT_FN (BUILT_IN_POWI):
9952 return fold_builtin_powi (fndecl, arg0, arg1, type);
9954 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9955 return fold_builtin_copysign (fndecl, arg0, arg1, type);
9957 CASE_FLT_FN (BUILT_IN_FMIN):
9958 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
9960 CASE_FLT_FN (BUILT_IN_FMAX):
9961 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
/* The unordered comparisons pass the INVERSE tree codes; see
   fold_builtin_unordered_cmp, which applies TRUTH_NOT_EXPR.  */
9963 case BUILT_IN_ISGREATER:
9964 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
9965 case BUILT_IN_ISGREATEREQUAL:
9966 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
9967 case BUILT_IN_ISLESS:
9968 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
9969 case BUILT_IN_ISLESSEQUAL:
9970 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
9971 case BUILT_IN_ISLESSGREATER:
9972 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9973 case BUILT_IN_ISUNORDERED:
9974 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
9977 /* We do the folding for va_start in the expander. */
9978 case BUILT_IN_VA_START:
9981 case BUILT_IN_SPRINTF:
9982 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
9984 case BUILT_IN_OBJECT_SIZE:
9985 return fold_builtin_object_size (arg0, arg1);
9987 case BUILT_IN_PRINTF:
9988 case BUILT_IN_PRINTF_UNLOCKED:
9989 case BUILT_IN_VPRINTF:
9990 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
9992 case BUILT_IN_PRINTF_CHK:
9993 case BUILT_IN_VPRINTF_CHK:
/* For *_chk, arg0 is the flag operand; it must be a side-effect-free
   integer before it can be dropped from the folded call.  */
9994 if (!validate_arg (arg0, INTEGER_TYPE)
9995 || TREE_SIDE_EFFECTS (arg0))
9998 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10001 case BUILT_IN_FPRINTF:
10002 case BUILT_IN_FPRINTF_UNLOCKED:
10003 case BUILT_IN_VFPRINTF:
10004 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10013 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10014 and ARG2.  IGNORE is true if the result of the function call is ignored.
10015 This function returns NULL_TREE if no simplification was possible. */
/* NOTE(review): gappy listing — "switch (fcode)", break;s and the default
   case are not visible here.  */
10018 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10020 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10021 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10025 CASE_FLT_FN (BUILT_IN_SINCOS):
10026 return fold_builtin_sincos (arg0, arg1, arg2);
10028 CASE_FLT_FN (BUILT_IN_FMA):
10029 if (validate_arg (arg0, REAL_TYPE)
10030 && validate_arg(arg1, REAL_TYPE)
10031 && validate_arg(arg2, REAL_TYPE))
10032 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10035 case BUILT_IN_MEMSET:
10036 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
/* bcopy(src, dst, n) swaps its pointer operands relative to memmove,
   hence arg1/arg0 order below; endp=3 selects memmove semantics.  */
10038 case BUILT_IN_BCOPY:
10039 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10041 case BUILT_IN_MEMCPY:
10042 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10044 case BUILT_IN_MEMPCPY:
10045 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10047 case BUILT_IN_MEMMOVE:
10048 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10050 case BUILT_IN_STRNCAT:
10051 return fold_builtin_strncat (arg0, arg1, arg2);
10053 case BUILT_IN_STRNCPY:
10054 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10056 case BUILT_IN_STRNCMP:
10057 return fold_builtin_strncmp (arg0, arg1, arg2);
10059 case BUILT_IN_MEMCHR:
10060 return fold_builtin_memchr (arg0, arg1, arg2, type);
10062 case BUILT_IN_BCMP:
10063 case BUILT_IN_MEMCMP:
/* NOTE(review): stray double semicolon ";;" at end of next line.  */
10064 return fold_builtin_memcmp (arg0, arg1, arg2);;
10066 case BUILT_IN_SPRINTF:
10067 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10069 case BUILT_IN_STRCPY_CHK:
10070 case BUILT_IN_STPCPY_CHK:
10071 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10074 case BUILT_IN_STRCAT_CHK:
10075 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10077 case BUILT_IN_PRINTF_CHK:
10078 case BUILT_IN_VPRINTF_CHK:
/* The _chk flag operand must be a side-effect-free integer to be dropped.  */
10079 if (!validate_arg (arg0, INTEGER_TYPE)
10080 || TREE_SIDE_EFFECTS (arg0))
10083 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10086 case BUILT_IN_FPRINTF:
10087 case BUILT_IN_FPRINTF_UNLOCKED:
10088 case BUILT_IN_VFPRINTF:
10089 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10091 case BUILT_IN_FPRINTF_CHK:
10092 case BUILT_IN_VFPRINTF_CHK:
10093 if (!validate_arg (arg1, INTEGER_TYPE)
10094 || TREE_SIDE_EFFECTS (arg1))
10097 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10106 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10107 ARG2, and ARG3.  IGNORE is true if the result of the function call is
10108 ignored.  This function returns NULL_TREE if no simplification was
/* NOTE(review): gappy listing — "possible. */", the "static tree" line,
   "switch (fcode)" and the default case are not visible here.  */
10112 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10115 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10119 case BUILT_IN_MEMCPY_CHK:
10120 case BUILT_IN_MEMPCPY_CHK:
10121 case BUILT_IN_MEMMOVE_CHK:
10122 case BUILT_IN_MEMSET_CHK:
10123 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10125 DECL_FUNCTION_CODE (fndecl));
10127 case BUILT_IN_STRNCPY_CHK:
10128 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10130 case BUILT_IN_STRNCAT_CHK:
10131 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10133 case BUILT_IN_FPRINTF_CHK:
10134 case BUILT_IN_VFPRINTF_CHK:
/* The _chk flag operand (arg1 here, after the stream) must be a
   side-effect-free integer before folding away the check.  */
10135 if (!validate_arg (arg1, INTEGER_TYPE)
10136 || TREE_SIDE_EFFECTS (arg1))
10139 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10149 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
10150 arguments, where NARGS <= 4.  IGNORE is true if the result of the
10151 function call is ignored.  This function returns NULL_TREE if no
10152 simplification was possible.  Note that this only folds builtins with
10153 fixed argument patterns.  Foldings that do varargs-to-varargs
10154 transformations, or that match calls with more than 4 arguments,
10155 need to be handled with fold_builtin_varargs instead. */
10157 #define MAX_ARGS_TO_FOLD_BUILTIN 4
/* Dispatch on NARGS to the fixed-arity folders above.
   NOTE(review): gappy listing — the "switch (nargs)" framing, case labels
   and closing braces are not visible here.  */
10160 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10162 tree ret = NULL_TREE;
10166 ret = fold_builtin_0 (fndecl, ignore);
10169 ret = fold_builtin_1 (fndecl, args[0], ignore);
10172 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10175 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10178 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
/* On success the result is wrapped in a NOP_EXPR with TREE_NO_WARNING set,
   to suppress "statement with no effect"-style diagnostics.  */
10186 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10187 TREE_NO_WARNING (ret) = 1;
10193 /* Builtins with folding operations that operate on "..." arguments
10194 need special handling; we need to store the arguments in a convenient
10195 data structure before attempting any folding.  Fortunately there are
10196 only a few builtins that fall into this category.  FNDECL is the
10197 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10198 result of the function call is ignored. */
10201 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10203 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10204 tree ret = NULL_TREE;
/* NOTE(review): "switch (fcode)" line is missing from this gappy listing.  */
10208 case BUILT_IN_SPRINTF_CHK:
10209 case BUILT_IN_VSPRINTF_CHK:
10210 ret = fold_builtin_sprintf_chk (exp, fcode);
10213 case BUILT_IN_SNPRINTF_CHK:
10214 case BUILT_IN_VSNPRINTF_CHK:
10215 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
/* As in fold_builtin_n, wrap the result to suppress spurious warnings.  */
10222 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10223 TREE_NO_WARNING (ret) = 1;
10229 /* A wrapper function for builtin folding that prevents warnings for
10230 "statement without effect" and the like, caused by removing the
10231 call node earlier than the warning is generated. */
/* Entry point: folds CALL_EXPR EXP if it calls a builtin; returns NULL_TREE
   otherwise.  Target (BUILT_IN_MD) builtins go through the target hook.  */
10234 fold_call_expr (tree exp, bool ignore)
10236 tree ret = NULL_TREE;
10237 tree fndecl = get_callee_fndecl (exp);
10239 && TREE_CODE (fndecl) == FUNCTION_DECL
10240 && DECL_BUILT_IN (fndecl))
10242 /* FIXME: Don't use a list in this interface. */
10243 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10244 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10247 int nargs = call_expr_nargs (exp);
10248 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10250 tree *args = CALL_EXPR_ARGP (exp);
10251 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10254 ret = fold_builtin_varargs (fndecl, exp, ignore);
10257 /* Propagate location information from original call to
10258 expansion of builtin.  Otherwise things like
10259 maybe_emit_chk_warning, that operate on the expansion
10260 of a builtin, will use the wrong location information. */
10261 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10263 tree realret = ret;
/* Look through the warning-suppressing NOP_EXPR added by fold_builtin_n.  */
10264 if (TREE_CODE (ret) == NOP_EXPR)
10265 realret = TREE_OPERAND (ret, 0);
10266 if (CAN_HAVE_LOCATION_P (realret)
10267 && !EXPR_HAS_LOCATION (realret))
10268 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10277 /* Conveniently construct a function call expression.  FNDECL names the
10278 function to be called and ARGLIST is a TREE_LIST of arguments. */
10281 build_function_call_expr (tree fndecl, tree arglist)
10283 tree fntype = TREE_TYPE (fndecl);
10284 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* Flatten the TREE_LIST into a stack array, then defer to
   fold_builtin_call_array so the new call is folded immediately.  */
10285 int n = list_length (arglist);
10286 tree *argarray = (tree *) alloca (n * sizeof (tree));
10289 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10290 argarray[i] = TREE_VALUE (arglist);
10291 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10294 /* Conveniently construct a function call expression.  FNDECL names the
10295 function to be called, N is the number of arguments, and the "..."
10296 parameters are the argument expressions. */
10299 build_call_expr (tree fndecl, int n, ...)
10302 tree fntype = TREE_TYPE (fndecl);
10303 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10304 tree *argarray = (tree *) alloca (n * sizeof (tree));
/* NOTE(review): the va_start/va_end lines are missing from this gappy
   listing; they bracket the loop below in the original.  */
10308 for (i = 0; i < n; i++)
10309 argarray[i] = va_arg (ap, tree);
10311 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10314 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10315 N arguments are passed in the array ARGARRAY. */
/* Tries to fold the would-be call first (MD hook or fold_builtin_n /
   fold_builtin_varargs); only builds the CALL_EXPR if folding fails.  */
10318 fold_builtin_call_array (tree type,
10323 tree ret = NULL_TREE;
10327 if (TREE_CODE (fn) == ADDR_EXPR)
10329 tree fndecl = TREE_OPERAND (fn, 0);
10330 if (TREE_CODE (fndecl) == FUNCTION_DECL
10331 && DECL_BUILT_IN (fndecl))
10333 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
/* Target hook still takes a TREE_LIST; cons one up back-to-front.  */
10335 tree arglist = NULL_TREE;
10336 for (i = n - 1; i >= 0; i--)
10337 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10338 ret = targetm.fold_builtin (fndecl, arglist, false);
10342 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10344 /* First try the transformations that don't require consing up
10346 ret = fold_builtin_n (fndecl, argarray, n, false);
10351 /* If we got this far, we need to build an exp. */
10352 exp = build_call_array (type, fn, n, argarray);
10353 ret = fold_builtin_varargs (fndecl, exp, false);
10354 return ret ? ret : exp;
10358 return build_call_array (type, fn, n, argarray);
10361 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10362 along with N new arguments specified as the "..." parameters.  SKIP
10363 is the number of arguments in EXP to be omitted.  This function is used
10364 to do varargs-to-varargs transformations. */
10367 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10369 int oldnargs = call_expr_nargs (exp);
10370 int nargs = oldnargs - skip + n;
10371 tree fntype = TREE_TYPE (fndecl);
10372 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
/* NOTE(review): the "if (n > 0)" guard, va_start/va_end and the else-branch
   framing are missing from this gappy listing.  When n > 0 a fresh buffer
   holds the new args followed by EXP's tail; when n == 0 the existing arg
   array is reused in place (line 10389).  */
10380 buffer = alloca (nargs * sizeof (tree));
10382 for (i = 0; i < n; i++)
10383 buffer[i] = va_arg (ap, tree);
10385 for (j = skip; j < oldnargs; j++, i++)
10386 buffer[i] = CALL_EXPR_ARG (exp, j);
10389 buffer = CALL_EXPR_ARGP (exp) + skip;
10391 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10394 /* Validate a single argument ARG against a tree code CODE representing
/* NOTE(review): remainder of comment ("a type") and the "static bool"
   signature line are missing from this gappy listing.  POINTER_TYPE is
   matched structurally (any pointer), other codes by exact TREE_CODE.  */
10398 validate_arg (tree arg, enum tree_code code)
10402 else if (code == POINTER_TYPE)
10403 return POINTER_TYPE_P (TREE_TYPE (arg));
10404 return code == TREE_CODE (TREE_TYPE (arg));
10407 /* This function validates the types of a function call argument list
10408 against a specified list of tree_codes.  If the last specifier is a 0,
10409 that represents an ellipses, otherwise the last specifier must be a
/* NOTE(review): comment tail ("VOID_TYPE") and the switch/goto framing are
   missing from this gappy listing.  */
10413 validate_arglist (tree callexpr, ...)
10415 enum tree_code code;
10418 call_expr_arg_iterator iter;
10421 va_start (ap, callexpr);
10422 init_call_expr_arg_iterator (callexpr, &iter);
10426 code = va_arg (ap, enum tree_code);
10430 /* This signifies an ellipses, any further arguments are all ok. */
10434 /* This signifies an endlink, if no arguments remain, return
10435 true, otherwise return false. */
10436 res = !more_call_expr_args_p (&iter);
10439 /* If no parameters remain or the parameter's code does not
10440 match the specified code, return false.  Otherwise continue
10441 checking any remaining arguments. */
10442 arg = next_call_expr_arg (&iter);
10443 if (!validate_arg (arg, code))
10450 /* We need gotos here since we can only have one VA_CLOSE in a
10458 /* Default target-specific builtin expander that does nothing. */
/* Returns NULL_RTX (line missing from this gappy listing), signalling
   "not expanded" to the caller; all parameters are intentionally unused.  */
10461 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10462 rtx target ATTRIBUTE_UNUSED,
10463 rtx subtarget ATTRIBUTE_UNUSED,
10464 enum machine_mode mode ATTRIBUTE_UNUSED,
10465 int ignore ATTRIBUTE_UNUSED)
10470 /* Returns true is EXP represents data that would potentially reside
10471 in a readonly section. */
10474 readonly_data_expr (tree exp)
10478 if (TREE_CODE (exp) != ADDR_EXPR)
10481 exp = get_base_address (TREE_OPERAND (exp, 0));
/* NOTE(review): the null-check on the get_base_address result (around line
   10482-10483) is missing from this gappy listing.  */
10485 /* Make sure we call decl_readonly_section only for trees it
10486 can handle (since it returns true for everything it doesn't
10488 if (TREE_CODE (exp) == STRING_CST
10489 || TREE_CODE (exp) == CONSTRUCTOR
10490 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10491 return decl_readonly_section (exp, 0);
10496 /* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
10497 to the call, and TYPE is its return type.
10499 Return NULL_TREE if no simplification was possible, otherwise return the
10500 simplified form of the call as a tree.
10502 The simplified form may be a constant or other expression which
10503 computes the same value, but in a more efficient manner (including
10504 calls to other builtin functions).
10506 The call may contain arguments which need to be evaluated, but
10507 which are not useful to determine the result of the call.  In
10508 this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10509 COMPOUND_EXPR will be an argument which must be evaluated.
10510 COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10511 COMPOUND_EXPR in the chain will contain the tree for the simplified
10512 form of the builtin function call. */
10515 fold_builtin_strstr (tree s1, tree s2, tree type)
10517 if (!validate_arg (s1, POINTER_TYPE)
10518 || !validate_arg (s2, POINTER_TYPE))
10523 const char *p1, *p2;
/* c_getstr returns the constant C string behind a tree, or NULL.  */
10525 p2 = c_getstr (s2);
10529 p1 = c_getstr (s1);
/* Both strings constant: compute strstr at compile time.  */
10532 const char *r = strstr (p1, p2);
10536 return build_int_cst (TREE_TYPE (s1), 0);
10538 /* Return an offset into the constant string argument. */
10539 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10540 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10541 return fold_convert (type, tem);
/* strstr (s1, "") folds to s1 itself.  */
10544 /* The argument is const char *, and the result is char *, so we need
10545 a type conversion here to avoid a warning. */
10547 return fold_convert (type, s1)
/* NOTE(review): closing ";" context and the single-character-s2 guard
   (around 10548-10551) are missing from this gappy listing.  */;
10552 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10556 /* New argument list transforming strstr(s1, s2) to
10557 strchr(s1, s2[0]). */
10558 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10562 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10563 the call, and TYPE is its return type.
10565 Return NULL_TREE if no simplification was possible, otherwise return the
10566 simplified form of the call as a tree.
10568 The simplified form may be a constant or other expression which
10569 computes the same value, but in a more efficient manner (including
10570 calls to other builtin functions).
10572 The call may contain arguments which need to be evaluated, but
10573 which are not useful to determine the result of the call. In
10574 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10575 COMPOUND_EXPR will be an argument which must be evaluated.
10576 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10577 COMPOUND_EXPR in the chain will contain the tree for the simplified
10578 form of the builtin function call. */
10581 fold_builtin_strchr (tree s1, tree s2, tree type)
10583 if (!validate_arg (s1, POINTER_TYPE)
10584 || !validate_arg (s2, INTEGER_TYPE))
/* Only a compile-time-constant character argument can be folded.  */
10590 if (TREE_CODE (s2) != INTEGER_CST)
10593 p1 = c_getstr (s1);
/* target_char_cast fails when S2 does not fit in a target char.  */
10600 if (target_char_cast (s2, &c))
10603 r = strchr (p1, c)
/* Not found: fold to a null pointer of S1's type.  */;
10606 return build_int_cst (TREE_TYPE (s1), 0);
10608 /* Return an offset into the constant string argument. */
10609 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10610 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10611 return fold_convert (type, tem);
10617 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10618 the call, and TYPE is its return type.
10620 Return NULL_TREE if no simplification was possible, otherwise return the
10621 simplified form of the call as a tree.
10623 The simplified form may be a constant or other expression which
10624 computes the same value, but in a more efficient manner (including
10625 calls to other builtin functions).
10627 The call may contain arguments which need to be evaluated, but
10628 which are not useful to determine the result of the call. In
10629 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10630 COMPOUND_EXPR will be an argument which must be evaluated.
10631 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10632 COMPOUND_EXPR in the chain will contain the tree for the simplified
10633 form of the builtin function call. */
10636 fold_builtin_strrchr (tree s1, tree s2, tree type)
10638 if (!validate_arg (s1, POINTER_TYPE)
10639 || !validate_arg (s2, INTEGER_TYPE))
/* Only a compile-time-constant character argument can be folded.  */
10646 if (TREE_CODE (s2) != INTEGER_CST)
10649 p1 = c_getstr (s1)
/* target_char_cast fails when S2 does not fit in a target char.  */;
10656 if (target_char_cast (s2, &c))
10659 r = strrchr (p1, c);
/* Not found: fold to a null pointer of S1's type.  */
10662 return build_int_cst (TREE_TYPE (s1), 0);
10664 /* Return an offset into the constant string argument. */
10665 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10666 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10667 return fold_convert (type, tem);
/* With a non-constant string, only the C == '\0' case can be reduced
   (strrchr of '\0' finds the terminator, same as strchr).  */
10670 if (! integer_zerop (s2))
10673 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10677 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10678 return build_call_expr (fn, 2, s1, s2);
10682 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10683 to the call, and TYPE is its return type.
10685 Return NULL_TREE if no simplification was possible, otherwise return the
10686 simplified form of the call as a tree.
10688 The simplified form may be a constant or other expression which
10689 computes the same value, but in a more efficient manner (including
10690 calls to other builtin functions).
10692 The call may contain arguments which need to be evaluated, but
10693 which are not useful to determine the result of the call. In
10694 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10695 COMPOUND_EXPR will be an argument which must be evaluated.
10696 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10697 COMPOUND_EXPR in the chain will contain the tree for the simplified
10698 form of the builtin function call. */
10701 fold_builtin_strpbrk (tree s1, tree s2, tree type)
10703 if (!validate_arg (s1, POINTER_TYPE)
10704 || !validate_arg (s2, POINTER_TYPE))
10709 const char *p1, *p2;
10711 p2 = c_getstr (s2);
10715 p1 = c_getstr (s1);
/* Both strings are compile-time constants: do the search now.  */
10718 const char *r = strpbrk (p1, p2);
/* No accept-set character found: null pointer of S1's type.  */
10722 return build_int_cst (TREE_TYPE (s1), 0);
10724 /* Return an offset into the constant string argument. */
10725 tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
10726 s1, build_int_cst (TREE_TYPE (s1), r - p1));
10727 return fold_convert (type, tem);
10731 /* strpbrk(x, "") == NULL.
10732 Evaluate and ignore s1 in case it had side-effects. */
10733 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
10736 return NULL_TREE; /* Really call strpbrk. */
/* Single-character accept set: strpbrk degenerates to strchr.  */
10738 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10742 /* New argument list transforming strpbrk(s1, s2) to
10743 strchr(s1, s2[0]). */
10744 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10748 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10751 Return NULL_TREE if no simplification was possible, otherwise return the
10752 simplified form of the call as a tree.
10754 The simplified form may be a constant or other expression which
10755 computes the same value, but in a more efficient manner (including
10756 calls to other builtin functions).
10758 The call may contain arguments which need to be evaluated, but
10759 which are not useful to determine the result of the call. In
10760 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10761 COMPOUND_EXPR will be an argument which must be evaluated.
10762 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10763 COMPOUND_EXPR in the chain will contain the tree for the simplified
10764 form of the builtin function call. */
10767 fold_builtin_strcat (tree dst, tree src)
10769 if (!validate_arg (dst, POINTER_TYPE)
10770 || !validate_arg (src, POINTER_TYPE))
10774 const char *p = c_getstr (src);
10776 /* If the string length is zero, return the dst parameter. */
/* strcat (DST, "") is a no-op whose value is DST; SRC presumably still
   gets evaluated for side effects below (elided in this extract).  */
10777 if (p && *p == '\0')
10784 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10785 arguments to the call.
10787 Return NULL_TREE if no simplification was possible, otherwise return the
10788 simplified form of the call as a tree.
10790 The simplified form may be a constant or other expression which
10791 computes the same value, but in a more efficient manner (including
10792 calls to other builtin functions).
10794 The call may contain arguments which need to be evaluated, but
10795 which are not useful to determine the result of the call. In
10796 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10797 COMPOUND_EXPR will be an argument which must be evaluated.
10798 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10799 COMPOUND_EXPR in the chain will contain the tree for the simplified
10800 form of the builtin function call. */
10803 fold_builtin_strncat (tree dst, tree src, tree len)
10805 if (!validate_arg (dst, POINTER_TYPE)
10806 || !validate_arg (src, POINTER_TYPE)
10807 || !validate_arg (len, INTEGER_TYPE))
10811 const char *p = c_getstr (src);
10813 /* If the requested length is zero, or the src parameter string
10814 length is zero, return the dst parameter. */
10815 if (integer_zerop (len) || (p && *p == '\0'))
10816 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
10818 /* If the requested len is greater than or equal to the string
10819 length, call strcat. */
/* The bound cannot truncate the constant source, so plain strcat is
   behaviorally identical.  */
10820 if (TREE_CODE (len) == INTEGER_CST && p
10821 && compare_tree_int (len, strlen (p)) >= 0)
10823 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
10825 /* If the replacement _DECL isn't initialized, don't do the
10830 return build_call_expr (fn, 2, dst, src);
10836 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10839 Return NULL_TREE if no simplification was possible, otherwise return the
10840 simplified form of the call as a tree.
10842 The simplified form may be a constant or other expression which
10843 computes the same value, but in a more efficient manner (including
10844 calls to other builtin functions).
10846 The call may contain arguments which need to be evaluated, but
10847 which are not useful to determine the result of the call. In
10848 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10849 COMPOUND_EXPR will be an argument which must be evaluated.
10850 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10851 COMPOUND_EXPR in the chain will contain the tree for the simplified
10852 form of the builtin function call. */
10855 fold_builtin_strspn (tree s1, tree s2)
10857 if (!validate_arg (s1, POINTER_TYPE)
10858 || !validate_arg (s2, POINTER_TYPE))
10862 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10864 /* If both arguments are constants, evaluate at compile-time. */
10867 const size_t r = strspn (p1, p2);
10868 return size_int (r);
10871 /* If either argument is "", the result is 0. */
10872 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10873 /* Evaluate and ignore both arguments in case either one has
10875 return omit_two_operands (integer_type_node, integer_zero_node,
10881 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10884 Return NULL_TREE if no simplification was possible, otherwise return the
10885 simplified form of the call as a tree.
10887 The simplified form may be a constant or other expression which
10888 computes the same value, but in a more efficient manner (including
10889 calls to other builtin functions).
10891 The call may contain arguments which need to be evaluated, but
10892 which are not useful to determine the result of the call. In
10893 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10894 COMPOUND_EXPR will be an argument which must be evaluated.
10895 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10896 COMPOUND_EXPR in the chain will contain the tree for the simplified
10897 form of the builtin function call. */
10900 fold_builtin_strcspn (tree s1, tree s2)
10902 if (!validate_arg (s1, POINTER_TYPE)
10903 || !validate_arg (s2, POINTER_TYPE))
10907 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10909 /* If both arguments are constants, evaluate at compile-time. */
10912 const size_t r = strcspn (p1, p2);
10913 return size_int (r);
10916 /* If the first argument is "", the result is 0. */
10917 if (p1 && *p1 == '\0')
10919 /* Evaluate and ignore argument s2 in case it has
10921 return omit_one_operand (integer_type_node,
10922 integer_zero_node, s2);
10925 /* If the second argument is "", return __builtin_strlen(s1). */
/* strcspn with an empty reject set scans the whole string, i.e. its
   length.  */
10926 if (p2 && *p2 == '\0')
10928 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
10930 /* If the replacement _DECL isn't initialized, don't do the
10935 return build_call_expr (fn, 1, s1);
10941 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
10942 to the call. IGNORE is true if the value returned
10943 by the builtin will be ignored. UNLOCKED is true if this is
10944 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
10945 the known length of the string. Return NULL_TREE if no simplification
10949 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
10951 /* If we're using an unlocked function, assume the other unlocked
10952 functions exist explicitly. */
10953 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
10954 : implicit_built_in_decls[BUILT_IN_FPUTC];
10955 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
10956 : implicit_built_in_decls[BUILT_IN_FWRITE];
10958 /* If the return value is used, don't do the transformation. */
10962 /* Verify the arguments in the original call. */
10963 if (!validate_arg (arg0, POINTER_TYPE)
10964 || !validate_arg (arg1, POINTER_TYPE))
10968 len = c_strlen (arg0, 0);
10970 /* Get the length of the string passed to fputs. If the length
10971 can't be determined, punt. */
10973 || TREE_CODE (len) != INTEGER_CST)
10976 switch (compare_tree_int (len, 1))
10978 case -1: /* length is 0, delete the call entirely. */
10979 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
10981 case 0: /* length is 1, call fputc. */
10983 const char *p = c_getstr (arg0);
10988 return build_call_expr (fn_fputc, 2,
10989 build_int_cst (NULL_TREE, p[0]), arg1);
10995 case 1: /* length is greater than 1, call fwrite. */
10997 /* If optimizing for size keep fputs. */
11000 /* New argument list transforming fputs(string, stream) to
11001 fwrite(string, 1, len, stream). */
11003 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11008 gcc_unreachable ();
11013 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11014 produced. False otherwise. This is done so that we don't output the error
11015 or warning twice or three times. */
11017 fold_builtin_next_arg (tree exp, bool va_start_p)
11019 tree fntype = TREE_TYPE (current_function_decl);
11020 int nargs = call_expr_nargs (exp);
/* va_start is only meaningful in a varargs function: the last declared
   parameter type must not be void (i.e. the arglist ends with "...").  */
11023 if (TYPE_ARG_TYPES (fntype) == 0
11024 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11025 == void_type_node))
11027 error ("%<va_start%> used in function with fixed args");
11033 if (va_start_p && (nargs != 2))
11035 error ("wrong number of arguments to function %<va_start%>");
11038 arg = CALL_EXPR_ARG (exp, 1);
11040 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11041 when we checked the arguments and if needed issued a warning. */
11046 /* Evidently an out of date version of <stdarg.h>; can't validate
11047 va_start's second argument, but can still work as intended. */
11048 warning (0, "%<__builtin_next_arg%> called without an argument");
11051 else if (nargs > 1)
11053 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11056 arg = CALL_EXPR_ARG (exp, 0);
11059 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11060 or __builtin_next_arg (0) the first time we see it, after checking
11061 the arguments and if needed issuing a warning. */
11062 if (!integer_zerop (arg))
11064 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11066 /* Strip off all nops for the sake of the comparison. This
11067 is not quite the same as STRIP_NOPS. It does more.
11068 We must also strip off INDIRECT_EXPR for C++ reference
11070 while (TREE_CODE (arg) == NOP_EXPR
11071 || TREE_CODE (arg) == CONVERT_EXPR
11072 || TREE_CODE (arg) == NON_LVALUE_EXPR
11073 || TREE_CODE (arg) == INDIRECT_REF)
11074 arg = TREE_OPERAND (arg, 0);
11075 if (arg != last_parm)
11077 /* FIXME: Sometimes with the tree optimizers we can end up with
11078 an argument that is not the last one even though the user used
11079 the last argument. We just warn and set the arg to be the last
11080 argument so that we will get wrong-code because of
11082 warning (0, "second parameter of %<va_start%> not last named argument");
11084 /* We want to verify the second parameter just once before the tree
11085 optimizers are run and then avoid keeping it in the tree,
11086 as otherwise we could warn even for correct code like:
11087 void foo (int i, ...)
11088 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11090 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11092 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11098 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11099 ORIG may be null if this is a 2-argument call. We don't attempt to
11100 simplify calls with more than 3 arguments.
11102 Return NULL_TREE if no simplification was possible, otherwise return the
11103 simplified form of the call as a tree. If IGNORED is true, it means that
11104 the caller does not use the returned value of the function. */
11107 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11110 const char *fmt_str = NULL;
11112 /* Verify the required arguments in the original call. We deal with two
11113 types of sprintf() calls: 'sprintf (str, fmt)' and
11114 'sprintf (dest, "%s", orig)'. */
11115 if (!validate_arg (dest, POINTER_TYPE)
11116 || !validate_arg (fmt, POINTER_TYPE))
11118 if (orig && !validate_arg (orig, POINTER_TYPE))
11121 /* Check whether the format is a literal string constant. */
11122 fmt_str = c_getstr (fmt);
11123 if (fmt_str == NULL)
11127 retval = NULL_TREE;
11129 if (!init_target_chars ())
11132 /* If the format doesn't contain % args or %%, use strcpy. */
11133 if (strchr (fmt_str, target_percent) == NULL)
11135 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11140 /* Don't optimize sprintf (buf, "abc", ptr++). */
11144 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11145 'format' is known to contain no % formats. */
11146 call = build_call_expr (fn, 2, dest, fmt);
/* sprintf returns the number of characters written, which here is
   simply the length of the literal format string.  */
11148 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11151 /* If the format is "%s", use strcpy if the result isn't used. */
11152 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11155 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11160 /* Don't crash on sprintf (str1, "%s"). */
11164 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
/* The return value is only representable when ORIG's length is a
   compile-time constant.  */
11167 retval = c_strlen (orig, 1);
11168 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11171 call = build_call_expr (fn, 2, dest, orig);
11174 if (call && retval)
/* Combine the strcpy call with the computed length so the expression
   still yields sprintf's return value, converted to its return type.  */
11176 retval = fold_convert
11177 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11179 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11185 /* Expand a call EXP to __builtin_object_size. */
11188 expand_builtin_object_size (tree exp)
11191 int object_size_type;
11192 tree fndecl = get_callee_fndecl (exp);
11193 location_t locus = EXPR_LOCATION (exp);
11195 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11197 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11199 expand_builtin_trap ();
11203 ost = CALL_EXPR_ARG (exp, 1);
/* The object-size type selector must be a literal 0..3.  */
11206 if (TREE_CODE (ost) != INTEGER_CST
11207 || tree_int_cst_sgn (ost) < 0
11208 || compare_tree_int (ost, 3) > 0)
11210 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11212 expand_builtin_trap ();
11216 object_size_type = tree_low_cst (ost, 0);
/* Size still unknown at expansion time: types 0/1 yield (size_t) -1,
   types 2/3 yield 0.  */
11218 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11221 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11222 FCODE is the BUILT_IN_* to use.
11223 Return NULL_RTX if we failed; the caller should emit a normal call,
11224 otherwise try to get the result in TARGET, if convenient (and in
11225 mode MODE if that's convenient). */
11228 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11229 enum built_in_function fcode)
11231 tree dest, src, len, size;
11233 if (!validate_arglist (exp,
11235 fcode == BUILT_IN_MEMSET_CHK
11236 ? INTEGER_TYPE : POINTER_TYPE,
11237 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11240 dest = CALL_EXPR_ARG (exp, 0);
11241 src = CALL_EXPR_ARG (exp, 1);
11242 len = CALL_EXPR_ARG (exp, 2);
11243 size = CALL_EXPR_ARG (exp, 3);
/* Without a compile-time-constant object size nothing can be proven.  */
11245 if (! host_integerp (size, 1))
11248 if (host_integerp (len, 1) || integer_all_onesp (size))
/* LEN provably exceeds the object size: warn, the overflow is certain.  */
11252 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11254 location_t locus = EXPR_LOCATION (exp);
11255 warning (0, "%Hcall to %D will always overflow destination buffer",
11256 &locus, get_callee_fndecl (exp));
11261 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11262 mem{cpy,pcpy,move,set} is available. */
11265 case BUILT_IN_MEMCPY_CHK:
11266 fn = built_in_decls[BUILT_IN_MEMCPY];
11268 case BUILT_IN_MEMPCPY_CHK:
11269 fn = built_in_decls[BUILT_IN_MEMPCPY];
11271 case BUILT_IN_MEMMOVE_CHK:
11272 fn = built_in_decls[BUILT_IN_MEMMOVE];
11274 case BUILT_IN_MEMSET_CHK:
11275 fn = built_in_decls[BUILT_IN_MEMSET];
/* Build the unchecked call, preserving the tail-call flag.  */
11284 fn = build_call_expr (fn, 3, dest, src, len);
11285 if (TREE_CODE (fn) == CALL_EXPR)
11286 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11287 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11289 else if (fcode == BUILT_IN_MEMSET_CHK)
11293 unsigned int dest_align
11294 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11296 /* If DEST is not a pointer type, call the normal function. */
11297 if (dest_align == 0)
11300 /* If SRC and DEST are the same (and not volatile), do nothing. */
11301 if (operand_equal_p (src, dest, 0))
11305 if (fcode != BUILT_IN_MEMPCPY_CHK)
11307 /* Evaluate and ignore LEN in case it has side-effects. */
11308 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11309 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* mempcpy returns DEST + LEN rather than DEST.  */
11312 len = fold_convert (TREE_TYPE (dest), len);
11313 expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
11314 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11317 /* __memmove_chk special case. */
11318 if (fcode == BUILT_IN_MEMMOVE_CHK)
11320 unsigned int src_align
11321 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11323 if (src_align == 0)
11326 /* If src is categorized for a readonly section we can use
11327 normal __memcpy_chk. */
11328 if (readonly_data_expr (src))
11330 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11333 fn = build_call_expr (fn, 4, dest, src, len, size);
11334 if (TREE_CODE (fn) == CALL_EXPR)
11335 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11336 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11343 /* Emit warning if a buffer overflow is detected at compile time. */
11346 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
/* Pick out the length/size argument positions, which differ per builtin.  */
11354 case BUILT_IN_STRCPY_CHK:
11355 case BUILT_IN_STPCPY_CHK:
11356 /* For __strcat_chk the warning will be emitted only if overflowing
11357 by at least strlen (dest) + 1 bytes. */
11358 case BUILT_IN_STRCAT_CHK:
11359 len = CALL_EXPR_ARG (exp, 1);
11360 size = CALL_EXPR_ARG (exp, 2);
11363 case BUILT_IN_STRNCAT_CHK:
11364 case BUILT_IN_STRNCPY_CHK:
11365 len = CALL_EXPR_ARG (exp, 2);
11366 size = CALL_EXPR_ARG (exp, 3);
11368 case BUILT_IN_SNPRINTF_CHK:
11369 case BUILT_IN_VSNPRINTF_CHK:
11370 len = CALL_EXPR_ARG (exp, 1);
11371 size = CALL_EXPR_ARG (exp, 3);
11374 gcc_unreachable ();
/* An all-ones SIZE is the "object size unknown" sentinel -- presumably
   no checking is wanted then; verify against __builtin_object_size.  */
11380 if (! host_integerp (size, 1) || integer_all_onesp (size))
11385 len = c_strlen (len, 1);
11386 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11389 else if (fcode == BUILT_IN_STRNCAT_CHK)
11391 tree src = CALL_EXPR_ARG (exp, 1);
11392 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11394 src = c_strlen (src, 1);
11395 if (! src || ! host_integerp (src, 1))
/* Source length unknown: the overflow is only possible, not certain.  */
11397 locus = EXPR_LOCATION (exp);
11398 warning (0, "%Hcall to %D might overflow destination buffer",
11399 &locus, get_callee_fndecl (exp));
11402 else if (tree_int_cst_lt (src, size))
11405 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
/* Here the bound provably exceeds the object size.  */
11408 locus = EXPR_LOCATION (exp);
11409 warning (0, "%Hcall to %D will always overflow destination buffer",
11410 &locus, get_callee_fndecl (exp));
11413 /* Emit warning if a buffer overflow is detected at compile time
11414 in __sprintf_chk/__vsprintf_chk calls. */
11417 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11419 tree dest, size, len, fmt, flag;
11420 const char *fmt_str;
11421 int nargs = call_expr_nargs (exp);
11423 /* Verify the required arguments in the original call. */
11427 dest = CALL_EXPR_ARG (exp, 0);
11428 flag = CALL_EXPR_ARG (exp, 1);
11429 size = CALL_EXPR_ARG (exp, 2);
11430 fmt = CALL_EXPR_ARG (exp, 3);
/* SIZE must be a known constant; all-ones means "object size unknown".  */
11432 if (! host_integerp (size, 1) || integer_all_onesp (size))
11435 /* Check whether the format is a literal string constant. */
11436 fmt_str = c_getstr (fmt);
11437 if (fmt_str == NULL)
11440 if (!init_target_chars ())
11443 /* If the format doesn't contain % args or %%, we know its size. */
11444 if (strchr (fmt_str, target_percent) == 0)
11445 len = build_int_cstu (size_type_node, strlen (fmt_str))
11446 /* If the format is "%s" and first ... argument is a string literal,
11448 else if (fcode == BUILT_IN_SPRINTF_CHK
11449 && strcmp (fmt_str, target_percent_s) == 0)
11455 arg = CALL_EXPR_ARG (exp, 4);
11456 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11459 len = c_strlen (arg, 1);
11460 if (!len || ! host_integerp (len, 1))
/* LEN >= SIZE cannot fit once the terminating NUL is counted.  */
11466 if (! tree_int_cst_lt (len, size))
11468 location_t locus = EXPR_LOCATION (exp);
11469 warning (0, "%Hcall to %D will always overflow destination buffer",
11470 &locus, get_callee_fndecl (exp));
11474 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11478 fold_builtin_object_size (tree ptr, tree ost)
11480 tree ret = NULL_TREE;
11481 int object_size_type;
11483 if (!validate_arg (ptr, POINTER_TYPE)
11484 || !validate_arg (ost, INTEGER_TYPE)
/* The type selector must be a literal in the range 0..3.  */)
11489 if (TREE_CODE (ost) != INTEGER_CST
11490 || tree_int_cst_sgn (ost) < 0
11491 || compare_tree_int (ost, 3) > 0)
11494 object_size_type = tree_low_cst (ost, 0);
11496 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11497 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11498 and (size_t) 0 for types 2 and 3. */
11499 if (TREE_SIDE_EFFECTS (ptr))
11500 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11502 if (TREE_CODE (ptr) == ADDR_EXPR)
11503 ret = build_int_cstu (size_type_node,
11504 compute_builtin_object_size (ptr, object_size_type));
11506 else if (TREE_CODE (ptr) == SSA_NAME)
11508 unsigned HOST_WIDE_INT bytes;
11510 /* If object size is not known yet, delay folding until
11511 later. Maybe subsequent passes will help determining
/* compute_builtin_object_size's "unknown" sentinel is -1 for types 0/1
   and 0 for types 2/3; only fold when a real size was determined.  */
11513 bytes = compute_builtin_object_size (ptr, object_size_type);
11514 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11516 ret = build_int_cstu (size_type_node, bytes);
/* Make sure the computed constant actually fits in size_t.  */
11521 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11522 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11523 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11530 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11531 DEST, SRC, LEN, and SIZE are the arguments to the call.
11532 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11533 code of the builtin. If MAXLEN is not NULL, it is maximum length
11534 passed as third argument. */
11537 fold_builtin_memory_chk (tree fndecl,
11538 tree dest, tree src, tree len, tree size,
11539 tree maxlen, bool ignore,
11540 enum built_in_function fcode)
11544 if (!validate_arg (dest, POINTER_TYPE)
11545 || !validate_arg (src,
11546 (fcode == BUILT_IN_MEMSET_CHK
11547 ? INTEGER_TYPE : POINTER_TYPE))
11548 || !validate_arg (len, INTEGER_TYPE)
11549 || !validate_arg (size, INTEGER_TYPE))
11552 /* If SRC and DEST are the same (and not volatile), return DEST
11553 (resp. DEST+LEN for __mempcpy_chk). */
11554 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11556 if (fcode != BUILT_IN_MEMPCPY_CHK)
11557 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11560 tree temp = fold_convert (TREE_TYPE (dest), len);
11561 temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
11562 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
/* Without a constant object size, no compile-time checking is possible.  */
11566 if (! host_integerp (size, 1))
11569 if (! integer_all_onesp (size))
11571 if (! host_integerp (len, 1))
11573 /* If LEN is not constant, try MAXLEN too.
11574 For MAXLEN only allow optimizing into non-_ocs function
11575 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11576 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11578 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11580 /* (void) __mempcpy_chk () can be optimized into
11581 (void) __memcpy_chk (). */
11582 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11586 return build_call_expr (fn, 4, dest, src, len, size);
/* The (max) length may exceed the object: keep the checked variant.  */
11594 if (tree_int_cst_lt (size, maxlen))
11599 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11600 mem{cpy,pcpy,move,set} is available. */
11603 case BUILT_IN_MEMCPY_CHK:
11604 fn = built_in_decls[BUILT_IN_MEMCPY];
11606 case BUILT_IN_MEMPCPY_CHK:
11607 fn = built_in_decls[BUILT_IN_MEMPCPY];
11609 case BUILT_IN_MEMMOVE_CHK:
11610 fn = built_in_decls[BUILT_IN_MEMMOVE];
11612 case BUILT_IN_MEMSET_CHK:
11613 fn = built_in_decls[BUILT_IN_MEMSET];
/* The copy provably fits: emit the unchecked call.  */
11622 return build_call_expr (fn, 3, dest, src, len);
11625 /* Fold a call to the __st[rp]cpy_chk builtin.
11626 DEST, SRC, and SIZE are the arguments to the call.
11627 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11628 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11629 strings passed as second argument. */
11632 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
11633 tree maxlen, bool ignore,
11634 enum built_in_function fcode)
11638 if (!validate_arg (dest, POINTER_TYPE)
11639 || !validate_arg (src, POINTER_TYPE)
11640 || !validate_arg (size, INTEGER_TYPE))
11643 /* If SRC and DEST are the same (and not volatile), return DEST. */
11644 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11645 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* Without a constant object size, no compile-time checking is possible.  */
11647 if (! host_integerp (size, 1))
11650 if (! integer_all_onesp (size))
11652 len = c_strlen (src, 1);
11653 if (! len || ! host_integerp (len, 1))
11655 /* If LEN is not constant, try MAXLEN too.
11656 For MAXLEN only allow optimizing into non-_ocs function
11657 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11658 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11660 if (fcode == BUILT_IN_STPCPY_CHK)
11665 /* If return value of __stpcpy_chk is ignored,
11666 optimize into __strcpy_chk. */
11667 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
11671 return build_call_expr (fn, 3, dest, src, size);
11674 if (! len || TREE_SIDE_EFFECTS (len))
11677 /* If c_strlen returned something, but not a constant,
11678 transform __strcpy_chk into __memcpy_chk. */
11679 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
/* Copy strlen (SRC) + 1 bytes to include the terminating NUL.  */
11683 len = size_binop (PLUS_EXPR, len, ssize_int (1));
11684 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
11685 build_call_expr (fn, 4,
11686 dest, src, len, size));
/* MAXLEN may exceed the object: keep the checked variant.  */
11692 if (! tree_int_cst_lt (maxlen, size))
11696 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11697 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
11698 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
11702 return build_call_expr (fn, 2, dest, src);
11705 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11706 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11707 length passed as third argument. */
11710 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
11715 if (!validate_arg (dest, POINTER_TYPE)
11716 || !validate_arg (src, POINTER_TYPE)
11717 || !validate_arg (len, INTEGER_TYPE)
11718 || !validate_arg (size, INTEGER_TYPE))
/* Without a constant object size, no compile-time checking is possible.  */
11721 if (! host_integerp (size, 1))
11724 if (! integer_all_onesp (size))
11726 if (! host_integerp (len, 1))
11728 /* If LEN is not constant, try MAXLEN too.
11729 For MAXLEN only allow optimizing into non-_ocs function
11730 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11731 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
/* The (max) length may exceed the object: keep the checked variant.  */
11737 if (tree_int_cst_lt (size, maxlen))
11741 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11742 fn = built_in_decls[BUILT_IN_STRNCPY];
11746 return build_call_expr (fn, 3, dest, src, len);
11749 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11750 are the arguments to the call. */
11753 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
11758 if (!validate_arg (dest, POINTER_TYPE)
11759 || !validate_arg (src, POINTER_TYPE)
11760 || !validate_arg (size, INTEGER_TYPE))
11763 p = c_getstr (src);
11764 /* If the SRC parameter is "", return DEST. */
11765 if (p && *p == '\0')
11766 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* Only drop the check when SIZE is the "unknown object" sentinel -1.  */
11768 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
11771 /* If __builtin_strcat_chk is used, assume strcat is available. */
11772 fn = built_in_decls[BUILT_IN_STRCAT];
11776 return build_call_expr (fn, 2, dest, src);
11779 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
11783 fold_builtin_strncat_chk (tree fndecl,
11784 			  tree dest, tree src, tree len, tree size)
      /* Reject calls whose arguments do not have the expected types.
	 BUGFIX: the original chain validated SIZE twice and never
	 validated LEN; the fourth check now validates LEN.  */
11789   if (!validate_arg (dest, POINTER_TYPE)
11790       || !validate_arg (src, POINTER_TYPE)
11791       || !validate_arg (size, INTEGER_TYPE)
11792       || !validate_arg (len, INTEGER_TYPE))
11795   p = c_getstr (src);
11796   /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
11797   if (p && *p == '\0')
11798     return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len)
11799   else if (integer_zerop (len))
11800     return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
      /* Only fold when the destination object size is a compile-time
	 constant.  */
11802   if (! host_integerp (size, 1))
11805   if (! integer_all_onesp (size))
11807       tree src_len = c_strlen (src, 1);
11809 	  && host_integerp (src_len, 1)
11810 	  && host_integerp (len, 1)
11811 	  && ! tree_int_cst_lt (len, src_len))
11813 	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
11814 	  fn = built_in_decls[BUILT_IN_STRCAT_CHK];
11818 	  return build_call_expr (fn, 3, dest, src, size);
11823   /* If __builtin_strncat_chk is used, assume strncat is available.  */
11824   fn = built_in_decls[BUILT_IN_STRNCAT];
      /* Drop the object-size check and call plain strncat.  */
11828   return build_call_expr (fn, 3, dest, src, len);
11831 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
11832 a normal call should be emitted rather than expanding the function
11833 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
11836 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
11838   tree dest, size, len, fn, fmt, flag;
11839   const char *fmt_str;
11840   int nargs = call_expr_nargs (exp);
11842   /* Verify the required arguments in the original call.  */
11845   dest = CALL_EXPR_ARG (exp, 0);
11846   if (!validate_arg (dest, POINTER_TYPE))
11848   flag = CALL_EXPR_ARG (exp, 1);
11849   if (!validate_arg (flag, INTEGER_TYPE))
11851   size = CALL_EXPR_ARG (exp, 2);
11852   if (!validate_arg (size, INTEGER_TYPE))
11854   fmt = CALL_EXPR_ARG (exp, 3);
11855   if (!validate_arg (fmt, POINTER_TYPE))
      /* Only fold when the destination object size is a compile-time
	 constant.  */
11858   if (! host_integerp (size, 1))
11863   if (!init_target_chars ())
11866   /* Check whether the format is a literal string constant.  */
11867   fmt_str = c_getstr (fmt);
11868   if (fmt_str != NULL)
11870       /* If the format doesn't contain % args or %%, we know the size.  */
11871       if (strchr (fmt_str, target_percent) == 0)
	  /* For sprintf_chk this is only safe with no variadic arguments
	     (nargs == 4); vsprintf_chk never consumes extra args here.  */
11873 	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
11874 	    len = build_int_cstu (size_type_node, strlen (fmt_str));
11876       /* If the format is "%s" and first ... argument is a string literal,
11877 	 we know the size too.  */
11878       else if (fcode == BUILT_IN_SPRINTF_CHK
11879 	       && strcmp (fmt_str, target_percent_s) == 0)
11885 	      arg = CALL_EXPR_ARG (exp, 4);
11886 	      if (validate_arg (arg, POINTER_TYPE))
11888 		  len = c_strlen (arg, 1);
		  /* Discard LEN unless it is a usable host constant.  */
11889 		  if (! len || ! host_integerp (len, 1))
      /* Unless SIZE is unknown (all-ones), require a proven LEN < SIZE
	 before removing the overflow check.  */
11896   if (! integer_all_onesp (size))
11898       if (! len || ! tree_int_cst_lt (len, size))
11902   /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
11903      or if format doesn't contain % chars or is "%s".  */
11904   if (! integer_zerop (flag))
11906       if (fmt_str == NULL)
11908       if (strchr (fmt_str, target_percent) != NULL
11909 	  && strcmp (fmt_str, target_percent_s))
11913   /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
11914   fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
11915 		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
      /* Rewrite the call, keeping DEST and FMT and dropping FLAG/SIZE.  */
11919   return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
11922 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
11923 a normal call should be emitted rather than expanding the function
11924 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
11925 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
11926 passed as second argument. */
11929 fold_builtin_snprintf_chk (tree exp, tree maxlen,
11930 			   enum built_in_function fcode)
11932   tree dest, size, len, fn, fmt, flag;
11933   const char *fmt_str;
11935   /* Verify the required arguments in the original call.  */
11936   if (call_expr_nargs (exp) < 5)
11938   dest = CALL_EXPR_ARG (exp, 0);
11939   if (!validate_arg (dest, POINTER_TYPE))
11941   len = CALL_EXPR_ARG (exp, 1);
11942   if (!validate_arg (len, INTEGER_TYPE))
11944   flag = CALL_EXPR_ARG (exp, 2);
11945   if (!validate_arg (flag, INTEGER_TYPE))
11947   size = CALL_EXPR_ARG (exp, 3);
11948   if (!validate_arg (size, INTEGER_TYPE))
11950   fmt = CALL_EXPR_ARG (exp, 4);
11951   if (!validate_arg (fmt, POINTER_TYPE))
      /* Only fold when the destination object size is a compile-time
	 constant.  */
11954   if (! host_integerp (size, 1))
      /* An all-ones SIZE means the object size is unknown; otherwise we
	 must prove SIZE can hold the output before dropping the check.  */
11957   if (! integer_all_onesp (size))
11959       if (! host_integerp (len, 1))
11961 	  /* If LEN is not constant, try MAXLEN too.
11962 	     For MAXLEN only allow optimizing into non-_ocs function
11963 	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
11964 	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
      /* SIZE < MAXLEN: keep the checking variant.  */
11970       if (tree_int_cst_lt (size, maxlen))
11974   if (!init_target_chars ())
11977   /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
11978      or if format doesn't contain % chars or is "%s".  */
11979   if (! integer_zerop (flag))
11981       fmt_str = c_getstr (fmt);
11982       if (fmt_str == NULL)
11984       if (strchr (fmt_str, target_percent) != NULL
11985 	  && strcmp (fmt_str, target_percent_s))
11989   /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
11991   fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
11992 		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
      /* Rewrite the call, keeping DEST, LEN and FMT and dropping
	 FLAG/SIZE.  */
11996   return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
11999 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12000 FMT and ARG are the arguments to the call; we don't fold cases with
12001 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12003 Return NULL_TREE if no simplification was possible, otherwise return the
12004 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12005 code of the function to be simplified. */
12008 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12009 		     enum built_in_function fcode)
12011   tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12012   const char *fmt_str = NULL;
12014   /* If the return value is used, don't do the transformation.  */
12018   /* Verify the required arguments in the original call.  */
12019   if (!validate_arg (fmt, POINTER_TYPE))
12022   /* Check whether the format is a literal string constant.  */
12023   fmt_str = c_getstr (fmt);
12024   if (fmt_str == NULL)
12027   if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12029       /* If we're using an unlocked function, assume the other
12030 	 unlocked functions exist explicitly.  */
12031       fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12032       fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12036       fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12037       fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
      /* Translate '\n', '%', 'c', 's' into the target charset; bail if
	 that is not possible.  */
12040   if (!init_target_chars ())
12043   if (strcmp (fmt_str, target_percent_s) == 0
12044       || strchr (fmt_str, target_percent) == NULL)
12048       if (strcmp (fmt_str, target_percent_s) == 0)
	  /* va_list variants consume the "%s" argument from the va_list,
	     so the transformations below do not apply to them.  */
12050 	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12053 	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	  /* STR is the literal value of the "%s" argument, if any.  */
12056 	  str = c_getstr (arg);
12062       /* The format specifier doesn't contain any '%' characters.  */
12063       if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12069       /* If the string was "", printf does nothing.  */
12070       if (str[0] == '\0')
12071 	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12073       /* If the string has length of 1, call putchar.  */
12074       if (str[1] == '\0')
12076 	  /* Given printf("c"), (where c is any one character,)
12077 	     convert "c"[0] to an int and pass that to the replacement
12079 	  newarg = build_int_cst (NULL_TREE, str[0]);
12081 	  call = build_call_expr (fn_putchar, 1, newarg);
12085 	  /* If the string was "string\n", call puts("string").  */
12086 	  size_t len = strlen (str);
12087 	  if ((unsigned char)str[len - 1] == target_newline)
12089 	      /* Create a NUL-terminated string that's one char shorter
12090 		 than the original, stripping off the trailing '\n'.  */
12091 	      char *newstr = alloca (len);
12092 	      memcpy (newstr, str, len - 1);
12093 	      newstr[len - 1] = 0;
	      /* build_string_literal's LEN includes the terminating NUL.  */
12095 	      newarg = build_string_literal (len, newstr);
12097 	      call = build_call_expr (fn_puts, 1, newarg);
12100 	  /* We'd like to arrange to call fputs(string,stdout) here,
12101 	     but we need stdout and don't have a way to get it yet.  */
12106   /* The other optimizations can be done only on the non-va_list variants.  */
12107   else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12110   /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
12111   else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12113       if (!arg || !validate_arg (arg, POINTER_TYPE))
12116       call = build_call_expr (fn_puts, 1, arg);
12119   /* If the format specifier was "%c", call __builtin_putchar(arg).  */
12120   else if (strcmp (fmt_str, target_percent_c) == 0)
12122       if (!arg || !validate_arg (arg, INTEGER_TYPE))
12125       call = build_call_expr (fn_putchar, 1, arg);
      /* Convert the replacement call's result to printf's return type.  */
12131   return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12134 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12135 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12136 more than 3 arguments, and ARG may be null in the 2-argument case.
12138 Return NULL_TREE if no simplification was possible, otherwise return the
12139 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12140 code of the function to be simplified. */
12143 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12144 		      enum built_in_function fcode)
12146   tree fn_fputc, fn_fputs, call = NULL_TREE;
12147   const char *fmt_str = NULL;
12149   /* If the return value is used, don't do the transformation.  */
12153   /* Verify the required arguments in the original call.  */
12154   if (!validate_arg (fp, POINTER_TYPE))
12156   if (!validate_arg (fmt, POINTER_TYPE))
12159   /* Check whether the format is a literal string constant.  */
12160   fmt_str = c_getstr (fmt);
12161   if (fmt_str == NULL)
12164   if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12166       /* If we're using an unlocked function, assume the other
12167 	 unlocked functions exist explicitly.  */
12168       fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12169       fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12173       fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12174       fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
      /* Translate the characters we match against into the target
	 charset; bail if that is not possible.  */
12177   if (!init_target_chars ())
12180   /* If the format doesn't contain % args or %%, use strcpy.  */
12181   if (strchr (fmt_str, target_percent) == NULL)
12183       if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12187       /* If the format specifier was "", fprintf does nothing.  */
12188       if (fmt_str[0] == '\0')
12190 	  /* If FP has side-effects, just wait until gimplification is
12192 	  if (TREE_SIDE_EFFECTS (fp))
	  /* Otherwise the whole call folds to the constant 0.  */
12195 	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12198       /* When "string" doesn't contain %, replace all cases of
12199 	 fprintf (fp, string) with fputs (string, fp).  The fputs
12200 	 builtin will take care of special cases like length == 1.  */
12202       call = build_call_expr (fn_fputs, 2, fmt, fp);
12205   /* The other optimizations can be done only on the non-va_list variants.  */
12206   else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12209   /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
12210   else if (strcmp (fmt_str, target_percent_s) == 0)
12212       if (!arg || !validate_arg (arg, POINTER_TYPE))
12215       call = build_call_expr (fn_fputs, 2, arg, fp);
12218   /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
12219   else if (strcmp (fmt_str, target_percent_c) == 0)
12221       if (!arg || !validate_arg (arg, INTEGER_TYPE))
12224       call = build_call_expr (fn_fputc, 2, arg, fp);
      /* Convert the replacement call's result to fprintf's return type.  */
12229   return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12232 /* Initialize format string characters in the target charset. */
12235 init_target_chars (void)
      /* Translate the characters the printf folders match against into
	 the target execution character set.  A result of 0 apparently
	 signals that the character has no target representation -- the
	 checks below bail out in that case (TODO confirm against
	 lang_hooks.to_target_charset).  */
12240   target_newline = lang_hooks.to_target_charset ('\n');
12241   target_percent = lang_hooks.to_target_charset ('%');
12242   target_c = lang_hooks.to_target_charset ('c');
12243   target_s = lang_hooks.to_target_charset ('s');
12244   if (target_newline == 0 || target_percent == 0 || target_c == 0
      /* Pre-build the "%c", "%s" and "%s\n" strings used by the
	 format-matching code.  */
12248   target_percent_c[0] = target_percent;
12249   target_percent_c[1] = target_c;
12250   target_percent_c[2] = '\0';
12252   target_percent_s[0] = target_percent;
12253   target_percent_s[1] = target_s;
12254   target_percent_s[2] = '\0';
12256   target_percent_s_newline[0] = target_percent;
12257   target_percent_s_newline[1] = target_s;
12258   target_percent_s_newline[2] = target_newline;
12259   target_percent_s_newline[3] = '\0';
12266 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12267 and no overflow/underflow occurred. INEXACT is true if M was not
12268 exactly calculated. TYPE is the tree type for the result. This
12269 function assumes that you cleared the MPFR flags and then
12270 calculated M to see if anything subsequently set a flag prior to
12271 entering this function. Return NULL_TREE if any checks fail. */
12274 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12276   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12277      overflow/underflow occurred.  If -frounding-math, proceed iff the
12278      result of calling FUNC was exact.  */
12279   if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12280       && (!flag_rounding_math || !inexact))
12282       REAL_VALUE_TYPE rr;
      /* Convert the MPFR value to GCC's internal representation,
	 rounding to nearest.  */
12284       real_from_mpfr (&rr, m, type, GMP_RNDN);
12285       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12286 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
12287 	 but the mpfr_t is not, then we underflowed in the
12289       if (!real_isnan (&rr) && !real_isinf (&rr)
12290 	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12292 	  REAL_VALUE_TYPE rmode;
	  /* Round-trip through TYPE's machine mode to check the value is
	     exactly representable in that mode.  */
12294 	  real_convert (&rmode, TYPE_MODE (type), &rr);
12295 	  /* Proceed iff the specified mode can hold the value.  */
12296 	  if (real_identical (&rmode, &rr))
12297 	    return build_real (type, rmode);
12303 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12304 FUNC on it and return the resulting value as a tree with type TYPE.
12305 If MIN and/or MAX are not NULL, then the supplied ARG must be
12306 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12307 acceptable values, otherwise they are not. The mpfr precision is
12308 set to the precision of TYPE. We assume that function FUNC returns
12309 zero if the result could be calculated exactly within the requested
12313 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12314 	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12317   tree result = NULL_TREE;
12321   /* To proceed, MPFR must exactly represent the target floating point
12322      format, which only happens when the target base equals two.  */
12323   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12324       && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12326       const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
      /* Reject NaN/Inf and enforce the optional [MIN,MAX] domain;
	 INCLUSIVE selects whether the endpoints themselves are valid.  */
12328       if (!real_isnan (ra) && !real_isinf (ra)
12329 	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12330 	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
	  /* Evaluate FUNC at exactly the precision of TYPE, with the
	     exception flags cleared so do_mpfr_ckconv can check them.  */
12332 	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12336 	  mpfr_init2 (m, prec);
12337 	  mpfr_from_real (m, ra, GMP_RNDN);
12338 	  mpfr_clear_flags ();
12339 	  inexact = func (m, m, GMP_RNDN);
12340 	  result = do_mpfr_ckconv (m, type, inexact);
12348 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12349 FUNC on it and return the resulting value as a tree with type TYPE.
12350 The mpfr precision is set to the precision of TYPE. We assume that
12351 function FUNC returns zero if the result could be calculated
12352 exactly within the requested precision. */
12355 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12356 	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12358   tree result = NULL_TREE;
12363   /* To proceed, MPFR must exactly represent the target floating point
12364      format, which only happens when the target base equals two.  */
12365   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12366       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12367       && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12369       const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12370       const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      /* Both arguments must be finite (no NaN/Inf).  */
12372       if (!real_isnan (ra1) && !real_isinf (ra1)
12373 	  && !real_isnan (ra2) && !real_isinf (ra2))
	  /* Evaluate FUNC at exactly TYPE's precision with the MPFR
	     exception flags cleared, then validate the result via
	     do_mpfr_ckconv.  */
12375 	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12379 	  mpfr_inits2 (prec, m1, m2, NULL);
12380 	  mpfr_from_real (m1, ra1, GMP_RNDN);
12381 	  mpfr_from_real (m2, ra2, GMP_RNDN);
12382 	  mpfr_clear_flags ();
12383 	  inexact = func (m1, m1, m2, GMP_RNDN);
12384 	  result = do_mpfr_ckconv (m1, type, inexact);
12385 	  mpfr_clears (m1, m2, NULL);
12392 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12393 FUNC on it and return the resulting value as a tree with type TYPE.
12394 The mpfr precision is set to the precision of TYPE. We assume that
12395 function FUNC returns zero if the result could be calculated
12396 exactly within the requested precision. */
12399 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12400 	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12402   tree result = NULL_TREE;
12408   /* To proceed, MPFR must exactly represent the target floating point
12409      format, which only happens when the target base equals two.  */
12410   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12411       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12412       && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12413       && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12415       const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12416       const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12417       const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
      /* All three arguments must be finite (no NaN/Inf).  */
12419       if (!real_isnan (ra1) && !real_isinf (ra1)
12420 	  && !real_isnan (ra2) && !real_isinf (ra2)
12421 	  && !real_isnan (ra3) && !real_isinf (ra3))
	  /* Evaluate FUNC at exactly TYPE's precision with the MPFR
	     exception flags cleared, then validate the result via
	     do_mpfr_ckconv.  */
12423 	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12427 	  mpfr_inits2 (prec, m1, m2, m3, NULL);
12428 	  mpfr_from_real (m1, ra1, GMP_RNDN);
12429 	  mpfr_from_real (m2, ra2, GMP_RNDN);
12430 	  mpfr_from_real (m3, ra3, GMP_RNDN);
12431 	  mpfr_clear_flags ();
12432 	  inexact = func (m1, m1, m2, m3, GMP_RNDN);
12433 	  result = do_mpfr_ckconv (m1, type, inexact);
12434 	  mpfr_clears (m1, m2, m3, NULL);
12441 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12442 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12443 If ARG_SINP and ARG_COSP are NULL then the result is returned
12444 as a complex value.
12445 The type is taken from the type of ARG and is used for setting the
12446 precision of the calculation and results. */
12449 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12451 tree const type = TREE_TYPE (arg);
12452 tree result = NULL_TREE;
12456 /* To proceed, MPFR must exactly represent the target floating point
12457 format, which only happens when the target base equals two. */
12458 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12459 && TREE_CODE (arg) == REAL_CST
12460 && !TREE_OVERFLOW (arg))
12462 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12464 if (!real_isnan (ra) && !real_isinf (ra))
12466 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12467 tree result_s, result_c;
12471 mpfr_inits2 (prec, m, ms, mc, NULL);
12472 mpfr_from_real (m, ra, GMP_RNDN);
12473 mpfr_clear_flags ();
12474 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12475 result_s = do_mpfr_ckconv (ms, type, inexact);
12476 result_c = do_mpfr_ckconv (mc, type, inexact);
12477 mpfr_clears (m, ms, mc, NULL);
12478 if (result_s && result_c)
12480 /* If we are to return in a complex value do so. */
12481 if (!arg_sinp && !arg_cosp)
12482 return build_complex (build_complex_type (type),
12483 result_c, result_s);
12485 /* Dereference the sin/cos pointer arguments. */
12486 arg_sinp = build_fold_indirect_ref (arg_sinp);
12487 arg_cosp = build_fold_indirect_ref (arg_cosp);
12488 /* Proceed if valid pointer type were passed in. */
12489 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12490 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12492 /* Set the values. */
12493 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12495 TREE_SIDE_EFFECTS (result_s) = 1;
12496 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12498 TREE_SIDE_EFFECTS (result_c) = 1;
12499 /* Combine the assignments into a compound expr. */
12500 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12501 result_s, result_c));