1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
/* If the target does not define a varargs padding direction, default to
   padding downward exactly when the target is big-endian.  */
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
303 exp = TREE_OPERAND (exp, 0);
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
344 inner = MIN (inner, (offset_factor & -offset_factor));
347 inner = MIN (inner, BITS_PER_UNIT);
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 align = MIN (align, inner);
360 return MIN (align, max_align);
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
385 c_strlen (tree src, int only_value)
388 HOST_WIDE_INT offset;
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
422 for (i = 0; i < max; i++)
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
440 else if (! host_integerp (offset_node, 0))
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
475 src = string_constant (src, &offset_node);
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
492 c_readstr (const char *str, enum machine_mode mode)
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
603 SETUP_FRAME_ADDRESSES ();
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
741 if (i == ARRAY_SIZE (elim_regs))
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (crtl->args.internal_arg_pointer,
747 copy_to_reg (get_arg_pointer_save_area ()));
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 /* DRAP is needed for stack realign if longjmp is expanded to current
783 if (SUPPORTS_STACK_ALIGNMENT)
784 crtl->need_drap = true;
786 if (setjmp_alias_set == -1)
787 setjmp_alias_set = new_alias_set ();
789 buf_addr = convert_memory_address (Pmode, buf_addr);
791 buf_addr = force_reg (Pmode, buf_addr);
793 /* We used to store value in static_chain_rtx, but that fails if pointers
794 are smaller than integers. We instead require that the user must pass
795 a second argument of 1, because that is what builtin_setjmp will
796 return. This also makes EH slightly more efficient, since we are no
797 longer copying around a value that we don't care about. */
798 gcc_assert (value == const1_rtx);
800 last = get_last_insn ();
801 #ifdef HAVE_builtin_longjmp
802 if (HAVE_builtin_longjmp)
803 emit_insn (gen_builtin_longjmp (buf_addr));
807 fp = gen_rtx_MEM (Pmode, buf_addr);
808 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
809 GET_MODE_SIZE (Pmode)));
811 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
812 2 * GET_MODE_SIZE (Pmode)));
813 set_mem_alias_set (fp, setjmp_alias_set);
814 set_mem_alias_set (lab, setjmp_alias_set);
815 set_mem_alias_set (stack, setjmp_alias_set);
817 /* Pick up FP, label, and SP from the block and jump. This code is
818 from expand_goto in stmt.c; see there for detailed comments. */
819 #ifdef HAVE_nonlocal_goto
820 if (HAVE_nonlocal_goto)
821 /* We have to pass a value to the nonlocal_goto pattern that will
822 get copied into the static_chain pointer, but it does not matter
823 what that value is, because builtin_setjmp does not use it. */
824 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
828 lab = copy_to_reg (lab);
830 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
831 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
833 emit_move_insn (hard_frame_pointer_rtx, fp);
834 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
836 emit_use (hard_frame_pointer_rtx);
837 emit_use (stack_pointer_rtx);
838 emit_indirect_jump (lab);
842 /* Search backwards and mark the jump insn as a non-local goto.
843 Note that this precludes the use of __builtin_longjmp to a
844 __builtin_setjmp target in the same function. However, we've
845 already cautioned the user that these functions are for
846 internal exception handling use only. */
847 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
849 gcc_assert (insn != last);
853 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
856 else if (CALL_P (insn))
861 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
862 and the address of the save area. */
865 expand_builtin_nonlocal_goto (tree exp)
867 tree t_label, t_save_area;
868 rtx r_label, r_save_area, r_fp, r_sp, insn;
870 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
873 t_label = CALL_EXPR_ARG (exp, 0);
874 t_save_area = CALL_EXPR_ARG (exp, 1);
876 r_label = expand_normal (t_label);
877 r_label = convert_memory_address (Pmode, r_label);
878 r_save_area = expand_normal (t_save_area);
879 r_save_area = convert_memory_address (Pmode, r_save_area);
880 /* Copy the address of the save location to a register just in case it was based
881 on the frame pointer. */
882 r_save_area = copy_to_reg (r_save_area);
883 r_fp = gen_rtx_MEM (Pmode, r_save_area);
884 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
885 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
887 crtl->has_nonlocal_goto = 1;
889 #ifdef HAVE_nonlocal_goto
890 /* ??? We no longer need to pass the static chain value, afaik. */
891 if (HAVE_nonlocal_goto)
892 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
896 r_label = copy_to_reg (r_label);
898 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
899 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
901 /* Restore frame pointer for containing function.
902 This sets the actual hard register used for the frame pointer
903 to the location of the function's incoming static chain info.
904 The non-local goto handler will then adjust it to contain the
905 proper value and reload the argument pointer, if needed. */
906 emit_move_insn (hard_frame_pointer_rtx, r_fp);
907 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
909 /* USE of hard_frame_pointer_rtx added for consistency;
910 not clear if really needed. */
911 emit_use (hard_frame_pointer_rtx);
912 emit_use (stack_pointer_rtx);
914 /* If the architecture is using a GP register, we must
915 conservatively assume that the target function makes use of it.
916 The prologue of functions with nonlocal gotos must therefore
917 initialize the GP register to the appropriate value, and we
918 must then make sure that this value is live at the point
919 of the jump. (Note that this doesn't necessarily apply
920 to targets with a nonlocal_goto pattern; they are free
921 to implement it in their own way. Note also that this is
922 a no-op if the GP register is a global invariant.) */
923 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
924 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
925 emit_use (pic_offset_table_rtx);
927 emit_indirect_jump (r_label);
930 /* Search backwards to the jump insn and mark it as a
932 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
936 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
939 else if (CALL_P (insn))
946 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
947 (not all will be used on all machines) that was passed to __builtin_setjmp.
948 It updates the stack pointer in that block to correspond to the current
952 expand_builtin_update_setjmp_buf (rtx buf_addr)
954 enum machine_mode sa_mode = Pmode;
958 #ifdef HAVE_save_stack_nonlocal
959 if (HAVE_save_stack_nonlocal)
960 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
962 #ifdef STACK_SAVEAREA_MODE
963 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
967 = gen_rtx_MEM (sa_mode,
970 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
974 emit_insn (gen_setjmp ());
977 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
980 /* Expand a call to __builtin_prefetch. For a target that does not support
981 data prefetch, evaluate the memory address argument in case it has side
/* NOTE(review): interior lines are elided in this listing; braces and
   some statements are missing below.  */
985 expand_builtin_prefetch (tree exp)
987 tree arg0, arg1, arg2;
/* Only the first (pointer) argument is mandatory.  */
991 if (!validate_arglist (exp, POINTER_TYPE, 0))
994 arg0 = CALL_EXPR_ARG (exp, 0);
996 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
997 zero (read) and argument 2 (locality) defaults to 3 (high degree of
999 nargs = call_expr_nargs (exp);
1001 arg1 = CALL_EXPR_ARG (exp, 1);
1003 arg1 = integer_zero_node;
1005 arg2 = CALL_EXPR_ARG (exp, 2);
1007 arg2 = build_int_cst (NULL_TREE, 3);
1009 /* Argument 0 is an address. */
1010 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1012 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1013 if (TREE_CODE (arg1) != INTEGER_CST)
1015 error ("second argument to %<__builtin_prefetch%> must be a constant");
/* Recover by treating a non-constant flag as the default (read).  */
1016 arg1 = integer_zero_node;
1018 op1 = expand_normal (arg1);
1019 /* Argument 1 must be either zero or one. */
1020 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1022 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1027 /* Argument 2 (locality) must be a compile-time constant int. */
1028 if (TREE_CODE (arg2) != INTEGER_CST)
1030 error ("third argument to %<__builtin_prefetch%> must be a constant");
1031 arg2 = integer_zero_node;
1033 op2 = expand_normal (arg2);
1034 /* Argument 2 must be 0, 1, 2, or 3. */
1035 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1037 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
/* If the target has a prefetch insn, force the address into the mode
   and form its operand predicate requires before emitting it.  */
1041 #ifdef HAVE_prefetch
1044 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1046 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1047 || (GET_MODE (op0) != Pmode))
1049 op0 = convert_memory_address (Pmode, op0);
1050 op0 = force_reg (Pmode, op0);
1052 emit_insn (gen_prefetch (op0, op1, op2));
1056 /* Don't do anything with direct references to volatile memory, but
1057 generate code to handle other side effects. */
/* Fallback when no prefetch insn was emitted: evaluate the address for
   its side effects only.  */
1058 if (!MEM_P (op0) && side_effects_p (op0))
1062 /* Get a MEM rtx for expression EXP which is the address of an operand
1063 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1064 the maximum length of the block of memory that might be accessed or
/* NOTE(review): interior lines are elided in this listing (declarations,
   braces, several conditions).  Comments below describe only the visible
   statements.  */
1068 get_memory_rtx (tree exp, tree len)
1070 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1071 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1073 /* Get an expression we can use to find the attributes to assign to MEM.
1074 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1075 we can. First remove any nops. */
1076 while (CONVERT_EXPR_P (exp)
1077 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1078 exp = TREE_OPERAND (exp, 0);
1080 if (TREE_CODE (exp) == ADDR_EXPR)
1081 exp = TREE_OPERAND (exp, 0);
1082 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
/* Build a synthetic dereference so set_mem_attributes can see the
   pointed-to object.  */
1083 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1087 /* Honor attributes derived from exp, except for the alias set
1088 (as builtin stringops may alias with anything) and the size
1089 (as stringops may access multiple array elements). */
1092 set_mem_attributes (mem, exp, 0);
1094 /* Allow the string and memory builtins to overflow from one
1095 field into another, see http://gcc.gnu.org/PR23561.
1096 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1097 memory accessed by the string or memory builtin will fit
1098 within the field. */
1099 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1101 tree mem_expr = MEM_EXPR (mem);
/* -1 means "unknown" for both the access offset and length.  */
1102 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers until the innermost COMPONENT_REF is reached.  */
1105 while (TREE_CODE (inner) == ARRAY_REF
1106 || CONVERT_EXPR_P (inner)
1107 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1108 || TREE_CODE (inner) == SAVE_EXPR)
1109 inner = TREE_OPERAND (inner, 0);
1111 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1113 if (MEM_OFFSET (mem)
1114 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1115 offset = INTVAL (MEM_OFFSET (mem));
/* Only trust LEN when it is a host-representable integer constant.  */
1117 if (offset >= 0 && len && host_integerp (len, 0))
1118 length = tree_low_cst (len, 0);
/* Walk outward over nested COMPONENT_REFs, checking at each level
   whether [offset, offset+length) fits inside the field.  */
1120 while (TREE_CODE (inner) == COMPONENT_REF)
1122 tree field = TREE_OPERAND (inner, 1);
1123 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1124 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1126 /* Bitfields are generally not byte-addressable. */
1127 gcc_assert (!DECL_BIT_FIELD (field)
1128 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1129 % BITS_PER_UNIT) == 0
1130 && host_integerp (DECL_SIZE (field), 0)
1131 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1132 % BITS_PER_UNIT) == 0));
1134 /* If we can prove that the memory starting at XEXP (mem, 0) and
1135 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1136 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1137 fields without DECL_SIZE_UNIT like flexible array members. */
1139 && DECL_SIZE_UNIT (field)
1140 && host_integerp (DECL_SIZE_UNIT (field), 0))
1143 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1146 && offset + length <= size)
/* NOTE(review): the break/continue logic between these checks is
   elided in this listing — consult the full source.  */
1151 && host_integerp (DECL_FIELD_OFFSET (field), 0))
/* Translate the in-field offset to an offset within the parent
   object before moving one level out.  */
1152 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1153 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1161 mem_expr = TREE_OPERAND (mem_expr, 0);
1162 inner = TREE_OPERAND (inner, 0);
1165 if (mem_expr == NULL)
/* Record the (possibly weakened) expr/offset back on the MEM.  */
1167 if (mem_expr != MEM_EXPR (mem))
1169 set_mem_expr (mem, mem_expr);
1170 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and may span multiple elements, so
   clear the alias set and size attributes (see header comment).  */
1173 set_mem_alias_set (mem, 0);
1174 set_mem_size (mem, NULL_RTX);
1180 /* Built-in functions to perform an untyped call and return. */
1182 /* For each register that may be used for calling a function, this
1183 gives a mode used to copy the register's value. VOIDmode indicates
1184 the register is not used for calling a function. If the machine
1185 has register windows, this gives only the outbound registers.
1186 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_args_size below.  */
1187 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1189 /* For each register that may be used for returning values, this gives
1190 a mode used to copy the register's value. VOIDmode indicates the
1191 register is not used for returning values. If the machine has
1192 register windows, this gives only the outbound registers.
1193 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_result_size below.  */
1194 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1196 /* For each register that may be used for calling a function, this
1197 gives the offset of that register into the block returned by
1198 __builtin_apply_args. 0 indicates that the register is not
1199 used for calling a function. */
/* Filled in lazily by apply_args_size below.  */
1200 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1202 /* Return the size required for the block returned by __builtin_apply_args,
1203 and initialize apply_args_mode. */
/* NOTE(review): interior lines are elided in this listing; in particular
   the early-return guard on the cached SIZE is missing below.  */
1206 apply_args_size (void)
/* -1 is the "not yet computed" sentinel for the cached result.  */
1208 static int size = -1;
1211 enum machine_mode mode;
1213 /* The values computed by this function never change. */
1216 /* The first value is the incoming arg-pointer. */
1217 size = GET_MODE_SIZE (Pmode);
1219 /* The second value is the structure value address unless this is
1220 passed as an "invisible" first argument. */
1221 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1222 size += GET_MODE_SIZE (Pmode);
/* Lay out a slot for every hard register that can carry arguments,
   aligning each slot to its mode's natural alignment.  */
1224 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1225 if (FUNCTION_ARG_REGNO_P (regno))
1227 mode = reg_raw_mode[regno];
1229 gcc_assert (mode != VOIDmode);
1231 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1232 if (size % align != 0)
1233 size = CEIL (size, align) * align;
1234 apply_args_reg_offset[regno] = size;
1235 size += GET_MODE_SIZE (mode);
1236 apply_args_mode[regno] = mode;
/* Non-argument registers get VOIDmode / offset 0 markers.  */
1240 apply_args_mode[regno] = VOIDmode;
1241 apply_args_reg_offset[regno] = 0;
1247 /* Return the size required for the block returned by __builtin_apply,
1248 and initialize apply_result_mode. */
/* NOTE(review): interior lines are elided in this listing; the early
   return on the cached SIZE and some braces are missing below.  */
1251 apply_result_size (void)
/* -1 is the "not yet computed" sentinel for the cached result.  */
1253 static int size = -1;
1255 enum machine_mode mode;
1257 /* The values computed by this function never change. */
/* Lay out a slot for every hard register that can carry return values,
   aligning each slot to its mode's natural alignment.  */
1262 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1263 if (FUNCTION_VALUE_REGNO_P (regno))
1265 mode = reg_raw_mode[regno];
1267 gcc_assert (mode != VOIDmode);
1269 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1270 if (size % align != 0)
1271 size = CEIL (size, align) * align;
1272 size += GET_MODE_SIZE (mode);
1273 apply_result_mode[regno] = mode;
1276 apply_result_mode[regno] = VOIDmode;
1278 /* Allow targets that use untyped_call and untyped_return to override
1279 the size so that machine-specific information can be stored here. */
1280 #ifdef APPLY_RESULT_SIZE
1281 size = APPLY_RESULT_SIZE;
1287 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1288 /* Create a vector describing the result block RESULT. If SAVEP is true,
1289 the result block is used to save the values; otherwise it is used to
1290 restore the values. */
/* NOTE(review): interior lines (return type, braces, initializations of
   SIZE/NELTS) are elided in this listing.  */
1293 result_vector (int savep, rtx result)
1295 int regno, size, align, nelts;
1296 enum machine_mode mode;
1298 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
/* Build one SET per value register, using the same offset/alignment
   layout as apply_result_size.  */
1301 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1302 if ((mode = apply_result_mode[regno]) != VOIDmode)
1304 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1305 if (size % align != 0)
1306 size = CEIL (size, align) * align;
/* When restoring, map back to the inbound register on targets with
   register windows.  */
1307 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1308 mem = adjust_address (result, mode, size);
1309 savevec[nelts++] = (savep
1310 ? gen_rtx_SET (VOIDmode, mem, reg)
1311 : gen_rtx_SET (VOIDmode, reg, mem));
1312 size += GET_MODE_SIZE (mode);
1314 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1316 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1318 /* Save the state required to perform an untyped call with the same
1319 arguments as were passed to the current function. */
/* NOTE(review): interior lines are elided in this listing (return type,
   braces, the REGISTERS/TEM declarations, #endif lines).  Returns the
   address of a stack block laid out per apply_args_size.  */
1322 expand_builtin_apply_args_1 (void)
1325 int size, align, regno;
1326 enum machine_mode mode;
1327 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1329 /* Create a block where the arg-pointer, structure value address,
1330 and argument registers can be saved. */
1331 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1333 /* Walk past the arg-pointer and structure value address. */
1334 size = GET_MODE_SIZE (Pmode);
1335 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1336 size += GET_MODE_SIZE (Pmode);
1338 /* Save each register used in calling a function to the block. */
1339 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1340 if ((mode = apply_args_mode[regno]) != VOIDmode)
1342 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1343 if (size % align != 0)
1344 size = CEIL (size, align) * align;
/* Use the inbound register number; on register-window targets the
   outbound and inbound numbers differ.  */
1346 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1348 emit_move_insn (adjust_address (registers, mode, size), tem);
1349 size += GET_MODE_SIZE (mode);
1352 /* Save the arg pointer to the block. */
1353 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1354 #ifdef STACK_GROWS_DOWNWARD
1355 /* We need the pointer as the caller actually passed them to us, not
1356 as we might have pretended they were passed. Make sure it's a valid
1357 operand, as emit_move_insn isn't expected to handle a PLUS. */
1359 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1362 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1364 size = GET_MODE_SIZE (Pmode);
1366 /* Save the structure value address unless this is passed as an
1367 "invisible" first argument. */
1368 if (struct_incoming_value)
1370 emit_move_insn (adjust_address (registers, Pmode, size),
1371 copy_to_reg (struct_incoming_value));
1372 size += GET_MODE_SIZE (Pmode);
1375 /* Return the address of the block. */
1376 return copy_addr_to_reg (XEXP (registers, 0));
1379 /* __builtin_apply_args returns block of memory allocated on
1380 the stack into which is stored the arg pointer, structure
1381 value address, static chain, and all the registers that might
1382 possibly be used in performing a function call. The code is
1383 moved to the start of the function so the incoming values are
/* NOTE(review): interior lines are elided in this listing (return type,
   braces, the TEMP/SEQ declarations, start_sequence/end_sequence
   calls).  */
1387 expand_builtin_apply_args (void)
1389 /* Don't do __builtin_apply_args more than once in a function.
1390 Save the result of the first call and reuse it. */
1391 if (apply_args_value != 0)
1392 return apply_args_value;
1394 /* When this function is called, it means that registers must be
1395 saved on entry to this function. So we migrate the
1396 call to the first insn of this function. */
1401 temp = expand_builtin_apply_args_1 ();
/* Cache the result so later calls in this function reuse it.  */
1405 apply_args_value = temp;
1407 /* Put the insns after the NOTE that starts the function.
1408 If this is inside a start_sequence, make the outer-level insn
1409 chain current, so the code is placed at the start of the
1411 push_topmost_sequence ();
1412 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1413 pop_topmost_sequence ();
1418 /* Perform an untyped call and save the state required to perform an
1419 untyped return of whatever value was returned by the given function. */
/* NOTE(review): interior lines are elided in this listing (return type,
   braces, several #else/#endif lines and else-branches).  FUNCTION is
   the callee address, ARGUMENTS the block built by __builtin_apply_args,
   ARGSIZE the byte count of stack arguments to copy.  Returns the
   address of a block holding the callee's return registers.  */
1422 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1424 int size, align, regno;
1425 enum machine_mode mode;
1426 rtx incoming_args, result, reg, dest, src, call_insn;
1427 rtx old_stack_level = 0;
1428 rtx call_fusage = 0;
1429 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1431 arguments = convert_memory_address (Pmode, arguments);
1433 /* Create a block where the return registers can be saved. */
1434 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1436 /* Fetch the arg pointer from the ARGUMENTS block. */
1437 incoming_args = gen_reg_rtx (Pmode);
1438 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1439 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved arg pointer marks the END of the
   argument area, so step back by ARGSIZE to find its start.  */
1440 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1441 incoming_args, 0, OPTAB_LIB_WIDEN);
1444 /* Push a new argument block and copy the arguments. Do not allow
1445 the (potential) memcpy call below to interfere with our stack
1447 do_pending_stack_adjust ();
1450 /* Save the stack with nonlocal if available. */
1451 #ifdef HAVE_save_stack_nonlocal
1452 if (HAVE_save_stack_nonlocal)
1453 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1456 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1458 /* Allocate a block of memory onto the stack and copy the memory
1459 arguments to the outgoing arguments address. */
1460 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1462 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1463 may have already set current_function_calls_alloca to true.
1464 current_function_calls_alloca won't be set if argsize is zero,
1465 so we have to guarantee need_drap is true here. */
1466 if (SUPPORTS_STACK_ALIGNMENT)
1467 crtl->need_drap = true;
1469 dest = virtual_outgoing_args_rtx;
1470 #ifndef STACK_GROWS_DOWNWARD
1471 if (GET_CODE (argsize) == CONST_INT)
1472 dest = plus_constant (dest, -INTVAL (argsize));
1474 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1476 dest = gen_rtx_MEM (BLKmode, dest);
1477 set_mem_align (dest, PARM_BOUNDARY);
1478 src = gen_rtx_MEM (BLKmode, incoming_args);
1479 set_mem_align (src, PARM_BOUNDARY);
/* Copy the caller's saved stack arguments into our outgoing area.  */
1480 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1482 /* Refer to the argument block. */
1484 arguments = gen_rtx_MEM (BLKmode, arguments);
1485 set_mem_align (arguments, PARM_BOUNDARY);
1487 /* Walk past the arg-pointer and structure value address. */
1488 size = GET_MODE_SIZE (Pmode);
1490 size += GET_MODE_SIZE (Pmode);
1492 /* Restore each of the registers previously saved. Make USE insns
1493 for each of these registers for use in making the call. */
1494 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1495 if ((mode = apply_args_mode[regno]) != VOIDmode)
1497 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1498 if (size % align != 0)
1499 size = CEIL (size, align) * align;
1500 reg = gen_rtx_REG (mode, regno);
1501 emit_move_insn (reg, adjust_address (arguments, mode, size));
/* Record the register in CALL_FUSAGE so the call is known to use it.  */
1502 use_reg (&call_fusage, reg);
1503 size += GET_MODE_SIZE (mode);
1506 /* Restore the structure value address unless this is passed as an
1507 "invisible" first argument. */
1508 size = GET_MODE_SIZE (Pmode);
1511 rtx value = gen_reg_rtx (Pmode);
1512 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1513 emit_move_insn (struct_value, value);
1514 if (REG_P (struct_value))
1515 use_reg (&call_fusage, struct_value);
1516 size += GET_MODE_SIZE (Pmode);
1519 /* All arguments and registers used for the call are set up by now! */
1520 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1522 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1523 and we don't want to load it into a register as an optimization,
1524 because prepare_call_address already did it if it should be done. */
1525 if (GET_CODE (function) != SYMBOL_REF)
1526 function = memory_address (FUNCTION_MODE, function);
1528 /* Generate the actual call instruction and save the return value. */
1529 #ifdef HAVE_untyped_call
1530 if (HAVE_untyped_call)
1531 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1532 result, result_vector (1, result)));
1535 #ifdef HAVE_call_value
1536 if (HAVE_call_value)
1540 /* Locate the unique return register. It is not possible to
1541 express a call that sets more than one return register using
1542 call_value; use untyped_call for that. In fact, untyped_call
1543 only needs to save the return registers in the given block. */
1544 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1545 if ((mode = apply_result_mode[regno]) != VOIDmode)
1547 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1549 valreg = gen_rtx_REG (mode, regno);
1552 emit_call_insn (GEN_CALL_VALUE (valreg,
1553 gen_rtx_MEM (FUNCTION_MODE, function),
1554 const0_rtx, NULL_RTX, const0_rtx));
1556 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1562 /* Find the CALL insn we just emitted, and attach the register usage
1564 call_insn = last_call_insn ();
1565 add_function_usage_to (call_insn, call_fusage);
1567 /* Restore the stack. */
1568 #ifdef HAVE_save_stack_nonlocal
1569 if (HAVE_save_stack_nonlocal)
1570 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX)
1573 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1577 /* Return the address of the result block. */
1578 result = copy_addr_to_reg (XEXP (result, 0));
1579 return convert_memory_address (ptr_mode, result);
1582 /* Perform an untyped return. */
/* NOTE(review): interior lines are elided in this listing (return type,
   braces, REG declaration, end_sequence, #else/#endif).  RESULT is the
   address of the block produced by __builtin_apply.  */
1585 expand_builtin_return (rtx result)
1587 int size, align, regno;
1588 enum machine_mode mode;
1590 rtx call_fusage = 0;
1592 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1594 apply_result_size ();
1595 result = gen_rtx_MEM (BLKmode, result);
1597 #ifdef HAVE_untyped_return
1598 if (HAVE_untyped_return)
1600 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1606 /* Restore the return value and note that each value is used. */
/* Same offset/alignment walk as apply_result_size.  */
1608 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1609 if ((mode = apply_result_mode[regno]) != VOIDmode)
1611 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1612 if (size % align != 0)
1613 size = CEIL (size, align) * align;
1614 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1615 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USE insns for the restored registers in a sequence.  */
1617 push_to_sequence (call_fusage);
1619 call_fusage = get_insns ();
1621 size += GET_MODE_SIZE (mode);
1624 /* Put the USE insns before the return. */
1625 emit_insn (call_fusage);
1627 /* Return whatever values was restored by jumping directly to the end
1629 expand_naked_return ();
1632 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code to the libgcc type_class enumeration.  */
1634 static enum type_class
1635 type_to_class (tree type)
1637 switch (TREE_CODE (type))
1639 case VOID_TYPE: return void_type_class;
1640 case INTEGER_TYPE: return integer_type_class;
1641 case ENUMERAL_TYPE: return enumeral_type_class;
1642 case BOOLEAN_TYPE: return boolean_type_class;
1643 case POINTER_TYPE: return pointer_type_class;
1644 case REFERENCE_TYPE: return reference_type_class;
1645 case OFFSET_TYPE: return offset_type_class;
1646 case REAL_TYPE: return real_type_class;
1647 case COMPLEX_TYPE: return complex_type_class;
1648 case FUNCTION_TYPE: return function_type_class;
1649 case METHOD_TYPE: return method_type_class;
1650 case RECORD_TYPE: return record_type_class;
/* NOTE(review): a case between RECORD_TYPE and QUAL_UNION_TYPE
   (presumably UNION_TYPE) is elided in this listing.  */
1652 case QUAL_UNION_TYPE: return union_type_class;
/* TYPE_STRING_FLAG distinguishes char arrays ("strings") from
   other arrays.  */
1653 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1654 ? string_type_class : array_type_class);
1655 case LANG_TYPE: return lang_type_class;
1656 default: return no_type_class;
1660 /* Expand a call EXP to __builtin_classify_type. */
/* Returns the classification of the first argument's type as a
   constant; with no arguments, returns no_type_class.  */
1663 expand_builtin_classify_type (tree exp)
1665 if (call_expr_nargs (exp))
1666 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1667 return GEN_INT (no_type_class);
1670 /* This helper macro, meant to be used in mathfn_built_in below,
1671 determines which among a set of three builtin math functions is
1672 appropriate for a given type mode. The `F' and `L' cases are
1673 automatically generated from the `double' case. */
1674 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1675 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1676 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1677 fcodel = BUILT_IN_MATHFN##L ; break;
1678 /* Similar to above, but appends _R after any F/L suffix. */
1679 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1680 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1681 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1682 fcodel = BUILT_IN_MATHFN##L_R ; break;
1684 /* Return mathematic function equivalent to FN but operating directly
1685 on TYPE, if available. If IMPLICIT is true find the function in
1686 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1687 can't do the conversion, return zero. */
/* NOTE(review): interior lines are elided in this listing (return type,
   braces, the switch head and its default/NULL_TREE returns).  */
1690 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1692 tree const *const fn_arr
1693 = implicit ? implicit_built_in_decls : built_in_decls;
1694 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expands to the three cases (double/float/long double)
   and records the corresponding codes in fcode/fcodef/fcodel.  */
1698 CASE_MATHFN (BUILT_IN_ACOS)
1699 CASE_MATHFN (BUILT_IN_ACOSH)
1700 CASE_MATHFN (BUILT_IN_ASIN)
1701 CASE_MATHFN (BUILT_IN_ASINH)
1702 CASE_MATHFN (BUILT_IN_ATAN)
1703 CASE_MATHFN (BUILT_IN_ATAN2)
1704 CASE_MATHFN (BUILT_IN_ATANH)
1705 CASE_MATHFN (BUILT_IN_CBRT)
1706 CASE_MATHFN (BUILT_IN_CEIL)
1707 CASE_MATHFN (BUILT_IN_CEXPI)
1708 CASE_MATHFN (BUILT_IN_COPYSIGN)
1709 CASE_MATHFN (BUILT_IN_COS)
1710 CASE_MATHFN (BUILT_IN_COSH)
1711 CASE_MATHFN (BUILT_IN_DREM)
1712 CASE_MATHFN (BUILT_IN_ERF)
1713 CASE_MATHFN (BUILT_IN_ERFC)
1714 CASE_MATHFN (BUILT_IN_EXP)
1715 CASE_MATHFN (BUILT_IN_EXP10)
1716 CASE_MATHFN (BUILT_IN_EXP2)
1717 CASE_MATHFN (BUILT_IN_EXPM1)
1718 CASE_MATHFN (BUILT_IN_FABS)
1719 CASE_MATHFN (BUILT_IN_FDIM)
1720 CASE_MATHFN (BUILT_IN_FLOOR)
1721 CASE_MATHFN (BUILT_IN_FMA)
1722 CASE_MATHFN (BUILT_IN_FMAX)
1723 CASE_MATHFN (BUILT_IN_FMIN)
1724 CASE_MATHFN (BUILT_IN_FMOD)
1725 CASE_MATHFN (BUILT_IN_FREXP)
1726 CASE_MATHFN (BUILT_IN_GAMMA)
1727 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1728 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1729 CASE_MATHFN (BUILT_IN_HYPOT)
1730 CASE_MATHFN (BUILT_IN_ILOGB)
1731 CASE_MATHFN (BUILT_IN_INF)
1732 CASE_MATHFN (BUILT_IN_ISINF)
1733 CASE_MATHFN (BUILT_IN_J0)
1734 CASE_MATHFN (BUILT_IN_J1)
1735 CASE_MATHFN (BUILT_IN_JN)
1736 CASE_MATHFN (BUILT_IN_LCEIL)
1737 CASE_MATHFN (BUILT_IN_LDEXP)
1738 CASE_MATHFN (BUILT_IN_LFLOOR)
1739 CASE_MATHFN (BUILT_IN_LGAMMA)
1740 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1741 CASE_MATHFN (BUILT_IN_LLCEIL)
1742 CASE_MATHFN (BUILT_IN_LLFLOOR)
1743 CASE_MATHFN (BUILT_IN_LLRINT)
1744 CASE_MATHFN (BUILT_IN_LLROUND)
1745 CASE_MATHFN (BUILT_IN_LOG)
1746 CASE_MATHFN (BUILT_IN_LOG10)
1747 CASE_MATHFN (BUILT_IN_LOG1P)
1748 CASE_MATHFN (BUILT_IN_LOG2)
1749 CASE_MATHFN (BUILT_IN_LOGB)
1750 CASE_MATHFN (BUILT_IN_LRINT)
1751 CASE_MATHFN (BUILT_IN_LROUND)
1752 CASE_MATHFN (BUILT_IN_MODF)
1753 CASE_MATHFN (BUILT_IN_NAN)
1754 CASE_MATHFN (BUILT_IN_NANS)
1755 CASE_MATHFN (BUILT_IN_NEARBYINT)
1756 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1757 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1758 CASE_MATHFN (BUILT_IN_POW)
1759 CASE_MATHFN (BUILT_IN_POWI)
1760 CASE_MATHFN (BUILT_IN_POW10)
1761 CASE_MATHFN (BUILT_IN_REMAINDER)
1762 CASE_MATHFN (BUILT_IN_REMQUO)
1763 CASE_MATHFN (BUILT_IN_RINT)
1764 CASE_MATHFN (BUILT_IN_ROUND)
1765 CASE_MATHFN (BUILT_IN_SCALB)
1766 CASE_MATHFN (BUILT_IN_SCALBLN)
1767 CASE_MATHFN (BUILT_IN_SCALBN)
1768 CASE_MATHFN (BUILT_IN_SIGNBIT)
1769 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1770 CASE_MATHFN (BUILT_IN_SIN)
1771 CASE_MATHFN (BUILT_IN_SINCOS)
1772 CASE_MATHFN (BUILT_IN_SINH)
1773 CASE_MATHFN (BUILT_IN_SQRT)
1774 CASE_MATHFN (BUILT_IN_TAN)
1775 CASE_MATHFN (BUILT_IN_TANH)
1776 CASE_MATHFN (BUILT_IN_TGAMMA)
1777 CASE_MATHFN (BUILT_IN_TRUNC)
1778 CASE_MATHFN (BUILT_IN_Y0)
1779 CASE_MATHFN (BUILT_IN_Y1)
1780 CASE_MATHFN (BUILT_IN_YN)
/* Select the decl matching TYPE's main variant; the failure returns
   (for unmatched types) are elided in this listing.  */
1786 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1787 return fn_arr[fcode];
1788 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1789 return fn_arr[fcodef];
1790 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1791 return fn_arr[fcodel];
1796 /* Like mathfn_built_in_1(), but always use the implicit array. */
/* Thin wrapper: looks FN up in implicit_built_in_decls for TYPE.  */
1799 mathfn_built_in (tree type, enum built_in_function fn)
1801 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1804 /* If errno must be maintained, expand the RTL to check if the result,
1805 TARGET, of a built-in function call, EXP, is NaN, and if so set
/* NOTE(review): interior lines are elided in this listing (braces,
   #else/#endif, emit_label at the end).  */
1809 expand_errno_check (tree exp, rtx target)
1811 rtx lab = gen_label_rtx ();
1813 /* Test the result; if it is NaN, set errno=EDOM because
1814 the argument was not in the domain. */
/* A NaN compares unequal to itself, so TARGET == TARGET jumping on EQ
   skips the errno store for any non-NaN result.  */
1815 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1819 /* If this built-in doesn't throw an exception, set errno directly. */
1820 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1822 #ifdef GEN_ERRNO_RTX
1823 rtx errno_rtx = GEN_ERRNO_RTX;
/* Fallback errno location when the target defines no GEN_ERRNO_RTX.  */
1826 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1828 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1834 /* Make sure the library call isn't expanded as a tail call. */
1835 CALL_EXPR_TAILCALL (exp) = 0;
1837 /* We can't set errno=EDOM directly; let the library call do it.
1838 Pop the arguments right away in case the call gets deleted. */
1840 expand_call (exp, target, 0);
1845 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1846 Return NULL_RTX if a normal call should be emitted rather than expanding
1847 the function in-line. EXP is the expression that is a call to the builtin
1848 function; if convenient, the result should be placed in TARGET.
1849 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): interior lines are elided in this listing (return type,
   braces, default switch case, start_sequence/end_sequence calls, the
   failure path after line 1940).  */
1852 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1854 optab builtin_optab;
1855 rtx op0, insns, before_call;
1856 tree fndecl = get_callee_fndecl (exp);
1857 enum machine_mode mode;
1858 bool errno_set = false;
1861 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1864 arg = CALL_EXPR_ARG (exp, 0);
/* Select the optab for this builtin and record whether the C library
   version would set errno (so we may need an errno check).  */
1866 switch (DECL_FUNCTION_CODE (fndecl))
1868 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets errno for negative arguments.  */
1869 errno_set = ! tree_expr_nonnegative_p (arg);
1870 builtin_optab = sqrt_optab;
1872 CASE_FLT_FN (BUILT_IN_EXP):
1873 errno_set = true; builtin_optab = exp_optab; break;
1874 CASE_FLT_FN (BUILT_IN_EXP10):
1875 CASE_FLT_FN (BUILT_IN_POW10):
1876 errno_set = true; builtin_optab = exp10_optab; break;
1877 CASE_FLT_FN (BUILT_IN_EXP2):
1878 errno_set = true; builtin_optab = exp2_optab; break;
1879 CASE_FLT_FN (BUILT_IN_EXPM1):
1880 errno_set = true; builtin_optab = expm1_optab; break;
1881 CASE_FLT_FN (BUILT_IN_LOGB):
1882 errno_set = true; builtin_optab = logb_optab; break;
1883 CASE_FLT_FN (BUILT_IN_LOG):
1884 errno_set = true; builtin_optab = log_optab; break;
1885 CASE_FLT_FN (BUILT_IN_LOG10):
1886 errno_set = true; builtin_optab = log10_optab; break;
1887 CASE_FLT_FN (BUILT_IN_LOG2):
1888 errno_set = true; builtin_optab = log2_optab; break;
1889 CASE_FLT_FN (BUILT_IN_LOG1P):
1890 errno_set = true; builtin_optab = log1p_optab; break;
1891 CASE_FLT_FN (BUILT_IN_ASIN):
1892 builtin_optab = asin_optab; break;
1893 CASE_FLT_FN (BUILT_IN_ACOS):
1894 builtin_optab = acos_optab; break;
1895 CASE_FLT_FN (BUILT_IN_TAN):
1896 builtin_optab = tan_optab; break;
1897 CASE_FLT_FN (BUILT_IN_ATAN):
1898 builtin_optab = atan_optab; break;
1899 CASE_FLT_FN (BUILT_IN_FLOOR):
1900 builtin_optab = floor_optab; break;
1901 CASE_FLT_FN (BUILT_IN_CEIL):
1902 builtin_optab = ceil_optab; break;
1903 CASE_FLT_FN (BUILT_IN_TRUNC):
1904 builtin_optab = btrunc_optab; break;
1905 CASE_FLT_FN (BUILT_IN_ROUND):
1906 builtin_optab = round_optab; break;
1907 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1908 builtin_optab = nearbyint_optab;
1909 if (flag_trapping_math)
1911 /* Else fallthrough and expand as rint. */
1912 CASE_FLT_FN (BUILT_IN_RINT):
1913 builtin_optab = rint_optab; break;
1918 /* Make a suitable register to place result in. */
1919 mode = TYPE_MODE (TREE_TYPE (exp));
/* No errno handling needed unless -fmath-errno and NaNs are honored.  */
1921 if (! flag_errno_math || ! HONOR_NANS (mode))
1924 /* Before working hard, check whether the instruction is available. */
1925 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1927 target = gen_reg_rtx (mode);
1929 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1930 need to expand the argument again. This way, we will not perform
1931 side-effects more the once. */
1932 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1934 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1938 /* Compute into TARGET.
1939 Set TARGET to wherever the result comes back. */
1940 target = expand_unop (mode, builtin_optab, op0, target, 0);
1945 expand_errno_check (exp, target);
1947 /* Output the entire sequence. */
1948 insns = get_insns ();
1954 /* If we were unable to expand via the builtin, stop the sequence
1955 (without outputting the insns) and call to the library function
1956 with the stabilized argument list. */
1960 before_call = get_last_insn ();
1962 return expand_call (exp, target, target == const0_rtx);
1965 /* Expand a call to the builtin binary math functions (pow and atan2).
1966 Return NULL_RTX if a normal call should be emitted rather than expanding the
1967 function in-line. EXP is the expression that is a call to the builtin
1968 function; if convenient, the result should be placed in TARGET.
1969 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): interior lines are elided in this listing (return type,
   braces, default switch cases, start_sequence/end_sequence, the
   final return).  */
1973 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1975 optab builtin_optab;
1976 rtx op0, op1, insns;
/* Most of these builtins take (REAL, REAL); the ldexp family takes
   (REAL, INT), handled by the first switch below.  */
1977 int op1_type = REAL_TYPE;
1978 tree fndecl = get_callee_fndecl (exp);
1980 enum machine_mode mode;
1981 bool errno_set = true;
1983 switch (DECL_FUNCTION_CODE (fndecl))
1985 CASE_FLT_FN (BUILT_IN_SCALBN):
1986 CASE_FLT_FN (BUILT_IN_SCALBLN):
1987 CASE_FLT_FN (BUILT_IN_LDEXP):
1988 op1_type = INTEGER_TYPE;
1993 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1996 arg0 = CALL_EXPR_ARG (exp, 0);
1997 arg1 = CALL_EXPR_ARG (exp, 1);
/* Select the optab for this builtin.  */
1999 switch (DECL_FUNCTION_CODE (fndecl))
2001 CASE_FLT_FN (BUILT_IN_POW):
2002 builtin_optab = pow_optab; break;
2003 CASE_FLT_FN (BUILT_IN_ATAN2):
2004 builtin_optab = atan2_optab; break;
2005 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb is only expandable when FLT_RADIX is 2.  */
2006 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2008 builtin_optab = scalb_optab; break;
2009 CASE_FLT_FN (BUILT_IN_SCALBN):
2010 CASE_FLT_FN (BUILT_IN_SCALBLN):
2011 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2013 /* Fall through... */
2014 CASE_FLT_FN (BUILT_IN_LDEXP):
2015 builtin_optab = ldexp_optab; break;
2016 CASE_FLT_FN (BUILT_IN_FMOD):
2017 builtin_optab = fmod_optab; break;
2018 CASE_FLT_FN (BUILT_IN_REMAINDER):
2019 CASE_FLT_FN (BUILT_IN_DREM):
2020 builtin_optab = remainder_optab; break;
2025 /* Make a suitable register to place result in. */
2026 mode = TYPE_MODE (TREE_TYPE (exp));
2028 /* Before working hard, check whether the instruction is available. */
2029 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2032 target = gen_reg_rtx (mode);
2034 if (! flag_errno_math || ! HONOR_NANS (mode))
2037 /* Always stabilize the argument list. */
/* SAVE_EXPRs avoid re-evaluating side effects if the args are expanded
   again on the library-call fallback path.  */
2038 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2039 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2041 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2042 op1 = expand_normal (arg1);
2046 /* Compute into TARGET.
2047 Set TARGET to wherever the result comes back. */
2048 target = expand_binop (mode, builtin_optab, op0, op1,
2049 target, 0, OPTAB_DIRECT);
2051 /* If we were unable to expand via the builtin, stop the sequence
2052 (without outputting the insns) and call to the library function
2053 with the stabilized argument list. */
2057 return expand_call (exp, target, target == const0_rtx);
2061 expand_errno_check (exp, target);
2063 /* Output the entire sequence. */
2064 insns = get_insns ();
2071 /* Expand a call to the builtin sin and cos math functions.
2072 Return NULL_RTX if a normal call should be emitted rather than expanding the
2073 function in-line. EXP is the expression that is a call to the builtin
2074 function; if convenient, the result should be placed in TARGET.
2075 SUBTARGET may be used as the target for computing one of EXP's
2079 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2081 optab builtin_optab;
2083 tree fndecl = get_callee_fndecl (exp);
2084 enum machine_mode mode;
2087 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2090 arg = CALL_EXPR_ARG (exp, 0);
/* Both sin and cos first try the combined sincos optab.  */
2092 switch (DECL_FUNCTION_CODE (fndecl))
2094 CASE_FLT_FN (BUILT_IN_SIN):
2095 CASE_FLT_FN (BUILT_IN_COS):
2096 builtin_optab = sincos_optab; break;
2101 /* Make a suitable register to place result in. */
2102 mode = TYPE_MODE (TREE_TYPE (exp));
2104 /* Check if sincos insn is available, otherwise fallback
2105 to sin or cos insn. */
2106 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2107 switch (DECL_FUNCTION_CODE (fndecl))
2109 CASE_FLT_FN (BUILT_IN_SIN):
2110 builtin_optab = sin_optab; break;
2111 CASE_FLT_FN (BUILT_IN_COS):
2112 builtin_optab = cos_optab; break;
2117 /* Before working hard, check whether the instruction is available. */
2118 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2120 target = gen_reg_rtx (mode);
2122 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2123 need to expand the argument again. This way, we will not perform
2124 side-effects more than once. */
2125 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2127 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2131 /* Compute into TARGET.
2132 Set TARGET to wherever the result comes back. */
/* The sincos insn produces both results at once; which output slot
   receives TARGET selects the value we keep (second slot for sin,
   first slot for cos -- the other result is discarded).  */
2133 if (builtin_optab == sincos_optab)
2137 switch (DECL_FUNCTION_CODE (fndecl))
2139 CASE_FLT_FN (BUILT_IN_SIN):
2140 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2142 CASE_FLT_FN (BUILT_IN_COS):
2143 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2148 gcc_assert (result);
/* Single-result sin or cos optab.  */
2152 target = expand_unop (mode, builtin_optab, op0, target, 0);
2157 /* Output the entire sequence. */
2158 insns = get_insns ();
2164 /* If we were unable to expand via the builtin, stop the sequence
2165 (without outputting the insns) and call to the library function
2166 with the stabilized argument list. */
2170 target = expand_call (exp, target, target == const0_rtx);
2175 /* Expand a call to one of the builtin math functions that operate on
2176 floating point argument and output an integer result (ilogb, isinf,
2178 Return 0 if a normal call should be emitted rather than expanding the
2179 function in-line. EXP is the expression that is a call to the builtin
2180 function; if convenient, the result should be placed in TARGET.
2181 SUBTARGET may be used as the target for computing one of EXP's operands. */
2184 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2186 optab builtin_optab = 0;
2187 enum insn_code icode = CODE_FOR_nothing;
2189 tree fndecl = get_callee_fndecl (exp);
2190 enum machine_mode mode;
2191 bool errno_set = false;
2194 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2197 arg = CALL_EXPR_ARG (exp, 0);
/* Only ilogb needs errno handling; isinf/isfinite/isnormal never set it.  */
2199 switch (DECL_FUNCTION_CODE (fndecl))
2201 CASE_FLT_FN (BUILT_IN_ILOGB):
2202 errno_set = true; builtin_optab = ilogb_optab; break;
2203 CASE_FLT_FN (BUILT_IN_ISINF):
2204 builtin_optab = isinf_optab; break;
2205 case BUILT_IN_ISNORMAL:
2206 case BUILT_IN_ISFINITE:
2207 CASE_FLT_FN (BUILT_IN_FINITE):
2208 /* These builtins have no optabs (yet). */
2214 /* There's no easy way to detect the case we need to set EDOM. */
2215 if (flag_errno_math && errno_set)
2218 /* Optab mode depends on the mode of the input argument. */
2219 mode = TYPE_MODE (TREE_TYPE (arg));
2222 icode = optab_handler (builtin_optab, mode)->insn_code;
2224 /* Before working hard, check whether the instruction is available. */
2225 if (icode != CODE_FOR_nothing)
2227 /* Make a suitable register to place result in. */
2229 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2230 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2232 gcc_assert (insn_data[icode].operand[0].predicate
2233 (target, GET_MODE (target)));
2235 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2236 need to expand the argument again. This way, we will not perform
2237 side-effects more than once. */
2238 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2240 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2242 if (mode != GET_MODE (op0))
2243 op0 = convert_to_mode (mode, op0, 0);
2245 /* Compute into TARGET.
2246 Set TARGET to wherever the result comes back. */
2247 emit_unop_insn (icode, target, op0, UNKNOWN);
2251 /* If there is no optab, try generic code.  Each classification is
2252 rewritten as a comparison of fabs(x) against the format's extreme
2253 normalized values and expanded as an ordinary expression.  */
2252 switch (DECL_FUNCTION_CODE (fndecl))
2256 CASE_FLT_FN (BUILT_IN_ISINF):
2258 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2259 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2260 tree const type = TREE_TYPE (arg);
2264 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2265 real_from_string (&r, buf);
2266 result = build_call_expr (isgr_fn, 2,
2267 fold_build1 (ABS_EXPR, type, arg),
2268 build_real (type, r));
2269 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2271 CASE_FLT_FN (BUILT_IN_FINITE):
2272 case BUILT_IN_ISFINITE:
2274 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2275 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2276 tree const type = TREE_TYPE (arg);
2280 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2281 real_from_string (&r, buf);
2282 result = build_call_expr (isle_fn, 2,
2283 fold_build1 (ABS_EXPR, type, arg),
2284 build_real (type, r));
2285 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2287 case BUILT_IN_ISNORMAL:
2289 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2290 islessequal(fabs(x),DBL_MAX). */
2291 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2292 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2293 tree const type = TREE_TYPE (arg);
2294 REAL_VALUE_TYPE rmax, rmin;
2297 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2298 real_from_string (&rmax, buf);
/* 0x1p(emin-1) is the smallest positive normalized value of MODE
   (DBL_MIN and friends).  */
2299 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2300 real_from_string (&rmin, buf);
/* fabs(x) is used twice below, so stabilize it once.  */
2301 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2302 result = build_call_expr (isle_fn, 2, arg,
2303 build_real (type, rmax));
2304 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2305 build_call_expr (isge_fn, 2, arg,
2306 build_real (type, rmin)));
2307 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2313 target = expand_call (exp, target, target == const0_rtx);
2318 /* Expand a call to the builtin sincos math function.
2319 Return NULL_RTX if a normal call should be emitted rather than expanding the
2320 function in-line. EXP is the expression that is a call to the builtin
2324 expand_builtin_sincos (tree exp)
2326 rtx op0, op1, op2, target1, target2;
2327 enum machine_mode mode;
2328 tree arg, sinp, cosp;
/* sincos (arg, sinp, cosp): ARG is the input, SINP and COSP are
   pointers to where the two results are stored.  */
2331 if (!validate_arglist (exp, REAL_TYPE,
2332 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2335 arg = CALL_EXPR_ARG (exp, 0);
2336 sinp = CALL_EXPR_ARG (exp, 1);
2337 cosp = CALL_EXPR_ARG (exp, 2);
2339 /* Make a suitable register to place result in. */
2340 mode = TYPE_MODE (TREE_TYPE (arg));
2342 /* Check if sincos insn is available, otherwise emit the call. */
2343 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2346 target1 = gen_reg_rtx (mode);
2347 target2 = gen_reg_rtx (mode);
2349 op0 = expand_normal (arg);
/* OP1 and OP2 are the lvalues *sinp and *cosp.  */
2350 op1 = expand_normal (build_fold_indirect_ref (sinp));
2351 op2 = expand_normal (build_fold_indirect_ref (cosp));
2353 /* Compute into target1 and target2.
2354 Set TARGET to wherever the result comes back.  TARGET2 receives the
2355 cosine (first output slot) and TARGET1 the sine (second slot). */
2355 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2356 gcc_assert (result);
2358 /* Move target1 and target2 to the memory locations indicated
2360 emit_move_insn (op1, target1);
2361 emit_move_insn (op2, target2);
2366 /* Expand a call to the internal cexpi builtin to the sincos math function.
2367 EXP is the expression that is a call to the builtin function; if convenient,
2368 the result should be placed in TARGET. SUBTARGET may be used as the target
2369 for computing one of EXP's operands.
2370 Three strategies are tried, in order: the sincos optab, a libcall to
2371 sincos when the target provides it, and finally a libcall to cexp. */
2372 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2374 tree fndecl = get_callee_fndecl (exp);
2376 enum machine_mode mode;
2379 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2382 arg = CALL_EXPR_ARG (exp, 0);
2383 type = TREE_TYPE (arg);
2384 mode = TYPE_MODE (TREE_TYPE (arg));
2386 /* Try expanding via a sincos optab, fall back to emitting a libcall
2387 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2388 is only generated from sincos, cexp or if we have either of them. */
2389 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2391 op1 = gen_reg_rtx (mode);
2392 op2 = gen_reg_rtx (mode);
2394 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2396 /* Compute into op1 and op2.  OP2 gets the cosine (first output slot)
2397 and OP1 the sine, matching the COMPLEX_EXPR built at the end. */
2397 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2399 else if (TARGET_HAS_SINCOS)
2401 tree call, fn = NULL_TREE;
/* Pick the sincos variant that matches the cexpi precision.  */
2405 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2406 fn = built_in_decls[BUILT_IN_SINCOSF];
2407 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2408 fn = built_in_decls[BUILT_IN_SINCOS];
2409 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2410 fn = built_in_decls[BUILT_IN_SINCOSL];
/* Create two stack temporaries and pass their addresses to sincos.  */
2414 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2415 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2416 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2417 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2418 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2419 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2421 /* Make sure not to fold the sincos call again. */
2422 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2423 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2424 call, 3, arg, top1, top2));
2428 tree call, fn = NULL_TREE, narg;
2429 tree ctype = build_complex_type (type);
2431 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2432 fn = built_in_decls[BUILT_IN_CEXPF];
2433 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2434 fn = built_in_decls[BUILT_IN_CEXP];
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2436 fn = built_in_decls[BUILT_IN_CEXPL];
2440 /* If we don't have a decl for cexp create one. This is the
2441 friendliest fallback if the user calls __builtin_cexpi
2442 without full target C99 function support. */
2443 if (fn == NULL_TREE)
2446 const char *name = NULL;
2448 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2450 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2452 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2455 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2456 fn = build_fn_decl (name, fntype);
/* cexpi(arg) == cexp(0 + arg*i), so build the purely imaginary
   complex argument for cexp.  */
2459 narg = fold_build2 (COMPLEX_EXPR, ctype,
2460 build_real (type, dconst0), arg);
2462 /* Make sure not to fold the cexp call again. */
2463 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2464 return expand_expr (build_call_nary (ctype, call, 1, narg),
2465 target, VOIDmode, EXPAND_NORMAL);
2468 /* Now build the proper return type: cos(arg) + sin(arg)*i, with OP2
2469 the real part and OP1 the imaginary part. */
2469 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2470 make_tree (TREE_TYPE (arg), op2),
2471 make_tree (TREE_TYPE (arg), op1)),
2472 target, VOIDmode, EXPAND_NORMAL);
2475 /* Expand a call to one of the builtin rounding functions gcc defines
2476 as an extension (lfloor and lceil). As these are gcc extensions we
2477 do not need to worry about setting errno to EDOM.
2478 If expanding via optab fails, lower expression to (int)(floor(x)).
2479 EXP is the expression that is a call to the builtin function;
2480 if convenient, the result should be placed in TARGET. */
2483 expand_builtin_int_roundingfn (tree exp, rtx target)
2485 convert_optab builtin_optab;
2486 rtx op0, insns, tmp;
2487 tree fndecl = get_callee_fndecl (exp);
2488 enum built_in_function fallback_fn;
2489 tree fallback_fndecl;
2490 enum machine_mode mode;
2493 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2496 arg = CALL_EXPR_ARG (exp, 0);
/* Select both the direct conversion optab and the floating-point
   rounding builtin used for the (int)(floor(x)) style fallback.  */
2498 switch (DECL_FUNCTION_CODE (fndecl))
2500 CASE_FLT_FN (BUILT_IN_LCEIL):
2501 CASE_FLT_FN (BUILT_IN_LLCEIL):
2502 builtin_optab = lceil_optab;
2503 fallback_fn = BUILT_IN_CEIL;
2506 CASE_FLT_FN (BUILT_IN_LFLOOR):
2507 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2508 builtin_optab = lfloor_optab;
2509 fallback_fn = BUILT_IN_FLOOR;
2516 /* Make a suitable register to place result in. */
2517 mode = TYPE_MODE (TREE_TYPE (exp));
2519 target = gen_reg_rtx (mode);
2521 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2522 need to expand the argument again. This way, we will not perform
2523 side-effects more than once. */
2524 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2526 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2530 /* Compute into TARGET. */
2531 if (expand_sfix_optab (target, op0, builtin_optab))
2533 /* Output the entire sequence. */
2534 insns = get_insns ();
2540 /* If we were unable to expand via the builtin, stop the sequence
2541 (without outputting the insns). */
2544 /* Fall back to floating point rounding optab. */
2545 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2547 /* For non-C99 targets we may end up without a fallback fndecl here
2548 if the user called __builtin_lfloor directly. In this case emit
2549 a call to the floor/ceil variants nevertheless. This should result
2550 in the best user experience for not full C99 targets. */
2551 if (fallback_fndecl == NULL_TREE)
2554 const char *name = NULL;
/* Pick the libm function name matching the precision of the builtin.  */
2556 switch (DECL_FUNCTION_CODE (fndecl))
2558 case BUILT_IN_LCEIL:
2559 case BUILT_IN_LLCEIL:
2562 case BUILT_IN_LCEILF:
2563 case BUILT_IN_LLCEILF:
2566 case BUILT_IN_LCEILL:
2567 case BUILT_IN_LLCEILL:
2570 case BUILT_IN_LFLOOR:
2571 case BUILT_IN_LLFLOOR:
2574 case BUILT_IN_LFLOORF:
2575 case BUILT_IN_LLFLOORF:
2578 case BUILT_IN_LFLOORL:
2579 case BUILT_IN_LLFLOORL:
2586 fntype = build_function_type_list (TREE_TYPE (arg),
2587 TREE_TYPE (arg), NULL_TREE);
2588 fallback_fndecl = build_fn_decl (name, fntype);
/* Expand a call to floor/ceil on the stabilized argument...  */
2591 exp = build_call_expr (fallback_fndecl, 1, arg);
2593 tmp = expand_normal (exp);
2595 /* ...then truncate the result of floating point optab to integer
2596 via expand_fix (). */
2597 target = gen_reg_rtx (mode);
2598 expand_fix (target, tmp, 0);
2603 /* Expand a call to one of the builtin math functions doing integer
2605 Return 0 if a normal call should be emitted rather than expanding the
2606 function in-line. EXP is the expression that is a call to the builtin
2607 function; if convenient, the result should be placed in TARGET. */
2610 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2612 convert_optab builtin_optab;
2614 tree fndecl = get_callee_fndecl (exp);
2616 enum machine_mode mode;
2618 /* There's no easy way to detect the case we need to set EDOM, so
2619 punt to a library call whenever math errno handling is enabled. */
2619 if (flag_errno_math)
2622 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2625 arg = CALL_EXPR_ARG (exp, 0);
/* Map lrint/llrint and lround/llround to their conversion optabs.  */
2627 switch (DECL_FUNCTION_CODE (fndecl))
2629 CASE_FLT_FN (BUILT_IN_LRINT):
2630 CASE_FLT_FN (BUILT_IN_LLRINT):
2631 builtin_optab = lrint_optab; break;
2632 CASE_FLT_FN (BUILT_IN_LROUND):
2633 CASE_FLT_FN (BUILT_IN_LLROUND):
2634 builtin_optab = lround_optab; break;
2639 /* Make a suitable register to place result in. */
2640 mode = TYPE_MODE (TREE_TYPE (exp));
2642 target = gen_reg_rtx (mode);
2644 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2645 need to expand the argument again. This way, we will not perform
2646 side-effects more than once. */
2647 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2649 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2653 if (expand_sfix_optab (target, op0, builtin_optab))
2655 /* Output the entire sequence. */
2656 insns = get_insns ();
2662 /* If we were unable to expand via the builtin, stop the sequence
2663 (without outputting the insns) and call to the library function
2664 with the stabilized argument list. */
2667 target = expand_call (exp, target, target == const0_rtx);
2672 /* To evaluate powi(x,n), the floating point value x raised to the
2673 constant integer exponent n, we use a hybrid algorithm that
2674 combines the "window method" with look-up tables. For an
2675 introduction to exponentiation algorithms and "addition chains",
2676 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2677 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2678 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2679 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2681 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2682 multiplications to inline before calling the system library's pow
2683 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2684 so this default never requires calling pow, powf or powl. */
2686 #ifndef POWI_MAX_MULTS
2687 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2690 /* The size of the "optimal power tree" lookup table. All
2691 exponents less than this value are simply looked up in the
2692 powi_table below. This threshold is also used to size the
2693 cache of pseudo registers that hold intermediate results. */
2694 #define POWI_TABLE_SIZE 256
2696 /* The size, in bits of the window, used in the "window method"
2697 exponentiation algorithm. This is equivalent to a radix of
2698 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2699 #define POWI_WINDOW_SIZE 3
2701 /* The following table is an efficient representation of an
2702 "optimal power tree". For each value, i, the corresponding
2703 value, j, in the table states that an optimal evaluation
2704 sequence for calculating pow(x,i) can be found by evaluating
2705 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2706 100 integers is given in Knuth's "Seminumerical algorithms". */
/* powi_table[i] gives a split point j such that pow(x,i) can be computed
   as pow(x,j) * pow(x,i-j) with a near-minimal number of multiplications
   (see the "optimal power tree" comment above).  */
2708 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2710 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2711 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2712 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2713 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2714 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2715 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2716 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2717 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2718 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2719 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2720 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2721 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2722 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2723 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2724 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2725 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2726 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2727 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2728 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2729 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2730 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2731 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2732 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2733 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2734 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2735 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2736 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2737 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2738 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2739 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2740 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2741 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2745 /* Return the number of multiplications required to calculate
2746 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2747 subroutine of powi_cost. CACHE is an array indicating
2748 which exponents have already been calculated. */
2751 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2753 /* If we've already calculated this exponent, then this evaluation
2754 doesn't require any additional multiplications. */
/* Otherwise the cost is the cost of the two factors chosen by the
   optimal power tree, plus one multiplication to combine them.  */
2759 return powi_lookup_cost (n - powi_table[n], cache)
2760 + powi_lookup_cost (powi_table[n], cache) + 1;
2763 /* Return the number of multiplications required to calculate
2764 powi(x,n) for an arbitrary x, given the exponent N. This
2765 function needs to be kept in sync with expand_powi below. */
2768 powi_cost (HOST_WIDE_INT n)
2770 bool cache[POWI_TABLE_SIZE];
2771 unsigned HOST_WIDE_INT digit;
2772 unsigned HOST_WIDE_INT val;
2778 /* Ignore the reciprocal when calculating the cost. */
2779 val = (n < 0) ? -n : n;
2781 /* Initialize the exponent cache. */
2782 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel off POWI_WINDOW_SIZE bits of the exponent per
   iteration until the remainder fits in the lookup table.  */
2787 while (val >= POWI_TABLE_SIZE)
2791 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2792 result += powi_lookup_cost (digit, cache)
2793 + POWI_WINDOW_SIZE + 1;
2794 val >>= POWI_WINDOW_SIZE;
/* Add the cost of the final table-sized exponent.  */
2803 return result + powi_lookup_cost (val, cache);
2806 /* Recursive subroutine of expand_powi. This function takes the array,
2807 CACHE, of already calculated exponents and an exponent N and returns
2808 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2811 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2813 unsigned HOST_WIDE_INT digit;
/* Small exponents are split according to the optimal power tree.  */
2817 if (n < POWI_TABLE_SIZE)
2822 target = gen_reg_rtx (mode);
2825 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2826 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Odd large exponents: peel off the low POWI_WINDOW_SIZE bits.  */
2830 target = gen_reg_rtx (mode);
2831 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2832 op0 = expand_powi_1 (mode, n - digit, cache);
2833 op1 = expand_powi_1 (mode, digit, cache);
/* Even exponents: square the half power.  */
2837 target = gen_reg_rtx (mode);
2838 op0 = expand_powi_1 (mode, n >> 1, cache);
2842 result = expand_mult (mode, op0, op1, target, 0);
2843 if (result != target)
2844 emit_move_insn (target, result);
2848 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2849 floating point operand in mode MODE, and N is the exponent. This
2850 function needs to be kept in sync with powi_cost above. */
2853 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2855 unsigned HOST_WIDE_INT val;
2856 rtx cache[POWI_TABLE_SIZE];
/* powi(x,0) is 1 for any x.  */
2860 return CONST1_RTX (mode);
/* Compute x**abs(n) first; negative exponents are handled below.  */
2862 val = (n < 0) ? -n : n;
2864 memset (cache, 0, sizeof (cache));
2867 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2869 /* If the original exponent was negative, reciprocate the result. */
2871 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2872 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2877 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2878 a normal call should be emitted rather than expanding the function
2879 in-line. EXP is the expression that is a call to the builtin
2880 function; if convenient, the result should be placed in TARGET. */
2883 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2887 tree type = TREE_TYPE (exp);
2888 REAL_VALUE_TYPE cint, c, c2;
2891 enum machine_mode mode = TYPE_MODE (type);
2893 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2896 arg0 = CALL_EXPR_ARG (exp, 0);
2897 arg1 = CALL_EXPR_ARG (exp, 1);
/* A non-constant exponent gets no special treatment; expand via the
   pow optab or a library call.  */
2899 if (TREE_CODE (arg1) != REAL_CST
2900 || TREE_OVERFLOW (arg1))
2901 return expand_builtin_mathfn_2 (exp, target, subtarget);
2903 /* Handle constant exponents. */
2905 /* For integer valued exponents we can expand to an optimal multiplication
2906 sequence using expand_powi. */
2907 c = TREE_REAL_CST (arg1);
2908 n = real_to_integer (&c);
2909 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* Exponents -1..2 are always exact; larger integer exponents require
   -funsafe-math-optimizations and a bounded multiplication count.  */
2910 if (real_identical (&c, &cint)
2911 && ((n >= -1 && n <= 2)
2912 || (flag_unsafe_math_optimizations
2914 && powi_cost (n) <= POWI_MAX_MULTS)))
2916 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2919 op = force_reg (mode, op);
2920 op = expand_powi (op, mode, n);
/* ARG0 is used more than once by the fractional-exponent paths below,
   so stabilize it.  */
2925 narg0 = builtin_save_expr (arg0);
2927 /* If the exponent is not integer valued, check if it is half of an integer.
2928 In this case we can expand to sqrt (x) * x**(n/2). */
2929 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2930 if (fn != NULL_TREE)
/* c2 = 2*c; the exponent is a half-integer iff c2 is an integer.  */
2932 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2933 n = real_to_integer (&c2);
2934 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2935 if (real_identical (&c2, &cint)
2936 && ((flag_unsafe_math_optimizations
2938 && powi_cost (n/2) <= POWI_MAX_MULTS)
2941 tree call_expr = build_call_expr (fn, 1, narg0);
2942 /* Use expand_expr in case the newly built call expression
2943 was folded to a non-call. */
2944 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2947 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2948 op2 = force_reg (mode, op2);
2949 op2 = expand_powi (op2, mode, abs (n / 2));
2950 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2951 0, OPTAB_LIB_WIDEN);
2952 /* If the original exponent was negative, reciprocate the
2955 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2956 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2962 /* Try if the exponent is a third of an integer. In this case
2963 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2964 different from pow (x, 1./3.) due to rounding and behavior
2965 with negative x we need to constrain this transformation to
2966 unsafe math and positive x or finite math. */
2967 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2969 && flag_unsafe_math_optimizations
2970 && (tree_expr_nonnegative_p (arg0)
2971 || !HONOR_NANS (mode)))
2973 REAL_VALUE_TYPE dconst3;
2974 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
/* Round 3*c to an integer N, then check that N/3 reproduces C exactly
   in MODE -- i.e. the exponent really is a third of an integer.  */
2975 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2976 real_round (&c2, mode, &c2);
2977 n = real_to_integer (&c2);
2978 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2979 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2980 real_convert (&c2, mode, &c2);
2981 if (real_identical (&c2, &c)
2983 && powi_cost (n/3) <= POWI_MAX_MULTS)
2986 tree call_expr = build_call_expr (fn, 1,narg0);
2987 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* cbrt(x)**2 is needed when abs(n) % 3 == 2.  */
2988 if (abs (n) % 3 == 2)
2989 op = expand_simple_binop (mode, MULT, op, op, op,
2990 0, OPTAB_LIB_WIDEN);
2993 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2994 op2 = force_reg (mode, op2);
2995 op2 = expand_powi (op2, mode, abs (n / 3));
2996 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2997 0, OPTAB_LIB_WIDEN);
2998 /* If the original exponent was negative, reciprocate the
3001 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3002 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3008 /* Fall back to optab expansion. */
3009 return expand_builtin_mathfn_2 (exp, target, subtarget);
3012 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3013 a normal call should be emitted rather than expanding the function
3014 in-line. EXP is the expression that is a call to the builtin
3015 function; if convenient, the result should be placed in TARGET. */
3018 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3022 enum machine_mode mode;
3023 enum machine_mode mode2;
3025 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3028 arg0 = CALL_EXPR_ARG (exp, 0);
3029 arg1 = CALL_EXPR_ARG (exp, 1);
3030 mode = TYPE_MODE (TREE_TYPE (exp));
3032 /* Handle constant power. */
3034 if (TREE_CODE (arg1) == INTEGER_CST
3035 && !TREE_OVERFLOW (arg1))
3037 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3039 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3040 Otherwise, check the number of multiplications required.  The
3041 TREE_INT_CST_HIGH test ensures the exponent fits in N.  */
3041 if ((TREE_INT_CST_HIGH (arg1) == 0
3042 || TREE_INT_CST_HIGH (arg1) == -1)
3043 && ((n >= -1 && n <= 2)
3045 && powi_cost (n) <= POWI_MAX_MULTS)))
3047 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3048 op0 = force_reg (mode, op0);
3049 return expand_powi (op0, mode, n);
3053 /* Emit a libcall to libgcc. */
3055 /* Mode of the 2nd argument must match that of an int. */
3056 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3058 if (target == NULL_RTX)
3059 target = gen_reg_rtx (mode);
/* Coerce both operands into the modes the libcall expects.  */
3061 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3062 if (GET_MODE (op0) != mode)
3063 op0 = convert_to_mode (mode, op0, 0);
3064 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3065 if (GET_MODE (op1) != mode2)
3066 op1 = convert_to_mode (mode2, op1, 0);
3068 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3069 target, LCT_CONST, mode, 2,
3070 op0, mode, op1, mode2);
3075 /* Expand expression EXP which is a call to the strlen builtin. Return
3076 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3077 try to get the result in TARGET, if convenient. */
3080 expand_builtin_strlen (tree exp, rtx target,
3081 enum machine_mode target_mode)
3083 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3089 tree src = CALL_EXPR_ARG (exp, 0);
3090 rtx result, src_reg, char_rtx, before_strlen;
3091 enum machine_mode insn_mode = target_mode, char_mode;
3092 enum insn_code icode = CODE_FOR_nothing;
3095 /* If the length can be computed at compile-time, return it. */
3096 len = c_strlen (src, 0);
3098 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3100 /* If the length can be computed at compile-time and is constant
3101 integer, but there are side-effects in src, evaluate
3102 src for side-effects, then return len.
3103 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3104 can be optimized into: i++; x = 3; */
3105 len = c_strlen (src, 1);
3106 if (len && TREE_CODE (len) == INTEGER_CST)
3108 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3109 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
/* The strlen insn pattern takes the known alignment in bytes.  */
3112 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3114 /* If SRC is not a pointer type, don't do this operation inline. */
3118 /* Bail out if we can't compute strlen in the right mode.  Try
3119 successively wider integer modes until a pattern exists. */
3119 while (insn_mode != VOIDmode)
3121 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3122 if (icode != CODE_FOR_nothing)
3125 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3127 if (insn_mode == VOIDmode)
3130 /* Make a place to write the result of the instruction. */
3134 && GET_MODE (result) == insn_mode
3135 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3136 result = gen_reg_rtx (insn_mode);
3138 /* Make a place to hold the source address. We will not expand
3139 the actual source until we are sure that the expansion will
3140 not fail -- there are trees that cannot be expanded twice. */
3141 src_reg = gen_reg_rtx (Pmode);
3143 /* Mark the beginning of the strlen sequence so we can emit the
3144 source operand later. */
3145 before_strlen = get_last_insn ();
/* Operand 2 of the pattern is the terminating character, NUL here;
   force it into a register if the predicate rejects const0_rtx.  */
3147 char_rtx = const0_rtx;
3148 char_mode = insn_data[(int) icode].operand[2].mode;
3149 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3151 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3153 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3154 char_rtx, GEN_INT (align));
3159 /* Now that we are assured of success, expand the source. */
3161 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3163 emit_move_insn (src_reg, pat);
/* Emit the source-address setup ahead of the strlen insns.  */
3168 emit_insn_after (pat, before_strlen);
3170 emit_insn_before (pat, get_insns ());
3172 /* Return the value in the proper mode for this function. */
3173 if (GET_MODE (result) == target_mode)
3175 else if (target != 0)
3176 convert_move (target, result, 0);
3178 target = convert_to_mode (target_mode, result, 0);
3184 /* Expand a call to the strstr builtin.  Return NULL_RTX if we failed;
3185 the caller should emit a normal call, otherwise try to get the result
3186 in TARGET, if convenient (and in mode MODE if that's convenient).  */
3189 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
/* Only proceed for a well-formed strstr (const char *, const char *).  */
3191 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3193 tree type = TREE_TYPE (exp);
/* Try to fold at the tree level (presumably constant-string cases;
   see fold_builtin_strstr); expand the folded tree if it succeeds.  */
3194 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3195 CALL_EXPR_ARG (exp, 1), type);
3197 return expand_expr (result, target, mode, EXPAND_NORMAL);
3202 /* Expand a call to the strchr builtin.  Return NULL_RTX if we failed;
3203 the caller should emit a normal call, otherwise try to get the result
3204 in TARGET, if convenient (and in mode MODE if that's convenient).  */
3207 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
/* Only proceed for a well-formed strchr (const char *, int).  */
3209 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3211 tree type = TREE_TYPE (exp);
/* Try to fold at the tree level; expand the folded tree if it succeeds.  */
3212 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3213 CALL_EXPR_ARG (exp, 1), type);
3215 return expand_expr (result, target, mode, EXPAND_NORMAL);
3217 /* FIXME: Should use strchrM optab so that ports can optimize this.  */
3222 /* Expand a call to the strrchr builtin.  Return NULL_RTX if we failed;
3223 the caller should emit a normal call, otherwise try to get the result
3224 in TARGET, if convenient (and in mode MODE if that's convenient).  */
3227 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
/* Only proceed for a well-formed strrchr (const char *, int).  */
3229 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3231 tree type = TREE_TYPE (exp);
/* Try to fold at the tree level; expand the folded tree if it succeeds.  */
3232 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3233 CALL_EXPR_ARG (exp, 1), type);
3235 return expand_expr (result, target, mode, EXPAND_NORMAL);
3240 /* Expand a call to the strpbrk builtin.  Return NULL_RTX if we failed;
3241 the caller should emit a normal call, otherwise try to get the result
3242 in TARGET, if convenient (and in mode MODE if that's convenient).  */
3245 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
/* Only proceed for a well-formed strpbrk (const char *, const char *).  */
3247 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3249 tree type = TREE_TYPE (exp);
/* Try to fold at the tree level; expand the folded tree if it succeeds.  */
3250 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3251 CALL_EXPR_ARG (exp, 1), type);
3253 return expand_expr (result, target, mode, EXPAND_NORMAL);
3258 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3259 bytes from constant string DATA + OFFSET and return it as target
3263 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3264 enum machine_mode mode)
/* DATA is the NUL-terminated constant source string handed to
   store_by_pieces / can_store_by_pieces.  */
3266 const char *str = (const char *) data;
/* The requested chunk must lie entirely within the string,
   including its terminating NUL.  */
3268 gcc_assert (offset >= 0
3269 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3270 <= strlen (str) + 1));
/* Package the bytes at STR + OFFSET as an rtx constant of MODE.  */
3272 return c_readstr (str + offset, mode);
3275 /* Expand a call EXP to the memcpy builtin.
3276 Return NULL_RTX if we failed, the caller should emit a normal call,
3277 otherwise try to get the result in TARGET, if convenient (and in
3278 mode MODE if that's convenient). */
3281 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3283 tree fndecl = get_callee_fndecl (exp);
/* Only proceed for a well-formed memcpy (void *, const void *, size_t).  */
3285 if (!validate_arglist (exp,
3286 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3290 tree dest = CALL_EXPR_ARG (exp, 0);
3291 tree src = CALL_EXPR_ARG (exp, 1);
3292 tree len = CALL_EXPR_ARG (exp, 2);
3293 const char *src_str;
/* Alignments are in bits here; zero means "not a pointer type".  */
3294 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3295 unsigned int dest_align
3296 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3297 rtx dest_mem, src_mem, dest_addr, len_rtx;
/* First try to fold the whole call at the tree level.  */
3298 tree result = fold_builtin_memory_op (dest, src, len,
3299 TREE_TYPE (TREE_TYPE (fndecl)),
/* Profile-feedback hints for the block-move expander; -1/0 mean
   "no information".  */
3301 HOST_WIDE_INT expected_size = -1;
3302 unsigned int expected_align = 0;
3303 tree_ann_common_t ann;
/* Peel off side-effect operands of a folded COMPOUND_EXPR, expanding
   each purely for its effects, then expand the final value.  */
3307 while (TREE_CODE (result) == COMPOUND_EXPR)
3309 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3311 result = TREE_OPERAND (result, 1);
3313 return expand_expr (result, target, mode, EXPAND_NORMAL);
3316 /* If DEST is not a pointer type, call the normal function.  */
3317 if (dest_align == 0)
3320 /* If either SRC is not a pointer type, don't do this
3321 operation in-line.  */
/* Pull value-profiling hints (expected block size/alignment) off the
   statement annotation, if any.  */
3325 ann = tree_common_ann (exp);
3327 stringop_block_profile (ann->stmt, &expected_align, &expected_size);
3329 if (expected_align < dest_align)
3330 expected_align = dest_align;
3331 dest_mem = get_memory_rtx (dest, len);
3332 set_mem_align (dest_mem, dest_align);
3333 len_rtx = expand_normal (len);
/* Non-NULL iff SRC is a string constant we can read at compile time.  */
3334 src_str = c_getstr (src);
3336 /* If SRC is a string constant and block move would be done
3337 by pieces, we can avoid loading the string from memory
3338 and only store the computed constants.  */
3340 && GET_CODE (len_rtx) == CONST_INT
3341 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3342 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3343 CONST_CAST (char *, src_str),
3346 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3347 builtin_memcpy_read_str,
3348 CONST_CAST (char *, src_str),
3349 dest_align, false, 0);
/* memcpy returns DEST: materialize the address in ptr_mode.  */
3350 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3351 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3355 src_mem = get_memory_rtx (src, len);
3356 set_mem_align (src_mem, src_align);
3358 /* Copy word part most expediently.  */
3359 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3360 CALL_EXPR_TAILCALL (exp)
3361 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3362 expected_align, expected_size);
/* The block-move expander did not supply the return value; compute
   DEST's address ourselves.  */
3366 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3367 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3373 /* Expand a call EXP to the mempcpy builtin.
3374 Return NULL_RTX if we failed; the caller should emit a normal call,
3375 otherwise try to get the result in TARGET, if convenient (and in
3376 mode MODE if that's convenient). If ENDP is 0 return the
3377 destination pointer, if ENDP is 1 return the end pointer ala
3378 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3382 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
/* Only proceed for a well-formed mempcpy (void *, const void *, size_t).  */
3384 if (!validate_arglist (exp,
3385 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3389 tree dest = CALL_EXPR_ARG (exp, 0);
3390 tree src = CALL_EXPR_ARG (exp, 1);
3391 tree len = CALL_EXPR_ARG (exp, 2);
/* endp == 1: return DEST + LEN (the end pointer), per mempcpy.  */
3392 return expand_builtin_mempcpy_args (dest, src, len,
3394 target, mode, /*endp=*/ 1);
3398 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3399 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3400 so that this can also be called without constructing an actual CALL_EXPR.
3401 TYPE is the return type of the call. The other arguments and return value
3402 are the same as for expand_builtin_mempcpy. */
3405 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3406 rtx target, enum machine_mode mode, int endp)
3408 /* If return value is ignored, transform mempcpy into memcpy.  */
3409 if (target == const0_rtx)
3411 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3416 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3417 target, mode, EXPAND_NORMAL);
3421 const char *src_str;
/* Alignments are in bits here; zero means "not a pointer type".  */
3422 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3423 unsigned int dest_align
3424 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3425 rtx dest_mem, src_mem, len_rtx;
/* First try to fold the whole operation at the tree level.  */
3426 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
/* Peel off side-effect operands of a folded COMPOUND_EXPR, expanding
   each purely for its effects, then expand the final value.  */
3430 while (TREE_CODE (result) == COMPOUND_EXPR)
3432 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3434 result = TREE_OPERAND (result, 1);
3436 return expand_expr (result, target, mode, EXPAND_NORMAL);
3439 /* If either SRC or DEST is not a pointer type, don't do this
3440 operation in-line.  */
3441 if (dest_align == 0 || src_align == 0)
3444 /* If LEN is not constant, call the normal function.  */
3445 if (! host_integerp (len, 1))
3448 len_rtx = expand_normal (len);
/* Non-NULL iff SRC is a string constant we can read at compile time.  */
3449 src_str = c_getstr (src);
3451 /* If SRC is a string constant and block move would be done
3452 by pieces, we can avoid loading the string from memory
3453 and only store the computed constants.  */
3455 && GET_CODE (len_rtx) == CONST_INT
3456 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3457 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3458 CONST_CAST (char *, src_str),
3461 dest_mem = get_memory_rtx (dest, len);
3462 set_mem_align (dest_mem, dest_align);
/* store_by_pieces honors ENDP, so DEST_MEM already points at the
   value mempcpy should return.  */
3463 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3464 builtin_memcpy_read_str,
3465 CONST_CAST (char *, src_str),
3466 dest_align, false, endp);
3467 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3468 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise, move by pieces if the (constant) length permits it at
   the alignment both pointers are known to share.  */
3472 if (GET_CODE (len_rtx) == CONST_INT
3473 && can_move_by_pieces (INTVAL (len_rtx),
3474 MIN (dest_align, src_align)))
3476 dest_mem = get_memory_rtx (dest, len);
3477 set_mem_align (dest_mem, dest_align);
3478 src_mem = get_memory_rtx (src, len);
3479 set_mem_align (src_mem, src_align);
3480 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3481 MIN (dest_align, src_align), endp);
3482 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3483 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3491 /* Expand expression EXP, which is a call to the memmove builtin. Return
3492 NULL_RTX if we failed; the caller should emit a normal call. */
3495 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
/* Only proceed for a well-formed memmove (void *, const void *, size_t).  */
3497 if (!validate_arglist (exp,
3498 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3502 tree dest = CALL_EXPR_ARG (exp, 0);
3503 tree src = CALL_EXPR_ARG (exp, 1);
3504 tree len = CALL_EXPR_ARG (exp, 2);
/* Delegate to the args helper so this path can also be reached without
   constructing a CALL_EXPR.  */
3505 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3506 target, mode, ignore);
3510 /* Helper function to do the actual work for expand_builtin_memmove. The
3511 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3512 so that this can also be called without constructing an actual CALL_EXPR.
3513 TYPE is the return type of the call. The other arguments and return value
3514 are the same as for expand_builtin_memmove. */
3517 expand_builtin_memmove_args (tree dest, tree src, tree len,
3518 tree type, rtx target, enum machine_mode mode,
/* endp == 3 identifies the memmove variant to the tree-level folder.  */
3521 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3525 STRIP_TYPE_NOPS (result);
/* Peel off side-effect operands of a folded COMPOUND_EXPR, expanding
   each purely for its effects, then expand the final value.  */
3526 while (TREE_CODE (result) == COMPOUND_EXPR)
3528 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3530 result = TREE_OPERAND (result, 1);
3532 return expand_expr (result, target, mode, EXPAND_NORMAL);
3535 /* Otherwise, call the normal function: unlike memcpy, overlapping
   moves cannot safely be expanded inline here.  */
3539 /* Expand expression EXP, which is a call to the bcopy builtin. Return