1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
/* Printable names for the four builtin classes; presumably indexed by
   enum built_in_class — confirm the enum order matches this table.  */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* Printable names for every builtin, generated by expanding DEF_BUILTIN
   over builtins.def to stringize each enumerator (#X).
   NOTE(review): the initializer braces and the trailing #undef appear to
   be elided from this excerpt — verify against the full file.  */
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
/* Static storage duration zero-initializes every slot, which serves as
   NULL_TREE here.  */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
/* Parallel to built_in_decls; an entry is NULL_TREE when the compiler must
   not emit an implicit call to that builtin.  */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
/* Target-charset values of characters and short format strings used when
   folding printf-family builtins.  Presumably filled in lazily by
   init_target_chars (prototype above) — confirm in the full file.  */
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
/* NOTE(review): the function body's braces and return statements are elided
   from this excerpt; only the "__builtin_" and "__sync_" prefix tests are
   visible.  Verify against the full file before editing.  */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
250 if (strncmp (name, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): many interior lines (case labels, braces, early returns,
   loop bodies) are elided from this excerpt; the surviving lines show the
   overall strategy — start from the pointed-to type's alignment and refine
   via NOP/POINTER_PLUS/ADDR_EXPR — but do not edit without the full file.  */
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
280 switch (TREE_CODE (exp))
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
303 exp = TREE_OPERAND (exp, 0);
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
/* bitpos & -bitpos isolates the lowest set bit, i.e. the largest power of
   two dividing the bit offset — an upper bound on the achievable alignment.  */
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
344 inner = MIN (inner, (offset_factor & -offset_factor));
347 inner = MIN (inner, BITS_PER_UNIT);
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
359 align = MIN (align, inner);
360 return MIN (align, max_align);
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
/* NOTE(review): several interior lines (the return type, local declarations
   such as offset_node/max/ptr/i, braces, and fallback returns) are elided
   from this excerpt — verify against the full file before editing.  */
385 c_strlen (tree src, int only_value)
388 HOST_WIDE_INT offset;
/* COND_EXPR: the two arms can only share a length if both fold and agree.  */
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
422 for (i = 0; i < max; i++)
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
440 else if (! host_integerp (offset_node, 0))
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
/* NOTE(review): the function signature line (static const char *c_getstr
   per the prototype above) and several guard returns are elided from this
   excerpt — verify against the full file before editing.  Returns NULL
   when SRC is not a usable string constant or the offset is out of range.  */
475 src = string_constant (src, &offset_node);
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* NOTE(review): the return-type line, locals (c[2], ch, i, j) and parts of
   the loop body are elided from this excerpt — verify against the full
   file.  The visible logic packs bytes into a two-word constant honoring
   target byte and word endianness.  */
492 c_readstr (const char *str, enum machine_mode mode)
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
/* NOTE(review): the tail of the comment (", P.") and the final comparison
   against HOSTVAL, store through P, and returns are elided from this
   excerpt — verify against the full file before editing.  */
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
534 val = tree_low_cst (cst, 1);
/* Truncate to the target char width ... */
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* ... and separately to the host char width, to detect mismatch.  */
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
/* NOTE(review): the return type, braces and the early "return exp" for the
   safe-to-reevaluate case are elided from this excerpt — verify against
   the full file before editing.  */
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): many lines are elided from this excerpt — the return type,
   the #else arms of several target-macro conditionals, #endif lines, the
   loop braces and the final return.  Do not edit without the full file.  */
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
603 SETUP_FRAME_ADDRESSES ();
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
648 /* Alias set used for setjmp buffer. */
/* -1 means "not yet allocated"; lazily created via new_alias_set () by the
   setjmp/longjmp expanders below.  */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
/* NOTE(review): the return type, local declarations (mem, stack_save),
   braces and #endif lines are elided from this excerpt — verify against
   the full file before editing.  Buffer layout: word 0 = frame pointer,
   word 1 = receiver label, words 2+ = stack save area.  */
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* Note the deliberate comma operator joining this statement to the next.  */
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
/* NOTE(review): the return type, braces, #else/#endif lines and some
   declarations (e.g. the loop index i) are elided from this excerpt —
   verify against the full file before editing.  */
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* Only restore the arg pointer if no elimination to the hard FP exists.  */
741 if (i == ARRAY_SIZE (elim_regs))
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
/* NOTE(review): the return type, braces, #else/#endif lines and parts of
   the trailing insn-scan loop (JUMP_P test, loop exit) are elided from
   this excerpt — verify against the full file before editing.  */
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
/* Generic path: reload FP, receiver label and saved SP from the buffer.  */
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
823 lab = copy_to_reg (lab);
825 emit_insn (gen_rtx_CLOBBER (VOIDmode,
826 gen_rtx_MEM (BLKmode,
827 gen_rtx_SCRATCH (VOIDmode))));
828 emit_insn (gen_rtx_CLOBBER (VOIDmode,
829 gen_rtx_MEM (BLKmode,
830 hard_frame_pointer_rtx)));
832 emit_move_insn (hard_frame_pointer_rtx, fp);
833 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
835 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
836 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
837 emit_indirect_jump (lab);
841 /* Search backwards and mark the jump insn as a non-local goto.
842 Note that this precludes the use of __builtin_longjmp to a
843 __builtin_setjmp target in the same function. However, we've
844 already cautioned the user that these functions are for
845 internal exception handling use only. */
846 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
848 gcc_assert (insn != last);
852 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
856 else if (CALL_P (insn))
861 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
862 and the address of the save area. */
/* NOTE(review): the return type, braces, early-return for a bad arglist,
   #else/#endif lines and the final "return const0_rtx" are elided from
   this excerpt — verify against the full file before editing.  */
865 expand_builtin_nonlocal_goto (tree exp)
867 tree t_label, t_save_area;
868 rtx r_label, r_save_area, r_fp, r_sp, insn;
870 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
873 t_label = CALL_EXPR_ARG (exp, 0);
874 t_save_area = CALL_EXPR_ARG (exp, 1);
876 r_label = expand_normal (t_label);
877 r_label = convert_memory_address (Pmode, r_label);
878 r_save_area = expand_normal (t_save_area);
879 r_save_area = convert_memory_address (Pmode, r_save_area);
/* Save area layout mirrors the setjmp buffer: FP first, then saved SP.  */
880 r_fp = gen_rtx_MEM (Pmode, r_save_area);
881 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
882 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
884 crtl->has_nonlocal_goto = 1;
886 #ifdef HAVE_nonlocal_goto
887 /* ??? We no longer need to pass the static chain value, afaik. */
888 if (HAVE_nonlocal_goto)
889 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
893 r_label = copy_to_reg (r_label);
895 emit_insn (gen_rtx_CLOBBER (VOIDmode,
896 gen_rtx_MEM (BLKmode,
897 gen_rtx_SCRATCH (VOIDmode))));
899 emit_insn (gen_rtx_CLOBBER (VOIDmode,
900 gen_rtx_MEM (BLKmode,
901 hard_frame_pointer_rtx)));
903 /* Restore frame pointer for containing function.
904 This sets the actual hard register used for the frame pointer
905 to the location of the function's incoming static chain info.
906 The non-local goto handler will then adjust it to contain the
907 proper value and reload the argument pointer, if needed. */
908 emit_move_insn (hard_frame_pointer_rtx, r_fp)
909 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
911 /* USE of hard_frame_pointer_rtx added for consistency;
912 not clear if really needed. */
913 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
914 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
916 /* If the architecture is using a GP register, we must
917 conservatively assume that the target function makes use of it.
918 The prologue of functions with nonlocal gotos must therefore
919 initialize the GP register to the appropriate value, and we
920 must then make sure that this value is live at the point
921 of the jump. (Note that this doesn't necessarily apply
922 to targets with a nonlocal_goto pattern; they are free
923 to implement it in their own way. Note also that this is
924 a no-op if the GP register is a global invariant.) */
925 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
926 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
927 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
929 emit_indirect_jump (r_label);
932 /* Search backwards to the jump insn and mark it as a
934 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
938 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
939 const0_rtx, REG_NOTES (insn));
942 else if (CALL_P (insn))
949 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
950 (not all will be used on all machines) that was passed to __builtin_setjmp.
951 It updates the stack pointer in that block to correspond to the current
/* NOTE(review): the tail of this comment ("stack pointer."), the return
   type, the stack_save declaration, braces and #elif/#endif lines are
   elided from this excerpt — verify against the full file before editing.  */
955 expand_builtin_update_setjmp_buf (rtx buf_addr)
957 enum machine_mode sa_mode = Pmode;
961 #ifdef HAVE_save_stack_nonlocal
962 if (HAVE_save_stack_nonlocal)
963 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
965 #ifdef STACK_SAVEAREA_MODE
966 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack save slot starts two pointer-words into the buffer, matching
   the layout written by expand_builtin_setjmp_setup.  */
970 = gen_rtx_MEM (sa_mode,
973 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
977 emit_insn (gen_setjmp ());
980 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
983 /* Expand a call to __builtin_prefetch. For a target that does not support
984 data prefetch, evaluate the memory address argument in case it has side
988 expand_builtin_prefetch (tree exp)
990 tree arg0, arg1, arg2;
994 if (!validate_arglist (exp, POINTER_TYPE, 0))
997 arg0 = CALL_EXPR_ARG (exp, 0);
999 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1000 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1002 nargs = call_expr_nargs (exp);
1004 arg1 = CALL_EXPR_ARG (exp, 1);
1006 arg1 = integer_zero_node;
1008 arg2 = CALL_EXPR_ARG (exp, 2);
1010 arg2 = build_int_cst (NULL_TREE, 3);
1012 /* Argument 0 is an address. */
1013 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1015 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1016 if (TREE_CODE (arg1) != INTEGER_CST)
1018 error ("second argument to %<__builtin_prefetch%> must be a constant");
1019 arg1 = integer_zero_node;
1021 op1 = expand_normal (arg1);
1022 /* Argument 1 must be either zero or one. */
1023 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1025 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1030 /* Argument 2 (locality) must be a compile-time constant int. */
1031 if (TREE_CODE (arg2) != INTEGER_CST)
1033 error ("third argument to %<__builtin_prefetch%> must be a constant");
1034 arg2 = integer_zero_node;
1036 op2 = expand_normal (arg2);
1037 /* Argument 2 must be 0, 1, 2, or 3. */
1038 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1040 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1044 #ifdef HAVE_prefetch
1047 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1049 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1050 || (GET_MODE (op0) != Pmode))
1052 op0 = convert_memory_address (Pmode, op0);
1053 op0 = force_reg (Pmode, op0);
1055 emit_insn (gen_prefetch (op0, op1, op2));
1059 /* Don't do anything with direct references to volatile memory, but
1060 generate code to handle other side effects. */
1061 if (!MEM_P (op0) && side_effects_p (op0))
/* NOTE(review): this listing carries embedded original line numbers and is
   missing interior lines (several condition heads and closing braces);
   comments below annotate only what the visible lines establish.  */
1065 /* Get a MEM rtx for expression EXP which is the address of an operand
1066 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1067 the maximum length of the block of memory that might be accessed or
1071 get_memory_rtx (tree exp, tree len)
/* Expand the address and wrap it in a BLKmode MEM.  */
1073 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1074 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1076 /* Get an expression we can use to find the attributes to assign to MEM.
1077 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1078 we can. First remove any nops. */
1079 while (CONVERT_EXPR_P (exp)
1080 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1081 exp = TREE_OPERAND (exp, 0);
1083 if (TREE_CODE (exp) == ADDR_EXPR)
1084 exp = TREE_OPERAND (exp, 0);
1085 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1086 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1090 /* Honor attributes derived from exp, except for the alias set
1091 (as builtin stringops may alias with anything) and the size
1092 (as stringops may access multiple array elements). */
1095 set_mem_attributes (mem, exp, 0);
1097 /* Allow the string and memory builtins to overflow from one
1098 field into another, see http://gcc.gnu.org/PR23561.
1099 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1100 memory accessed by the string or memory builtin will fit
1101 within the field. */
1102 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1104 tree mem_expr = MEM_EXPR (mem);
/* offset/length of -1 mean "unknown"; only non-negative values are
   trusted below.  */
1105 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers to reach the underlying COMPONENT_REF.
   NOTE(review): the declaration/initialization of `inner' (original
   line ~1106) is missing from this listing.  */
1108 while (TREE_CODE (inner) == ARRAY_REF
1109 || CONVERT_EXPR_P (inner)
1110 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1111 || TREE_CODE (inner) == SAVE_EXPR)
1112 inner = TREE_OPERAND (inner, 0);
1114 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1116 if (MEM_OFFSET (mem)
1117 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1118 offset = INTVAL (MEM_OFFSET (mem));
1120 if (offset >= 0 && len && host_integerp (len, 0))
1121 length = tree_low_cst (len, 0);
/* Walk out through enclosing COMPONENT_REFs, keeping the innermost
   field only if the whole access provably fits inside it.  */
1123 while (TREE_CODE (inner) == COMPONENT_REF)
1125 tree field = TREE_OPERAND (inner, 1);
1126 gcc_assert (! DECL_BIT_FIELD (field));
1127 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1128 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1131 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1132 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1135 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1136 /* If we can prove the memory starting at XEXP (mem, 0)
1137 and ending at XEXP (mem, 0) + LENGTH will fit into
1138 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1141 && offset + length <= size)
1146 && host_integerp (DECL_FIELD_OFFSET (field), 0))
/* Translate the in-field offset to an offset within the parent.  */
1147 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1156 mem_expr = TREE_OPERAND (mem_expr, 0);
1157 inner = TREE_OPERAND (inner, 0);
1160 if (mem_expr == NULL)
1162 if (mem_expr != MEM_EXPR (mem))
1164 set_mem_expr (mem, mem_expr);
1165 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Builtin stringops may alias with anything, and may span multiple
   elements, so drop the alias set and size.  */
1168 set_mem_alias_set (mem, 0);
1169 set_mem_size (mem, NULL_RTX);
1175 /* Built-in functions to perform an untyped call and return. */
/* The three tables below are filled in lazily by apply_args_size and
   apply_result_size the first time those functions run; their contents
   never change afterwards.  */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1201 apply_args_size (void)
1203 static int size = -1;
1206 enum machine_mode mode;
1208 /* The values computed by this function never change. */
1211 /* The first value is the incoming arg-pointer. */
1212 size = GET_MODE_SIZE (Pmode);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1217 size += GET_MODE_SIZE (Pmode);
1219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1220 if (FUNCTION_ARG_REGNO_P (regno))
1222 mode = reg_raw_mode[regno];
1224 gcc_assert (mode != VOIDmode);
1226 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1227 if (size % align != 0)
1228 size = CEIL (size, align) * align;
1229 apply_args_reg_offset[regno] = size;
1230 size += GET_MODE_SIZE (mode);
1231 apply_args_mode[regno] = mode;
1235 apply_args_mode[regno] = VOIDmode;
1236 apply_args_reg_offset[regno] = 0;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1246 apply_result_size (void)
1248 static int size = -1;
1250 enum machine_mode mode;
1252 /* The values computed by this function never change. */
1257 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1258 if (FUNCTION_VALUE_REGNO_P (regno))
1260 mode = reg_raw_mode[regno];
1262 gcc_assert (mode != VOIDmode);
1264 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1265 if (size % align != 0)
1266 size = CEIL (size, align) * align;
1267 size += GET_MODE_SIZE (mode);
1268 apply_result_mode[regno] = mode;
1271 apply_result_mode[regno] = VOIDmode;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size = APPLY_RESULT_SIZE;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per live result
   register, using the same layout as apply_result_size.
   NOTE(review): reconstructed from a line-numbered listing with dropped
   lines; verify against upstream builtins.c.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* On restore we address the inbound side of any register window.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
1317 expand_builtin_apply_args_1 (void)
1320 int size, align, regno;
1321 enum machine_mode mode;
1322 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size = GET_MODE_SIZE (Pmode);
1330 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1331 size += GET_MODE_SIZE (Pmode);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1335 if ((mode = apply_args_mode[regno]) != VOIDmode)
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1341 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1343 emit_move_insn (adjust_address (registers, mode, size), tem);
1344 size += GET_MODE_SIZE (mode);
1347 /* Save the arg pointer to the block. */
1348 tem = copy_to_reg (virtual_incoming_args_rtx);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer as the caller actually passed them to us, not
1351 as we might have pretended they were passed. Make sure it's a valid
1352 operand, as emit_move_insn isn't expected to handle a PLUS. */
1354 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1357 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1359 size = GET_MODE_SIZE (Pmode);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value)
1365 emit_move_insn (adjust_address (registers, Pmode, size),
1366 copy_to_reg (struct_incoming_value));
1367 size += GET_MODE_SIZE (Pmode);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers, 0));
1374 /* __builtin_apply_args returns block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value != 0)
1387 return apply_args_value;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1396 temp = expand_builtin_apply_args_1 ();
1400 apply_args_value = temp;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1406 push_topmost_sequence ();
1407 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
/* NOTE(review): this listing carries embedded original line numbers and is
   missing interior lines (brace lines, #else/#endif pairs, some condition
   heads); comments below annotate only what the visible lines establish.  */
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
/* FUNCTION is the callee address, ARGUMENTS the block built by
   __builtin_apply_args, ARGSIZE the byte count of stack arguments.
   Returns the address of a block holding the callee's return registers.  */
1417 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1419 int size, align, regno;
1420 enum machine_mode mode;
1421 rtx incoming_args, result, reg, dest, src, call_insn;
1422 rtx old_stack_level = 0;
1423 rtx call_fusage = 0;
1424 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1426 arguments = convert_memory_address (Pmode, arguments);
1428 /* Create a block where the return registers can be saved. */
1429 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args = gen_reg_rtx (Pmode);
1433 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1434 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks, the saved arg pointer is past the
   arguments; back it off by ARGSIZE.  */
1435 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1436 incoming_args, 0, OPTAB_LIB_WIDEN);
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1442 do_pending_stack_adjust ();
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal)
1448 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1451 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1456 dest = virtual_outgoing_args_rtx;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize) == CONST_INT)
1459 dest = plus_constant (dest, -INTVAL (argsize));
1461 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1463 dest = gen_rtx_MEM (BLKmode, dest);
1464 set_mem_align (dest, PARM_BOUNDARY);
1465 src = gen_rtx_MEM (BLKmode, incoming_args);
1466 set_mem_align (src, PARM_BOUNDARY);
1467 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1469 /* Refer to the argument block. */
1471 arguments = gen_rtx_MEM (BLKmode, arguments);
1472 set_mem_align (arguments, PARM_BOUNDARY);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1482 if ((mode = apply_args_mode[regno]) != VOIDmode)
1484 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1485 if (size % align != 0)
1486 size = CEIL (size, align) * align;
1487 reg = gen_rtx_REG (mode, regno);
1488 emit_move_insn (reg, adjust_address (arguments, mode, size));
1489 use_reg (&call_fusage, reg);
1490 size += GET_MODE_SIZE (mode);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size = GET_MODE_SIZE (Pmode);
1498 rtx value = gen_reg_rtx (Pmode);
1499 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1500 emit_move_insn (struct_value, value);
1501 if (REG_P (struct_value))
1502 use_reg (&call_fusage, struct_value);
1503 size += GET_MODE_SIZE (Pmode);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function) != SYMBOL_REF)
1513 function = memory_address (FUNCTION_MODE, function);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1519 result, result_vector (1, result)));
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value)
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1532 if ((mode = apply_result_mode[regno]) != VOIDmode)
1534 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1536 valreg = gen_rtx_REG (mode, regno);
1539 emit_call_insn (GEN_CALL_VALUE (valreg,
1540 gen_rtx_MEM (FUNCTION_MODE, function),
1541 const0_rtx, NULL_RTX, const0_rtx));
1543 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1549 /* Find the CALL insn we just emitted, and attach the register usage
1551 call_insn = last_call_insn ();
1552 add_function_usage_to (call_insn, call_fusage);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal)
1557 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1560 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1564 /* Return the address of the result block. */
1565 result = copy_addr_to_reg (XEXP (result, 0));
1566 return convert_memory_address (ptr_mode, result);
1569 /* Perform an untyped return. */
1572 expand_builtin_return (rtx result)
1574 int size, align, regno;
1575 enum machine_mode mode;
1577 rtx call_fusage = 0;
1579 result = convert_memory_address (Pmode, result);
1581 apply_result_size ();
1582 result = gen_rtx_MEM (BLKmode, result);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return)
1587 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1593 /* Restore the return value and note that each value is used. */
1595 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1596 if ((mode = apply_result_mode[regno]) != VOIDmode)
1598 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1599 if (size % align != 0)
1600 size = CEIL (size, align) * align;
1601 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1602 emit_move_insn (reg, adjust_address (result, mode, size));
1604 push_to_sequence (call_fusage);
1605 emit_insn (gen_rtx_USE (VOIDmode, reg));
1606 call_fusage = get_insns ();
1608 size += GET_MODE_SIZE (mode);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage);
1614 /* Return whatever values was restored by jumping directly to the end
1616 expand_naked_return ();
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1621 static enum type_class
1622 type_to_class (tree type)
1624 switch (TREE_CODE (type))
1626 case VOID_TYPE: return void_type_class;
1627 case INTEGER_TYPE: return integer_type_class;
1628 case ENUMERAL_TYPE: return enumeral_type_class;
1629 case BOOLEAN_TYPE: return boolean_type_class;
1630 case POINTER_TYPE: return pointer_type_class;
1631 case REFERENCE_TYPE: return reference_type_class;
1632 case OFFSET_TYPE: return offset_type_class;
1633 case REAL_TYPE: return real_type_class;
1634 case COMPLEX_TYPE: return complex_type_class;
1635 case FUNCTION_TYPE: return function_type_class;
1636 case METHOD_TYPE: return method_type_class;
1637 case RECORD_TYPE: return record_type_class;
1639 case QUAL_UNION_TYPE: return union_type_class;
1640 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1641 ? string_type_class : array_type_class);
1642 case LANG_TYPE: return lang_type_class;
1643 default: return no_type_class;
1647 /* Expand a call EXP to __builtin_classify_type. */
1650 expand_builtin_classify_type (tree exp)
1652 if (call_expr_nargs (exp))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1654 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It expands to case
   labels plus assignments to the local FCODE/FCODEF/FCODEL variables of
   the enclosing switch.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
    fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
    fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
    fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
    fcodel = BUILT_IN_MATHFN##L_R ; break;
1671 /* Return mathematic function equivalent to FN but operating directly
1672 on TYPE, if available. If IMPLICIT is true find the function in
1673 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1674 can't do the conversion, return zero. */
1677 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1679 tree const *const fn_arr
1680 = implicit ? implicit_built_in_decls : built_in_decls;
1681 enum built_in_function fcode, fcodef, fcodel;
1685 CASE_MATHFN (BUILT_IN_ACOS)
1686 CASE_MATHFN (BUILT_IN_ACOSH)
1687 CASE_MATHFN (BUILT_IN_ASIN)
1688 CASE_MATHFN (BUILT_IN_ASINH)
1689 CASE_MATHFN (BUILT_IN_ATAN)
1690 CASE_MATHFN (BUILT_IN_ATAN2)
1691 CASE_MATHFN (BUILT_IN_ATANH)
1692 CASE_MATHFN (BUILT_IN_CBRT)
1693 CASE_MATHFN (BUILT_IN_CEIL)
1694 CASE_MATHFN (BUILT_IN_CEXPI)
1695 CASE_MATHFN (BUILT_IN_COPYSIGN)
1696 CASE_MATHFN (BUILT_IN_COS)
1697 CASE_MATHFN (BUILT_IN_COSH)
1698 CASE_MATHFN (BUILT_IN_DREM)
1699 CASE_MATHFN (BUILT_IN_ERF)
1700 CASE_MATHFN (BUILT_IN_ERFC)
1701 CASE_MATHFN (BUILT_IN_EXP)
1702 CASE_MATHFN (BUILT_IN_EXP10)
1703 CASE_MATHFN (BUILT_IN_EXP2)
1704 CASE_MATHFN (BUILT_IN_EXPM1)
1705 CASE_MATHFN (BUILT_IN_FABS)
1706 CASE_MATHFN (BUILT_IN_FDIM)
1707 CASE_MATHFN (BUILT_IN_FLOOR)
1708 CASE_MATHFN (BUILT_IN_FMA)
1709 CASE_MATHFN (BUILT_IN_FMAX)
1710 CASE_MATHFN (BUILT_IN_FMIN)
1711 CASE_MATHFN (BUILT_IN_FMOD)
1712 CASE_MATHFN (BUILT_IN_FREXP)
1713 CASE_MATHFN (BUILT_IN_GAMMA)
1714 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1715 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1716 CASE_MATHFN (BUILT_IN_HYPOT)
1717 CASE_MATHFN (BUILT_IN_ILOGB)
1718 CASE_MATHFN (BUILT_IN_INF)
1719 CASE_MATHFN (BUILT_IN_ISINF)
1720 CASE_MATHFN (BUILT_IN_J0)
1721 CASE_MATHFN (BUILT_IN_J1)
1722 CASE_MATHFN (BUILT_IN_JN)
1723 CASE_MATHFN (BUILT_IN_LCEIL)
1724 CASE_MATHFN (BUILT_IN_LDEXP)
1725 CASE_MATHFN (BUILT_IN_LFLOOR)
1726 CASE_MATHFN (BUILT_IN_LGAMMA)
1727 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1728 CASE_MATHFN (BUILT_IN_LLCEIL)
1729 CASE_MATHFN (BUILT_IN_LLFLOOR)
1730 CASE_MATHFN (BUILT_IN_LLRINT)
1731 CASE_MATHFN (BUILT_IN_LLROUND)
1732 CASE_MATHFN (BUILT_IN_LOG)
1733 CASE_MATHFN (BUILT_IN_LOG10)
1734 CASE_MATHFN (BUILT_IN_LOG1P)
1735 CASE_MATHFN (BUILT_IN_LOG2)
1736 CASE_MATHFN (BUILT_IN_LOGB)
1737 CASE_MATHFN (BUILT_IN_LRINT)
1738 CASE_MATHFN (BUILT_IN_LROUND)
1739 CASE_MATHFN (BUILT_IN_MODF)
1740 CASE_MATHFN (BUILT_IN_NAN)
1741 CASE_MATHFN (BUILT_IN_NANS)
1742 CASE_MATHFN (BUILT_IN_NEARBYINT)
1743 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1744 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1745 CASE_MATHFN (BUILT_IN_POW)
1746 CASE_MATHFN (BUILT_IN_POWI)
1747 CASE_MATHFN (BUILT_IN_POW10)
1748 CASE_MATHFN (BUILT_IN_REMAINDER)
1749 CASE_MATHFN (BUILT_IN_REMQUO)
1750 CASE_MATHFN (BUILT_IN_RINT)
1751 CASE_MATHFN (BUILT_IN_ROUND)
1752 CASE_MATHFN (BUILT_IN_SCALB)
1753 CASE_MATHFN (BUILT_IN_SCALBLN)
1754 CASE_MATHFN (BUILT_IN_SCALBN)
1755 CASE_MATHFN (BUILT_IN_SIGNBIT)
1756 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1757 CASE_MATHFN (BUILT_IN_SIN)
1758 CASE_MATHFN (BUILT_IN_SINCOS)
1759 CASE_MATHFN (BUILT_IN_SINH)
1760 CASE_MATHFN (BUILT_IN_SQRT)
1761 CASE_MATHFN (BUILT_IN_TAN)
1762 CASE_MATHFN (BUILT_IN_TANH)
1763 CASE_MATHFN (BUILT_IN_TGAMMA)
1764 CASE_MATHFN (BUILT_IN_TRUNC)
1765 CASE_MATHFN (BUILT_IN_Y0)
1766 CASE_MATHFN (BUILT_IN_Y1)
1767 CASE_MATHFN (BUILT_IN_YN)
1773 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1774 return fn_arr[fcode];
1775 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1776 return fn_arr[fcodef];
1777 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1778 return fn_arr[fcodel];
1783 /* Like mathfn_built_in_1(), but always use the implicit array. */
1786 mathfn_built_in (tree type, enum built_in_function fn)
1788 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1791 /* If errno must be maintained, expand the RTL to check if the result,
1792 TARGET, of a built-in function call, EXP, is NaN, and if so set
1796 expand_errno_check (tree exp, rtx target)
1798 rtx lab = gen_label_rtx ();
1800 /* Test the result; if it is NaN, set errno=EDOM because
1801 the argument was not in the domain. */
1802 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1806 /* If this built-in doesn't throw an exception, set errno directly. */
1807 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1809 #ifdef GEN_ERRNO_RTX
1810 rtx errno_rtx = GEN_ERRNO_RTX;
1813 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1815 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1821 /* Make sure the library call isn't expanded as a tail call. */
1822 CALL_EXPR_TAILCALL (exp) = 0;
1824 /* We can't set errno=EDOM directly; let the library call do it.
1825 Pop the arguments right away in case the call gets deleted. */
1827 expand_call (exp, target, 0);
/* NOTE(review): this listing carries embedded original line numbers and is
   missing interior lines (sequence start/stop, error-return paths, brace
   lines); comments below annotate only what the visible lines establish.  */
1832 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1833 Return NULL_RTX if a normal call should be emitted rather than expanding
1834 the function in-line. EXP is the expression that is a call to the builtin
1835 function; if convenient, the result should be placed in TARGET.
1836 SUBTARGET may be used as the target for computing one of EXP's operands. */
1839 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1841 optab builtin_optab;
1842 rtx op0, insns, before_call;
1843 tree fndecl = get_callee_fndecl (exp);
1844 enum machine_mode mode;
/* errno_set records whether this builtin may need an EDOM check.  */
1845 bool errno_set = false;
1848 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1851 arg = CALL_EXPR_ARG (exp, 0);
/* Select the optab for this builtin and whether errno may be set.  */
1853 switch (DECL_FUNCTION_CODE (fndecl))
1855 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets errno for negative arguments.  */
1856 errno_set = ! tree_expr_nonnegative_p (arg);
1857 builtin_optab = sqrt_optab;
1859 CASE_FLT_FN (BUILT_IN_EXP):
1860 errno_set = true; builtin_optab = exp_optab; break;
1861 CASE_FLT_FN (BUILT_IN_EXP10):
1862 CASE_FLT_FN (BUILT_IN_POW10):
1863 errno_set = true; builtin_optab = exp10_optab; break;
1864 CASE_FLT_FN (BUILT_IN_EXP2):
1865 errno_set = true; builtin_optab = exp2_optab; break;
1866 CASE_FLT_FN (BUILT_IN_EXPM1):
1867 errno_set = true; builtin_optab = expm1_optab; break;
1868 CASE_FLT_FN (BUILT_IN_LOGB):
1869 errno_set = true; builtin_optab = logb_optab; break;
1870 CASE_FLT_FN (BUILT_IN_LOG):
1871 errno_set = true; builtin_optab = log_optab; break;
1872 CASE_FLT_FN (BUILT_IN_LOG10):
1873 errno_set = true; builtin_optab = log10_optab; break;
1874 CASE_FLT_FN (BUILT_IN_LOG2):
1875 errno_set = true; builtin_optab = log2_optab; break;
1876 CASE_FLT_FN (BUILT_IN_LOG1P):
1877 errno_set = true; builtin_optab = log1p_optab; break;
1878 CASE_FLT_FN (BUILT_IN_ASIN):
1879 builtin_optab = asin_optab; break;
1880 CASE_FLT_FN (BUILT_IN_ACOS):
1881 builtin_optab = acos_optab; break;
1882 CASE_FLT_FN (BUILT_IN_TAN):
1883 builtin_optab = tan_optab; break;
1884 CASE_FLT_FN (BUILT_IN_ATAN):
1885 builtin_optab = atan_optab; break;
1886 CASE_FLT_FN (BUILT_IN_FLOOR):
1887 builtin_optab = floor_optab; break;
1888 CASE_FLT_FN (BUILT_IN_CEIL):
1889 builtin_optab = ceil_optab; break;
1890 CASE_FLT_FN (BUILT_IN_TRUNC):
1891 builtin_optab = btrunc_optab; break;
1892 CASE_FLT_FN (BUILT_IN_ROUND):
1893 builtin_optab = round_optab; break;
1894 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1895 builtin_optab = nearbyint_optab;
1896 if (flag_trapping_math)
1898 /* Else fallthrough and expand as rint. */
1899 CASE_FLT_FN (BUILT_IN_RINT):
1900 builtin_optab = rint_optab; break;
1905 /* Make a suitable register to place result in. */
1906 mode = TYPE_MODE (TREE_TYPE (exp));
/* errno/NaN handling is moot when errno-math is off or NaNs are not
   honored in this mode.  */
1908 if (! flag_errno_math || ! HONOR_NANS (mode))
1911 /* Before working hard, check whether the instruction is available. */
1912 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1914 target = gen_reg_rtx (mode);
1916 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1917 need to expand the argument again. This way, we will not perform
1918 side-effects more the once. */
1919 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1921 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1925 /* Compute into TARGET.
1926 Set TARGET to wherever the result comes back. */
1927 target = expand_unop (mode, builtin_optab, op0, target, 0);
1932 expand_errno_check (exp, target);
1934 /* Output the entire sequence. */
1935 insns = get_insns ();
1941 /* If we were unable to expand via the builtin, stop the sequence
1942 (without outputting the insns) and call to the library function
1943 with the stabilized argument list. */
1947 before_call = get_last_insn ();
1949 target = expand_call (exp, target, target == const0_rtx);
1951 /* If this is a sqrt operation and we don't care about errno, try to
1952 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1953 This allows the semantics of the libcall to be visible to the RTL
1955 if (builtin_optab == sqrt_optab && !errno_set)
1957 /* Search backwards through the insns emitted by expand_call looking
1958 for the instruction with the REG_RETVAL note. */
1959 rtx last = get_last_insn ();
1960 while (last != before_call)
1962 if (find_reg_note (last, REG_RETVAL, NULL))
1964 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1965 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1966 two elements, i.e. symbol_ref(sqrt) and the operand. */
1968 && GET_CODE (note) == EXPR_LIST
1969 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1970 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1971 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1973 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1974 /* Check operand is a register with expected mode. */
1977 && GET_MODE (operand) == mode)
1979 /* Replace the REG_EQUAL note with a SQRT rtx. */
1980 rtx equiv = gen_rtx_SQRT (mode, operand);
1981 set_unique_reg_note (last, REG_EQUAL, equiv);
1986 last = PREV_INSN (last);
/* NOTE(review): this listing carries embedded original line numbers and is
   missing interior lines (including this function's closing
   end_sequence/emit_insn/return lines); comments below annotate only what
   the visible lines establish.  */
1993 /* Expand a call to the builtin binary math functions (pow and atan2).
1994 Return NULL_RTX if a normal call should be emitted rather than expanding the
1995 function in-line. EXP is the expression that is a call to the builtin
1996 function; if convenient, the result should be placed in TARGET.
1997 SUBTARGET may be used as the target for computing one of EXP's
2001 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2003 optab builtin_optab;
2004 rtx op0, op1, insns;
/* Second argument is REAL_TYPE except for the ldexp/scalbn family.  */
2005 int op1_type = REAL_TYPE;
2006 tree fndecl = get_callee_fndecl (exp);
2008 enum machine_mode mode;
2009 bool errno_set = true;
2011 switch (DECL_FUNCTION_CODE (fndecl))
2013 CASE_FLT_FN (BUILT_IN_SCALBN):
2014 CASE_FLT_FN (BUILT_IN_SCALBLN):
2015 CASE_FLT_FN (BUILT_IN_LDEXP):
2016 op1_type = INTEGER_TYPE;
2021 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2024 arg0 = CALL_EXPR_ARG (exp, 0);
2025 arg1 = CALL_EXPR_ARG (exp, 1);
/* Select the optab for this builtin.  */
2027 switch (DECL_FUNCTION_CODE (fndecl))
2029 CASE_FLT_FN (BUILT_IN_POW):
2030 builtin_optab = pow_optab; break;
2031 CASE_FLT_FN (BUILT_IN_ATAN2):
2032 builtin_optab = atan2_optab; break;
2033 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb is only expandable for radix-2 floating formats.  */
2034 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2036 builtin_optab = scalb_optab; break;
2037 CASE_FLT_FN (BUILT_IN_SCALBN):
2038 CASE_FLT_FN (BUILT_IN_SCALBLN):
2039 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2041 /* Fall through... */
2042 CASE_FLT_FN (BUILT_IN_LDEXP):
2043 builtin_optab = ldexp_optab; break;
2044 CASE_FLT_FN (BUILT_IN_FMOD):
2045 builtin_optab = fmod_optab; break;
2046 CASE_FLT_FN (BUILT_IN_REMAINDER):
2047 CASE_FLT_FN (BUILT_IN_DREM):
2048 builtin_optab = remainder_optab; break;
2053 /* Make a suitable register to place result in. */
2054 mode = TYPE_MODE (TREE_TYPE (exp));
2056 /* Before working hard, check whether the instruction is available. */
2057 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2060 target = gen_reg_rtx (mode);
2062 if (! flag_errno_math || ! HONOR_NANS (mode))
2065 /* Always stabilize the argument list. */
2066 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2067 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2069 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2070 op1 = expand_normal (arg1);
2074 /* Compute into TARGET.
2075 Set TARGET to wherever the result comes back. */
2076 target = expand_binop (mode, builtin_optab, op0, op1,
2077 target, 0, OPTAB_DIRECT);
2079 /* If we were unable to expand via the builtin, stop the sequence
2080 (without outputting the insns) and call to the library function
2081 with the stabilized argument list. */
2085 return expand_call (exp, target, target == const0_rtx);
2089 expand_errno_check (exp, target);
2091 /* Output the entire sequence. */
2092 insns = get_insns ();
2099 /* Expand a call to the builtin sin and cos math functions.
2100 Return NULL_RTX if a normal call should be emitted rather than expanding the
2101 function in-line. EXP is the expression that is a call to the builtin
2102 function; if convenient, the result should be placed in TARGET.
2103 SUBTARGET may be used as the target for computing one of EXP's
2107 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2109 optab builtin_optab;
2111 tree fndecl = get_callee_fndecl (exp);
2112 enum machine_mode mode;
2115 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2118 arg = CALL_EXPR_ARG (exp, 0);
/* Both sin and cos start with the combined sincos optab; if that is
   unavailable we fall back to the single-result optab below.  */
2120 switch (DECL_FUNCTION_CODE (fndecl))
2122 CASE_FLT_FN (BUILT_IN_SIN):
2123 CASE_FLT_FN (BUILT_IN_COS):
2124 builtin_optab = sincos_optab; break;
2129 /* Make a suitable register to place result in. */
2130 mode = TYPE_MODE (TREE_TYPE (exp));
2132 /* Check if sincos insn is available, otherwise fallback
2133 to sin or cos insn. */
2134 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2135 switch (DECL_FUNCTION_CODE (fndecl))
2137 CASE_FLT_FN (BUILT_IN_SIN):
2138 builtin_optab = sin_optab; break;
2139 CASE_FLT_FN (BUILT_IN_COS):
2140 builtin_optab = cos_optab; break;
2145 /* Before working hard, check whether the instruction is available. */
2146 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2148 target = gen_reg_rtx (mode);
2150 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2151 need to expand the argument again. This way, we will not perform
2152 side-effects more the once. */
2153 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2155 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2159 /* Compute into TARGET.
2160 Set TARGET to wherever the result comes back. */
2161 if (builtin_optab == sincos_optab)
/* The sincos optab produces both values at once; route TARGET to
   the sin or cos output slot depending on which builtin this is.  */
2165 switch (DECL_FUNCTION_CODE (fndecl))
2167 CASE_FLT_FN (BUILT_IN_SIN):
2168 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2170 CASE_FLT_FN (BUILT_IN_COS):
2171 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2176 gcc_assert (result);
2180 target = expand_unop (mode, builtin_optab, op0, target, 0);
2185 /* Output the entire sequence. */
2186 insns = get_insns ();
2192 /* If we were unable to expand via the builtin, stop the sequence
2193 (without outputting the insns) and call to the library function
2194 with the stabilized argument list. */
2198 target = expand_call (exp, target, target == const0_rtx);
2203 /* Expand a call to one of the builtin math functions that operate on
2204 floating point argument and output an integer result (ilogb, isinf,
2206 Return 0 if a normal call should be emitted rather than expanding the
2207 function in-line. EXP is the expression that is a call to the builtin
2208 function; if convenient, the result should be placed in TARGET.
2209 SUBTARGET may be used as the target for computing one of EXP's operands. */
2212 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2214 optab builtin_optab = 0;
2215 enum insn_code icode = CODE_FOR_nothing;
2217 tree fndecl = get_callee_fndecl (exp);
2218 enum machine_mode mode;
2219 bool errno_set = false;
2222 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2225 arg = CALL_EXPR_ARG (exp, 0);
2227 switch (DECL_FUNCTION_CODE (fndecl))
2229 CASE_FLT_FN (BUILT_IN_ILOGB):
2230 errno_set = true; builtin_optab = ilogb_optab; break;
2231 CASE_FLT_FN (BUILT_IN_ISINF):
2232 builtin_optab = isinf_optab; break;
2233 case BUILT_IN_ISNORMAL:
2234 case BUILT_IN_ISFINITE:
2235 CASE_FLT_FN (BUILT_IN_FINITE):
2236 /* These builtins have no optabs (yet). */
2242 /* There's no easy way to detect the case we need to set EDOM. */
2243 if (flag_errno_math && errno_set)
2246 /* Optab mode depends on the mode of the input argument. */
2247 mode = TYPE_MODE (TREE_TYPE (arg));
/* For the builtins with no optab, icode stays CODE_FOR_nothing and
   we go straight to the generic lowering below.  */
2250 icode = optab_handler (builtin_optab, mode)->insn_code;
2252 /* Before working hard, check whether the instruction is available. */
2253 if (icode != CODE_FOR_nothing)
2255 /* Make a suitable register to place result in. */
2257 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2258 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2260 gcc_assert (insn_data[icode].operand[0].predicate
2261 (target, GET_MODE (target)));
2263 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2264 need to expand the argument again. This way, we will not perform
2265 side-effects more the once. */
2266 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2268 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2270 if (mode != GET_MODE (op0))
2271 op0 = convert_to_mode (mode, op0, 0);
2273 /* Compute into TARGET.
2274 Set TARGET to wherever the result comes back. */
2275 emit_unop_insn (icode, target, op0, UNKNOWN);
2279 /* If there is no optab, try generic code. */
2280 switch (DECL_FUNCTION_CODE (fndecl))
2284 CASE_FLT_FN (BUILT_IN_ISINF):
2286 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2287 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2288 tree const type = TREE_TYPE (arg);
2292 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2293 real_from_string (&r, buf);
2294 result = build_call_expr (isgr_fn, 2,
2295 fold_build1 (ABS_EXPR, type, arg),
2296 build_real (type, r));
2297 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2299 CASE_FLT_FN (BUILT_IN_FINITE):
2300 case BUILT_IN_ISFINITE:
2302 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2303 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2304 tree const type = TREE_TYPE (arg);
2308 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2309 real_from_string (&r, buf);
2310 result = build_call_expr (isle_fn, 2,
2311 fold_build1 (ABS_EXPR, type, arg),
2312 build_real (type, r));
2313 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2315 case BUILT_IN_ISNORMAL:
2317 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2318 islessequal(fabs(x),DBL_MAX). */
2319 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2320 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2321 tree const type = TREE_TYPE (arg);
2322 REAL_VALUE_TYPE rmax, rmin;
2325 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2326 real_from_string (&rmax, buf);
/* The smallest normalized value of the mode is 2^(emin-1), written
   here as a hexadecimal float string for real_from_string.  */
2327 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2328 real_from_string (&rmin, buf);
2329 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2330 result = build_call_expr (isle_fn, 2, arg,
2331 build_real (type, rmax));
2332 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2333 build_call_expr (isge_fn, 2, arg,
2334 build_real (type, rmin)));
2335 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2341 target = expand_call (exp, target, target == const0_rtx);
2346 /* Expand a call to the builtin sincos math function.
2347 Return NULL_RTX if a normal call should be emitted rather than expanding the
2348 function in-line. EXP is the expression that is a call to the builtin
2352 expand_builtin_sincos (tree exp)
2354 rtx op0, op1, op2, target1, target2;
2355 enum machine_mode mode;
2356 tree arg, sinp, cosp;
2359 if (!validate_arglist (exp, REAL_TYPE,
2360 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* sincos (arg, sinp, cosp): ARG is the angle, SINP/COSP are the
   pointers the results are stored through.  */
2363 arg = CALL_EXPR_ARG (exp, 0);
2364 sinp = CALL_EXPR_ARG (exp, 1);
2365 cosp = CALL_EXPR_ARG (exp, 2);
2367 /* Make a suitable register to place result in. */
2368 mode = TYPE_MODE (TREE_TYPE (arg));
2370 /* Check if sincos insn is available, otherwise emit the call. */
2371 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2374 target1 = gen_reg_rtx (mode);
2375 target2 = gen_reg_rtx (mode);
2377 op0 = expand_normal (arg);
/* op1/op2 are the memory locations *sinp and *cosp.  */
2378 op1 = expand_normal (build_fold_indirect_ref (sinp));
2379 op2 = expand_normal (build_fold_indirect_ref (cosp));
2381 /* Compute into target1 and target2.
2382 Set TARGET to wherever the result comes back. */
2383 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2384 gcc_assert (result);
2386 /* Move target1 and target2 to the memory locations indicated
2388 emit_move_insn (op1, target1);
2389 emit_move_insn (op2, target2);
2394 /* Expand a call to the internal cexpi builtin to the sincos math function.
2395 EXP is the expression that is a call to the builtin function; if convenient,
2396 the result should be placed in TARGET. SUBTARGET may be used as the target
2397 for computing one of EXP's operands. */
2400 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2402 tree fndecl = get_callee_fndecl (exp);
2404 enum machine_mode mode;
2407 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2410 arg = CALL_EXPR_ARG (exp, 0);
2411 type = TREE_TYPE (arg);
2412 mode = TYPE_MODE (TREE_TYPE (arg));
2414 /* Try expanding via a sincos optab, fall back to emitting a libcall
2415 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2416 is only generated from sincos, cexp or if we have either of them. */
2417 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
/* Strategy 1: a sincos insn exists — compute both parts directly
   into registers.  */
2419 op1 = gen_reg_rtx (mode);
2420 op2 = gen_reg_rtx (mode);
2422 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2424 /* Compute into op1 and op2. */
2425 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2427 else if (TARGET_HAS_SINCOS)
/* Strategy 2: call the library sincos through temporaries whose
   addresses are passed as the two pointer arguments.  */
2429 tree call, fn = NULL_TREE;
2433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2434 fn = built_in_decls[BUILT_IN_SINCOSF];
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2436 fn = built_in_decls[BUILT_IN_SINCOS];
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2438 fn = built_in_decls[BUILT_IN_SINCOSL];
2442 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2443 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2444 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2445 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2446 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2447 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2449 /* Make sure not to fold the sincos call again. */
2450 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2452 call, 3, arg, top1, top2));
/* Strategy 3: no sincos at all — lower cexpi(x) to cexp(0+xi).  */
2456 tree call, fn = NULL_TREE, narg;
2457 tree ctype = build_complex_type (type);
2459 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2460 fn = built_in_decls[BUILT_IN_CEXPF];
2461 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2462 fn = built_in_decls[BUILT_IN_CEXP];
2463 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2464 fn = built_in_decls[BUILT_IN_CEXPL];
2468 /* If we don't have a decl for cexp create one. This is the
2469 friendliest fallback if the user calls __builtin_cexpi
2470 without full target C99 function support. */
2471 if (fn == NULL_TREE)
2474 const char *name = NULL;
2476 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2478 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2480 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2483 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2484 fn = build_fn_decl (name, fntype);
/* Build the complex argument 0 + arg*i for cexp.  */
2487 narg = fold_build2 (COMPLEX_EXPR, ctype,
2488 build_real (type, dconst0), arg);
2490 /* Make sure not to fold the cexp call again. */
2491 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2492 return expand_expr (build_call_nary (ctype, call, 1, narg),
2493 target, VOIDmode, EXPAND_NORMAL);
2496 /* Now build the proper return type. */
2497 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2498 make_tree (TREE_TYPE (arg), op2),
2499 make_tree (TREE_TYPE (arg), op1)),
2500 target, VOIDmode, EXPAND_NORMAL);
2503 /* Expand a call to one of the builtin rounding functions gcc defines
2504 as an extension (lfloor and lceil). As these are gcc extensions we
2505 do not need to worry about setting errno to EDOM.
2506 If expanding via optab fails, lower expression to (int)(floor(x)).
2507 EXP is the expression that is a call to the builtin function;
2508 if convenient, the result should be placed in TARGET. SUBTARGET may
2509 be used as the target for computing one of EXP's operands. */
2512 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2514 convert_optab builtin_optab;
2515 rtx op0, insns, tmp;
2516 tree fndecl = get_callee_fndecl (exp);
2517 enum built_in_function fallback_fn;
2518 tree fallback_fndecl;
2519 enum machine_mode mode;
2522 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2525 arg = CALL_EXPR_ARG (exp, 0);
/* Select the conversion optab and, in case that fails, which
   pure-FP rounding builtin to fall back to.  */
2527 switch (DECL_FUNCTION_CODE (fndecl))
2529 CASE_FLT_FN (BUILT_IN_LCEIL):
2530 CASE_FLT_FN (BUILT_IN_LLCEIL):
2531 builtin_optab = lceil_optab;
2532 fallback_fn = BUILT_IN_CEIL;
2535 CASE_FLT_FN (BUILT_IN_LFLOOR):
2536 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2537 builtin_optab = lfloor_optab;
2538 fallback_fn = BUILT_IN_FLOOR;
2545 /* Make a suitable register to place result in. */
2546 mode = TYPE_MODE (TREE_TYPE (exp));
2548 target = gen_reg_rtx (mode);
2550 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2551 need to expand the argument again. This way, we will not perform
2552 side-effects more the once. */
2553 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2555 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2559 /* Compute into TARGET. */
2560 if (expand_sfix_optab (target, op0, builtin_optab))
2562 /* Output the entire sequence. */
2563 insns = get_insns ();
2569 /* If we were unable to expand via the builtin, stop the sequence
2570 (without outputting the insns). */
2573 /* Fall back to floating point rounding optab. */
2574 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2576 /* For non-C99 targets we may end up without a fallback fndecl here
2577 if the user called __builtin_lfloor directly. In this case emit
2578 a call to the floor/ceil variants nevertheless. This should result
2579 in the best user experience for not full C99 targets. */
2580 if (fallback_fndecl == NULL_TREE)
2583 const char *name = NULL;
/* Pick the precise floor/ceil libm entry point matching the
   argument's floating-point type.  */
2585 switch (DECL_FUNCTION_CODE (fndecl))
2587 case BUILT_IN_LCEIL:
2588 case BUILT_IN_LLCEIL:
2591 case BUILT_IN_LCEILF:
2592 case BUILT_IN_LLCEILF:
2595 case BUILT_IN_LCEILL:
2596 case BUILT_IN_LLCEILL:
2599 case BUILT_IN_LFLOOR:
2600 case BUILT_IN_LLFLOOR:
2603 case BUILT_IN_LFLOORF:
2604 case BUILT_IN_LLFLOORF:
2607 case BUILT_IN_LFLOORL:
2608 case BUILT_IN_LLFLOORL:
2615 fntype = build_function_type_list (TREE_TYPE (arg),
2616 TREE_TYPE (arg), NULL_TREE);
2617 fallback_fndecl = build_fn_decl (name, fntype);
/* Expand the FP-domain rounding call, then convert its result to
   the integer result type.  */
2620 exp = build_call_expr (fallback_fndecl, 1, arg);
2622 tmp = expand_normal (exp);
2624 /* Truncate the result of floating point optab to integer
2625 via expand_fix (). */
2626 target = gen_reg_rtx (mode);
2627 expand_fix (target, tmp, 0);
2632 /* Expand a call to one of the builtin math functions doing integer
2634 Return 0 if a normal call should be emitted rather than expanding the
2635 function in-line. EXP is the expression that is a call to the builtin
2636 function; if convenient, the result should be placed in TARGET.
2637 SUBTARGET may be used as the target for computing one of EXP's operands. */
2640 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2642 convert_optab builtin_optab;
2644 tree fndecl = get_callee_fndecl (exp);
2646 enum machine_mode mode;
/* Unlike lfloor/lceil, lrint/lround can raise EDOM/FE exceptions,
   so with -fmath-errno we must not expand inline.  */
2648 /* There's no easy way to detect the case we need to set EDOM. */
2649 if (flag_errno_math)
2652 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2655 arg = CALL_EXPR_ARG (exp, 0);
2657 switch (DECL_FUNCTION_CODE (fndecl))
2659 CASE_FLT_FN (BUILT_IN_LRINT):
2660 CASE_FLT_FN (BUILT_IN_LLRINT):
2661 builtin_optab = lrint_optab; break;
2662 CASE_FLT_FN (BUILT_IN_LROUND):
2663 CASE_FLT_FN (BUILT_IN_LLROUND):
2664 builtin_optab = lround_optab; break;
2669 /* Make a suitable register to place result in. */
2670 mode = TYPE_MODE (TREE_TYPE (exp));
2672 target = gen_reg_rtx (mode);
2674 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2675 need to expand the argument again. This way, we will not perform
2676 side-effects more the once. */
2677 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2679 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2683 if (expand_sfix_optab (target, op0, builtin_optab))
2685 /* Output the entire sequence. */
2686 insns = get_insns ();
2692 /* If we were unable to expand via the builtin, stop the sequence
2693 (without outputting the insns) and call to the library function
2694 with the stabilized argument list. */
2697 target = expand_call (exp, target, target == const0_rtx);
2702 /* To evaluate powi(x,n), the floating point value x raised to the
2703 constant integer exponent n, we use a hybrid algorithm that
2704 combines the "window method" with look-up tables. For an
2705 introduction to exponentiation algorithms and "addition chains",
2706 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2707 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2708 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2709 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2711 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2712 multiplications to inline before calling the system library's pow
2713 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2714 so this default never requires calling pow, powf or powl. */
2716 #ifndef POWI_MAX_MULTS
2717 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2720 /* The size of the "optimal power tree" lookup table. All
2721 exponents less than this value are simply looked up in the
2722 powi_table below. This threshold is also used to size the
2723 cache of pseudo registers that hold intermediate results. */
2724 #define POWI_TABLE_SIZE 256
2726 /* The size, in bits of the window, used in the "window method"
2727 exponentiation algorithm. This is equivalent to a radix of
2728 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2729 #define POWI_WINDOW_SIZE 3
2731 /* The following table is an efficient representation of an
2732 "optimal power tree". For each value, i, the corresponding
2733 value, j, in the table states than an optimal evaluation
2734 sequence for calculating pow(x,i) can be found by evaluating
2735 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2736 100 integers is given in Knuth's "Seminumerical algorithms". */
/* All entries fit in an unsigned char since every split value is
   below POWI_TABLE_SIZE (256); the table is indexed by the exponent
   itself, and consumers are powi_lookup_cost and expand_powi_1.  */
2738 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2740 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2741 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2742 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2743 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2744 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2745 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2746 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2747 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2748 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2749 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2750 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2751 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2752 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2753 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2754 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2755 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2756 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2757 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2758 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2759 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2760 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2761 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2762 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2763 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2764 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2765 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2766 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2767 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2768 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2769 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2770 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2771 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2775 /* Return the number of multiplications required to calculate
2776 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2777 subroutine of powi_cost. CACHE is an array indicating
2778 which exponents have already been calculated. */
2781 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2783 /* If we've already calculated this exponent, then this evaluation
2784 doesn't require any additional multiplications. */
/* Recurse on the optimal split recorded in powi_table: one extra
   multiplication combines pow(x, n - powi_table[n]) with
   pow(x, powi_table[n]).  */
2789 return powi_lookup_cost (n - powi_table[n], cache)
2790 + powi_lookup_cost (powi_table[n], cache) + 1;
2793 /* Return the number of multiplications required to calculate
2794 powi(x,n) for an arbitrary x, given the exponent N. This
2795 function needs to be kept in sync with expand_powi below. */
2798 powi_cost (HOST_WIDE_INT n)
2800 bool cache[POWI_TABLE_SIZE];
2801 unsigned HOST_WIDE_INT digit;
2802 unsigned HOST_WIDE_INT val;
2808 /* Ignore the reciprocal when calculating the cost. */
2809 val = (n < 0) ? -n : n;
2811 /* Initialize the exponent cache. */
2812 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* "Window method": peel off POWI_WINDOW_SIZE low bits at a time.
   Each window contributes its table-lookup cost plus the squarings
   needed to shift the remaining exponent.  */
2817 while (val >= POWI_TABLE_SIZE)
2821 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2822 result += powi_lookup_cost (digit, cache)
2823 + POWI_WINDOW_SIZE + 1;
2824 val >>= POWI_WINDOW_SIZE;
/* Remaining exponent fits in the table.  */
2833 return result + powi_lookup_cost (val, cache);
2836 /* Recursive subroutine of expand_powi. This function takes the array,
2837 CACHE, of already calculated exponents and an exponent N and returns
2838 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2841 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2843 unsigned HOST_WIDE_INT digit;
/* Small exponent: use the optimal split recorded in powi_table and
   memoize the result in CACHE.  */
2847 if (n < POWI_TABLE_SIZE)
2852 target = gen_reg_rtx (mode);
2855 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2856 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Window case: split off the low POWI_WINDOW_SIZE bits as a table
   lookup and recurse on the rest.  */
2860 target = gen_reg_rtx (mode);
2861 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2862 op0 = expand_powi_1 (mode, n - digit, cache);
2863 op1 = expand_powi_1 (mode, digit, cache);
/* Even exponent: square pow(x, n/2).  */
2867 target = gen_reg_rtx (mode);
2868 op0 = expand_powi_1 (mode, n >> 1, cache);
/* Emit the combining multiplication into TARGET.  */
2872 result = expand_mult (mode, op0, op1, target, 0);
2873 if (result != target)
2874 emit_move_insn (target, result);
2878 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2879 floating point operand in mode MODE, and N is the exponent. This
2880 function needs to be kept in sync with powi_cost above. */
2883 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2885 unsigned HOST_WIDE_INT val;
2886 rtx cache[POWI_TABLE_SIZE];
/* n == 0: powi(x, 0) is 1 regardless of x.  */
2890 return CONST1_RTX (mode);
/* NOTE(review): VAL is assigned |n| here but the recursive expansion
   below recomputes (n < 0) ? -n : n inline — confirm VAL is used in
   the elided lines, otherwise it looks redundant.  */
2892 val = (n < 0) ? -n : n;
2894 memset (cache, 0, sizeof (cache));
2897 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2899 /* If the original exponent was negative, reciprocate the result. */
2901 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2902 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2907 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2908 a normal call should be emitted rather than expanding the function
2909 in-line. EXP is the expression that is a call to the builtin
2910 function; if convenient, the result should be placed in TARGET. */
2913 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2917 tree type = TREE_TYPE (exp);
2918 REAL_VALUE_TYPE cint, c, c2;
2921 enum machine_mode mode = TYPE_MODE (type);
2923 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2926 arg0 = CALL_EXPR_ARG (exp, 0);
2927 arg1 = CALL_EXPR_ARG (exp, 1);
/* Non-constant exponent: nothing clever to do, use the generic
   binary-mathfn expansion (pow optab or libcall).  */
2929 if (TREE_CODE (arg1) != REAL_CST
2930 || TREE_OVERFLOW (arg1))
2931 return expand_builtin_mathfn_2 (exp, target, subtarget);
2933 /* Handle constant exponents. */
2935 /* For integer valued exponents we can expand to an optimal multiplication
2936 sequence using expand_powi. */
2937 c = TREE_REAL_CST (arg1);
2938 n = real_to_integer (&c);
2939 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* Exponents -1, 0, 1, 2 are always safe to expand; larger integer
   exponents require -funsafe-math-optimizations and a bounded
   multiplication count.  */
2940 if (real_identical (&c, &cint)
2941 && ((n >= -1 && n <= 2)
2942 || (flag_unsafe_math_optimizations
2944 && powi_cost (n) <= POWI_MAX_MULTS)))
2946 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2949 op = force_reg (mode, op);
2950 op = expand_powi (op, mode, n);
/* Stabilize arg0 since the strategies below may evaluate it twice.  */
2955 narg0 = builtin_save_expr (arg0);
2957 /* If the exponent is not integer valued, check if it is half of an integer.
2958 In this case we can expand to sqrt (x) * x**(n/2). */
2959 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2960 if (fn != NULL_TREE)
2962 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2963 n = real_to_integer (&c2);
2964 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2965 if (real_identical (&c2, &cint)
2966 && ((flag_unsafe_math_optimizations
2968 && powi_cost (n/2) <= POWI_MAX_MULTS)
2971 tree call_expr = build_call_expr (fn, 1, narg0);
2972 /* Use expand_expr in case the newly built call expression
2973 was folded to a non-call. */
2974 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2977 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2978 op2 = force_reg (mode, op2);
2979 op2 = expand_powi (op2, mode, abs (n / 2));
2980 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2981 0, OPTAB_LIB_WIDEN);
2982 /* If the original exponent was negative, reciprocate the
2985 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2986 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2992 /* Try if the exponent is a third of an integer. In this case
2993 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2994 different from pow (x, 1./3.) due to rounding and behavior
2995 with negative x we need to constrain this transformation to
2996 unsafe math and positive x or finite math. */
2997 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2999 && flag_unsafe_math_optimizations
3000 && (tree_expr_nonnegative_p (arg0)
3001 || !HONOR_NANS (mode))
/* Round 3*c to the nearest integer N and verify that N/3 converts
   back to exactly C, i.e. the exponent really is a third of an
   integer in this mode.  */
3003 REAL_VALUE_TYPE dconst3;
3004 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3005 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3006 real_round (&c2, mode, &c2);
3007 n = real_to_integer (&c2);
3008 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3009 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3010 real_convert (&c2, mode, &c2);
3011 if (real_identical (&c2, &c)
3013 && powi_cost (n/3) <= POWI_MAX_MULTS)
3016 tree call_expr = build_call_expr (fn, 1,narg0);
3017 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
/* cbrt(x)**2 is needed when |n| mod 3 == 2.  */
3018 if (abs (n) % 3 == 2)
3019 op = expand_simple_binop (mode, MULT, op, op, op,
3020 0, OPTAB_LIB_WIDEN);
3023 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3024 op2 = force_reg (mode, op2);
3025 op2 = expand_powi (op2, mode, abs (n / 3));
3026 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3027 0, OPTAB_LIB_WIDEN);
3028 /* If the original exponent was negative, reciprocate the
3031 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3032 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3038 /* Fall back to optab expansion. */
3039 return expand_builtin_mathfn_2 (exp, target, subtarget);
3042 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3043 a normal call should be emitted rather than expanding the function
3044 in-line. EXP is the expression that is a call to the builtin
3045 function; if convenient, the result should be placed in TARGET. */
3048 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3052 enum machine_mode mode;
3053 enum machine_mode mode2;
3055 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3058 arg0 = CALL_EXPR_ARG (exp, 0);
3059 arg1 = CALL_EXPR_ARG (exp, 1);
3060 mode = TYPE_MODE (TREE_TYPE (exp));
3062 /* Handle constant power. */
3064 if (TREE_CODE (arg1) == INTEGER_CST
3065 && !TREE_OVERFLOW (arg1))
3067 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3069 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3070 Otherwise, check the number of multiplications required. */
/* The TREE_INT_CST_HIGH check guarantees the exponent fits in a
   HOST_WIDE_INT (positive or sign-extended negative).  */
3071 if ((TREE_INT_CST_HIGH (arg1) == 0
3072 || TREE_INT_CST_HIGH (arg1) == -1)
3073 && ((n >= -1 && n <= 2)
3075 && powi_cost (n) <= POWI_MAX_MULTS)))
3077 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3078 op0 = force_reg (mode, op0);
3079 return expand_powi (op0, mode, n);
3083 /* Emit a libcall to libgcc. */
3085 /* Mode of the 2nd argument must match that of an int. */
3086 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3088 if (target == NULL_RTX)
3089 target = gen_reg_rtx (mode);
/* Expand both operands and coerce them into the modes the libcall
   expects (result mode for x, int mode for the exponent).  */
3091 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3092 if (GET_MODE (op0) != mode)
3093 op0 = convert_to_mode (mode, op0, 0);
3094 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3095 if (GET_MODE (op1) != mode2)
3096 op1 = convert_to_mode (mode2, op1, 0);
3098 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3099 target, LCT_CONST, mode, 2,
3100 op0, mode, op1, mode2);
3105 /* Expand expression EXP which is a call to the strlen builtin. Return
3106 NULL_RTX if we failed the caller should emit a normal call, otherwise
3107 try to get the result in TARGET, if convenient. */
3110 expand_builtin_strlen (tree exp, rtx target,
3111 enum machine_mode target_mode)
3113 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3119 tree src = CALL_EXPR_ARG (exp, 0);
3120 rtx result, src_reg, char_rtx, before_strlen;
3121 enum machine_mode insn_mode = target_mode, char_mode;
3122 enum insn_code icode = CODE_FOR_nothing;
3125 /* If the length can be computed at compile-time, return it. */
3126 len = c_strlen (src, 0);
3128 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3130 /* If the length can be computed at compile-time and is constant
3131 integer, but there are side-effects in src, evaluate
3132 src for side-effects, then return len.
3133 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3134 can be optimized into: i++; x = 3; */
3135 len = c_strlen (src, 1);
3136 if (len && TREE_CODE (len) == INTEGER_CST)
3138 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3139 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
/* ALIGN is the known alignment of SRC in bytes; it is passed to the
   target's strlen pattern as an operand.  */
3142 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3144 /* If SRC is not a pointer type, don't do this operation inline. */
3148 /* Bail out if we can't compute strlen in the right mode. */
/* Search TARGET_MODE and successively wider modes for a strlen
   insn pattern.  */
3149 while (insn_mode != VOIDmode)
3151 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3152 if (icode != CODE_FOR_nothing)
3155 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3157 if (insn_mode == VOIDmode)
3160 /* Make a place to write the result of the instruction. */
3164 && GET_MODE (result) == insn_mode
3165 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3166 result = gen_reg_rtx (insn_mode);
3168 /* Make a place to hold the source address. We will not expand
3169 the actual source until we are sure that the expansion will
3170 not fail -- there are trees that cannot be expanded twice. */
3171 src_reg = gen_reg_rtx (Pmode);
3173 /* Mark the beginning of the strlen sequence so we can emit the
3174 source operand later. */
3175 before_strlen = get_last_insn ();
/* Operand 2 of the strlen pattern is the character to scan for;
   for strlen that is always the NUL byte (const0_rtx).  */
3177 char_rtx = const0_rtx;
3178 char_mode = insn_data[(int) icode].operand[2].mode;
3179 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3181 char_rtx = copy_to_mode_reg (char_mode, char_rtx)
3183 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3184 char_rtx, GEN_INT (align));
3189 /* Now that we are assured of success, expand the source. */
3191 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3193 emit_move_insn (src_reg, pat);
/* Insert the source-address computation before the strlen insns
   recorded above (or at the start if nothing preceded them).  */
3198 emit_insn_after (pat, before_strlen);
3200 emit_insn_before (pat, get_insns ());
3202 /* Return the value in the proper mode for this function. */
3203 if (GET_MODE (result) == target_mode)
3205 else if (target != 0)
3206 convert_move (target, result, 0);
3208 target = convert_to_mode (target_mode, result, 0);
3214 /* Expand a call to the strstr builtin.  Return NULL_RTX if we failed the
3215 caller should emit a normal call, otherwise try to get the result
3216 in TARGET, if convenient (and in mode MODE if that's convenient). */
3219 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
/* Require the (pointer, pointer) signature; otherwise fall back to a
   normal library call.  */
3221 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3223 tree type = TREE_TYPE (exp);
/* Try to fold strstr at the tree level (e.g. constant haystack).  */
3224 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3225 CALL_EXPR_ARG (exp, 1), type);
/* If folding produced a tree, expand it (the RESULT guard line is
   elided from this extract).  */
3227 return expand_expr (result, target, mode, EXPAND_NORMAL);
3232 /* Expand a call to the strchr builtin.  Return NULL_RTX if we failed the
3233 caller should emit a normal call, otherwise try to get the result
3234 in TARGET, if convenient (and in mode MODE if that's convenient). */
3237 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
/* Require the (pointer, int) signature; otherwise fall back to a
   normal library call.  */
3239 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3241 tree type = TREE_TYPE (exp);
/* Try to fold strchr at the tree level (constant string / character).  */
3242 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3243 CALL_EXPR_ARG (exp, 1), type);
/* If folding produced a tree, expand it (RESULT guard elided here).  */
3245 return expand_expr (result, target, mode, EXPAND_NORMAL);
3247 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3252 /* Expand a call to the strrchr builtin.  Return NULL_RTX if we failed the
3253 caller should emit a normal call, otherwise try to get the result
3254 in TARGET, if convenient (and in mode MODE if that's convenient). */
3257 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
/* Require the (pointer, int) signature; otherwise fall back to a
   normal library call.  */
3259 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3261 tree type = TREE_TYPE (exp);
/* Try to fold strrchr at the tree level (constant string / character).  */
3262 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3263 CALL_EXPR_ARG (exp, 1), type);
/* If folding produced a tree, expand it (RESULT guard elided here).  */
3265 return expand_expr (result, target, mode, EXPAND_NORMAL);
3270 /* Expand a call to the strpbrk builtin.  Return NULL_RTX if we failed the
3271 caller should emit a normal call, otherwise try to get the result
3272 in TARGET, if convenient (and in mode MODE if that's convenient). */
3275 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
/* Require the (pointer, pointer) signature; otherwise fall back to a
   normal library call.  */
3277 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3279 tree type = TREE_TYPE (exp);
/* Try to fold strpbrk at the tree level (constant operands).  */
3280 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3281 CALL_EXPR_ARG (exp, 1), type);
/* If folding produced a tree, expand it (RESULT guard elided here).  */
3283 return expand_expr (result, target, mode, EXPAND_NORMAL);
3288 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3289 bytes from constant string DATA + OFFSET and return it as target
3293 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3294 enum machine_mode mode)
3296 const char *str = (const char *) data;
/* The requested chunk must lie entirely within the string, including
   its terminating NUL byte.  */
3298 gcc_assert (offset >= 0
3299 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3300 <= strlen (str) + 1));
/* Pack the bytes at STR + OFFSET into an immediate RTX of MODE.  */
3302 return c_readstr (str + offset, mode);
3305 /* Expand a call EXP to the memcpy builtin.
3306 Return NULL_RTX if we failed, the caller should emit a normal call,
3307 otherwise try to get the result in TARGET, if convenient (and in
3308 mode MODE if that's convenient). */
/* NOTE(review): several guard/brace lines are elided from this extract;
   comments below describe only the visible statements.  */
3311 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3313 tree fndecl = get_callee_fndecl (exp);
/* Require the (pointer, pointer, integer) signature; otherwise the
   caller emits a normal library call.  */
3315 if (!validate_arglist (exp,
3316 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3320 tree dest = CALL_EXPR_ARG (exp, 0);
3321 tree src = CALL_EXPR_ARG (exp, 1);
3322 tree len = CALL_EXPR_ARG (exp, 2);
3323 const char *src_str;
3324 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3325 unsigned int dest_align
3326 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3327 rtx dest_mem, src_mem, dest_addr, len_rtx;
/* First try to fold the whole memcpy at the tree level.  */
3328 tree result = fold_builtin_memory_op (dest, src, len,
3329 TREE_TYPE (TREE_TYPE (fndecl)),
3331 HOST_WIDE_INT expected_size = -1;
3332 unsigned int expected_align = 0;
/* The fold may wrap the value in COMPOUND_EXPRs carrying side effects;
   expand each left operand for effect only, then expand the value.  */
3336 while (TREE_CODE (result) == COMPOUND_EXPR)
3338 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3340 result = TREE_OPERAND (result, 1);
3342 return expand_expr (result, target, mode, EXPAND_NORMAL);
3345 /* If DEST is not a pointer type, call the normal function. */
3346 if (dest_align == 0)
3349 /* If either SRC is not a pointer type, don't do this
3350 operation in-line. */
/* Pull profile-driven alignment/size hints for the block move.  */
3354 stringop_block_profile (exp, &expected_align, &expected_size);
3355 if (expected_align < dest_align)
3356 expected_align = dest_align;
3357 dest_mem = get_memory_rtx (dest, len);
3358 set_mem_align (dest_mem, dest_align);
3359 len_rtx = expand_normal (len);
/* Non-NULL iff SRC is a readable string constant.  */
3360 src_str = c_getstr (src);
3362 /* If SRC is a string constant and block move would be done
3363 by pieces, we can avoid loading the string from memory
3364 and only stored the computed constants. */
3366 && GET_CODE (len_rtx) == CONST_INT
3367 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3368 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3369 (void *) src_str, dest_align, false))
3371 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3372 builtin_memcpy_read_str,
3373 (void *) src_str, dest_align, false, 0);
/* Return the destination address converted to ptr_mode.  */
3374 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3375 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3379 src_mem = get_memory_rtx (src, len);
3380 set_mem_align (src_mem, src_align);
3382 /* Copy word part most expediently. */
/* General case: emit a block move, preserving tail-call status.  */
3383 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3384 CALL_EXPR_TAILCALL (exp)
3385 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3386 expected_align, expected_size);
/* Compute DEST's address ourselves when needed (the guard line for
   this fallback is elided from this extract).  */
3390 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3391 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3397 /* Expand a call EXP to the mempcpy builtin.
3398 Return NULL_RTX if we failed; the caller should emit a normal call,
3399 otherwise try to get the result in TARGET, if convenient (and in
3400 mode MODE if that's convenient). If ENDP is 0 return the
3401 destination pointer, if ENDP is 1 return the end pointer ala
3402 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3406 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
/* Require the (pointer, pointer, integer) signature; otherwise fall
   back to a normal library call.  */
3408 if (!validate_arglist (exp,
3409 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3413 tree dest = CALL_EXPR_ARG (exp, 0);
3414 tree src = CALL_EXPR_ARG (exp, 1);
3415 tree len = CALL_EXPR_ARG (exp, 2);
/* Delegate to the argument-based helper; endp == 1 selects mempcpy
   semantics, i.e. return the end pointer (see the comment above).  */
3416 return expand_builtin_mempcpy_args (dest, src, len,
3418 target, mode, /*endp=*/ 1);
3422 /* Helper function to do the actual work for expand_builtin_mempcpy.  The
3423 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3424 so that this can also be called without constructing an actual CALL_EXPR.
3425 TYPE is the return type of the call. The other arguments and return value
3426 are the same as for expand_builtin_mempcpy. */
/* NOTE(review): some guard/brace/argument lines are elided from this
   extract; comments describe only the visible statements.  */
3429 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3430 rtx target, enum machine_mode mode, int endp)
3432 /* If return value is ignored, transform mempcpy into memcpy. */
3433 if (target == const0_rtx)
3435 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* Build and expand an equivalent memcpy call (the guard for a missing
   FN decl is elided from this extract).  */
3440 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3441 target, mode, EXPAND_NORMAL);
3445 const char *src_str;
3446 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3447 unsigned int dest_align
3448 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3449 rtx dest_mem, src_mem, len_rtx;
/* First try folding the operation entirely at the tree level.  */
3450 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
/* Strip side-effect-carrying COMPOUND_EXPRs, expanding each left
   operand for effect only, then expand the remaining value.  */
3454 while (TREE_CODE (result) == COMPOUND_EXPR)
3456 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3458 result = TREE_OPERAND (result, 1);
3460 return expand_expr (result, target, mode, EXPAND_NORMAL);
3463 /* If either SRC or DEST is not a pointer type, don't do this
3464 operation in-line. */
3465 if (dest_align == 0 || src_align == 0)
3468 /* If LEN is not constant, call the normal function. */
3469 if (! host_integerp (len, 1))
3472 len_rtx = expand_normal (len);
/* Non-NULL iff SRC is a readable string constant.  */
3473 src_str = c_getstr (src);
3475 /* If SRC is a string constant and block move would be done
3476 by pieces, we can avoid loading the string from memory
3477 and only stored the computed constants. */
3479 && GET_CODE (len_rtx) == CONST_INT
3480 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3481 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3482 (void *) src_str, dest_align, false))
3484 dest_mem = get_memory_rtx (dest, len);
3485 set_mem_align (dest_mem, dest_align);
/* Store the constant bytes directly; the trailing argument line
   (presumably carrying ENDP) is elided from this extract.  */
3486 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3487 builtin_memcpy_read_str,
3488 (void *) src_str, dest_align,
3490 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3491 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise: a constant-length copy small enough to do by pieces.  */
3495 if (GET_CODE (len_rtx) == CONST_INT
3496 && can_move_by_pieces (INTVAL (len_rtx),
3497 MIN (dest_align, src_align)))
3499 dest_mem = get_memory_rtx (dest, len);
3500 set_mem_align (dest_mem, dest_align);
3501 src_mem = get_memory_rtx (src, len);
3502 set_mem_align (src_mem, src_align);
/* move_by_pieces returns DEST adjusted according to ENDP; convert its
   address to ptr_mode for the return value.  */
3503 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3504 MIN (dest_align, src_align), endp);
3505 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3506 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3514 /* Expand expression EXP, which is a call to the memmove builtin. Return
3515 NULL_RTX if we failed; the caller should emit a normal call. */
3518 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3520 if (!validate_arglist (exp,
3521 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3525 tree dest = CALL_EXPR_ARG (exp, 0);
3526 tree src = CALL_EXPR_ARG (exp, 1);
3527 tree len = CALL_EXPR_ARG (exp, 2);
3528 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),