/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "tree-gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "diagnostic.h"

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
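
/* Illustrative sketch (not compiler source): DEF_BUILTIN acts as an
   X-macro, so a hypothetical builtins.def entry such as

     DEF_BUILTIN (BUILT_IN_MEMCPY, "memcpy", ...)

   contributes the string "BUILT_IN_MEMCPY" to the initializer above.
   built_in_names therefore maps each built_in_function code to its
   enumerator name, e.g. built_in_names[(int) BUILT_IN_MEMCPY].  */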

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance, the runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_bcopy (tree, int);
static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, rtx, bool);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (tree, int, tree, int, ...);
static bool validate_arg (tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree, tree);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_cosh (tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static tree fold_builtin_round (tree, tree);
static tree fold_builtin_int_roundingfn (tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (tree, tree, tree);
static tree fold_builtin_memchr (tree, tree, tree, tree);
static tree fold_builtin_memcmp (tree, tree, tree);
static tree fold_builtin_strcmp (tree, tree);
static tree fold_builtin_strncmp (tree, tree, tree);
static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_isdigit (tree);
static tree fold_builtin_fabs (tree, tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_n (tree, tree *, int, bool);
static tree fold_builtin_0 (tree, bool);
static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_2 (tree, tree, tree, bool);
static tree fold_builtin_3 (tree, tree, tree, tree, bool);
static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (tree, tree, bool);

static tree fold_builtin_strpbrk (tree, tree, tree);
static tree fold_builtin_strstr (tree, tree, tree);
static tree fold_builtin_strrchr (tree, tree, tree);
static tree fold_builtin_strcat (tree, tree);
static tree fold_builtin_strncat (tree, tree, tree);
static tree fold_builtin_strspn (tree, tree);
static tree fold_builtin_strcspn (tree, tree);
static tree fold_builtin_sprintf (tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
#endif

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case NON_LVALUE_EXPR:
          exp = TREE_OPERAND (exp, 0);
          if (! POINTER_TYPE_P (TREE_TYPE (exp)))
            return align;

          inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
          align = MIN (inner, max_align);
          break;

        case POINTER_PLUS_EXPR:
          /* If sum of pointer + int, restrict our maximum alignment to that
             imposed by the integer.  If not, we can't do any better than
             ALIGN.  */
          if (! host_integerp (TREE_OPERAND (exp, 1), 1))
            return align;

          while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
                  & (max_align / BITS_PER_UNIT - 1))
                 != 0)
            max_align >>= 1;

          exp = TREE_OPERAND (exp, 0);
          break;

        case ADDR_EXPR:
          /* See what we are pointing at and look at its alignment.  */
          exp = TREE_OPERAND (exp, 0);
          inner = max_align;
          if (handled_component_p (exp))
            {
              HOST_WIDE_INT bitsize, bitpos;
              tree offset;
              enum machine_mode mode;
              int unsignedp, volatilep;

              exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                         &mode, &unsignedp, &volatilep, true);
              if (bitpos)
                inner = MIN (inner, (unsigned) (bitpos & -bitpos));
              if (offset && TREE_CODE (offset) == PLUS_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 1))
                {
                  /* Any overflow in calculating offset_bits won't change
                     the alignment.  */
                  unsigned offset_bits
                    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
                       * BITS_PER_UNIT);

                  if (offset_bits)
                    inner = MIN (inner, (offset_bits & -offset_bits));
                  offset = TREE_OPERAND (offset, 0);
                }
              if (offset && TREE_CODE (offset) == MULT_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 1))
                {
                  /* Any overflow in calculating offset_factor won't change
                     the alignment.  */
                  unsigned offset_factor
                    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
                       * BITS_PER_UNIT);

                  if (offset_factor)
                    inner = MIN (inner, (offset_factor & -offset_factor));
                }
              else if (offset)
                inner = MIN (inner, BITS_PER_UNIT);
            }
          if (TREE_CODE (exp) == FUNCTION_DECL)
            align = FUNCTION_BOUNDARY;
          else if (DECL_P (exp))
            align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
          else if (CONSTANT_CLASS_P (exp))
            align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
          else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
                   || TREE_CODE (exp) == INDIRECT_REF)
            align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
          else
            align = MIN (align, inner);
          return MIN (align, max_align);

        default:
          return align;
        }
    }
}
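
/* A worked illustration of the walk above (hypothetical code, not part
   of the compiler):

     struct s { char c; int i; } x;

   For the ADDR_EXPR &x.i, get_inner_reference reports the bit position
   of field 'i', and (bitpos & -bitpos) extracts the largest power of
   two dividing it, so the function can report the alignment of the int
   member instead of falling back to BITS_PER_UNIT.  */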

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning (0, "offset outside bounds of constant string");
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
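
/* Illustrations of the cases above (sketches only):

     constant "hello", no offset       -> ssize_int (5)
     constant "hello", known offset 2  -> ssize_int (3)
     variable offset into "foo\0bar"   -> NULL, since the internal zero
                                          byte makes the length depend
                                          on the unknown offset
     variable offset into "foobar"     -> size_diffop (size_int (6),
                                          offset_node)  */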

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  return immed_double_const (c[0], c[1], mode);
}
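
/* Byte-packing sketch (assuming a little-endian target where
   GET_MODE_SIZE (SImode) == 4 and UNITS_PER_WORD >= 4):
   c_readstr ("abcd", SImode) places str[0] in the least significant
   byte, yielding the constant 0x64636261; on a big-endian target the
   index J is mirrored and the result is 0x61626364.  Once a NUL byte
   is seen, CH stays zero, so the remaining bytes are zero-filled.  */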

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
          || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      current_function_accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static HOST_WIDE_INT setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  current_function_calls_setjmp = 1;

  /* We have a nonlocal label.  */
  current_function_has_nonlocal_label = 1;
}
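
/* For reference, the buffer layout established above (a summary of this
   function, not a separate specification):

     word 0         frame pointer (targetm.builtin_setjmp_frame_value)
     word 1         address of RECEIVER_LABEL
     word 2 onward  stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below
   access the buffer at these same offsets.  */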

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
         apparent to early optimization passes, so force a clobber.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (virtual_incoming_args_rtx,
                          copy_to_reg (get_arg_pointer_save_area (cfun)));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
        /* Nothing */
      }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */
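
/* A usage sketch (illustrative user code only); the second argument
   must be the constant 1, which is asserted below.  Control returns to
   the matching __builtin_setjmp, which then yields 1:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);  */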
void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                      gen_rtx_MEM (BLKmode,
                                                   gen_rtx_SCRATCH (VOIDmode))));
          emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                      gen_rtx_MEM (BLKmode,
                                                   hard_frame_pointer_rtx)));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
                                              REG_NOTES (insn));
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  current_function_has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
         This sets the actual hard register used for the frame pointer
         to the location of the function's incoming static chain info.
         The non-local goto handler will then adjust it to contain the
         proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
                                              const0_rtx, REG_NOTES (insn));
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
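
/* Usage sketch (illustrative): the two calls below expand identically,
   since the read/write flag defaults to 0 and locality to 3:

     __builtin_prefetch (p);
     __builtin_prefetch (p, 0, 3);  */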
static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     temporal locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
             (op0,
              insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
          || (GET_MODE (op0) != Pmode))
        {
          op0 = convert_memory_address (Pmode, op0);
          op0 = force_reg (Pmode, op0);
        }
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
          || TREE_CODE (exp) == NON_LVALUE_EXPR)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      /* Allow the string and memory builtins to overflow from one
         field into another, see http://gcc.gnu.org/PR23561.
         Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
         memory accessed by the string or memory builtin will fit
         within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
        {
          tree mem_expr = MEM_EXPR (mem);
          HOST_WIDE_INT offset = -1, length = -1;
          tree inner = exp;

          while (TREE_CODE (inner) == ARRAY_REF
                 || TREE_CODE (inner) == NOP_EXPR
                 || TREE_CODE (inner) == CONVERT_EXPR
                 || TREE_CODE (inner) == NON_LVALUE_EXPR
                 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
                 || TREE_CODE (inner) == SAVE_EXPR)
            inner = TREE_OPERAND (inner, 0);

          gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

          if (MEM_OFFSET (mem)
              && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
            offset = INTVAL (MEM_OFFSET (mem));

          if (offset >= 0 && len && host_integerp (len, 0))
            length = tree_low_cst (len, 0);

          while (TREE_CODE (inner) == COMPONENT_REF)
            {
              tree field = TREE_OPERAND (inner, 1);
              gcc_assert (! DECL_BIT_FIELD (field));
              gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
              gcc_assert (field == TREE_OPERAND (mem_expr, 1));

              if (length >= 0
                  && TYPE_SIZE_UNIT (TREE_TYPE (inner))
                  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
                {
                  HOST_WIDE_INT size
                    = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
                  /* If we can prove the memory starting at XEXP (mem, 0)
                     and ending at XEXP (mem, 0) + LENGTH will fit into
                     this field, we can keep that COMPONENT_REF in MEM_EXPR.  */
                  if (offset >= 0
                      && offset + length <= size)
                    break;
                }

              if (offset >= 0
                  && host_integerp (DECL_FIELD_OFFSET (field), 0))
                offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
                          + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                            / BITS_PER_UNIT;
              else
                {
                  offset = -1;
                  length = -1;
                }

              mem_expr = TREE_OPERAND (mem_expr, 0);
              inner = TREE_OPERAND (inner, 0);
            }

          if (mem_expr == NULL)
            offset = -1;
          if (mem_expr != MEM_EXPR (mem))
            {
              set_mem_expr (mem, mem_expr);
              set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
            }
        }
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */
static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
          }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, current_function_pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return apply_args_value;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
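
/* Usage sketch (illustrative user code): the three builtins expanded in
   this file cooperate to forward a call without knowing its signature:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) other_fn, args, 64);
     __builtin_return (ret);

   Here other_fn and the argument-block size 64 are hypothetical; the
   size is a caller-supplied upper bound on the copied argument data.  */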

/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg, adjust_address (result, mode, size));

        push_to_sequence (call_fusage);
        emit_insn (gen_rtx_USE (VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}

/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
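
/* Illustration (values follow enum type_class):

     __builtin_classify_type (42)          -> integer_type_class
     __builtin_classify_type (3.14)        -> real_type_class
     __builtin_classify_type ((char *) 0)  -> pointer_type_class  */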

/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
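
/* Expansion sketch: CASE_MATHFN (BUILT_IN_SIN) yields

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   while CASE_MATHFN_REENT (BUILT_IN_LGAMMA) matches and records the
   _R-suffixed reentrant variants (BUILT_IN_LGAMMA_R and so on).  */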

/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If we can't do the conversion, return zero.  */
tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  enum built_in_function fcode, fcodef, fcodel;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
        return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return implicit_built_in_decls[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return implicit_built_in_decls[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return implicit_built_in_decls[fcodel];
  else
    return NULL_TREE;
}
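
/* Example (a sketch): a caller folding sqrtl ((long double) f) for a
   float F can ask for

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the implicit declaration of sqrtf, or NULL_TREE when the
   runtime is not known to provide that variant.  */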
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
                           0, lab);

  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
        = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
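
/* In C terms, the sequence emitted above behaves like this sketch; the
   self-comparison is the usual NaN test, NaN being the only value that
   compares unequal to itself:

     if (result != result)      -- i.e. isnan (result)
       errno = EDOM;

   The EQ comparison fails when TARGET is NaN, so the branch to LAB is
   not taken and the errno store (or the library call) executes.  */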
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns, before_call;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
        {
          arg = narg;
          exp = build_call_expr (fndecl, 1, arg);
        }

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
        {
          if (errno_set)
            expand_errno_check (exp, target);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return target;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  before_call = get_last_insn ();

  target = expand_call (exp, target, target == const0_rtx);

  /* If this is a sqrt operation and we don't care about errno, try to
     attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
     This allows the semantics of the libcall to be visible to the RTL
     optimizers.  */
  if (builtin_optab == sqrt_optab && !errno_set)
    {
      /* Search backwards through the insns emitted by expand_call looking
         for the instruction with the REG_RETVAL note.  */
      rtx last = get_last_insn ();
      while (last != before_call)
        {
          if (find_reg_note (last, REG_RETVAL, NULL))
            {
              rtx note = find_reg_note (last, REG_EQUAL, NULL);
              /* Check that the REG_EQUAL note is an EXPR_LIST with
                 two elements, i.e. symbol_ref(sqrt) and the operand.  */
              if (note
                  && GET_CODE (note) == EXPR_LIST
                  && GET_CODE (XEXP (note, 0)) == EXPR_LIST
                  && XEXP (XEXP (note, 0), 1) != NULL_RTX
                  && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
                {
                  rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
                  /* Check operand is a register with expected mode.  */
                  if (operand
                      && REG_P (operand)
                      && GET_MODE (operand) == mode)
                    {
                      /* Replace the REG_EQUAL note with a SQRT rtx.  */
                      rtx equiv = gen_rtx_SQRT (mode, operand);
                      set_unique_reg_note (last, REG_EQUAL, equiv);
                    }
                }
              break;
            }

          last = PREV_INSN (last);
        }
    }

  return target;
}
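
/* Why the argument is stabilized above: if the optab expansion fails,
   the call is re-expanded for the library fallback, and without
   builtin_save_expr a side-effecting argument could be evaluated
   twice.  A sketch of the hazard this avoids:

     y = __builtin_sin (x++);   -- x++ must execute exactly once,
                                   whether expanded inline or libcalled.  */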
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, narg;
  enum machine_mode mode;
  bool errno_set = true;
  bool stable = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return NULL_RTX;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
        return NULL_RTX;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  */
  narg = builtin_save_expr (arg1);
  if (narg != arg1)
    {
      arg1 = narg;
      stable = false;
    }
  narg = builtin_save_expr (arg0);
  if (narg != arg0)
    {
      arg0 = narg;
      stable = false;
    }

  if (! stable)
    exp = build_call_expr (fndecl, 2, arg0, arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
                         target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
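
/* The REAL_MODE_FORMAT (...)->b == 2 guards above encode the identities
   these expansions rely on:

     scalbn (x, n) == x * FLT_RADIX**n
     ldexp (x, n)  == x * 2**n

   so scalb/scalbn/scalbln may share the ldexp optab only when the
   radix of the floating point mode is 2.  */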
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if the sincos insn is available; otherwise fall back
     to the sin or cos insn.  */
  if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      narg = save_expr (arg);
      if (narg != arg)
        {
          arg = narg;
          exp = build_call_expr (fndecl, 1, arg);
        }

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
         Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int result;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (result);
        }
      else
        target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return target;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
/* Expand a call to one of the builtin math functions that operate on
   a floating point argument and output an integer result (ilogb, isinf,
   and similar).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  enum insn_code icode;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return NULL_RTX;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  icode = builtin_optab->handlers[(int) mode].insn_code;

  /* Before working hard, check whether the instruction is available.  */
  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  */
      if (!target
          || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
        target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
                  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
        {
          arg = narg;
          exp = build_call_expr (fndecl, 1, arg);
        }

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
         Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if the sincos insn is available; otherwise emit the call.  */
  if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  op1 = expand_normal (build_fold_indirect_ref (sinp));
  op2 = expand_normal (build_fold_indirect_ref (cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
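
/* The net effect, sketched: a call such as

     sincos (x, &s, &c);

   becomes one two-output sincos insn computing both values into
   registers, followed by stores through the two user pointers.  */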
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, falling back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated when either of them is available.  */
  if (sincos_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = built_in_decls[BUILT_IN_SINCOSL];
      else
        gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = built_in_decls[BUILT_IN_CEXPL];
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      narg = fold_build2 (COMPLEX_EXPR, ctype,
                          build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
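
/* All three strategies above compute the same value,

     cexpi (x) == cos (x) + i*sin (x) == cexp (I*x),

   either directly through the sincos optab, through a sincos libcall
   writing two stack temporaries, or by calling cexp on the
   COMPLEX_EXPR <0, x> built around the argument.  */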
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower the expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  SUBTARGET may
   be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side effects more than once.  */
  narg = builtin_save_expr (arg);
  if (narg != arg)
    {
      arg = narg;
      exp = build_call_expr (fndecl, 1, arg);
    }

  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to the floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for targets without full C99 support.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_expr (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of the floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
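
/* When no lceil/lfloor pattern exists, the fallback path above amounts
   to the lowering promised in the function comment; for example

     long l = __builtin_lfloor (x);

   is emitted as a call to floor (or a floor optab expansion) whose
   result is then narrowed with expand_fix, i.e. (long) floor (x).  */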
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint and lround).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg, narg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side effects more than once.  */
  narg = builtin_save_expr (arg);
  if (narg != arg)
    {
      arg = narg;
      exp = build_call_expr (fndecl, 1, arg);
    }

  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits, of the window used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */
static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
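
/* Reading the table: powi_table[7] is 4, so an optimal chain for x**7
   multiplies x**4 by x**3; those factors decompose the same way
   (powi_table[4] == 2, powi_table[3] == 2), giving the
   four-multiplication chain x, x**2, x**3, x**4, x**7.  */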
/* Return the number of multiplications required to calculate
   powi(x,n) where n is less than POWI_TABLE_SIZE.  This is a
   subroutine of powi_cost.  CACHE is an array indicating
   which exponents have already been calculated.  */

static int
powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
{
  /* If we've already calculated this exponent, then this evaluation
     doesn't require any additional multiplications.  */
  if (cache[n])
    return 0;

  cache[n] = true;
  return powi_lookup_cost (n - powi_table[n], cache)
         + powi_lookup_cost (powi_table[n], cache) + 1;
}
/* Return the number of multiplications required to calculate
   powi(x,n) for an arbitrary x, given the exponent N.  This
   function needs to be kept in sync with expand_powi below.  */

static int
powi_cost (HOST_WIDE_INT n)
{
  bool cache[POWI_TABLE_SIZE];
  unsigned HOST_WIDE_INT digit;
  unsigned HOST_WIDE_INT val;
  int result;

  if (n == 0)
    return 0;

  /* Ignore the reciprocal when calculating the cost.  */
  val = (n < 0) ? -n : n;

  /* Initialize the exponent cache.  */
  memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
  cache[1] = true;

  result = 0;

  while (val >= POWI_TABLE_SIZE)
    {
      if (val & 1)
        {
          digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
          result += powi_lookup_cost (digit, cache)
                    + POWI_WINDOW_SIZE + 1;
          val >>= POWI_WINDOW_SIZE;
        }
      else
        {
          val >>= 1;
          result++;
        }
    }

  return result + powi_lookup_cost (val, cache);
}
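
/* A worked example under the defaults above, assuming the even/odd
   handling shown in the loop: for n == 500 the value is even, so one
   squaring (result++) halves it to 250, which is already below
   POWI_TABLE_SIZE, and the remaining cost is powi_lookup_cost (250).  */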
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      if (cache[n])
        return cache[n];

      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return result;
}
/* Expand the RTL to evaluate powi(x,n) in mode MODE.  X is the
   floating point operand in mode MODE, and N is the exponent.  This
   function needs to be kept in sync with powi_cost above.  */

static rtx
expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
{
  unsigned HOST_WIDE_INT val;
  rtx cache[POWI_TABLE_SIZE];
  rtx result;

  if (n == 0)
    return CONST1_RTX (mode);

  val = (n < 0) ? -n : n;

  memset (cache, 0, sizeof (cache));
  cache[1] = x;

  result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);

  /* If the original exponent was negative, reciprocate the result.  */
  if (n < 0)
    result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
                           result, NULL_RTX, 0, OPTAB_LIB_WIDEN);

  return result;
}
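
/* Example of the sign handling, sketched: expand_powi (x, mode, -5)
   first builds the positive chain (powi_table[5] == 3, so x**2,
   x**3 == x**2 * x, x**5 == x**2 * x**3) and then emits 1 / x**5
   through sdiv_optab.  */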
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
          || (flag_unsafe_math_optimizations
              && ! optimize_size
              && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
        {
          op = force_reg (mode, op);
          op = expand_powi (op, mode, n);
        }
      return op;
    }

  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
          && ((flag_unsafe_math_optimizations
               && ! optimize_size
               && powi_cost (n/2) <= POWI_MAX_MULTS)
              || n == 1))
        {
          tree call_expr = build_call_expr (fn, 1, narg0);
          op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
          if (n != 1)
            {
              op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
              op2 = force_reg (mode, op2);
              op2 = expand_powi (op2, mode, abs (n / 2));
              op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
                                        0, OPTAB_LIB_WIDEN);
              /* If the original exponent was negative, reciprocate the
                 result.  */
              if (n < 0)
                op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
                                   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
            }
          return op;
        }
    }

  /* Check whether the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x, we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
          || !HONOR_NANS (mode)))
    {
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
          && ! optimize_size
          && powi_cost (n/3) <= POWI_MAX_MULTS)
        {
          tree call_expr = build_call_expr (fn, 1, narg0);
          op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
          if (abs (n) % 3 == 2)
            op = expand_simple_binop (mode, MULT, op, op, op,
                                      0, OPTAB_LIB_WIDEN);
          if (n != 1)
            {
              op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
              op2 = force_reg (mode, op2);
              op2 = expand_powi (op2, mode, abs (n / 3));
              op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
                                        0, OPTAB_LIB_WIDEN);
              /* If the original exponent was negative, reciprocate the
                 result.  */
              if (n < 0)
                op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
                                   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
            }
          return op;
        }
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
         Otherwise, check the number of multiplications required.  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
           || TREE_INT_CST_HIGH (arg1) == -1)
          && ((n >= -1 && n <= 2)
              || (! optimize_size
                  && powi_cost (n) <= POWI_MAX_MULTS)))
        {
          op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
          op0 = force_reg (mode, op0);
          return expand_powi (op0, mode, n);
        }
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
                                    target, LCT_CONST_MAKE_BLOCK, mode, 2,
                                    op0, mode, op1, mode2);

  return target;
}
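
/* The libcall branch assumes libgcc provides the powi helpers; on
   typical targets powi_optab's libfunc resolves to __powisf2,
   __powidf2 and friends, with the C-level shape

     double __powidf2 (double x, int n);

   hence the conversion of the second operand to the mode of int.  */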
/* Expand expression EXP, which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
         integer, but there are side-effects in src, evaluate
         src for side-effects, then return len.
         E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
         can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
        {
          expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return expand_expr (len, target, target_mode, EXPAND_NORMAL);
        }

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
        {
          icode = strlen_optab->handlers[(int) insn_mode].insn_code;
          if (icode != CODE_FOR_nothing)
            break;

          insn_mode = GET_MODE_WIDER_MODE (insn_mode);
        }
      if (insn_mode == VOIDmode)
        return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
             && REG_P (result)
             && GET_MODE (result) == insn_mode
             && REGNO (result) >= FIRST_PSEUDO_REGISTER))
        result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
                                                            char_mode))
        char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
                             char_rtx, GEN_INT (align));
      if (! pat)
        return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
        emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
        target = result;
      else if (target != 0)
        convert_move (target, result, 0);
      else
        target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
/* Expand a call to the strstr builtin.  Return NULL_RTX if we failed;
   the caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */

static rtx
expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (exp);
      tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
                                         CALL_EXPR_ARG (exp, 1), type);
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  return NULL_RTX;
}

/* Expand a call to the strchr builtin.  Return NULL_RTX if we failed;
   the caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */

static rtx
expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (exp);
      tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
                                         CALL_EXPR_ARG (exp, 1), type);
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);

      /* FIXME: Should use strchrM optab so that ports can optimize this.  */
    }
  return NULL_RTX;
}

/* Expand a call to the strrchr builtin.  Return NULL_RTX if we failed;
   the caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */

static rtx
expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (exp);
      tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
                                          CALL_EXPR_ARG (exp, 1), type);
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  return NULL_RTX;
}

/* Expand a call to the strpbrk builtin.  Return NULL_RTX if we failed;
   the caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */

static rtx
expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (exp);
      tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
                                          CALL_EXPR_ARG (exp, 1), type);
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
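
/* Illustrative call: with DATA pointing at "abc" and a 4-byte MODE,
   OFFSET 0 covers the bytes 'a' 'b' 'c' '\0', and c_readstr packs
   them into an immediate in target byte order.  This is what lets
   store_by_pieces emit constant stores instead of loading the string
   from memory.  */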
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
        = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len,
                                            TREE_TYPE (TREE_TYPE (fndecl)),
                                            false, /*endp=*/0);
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      if (result)
        {
          while (TREE_CODE (result) == COMPOUND_EXPR)
            {
              expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
                           EXPAND_NORMAL);
              result = TREE_OPERAND (result, 1);
            }
          return expand_expr (result, target, mode, EXPAND_NORMAL);
        }

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC is not a pointer type, don't do this
         operation in-line.  */
      if (src_align == 0)
        return NULL_RTX;

      stringop_block_profile (exp, &expected_align, &expected_size);
      if (expected_align < dest_align)
        expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and store only the computed constants.  */
      if (src_str
          && GET_CODE (len_rtx) == CONST_INT
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  (void *) src_str, dest_align))
        {
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      (void *) src_str, dest_align, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
                                         CALL_EXPR_TAILCALL (exp)
                                         ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                         expected_align, expected_size);

      if (dest_addr == 0)
        {
          dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_addr = convert_memory_address (ptr_mode, dest_addr);
        }

      return dest_addr;
    }
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
                                          TREE_TYPE (exp),
                                          target, mode, /*endp=*/ 1);
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   TYPE is the return type of the call.  The other arguments and return value
   are the same as for expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
                             rtx target, enum machine_mode mode, int endp)
{
  /* If the return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];

      if (!fn)
        return NULL_RTX;

      return expand_expr (build_call_expr (fn, 3, dest, src, len),
                          target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
        = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;
      tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);

      if (result)
        {
          while (TREE_CODE (result) == COMPOUND_EXPR)
            {
              expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
                           EXPAND_NORMAL);
              result = TREE_OPERAND (result, 1);
            }
          return expand_expr (result, target, mode, EXPAND_NORMAL);
        }

      /* If either SRC or DEST is not a pointer type, don't do this
         operation in-line.  */
      if (dest_align == 0 || src_align == 0)
        return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
        return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and store only the computed constants.  */
      if (src_str
          && GET_CODE (len_rtx) == CONST_INT
          && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
          && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                                  (void *) src_str, dest_align))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
                                      builtin_memcpy_read_str,
                                      (void *) src_str, dest_align, endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      if (GET_CODE (len_rtx) == CONST_INT
          && can_move_by_pieces (INTVAL (len_rtx),
                                 MIN (dest_align, src_align)))
        {
          dest_mem = get_memory_rtx (dest, len);
          set_mem_align (dest_mem, dest_align);
          src_mem = get_memory_rtx (src, len);
          set_mem_align (src_mem, src_align);
          dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
                                     MIN (dest_align, src_align), endp);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }

      return NULL_RTX;
    }
}
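
/* The ENDP contract mirrors the library semantics, e.g.

     mempcpy (d, s, n) == (char *) memcpy (d, s, n) + n

   which is why, for ENDP == 1, the code above returns the address
   past the copied bytes rather than the original destination.  */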
/* Expand expression EXP, which is a call to the memmove builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
                                          target, mode, ignore);
    }
}
/* Helper function to do the actual work for expand_builtin_memmove.  The
   arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   TYPE is the return type of the call.  The other arguments and return value
   are the same as for expand_builtin_memmove.  */

static rtx
expand_builtin_memmove_args (tree dest, tree src, tree len,
                             tree type, rtx target, enum machine_mode mode,
                             int ignore)
{
  tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);

  if (result)
    {
      STRIP_TYPE_NOPS (result);
      while (TREE_CODE (result) == COMPOUND_EXPR)
        {
          expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
                       EXPAND_NORMAL);
          result = TREE_OPERAND (result, 1);
        }
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }

  /* Otherwise, call the normal function.  */
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the bcopy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bcopy (tree exp, int ignore)
{
  tree type = TREE_TYPE (exp);
  tree src, dest, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  src = CALL_EXPR_ARG (exp, 0);
  dest = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);

  /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
     This is done this way so that if it isn't expanded inline, we fall
     back to calling bcopy instead of memmove.  */
  return expand_builtin_memmove_args (dest, src,
                                      fold_convert (sizetype, size),
                                      type, const0_rtx, VOIDmode,
                                      ignore);
}
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed; the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala