1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
51 #ifndef PAD_VARARGS_DOWN
52 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
55 /* Define the names of the builtin function types and codes. */
56 const char *const built_in_class_names[4]
57 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* DEF_BUILTIN stringizes each builtin's enumerator name (#X) so that
   including builtins.def below fills built_in_names with one string per
   END_BUILTINS entry.  NOTE(review): this listing has gaps -- the array's
   opening/closing braces and the matching #undef DEF_BUILTIN do not appear
   here; recover them from the complete file.  */
59 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
60 const char * built_in_names[(int) END_BUILTINS] =
62 #include "builtins.def"
66 /* Setup an array of _DECL trees, make sure each element is
67 initialized to NULL_TREE. */
68 tree built_in_decls[(int) END_BUILTINS];
69 /* Declarations used when constructing the builtin implicitly in the compiler.
70 It may be NULL_TREE when this is invalid (for instance runtime is not
71 required to implement the function call in all cases). */
72 tree implicit_built_in_decls[(int) END_BUILTINS];
74 static int get_pointer_alignment (tree, unsigned int);
75 static const char *c_getstr (tree);
76 static rtx c_readstr (const char *, enum machine_mode);
77 static int target_char_cast (tree, char *);
78 static rtx get_memory_rtx (tree, tree);
79 static int apply_args_size (void);
80 static int apply_result_size (void);
81 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
82 static rtx result_vector (int, rtx);
84 static rtx expand_builtin_setjmp (tree, rtx);
85 static void expand_builtin_update_setjmp_buf (rtx);
86 static void expand_builtin_prefetch (tree);
87 static rtx expand_builtin_apply_args (void);
88 static rtx expand_builtin_apply_args_1 (void);
89 static rtx expand_builtin_apply (rtx, rtx, rtx);
90 static void expand_builtin_return (rtx);
91 static enum type_class type_to_class (tree);
92 static rtx expand_builtin_classify_type (tree);
93 static void expand_errno_check (tree, rtx);
94 static rtx expand_builtin_mathfn (tree, rtx, rtx);
95 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
97 static rtx expand_builtin_sincos (tree);
98 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
99 static rtx expand_builtin_args_info (tree);
100 static rtx expand_builtin_next_arg (void);
101 static rtx expand_builtin_va_start (tree);
102 static rtx expand_builtin_va_end (tree);
103 static rtx expand_builtin_va_copy (tree);
104 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
105 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
106 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
107 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
108 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
109 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int);
114 static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode, tree);
115 static rtx expand_builtin_bcopy (tree);
116 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
117 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
118 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
120 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
121 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memset (tree, rtx, enum machine_mode, tree);
123 static rtx expand_builtin_bzero (tree);
124 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_strstr (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strpbrk (tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strchr (tree, tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strrchr (tree, tree, rtx, enum machine_mode);
129 static rtx expand_builtin_alloca (tree, rtx);
130 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
131 static rtx expand_builtin_frame_address (tree, tree);
132 static rtx expand_builtin_fputs (tree, rtx, bool);
133 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
134 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
135 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
136 static tree stabilize_va_list (tree, int);
137 static rtx expand_builtin_expect (tree, rtx);
138 static tree fold_builtin_constant_p (tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (tree);
141 static tree fold_builtin_inf (tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static int validate_arglist (tree, ...);
144 static bool integer_valued_real_p (tree);
145 static tree fold_trunc_transparent_mathfn (tree, tree);
146 static bool readonly_data_expr (tree);
147 static rtx expand_builtin_fabs (tree, rtx, rtx);
148 static rtx expand_builtin_signbit (tree, rtx);
149 static tree fold_builtin_sqrt (tree, tree);
150 static tree fold_builtin_cbrt (tree, tree);
151 static tree fold_builtin_pow (tree, tree, tree);
152 static tree fold_builtin_powi (tree, tree, tree);
153 static tree fold_builtin_sin (tree);
154 static tree fold_builtin_cos (tree, tree, tree);
155 static tree fold_builtin_tan (tree);
156 static tree fold_builtin_atan (tree, tree);
157 static tree fold_builtin_trunc (tree, tree);
158 static tree fold_builtin_floor (tree, tree);
159 static tree fold_builtin_ceil (tree, tree);
160 static tree fold_builtin_round (tree, tree);
161 static tree fold_builtin_int_roundingfn (tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memcpy (tree, tree);
164 static tree fold_builtin_mempcpy (tree, tree, int);
165 static tree fold_builtin_memmove (tree, tree);
166 static tree fold_builtin_strchr (tree, tree);
167 static tree fold_builtin_memcmp (tree);
168 static tree fold_builtin_strcmp (tree);
169 static tree fold_builtin_strncmp (tree);
170 static tree fold_builtin_signbit (tree, tree);
171 static tree fold_builtin_copysign (tree, tree, tree);
172 static tree fold_builtin_isascii (tree);
173 static tree fold_builtin_toascii (tree);
174 static tree fold_builtin_isdigit (tree);
175 static tree fold_builtin_fabs (tree, tree);
176 static tree fold_builtin_abs (tree, tree);
177 static tree fold_builtin_unordered_cmp (tree, tree, enum tree_code,
179 static tree fold_builtin_1 (tree, tree, bool);
181 static tree fold_builtin_strpbrk (tree, tree);
182 static tree fold_builtin_strstr (tree, tree);
183 static tree fold_builtin_strrchr (tree, tree);
184 static tree fold_builtin_strcat (tree);
185 static tree fold_builtin_strncat (tree);
186 static tree fold_builtin_strspn (tree);
187 static tree fold_builtin_strcspn (tree);
188 static tree fold_builtin_sprintf (tree, int);
190 static rtx expand_builtin_object_size (tree);
191 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
192 enum built_in_function);
193 static void maybe_emit_chk_warning (tree, enum built_in_function);
194 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
195 static tree fold_builtin_object_size (tree);
196 static tree fold_builtin_strcat_chk (tree, tree);
197 static tree fold_builtin_strncat_chk (tree, tree);
198 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
199 static tree fold_builtin_printf (tree, tree, bool, enum built_in_function);
200 static tree fold_builtin_fprintf (tree, tree, bool, enum built_in_function);
201 static bool init_target_chars (void);
/* Target-charset encodings of a few host characters and short format
   fragments ("%c", "%s", "%s\n") used by the printf-family folders.
   NOTE(review): presumably filled in by init_target_chars (declared
   above) -- confirm against the complete file.  */
203 static unsigned HOST_WIDE_INT target_newline;
204 static unsigned HOST_WIDE_INT target_percent;
205 static unsigned HOST_WIDE_INT target_c;
206 static unsigned HOST_WIDE_INT target_s;
207 static char target_percent_c[3];
208 static char target_percent_s[3];
209 static char target_percent_s_newline[4];
211 /* Return true if NODE should be considered for inline expansion regardless
212 of the optimization level. This means whenever a function is invoked with
213 its "internal" name, which normally contains the prefix "__builtin". */
215 static bool called_as_built_in (tree node)
/* NOTE(review): this listing is decimated (the embedded line numbers skip
   216 and 219+): the function's braces and the return statements that
   follow each strncmp prefix test are missing -- recover them from the
   complete file before editing.  */
217 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
218 if (strncmp (name, "__builtin_", 10) == 0)
220 if (strncmp (name, "__sync_", 7) == 0)
225 /* Return the alignment in bits of EXP, a pointer valued expression.
226 But don't return more than MAX_ALIGN no matter what.
227 The alignment returned is, by default, the alignment of the thing that
228 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
230 Otherwise, look at the expression to see if we can do better, i.e., if the
231 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): decimated listing -- the return type line, braces, the
   switch's case labels before NON_LVALUE_EXPR, and several statements
   (original lines 235, 237, 239-240, 243-245, 247-249, ...) are missing;
   recover from the complete file before editing.  */
234 get_pointer_alignment (tree exp, unsigned int max_align)
236 unsigned int align, inner;
238 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
/* Start from the alignment of the pointed-to type, capped at MAX_ALIGN.  */
241 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
242 align = MIN (align, max_align);
246 switch (TREE_CODE (exp))
250 case NON_LVALUE_EXPR:
251 exp = TREE_OPERAND (exp, 0);
252 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
255 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
256 align = MIN (inner, max_align);
260 /* If sum of pointer + int, restrict our maximum alignment to that
261 imposed by the integer. If not, we can't do any better than
263 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
266 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
267 & (max_align / BITS_PER_UNIT - 1))
271 exp = TREE_OPERAND (exp, 0);
275 /* See what we are pointing at and look at its alignment. */
276 exp = TREE_OPERAND (exp, 0);
278 while (handled_component_p (exp))
280 /* Fields in a structure can be packed, honor DECL_ALIGN
281 of the FIELD_DECL. For all other references the conservative
282 alignment is the element type alignment. */
283 if (TREE_CODE (exp) == COMPONENT_REF)
284 inner = MIN (inner, DECL_ALIGN (TREE_OPERAND (exp, 1)))
286 inner = MIN (inner, TYPE_ALIGN (TREE_TYPE (exp)));
287 exp = TREE_OPERAND (exp, 0);
289 if (TREE_CODE (exp) == FUNCTION_DECL)
290 align = FUNCTION_BOUNDARY;
291 else if (DECL_P (exp))
292 align = MIN (inner, DECL_ALIGN (exp));
293 #ifdef CONSTANT_ALIGNMENT
294 else if (CONSTANT_CLASS_P (exp))
295 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
298 align = MIN (align, inner);
299 return MIN (align, max_align);
307 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
308 way, because it could contain a zero byte in the middle.
309 TREE_STRING_LENGTH is the size of the character array, not the string.
311 ONLY_VALUE should be nonzero if the result is not going to be emitted
312 into the instruction stream and zero if it is going to be expanded.
313 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
314 is returned, otherwise NULL, since
315 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
316 evaluate the side-effects.
318 The value returned is of type `ssizetype'.
320 Unfortunately, string_constant can't access the values of const char
321 arrays with initializers, so neither can we do so here. */
/* NOTE(review): decimated listing -- the return type line, braces, and
   several declarations/statements (e.g. the decls of offset_node, ptr,
   max, i, len1, len2 and the early-exit returns) are missing; recover
   from the complete file before editing.  */
324 c_strlen (tree src, int only_value)
327 HOST_WIDE_INT offset;
/* For a conditional with no side effects in the condition, the length is
   known only when both arms agree.  */
332 if (TREE_CODE (src) == COND_EXPR
333 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
337 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
338 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
339 if (tree_int_cst_equal (len1, len2))
343 if (TREE_CODE (src) == COMPOUND_EXPR
344 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
345 return c_strlen (TREE_OPERAND (src, 1), only_value);
347 src = string_constant (src, &offset_node);
351 max = TREE_STRING_LENGTH (src) - 1;
352 ptr = TREE_STRING_POINTER (src);
354 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
356 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
357 compute the offset to the following null if we don't know where to
358 start searching for it. */
361 for (i = 0; i < max; i++)
365 /* We don't know the starting offset, but we do know that the string
366 has no internal zero bytes. We can assume that the offset falls
367 within the bounds of the string; otherwise, the programmer deserves
368 what he gets. Subtract the offset from the length of the string,
369 and return that. This would perhaps not be valid if we were dealing
370 with named arrays in addition to literal string constants. */
372 return size_diffop (size_int (max), offset_node);
375 /* We have a known offset into the string. Start searching there for
376 a null character if we can represent it as a single HOST_WIDE_INT. */
377 if (offset_node == 0)
379 else if (! host_integerp (offset_node, 0))
382 offset = tree_low_cst (offset_node, 0);
384 /* If the offset is known to be out of bounds, warn, and call strlen at
386 if (offset < 0 || offset > max)
388 warning (0, "offset outside bounds of constant string");
392 /* Use strlen to search for the first zero byte. Since any strings
393 constructed with build_string will have nulls appended, we win even
394 if we get handed something like (char[4])"abcd".
396 Since OFFSET is our starting index into the string, no further
397 calculation is needed. */
398 return ssize_int (strlen (ptr + offset));
401 /* Return a char pointer for a C string if it is a string constant
402 or sum of string constant and integer constant. */
/* NOTE(review): decimated listing -- the c_getstr signature itself
   (declared above as "static const char *c_getstr (tree)"), its braces,
   the offset_node declaration, and the NULL-return paths are missing;
   recover from the complete file before editing.  */
409 src = string_constant (src, &offset_node);
413 if (offset_node == 0)
414 return TREE_STRING_POINTER (src);
/* Reject offsets that are non-constant or point past the array.  */
415 else if (!host_integerp (offset_node, 1)
416 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
419 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
422 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
423 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* NOTE(review): decimated listing -- the return type line, braces, the
   declarations of c[], i, j, ch, and the zero-initialization of c and j
   are missing; recover from the complete file before editing.  */
426 c_readstr (const char *str, enum machine_mode mode)
432 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
/* Place each source byte at the bit position the target would read it
   from, honoring both byte and word endianness.  */
437 for (i = 0; i < GET_MODE_SIZE (mode); i++)
440 if (WORDS_BIG_ENDIAN)
441 j = GET_MODE_SIZE (mode) - i - 1;
442 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
443 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
444 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
446 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
449 ch = (unsigned char) str[i];
450 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
452 return immed_double_const (c[0], c[1], mode);
455 /* Cast a target constant CST to target CHAR and if that value fits into
456 host char type, return zero and put that value into variable pointed to by
/* NOTE(review): decimated listing -- the tail of the comment ("...by P"),
   the return type line, braces, the failure return(s), the assignment of
   hostval, the *p store, and the final return 0 are missing; recover from
   the complete file before editing.  */
460 target_char_cast (tree cst, char *p)
462 unsigned HOST_WIDE_INT val, hostval;
464 if (!host_integerp (cst, 1)
465 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
468 val = tree_low_cst (cst, 1);
/* Truncate to the target's character width.  */
469 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
470 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
473 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
474 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
483 /* Similar to save_expr, but assumes that arbitrary code is not executed
484 in between the multiple evaluations. In particular, we assume that a
485 non-addressable local variable will not be modified. */
/* NOTE(review): decimated listing -- the return type line, braces, and
   the early "return exp" for the safe-to-reevaluate case are missing;
   recover from the complete file before editing.  */
488 builtin_save_expr (tree exp)
/* A non-addressable PARM_DECL or non-static VAR_DECL cannot change
   between evaluations, so it needs no SAVE_EXPR wrapper.  */
490 if (TREE_ADDRESSABLE (exp) == 0
491 && (TREE_CODE (exp) == PARM_DECL
492 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
495 return save_expr (exp);
498 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
499 times to get the address of either a higher stack frame, or a return
500 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): decimated listing -- the return type line, braces,
   several #else/#endif lines, the loop-variable declaration, and the
   final returns are missing; recover from the complete file before
   editing.  */
503 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
507 #ifdef INITIAL_FRAME_ADDRESS_RTX
508 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
512 /* For a zero count with __builtin_return_address, we don't care what
513 frame address we return, because target-specific definitions will
514 override us. Therefore frame pointer elimination is OK, and using
515 the soft frame pointer is OK.
517 For a non-zero count, or a zero count with __builtin_frame_address,
518 we require a stable offset from the current frame pointer to the
519 previous one, so we must use the hard frame pointer, and
520 we must disable frame pointer elimination. */
521 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
522 tem = frame_pointer_rtx;
525 tem = hard_frame_pointer_rtx;
527 /* Tell reload not to eliminate the frame pointer. */
528 current_function_accesses_prior_frames = 1;
532 /* Some machines need special handling before we can access
533 arbitrary frames. For example, on the sparc, we must first flush
534 all register windows to the stack. */
535 #ifdef SETUP_FRAME_ADDRESSES
537 SETUP_FRAME_ADDRESSES ();
540 /* On the sparc, the return address is not in the frame, it is in a
541 register. There is no way to access it off of the current frame
542 pointer, but it can be accessed off the previous frame pointer by
543 reading the value from the register window save area. */
544 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
545 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
549 /* Scan back COUNT frames to the specified frame. */
550 for (i = 0; i < count; i++)
552 /* Assume the dynamic chain pointer is in the word that the
553 frame address points to, unless otherwise specified. */
554 #ifdef DYNAMIC_CHAIN_ADDRESS
555 tem = DYNAMIC_CHAIN_ADDRESS (tem);
557 tem = memory_address (Pmode, tem);
558 tem = gen_frame_mem (Pmode, tem);
559 tem = copy_to_reg (tem);
562 /* For __builtin_frame_address, return what we've got. */
563 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
566 /* For __builtin_return_address, Get the return address from that
568 #ifdef RETURN_ADDR_RTX
569 tem = RETURN_ADDR_RTX (count, tem);
571 tem = memory_address (Pmode,
572 plus_constant (tem, GET_MODE_SIZE (Pmode)));
573 tem = gen_frame_mem (Pmode, tem);
578 /* Alias set used for setjmp buffer. */
/* -1 marks "not yet allocated"; lazily created via new_alias_set by the
   setjmp/longjmp expanders below.  */
579 static HOST_WIDE_INT setjmp_alias_set = -1;
581 /* Construct the leading half of a __builtin_setjmp call. Control will
582 return to RECEIVER_LABEL. This is used directly by sjlj exception
/* NOTE(review): decimated listing -- the tail of the comment, the return
   type line, braces, the declarations of mem/stack_save, and a trailing
   #endif are missing; recover from the complete file before editing.
   Buffer layout written here: word 0 = frame pointer value, word 1 =
   address of RECEIVER_LABEL, rest = machine-dependent stack save area.  */
586 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
588 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
592 if (setjmp_alias_set == -1)
593 setjmp_alias_set = new_alias_set ();
595 buf_addr = convert_memory_address (Pmode, buf_addr);
597 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
599 /* We store the frame pointer and the address of receiver_label in
600 the buffer and use the rest of it for the stack save area, which
601 is machine-dependent. */
603 mem = gen_rtx_MEM (Pmode, buf_addr);
604 set_mem_alias_set (mem, setjmp_alias_set);
605 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
607 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
608 set_mem_alias_set (mem, setjmp_alias_set);
610 emit_move_insn (validize_mem (mem),
611 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
613 stack_save = gen_rtx_MEM (sa_mode,
614 plus_constant (buf_addr,
615 2 * GET_MODE_SIZE (Pmode)));
616 set_mem_alias_set (stack_save, setjmp_alias_set);
617 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
619 /* If there is further processing to do, do it. */
620 #ifdef HAVE_builtin_setjmp_setup
621 if (HAVE_builtin_setjmp_setup)
622 emit_insn (gen_builtin_setjmp_setup (buf_addr));
625 /* Tell optimize_save_area_alloca that extra work is going to
626 need to go on during alloca. */
627 current_function_calls_setjmp = 1;
629 /* Set this so all the registers get saved in our frame; we need to be
630 able to copy the saved values for any registers from frames we unwind. */
631 current_function_has_nonlocal_label = 1;
634 /* Construct the trailing part of a __builtin_setjmp call.
635 This is used directly by sjlj exception handling code. */
/* NOTE(review): decimated listing -- the return type line, braces, the
   declaration of i in the ELIMINABLE_REGS scan, and several
   #else/#endif lines are missing; recover from the complete file before
   editing.  */
638 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
640 /* Clobber the FP when we get here, so we have to make sure it's
641 marked as used by this function. */
642 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
644 /* Mark the static chain as clobbered here so life information
645 doesn't get messed up for it. */
646 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
648 /* Now put in the code to restore the frame pointer, and argument
649 pointer, if needed. */
650 #ifdef HAVE_nonlocal_goto
651 if (! HAVE_nonlocal_goto)
653 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
655 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
656 if (fixed_regs[ARG_POINTER_REGNUM])
658 #ifdef ELIMINABLE_REGS
/* If the arg pointer can be eliminated to the frame pointer, it was
   never really saved; only restore it when no such elimination exists.  */
660 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
662 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
663 if (elim_regs[i].from == ARG_POINTER_REGNUM
664 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
667 if (i == ARRAY_SIZE (elim_regs))
670 /* Now restore our arg pointer from the address at which it
671 was saved in our stack frame. */
672 emit_move_insn (virtual_incoming_args_rtx,
673 copy_to_reg (get_arg_pointer_save_area (cfun)));
678 #ifdef HAVE_builtin_setjmp_receiver
679 if (HAVE_builtin_setjmp_receiver)
680 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
683 #ifdef HAVE_nonlocal_goto_receiver
684 if (HAVE_nonlocal_goto_receiver)
685 emit_insn (gen_nonlocal_goto_receiver ());
690 /* @@@ This is a kludge. Not all machine descriptions define a blockage
691 insn, but we must not allow the code we just generated to be reordered
692 by scheduling. Specifically, the update of the frame pointer must
693 happen immediately, not later. So emit an ASM_INPUT to act as blockage
695 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
698 /* __builtin_setjmp is passed a pointer to an array of five words (not
699 all will be used on all machines). It operates similarly to the C
700 library function of the same name, but is more efficient. Much of
701 the code below (and for longjmp) is copied from the handling of
704 NOTE: This is intended for use by GNAT and the exception handling
705 scheme in the compiler and will only work in the method used by
/* NOTE(review): decimated listing -- braces, the early NULL_RTX return
   after validate_arglist, and the final "return target" are missing;
   recover from the complete file before editing.  */
709 expand_builtin_setjmp (tree arglist, rtx target)
711 rtx buf_addr, next_lab, cont_lab;
713 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
/* TARGET must be a pseudo register we can freely overwrite.  */
716 if (target == 0 || !REG_P (target)
717 || REGNO (target) < FIRST_PSEUDO_REGISTER)
718 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
720 buf_addr = expand_normal (TREE_VALUE (arglist));
722 next_lab = gen_label_rtx ();
723 cont_lab = gen_label_rtx ();
725 expand_builtin_setjmp_setup (buf_addr, next_lab);
727 /* Set TARGET to zero and branch to the continue label. Use emit_jump to
728 ensure that pending stack adjustments are flushed. */
729 emit_move_insn (target, const0_rtx);
730 emit_jump (cont_lab);
732 emit_label (next_lab);
734 expand_builtin_setjmp_receiver (next_lab);
736 /* Set TARGET to one. */
737 emit_move_insn (target, const1_rtx);
738 emit_label (cont_lab);
740 /* Tell flow about the strange goings on. Putting `next_lab' on
741 `nonlocal_goto_handler_labels' to indicates that function
742 calls may traverse the arc back to this label. */
744 current_function_has_nonlocal_label = 1;
745 nonlocal_goto_handler_labels
746 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
751 /* __builtin_longjmp is passed a pointer to an array of five words (not
752 all will be used on all machines). It operates similarly to the C
753 library function of the same name, but is more efficient. Much of
754 the code below is copied from the handling of non-local gotos.
756 NOTE: This is intended for use by GNAT and the exception handling
757 scheme in the compiler and will only work in the method used by
/* NOTE(review): decimated listing -- the return type line, braces,
   several #else/#endif lines, and parts of the insn-marking loop
   (the JUMP_P test and loop exits) are missing; recover from the
   complete file before editing.  */
761 expand_builtin_longjmp (rtx buf_addr, rtx value)
763 rtx fp, lab, stack, insn, last;
764 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
766 if (setjmp_alias_set == -1)
767 setjmp_alias_set = new_alias_set ();
769 buf_addr = convert_memory_address (Pmode, buf_addr);
771 buf_addr = force_reg (Pmode, buf_addr);
773 /* We used to store value in static_chain_rtx, but that fails if pointers
774 are smaller than integers. We instead require that the user must pass
775 a second argument of 1, because that is what builtin_setjmp will
776 return. This also makes EH slightly more efficient, since we are no
777 longer copying around a value that we don't care about. */
778 gcc_assert (value == const1_rtx);
780 last = get_last_insn ();
781 #ifdef HAVE_builtin_longjmp
782 if (HAVE_builtin_longjmp)
783 emit_insn (gen_builtin_longjmp (buf_addr));
/* Fallback: read FP, receiver label, and saved SP back out of the buffer
   (same layout written by expand_builtin_setjmp_setup) and jump.  */
787 fp = gen_rtx_MEM (Pmode, buf_addr);
788 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
789 GET_MODE_SIZE (Pmode)));
791 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
792 2 * GET_MODE_SIZE (Pmode)));
793 set_mem_alias_set (fp, setjmp_alias_set);
794 set_mem_alias_set (lab, setjmp_alias_set);
795 set_mem_alias_set (stack, setjmp_alias_set);
797 /* Pick up FP, label, and SP from the block and jump. This code is
798 from expand_goto in stmt.c; see there for detailed comments. */
799 #ifdef HAVE_nonlocal_goto
800 if (HAVE_nonlocal_goto)
801 /* We have to pass a value to the nonlocal_goto pattern that will
802 get copied into the static_chain pointer, but it does not matter
803 what that value is, because builtin_setjmp does not use it. */
804 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
808 lab = copy_to_reg (lab);
810 emit_insn (gen_rtx_CLOBBER (VOIDmode,
811 gen_rtx_MEM (BLKmode,
812 gen_rtx_SCRATCH (VOIDmode))));
813 emit_insn (gen_rtx_CLOBBER (VOIDmode,
814 gen_rtx_MEM (BLKmode,
815 hard_frame_pointer_rtx)));
817 emit_move_insn (hard_frame_pointer_rtx, fp);
818 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
820 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
821 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
822 emit_indirect_jump (lab);
826 /* Search backwards and mark the jump insn as a non-local goto.
827 Note that this precludes the use of __builtin_longjmp to a
828 __builtin_setjmp target in the same function. However, we've
829 already cautioned the user that these functions are for
830 internal exception handling use only. */
831 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
833 gcc_assert (insn != last);
837 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
841 else if (CALL_P (insn))
846 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
847 and the address of the save area. */
/* NOTE(review): decimated listing -- the return type line, braces, the
   early NULL_RTX return, the final "return const0_rtx", and parts of
   the insn-marking loop are missing; recover from the complete file
   before editing.  */
850 expand_builtin_nonlocal_goto (tree arglist)
852 tree t_label, t_save_area;
853 rtx r_label, r_save_area, r_fp, r_sp, insn;
855 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
858 t_label = TREE_VALUE (arglist);
859 arglist = TREE_CHAIN (arglist);
860 t_save_area = TREE_VALUE (arglist);
862 r_label = expand_normal (t_label);
863 r_label = convert_memory_address (Pmode, r_label);
864 r_save_area = expand_normal (t_save_area);
865 r_save_area = convert_memory_address (Pmode, r_save_area);
/* Save area layout: word 0 = frame pointer, then the stack save area.  */
866 r_fp = gen_rtx_MEM (Pmode, r_save_area);
867 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
868 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
870 current_function_has_nonlocal_goto = 1;
872 #ifdef HAVE_nonlocal_goto
873 /* ??? We no longer need to pass the static chain value, afaik. */
874 if (HAVE_nonlocal_goto)
875 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
879 r_label = copy_to_reg (r_label);
881 emit_insn (gen_rtx_CLOBBER (VOIDmode,
882 gen_rtx_MEM (BLKmode,
883 gen_rtx_SCRATCH (VOIDmode))));
885 emit_insn (gen_rtx_CLOBBER (VOIDmode,
886 gen_rtx_MEM (BLKmode,
887 hard_frame_pointer_rtx)));
889 /* Restore frame pointer for containing function.
890 This sets the actual hard register used for the frame pointer
891 to the location of the function's incoming static chain info.
892 The non-local goto handler will then adjust it to contain the
893 proper value and reload the argument pointer, if needed. */
894 emit_move_insn (hard_frame_pointer_rtx, r_fp)
895 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
897 /* USE of hard_frame_pointer_rtx added for consistency;
898 not clear if really needed. */
899 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
900 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
901 emit_indirect_jump (r_label);
904 /* Search backwards to the jump insn and mark it as a
906 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
910 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
911 const0_rtx, REG_NOTES (insn));
914 else if (CALL_P (insn))
921 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
922 (not all will be used on all machines) that was passed to __builtin_setjmp.
923 It updates the stack pointer in that block to correspond to the current
/* NOTE(review): decimated listing -- the tail of the comment
   ("...current stack pointer"), the return type line, braces, the
   stack_save declaration, and #else/#endif lines (including the
   conditional around gen_setjmp) are missing; recover from the complete
   file before editing.  */
927 expand_builtin_update_setjmp_buf (rtx buf_addr)
929 enum machine_mode sa_mode = Pmode;
933 #ifdef HAVE_save_stack_nonlocal
934 if (HAVE_save_stack_nonlocal)
935 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
937 #ifdef STACK_SAVEAREA_MODE
938 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack save area lives at word 2 of the setjmp buffer, matching
   expand_builtin_setjmp_setup's layout.  */
942 = gen_rtx_MEM (sa_mode,
945 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
949 emit_insn (gen_setjmp ());
952 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
955 /* Expand a call to __builtin_prefetch. For a target that does not support
956 data prefetch, evaluate the memory address argument in case it has side
/* NOTE(review): decimated listing -- the tail of the comment
   ("...side effects"), the return type line, braces, the op0/op1/op2
   declarations, early returns, the resets of arg1/op2 after the
   warnings, and the #else branch of HAVE_prefetch are missing; recover
   from the complete file before editing.  */
960 expand_builtin_prefetch (tree arglist)
962 tree arg0, arg1, arg2;
965 if (!validate_arglist (arglist, POINTER_TYPE, 0))
968 arg0 = TREE_VALUE (arglist);
969 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
970 zero (read) and argument 2 (locality) defaults to 3 (high degree of
972 if (TREE_CHAIN (arglist))
974 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
975 if (TREE_CHAIN (TREE_CHAIN (arglist)))
976 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
978 arg2 = build_int_cst (NULL_TREE, 3);
982 arg1 = integer_zero_node;
983 arg2 = build_int_cst (NULL_TREE, 3);
986 /* Argument 0 is an address. */
987 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
989 /* Argument 1 (read/write flag) must be a compile-time constant int. */
990 if (TREE_CODE (arg1) != INTEGER_CST)
992 error ("second argument to %<__builtin_prefetch%> must be a constant");
993 arg1 = integer_zero_node;
995 op1 = expand_normal (arg1);
996 /* Argument 1 must be either zero or one. */
997 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
999 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1004 /* Argument 2 (locality) must be a compile-time constant int. */
1005 if (TREE_CODE (arg2) != INTEGER_CST)
1007 error ("third argument to %<__builtin_prefetch%> must be a constant")
1008 arg2 = integer_zero_node;
1010 op2 = expand_normal (arg2);
1011 /* Argument 2 must be 0, 1, 2, or 3. */
1012 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1014 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1018 #ifdef HAVE_prefetch
/* The address operand may need legitimizing into the mode the prefetch
   pattern expects before emitting the insn.  */
1021 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1023 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1024 || (GET_MODE (op0) != Pmode))
1026 op0 = convert_memory_address (Pmode, op0);
1027 op0 = force_reg (Pmode, op0);
1029 emit_insn (gen_prefetch (op0, op1, op2));
1033 /* Don't do anything with direct references to volatile memory, but
1034 generate code to handle other side effects. */
1035 if (!MEM_P (op0) && side_effects_p (op0))
1039 /* Get a MEM rtx for expression EXP which is the address of an operand
1040 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1041 the maximum length of the block of memory that might be accessed or
1045 get_memory_rtx (tree exp, tree len)
/* Expand the address and wrap it in a BLKmode MEM; attributes are
   refined below.  */
1047 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1048 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1050 /* Get an expression we can use to find the attributes to assign to MEM.
1051 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1052 we can. First remove any nops. */
1053 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1054 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1055 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1056 exp = TREE_OPERAND (exp, 0);
1058 if (TREE_CODE (exp) == ADDR_EXPR)
1059 exp = TREE_OPERAND (exp, 0);
1060 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1061 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1065 /* Honor attributes derived from exp, except for the alias set
1066 (as builtin stringops may alias with anything) and the size
1067 (as stringops may access multiple array elements). */
1070 set_mem_attributes (mem, exp, 0);
1072 /* Allow the string and memory builtins to overflow from one
1073 field into another, see http://gcc.gnu.org/PR23561.
1074 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1075 memory accessed by the string or memory builtin will fit
1076 within the field. */
1077 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1079 tree mem_expr = MEM_EXPR (mem);
/* offset/length stay -1 when unknown; only provable bounds are used.  */
1080 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers to reach the innermost COMPONENT_REF.  */
1083 while (TREE_CODE (inner) == ARRAY_REF
1084 || TREE_CODE (inner) == NOP_EXPR
1085 || TREE_CODE (inner) == CONVERT_EXPR
1086 || TREE_CODE (inner) == NON_LVALUE_EXPR
1087 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1088 || TREE_CODE (inner) == SAVE_EXPR)
1089 inner = TREE_OPERAND (inner, 0);
1091 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1093 if (MEM_OFFSET (mem)
1094 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1095 offset = INTVAL (MEM_OFFSET (mem));
/* LEN is only usable when it is a host-representable constant.  */
1097 if (offset >= 0 && len && host_integerp (len, 0))
1098 length = tree_low_cst (len, 0);
/* Walk outward over nested COMPONENT_REFs, keeping each one only if
   the access [offset, offset+length) provably fits in its field.  */
1100 while (TREE_CODE (inner) == COMPONENT_REF)
1102 tree field = TREE_OPERAND (inner, 1);
1103 gcc_assert (! DECL_BIT_FIELD (field));
1104 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1105 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1108 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1109 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1112 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1113 /* If we can prove the memory starting at XEXP (mem, 0)
1114 and ending at XEXP (mem, 0) + LENGTH will fit into
1115 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1118 && offset + length <= size)
1123 && host_integerp (DECL_FIELD_OFFSET (field), 0))
/* Rebase the offset to the enclosing record before moving out.  */
1124 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1125 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1133 mem_expr = TREE_OPERAND (mem_expr, 0);
1134 inner = TREE_OPERAND (inner, 0);
1137 if (mem_expr == NULL)
1139 if (mem_expr != MEM_EXPR (mem))
1141 set_mem_expr (mem, mem_expr);
1142 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* String/memory builtins may alias anything and touch multiple
   elements, so drop the alias set and the recorded size.  */
1145 set_mem_alias_set (mem, 0);
1146 set_mem_size (mem, NULL_RTX);
1152 /* Built-in functions to perform an untyped call and return. */
1154 /* For each register that may be used for calling a function, this
1155 gives a mode used to copy the register's value. VOIDmode indicates
1156 the register is not used for calling a function. If the machine
1157 has register windows, this gives only the outbound registers.
1158 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled lazily by apply_args_size the first time it runs.  */
1159 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1161 /* For each register that may be used for returning values, this gives
1162 a mode used to copy the register's value. VOIDmode indicates the
1163 register is not used for returning values. If the machine has
1164 register windows, this gives only the outbound registers.
1165 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled lazily by apply_result_size.  */
1166 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1168 /* For each register that may be used for calling a function, this
1169 gives the offset of that register into the block returned by
1170 __builtin_apply_args. 0 indicates that the register is not
1171 used for calling a function. */
/* Also filled by apply_args_size, in the same pass that computes
   apply_args_mode.  */
1172 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1174 /* Return the size required for the block returned by __builtin_apply_args,
1175 and initialize apply_args_mode. */
1178 apply_args_size (void)
/* Memoized: computed once, then reused (the layout never changes).  */
1180 static int size = -1;
1183 enum machine_mode mode;
1185 /* The values computed by this function never change. */
1188 /* The first value is the incoming arg-pointer. */
1189 size = GET_MODE_SIZE (Pmode);
1191 /* The second value is the structure value address unless this is
1192 passed as an "invisible" first argument. */
1193 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1194 size += GET_MODE_SIZE (Pmode);
/* Lay out a slot for every register the target can pass arguments in,
   recording each register's offset and copy mode.  */
1196 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1197 if (FUNCTION_ARG_REGNO_P (regno))
1199 mode = reg_raw_mode[regno];
1201 gcc_assert (mode != VOIDmode);
/* Round the running size up to the mode's alignment.  */
1203 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1204 if (size % align != 0)
1205 size = CEIL (size, align) * align;
1206 apply_args_reg_offset[regno] = size;
1207 size += GET_MODE_SIZE (mode);
1208 apply_args_mode[regno] = mode;
/* Non-argument registers are marked unused.  */
1212 apply_args_mode[regno] = VOIDmode;
1213 apply_args_reg_offset[regno] = 0;
1219 /* Return the size required for the block returned by __builtin_apply,
1220 and initialize apply_result_mode. */
1223 apply_result_size (void)
/* Memoized exactly like apply_args_size.  */
1225 static int size = -1;
1227 enum machine_mode mode;
1229 /* The values computed by this function never change. */
/* Lay out a slot for every register the target can return values in.  */
1234 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1235 if (FUNCTION_VALUE_REGNO_P (regno))
1237 mode = reg_raw_mode[regno];
1239 gcc_assert (mode != VOIDmode);
1241 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1242 if (size % align != 0)
1243 size = CEIL (size, align) * align;
1244 size += GET_MODE_SIZE (mode);
1245 apply_result_mode[regno] = mode;
1248 apply_result_mode[regno] = VOIDmode;
1250 /* Allow targets that use untyped_call and untyped_return to override
1251 the size so that machine-specific information can be stored here. */
1252 #ifdef APPLY_RESULT_SIZE
1253 size = APPLY_RESULT_SIZE;
1259 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1260 /* Create a vector describing the result block RESULT. If SAVEP is true,
1261 the result block is used to save the values; otherwise it is used to
1262 restore the values. */
1265 result_vector (int savep, rtx result)
1267 int regno, size, align, nelts;
1268 enum machine_mode mode;
/* Worst case: one SET per hard register.  */
1270 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
/* Walk the result registers in the same order (and with the same
   alignment padding) as apply_result_size laid them out.  */
1273 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1274 if ((mode = apply_result_mode[regno]) != VOIDmode)
1276 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1277 if (size % align != 0)
1278 size = CEIL (size, align) * align;
/* When restoring, use the inbound register number.  */
1279 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1280 mem = adjust_address (result, mode, size);
/* save: mem <- reg; restore: reg <- mem.  */
1281 savevec[nelts++] = (savep
1282 ? gen_rtx_SET (VOIDmode, mem, reg)
1283 : gen_rtx_SET (VOIDmode, reg, mem));
1284 size += GET_MODE_SIZE (mode);
1286 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1288 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1290 /* Save the state required to perform an untyped call with the same
1291 arguments as were passed to the current function. */
1294 expand_builtin_apply_args_1 (void)
1297 int size, align, regno;
1298 enum machine_mode mode;
1299 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1301 /* Create a block where the arg-pointer, structure value address,
1302 and argument registers can be saved. */
1303 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1305 /* Walk past the arg-pointer and structure value address. */
1306 size = GET_MODE_SIZE (Pmode);
1307 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1308 size += GET_MODE_SIZE (Pmode);
1310 /* Save each register used in calling a function to the block. */
/* Layout mirrors apply_args_size: same alignment padding, same order.  */
1311 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1312 if ((mode = apply_args_mode[regno]) != VOIDmode)
1314 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1315 if (size % align != 0)
1316 size = CEIL (size, align) * align;
/* Read the inbound register (matters with register windows).  */
1318 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1320 emit_move_insn (adjust_address (registers, mode, size), tem);
1321 size += GET_MODE_SIZE (mode);
1324 /* Save the arg pointer to the block. */
1325 tem = copy_to_reg (virtual_incoming_args_rtx);
1326 #ifdef STACK_GROWS_DOWNWARD
1327 /* We need the pointer as the caller actually passed them to us, not
1328 as we might have pretended they were passed. Make sure it's a valid
1329 operand, as emit_move_insn isn't expected to handle a PLUS. */
1331 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1334 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1336 size = GET_MODE_SIZE (Pmode);
1338 /* Save the structure value address unless this is passed as an
1339 "invisible" first argument. */
1340 if (struct_incoming_value)
1342 emit_move_insn (adjust_address (registers, Pmode, size),
1343 copy_to_reg (struct_incoming_value));
1344 size += GET_MODE_SIZE (Pmode);
1347 /* Return the address of the block. */
1348 return copy_addr_to_reg (XEXP (registers, 0));
1351 /* __builtin_apply_args returns block of memory allocated on
1352 the stack into which is stored the arg pointer, structure
1353 value address, static chain, and all the registers that might
1354 possibly be used in performing a function call. The code is
1355 moved to the start of the function so the incoming values are
1359 expand_builtin_apply_args (void)
1361 /* Don't do __builtin_apply_args more than once in a function.
1362 Save the result of the first call and reuse it. */
1363 if (apply_args_value != 0)
1364 return apply_args_value;
1366 /* When this function is called, it means that registers must be
1367 saved on entry to this function. So we migrate the
1368 call to the first insn of this function. */
/* Emit the register-saving sequence into a detached insn list
   (NOTE(review): the start_sequence call is on a line missing from
   this fragment).  */
1373 temp = expand_builtin_apply_args_1 ();
1377 apply_args_value = temp;
1379 /* Put the insns after the NOTE that starts the function.
1380 If this is inside a start_sequence, make the outer-level insn
1381 chain current, so the code is placed at the start of the
1383 push_topmost_sequence ();
1384 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1385 pop_topmost_sequence ();
1390 /* Perform an untyped call and save the state required to perform an
1391 untyped return of whatever value was returned by the given function. */
1394 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1396 int size, align, regno;
1397 enum machine_mode mode;
1398 rtx incoming_args, result, reg, dest, src, call_insn;
1399 rtx old_stack_level = 0;
1400 rtx call_fusage = 0;
1401 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1403 arguments = convert_memory_address (Pmode, arguments);
1405 /* Create a block where the return registers can be saved. */
1406 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1408 /* Fetch the arg pointer from the ARGUMENTS block. */
1409 incoming_args = gen_reg_rtx (Pmode);
1410 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1411 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved arg pointer is past the block;
   step back by the argument size.  */
1412 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1413 incoming_args, 0, OPTAB_LIB_WIDEN);
1416 /* Push a new argument block and copy the arguments. Do not allow
1417 the (potential) memcpy call below to interfere with our stack
1419 do_pending_stack_adjust ();
1422 /* Save the stack with nonlocal if available. */
1423 #ifdef HAVE_save_stack_nonlocal
1424 if (HAVE_save_stack_nonlocal)
1425 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX)
1428 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1430 /* Allocate a block of memory onto the stack and copy the memory
1431 arguments to the outgoing arguments address. */
1432 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1433 dest = virtual_outgoing_args_rtx;
1434 #ifndef STACK_GROWS_DOWNWARD
/* Constant sizes fold into the address; otherwise build a PLUS of
   the negated size.  */
1435 if (GET_CODE (argsize) == CONST_INT)
1436 dest = plus_constant (dest, -INTVAL (argsize));
1438 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1440 dest = gen_rtx_MEM (BLKmode, dest);
1441 set_mem_align (dest, PARM_BOUNDARY);
1442 src = gen_rtx_MEM (BLKmode, incoming_args);
1443 set_mem_align (src, PARM_BOUNDARY);
1444 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1446 /* Refer to the argument block. */
1448 arguments = gen_rtx_MEM (BLKmode, arguments);
1449 set_mem_align (arguments, PARM_BOUNDARY);
1451 /* Walk past the arg-pointer and structure value address. */
1452 size = GET_MODE_SIZE (Pmode);
1454 size += GET_MODE_SIZE (Pmode);
1456 /* Restore each of the registers previously saved. Make USE insns
1457 for each of these registers for use in making the call. */
/* Same layout walk as apply_args_size/expand_builtin_apply_args_1.  */
1458 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1459 if ((mode = apply_args_mode[regno]) != VOIDmode)
1461 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1462 if (size % align != 0)
1463 size = CEIL (size, align) * align;
1464 reg = gen_rtx_REG (mode, regno);
1465 emit_move_insn (reg, adjust_address (arguments, mode, size));
1466 use_reg (&call_fusage, reg);
1467 size += GET_MODE_SIZE (mode);
1470 /* Restore the structure value address unless this is passed as an
1471 "invisible" first argument. */
1472 size = GET_MODE_SIZE (Pmode);
1475 rtx value = gen_reg_rtx (Pmode);
1476 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1477 emit_move_insn (struct_value, value);
1478 if (REG_P (struct_value))
1479 use_reg (&call_fusage, struct_value);
1480 size += GET_MODE_SIZE (Pmode);
1483 /* All arguments and registers used for the call are set up by now! */
1484 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1486 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1487 and we don't want to load it into a register as an optimization,
1488 because prepare_call_address already did it if it should be done. */
1489 if (GET_CODE (function) != SYMBOL_REF)
1490 function = memory_address (FUNCTION_MODE, function);
1492 /* Generate the actual call instruction and save the return value. */
1493 #ifdef HAVE_untyped_call
1494 if (HAVE_untyped_call)
1495 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1496 result, result_vector (1, result)));
1499 #ifdef HAVE_call_value
1500 if (HAVE_call_value)
1504 /* Locate the unique return register. It is not possible to
1505 express a call that sets more than one return register using
1506 call_value; use untyped_call for that. In fact, untyped_call
1507 only needs to save the return registers in the given block. */
1508 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1509 if ((mode = apply_result_mode[regno]) != VOIDmode)
1511 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1513 valreg = gen_rtx_REG (mode, regno);
1516 emit_call_insn (GEN_CALL_VALUE (valreg,
1517 gen_rtx_MEM (FUNCTION_MODE, function),
1518 const0_rtx, NULL_RTX, const0_rtx));
1520 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1526 /* Find the CALL insn we just emitted, and attach the register usage
1528 call_insn = last_call_insn ();
1529 add_function_usage_to (call_insn, call_fusage);
1531 /* Restore the stack. */
1532 #ifdef HAVE_save_stack_nonlocal
1533 if (HAVE_save_stack_nonlocal)
1534 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1537 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1541 /* Return the address of the result block. */
1542 result = copy_addr_to_reg (XEXP (result, 0));
1543 return convert_memory_address (ptr_mode, result);
1546 /* Perform an untyped return. */
1549 expand_builtin_return (rtx result)
1551 int size, align, regno;
1552 enum machine_mode mode;
1554 rtx call_fusage = 0;
1556 result = convert_memory_address (Pmode, result);
/* Make sure apply_result_mode[] is initialized.  */
1558 apply_result_size ();
1559 result = gen_rtx_MEM (BLKmode, result);
1561 #ifdef HAVE_untyped_return
/* Targets with an untyped_return pattern handle everything at once.  */
1562 if (HAVE_untyped_return)
1564 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1570 /* Restore the return value and note that each value is used. */
/* Fallback: reload each saved return register by hand, using the same
   layout as apply_result_size.  */
1572 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1573 if ((mode = apply_result_mode[regno]) != VOIDmode)
1575 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1576 if (size % align != 0)
1577 size = CEIL (size, align) * align;
1578 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1579 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USEs so the registers stay live until the return.  */
1581 push_to_sequence (call_fusage);
1582 emit_insn (gen_rtx_USE (VOIDmode, reg));
1583 call_fusage = get_insns ();
1585 size += GET_MODE_SIZE (mode);
1588 /* Put the USE insns before the return. */
1589 emit_insn (call_fusage);
1591 /* Return whatever values was restored by jumping directly to the end
1593 expand_naked_return ();
1596 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a front-end TREE_CODE onto the libgcc type_class enumeration.  */
1598 static enum type_class
1599 type_to_class (tree type)
1601 switch (TREE_CODE (type))
1603 case VOID_TYPE: return void_type_class;
1604 case INTEGER_TYPE: return integer_type_class;
1605 case ENUMERAL_TYPE: return enumeral_type_class;
1606 case BOOLEAN_TYPE: return boolean_type_class;
1607 case POINTER_TYPE: return pointer_type_class;
1608 case REFERENCE_TYPE: return reference_type_class;
1609 case OFFSET_TYPE: return offset_type_class;
1610 case REAL_TYPE: return real_type_class;
1611 case COMPLEX_TYPE: return complex_type_class;
1612 case FUNCTION_TYPE: return function_type_class;
1613 case METHOD_TYPE: return method_type_class;
1614 case RECORD_TYPE: return record_type_class;
/* Qualified unions classify the same as plain unions.  */
1616 case QUAL_UNION_TYPE: return union_type_class;
/* Char arrays with TYPE_STRING_FLAG count as strings.  */
1617 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1618 ? string_type_class : array_type_class);
1619 case LANG_TYPE: return lang_type_class;
1620 default: return no_type_class;
1624 /* Expand a call to __builtin_classify_type with arguments found in
/* Returns the class of the (sole) argument's type as a constant;
   with no argument the result is no_type_class.  */
1628 expand_builtin_classify_type (tree arglist)
1631 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1632 return GEN_INT (no_type_class);
1635 /* This helper macro, meant to be used in mathfn_built_in below,
1636 determines which among a set of three builtin math functions is
1637 appropriate for a given type mode. The `F' and `L' cases are
1638 automatically generated from the `double' case. */
/* Sets the local variables fcode/fcodef/fcodel of the enclosing
   switch to the double/float/long-double builtin codes.  */
1639 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1640 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1641 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1642 fcodel = BUILT_IN_MATHFN##L ; break;
1644 /* Return mathematic function equivalent to FN but operating directly
1645 on TYPE, if available. If we can't do the conversion, return zero. */
1647 mathfn_built_in (tree type, enum built_in_function fn)
1649 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expands to the double/float/long-double cases of
   one math builtin and records all three codes.  */
1653 CASE_MATHFN (BUILT_IN_ACOS)
1654 CASE_MATHFN (BUILT_IN_ACOSH)
1655 CASE_MATHFN (BUILT_IN_ASIN)
1656 CASE_MATHFN (BUILT_IN_ASINH)
1657 CASE_MATHFN (BUILT_IN_ATAN)
1658 CASE_MATHFN (BUILT_IN_ATAN2)
1659 CASE_MATHFN (BUILT_IN_ATANH)
1660 CASE_MATHFN (BUILT_IN_CBRT)
1661 CASE_MATHFN (BUILT_IN_CEIL)
1662 CASE_MATHFN (BUILT_IN_COPYSIGN)
1663 CASE_MATHFN (BUILT_IN_COS)
1664 CASE_MATHFN (BUILT_IN_COSH)
1665 CASE_MATHFN (BUILT_IN_DREM)
1666 CASE_MATHFN (BUILT_IN_ERF)
1667 CASE_MATHFN (BUILT_IN_ERFC)
1668 CASE_MATHFN (BUILT_IN_EXP)
1669 CASE_MATHFN (BUILT_IN_EXP10)
1670 CASE_MATHFN (BUILT_IN_EXP2)
1671 CASE_MATHFN (BUILT_IN_EXPM1)
1672 CASE_MATHFN (BUILT_IN_FABS)
1673 CASE_MATHFN (BUILT_IN_FDIM)
1674 CASE_MATHFN (BUILT_IN_FLOOR)
1675 CASE_MATHFN (BUILT_IN_FMA)
1676 CASE_MATHFN (BUILT_IN_FMAX)
1677 CASE_MATHFN (BUILT_IN_FMIN)
1678 CASE_MATHFN (BUILT_IN_FMOD)
1679 CASE_MATHFN (BUILT_IN_FREXP)
1680 CASE_MATHFN (BUILT_IN_GAMMA)
1681 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1682 CASE_MATHFN (BUILT_IN_HYPOT)
1683 CASE_MATHFN (BUILT_IN_ILOGB)
1684 CASE_MATHFN (BUILT_IN_INF)
1685 CASE_MATHFN (BUILT_IN_J0)
1686 CASE_MATHFN (BUILT_IN_J1)
1687 CASE_MATHFN (BUILT_IN_JN)
1688 CASE_MATHFN (BUILT_IN_LCEIL)
1689 CASE_MATHFN (BUILT_IN_LDEXP)
1690 CASE_MATHFN (BUILT_IN_LFLOOR)
1691 CASE_MATHFN (BUILT_IN_LGAMMA)
1692 CASE_MATHFN (BUILT_IN_LLCEIL)
1693 CASE_MATHFN (BUILT_IN_LLFLOOR)
1694 CASE_MATHFN (BUILT_IN_LLRINT)
1695 CASE_MATHFN (BUILT_IN_LLROUND)
1696 CASE_MATHFN (BUILT_IN_LOG)
1697 CASE_MATHFN (BUILT_IN_LOG10)
1698 CASE_MATHFN (BUILT_IN_LOG1P)
1699 CASE_MATHFN (BUILT_IN_LOG2)
1700 CASE_MATHFN (BUILT_IN_LOGB)
1701 CASE_MATHFN (BUILT_IN_LRINT)
1702 CASE_MATHFN (BUILT_IN_LROUND)
1703 CASE_MATHFN (BUILT_IN_MODF)
1704 CASE_MATHFN (BUILT_IN_NAN)
1705 CASE_MATHFN (BUILT_IN_NANS)
1706 CASE_MATHFN (BUILT_IN_NEARBYINT)
1707 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1708 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1709 CASE_MATHFN (BUILT_IN_POW)
1710 CASE_MATHFN (BUILT_IN_POWI)
1711 CASE_MATHFN (BUILT_IN_POW10)
1712 CASE_MATHFN (BUILT_IN_REMAINDER)
1713 CASE_MATHFN (BUILT_IN_REMQUO)
1714 CASE_MATHFN (BUILT_IN_RINT)
1715 CASE_MATHFN (BUILT_IN_ROUND)
1716 CASE_MATHFN (BUILT_IN_SCALB)
1717 CASE_MATHFN (BUILT_IN_SCALBLN)
1718 CASE_MATHFN (BUILT_IN_SCALBN)
1719 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1720 CASE_MATHFN (BUILT_IN_SIN)
1721 CASE_MATHFN (BUILT_IN_SINCOS)
1722 CASE_MATHFN (BUILT_IN_SINH)
1723 CASE_MATHFN (BUILT_IN_SQRT)
1724 CASE_MATHFN (BUILT_IN_TAN)
1725 CASE_MATHFN (BUILT_IN_TANH)
1726 CASE_MATHFN (BUILT_IN_TGAMMA)
1727 CASE_MATHFN (BUILT_IN_TRUNC)
1728 CASE_MATHFN (BUILT_IN_Y0)
1729 CASE_MATHFN (BUILT_IN_Y1)
1730 CASE_MATHFN (BUILT_IN_YN)
/* Pick the decl matching TYPE's main variant; implicit decls may be
   NULL_TREE when the runtime need not provide the function.  */
1736 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1737 return implicit_built_in_decls[fcode];
1738 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1739 return implicit_built_in_decls[fcodef];
1740 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1741 return implicit_built_in_decls[fcodel];
1746 /* If errno must be maintained, expand the RTL to check if the result,
1747 TARGET, of a built-in function call, EXP, is NaN, and if so set
1751 expand_errno_check (tree exp, rtx target)
1753 rtx lab = gen_label_rtx ();
1755 /* Test the result; if it is NaN, set errno=EDOM because
1756 the argument was not in the domain. */
/* TARGET == TARGET is false only for NaN, so this jump skips the
   errno update for ordinary results.  */
1757 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1761 /* If this built-in doesn't throw an exception, set errno directly. */
1762 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1764 #ifdef GEN_ERRNO_RTX
/* Targets can provide a custom rtx for errno.  */
1765 rtx errno_rtx = GEN_ERRNO_RTX;
1768 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1770 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1776 /* We can't set errno=EDOM directly; let the library call do it.
1777 Pop the arguments right away in case the call gets deleted. */
1779 expand_call (exp, target, 0);
1785 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1786 Return 0 if a normal call should be emitted rather than expanding the
1787 function in-line. EXP is the expression that is a call to the builtin
1788 function; if convenient, the result should be placed in TARGET.
1789 SUBTARGET may be used as the target for computing one of EXP's operands. */
1792 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1794 optab builtin_optab;
1795 rtx op0, insns, before_call;
1796 tree fndecl = get_callee_fndecl (exp);
1797 tree arglist = TREE_OPERAND (exp, 1);
1798 enum machine_mode mode;
1799 bool errno_set = false;
/* All functions handled here take exactly one floating-point arg.  */
1802 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1805 arg = TREE_VALUE (arglist);
/* Select the optab and note whether the function can set errno
   (which forces the errno check below when -fno-errno-math is off).  */
1807 switch (DECL_FUNCTION_CODE (fndecl))
1809 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets errno for negative args; skip the check when the
   argument is provably nonnegative.  */
1810 errno_set = ! tree_expr_nonnegative_p (arg);
1811 builtin_optab = sqrt_optab;
1813 CASE_FLT_FN (BUILT_IN_EXP):
1814 errno_set = true; builtin_optab = exp_optab; break;
1815 CASE_FLT_FN (BUILT_IN_EXP10):
1816 CASE_FLT_FN (BUILT_IN_POW10):
1817 errno_set = true; builtin_optab = exp10_optab; break;
1818 CASE_FLT_FN (BUILT_IN_EXP2):
1819 errno_set = true; builtin_optab = exp2_optab; break;
1820 CASE_FLT_FN (BUILT_IN_EXPM1):
1821 errno_set = true; builtin_optab = expm1_optab; break;
1822 CASE_FLT_FN (BUILT_IN_LOGB):
1823 errno_set = true; builtin_optab = logb_optab; break;
1824 CASE_FLT_FN (BUILT_IN_ILOGB):
1825 errno_set = true; builtin_optab = ilogb_optab; break;
1826 CASE_FLT_FN (BUILT_IN_LOG):
1827 errno_set = true; builtin_optab = log_optab; break;
1828 CASE_FLT_FN (BUILT_IN_LOG10):
1829 errno_set = true; builtin_optab = log10_optab; break;
1830 CASE_FLT_FN (BUILT_IN_LOG2):
1831 errno_set = true; builtin_optab = log2_optab; break;
1832 CASE_FLT_FN (BUILT_IN_LOG1P):
1833 errno_set = true; builtin_optab = log1p_optab; break;
1834 CASE_FLT_FN (BUILT_IN_ASIN):
1835 builtin_optab = asin_optab; break;
1836 CASE_FLT_FN (BUILT_IN_ACOS):
1837 builtin_optab = acos_optab; break;
1838 CASE_FLT_FN (BUILT_IN_TAN):
1839 builtin_optab = tan_optab; break;
1840 CASE_FLT_FN (BUILT_IN_ATAN):
1841 builtin_optab = atan_optab; break;
1842 CASE_FLT_FN (BUILT_IN_FLOOR):
1843 builtin_optab = floor_optab; break;
1844 CASE_FLT_FN (BUILT_IN_CEIL):
1845 builtin_optab = ceil_optab; break;
1846 CASE_FLT_FN (BUILT_IN_TRUNC):
1847 builtin_optab = btrunc_optab; break;
1848 CASE_FLT_FN (BUILT_IN_ROUND):
1849 builtin_optab = round_optab; break;
1850 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1851 builtin_optab = nearbyint_optab; break;
1852 CASE_FLT_FN (BUILT_IN_RINT):
1853 builtin_optab = rint_optab; break;
1854 CASE_FLT_FN (BUILT_IN_LRINT):
1855 CASE_FLT_FN (BUILT_IN_LLRINT):
1856 builtin_optab = lrint_optab; break;
1861 /* Make a suitable register to place result in. */
1862 mode = TYPE_MODE (TREE_TYPE (exp));
/* Without -ferrno-math (or with no NaNs in this mode) the errno
   check is unnecessary.  */
1864 if (! flag_errno_math || ! HONOR_NANS (mode))
1867 /* Before working hard, check whether the instruction is available. */
1868 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1870 target = gen_reg_rtx (mode);
1872 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1873 need to expand the argument again. This way, we will not perform
1874 side-effects more the once. */
1875 narg = builtin_save_expr (arg);
1879 arglist = build_tree_list (NULL_TREE, arg);
1880 exp = build_function_call_expr (fndecl, arglist);
1883 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1887 /* Compute into TARGET.
1888 Set TARGET to wherever the result comes back. */
1889 target = expand_unop (mode, builtin_optab, op0, target, 0);
1894 expand_errno_check (exp, target);
1896 /* Output the entire sequence. */
1897 insns = get_insns ();
1903 /* If we were unable to expand via the builtin, stop the sequence
1904 (without outputting the insns) and call to the library function
1905 with the stabilized argument list. */
1909 before_call = get_last_insn ();
1911 target = expand_call (exp, target, target == const0_rtx);
1913 /* If this is a sqrt operation and we don't care about errno, try to
1914 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1915 This allows the semantics of the libcall to be visible to the RTL
1917 if (builtin_optab == sqrt_optab && !errno_set)
1919 /* Search backwards through the insns emitted by expand_call looking
1920 for the instruction with the REG_RETVAL note. */
1921 rtx last = get_last_insn ();
1922 while (last != before_call)
1924 if (find_reg_note (last, REG_RETVAL, NULL))
1926 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1927 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1928 two elements, i.e. symbol_ref(sqrt) and the operand. */
1930 && GET_CODE (note) == EXPR_LIST
1931 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1932 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1933 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1935 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1936 /* Check operand is a register with expected mode. */
1939 && GET_MODE (operand) == mode)
1941 /* Replace the REG_EQUAL note with a SQRT rtx. */
1942 rtx equiv = gen_rtx_SQRT (mode, operand);
1943 set_unique_reg_note (last, REG_EQUAL, equiv);
1948 last = PREV_INSN (last);
1955 /* Expand a call to the builtin binary math functions (pow and atan2).
1956 Return 0 if a normal call should be emitted rather than expanding the
1957 function in-line. EXP is the expression that is a call to the builtin
1958 function; if convenient, the result should be placed in TARGET.
1959 SUBTARGET may be used as the target for computing one of EXP's
1963 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1965 optab builtin_optab;
1966 rtx op0, op1, insns;
1967 int op1_type = REAL_TYPE;
1968 tree fndecl = get_callee_fndecl (exp);
1969 tree arglist = TREE_OPERAND (exp, 1);
1970 tree arg0, arg1, temp, narg;
1971 enum machine_mode mode;
1972 bool errno_set = true;
/* ldexp is the odd one out: its second argument is an int.  */
1975 if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXP)
1976 || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPF)
1977 || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPL)) 
1978 op1_type = INTEGER_TYPE;
1980 if (!validate_arglist (arglist, REAL_TYPE, op1_type, VOID_TYPE))
1983 arg0 = TREE_VALUE (arglist);
1984 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1986 switch (DECL_FUNCTION_CODE (fndecl))
1988 CASE_FLT_FN (BUILT_IN_POW):
1989 builtin_optab = pow_optab; break;
1990 CASE_FLT_FN (BUILT_IN_ATAN2):
1991 builtin_optab = atan2_optab; break;
1992 CASE_FLT_FN (BUILT_IN_LDEXP):
1993 builtin_optab = ldexp_optab; break;
1994 CASE_FLT_FN (BUILT_IN_FMOD):
1995 builtin_optab = fmod_optab; break;
1996 CASE_FLT_FN (BUILT_IN_DREM):
1997 builtin_optab = drem_optab; break;
2002 /* Make a suitable register to place result in. */
2003 mode = TYPE_MODE (TREE_TYPE (exp));
2005 /* Before working hard, check whether the instruction is available. */
2006 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2009 target = gen_reg_rtx (mode);
2011 if (! flag_errno_math || ! HONOR_NANS (mode))
2014 /* Always stabilize the argument list. */
/* Both arguments may be expanded twice (builtin path and library
   fallback), so wrap each in a SAVE_EXPR when that changes it.  */
2015 narg = builtin_save_expr (arg1);
2019 temp = build_tree_list (NULL_TREE, narg);
2023 temp = TREE_CHAIN (arglist);
2025 narg = builtin_save_expr (arg0);
2029 arglist = tree_cons (NULL_TREE, narg, temp);
2033 arglist = tree_cons (NULL_TREE, arg0, temp);
/* Rebuild the call expression over the stabilized arguments.  */
2036 exp = build_function_call_expr (fndecl, arglist);
2038 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2039 op1 = expand_normal (arg1);
2043 /* Compute into TARGET.
2044 Set TARGET to wherever the result comes back. */
2045 target = expand_binop (mode, builtin_optab, op0, op1,
2046 target, 0, OPTAB_DIRECT);
2048 /* If we were unable to expand via the builtin, stop the sequence
2049 (without outputting the insns) and call to the library function
2050 with the stabilized argument list. */
2054 return expand_call (exp, target, target == const0_rtx);
2058 expand_errno_check (exp, target);
2060 /* Output the entire sequence. */
2061 insns = get_insns ();
/* NOTE(review): this excerpt is elided -- the `static rtx` header, braces,
   `return 0;` paths and several statements are missing between the numbered
   lines.  Comments below describe only what the visible lines show.  */
2068 /* Expand a call to the builtin sin and cos math functions.
2069 Return 0 if a normal call should be emitted rather than expanding the
2070 function in-line. EXP is the expression that is a call to the builtin
2071 function; if convenient, the result should be placed in TARGET.
2072 SUBTARGET may be used as the target for computing one of EXP's
2076 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2078 optab builtin_optab;
2080 tree fndecl = get_callee_fndecl (exp);
2081 tree arglist = TREE_OPERAND (exp, 1);
2082 enum machine_mode mode;
/* Exactly one REAL_TYPE argument is required; otherwise fall back to a call.  */
2085 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2088 arg = TREE_VALUE (arglist);
/* Both sin and cos first try the combined sincos optab.  */
2090 switch (DECL_FUNCTION_CODE (fndecl))
2092 CASE_FLT_FN (BUILT_IN_SIN):
2093 CASE_FLT_FN (BUILT_IN_COS):
2094 builtin_optab = sincos_optab; break;
2099 /* Make a suitable register to place result in. */
2100 mode = TYPE_MODE (TREE_TYPE (exp));
2102 /* Check if sincos insn is available, otherwise fallback
2103 to sin or cos insn. */
2104 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) {
2105 switch (DECL_FUNCTION_CODE (fndecl))
2107 CASE_FLT_FN (BUILT_IN_SIN):
2108 builtin_optab = sin_optab; break;
2109 CASE_FLT_FN (BUILT_IN_COS):
2110 builtin_optab = cos_optab; break;
2116 /* Before working hard, check whether the instruction is available. */
2117 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2119 target = gen_reg_rtx (mode);
2121 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2122 need to expand the argument again. This way, we will not perform
2123 side-effects more the once. */
2124 narg = save_expr (arg);
2128 arglist = build_tree_list (NULL_TREE, arg);
2129 exp = build_function_call_expr (fndecl, arglist);
2132 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2136 /* Compute into TARGET.
2137 Set TARGET to wherever the result comes back. */
2138 if (builtin_optab == sincos_optab)
/* sincos produces two values; request only the one this builtin needs
   (sin in the second slot, cos in the first -- see operand positions).  */
2142 switch (DECL_FUNCTION_CODE (fndecl))
2144 CASE_FLT_FN (BUILT_IN_SIN):
2145 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2147 CASE_FLT_FN (BUILT_IN_COS):
2148 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2153 gcc_assert (result);
2157 target = expand_unop (mode, builtin_optab, op0, target, 0);
2162 /* Output the entire sequence. */
2163 insns = get_insns ();
2169 /* If we were unable to expand via the builtin, stop the sequence
2170 (without outputting the insns) and call to the library function
2171 with the stabilized argument list. */
2175 target = expand_call (exp, target, target == const0_rtx);
/* NOTE(review): elided excerpt -- function header, braces and early
   `return NULL_RTX` / final return lines are missing from this view.  */
2180 /* Expand a call to the builtin sincos math function.
2181 Return 0 if a normal call should be emitted rather than expanding the
2182 function in-line. EXP is the expression that is a call to the builtin
2186 expand_builtin_sincos (tree exp)
2188 rtx op0, op1, op2, target1, target2;
2189 tree arglist = TREE_OPERAND (exp, 1);
2190 enum machine_mode mode;
2191 tree arg, sinp, cosp;
/* sincos(x, *sinp, *cosp): one real value and two output pointers.  */
2194 if (!validate_arglist (arglist, REAL_TYPE,
2195 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2198 arg = TREE_VALUE (arglist);
2199 sinp = TREE_VALUE (TREE_CHAIN (arglist));
2200 cosp = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2202 /* Make a suitable register to place result in. */
/* Note: mode is taken from ARG here (not from EXP as in the other
   expanders) because sincos itself returns void.  */
2203 mode = TYPE_MODE (TREE_TYPE (arg));
2205 /* Check if sincos insn is available, otherwise emit the call. */
2206 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2209 target1 = gen_reg_rtx (mode);
2210 target2 = gen_reg_rtx (mode);
2212 op0 = expand_normal (arg);
/* op1/op2 are the lvalues *sinp and *cosp, expanded as destinations.  */
2213 op1 = expand_normal (build_fold_indirect_ref (sinp));
2214 op2 = expand_normal (build_fold_indirect_ref (cosp));
2216 /* Compute into target1 and target2.
2217 Set TARGET to wherever the result comes back. */
2218 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2219 gcc_assert (result);
2221 /* Move target1 and target2 to the memory locations indicated
2223 emit_move_insn (op1, target1);
2224 emit_move_insn (op2, target2);
/* NOTE(review): elided excerpt -- braces, `return 0;` paths, `end_sequence`
   and `emit_insn` calls are missing between the numbered lines.  */
2229 /* Expand a call to one of the builtin rounding functions (lfloor).
2230 If expanding via optab fails, lower expression to (int)(floor(x)).
2231 EXP is the expression that is a call to the builtin function;
2232 if convenient, the result should be placed in TARGET. SUBTARGET may
2233 be used as the target for computing one of EXP's operands. */
2236 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2238 optab builtin_optab;
2239 rtx op0, insns, tmp;
2240 tree fndecl = get_callee_fndecl (exp);
2241 tree arglist = TREE_OPERAND (exp, 1);
2242 enum built_in_function fallback_fn;
2243 tree fallback_fndecl;
2244 enum machine_mode mode;
2247 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2250 arg = TREE_VALUE (arglist);
/* Pick the direct optab and the pure-FP builtin to fall back on.  */
2252 switch (DECL_FUNCTION_CODE (fndecl))
2254 CASE_FLT_FN (BUILT_IN_LCEIL):
2255 CASE_FLT_FN (BUILT_IN_LLCEIL):
2256 builtin_optab = lceil_optab;
2257 fallback_fn = BUILT_IN_CEIL;
2260 CASE_FLT_FN (BUILT_IN_LFLOOR):
2261 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2262 builtin_optab = lfloor_optab;
2263 fallback_fn = BUILT_IN_FLOOR;
2270 /* Make a suitable register to place result in. */
2271 mode = TYPE_MODE (TREE_TYPE (exp));
2273 /* Before working hard, check whether the instruction is available. */
2274 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2276 target = gen_reg_rtx (mode);
2278 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2279 need to expand the argument again. This way, we will not perform
2280 side-effects more the once. */
2281 narg = builtin_save_expr (arg);
2285 arglist = build_tree_list (NULL_TREE, arg);
2286 exp = build_function_call_expr (fndecl, arglist);
2289 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2293 /* Compute into TARGET.
2294 Set TARGET to wherever the result comes back. */
2295 target = expand_unop (mode, builtin_optab, op0, target, 0);
2299 /* Output the entire sequence. */
2300 insns = get_insns ();
2306 /* If we were unable to expand via the builtin, stop the sequence
2307 (without outputting the insns). */
2311 /* Fall back to floating point rounding optab. */
2312 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2313 /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
2314 ??? Perhaps convert (int)floorf(x) into (int)floor((double)x). */
2315 gcc_assert (fallback_fndecl != NULL_TREE);
2316 exp = build_function_call_expr (fallback_fndecl, arglist);
2318 tmp = expand_normal (exp);
2320 /* Truncate the result of floating point optab to integer
2321 via expand_fix (). */
2322 target = gen_reg_rtx (mode);
/* Third argument 0: signed conversion (the l*/ll* results are signed).  */
2323 expand_fix (target, tmp, 0);
2328 /* To evaluate powi(x,n), the floating point value x raised to the
2329 constant integer exponent n, we use a hybrid algorithm that
2330 combines the "window method" with look-up tables. For an
2331 introduction to exponentiation algorithms and "addition chains",
2332 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2333 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2334 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2335 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2337 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2338 multiplications to inline before calling the system library's pow
2339 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2340 so this default never requires calling pow, powf or powl. */
2342 #ifndef POWI_MAX_MULTS
2343 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2346 /* The size of the "optimal power tree" lookup table. All
2347 exponents less than this value are simply looked up in the
2348 powi_table below. This threshold is also used to size the
2349 cache of pseudo registers that hold intermediate results. */
2350 #define POWI_TABLE_SIZE 256
2352 /* The size, in bits of the window, used in the "window method"
2353 exponentiation algorithm. This is equivalent to a radix of
2354 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2355 #define POWI_WINDOW_SIZE 3
2357 /* The following table is an efficient representation of an
2358 "optimal power tree". For each value, i, the corresponding
2359 value, j, in the table states than an optimal evaluation
2360 sequence for calculating pow(x,i) can be found by evaluating
2361 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2362 100 integers is given in Knuth's "Seminumerical algorithms". */
/* Each entry powi_table[i] = j encodes the split pow(x,i) =
   pow(x,j) * pow(x,i-j); consumed by powi_lookup_cost and expand_powi_1.  */
2364 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2366 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2367 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2368 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2369 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2370 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2371 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2372 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2373 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2374 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2375 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2376 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2377 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2378 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2379 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2380 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2381 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2382 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2383 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2384 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2385 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2386 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2387 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2388 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2389 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2390 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2391 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2392 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2393 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2394 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2395 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2396 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2397 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
/* NOTE(review): elided excerpt -- the return type line, the cache check
   (`if (cache[n]) return 0; cache[n] = true;` presumably) and braces are
   missing from this view.  */
2401 /* Return the number of multiplications required to calculate
2402 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2403 subroutine of powi_cost. CACHE is an array indicating
2404 which exponents have already been calculated. */
2407 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2409 /* If we've already calculated this exponent, then this evaluation
2410 doesn't require any additional multiplications. */
/* Recurse on the optimal split from powi_table plus one multiply
   to combine the two halves.  */
2415 return powi_lookup_cost (n - powi_table[n], cache)
2416 + powi_lookup_cost (powi_table[n], cache) + 1;
/* NOTE(review): elided excerpt -- the return type, the `if (n == 0) return 0;`
   shortcut, `result` declaration/initialization and braces are not visible.  */
2419 /* Return the number of multiplications required to calculate
2420 powi(x,n) for an arbitrary x, given the exponent N. This
2421 function needs to be kept in sync with expand_powi below. */
2424 powi_cost (HOST_WIDE_INT n)
2426 bool cache[POWI_TABLE_SIZE];
2427 unsigned HOST_WIDE_INT digit;
2428 unsigned HOST_WIDE_INT val;
2434 /* Ignore the reciprocal when calculating the cost. */
2435 val = (n < 0) ? -n : n;
2437 /* Initialize the exponent cache. */
2438 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel POWI_WINDOW_SIZE bits at a time until the
   remaining exponent fits in the lookup table.  */
2443 while (val >= POWI_TABLE_SIZE)
2447 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2448 result += powi_lookup_cost (digit, cache)
2449 + POWI_WINDOW_SIZE + 1;
2450 val >>= POWI_WINDOW_SIZE;
2459 return result + powi_lookup_cost (val, cache);
/* NOTE(review): elided excerpt -- the `static rtx` header, the cache-hit
   early return for small N, the even/odd handling around n >> 1, braces
   and the final `return target;` are missing from this view.  */
2462 /* Recursive subroutine of expand_powi. This function takes the array,
2463 CACHE, of already calculated exponents and an exponent N and returns
2464 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2467 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2469 unsigned HOST_WIDE_INT digit;
/* Small exponents: split per the optimal power tree.  */
2473 if (n < POWI_TABLE_SIZE)
2478 target = gen_reg_rtx (mode);
2481 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2482 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Large exponents: strip a POWI_WINDOW_SIZE-bit digit, recurse.  */
2486 target = gen_reg_rtx (mode);
2487 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2488 op0 = expand_powi_1 (mode, n - digit, cache);
2489 op1 = expand_powi_1 (mode, digit, cache);
2493 target = gen_reg_rtx (mode);
2494 op0 = expand_powi_1 (mode, n >> 1, cache);
2498 result = expand_mult (mode, op0, op1, target, 0);
2499 if (result != target)
2500 emit_move_insn (target, result);
/* NOTE(review): elided excerpt -- the `static rtx` header, the n == 0
   test guarding the CONST1_RTX return, the cache[1] = x seeding, the
   `if (n < 0)` test and the final return are not visible here.  */
2504 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2505 floating point operand in mode MODE, and N is the exponent. This
2506 function needs to be kept in sync with powi_cost above. */
2509 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2511 unsigned HOST_WIDE_INT val;
2512 rtx cache[POWI_TABLE_SIZE];
2516 return CONST1_RTX (mode);
2518 val = (n < 0) ? -n : n;
2520 memset (cache, 0, sizeof (cache));
2523 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2525 /* If the original exponent was negative, reciprocate the result. */
2527 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2528 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* NOTE(review): elided excerpt -- `static rtx` header, local declarations
   for arg0/arg1/c/n, braces and `return 0;` lines are missing here.  */
2533 /* Expand a call to the pow built-in mathematical function. Return 0 if
2534 a normal call should be emitted rather than expanding the function
2535 in-line. EXP is the expression that is a call to the builtin
2536 function; if convenient, the result should be placed in TARGET. */
2539 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2541 tree arglist = TREE_OPERAND (exp, 1);
2544 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2547 arg0 = TREE_VALUE (arglist);
2548 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
/* Special-case a constant exponent that is exactly an integer: expand
   as repeated multiplication via expand_powi instead of a pow call.  */
2550 if (TREE_CODE (arg1) == REAL_CST
2551 && ! TREE_CONSTANT_OVERFLOW (arg1))
2553 REAL_VALUE_TYPE cint;
2557 c = TREE_REAL_CST (arg1);
2558 n = real_to_integer (&c);
2559 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
/* real_identical confirms the round-trip was exact, i.e. the exponent
   really is an integer value.  */
2560 if (real_identical (&c, &cint))
2562 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2563 Otherwise, check the number of multiplications required.
2564 Note that pow never sets errno for an integer exponent. */
2565 if ((n >= -1 && n <= 2)
2566 || (flag_unsafe_math_optimizations
2568 && powi_cost (n) <= POWI_MAX_MULTS))
2570 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2571 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2572 op = force_reg (mode, op);
2573 return expand_powi (op, mode, n);
2578 if (! flag_unsafe_math_optimizations)
/* General case: defer to the two-argument math expander (pow optab).  */
2580 return expand_builtin_mathfn_2 (exp, target, subtarget);
/* NOTE(review): elided excerpt -- `static rtx` header, arg0/arg1/op0/op1
   declarations, braces and the trailing `return target;` are not visible.  */
2583 /* Expand a call to the powi built-in mathematical function. Return 0 if
2584 a normal call should be emitted rather than expanding the function
2585 in-line. EXP is the expression that is a call to the builtin
2586 function; if convenient, the result should be placed in TARGET. */
2589 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2591 tree arglist = TREE_OPERAND (exp, 1);
2594 enum machine_mode mode;
2595 enum machine_mode mode2;
2597 if (! validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2600 arg0 = TREE_VALUE (arglist);
2601 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2602 mode = TYPE_MODE (TREE_TYPE (exp));
2604 /* Handle constant power. */
2606 if (TREE_CODE (arg1) == INTEGER_CST
2607 && ! TREE_CONSTANT_OVERFLOW (arg1))
2609 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
2611 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2612 Otherwise, check the number of multiplications required. */
/* HIGH word must be 0 or -1 so the exponent fits in one HOST_WIDE_INT.  */
2613 if ((TREE_INT_CST_HIGH (arg1) == 0
2614 || TREE_INT_CST_HIGH (arg1) == -1)
2615 && ((n >= -1 && n <= 2)
2617 && powi_cost (n) <= POWI_MAX_MULTS)))
2619 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
2620 op0 = force_reg (mode, op0);
2621 return expand_powi (op0, mode, n);
2625 /* Emit a libcall to libgcc. */
2627 /* Mode of the 2nd argument must match that of an int. */
2628 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2630 if (target == NULL_RTX)
2631 target = gen_reg_rtx (mode);
2633 op0 = expand_expr (arg0, subtarget, mode, 0);
2634 if (GET_MODE (op0) != mode)
2635 op0 = convert_to_mode (mode, op0, 0);
2636 op1 = expand_expr (arg1, 0, mode2, 0);
2637 if (GET_MODE (op1) != mode2)
2638 op1 = convert_to_mode (mode2, op1, 0);
2640 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
2641 target, LCT_CONST_MAKE_BLOCK, mode, 2,
2642 op0, mode, op1, mode2);
/* NOTE(review): elided excerpt -- `static rtx` header, several declarations
   (align, pat), `return 0;` paths, braces and the condition bodies around
   lines 2669/2705/2739 are missing from this view.  */
2647 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2648 if we failed the caller should emit a normal call, otherwise
2649 try to get the result in TARGET, if convenient. */
2652 expand_builtin_strlen (tree arglist, rtx target,
2653 enum machine_mode target_mode)
2655 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2660 tree len, src = TREE_VALUE (arglist);
2661 rtx result, src_reg, char_rtx, before_strlen;
2662 enum machine_mode insn_mode = target_mode, char_mode;
2663 enum insn_code icode = CODE_FOR_nothing;
2666 /* If the length can be computed at compile-time, return it. */
2667 len = c_strlen (src, 0);
2669 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2671 /* If the length can be computed at compile-time and is constant
2672 integer, but there are side-effects in src, evaluate
2673 src for side-effects, then return len.
2674 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2675 can be optimized into: i++; x = 3; */
2676 len = c_strlen (src, 1);
2677 if (len && TREE_CODE (len) == INTEGER_CST)
2679 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2680 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2683 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2685 /* If SRC is not a pointer type, don't do this operation inline. */
2689 /* Bail out if we can't compute strlen in the right mode. */
/* Walk widening modes until a target strlen pattern is found.  */
2690 while (insn_mode != VOIDmode)
2692 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2693 if (icode != CODE_FOR_nothing)
2696 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2698 if (insn_mode == VOIDmode)
2701 /* Make a place to write the result of the instruction. */
2705 && GET_MODE (result) == insn_mode
2706 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2707 result = gen_reg_rtx (insn_mode);
2709 /* Make a place to hold the source address. We will not expand
2710 the actual source until we are sure that the expansion will
2711 not fail -- there are trees that cannot be expanded twice. */
2712 src_reg = gen_reg_rtx (Pmode);
2714 /* Mark the beginning of the strlen sequence so we can emit the
2715 source operand later. */
2716 before_strlen = get_last_insn ();
2718 char_rtx = const0_rtx;
2719 char_mode = insn_data[(int) icode].operand[2].mode;
2720 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2722 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2724 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2725 char_rtx, GEN_INT (align));
2730 /* Now that we are assured of success, expand the source. */
2732 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
2734 emit_move_insn (src_reg, pat);
/* Emit the saved strlen insn after BEFORE_STRLEN so the source address
   computation precedes it; fall back to the sequence start otherwise.  */
2739 emit_insn_after (pat, before_strlen);
2741 emit_insn_before (pat, get_insns ());
2743 /* Return the value in the proper mode for this function. */
2744 if (GET_MODE (result) == target_mode)
2746 else if (target != 0)
2747 convert_move (target, result, 0);
2749 target = convert_to_mode (target_mode, result, 0);
/* NOTE(review): elided -- header line, `if (result)` guard and the
   fallthrough `return 0;` are not visible.  Folds strstr at tree level
   via fold_builtin_strstr and expands the folded result if any.  */
2755 /* Expand a call to the strstr builtin. Return 0 if we failed the
2756 caller should emit a normal call, otherwise try to get the result
2757 in TARGET, if convenient (and in mode MODE if that's convenient). */
2760 expand_builtin_strstr (tree arglist, tree type, rtx target, enum machine_mode mode)
2762 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2764 tree result = fold_builtin_strstr (arglist, type);
2766 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* NOTE(review): elided -- same skeleton as expand_builtin_strstr but for
   strchr (pointer + integer arguments); guards and returns not visible.  */
2771 /* Expand a call to the strchr builtin. Return 0 if we failed the
2772 caller should emit a normal call, otherwise try to get the result
2773 in TARGET, if convenient (and in mode MODE if that's convenient). */
2776 expand_builtin_strchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2778 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2780 tree result = fold_builtin_strchr (arglist, type);
2782 return expand_expr (result, target, mode, EXPAND_NORMAL);
2784 /* FIXME: Should use strchrM optab so that ports can optimize this. */
/* NOTE(review): elided -- same fold-then-expand skeleton as strchr above,
   delegating to fold_builtin_strrchr; guards and returns not visible.  */
2789 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2790 caller should emit a normal call, otherwise try to get the result
2791 in TARGET, if convenient (and in mode MODE if that's convenient). */
2794 expand_builtin_strrchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2796 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2798 tree result = fold_builtin_strrchr (arglist, type);
2800 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* NOTE(review): elided -- same fold-then-expand skeleton, two pointer
   arguments, delegating to fold_builtin_strpbrk.  */
2805 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2806 caller should emit a normal call, otherwise try to get the result
2807 in TARGET, if convenient (and in mode MODE if that's convenient). */
2810 expand_builtin_strpbrk (tree arglist, tree type, rtx target, enum machine_mode mode)
2812 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2814 tree result = fold_builtin_strpbrk (arglist, type);
2816 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* NOTE(review): elided -- the `static rtx` header line and braces are
   missing.  DATA is the source string; the assert bounds the read to
   within the string including its NUL terminator.  */
2821 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2822 bytes from constant string DATA + OFFSET and return it as target
2826 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2827 enum machine_mode mode)
2829 const char *str = (const char *) data;
2831 gcc_assert (offset >= 0
2832 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2833 <= strlen (str) + 1));
2835 return c_readstr (str + offset, mode);
/* NOTE(review): elided excerpt -- `static rtx` header, braces, `return 0;`
   paths and several guard conditions (e.g. the `if (result)`, `if (src_str`
   and `if (dest_addr == 0)` lines) are incomplete in this view.  */
2838 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2839 Return 0 if we failed, the caller should emit a normal call,
2840 otherwise try to get the result in TARGET, if convenient (and in
2841 mode MODE if that's convenient). */
2843 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
2845 tree fndecl = get_callee_fndecl (exp);
2846 tree arglist = TREE_OPERAND (exp, 1);
2847 if (!validate_arglist (arglist,
2848 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2852 tree dest = TREE_VALUE (arglist);
2853 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2854 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2855 const char *src_str;
2856 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2857 unsigned int dest_align
2858 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2859 rtx dest_mem, src_mem, dest_addr, len_rtx;
/* First try a tree-level fold (e.g. constant-size copies).  */
2860 tree result = fold_builtin_memcpy (fndecl, arglist);
2863 return expand_expr (result, target, mode, EXPAND_NORMAL);
2865 /* If DEST is not a pointer type, call the normal function. */
2866 if (dest_align == 0)
2869 /* If either SRC is not a pointer type, don't do this
2870 operation in-line. */
2874 dest_mem = get_memory_rtx (dest, len);
2875 set_mem_align (dest_mem, dest_align);
2876 len_rtx = expand_normal (len);
2877 src_str = c_getstr (src);
2879 /* If SRC is a string constant and block move would be done
2880 by pieces, we can avoid loading the string from memory
2881 and only stored the computed constants. */
2883 && GET_CODE (len_rtx) == CONST_INT
2884 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2885 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2886 (void *) src_str, dest_align))
2888 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2889 builtin_memcpy_read_str,
2890 (void *) src_str, dest_align, 0);
2891 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2892 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2896 src_mem = get_memory_rtx (src, len);
2897 set_mem_align (src_mem, src_align);
2899 /* Copy word part most expediently. */
/* Preserve the tail-call property of the original call expression.  */
2900 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2901 CALL_EXPR_TAILCALL (exp)
2902 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
2906 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2907 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* NOTE(review): elided excerpt -- `static rtx` header, braces, `return 0;`
   paths, the `endp` parameter line and several guard conditions are
   incomplete in this view.  */
2913 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2914 Return 0 if we failed; the caller should emit a normal call,
2915 otherwise try to get the result in TARGET, if convenient (and in
2916 mode MODE if that's convenient). If ENDP is 0 return the
2917 destination pointer, if ENDP is 1 return the end pointer ala
2918 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2922 expand_builtin_mempcpy (tree arglist, tree type, rtx target, enum machine_mode mode,
2925 if (!validate_arglist (arglist,
2926 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2928 /* If return value is ignored, transform mempcpy into memcpy. */
2929 else if (target == const0_rtx)
2931 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2936 return expand_expr (build_function_call_expr (fn, arglist),
2937 target, mode, EXPAND_NORMAL);
2941 tree dest = TREE_VALUE (arglist);
2942 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2943 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2944 const char *src_str;
2945 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2946 unsigned int dest_align
2947 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2948 rtx dest_mem, src_mem, len_rtx;
2949 tree result = fold_builtin_mempcpy (arglist, type, endp);
2952 return expand_expr (result, target, mode, EXPAND_NORMAL);
2954 /* If either SRC or DEST is not a pointer type, don't do this
2955 operation in-line. */
2956 if (dest_align == 0 || src_align == 0)
2959 /* If LEN is not constant, call the normal function. */
2960 if (! host_integerp (len, 1))
2963 len_rtx = expand_normal (len);
2964 src_str = c_getstr (src);
2966 /* If SRC is a string constant and block move would be done
2967 by pieces, we can avoid loading the string from memory
2968 and only stored the computed constants. */
2970 && GET_CODE (len_rtx) == CONST_INT
2971 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2972 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2973 (void *) src_str, dest_align))
2975 dest_mem = get_memory_rtx (dest, len);
2976 set_mem_align (dest_mem, dest_align);
/* ENDP is passed through so store_by_pieces returns the right pointer
   (start, end, or end-1).  */
2977 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2978 builtin_memcpy_read_str,
2979 (void *) src_str, dest_align, endp);
2980 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2981 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2985 if (GET_CODE (len_rtx) == CONST_INT
2986 && can_move_by_pieces (INTVAL (len_rtx),
2987 MIN (dest_align, src_align)))
2989 dest_mem = get_memory_rtx (dest, len);
2990 set_mem_align (dest_mem, dest_align);
2991 src_mem = get_memory_rtx (src, len);
2992 set_mem_align (src_mem, src_align);
2993 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2994 MIN (dest_align, src_align), endp);
2995 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2996 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* NOTE(review): elided excerpt -- `static rtx` header, braces, `return 0;`
   paths and the guard conditions around lines 3026/3037/3044 are
   incomplete in this view.  */
3004 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
3005 if we failed; the caller should emit a normal call. */
3008 expand_builtin_memmove (tree arglist, tree type, rtx target,
3009 enum machine_mode mode, tree orig_exp)
3011 if (!validate_arglist (arglist,
3012 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3016 tree dest = TREE_VALUE (arglist);
3017 tree src = TREE_VALUE (TREE_CHAIN (arglist));
3018 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3020 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3021 unsigned int dest_align
3022 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3023 tree result = fold_builtin_memmove (arglist, type);
3026 return expand_expr (result, target, mode, EXPAND_NORMAL);
3028 /* If DEST is not a pointer type, call the normal function. */
3029 if (dest_align == 0)
3032 /* If either SRC is not a pointer type, don't do this
3033 operation in-line. */
3037 /* If src is categorized for a readonly section we can use
/* Read-only source cannot overlap a writable destination, so memmove
   degenerates to memcpy.  */
3039 if (readonly_data_expr (src))
3041 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3044 fn = build_function_call_expr (fn, arglist);
3045 if (TREE_CODE (fn) == CALL_EXPR)
3046 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3047 return expand_expr (fn, target, mode, EXPAND_NORMAL);
3050 /* If length is 1 and we can expand memcpy call inline,
3051 it is ok to use memcpy as well. */
3052 if (integer_onep (len))
3054 rtx ret = expand_builtin_mempcpy (arglist, type, target, mode,
3060 /* Otherwise, call the normal function. */
/* NOTE(review): elided excerpt -- `static rtx` header, braces and the
   `return 0;` after validate_arglist are missing from this view.  */
3065 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
3066 if we failed the caller should emit a normal call. */
3069 expand_builtin_bcopy (tree exp)
3071 tree arglist = TREE_OPERAND (exp, 1);
3072 tree type = TREE_TYPE (exp);
3073 tree src, dest, size, newarglist;
3075 if (!validate_arglist (arglist,
3076 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3079 src = TREE_VALUE (arglist);
3080 dest = TREE_VALUE (TREE_CHAIN (arglist));
3081 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3083 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
3084 memmove(ptr y, ptr x, size_t z). This is done this way
3085 so that if it isn't expanded inline, we fallback to
3086 calling bcopy instead of memmove. */
/* Build the list back-to-front: size, then src, then dest, which yields
   memmove's (dest, src, size) argument order.  */
3088 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3089 newarglist = tree_cons (NULL_TREE, src, newarglist);
3090 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3092 return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode, exp);
/* NOTE(review): elided excerpt -- the `#ifndef HAVE_movstr` guard, the
   `static rtx` header, several declarations (insn, end), the `if
   (!HAVE_movstr) return 0;` shortcut and the endp==0 / endp!=0 branch
   structure are missing from this view.  */
3096 # define HAVE_movstr 0
3097 # define CODE_FOR_movstr CODE_FOR_nothing
3100 /* Expand into a movstr instruction, if one is available. Return 0 if
3101 we failed, the caller should emit a normal call, otherwise try to
3102 get the result in TARGET, if convenient. If ENDP is 0 return the
3103 destination pointer, if ENDP is 1 return the end pointer ala
3104 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3108 expand_movstr (tree dest, tree src, rtx target, int endp)
3114 const struct insn_data * data;
3119 dest_mem = get_memory_rtx (dest, NULL);
3120 src_mem = get_memory_rtx (src, NULL);
3123 target = force_reg (Pmode, XEXP (dest_mem, 0));
3124 dest_mem = replace_equiv_address (dest_mem, target);
3125 end = gen_reg_rtx (Pmode);
3129 if (target == 0 || target == const0_rtx)
3131 end = gen_reg_rtx (Pmode);
3139 data = insn_data + CODE_FOR_movstr;
/* The pattern's first operand may constrain the mode of END.  */
3141 if (data->operand[0].mode != VOIDmode)
3142 end = gen_lowpart (data->operand[0].mode, end);
3144 insn = data->genfun (end, dest_mem, src_mem);
3150 /* movstr is supposed to set end to the address of the NUL
3151 terminator. If the caller requested a mempcpy-like return value,
3153 if (endp == 1 && target != const0_rtx)
/* mempcpy returns one past the NUL, hence the +1 adjustment.  */
3155 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3156 emit_move_insn (target, force_operand (tem, NULL_RTX));
/* NOTE(review): elided -- `static rtx` header and the `if (result)` guard
   before the expand_expr return are missing.  Tries a tree-level fold
   first, then falls back to the target movstr pattern.  */
3162 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
3163 if we failed the caller should emit a normal call, otherwise try to get
3164 the result in TARGET, if convenient (and in mode MODE if that's
3168 expand_builtin_strcpy (tree fndecl, tree arglist, rtx target, enum machine_mode mode)
3170 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3172 tree result = fold_builtin_strcpy (fndecl, arglist, 0);
3174 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* endp == 0: return the destination pointer, as strcpy does.  */
3176 return expand_movstr (TREE_VALUE (arglist),
3177 TREE_VALUE (TREE_CHAIN (arglist)),
3178 target, /*endp=*/0);
/* NOTE(review): elided excerpt -- `static rtx` header, several declarations
   (narglist, ret), braces, `return 0;` paths and the guards around lines
   3199/3232/3245 are incomplete in this view.  */
3183 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
3184 Return 0 if we failed the caller should emit a normal call,
3185 otherwise try to get the result in TARGET, if convenient (and in
3186 mode MODE if that's convenient). */
3189 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3191 tree arglist = TREE_OPERAND (exp, 1);
3192 /* If return value is ignored, transform stpcpy into strcpy. */
3193 if (target == const0_rtx)
3195 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3199 return expand_expr (build_function_call_expr (fn, arglist),
3200 target, mode, EXPAND_NORMAL);
3203 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3207 tree dst, src, len, lenp1;
3211 /* Ensure we get an actual string whose length can be evaluated at
3212 compile-time, not an expression containing a string. This is
3213 because the latter will potentially produce pessimized code
3214 when used to produce the return value. */
3215 src = TREE_VALUE (TREE_CHAIN (arglist));
3216 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
/* endp == 2: movstr returns the address of the NUL, as stpcpy does.  */
3217 return expand_movstr (TREE_VALUE (arglist),
3218 TREE_VALUE (TREE_CHAIN (arglist)),
3219 target, /*endp=*/2);
3221 dst = TREE_VALUE (arglist);
/* Known source length: lower stpcpy(dst, src) to mempcpy(dst, src,
   strlen(src) + 1) with the end-minus-one return convention.  */
3222 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3223 narglist = build_tree_list (NULL_TREE, lenp1);
3224 narglist = tree_cons (NULL_TREE, src, narglist);
3225 narglist = tree_cons (NULL_TREE, dst, narglist);
3226 ret = expand_builtin_mempcpy (narglist, TREE_TYPE (exp),
3227 target, mode, /*endp=*/2);
3232 if (TREE_CODE (len) == INTEGER_CST)
3234 rtx len_rtx = expand_normal (len);
3236 if (GET_CODE (len_rtx) == CONST_INT)
/* Fallback: expand as strcpy and add the constant length to the
   returned destination pointer.  */
3238 ret = expand_builtin_strcpy (get_callee_fndecl (exp),
3239 arglist, target, mode);
3245 if (mode != VOIDmode)
3246 target = gen_reg_rtx (mode);
3248 target = gen_reg_rtx (GET_MODE (ret));
3250 if (GET_MODE (target) != GET_MODE (ret))
3251 ret = gen_lowpart (GET_MODE (target), ret);
3253 ret = plus_constant (ret, INTVAL (len_rtx));
3254 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3262 return expand_movstr (TREE_VALUE (arglist),
3263 TREE_VALUE (TREE_CHAIN (arglist)),
3264 target, /*endp=*/2);
3268 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3269 bytes from constant string DATA + OFFSET and return it as target
/* NOTE(review): lines are elided from this excerpt; residual original
   line numbers mark the gaps (e.g. the return statement for the
   out-of-range OFFSET case is not visible).  */
3273 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3274 enum machine_mode mode)
3276 const char *str = (const char *) data;
/* Past the NUL terminator strncpy pads with zeros; the elided branch
   presumably returns a zero constant here.  */
3278 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
/* Within the string: read MODE-sized bytes starting at OFFSET.  */
3281 return c_readstr (str + offset, mode);
3284 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
3285 if we failed the caller should emit a normal call. */
/* NOTE(review): interior lines are elided from this excerpt; residual
   original-file line numbers mark the gaps.  */
3288 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3290 tree fndecl = get_callee_fndecl (exp);
3291 tree arglist = TREE_OPERAND (exp, 1);
/* Expect (char *, const char *, size_t).  */
3292 if (validate_arglist (arglist,
3293 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* SLEN = compile-time length of the source string (may be NULL);
   LEN  = the requested copy count (third argument).  */
3295 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
3296 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
/* Try folding the whole call first.  */
3297 tree result = fold_builtin_strncpy (fndecl, arglist, slen);
3300 return expand_expr (result, target, mode, EXPAND_NORMAL);
3302 /* We must be passed a constant len and src parameter. */
3303 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* Account for the NUL terminator when comparing against LEN.  */
3306 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3308 /* We're required to pad with trailing zeros if the requested
3309 len is greater than strlen(s2)+1. In that case try to
3310 use store_by_pieces, if it fails, punt. */
3311 if (tree_int_cst_lt (slen, len))
3313 tree dest = TREE_VALUE (arglist);
3314 unsigned int dest_align
3315 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3316 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
/* Punt unless the source is a literal string, the destination
   alignment is known, and the target can store LEN bytes by pieces.  */
3319 if (!p || dest_align == 0 || !host_integerp (len, 1)
3320 || !can_store_by_pieces (tree_low_cst (len, 1),
3321 builtin_strncpy_read_str,
3322 (void *) p, dest_align)
/* Emit the piecewise store: builtin_strncpy_read_str supplies string
   bytes and zero padding past the terminator.  */
3325 dest_mem = get_memory_rtx (dest, len);
3326 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3327 builtin_strncpy_read_str,
3328 (void *) p, dest_align, 0);
/* strncpy returns DEST: materialize its address in ptr_mode.  */
3329 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3330 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3337 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3338 bytes from constant string DATA + OFFSET and return it as target
/* DATA here is a pointer to the single fill byte; OFFSET is irrelevant
   because every position gets the same value.  */
3342 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3343 enum machine_mode mode)
3345 const char *c = (const char *) data;
/* Build a MODE-sized buffer filled with the byte and read it back as
   an rtx constant.  */
3346 char *p = alloca (GET_MODE_SIZE (mode));
3348 memset (p, *c, GET_MODE_SIZE (mode));
3350 return c_readstr (p, mode);
3353 /* Callback routine for store_by_pieces. Return the RTL of a register
3354 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3355 char value given in the RTL register data. For example, if mode is
3356 4 bytes wide, return the RTL for 0x01010101*data. */
/* NOTE(review): local declarations are elided from this excerpt;
   residual original-file line numbers mark the gaps.  */
3359 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3360 enum machine_mode mode)
3366 size = GET_MODE_SIZE (mode);
/* COEFF = 0x0101...01 in MODE, built by reading a buffer of 1-bytes.  */
3371 memset (p, 1, size);
3372 coeff = c_readstr (p, mode);
/* Replicate the byte by multiplying: byte * 0x0101...01.  */
3374 target = convert_to_mode (mode, (rtx) data, 1);
3375 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3376 return force_reg (mode, target);
3379 /* Expand expression EXP, which is a call to the memset builtin. Return 0
3380 if we failed the caller should emit a normal call, otherwise try to get
3381 the result in TARGET, if convenient (and in mode MODE if that's
/* NOTE(review): interior lines are elided from this excerpt (including
   the fourth parameter's name, presumably ORIG_EXP given its uses
   below); residual original-file line numbers mark the gaps.  */
3385 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode,
/* Expect (void *, int, size_t).  */
3388 if (!validate_arglist (arglist,
3389 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3393 tree dest = TREE_VALUE (arglist);
3394 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3395 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3397 enum built_in_function fcode;
3399 unsigned int dest_align;
3400 rtx dest_mem, dest_addr, len_rtx;
3402 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3404 /* If DEST is not a pointer type, don't do this
3405 operation in-line. */
3406 if (dest_align == 0)
3409 /* If the LEN parameter is zero, return DEST. */
3410 if (integer_zerop (len))
3412 /* Evaluate and ignore VAL in case it has side-effects. */
3413 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3414 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3417 /* Stabilize the arguments in case we fail. */
3418 dest = builtin_save_expr (dest);
3419 val = builtin_save_expr (val);
3420 len = builtin_save_expr (len);
3422 len_rtx = expand_normal (len);
3423 dest_mem = get_memory_rtx (dest, len);
/* Case 1: the fill value is not a compile-time constant.  Expand it to
   an rtx and narrow it to unsigned char.  */
3425 if (TREE_CODE (val) != INTEGER_CST)
3429 val_rtx = expand_normal (val);
3430 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3433 /* Assume that we can memset by pieces if we can store the
3434 * the coefficients by pieces (in the required modes).
3435 * We can't pass builtin_memset_gen_str as that emits RTL. */
/* Feasibility is tested with the constant-byte reader, but the actual
   store uses builtin_memset_gen_str on the runtime value.  */
3437 if (host_integerp (len, 1)
3438 && !(optimize_size && tree_low_cst (len, 1) > 1)
3439 && can_store_by_pieces (tree_low_cst (len, 1),
3440 builtin_memset_read_str, &c, dest_align))
3442 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3444 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3445 builtin_memset_gen_str, val_rtx, dest_align, 0);
/* Otherwise try the target's setmem pattern; on failure the elided
   code presumably falls through to the library-call path below.  */
3447 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
/* memset returns DEST: materialize its address in ptr_mode.  */
3451 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3452 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Case 2: VAL is a constant.  Reduce it to a host char C; punt if
   that fails.  */
3456 if (target_char_cast (val, &c))
/* Nonzero constant fill: store by pieces with the constant reader,
   or fall back to the setmem pattern.  */
3461 if (host_integerp (len, 1)
3462 && !(optimize_size && tree_low_cst (len, 1) > 1)
3463 && can_store_by_pieces (tree_low_cst (len, 1),
3464 builtin_memset_read_str, &c, dest_align))
3465 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3466 builtin_memset_read_str, &c, dest_align, 0);
3467 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3471 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3472 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Case 3 (reached when C == 0, per the surrounding elided control
   flow — confirm against the full file): clear the block, preserving
   any tail-call marking from the original call expression.  */
3476 set_mem_align (dest_mem, dest_align);
3477 dest_addr = clear_storage (dest_mem, len_rtx,
3478 CALL_EXPR_TAILCALL (orig_exp)
3479 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
3483 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3484 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* Fallback: emit a real call to the original builtin (memset or, when
   this expansion was reached from bzero, bzero itself), rebuilding the
   argument list from the stabilized operands.  */
3490 fndecl = get_callee_fndecl (orig_exp);
3491 fcode = DECL_FUNCTION_CODE (fndecl);
3492 gcc_assert (fcode == BUILT_IN_MEMSET || fcode == BUILT_IN_BZERO);
3493 arglist = build_tree_list (NULL_TREE, len);
3494 if (fcode == BUILT_IN_MEMSET)
3495 arglist = tree_cons (NULL_TREE, val, arglist);
3496 arglist = tree_cons (NULL_TREE, dest, arglist);
3497 fn = build_function_call_expr (fndecl, arglist);
3498 if (TREE_CODE (fn) == CALL_EXPR)
3499 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3500 return expand_call (fn, target, target == const0_rtx);
3504 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3505 if we failed the caller should emit a normal call. */
/* NOTE(review): some lines are elided from this excerpt; residual
   original-file line numbers mark the gaps.  */
3508 expand_builtin_bzero (tree exp)
3510 tree arglist = TREE_OPERAND (exp, 1);
3511 tree dest, size, newarglist;
/* Expect (void *, size_t).  */
3513 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3516 dest = TREE_VALUE (arglist);
3517 size = TREE_VALUE (TREE_CHAIN (arglist));
3519 /* New argument list transforming bzero(ptr x, int y) to
3520 memset(ptr x, int 0, size_t y). This is done this way
3521 so that if it isn't expanded inline, we fallback to
3522 calling bzero instead of memset. */
3524 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3525 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3526 newarglist = tree_cons (NULL_TREE, dest, newarglist);
/* Delegate to the memset expander; const0_rtx marks the return value
   as ignored (bzero returns void), and EXP lets the fallback path emit
   a bzero call rather than memset.  */
3528 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode, exp);
3531 /* Expand expression EXP, which is a call to the memcmp built-in function.
3532 ARGLIST is the argument list for this call. Return 0 if we failed and the
3533 caller should emit a normal call, otherwise try to get the result in
3534 TARGET, if convenient (and in mode MODE, if that's convenient). */
3537 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3538 enum machine_mode mode)
3540 if (!validate_arglist (arglist,
3541 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3545 tree result = fold_builtin_memcmp (arglist);
3547 return expand_expr (result, target, mode, EXPAND_NORMAL);
3550 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3552 tree arg1 = TREE_VALUE (arglist);
3553 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3554 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3555 rtx arg1_rtx, arg2_rtx, arg3_rtx;