1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
51 #ifndef PAD_VARARGS_DOWN
52 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
55 /* Define the names of the builtin function types and codes. */
56 const char *const built_in_class_names[4]
57 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
59 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
60 const char * built_in_names[(int) END_BUILTINS] =
62 #include "builtins.def"
66 /* Setup an array of _DECL trees, make sure each element is
67 initialized to NULL_TREE. */
68 tree built_in_decls[(int) END_BUILTINS];
69 /* Declarations used when constructing the builtin implicitly in the compiler.
70 It may be NULL_TREE when this is invalid (for instance runtime is not
71 required to implement the function call in all cases). */
72 tree implicit_built_in_decls[(int) END_BUILTINS];
74 static int get_pointer_alignment (tree, unsigned int);
75 static const char *c_getstr (tree);
76 static rtx c_readstr (const char *, enum machine_mode);
77 static int target_char_cast (tree, char *);
78 static rtx get_memory_rtx (tree, tree);
79 static tree build_string_literal (int, const char *);
80 static int apply_args_size (void);
81 static int apply_result_size (void);
82 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
83 static rtx result_vector (int, rtx);
85 static rtx expand_builtin_setjmp (tree, rtx);
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
99 static rtx expand_builtin_args_info (tree);
100 static rtx expand_builtin_next_arg (void);
101 static rtx expand_builtin_va_start (tree);
102 static rtx expand_builtin_va_end (tree);
103 static rtx expand_builtin_va_copy (tree);
104 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
105 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
106 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
107 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
108 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
109 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int);
114 static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode, tree);
115 static rtx expand_builtin_bcopy (tree);
116 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
117 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
118 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
120 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
121 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memset (tree, rtx, enum machine_mode, tree);
123 static rtx expand_builtin_bzero (tree);
124 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_strstr (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strpbrk (tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strchr (tree, tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strrchr (tree, tree, rtx, enum machine_mode);
129 static rtx expand_builtin_alloca (tree, rtx);
130 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
131 static rtx expand_builtin_frame_address (tree, tree);
132 static rtx expand_builtin_fputs (tree, rtx, bool);
133 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
134 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
135 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
136 static tree stabilize_va_list (tree, int);
137 static rtx expand_builtin_expect (tree, rtx);
138 static tree fold_builtin_constant_p (tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (tree);
141 static tree fold_builtin_inf (tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static int validate_arglist (tree, ...);
144 static bool integer_valued_real_p (tree);
145 static tree fold_trunc_transparent_mathfn (tree, tree);
146 static bool readonly_data_expr (tree);
147 static rtx expand_builtin_fabs (tree, rtx, rtx);
148 static rtx expand_builtin_signbit (tree, rtx);
149 static tree fold_builtin_cabs (tree, tree);
150 static tree fold_builtin_sqrt (tree, tree);
151 static tree fold_builtin_cbrt (tree, tree);
152 static tree fold_builtin_pow (tree, tree, tree);
153 static tree fold_builtin_powi (tree, tree, tree);
154 static tree fold_builtin_sin (tree);
155 static tree fold_builtin_cos (tree, tree, tree);
156 static tree fold_builtin_tan (tree);
157 static tree fold_builtin_atan (tree, tree);
158 static tree fold_builtin_trunc (tree, tree);
159 static tree fold_builtin_floor (tree, tree);
160 static tree fold_builtin_ceil (tree, tree);
161 static tree fold_builtin_round (tree, tree);
162 static tree fold_builtin_int_roundingfn (tree, tree);
163 static tree fold_builtin_bitop (tree, tree);
164 static tree fold_builtin_memcpy (tree, tree);
165 static tree fold_builtin_mempcpy (tree, tree, int);
166 static tree fold_builtin_memmove (tree, tree);
167 static tree fold_builtin_strchr (tree, tree);
168 static tree fold_builtin_memcmp (tree);
169 static tree fold_builtin_strcmp (tree);
170 static tree fold_builtin_strncmp (tree);
171 static tree fold_builtin_signbit (tree, tree);
172 static tree fold_builtin_copysign (tree, tree, tree);
173 static tree fold_builtin_isascii (tree);
174 static tree fold_builtin_toascii (tree);
175 static tree fold_builtin_isdigit (tree);
176 static tree fold_builtin_fabs (tree, tree);
177 static tree fold_builtin_abs (tree, tree);
178 static tree fold_builtin_unordered_cmp (tree, tree, enum tree_code,
180 static tree fold_builtin_1 (tree, tree, bool);
182 static tree fold_builtin_strpbrk (tree, tree);
183 static tree fold_builtin_strstr (tree, tree);
184 static tree fold_builtin_strrchr (tree, tree);
185 static tree fold_builtin_strcat (tree);
186 static tree fold_builtin_strncat (tree);
187 static tree fold_builtin_strspn (tree);
188 static tree fold_builtin_strcspn (tree);
189 static tree fold_builtin_sprintf (tree, int);
191 static rtx expand_builtin_object_size (tree);
192 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
193 enum built_in_function);
194 static void maybe_emit_chk_warning (tree, enum built_in_function);
195 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
196 static tree fold_builtin_object_size (tree);
197 static tree fold_builtin_strcat_chk (tree, tree);
198 static tree fold_builtin_strncat_chk (tree, tree);
199 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
200 static tree fold_builtin_printf (tree, tree, bool, enum built_in_function);
201 static tree fold_builtin_fprintf (tree, tree, bool, enum built_in_function);
203 /* Return true if NODE should be considered for inline expansion regardless
204 of the optimization level. This means whenever a function is invoked with
205 its "internal" name, which normally contains the prefix "__builtin". */
207 static bool called_as_built_in (tree node)
209 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
210 if (strncmp (name, "__builtin_", 10) == 0)
212 if (strncmp (name, "__sync_", 7) == 0)
217 /* Return the alignment in bits of EXP, a pointer valued expression.
218 But don't return more than MAX_ALIGN no matter what.
219 The alignment returned is, by default, the alignment of the thing that
220 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
222 Otherwise, look at the expression to see if we can do better, i.e., if the
223 expression is actually pointing at an object whose alignment is tighter. */
226 get_pointer_alignment (tree exp, unsigned int max_align)
228 unsigned int align, inner;
230 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
233 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
234 align = MIN (align, max_align);
238 switch (TREE_CODE (exp))
242 case NON_LVALUE_EXPR:
243 exp = TREE_OPERAND (exp, 0);
244 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
247 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
248 align = MIN (inner, max_align);
252 /* If sum of pointer + int, restrict our maximum alignment to that
253 imposed by the integer. If not, we can't do any better than
255 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
258 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
259 & (max_align / BITS_PER_UNIT - 1))
263 exp = TREE_OPERAND (exp, 0);
267 /* See what we are pointing at and look at its alignment. */
268 exp = TREE_OPERAND (exp, 0);
269 if (TREE_CODE (exp) == FUNCTION_DECL)
270 align = FUNCTION_BOUNDARY;
271 else if (DECL_P (exp))
272 align = DECL_ALIGN (exp);
273 #ifdef CONSTANT_ALIGNMENT
274 else if (CONSTANT_CLASS_P (exp))
275 align = CONSTANT_ALIGNMENT (exp, align);
277 return MIN (align, max_align);
285 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
286 way, because it could contain a zero byte in the middle.
287 TREE_STRING_LENGTH is the size of the character array, not the string.
289 ONLY_VALUE should be nonzero if the result is not going to be emitted
290 into the instruction stream and zero if it is going to be expanded.
291 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
292 is returned, otherwise NULL, since
293 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
294 evaluate the side-effects.
296 The value returned is of type `ssizetype'.
298 Unfortunately, string_constant can't access the values of const char
299 arrays with initializers, so neither can we do so here. */
302 c_strlen (tree src, int only_value)
305 HOST_WIDE_INT offset;
310 if (TREE_CODE (src) == COND_EXPR
311 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
315 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
316 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
317 if (tree_int_cst_equal (len1, len2))
321 if (TREE_CODE (src) == COMPOUND_EXPR
322 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
323 return c_strlen (TREE_OPERAND (src, 1), only_value);
325 src = string_constant (src, &offset_node);
329 max = TREE_STRING_LENGTH (src) - 1;
330 ptr = TREE_STRING_POINTER (src);
332 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
334 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
335 compute the offset to the following null if we don't know where to
336 start searching for it. */
339 for (i = 0; i < max; i++)
343 /* We don't know the starting offset, but we do know that the string
344 has no internal zero bytes. We can assume that the offset falls
345 within the bounds of the string; otherwise, the programmer deserves
346 what he gets. Subtract the offset from the length of the string,
347 and return that. This would perhaps not be valid if we were dealing
348 with named arrays in addition to literal string constants. */
350 return size_diffop (size_int (max), offset_node);
353 /* We have a known offset into the string. Start searching there for
354 a null character if we can represent it as a single HOST_WIDE_INT. */
355 if (offset_node == 0)
357 else if (! host_integerp (offset_node, 0))
360 offset = tree_low_cst (offset_node, 0);
362 /* If the offset is known to be out of bounds, warn, and call strlen at
364 if (offset < 0 || offset > max)
366 warning (0, "offset outside bounds of constant string");
370 /* Use strlen to search for the first zero byte. Since any strings
371 constructed with build_string will have nulls appended, we win even
372 if we get handed something like (char[4])"abcd".
374 Since OFFSET is our starting index into the string, no further
375 calculation is needed. */
376 return ssize_int (strlen (ptr + offset));
379 /* Return a char pointer for a C string if it is a string constant
380 or sum of string constant and integer constant. */
387 src = string_constant (src, &offset_node);
391 if (offset_node == 0)
392 return TREE_STRING_POINTER (src);
393 else if (!host_integerp (offset_node, 1)
394 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
397 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
400 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
401 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
404 c_readstr (const char *str, enum machine_mode mode)
410 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
415 for (i = 0; i < GET_MODE_SIZE (mode); i++)
418 if (WORDS_BIG_ENDIAN)
419 j = GET_MODE_SIZE (mode) - i - 1;
420 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
421 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
422 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
424 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
427 ch = (unsigned char) str[i];
428 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
430 return immed_double_const (c[0], c[1], mode);
433 /* Cast a target constant CST to target CHAR and if that value fits into
434 host char type, return zero and put that value into variable pointed to by
438 target_char_cast (tree cst, char *p)
440 unsigned HOST_WIDE_INT val, hostval;
442 if (!host_integerp (cst, 1)
443 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
446 val = tree_low_cst (cst, 1);
447 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
448 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
451 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
452 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
461 /* Similar to save_expr, but assumes that arbitrary code is not executed
462 in between the multiple evaluations. In particular, we assume that a
463 non-addressable local variable will not be modified. */
466 builtin_save_expr (tree exp)
468 if (TREE_ADDRESSABLE (exp) == 0
469 && (TREE_CODE (exp) == PARM_DECL
470 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
473 return save_expr (exp);
476 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
477 times to get the address of either a higher stack frame, or a return
478 address located within it (depending on FNDECL_CODE). */
481 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
485 #ifdef INITIAL_FRAME_ADDRESS_RTX
486 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
490 /* For a zero count, we don't care what frame address we return, so frame
491 pointer elimination is OK, and using the soft frame pointer is OK.
492 For a non-zero count, we require a stable offset from the current frame
493 pointer to the previous one, so we must use the hard frame pointer, and
494 we must disable frame pointer elimination. */
496 tem = frame_pointer_rtx;
499 tem = hard_frame_pointer_rtx;
501 /* Tell reload not to eliminate the frame pointer. */
502 current_function_accesses_prior_frames = 1;
506 /* Some machines need special handling before we can access
507 arbitrary frames. For example, on the sparc, we must first flush
508 all register windows to the stack. */
509 #ifdef SETUP_FRAME_ADDRESSES
511 SETUP_FRAME_ADDRESSES ();
514 /* On the sparc, the return address is not in the frame, it is in a
515 register. There is no way to access it off of the current frame
516 pointer, but it can be accessed off the previous frame pointer by
517 reading the value from the register window save area. */
518 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
519 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
523 /* Scan back COUNT frames to the specified frame. */
524 for (i = 0; i < count; i++)
526 /* Assume the dynamic chain pointer is in the word that the
527 frame address points to, unless otherwise specified. */
528 #ifdef DYNAMIC_CHAIN_ADDRESS
529 tem = DYNAMIC_CHAIN_ADDRESS (tem);
531 tem = memory_address (Pmode, tem);
532 tem = gen_rtx_MEM (Pmode, tem);
533 set_mem_alias_set (tem, get_frame_alias_set ());
534 tem = copy_to_reg (tem);
537 /* For __builtin_frame_address, return what we've got. */
538 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
541 /* For __builtin_return_address, Get the return address from that
543 #ifdef RETURN_ADDR_RTX
544 tem = RETURN_ADDR_RTX (count, tem);
546 tem = memory_address (Pmode,
547 plus_constant (tem, GET_MODE_SIZE (Pmode)));
548 tem = gen_rtx_MEM (Pmode, tem);
549 set_mem_alias_set (tem, get_frame_alias_set ());
554 /* Alias set used for setjmp buffer. */
555 static HOST_WIDE_INT setjmp_alias_set = -1;
557 /* Construct the leading half of a __builtin_setjmp call. Control will
558 return to RECEIVER_LABEL. This is used directly by sjlj exception
562 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
564 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
568 if (setjmp_alias_set == -1)
569 setjmp_alias_set = new_alias_set ();
571 buf_addr = convert_memory_address (Pmode, buf_addr);
573 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
575 /* We store the frame pointer and the address of receiver_label in
576 the buffer and use the rest of it for the stack save area, which
577 is machine-dependent. */
579 mem = gen_rtx_MEM (Pmode, buf_addr);
580 set_mem_alias_set (mem, setjmp_alias_set);
581 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
583 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
584 set_mem_alias_set (mem, setjmp_alias_set);
586 emit_move_insn (validize_mem (mem),
587 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
589 stack_save = gen_rtx_MEM (sa_mode,
590 plus_constant (buf_addr,
591 2 * GET_MODE_SIZE (Pmode)));
592 set_mem_alias_set (stack_save, setjmp_alias_set);
593 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
595 /* If there is further processing to do, do it. */
596 #ifdef HAVE_builtin_setjmp_setup
597 if (HAVE_builtin_setjmp_setup)
598 emit_insn (gen_builtin_setjmp_setup (buf_addr));
601 /* Tell optimize_save_area_alloca that extra work is going to
602 need to go on during alloca. */
603 current_function_calls_setjmp = 1;
605 /* Set this so all the registers get saved in our frame; we need to be
606 able to copy the saved values for any registers from frames we unwind. */
607 current_function_has_nonlocal_label = 1;
610 /* Construct the trailing part of a __builtin_setjmp call.
611 This is used directly by sjlj exception handling code. */
614 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
616 /* Clobber the FP when we get here, so we have to make sure it's
617 marked as used by this function. */
618 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
620 /* Mark the static chain as clobbered here so life information
621 doesn't get messed up for it. */
622 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
624 /* Now put in the code to restore the frame pointer, and argument
625 pointer, if needed. */
626 #ifdef HAVE_nonlocal_goto
627 if (! HAVE_nonlocal_goto)
629 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
631 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
632 if (fixed_regs[ARG_POINTER_REGNUM])
634 #ifdef ELIMINABLE_REGS
636 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
638 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
639 if (elim_regs[i].from == ARG_POINTER_REGNUM
640 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
643 if (i == ARRAY_SIZE (elim_regs))
646 /* Now restore our arg pointer from the address at which it
647 was saved in our stack frame. */
648 emit_move_insn (virtual_incoming_args_rtx,
649 copy_to_reg (get_arg_pointer_save_area (cfun)));
654 #ifdef HAVE_builtin_setjmp_receiver
655 if (HAVE_builtin_setjmp_receiver)
656 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
659 #ifdef HAVE_nonlocal_goto_receiver
660 if (HAVE_nonlocal_goto_receiver)
661 emit_insn (gen_nonlocal_goto_receiver ());
666 /* @@@ This is a kludge. Not all machine descriptions define a blockage
667 insn, but we must not allow the code we just generated to be reordered
668 by scheduling. Specifically, the update of the frame pointer must
669 happen immediately, not later. So emit an ASM_INPUT to act as blockage
671 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
674 /* __builtin_setjmp is passed a pointer to an array of five words (not
675 all will be used on all machines). It operates similarly to the C
676 library function of the same name, but is more efficient. Much of
677 the code below (and for longjmp) is copied from the handling of
680 NOTE: This is intended for use by GNAT and the exception handling
681 scheme in the compiler and will only work in the method used by
685 expand_builtin_setjmp (tree arglist, rtx target)
687 rtx buf_addr, next_lab, cont_lab;
689 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
692 if (target == 0 || !REG_P (target)
693 || REGNO (target) < FIRST_PSEUDO_REGISTER)
694 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
696 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
698 next_lab = gen_label_rtx ();
699 cont_lab = gen_label_rtx ();
701 expand_builtin_setjmp_setup (buf_addr, next_lab);
703 /* Set TARGET to zero and branch to the continue label. Use emit_jump to
704 ensure that pending stack adjustments are flushed. */
705 emit_move_insn (target, const0_rtx);
706 emit_jump (cont_lab);
708 emit_label (next_lab);
710 expand_builtin_setjmp_receiver (next_lab);
712 /* Set TARGET to one. */
713 emit_move_insn (target, const1_rtx);
714 emit_label (cont_lab);
716 /* Tell flow about the strange goings on. Putting `next_lab' on
717 `nonlocal_goto_handler_labels' to indicates that function
718 calls may traverse the arc back to this label. */
720 current_function_has_nonlocal_label = 1;
721 nonlocal_goto_handler_labels
722 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
727 /* __builtin_longjmp is passed a pointer to an array of five words (not
728 all will be used on all machines). It operates similarly to the C
729 library function of the same name, but is more efficient. Much of
730 the code below is copied from the handling of non-local gotos.
732 NOTE: This is intended for use by GNAT and the exception handling
733 scheme in the compiler and will only work in the method used by
737 expand_builtin_longjmp (rtx buf_addr, rtx value)
739 rtx fp, lab, stack, insn, last;
740 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
742 if (setjmp_alias_set == -1)
743 setjmp_alias_set = new_alias_set ();
745 buf_addr = convert_memory_address (Pmode, buf_addr);
747 buf_addr = force_reg (Pmode, buf_addr);
749 /* We used to store value in static_chain_rtx, but that fails if pointers
750 are smaller than integers. We instead require that the user must pass
751 a second argument of 1, because that is what builtin_setjmp will
752 return. This also makes EH slightly more efficient, since we are no
753 longer copying around a value that we don't care about. */
754 gcc_assert (value == const1_rtx);
756 last = get_last_insn ();
757 #ifdef HAVE_builtin_longjmp
758 if (HAVE_builtin_longjmp)
759 emit_insn (gen_builtin_longjmp (buf_addr));
763 fp = gen_rtx_MEM (Pmode, buf_addr);
764 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
765 GET_MODE_SIZE (Pmode)));
767 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
768 2 * GET_MODE_SIZE (Pmode)));
769 set_mem_alias_set (fp, setjmp_alias_set);
770 set_mem_alias_set (lab, setjmp_alias_set);
771 set_mem_alias_set (stack, setjmp_alias_set);
773 /* Pick up FP, label, and SP from the block and jump. This code is
774 from expand_goto in stmt.c; see there for detailed comments. */
775 #if HAVE_nonlocal_goto
776 if (HAVE_nonlocal_goto)
777 /* We have to pass a value to the nonlocal_goto pattern that will
778 get copied into the static_chain pointer, but it does not matter
779 what that value is, because builtin_setjmp does not use it. */
780 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
784 lab = copy_to_reg (lab);
786 emit_insn (gen_rtx_CLOBBER (VOIDmode,
787 gen_rtx_MEM (BLKmode,
788 gen_rtx_SCRATCH (VOIDmode))));
789 emit_insn (gen_rtx_CLOBBER (VOIDmode,
790 gen_rtx_MEM (BLKmode,
791 hard_frame_pointer_rtx)));
793 emit_move_insn (hard_frame_pointer_rtx, fp);
794 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
796 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
797 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
798 emit_indirect_jump (lab);
802 /* Search backwards and mark the jump insn as a non-local goto.
803 Note that this precludes the use of __builtin_longjmp to a
804 __builtin_setjmp target in the same function. However, we've
805 already cautioned the user that these functions are for
806 internal exception handling use only. */
807 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
809 gcc_assert (insn != last);
813 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
817 else if (CALL_P (insn))
822 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
823 and the address of the save area. */
826 expand_builtin_nonlocal_goto (tree arglist)
828 tree t_label, t_save_area;
829 rtx r_label, r_save_area, r_fp, r_sp, insn;
831 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
834 t_label = TREE_VALUE (arglist);
835 arglist = TREE_CHAIN (arglist);
836 t_save_area = TREE_VALUE (arglist);
838 r_label = expand_expr (t_label, NULL_RTX, VOIDmode, 0);
839 r_label = convert_memory_address (Pmode, r_label);
840 r_save_area = expand_expr (t_save_area, NULL_RTX, VOIDmode, 0);
841 r_save_area = convert_memory_address (Pmode, r_save_area);
842 r_fp = gen_rtx_MEM (Pmode, r_save_area);
843 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
844 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
846 current_function_has_nonlocal_goto = 1;
848 #if HAVE_nonlocal_goto
849 /* ??? We no longer need to pass the static chain value, afaik. */
850 if (HAVE_nonlocal_goto)
851 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
855 r_label = copy_to_reg (r_label);
857 emit_insn (gen_rtx_CLOBBER (VOIDmode,
858 gen_rtx_MEM (BLKmode,
859 gen_rtx_SCRATCH (VOIDmode))));
861 emit_insn (gen_rtx_CLOBBER (VOIDmode,
862 gen_rtx_MEM (BLKmode,
863 hard_frame_pointer_rtx)));
865 /* Restore frame pointer for containing function.
866 This sets the actual hard register used for the frame pointer
867 to the location of the function's incoming static chain info.
868 The non-local goto handler will then adjust it to contain the
869 proper value and reload the argument pointer, if needed. */
870 emit_move_insn (hard_frame_pointer_rtx, r_fp);
871 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
873 /* USE of hard_frame_pointer_rtx added for consistency;
874 not clear if really needed. */
875 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
876 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
877 emit_indirect_jump (r_label);
880 /* Search backwards to the jump insn and mark it as a
882 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
886 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
887 const0_rtx, REG_NOTES (insn));
890 else if (CALL_P (insn))
897 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
898 (not all will be used on all machines) that was passed to __builtin_setjmp.
899 It updates the stack pointer in that block to correspond to the current
903 expand_builtin_update_setjmp_buf (rtx buf_addr)
905 enum machine_mode sa_mode = Pmode;
909 #ifdef HAVE_save_stack_nonlocal
910 if (HAVE_save_stack_nonlocal)
911 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
913 #ifdef STACK_SAVEAREA_MODE
914 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
918 = gen_rtx_MEM (sa_mode,
921 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
925 emit_insn (gen_setjmp ());
928 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
931 /* Expand a call to __builtin_prefetch. For a target that does not support
932 data prefetch, evaluate the memory address argument in case it has side
936 expand_builtin_prefetch (tree arglist)
938 tree arg0, arg1, arg2;
941 if (!validate_arglist (arglist, POINTER_TYPE, 0))
944 arg0 = TREE_VALUE (arglist);
945 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
946 zero (read) and argument 2 (locality) defaults to 3 (high degree of
948 if (TREE_CHAIN (arglist))
950 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
951 if (TREE_CHAIN (TREE_CHAIN (arglist)))
952 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
954 arg2 = build_int_cst (NULL_TREE, 3);
958 arg1 = integer_zero_node;
959 arg2 = build_int_cst (NULL_TREE, 3);
962 /* Argument 0 is an address. */
963 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
965 /* Argument 1 (read/write flag) must be a compile-time constant int. */
966 if (TREE_CODE (arg1) != INTEGER_CST)
968 error ("second argument to %<__builtin_prefetch%> must be a constant");
969 arg1 = integer_zero_node;
971 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
972 /* Argument 1 must be either zero or one. */
973 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
975 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
980 /* Argument 2 (locality) must be a compile-time constant int. */
981 if (TREE_CODE (arg2) != INTEGER_CST)
983 error ("third argument to %<__builtin_prefetch%> must be a constant");
984 arg2 = integer_zero_node;
986 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
987 /* Argument 2 must be 0, 1, 2, or 3. */
988 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
990 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
997 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
999 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1000 || (GET_MODE (op0) != Pmode))
1002 op0 = convert_memory_address (Pmode, op0);
1003 op0 = force_reg (Pmode, op0);
1005 emit_insn (gen_prefetch (op0, op1, op2));
1009 /* Don't do anything with direct references to volatile memory, but
1010 generate code to handle other side effects. */
1011 if (!MEM_P (op0) && side_effects_p (op0))
1015 /* Get a MEM rtx for expression EXP which is the address of an operand
1016 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1017 the maximum length of the block of memory that might be accessed or
1021 get_memory_rtx (tree exp, tree len)
1023 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1024 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1026 /* Get an expression we can use to find the attributes to assign to MEM.
1027 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1028 we can. First remove any nops. */
1029 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1030 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1031 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1032 exp = TREE_OPERAND (exp, 0);
1034 if (TREE_CODE (exp) == ADDR_EXPR)
1035 exp = TREE_OPERAND (exp, 0);
1036 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1037 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1041 /* Honor attributes derived from exp, except for the alias set
1042 (as builtin stringops may alias with anything) and the size
1043 (as stringops may access multiple array elements). */
1046 set_mem_attributes (mem, exp, 0);
1048 /* Allow the string and memory builtins to overflow from one
1049 field into another, see http://gcc.gnu.org/PR23561.
1050 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1051 memory accessed by the string or memory builtin will fit
1052 within the field. */
1053 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1055 tree mem_expr = MEM_EXPR (mem);
1056 HOST_WIDE_INT offset = -1, length = -1;
1059 while (TREE_CODE (inner) == ARRAY_REF
1060 || TREE_CODE (inner) == NOP_EXPR
1061 || TREE_CODE (inner) == CONVERT_EXPR
1062 || TREE_CODE (inner) == NON_LVALUE_EXPR
1063 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1064 || TREE_CODE (inner) == SAVE_EXPR)
1065 inner = TREE_OPERAND (inner, 0);
1067 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1069 if (MEM_OFFSET (mem)
1070 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1071 offset = INTVAL (MEM_OFFSET (mem));
1073 if (offset >= 0 && len && host_integerp (len, 0))
1074 length = tree_low_cst (len, 0);
1076 while (TREE_CODE (inner) == COMPONENT_REF)
1078 tree field = TREE_OPERAND (inner, 1);
1079 gcc_assert (! DECL_BIT_FIELD (field));
1080 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1081 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1084 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1085 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1088 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1089 /* If we can prove the memory starting at XEXP (mem, 0)
1090 and ending at XEXP (mem, 0) + LENGTH will fit into
1091 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1094 && offset + length <= size)
1099 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1100 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1101 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1109 mem_expr = TREE_OPERAND (mem_expr, 0);
1110 inner = TREE_OPERAND (inner, 0);
1113 if (mem_expr == NULL)
1115 if (mem_expr != MEM_EXPR (mem))
1117 set_mem_expr (mem, mem_expr);
1118 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1121 set_mem_alias_set (mem, 0);
1122 set_mem_size (mem, NULL_RTX);
1128 /* Built-in functions to perform an untyped call and return. */
1130 /* For each register that may be used for calling a function, this
1131 gives a mode used to copy the register's value. VOIDmode indicates
1132 the register is not used for calling a function. If the machine
1133 has register windows, this gives only the outbound registers.
1134 INCOMING_REGNO gives the corresponding inbound register. */
1135 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1137 /* For each register that may be used for returning values, this gives
1138 a mode used to copy the register's value. VOIDmode indicates the
1139 register is not used for returning values. If the machine has
1140 register windows, this gives only the outbound registers.
1141 INCOMING_REGNO gives the corresponding inbound register. */
1142 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1144 /* For each register that may be used for calling a function, this
1145 gives the offset of that register into the block returned by
1146 __builtin_apply_args. 0 indicates that the register is not
1147 used for calling a function. */
1148 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1150 /* Return the size required for the block returned by __builtin_apply_args,
1151 and initialize apply_args_mode. */
1154 apply_args_size (void)
1156 static int size = -1;
1159 enum machine_mode mode;
1161 /* The values computed by this function never change. */
1164 /* The first value is the incoming arg-pointer. */
1165 size = GET_MODE_SIZE (Pmode);
1167 /* The second value is the structure value address unless this is
1168 passed as an "invisible" first argument. */
1169 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1170 size += GET_MODE_SIZE (Pmode);
1172 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1173 if (FUNCTION_ARG_REGNO_P (regno))
1175 mode = reg_raw_mode[regno];
1177 gcc_assert (mode != VOIDmode);
1179 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1180 if (size % align != 0)
1181 size = CEIL (size, align) * align;
1182 apply_args_reg_offset[regno] = size;
1183 size += GET_MODE_SIZE (mode);
1184 apply_args_mode[regno] = mode;
1188 apply_args_mode[regno] = VOIDmode;
1189 apply_args_reg_offset[regno] = 0;
1195 /* Return the size required for the block returned by __builtin_apply,
1196 and initialize apply_result_mode. */
1199 apply_result_size (void)
1201 static int size = -1;
1203 enum machine_mode mode;
1205 /* The values computed by this function never change. */
1210 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1211 if (FUNCTION_VALUE_REGNO_P (regno))
1213 mode = reg_raw_mode[regno];
1215 gcc_assert (mode != VOIDmode);
1217 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1218 if (size % align != 0)
1219 size = CEIL (size, align) * align;
1220 size += GET_MODE_SIZE (mode);
1221 apply_result_mode[regno] = mode;
1224 apply_result_mode[regno] = VOIDmode;
1226 /* Allow targets that use untyped_call and untyped_return to override
1227 the size so that machine-specific information can be stored here. */
1228 #ifdef APPLY_RESULT_SIZE
1229 size = APPLY_RESULT_SIZE;
1235 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1236 /* Create a vector describing the result block RESULT. If SAVEP is true,
1237 the result block is used to save the values; otherwise it is used to
1238 restore the values. */
1241 result_vector (int savep, rtx result)
1243 int regno, size, align, nelts;
1244 enum machine_mode mode;
1246 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1249 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1250 if ((mode = apply_result_mode[regno]) != VOIDmode)
1252 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1253 if (size % align != 0)
1254 size = CEIL (size, align) * align;
1255 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1256 mem = adjust_address (result, mode, size);
1257 savevec[nelts++] = (savep
1258 ? gen_rtx_SET (VOIDmode, mem, reg)
1259 : gen_rtx_SET (VOIDmode, reg, mem));
1260 size += GET_MODE_SIZE (mode);
1262 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1264 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1266 /* Save the state required to perform an untyped call with the same
1267 arguments as were passed to the current function. */
1270 expand_builtin_apply_args_1 (void)
1273 int size, align, regno;
1274 enum machine_mode mode;
1275 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1277 /* Create a block where the arg-pointer, structure value address,
1278 and argument registers can be saved. */
1279 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1281 /* Walk past the arg-pointer and structure value address. */
1282 size = GET_MODE_SIZE (Pmode);
1283 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1284 size += GET_MODE_SIZE (Pmode);
1286 /* Save each register used in calling a function to the block. */
1287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1288 if ((mode = apply_args_mode[regno]) != VOIDmode)
1290 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1291 if (size % align != 0)
1292 size = CEIL (size, align) * align;
1294 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1296 emit_move_insn (adjust_address (registers, mode, size), tem);
1297 size += GET_MODE_SIZE (mode);
1300 /* Save the arg pointer to the block. */
1301 tem = copy_to_reg (virtual_incoming_args_rtx);
1302 #ifdef STACK_GROWS_DOWNWARD
1303 /* We need the pointer as the caller actually passed them to us, not
1304 as we might have pretended they were passed. Make sure it's a valid
1305 operand, as emit_move_insn isn't expected to handle a PLUS. */
1307 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1310 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1312 size = GET_MODE_SIZE (Pmode);
1314 /* Save the structure value address unless this is passed as an
1315 "invisible" first argument. */
1316 if (struct_incoming_value)
1318 emit_move_insn (adjust_address (registers, Pmode, size),
1319 copy_to_reg (struct_incoming_value));
1320 size += GET_MODE_SIZE (Pmode);
1323 /* Return the address of the block. */
1324 return copy_addr_to_reg (XEXP (registers, 0));
1327 /* __builtin_apply_args returns block of memory allocated on
1328 the stack into which is stored the arg pointer, structure
1329 value address, static chain, and all the registers that might
1330 possibly be used in performing a function call. The code is
1331 moved to the start of the function so the incoming values are
1335 expand_builtin_apply_args (void)
1337 /* Don't do __builtin_apply_args more than once in a function.
1338 Save the result of the first call and reuse it. */
1339 if (apply_args_value != 0)
1340 return apply_args_value;
1342 /* When this function is called, it means that registers must be
1343 saved on entry to this function. So we migrate the
1344 call to the first insn of this function. */
1349 temp = expand_builtin_apply_args_1 ();
1353 apply_args_value = temp;
1355 /* Put the insns after the NOTE that starts the function.
1356 If this is inside a start_sequence, make the outer-level insn
1357 chain current, so the code is placed at the start of the
1359 push_topmost_sequence ();
1360 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1361 pop_topmost_sequence ();
1366 /* Perform an untyped call and save the state required to perform an
1367 untyped return of whatever value was returned by the given function. */
1370 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1372 int size, align, regno;
1373 enum machine_mode mode;
1374 rtx incoming_args, result, reg, dest, src, call_insn;
1375 rtx old_stack_level = 0;
1376 rtx call_fusage = 0;
1377 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1379 arguments = convert_memory_address (Pmode, arguments);
1381 /* Create a block where the return registers can be saved. */
1382 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1384 /* Fetch the arg pointer from the ARGUMENTS block. */
1385 incoming_args = gen_reg_rtx (Pmode);
1386 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1387 #ifndef STACK_GROWS_DOWNWARD
1388 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1389 incoming_args, 0, OPTAB_LIB_WIDEN);
1392 /* Push a new argument block and copy the arguments. Do not allow
1393 the (potential) memcpy call below to interfere with our stack
1395 do_pending_stack_adjust ();
1398 /* Save the stack with nonlocal if available. */
1399 #ifdef HAVE_save_stack_nonlocal
1400 if (HAVE_save_stack_nonlocal)
1401 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1404 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1406 /* Allocate a block of memory onto the stack and copy the memory
1407 arguments to the outgoing arguments address. */
1408 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1409 dest = virtual_outgoing_args_rtx;
1410 #ifndef STACK_GROWS_DOWNWARD
1411 if (GET_CODE (argsize) == CONST_INT)
1412 dest = plus_constant (dest, -INTVAL (argsize));
1414 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1416 dest = gen_rtx_MEM (BLKmode, dest);
1417 set_mem_align (dest, PARM_BOUNDARY);
1418 src = gen_rtx_MEM (BLKmode, incoming_args);
1419 set_mem_align (src, PARM_BOUNDARY);
1420 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1422 /* Refer to the argument block. */
1424 arguments = gen_rtx_MEM (BLKmode, arguments);
1425 set_mem_align (arguments, PARM_BOUNDARY);
1427 /* Walk past the arg-pointer and structure value address. */
1428 size = GET_MODE_SIZE (Pmode);
1430 size += GET_MODE_SIZE (Pmode);
1432 /* Restore each of the registers previously saved. Make USE insns
1433 for each of these registers for use in making the call. */
1434 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1435 if ((mode = apply_args_mode[regno]) != VOIDmode)
1437 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1438 if (size % align != 0)
1439 size = CEIL (size, align) * align;
1440 reg = gen_rtx_REG (mode, regno);
1441 emit_move_insn (reg, adjust_address (arguments, mode, size));
1442 use_reg (&call_fusage, reg);
1443 size += GET_MODE_SIZE (mode);
1446 /* Restore the structure value address unless this is passed as an
1447 "invisible" first argument. */
1448 size = GET_MODE_SIZE (Pmode);
1451 rtx value = gen_reg_rtx (Pmode);
1452 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1453 emit_move_insn (struct_value, value);
1454 if (REG_P (struct_value))
1455 use_reg (&call_fusage, struct_value);
1456 size += GET_MODE_SIZE (Pmode);
1459 /* All arguments and registers used for the call are set up by now! */
1460 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1462 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1463 and we don't want to load it into a register as an optimization,
1464 because prepare_call_address already did it if it should be done. */
1465 if (GET_CODE (function) != SYMBOL_REF)
1466 function = memory_address (FUNCTION_MODE, function);
1468 /* Generate the actual call instruction and save the return value. */
1469 #ifdef HAVE_untyped_call
1470 if (HAVE_untyped_call)
1471 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1472 result, result_vector (1, result)));
1475 #ifdef HAVE_call_value
1476 if (HAVE_call_value)
1480 /* Locate the unique return register. It is not possible to
1481 express a call that sets more than one return register using
1482 call_value; use untyped_call for that. In fact, untyped_call
1483 only needs to save the return registers in the given block. */
1484 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1485 if ((mode = apply_result_mode[regno]) != VOIDmode)
1487 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1489 valreg = gen_rtx_REG (mode, regno);
1492 emit_call_insn (GEN_CALL_VALUE (valreg,
1493 gen_rtx_MEM (FUNCTION_MODE, function),
1494 const0_rtx, NULL_RTX, const0_rtx));
1496 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1502 /* Find the CALL insn we just emitted, and attach the register usage
1504 call_insn = last_call_insn ();
1505 add_function_usage_to (call_insn, call_fusage);
1507 /* Restore the stack. */
1508 #ifdef HAVE_save_stack_nonlocal
1509 if (HAVE_save_stack_nonlocal)
1510 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1513 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1517 /* Return the address of the result block. */
1518 result = copy_addr_to_reg (XEXP (result, 0));
1519 return convert_memory_address (ptr_mode, result);
1522 /* Perform an untyped return. */
1525 expand_builtin_return (rtx result)
1527 int size, align, regno;
1528 enum machine_mode mode;
1530 rtx call_fusage = 0;
1532 result = convert_memory_address (Pmode, result);
1534 apply_result_size ();
1535 result = gen_rtx_MEM (BLKmode, result);
1537 #ifdef HAVE_untyped_return
1538 if (HAVE_untyped_return)
1540 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1546 /* Restore the return value and note that each value is used. */
1548 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1549 if ((mode = apply_result_mode[regno]) != VOIDmode)
1551 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1552 if (size % align != 0)
1553 size = CEIL (size, align) * align;
1554 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1555 emit_move_insn (reg, adjust_address (result, mode, size));
1557 push_to_sequence (call_fusage);
1558 emit_insn (gen_rtx_USE (VOIDmode, reg));
1559 call_fusage = get_insns ();
1561 size += GET_MODE_SIZE (mode);
1564 /* Put the USE insns before the return. */
1565 emit_insn (call_fusage);
1567 /* Return whatever values was restored by jumping directly to the end
1569 expand_naked_return ();
1572 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1574 static enum type_class
1575 type_to_class (tree type)
1577 switch (TREE_CODE (type))
1579 case VOID_TYPE: return void_type_class;
1580 case INTEGER_TYPE: return integer_type_class;
1581 case CHAR_TYPE: return char_type_class;
1582 case ENUMERAL_TYPE: return enumeral_type_class;
1583 case BOOLEAN_TYPE: return boolean_type_class;
1584 case POINTER_TYPE: return pointer_type_class;
1585 case REFERENCE_TYPE: return reference_type_class;
1586 case OFFSET_TYPE: return offset_type_class;
1587 case REAL_TYPE: return real_type_class;
1588 case COMPLEX_TYPE: return complex_type_class;
1589 case FUNCTION_TYPE: return function_type_class;
1590 case METHOD_TYPE: return method_type_class;
1591 case RECORD_TYPE: return record_type_class;
1593 case QUAL_UNION_TYPE: return union_type_class;
1594 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1595 ? string_type_class : array_type_class);
1596 case LANG_TYPE: return lang_type_class;
1597 default: return no_type_class;
1601 /* Expand a call to __builtin_classify_type with arguments found in
1605 expand_builtin_classify_type (tree arglist)
1608 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1609 return GEN_INT (no_type_class);
1612 /* This helper macro, meant to be used in mathfn_built_in below,
1613 determines which among a set of three builtin math functions is
1614 appropriate for a given type mode. The `F' and `L' cases are
1615 automatically generated from the `double' case. */
1616 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1617 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1618 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1619 fcodel = BUILT_IN_MATHFN##L ; break;
1621 /* Return mathematic function equivalent to FN but operating directly
1622 on TYPE, if available. If we can't do the conversion, return zero. */
1624 mathfn_built_in (tree type, enum built_in_function fn)
1626 enum built_in_function fcode, fcodef, fcodel;
1630 CASE_MATHFN (BUILT_IN_ACOS)
1631 CASE_MATHFN (BUILT_IN_ACOSH)
1632 CASE_MATHFN (BUILT_IN_ASIN)
1633 CASE_MATHFN (BUILT_IN_ASINH)
1634 CASE_MATHFN (BUILT_IN_ATAN)
1635 CASE_MATHFN (BUILT_IN_ATAN2)
1636 CASE_MATHFN (BUILT_IN_ATANH)
1637 CASE_MATHFN (BUILT_IN_CBRT)
1638 CASE_MATHFN (BUILT_IN_CEIL)
1639 CASE_MATHFN (BUILT_IN_COPYSIGN)
1640 CASE_MATHFN (BUILT_IN_COS)
1641 CASE_MATHFN (BUILT_IN_COSH)
1642 CASE_MATHFN (BUILT_IN_DREM)
1643 CASE_MATHFN (BUILT_IN_ERF)
1644 CASE_MATHFN (BUILT_IN_ERFC)
1645 CASE_MATHFN (BUILT_IN_EXP)
1646 CASE_MATHFN (BUILT_IN_EXP10)
1647 CASE_MATHFN (BUILT_IN_EXP2)
1648 CASE_MATHFN (BUILT_IN_EXPM1)
1649 CASE_MATHFN (BUILT_IN_FABS)
1650 CASE_MATHFN (BUILT_IN_FDIM)
1651 CASE_MATHFN (BUILT_IN_FLOOR)
1652 CASE_MATHFN (BUILT_IN_FMA)
1653 CASE_MATHFN (BUILT_IN_FMAX)
1654 CASE_MATHFN (BUILT_IN_FMIN)
1655 CASE_MATHFN (BUILT_IN_FMOD)
1656 CASE_MATHFN (BUILT_IN_FREXP)
1657 CASE_MATHFN (BUILT_IN_GAMMA)
1658 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1659 CASE_MATHFN (BUILT_IN_HYPOT)
1660 CASE_MATHFN (BUILT_IN_ILOGB)
1661 CASE_MATHFN (BUILT_IN_INF)
1662 CASE_MATHFN (BUILT_IN_J0)
1663 CASE_MATHFN (BUILT_IN_J1)
1664 CASE_MATHFN (BUILT_IN_JN)
1665 CASE_MATHFN (BUILT_IN_LCEIL)
1666 CASE_MATHFN (BUILT_IN_LDEXP)
1667 CASE_MATHFN (BUILT_IN_LFLOOR)
1668 CASE_MATHFN (BUILT_IN_LGAMMA)
1669 CASE_MATHFN (BUILT_IN_LLCEIL)
1670 CASE_MATHFN (BUILT_IN_LLFLOOR)
1671 CASE_MATHFN (BUILT_IN_LLRINT)
1672 CASE_MATHFN (BUILT_IN_LLROUND)
1673 CASE_MATHFN (BUILT_IN_LOG)
1674 CASE_MATHFN (BUILT_IN_LOG10)
1675 CASE_MATHFN (BUILT_IN_LOG1P)
1676 CASE_MATHFN (BUILT_IN_LOG2)
1677 CASE_MATHFN (BUILT_IN_LOGB)
1678 CASE_MATHFN (BUILT_IN_LRINT)
1679 CASE_MATHFN (BUILT_IN_LROUND)
1680 CASE_MATHFN (BUILT_IN_MODF)
1681 CASE_MATHFN (BUILT_IN_NAN)
1682 CASE_MATHFN (BUILT_IN_NANS)
1683 CASE_MATHFN (BUILT_IN_NEARBYINT)
1684 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1685 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1686 CASE_MATHFN (BUILT_IN_POW)
1687 CASE_MATHFN (BUILT_IN_POWI)
1688 CASE_MATHFN (BUILT_IN_POW10)
1689 CASE_MATHFN (BUILT_IN_REMAINDER)
1690 CASE_MATHFN (BUILT_IN_REMQUO)
1691 CASE_MATHFN (BUILT_IN_RINT)
1692 CASE_MATHFN (BUILT_IN_ROUND)
1693 CASE_MATHFN (BUILT_IN_SCALB)
1694 CASE_MATHFN (BUILT_IN_SCALBLN)
1695 CASE_MATHFN (BUILT_IN_SCALBN)
1696 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1697 CASE_MATHFN (BUILT_IN_SIN)
1698 CASE_MATHFN (BUILT_IN_SINCOS)
1699 CASE_MATHFN (BUILT_IN_SINH)
1700 CASE_MATHFN (BUILT_IN_SQRT)
1701 CASE_MATHFN (BUILT_IN_TAN)
1702 CASE_MATHFN (BUILT_IN_TANH)
1703 CASE_MATHFN (BUILT_IN_TGAMMA)
1704 CASE_MATHFN (BUILT_IN_TRUNC)
1705 CASE_MATHFN (BUILT_IN_Y0)
1706 CASE_MATHFN (BUILT_IN_Y1)
1707 CASE_MATHFN (BUILT_IN_YN)
1713 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1714 return implicit_built_in_decls[fcode];
1715 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1716 return implicit_built_in_decls[fcodef];
1717 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1718 return implicit_built_in_decls[fcodel];
1723 /* If errno must be maintained, expand the RTL to check if the result,
1724 TARGET, of a built-in function call, EXP, is NaN, and if so set
1728 expand_errno_check (tree exp, rtx target)
1730 rtx lab = gen_label_rtx ();
1732 /* Test the result; if it is NaN, set errno=EDOM because
1733 the argument was not in the domain. */
1734 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1738 /* If this built-in doesn't throw an exception, set errno directly. */
1739 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1741 #ifdef GEN_ERRNO_RTX
1742 rtx errno_rtx = GEN_ERRNO_RTX;
1745 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1747 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1753 /* We can't set errno=EDOM directly; let the library call do it.
1754 Pop the arguments right away in case the call gets deleted. */
1756 expand_call (exp, target, 0);
1762 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1763 Return 0 if a normal call should be emitted rather than expanding the
1764 function in-line. EXP is the expression that is a call to the builtin
1765 function; if convenient, the result should be placed in TARGET.
1766 SUBTARGET may be used as the target for computing one of EXP's operands. */
1769 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1771 optab builtin_optab;
1772 rtx op0, insns, before_call;
1773 tree fndecl = get_callee_fndecl (exp);
1774 tree arglist = TREE_OPERAND (exp, 1);
1775 enum machine_mode mode;
1776 bool errno_set = false;
1779 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1782 arg = TREE_VALUE (arglist);
1784 switch (DECL_FUNCTION_CODE (fndecl))
1787 case BUILT_IN_SQRTF:
1788 case BUILT_IN_SQRTL:
1789 errno_set = ! tree_expr_nonnegative_p (arg);
1790 builtin_optab = sqrt_optab;
1795 errno_set = true; builtin_optab = exp_optab; break;
1796 case BUILT_IN_EXP10:
1797 case BUILT_IN_EXP10F:
1798 case BUILT_IN_EXP10L:
1799 case BUILT_IN_POW10:
1800 case BUILT_IN_POW10F:
1801 case BUILT_IN_POW10L:
1802 errno_set = true; builtin_optab = exp10_optab; break;
1804 case BUILT_IN_EXP2F:
1805 case BUILT_IN_EXP2L:
1806 errno_set = true; builtin_optab = exp2_optab; break;
1807 case BUILT_IN_EXPM1:
1808 case BUILT_IN_EXPM1F:
1809 case BUILT_IN_EXPM1L:
1810 errno_set = true; builtin_optab = expm1_optab; break;
1812 case BUILT_IN_LOGBF:
1813 case BUILT_IN_LOGBL:
1814 errno_set = true; builtin_optab = logb_optab; break;
1815 case BUILT_IN_ILOGB:
1816 case BUILT_IN_ILOGBF:
1817 case BUILT_IN_ILOGBL:
1818 errno_set = true; builtin_optab = ilogb_optab; break;
1822 errno_set = true; builtin_optab = log_optab; break;
1823 case BUILT_IN_LOG10:
1824 case BUILT_IN_LOG10F:
1825 case BUILT_IN_LOG10L:
1826 errno_set = true; builtin_optab = log10_optab; break;
1828 case BUILT_IN_LOG2F:
1829 case BUILT_IN_LOG2L:
1830 errno_set = true; builtin_optab = log2_optab; break;
1831 case BUILT_IN_LOG1P:
1832 case BUILT_IN_LOG1PF:
1833 case BUILT_IN_LOG1PL:
1834 errno_set = true; builtin_optab = log1p_optab; break;
1836 case BUILT_IN_ASINF:
1837 case BUILT_IN_ASINL:
1838 builtin_optab = asin_optab; break;
1840 case BUILT_IN_ACOSF:
1841 case BUILT_IN_ACOSL:
1842 builtin_optab = acos_optab; break;
1846 builtin_optab = tan_optab; break;
1848 case BUILT_IN_ATANF:
1849 case BUILT_IN_ATANL:
1850 builtin_optab = atan_optab; break;
1851 case BUILT_IN_FLOOR:
1852 case BUILT_IN_FLOORF:
1853 case BUILT_IN_FLOORL:
1854 builtin_optab = floor_optab; break;
1856 case BUILT_IN_CEILF:
1857 case BUILT_IN_CEILL:
1858 builtin_optab = ceil_optab; break;
1859 case BUILT_IN_TRUNC:
1860 case BUILT_IN_TRUNCF:
1861 case BUILT_IN_TRUNCL:
1862 builtin_optab = btrunc_optab; break;
1863 case BUILT_IN_ROUND:
1864 case BUILT_IN_ROUNDF:
1865 case BUILT_IN_ROUNDL:
1866 builtin_optab = round_optab; break;
1867 case BUILT_IN_NEARBYINT:
1868 case BUILT_IN_NEARBYINTF:
1869 case BUILT_IN_NEARBYINTL:
1870 builtin_optab = nearbyint_optab; break;
1872 case BUILT_IN_RINTF:
1873 case BUILT_IN_RINTL:
1874 builtin_optab = rint_optab; break;
1875 case BUILT_IN_LRINT:
1876 case BUILT_IN_LRINTF:
1877 case BUILT_IN_LRINTL:
1878 case BUILT_IN_LLRINT:
1879 case BUILT_IN_LLRINTF:
1880 case BUILT_IN_LLRINTL:
1881 builtin_optab = lrint_optab; break;
1886 /* Make a suitable register to place result in. */
1887 mode = TYPE_MODE (TREE_TYPE (exp));
1889 if (! flag_errno_math || ! HONOR_NANS (mode))
1892 /* Before working hard, check whether the instruction is available. */
1893 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1895 target = gen_reg_rtx (mode);
1897 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1898 need to expand the argument again. This way, we will not perform
1899 side-effects more the once. */
1900 narg = builtin_save_expr (arg);
1904 arglist = build_tree_list (NULL_TREE, arg);
1905 exp = build_function_call_expr (fndecl, arglist);
1908 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1912 /* Compute into TARGET.
1913 Set TARGET to wherever the result comes back. */
1914 target = expand_unop (mode, builtin_optab, op0, target, 0);
1919 expand_errno_check (exp, target);
1921 /* Output the entire sequence. */
1922 insns = get_insns ();
1928 /* If we were unable to expand via the builtin, stop the sequence
1929 (without outputting the insns) and call to the library function
1930 with the stabilized argument list. */
1934 before_call = get_last_insn ();
1936 target = expand_call (exp, target, target == const0_rtx);
1938 /* If this is a sqrt operation and we don't care about errno, try to
1939 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1940 This allows the semantics of the libcall to be visible to the RTL
1942 if (builtin_optab == sqrt_optab && !errno_set)
1944 /* Search backwards through the insns emitted by expand_call looking
1945 for the instruction with the REG_RETVAL note. */
1946 rtx last = get_last_insn ();
1947 while (last != before_call)
1949 if (find_reg_note (last, REG_RETVAL, NULL))
1951 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1952 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1953 two elements, i.e. symbol_ref(sqrt) and the operand. */
1955 && GET_CODE (note) == EXPR_LIST
1956 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1957 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1958 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1960 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1961 /* Check operand is a register with expected mode. */
1964 && GET_MODE (operand) == mode)
1966 /* Replace the REG_EQUAL note with a SQRT rtx. */
1967 rtx equiv = gen_rtx_SQRT (mode, operand);
1968 set_unique_reg_note (last, REG_EQUAL, equiv);
1973 last = PREV_INSN (last);
1980 /* Expand a call to the builtin binary math functions (pow and atan2).
1981 Return 0 if a normal call should be emitted rather than expanding the
1982 function in-line. EXP is the expression that is a call to the builtin
1983 function; if convenient, the result should be placed in TARGET.
1984 SUBTARGET may be used as the target for computing one of EXP's
1988 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1990 optab builtin_optab;
1991 rtx op0, op1, insns;
1992 int op1_type = REAL_TYPE;
1993 tree fndecl = get_callee_fndecl (exp);
1994 tree arglist = TREE_OPERAND (exp, 1);
1995 tree arg0, arg1, temp, narg;
1996 enum machine_mode mode;
1997 bool errno_set = true;
2000 if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXP)
2001 || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPF)
2002 || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPL))
2003 op1_type = INTEGER_TYPE;
2005 if (!validate_arglist (arglist, REAL_TYPE, op1_type, VOID_TYPE))
2008 arg0 = TREE_VALUE (arglist);
2009 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2011 switch (DECL_FUNCTION_CODE (fndecl))
2016 builtin_optab = pow_optab; break;
2017 case BUILT_IN_ATAN2:
2018 case BUILT_IN_ATAN2F:
2019 case BUILT_IN_ATAN2L:
2020 builtin_optab = atan2_optab; break;
2021 case BUILT_IN_LDEXP:
2022 case BUILT_IN_LDEXPF:
2023 case BUILT_IN_LDEXPL:
2024 builtin_optab = ldexp_optab; break;
2026 case BUILT_IN_FMODF:
2027 case BUILT_IN_FMODL:
2028 builtin_optab = fmod_optab; break;
2030 case BUILT_IN_DREMF:
2031 case BUILT_IN_DREML:
2032 builtin_optab = drem_optab; break;
2037 /* Make a suitable register to place result in. */
2038 mode = TYPE_MODE (TREE_TYPE (exp));
2040 /* Before working hard, check whether the instruction is available. */
2041 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2044 target = gen_reg_rtx (mode);
2046 if (! flag_errno_math || ! HONOR_NANS (mode))
2049 /* Always stabilize the argument list. */
2050 narg = builtin_save_expr (arg1);
2054 temp = build_tree_list (NULL_TREE, narg);
2058 temp = TREE_CHAIN (arglist);
2060 narg = builtin_save_expr (arg0);
2064 arglist = tree_cons (NULL_TREE, narg, temp);
2068 arglist = tree_cons (NULL_TREE, arg0, temp);
2071 exp = build_function_call_expr (fndecl, arglist);
2073 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
2074 op1 = expand_expr (arg1, 0, VOIDmode, 0);
2078 /* Compute into TARGET.
2079 Set TARGET to wherever the result comes back. */
2080 target = expand_binop (mode, builtin_optab, op0, op1,
2081 target, 0, OPTAB_DIRECT);
2083 /* If we were unable to expand via the builtin, stop the sequence
2084 (without outputting the insns) and call to the library function
2085 with the stabilized argument list. */
2089 return expand_call (exp, target, target == const0_rtx);
2093 expand_errno_check (exp, target);
2095 /* Output the entire sequence. */
2096 insns = get_insns ();
2103 /* Expand a call to the builtin sin and cos math functions.
2104 Return 0 if a normal call should be emitted rather than expanding the
2105 function in-line. EXP is the expression that is a call to the builtin
2106 function; if convenient, the result should be placed in TARGET.
2107 SUBTARGET may be used as the target for computing one of EXP's
2111 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2113 optab builtin_optab;
2115 tree fndecl = get_callee_fndecl (exp);
2116 tree arglist = TREE_OPERAND (exp, 1);
2117 enum machine_mode mode;
2118 bool errno_set = false;
2121 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2124 arg = TREE_VALUE (arglist);
2126 switch (DECL_FUNCTION_CODE (fndecl))
2134 builtin_optab = sincos_optab; break;
2139 /* Make a suitable register to place result in. */
2140 mode = TYPE_MODE (TREE_TYPE (exp));
2142 if (! flag_errno_math || ! HONOR_NANS (mode))
2145 /* Check if sincos insn is available, otherwise fallback
2146 to sin or cos insn. */
2147 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) {
2148 switch (DECL_FUNCTION_CODE (fndecl))
2153 builtin_optab = sin_optab; break;
2157 builtin_optab = cos_optab; break;
2163 /* Before working hard, check whether the instruction is available. */
2164 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2166 target = gen_reg_rtx (mode);
2168 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2169 need to expand the argument again. This way, we will not perform
2170 side-effects more the once. */
2171 narg = save_expr (arg);
2175 arglist = build_tree_list (NULL_TREE, arg);
2176 exp = build_function_call_expr (fndecl, arglist);
2179 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2183 /* Compute into TARGET.
2184 Set TARGET to wherever the result comes back. */
2185 if (builtin_optab == sincos_optab)
2189 switch (DECL_FUNCTION_CODE (fndecl))
2194 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2199 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2204 gcc_assert (result);
2208 target = expand_unop (mode, builtin_optab, op0, target, 0);
2214 expand_errno_check (exp, target);
2216 /* Output the entire sequence. */
2217 insns = get_insns ();
2223 /* If we were unable to expand via the builtin, stop the sequence
2224 (without outputting the insns) and call to the library function
2225 with the stabilized argument list. */
2229 target = expand_call (exp, target, target == const0_rtx);
2234 /* Expand a call to one of the builtin rounding functions (lfloor).
2235 If expanding via optab fails, lower expression to (int)(floor(x)).
2236 EXP is the expression that is a call to the builtin function;
2237 if convenient, the result should be placed in TARGET. SUBTARGET may
2238 be used as the target for computing one of EXP's operands. */
2241 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2243 optab builtin_optab;
2244 rtx op0, insns, tmp;
2245 tree fndecl = get_callee_fndecl (exp);
2246 tree arglist = TREE_OPERAND (exp, 1);
2247 enum built_in_function fallback_fn;
2248 tree fallback_fndecl;
2249 enum machine_mode mode;
2252 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2255 arg = TREE_VALUE (arglist);
2257 switch (DECL_FUNCTION_CODE (fndecl))
2259 case BUILT_IN_LCEIL:
2260 case BUILT_IN_LCEILF:
2261 case BUILT_IN_LCEILL:
2262 case BUILT_IN_LLCEIL:
2263 case BUILT_IN_LLCEILF:
2264 case BUILT_IN_LLCEILL:
2265 builtin_optab = lceil_optab;
2266 fallback_fn = BUILT_IN_CEIL;
2269 case BUILT_IN_LFLOOR:
2270 case BUILT_IN_LFLOORF:
2271 case BUILT_IN_LFLOORL:
2272 case BUILT_IN_LLFLOOR:
2273 case BUILT_IN_LLFLOORF:
2274 case BUILT_IN_LLFLOORL:
2275 builtin_optab = lfloor_optab;
2276 fallback_fn = BUILT_IN_FLOOR;
2283 /* Make a suitable register to place result in. */
2284 mode = TYPE_MODE (TREE_TYPE (exp));
2286 /* Before working hard, check whether the instruction is available. */
2287 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2289 target = gen_reg_rtx (mode);
2291 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2292 need to expand the argument again. This way, we will not perform
2293 side-effects more the once. */
2294 narg = builtin_save_expr (arg);
2298 arglist = build_tree_list (NULL_TREE, arg);
2299 exp = build_function_call_expr (fndecl, arglist);
2302 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2306 /* Compute into TARGET.
2307 Set TARGET to wherever the result comes back. */
2308 target = expand_unop (mode, builtin_optab, op0, target, 0);
2312 /* Output the entire sequence. */
2313 insns = get_insns ();
2319 /* If we were unable to expand via the builtin, stop the sequence
2320 (without outputting the insns). */
2324 /* Fall back to floating point rounding optab. */
2325 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2326 /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
2327 ??? Perhaps convert (int)floorf(x) into (int)floor((double)x). */
2328 gcc_assert (fallback_fndecl != NULL_TREE);
2329 exp = build_function_call_expr (fallback_fndecl, arglist);
2331 tmp = expand_builtin_mathfn (exp, NULL_RTX, NULL_RTX);
2333 /* Truncate the result of floating point optab to integer
2334 via expand_fix (). */
2335 target = gen_reg_rtx (mode);
2336 expand_fix (target, tmp, 0);
2341 /* To evaluate powi(x,n), the floating point value x raised to the
2342 constant integer exponent n, we use a hybrid algorithm that
2343 combines the "window method" with look-up tables. For an
2344 introduction to exponentiation algorithms and "addition chains",
2345 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2346 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2347 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2348 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2350 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2351 multiplications to inline before calling the system library's pow
2352 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2353 so this default never requires calling pow, powf or powl. */
2355 #ifndef POWI_MAX_MULTS
2356 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2359 /* The size of the "optimal power tree" lookup table. All
2360 exponents less than this value are simply looked up in the
2361 powi_table below. This threshold is also used to size the
2362 cache of pseudo registers that hold intermediate results. */
2363 #define POWI_TABLE_SIZE 256
2365 /* The size, in bits of the window, used in the "window method"
2366 exponentiation algorithm. This is equivalent to a radix of
2367 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2368 #define POWI_WINDOW_SIZE 3
2370 /* The following table is an efficient representation of an
2371 "optimal power tree". For each value, i, the corresponding
2372 value, j, in the table states than an optimal evaluation
2373 sequence for calculating pow(x,i) can be found by evaluating
2374 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2375 100 integers is given in Knuth's "Seminumerical algorithms". */
2377 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2379 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2380 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2381 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2382 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2383 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2384 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2385 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2386 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2387 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2388 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2389 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2390 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2391 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2392 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2393 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2394 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2395 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2396 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2397 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2398 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2399 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2400 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2401 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2402 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2403 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2404 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2405 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2406 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2407 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2408 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2409 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2410 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2414 /* Return the number of multiplications required to calculate
2415 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2416 subroutine of powi_cost. CACHE is an array indicating
2417 which exponents have already been calculated. */
2420 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2422 /* If we've already calculated this exponent, then this evaluation
2423 doesn't require any additional multiplications. */
2428 return powi_lookup_cost (n - powi_table[n], cache)
2429 + powi_lookup_cost (powi_table[n], cache) + 1;
2432 /* Return the number of multiplications required to calculate
2433 powi(x,n) for an arbitrary x, given the exponent N. This
2434 function needs to be kept in sync with expand_powi below. */
2437 powi_cost (HOST_WIDE_INT n)
2439 bool cache[POWI_TABLE_SIZE];
2440 unsigned HOST_WIDE_INT digit;
2441 unsigned HOST_WIDE_INT val;
2447 /* Ignore the reciprocal when calculating the cost. */
2448 val = (n < 0) ? -n : n;
2450 /* Initialize the exponent cache. */
2451 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2456 while (val >= POWI_TABLE_SIZE)
2460 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2461 result += powi_lookup_cost (digit, cache)
2462 + POWI_WINDOW_SIZE + 1;
2463 val >>= POWI_WINDOW_SIZE;
2472 return result + powi_lookup_cost (val, cache);
2475 /* Recursive subroutine of expand_powi. This function takes the array,
2476 CACHE, of already calculated exponents and an exponent N and returns
2477 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2480 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2482 unsigned HOST_WIDE_INT digit;
2486 if (n < POWI_TABLE_SIZE)
2491 target = gen_reg_rtx (mode);
2494 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2495 op1 = expand_powi_1 (mode, powi_table[n], cache);
2499 target = gen_reg_rtx (mode);
2500 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2501 op0 = expand_powi_1 (mode, n - digit, cache);
2502 op1 = expand_powi_1 (mode, digit, cache);
2506 target = gen_reg_rtx (mode);
2507 op0 = expand_powi_1 (mode, n >> 1, cache);
2511 result = expand_mult (mode, op0, op1, target, 0);
2512 if (result != target)
2513 emit_move_insn (target, result);
2517 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2518 floating point operand in mode MODE, and N is the exponent. This
2519 function needs to be kept in sync with powi_cost above. */
2522 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2524 unsigned HOST_WIDE_INT val;
2525 rtx cache[POWI_TABLE_SIZE];
2529 return CONST1_RTX (mode);
2531 val = (n < 0) ? -n : n;
2533 memset (cache, 0, sizeof (cache));
2536 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2538 /* If the original exponent was negative, reciprocate the result. */
2540 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2541 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2546 /* Expand a call to the pow built-in mathematical function. Return 0 if
2547 a normal call should be emitted rather than expanding the function
2548 in-line. EXP is the expression that is a call to the builtin
2549 function; if convenient, the result should be placed in TARGET. */
2552 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2554 tree arglist = TREE_OPERAND (exp, 1);
2557 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2560 arg0 = TREE_VALUE (arglist);
2561 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2563 if (TREE_CODE (arg1) == REAL_CST
2564 && ! TREE_CONSTANT_OVERFLOW (arg1))
2566 REAL_VALUE_TYPE cint;
2570 c = TREE_REAL_CST (arg1);
2571 n = real_to_integer (&c);
2572 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2573 if (real_identical (&c, &cint))
2575 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2576 Otherwise, check the number of multiplications required.
2577 Note that pow never sets errno for an integer exponent. */
2578 if ((n >= -1 && n <= 2)
2579 || (flag_unsafe_math_optimizations
2581 && powi_cost (n) <= POWI_MAX_MULTS))
2583 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2584 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2585 op = force_reg (mode, op);
2586 return expand_powi (op, mode, n);
2591 if (! flag_unsafe_math_optimizations)
2593 return expand_builtin_mathfn_2 (exp, target, subtarget);
2596 /* Expand a call to the powi built-in mathematical function. Return 0 if
2597 a normal call should be emitted rather than expanding the function
2598 in-line. EXP is the expression that is a call to the builtin
2599 function; if convenient, the result should be placed in TARGET. */
2602 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2604 tree arglist = TREE_OPERAND (exp, 1);
2607 enum machine_mode mode;
2608 enum machine_mode mode2;
2610 if (! validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2613 arg0 = TREE_VALUE (arglist);
2614 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2615 mode = TYPE_MODE (TREE_TYPE (exp));
2617 /* Handle constant power. */
2619 if (TREE_CODE (arg1) == INTEGER_CST
2620 && ! TREE_CONSTANT_OVERFLOW (arg1))
2622 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
2624 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2625 Otherwise, check the number of multiplications required. */
2626 if ((TREE_INT_CST_HIGH (arg1) == 0
2627 || TREE_INT_CST_HIGH (arg1) == -1)
2628 && ((n >= -1 && n <= 2)
2630 && powi_cost (n) <= POWI_MAX_MULTS)))
2632 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
2633 op0 = force_reg (mode, op0);
2634 return expand_powi (op0, mode, n);
2638 /* Emit a libcall to libgcc. */
2640 /* Mode of the 2nd argument must match that of an int. */
2641 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2643 if (target == NULL_RTX)
2644 target = gen_reg_rtx (mode);
2646 op0 = expand_expr (arg0, subtarget, mode, 0);
2647 if (GET_MODE (op0) != mode)
2648 op0 = convert_to_mode (mode, op0, 0);
2649 op1 = expand_expr (arg1, 0, mode2, 0);
2650 if (GET_MODE (op1) != mode2)
2651 op1 = convert_to_mode (mode2, op1, 0);
2653 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
2654 target, LCT_CONST_MAKE_BLOCK, mode, 2,
2655 op0, mode, op1, mode2);
2660 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2661 if we failed the caller should emit a normal call, otherwise
2662 try to get the result in TARGET, if convenient. */
2665 expand_builtin_strlen (tree arglist, rtx target,
2666 enum machine_mode target_mode)
2668 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2673 tree len, src = TREE_VALUE (arglist);
2674 rtx result, src_reg, char_rtx, before_strlen;
2675 enum machine_mode insn_mode = target_mode, char_mode;
2676 enum insn_code icode = CODE_FOR_nothing;
2679 /* If the length can be computed at compile-time, return it. */
2680 len = c_strlen (src, 0);
2682 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2684 /* If the length can be computed at compile-time and is constant
2685 integer, but there are side-effects in src, evaluate
2686 src for side-effects, then return len.
2687 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2688 can be optimized into: i++; x = 3; */
2689 len = c_strlen (src, 1);
2690 if (len && TREE_CODE (len) == INTEGER_CST)
2692 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2693 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2696 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2698 /* If SRC is not a pointer type, don't do this operation inline. */
2702 /* Bail out if we can't compute strlen in the right mode. */
2703 while (insn_mode != VOIDmode)
2705 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2706 if (icode != CODE_FOR_nothing)
2709 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2711 if (insn_mode == VOIDmode)
2714 /* Make a place to write the result of the instruction. */
2718 && GET_MODE (result) == insn_mode
2719 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2720 result = gen_reg_rtx (insn_mode);
2722 /* Make a place to hold the source address. We will not expand
2723 the actual source until we are sure that the expansion will
2724 not fail -- there are trees that cannot be expanded twice. */
2725 src_reg = gen_reg_rtx (Pmode);
2727 /* Mark the beginning of the strlen sequence so we can emit the
2728 source operand later. */
2729 before_strlen = get_last_insn ();
2731 char_rtx = const0_rtx;
2732 char_mode = insn_data[(int) icode].operand[2].mode;
2733 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2735 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2737 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2738 char_rtx, GEN_INT (align));
2743 /* Now that we are assured of success, expand the source. */
2745 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
2747 emit_move_insn (src_reg, pat);
2752 emit_insn_after (pat, before_strlen);
2754 emit_insn_before (pat, get_insns ());
2756 /* Return the value in the proper mode for this function. */
2757 if (GET_MODE (result) == target_mode)
2759 else if (target != 0)
2760 convert_move (target, result, 0);
2762 target = convert_to_mode (target_mode, result, 0);
2768 /* Expand a call to the strstr builtin. Return 0 if we failed the
2769 caller should emit a normal call, otherwise try to get the result
2770 in TARGET, if convenient (and in mode MODE if that's convenient). */
2773 expand_builtin_strstr (tree arglist, tree type, rtx target, enum machine_mode mode)
2775 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2777 tree result = fold_builtin_strstr (arglist, type);
2779 return expand_expr (result, target, mode, EXPAND_NORMAL);
2784 /* Expand a call to the strchr builtin. Return 0 if we failed the
2785 caller should emit a normal call, otherwise try to get the result
2786 in TARGET, if convenient (and in mode MODE if that's convenient). */
2789 expand_builtin_strchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2791 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2793 tree result = fold_builtin_strchr (arglist, type);
2795 return expand_expr (result, target, mode, EXPAND_NORMAL);
2797 /* FIXME: Should use strchrM optab so that ports can optimize this. */
2802 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2803 caller should emit a normal call, otherwise try to get the result
2804 in TARGET, if convenient (and in mode MODE if that's convenient). */
2807 expand_builtin_strrchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2809 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2811 tree result = fold_builtin_strrchr (arglist, type);
2813 return expand_expr (result, target, mode, EXPAND_NORMAL);
2818 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2819 caller should emit a normal call, otherwise try to get the result
2820 in TARGET, if convenient (and in mode MODE if that's convenient). */
2823 expand_builtin_strpbrk (tree arglist, tree type, rtx target, enum machine_mode mode)
2825 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2827 tree result = fold_builtin_strpbrk (arglist, type);
2829 return expand_expr (result, target, mode, EXPAND_NORMAL);
2834 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2835 bytes from constant string DATA + OFFSET and return it as target
2839 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2840 enum machine_mode mode)
2842 const char *str = (const char *) data;
2844 gcc_assert (offset >= 0
2845 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2846 <= strlen (str) + 1));
2848 return c_readstr (str + offset, mode);
2851 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2852 Return 0 if we failed, the caller should emit a normal call,
2853 otherwise try to get the result in TARGET, if convenient (and in
2854 mode MODE if that's convenient). */
2856 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
2858 tree fndecl = get_callee_fndecl (exp);
2859 tree arglist = TREE_OPERAND (exp, 1);
2860 if (!validate_arglist (arglist,
2861 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2865 tree dest = TREE_VALUE (arglist);
2866 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2867 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2868 const char *src_str;
2869 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2870 unsigned int dest_align
2871 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2872 rtx dest_mem, src_mem, dest_addr, len_rtx;
2873 tree result = fold_builtin_memcpy (fndecl, arglist);
2876 return expand_expr (result, target, mode, EXPAND_NORMAL);
2878 /* If DEST is not a pointer type, call the normal function. */
2879 if (dest_align == 0)
2882 /* If either SRC is not a pointer type, don't do this
2883 operation in-line. */
2887 dest_mem = get_memory_rtx (dest, len);
2888 set_mem_align (dest_mem, dest_align);
2889 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2890 src_str = c_getstr (src);
2892 /* If SRC is a string constant and block move would be done
2893 by pieces, we can avoid loading the string from memory
2894 and only stored the computed constants. */
2896 && GET_CODE (len_rtx) == CONST_INT
2897 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2898 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2899 (void *) src_str, dest_align))
2901 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2902 builtin_memcpy_read_str,
2903 (void *) src_str, dest_align, 0);
2904 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2905 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2909 src_mem = get_memory_rtx (src, len);
2910 set_mem_align (src_mem, src_align);
2912 /* Copy word part most expediently. */
2913 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2914 CALL_EXPR_TAILCALL (exp)
2915 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
2919 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2920 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2926 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2927 Return 0 if we failed; the caller should emit a normal call,
2928 otherwise try to get the result in TARGET, if convenient (and in
2929 mode MODE if that's convenient). If ENDP is 0 return the
2930 destination pointer, if ENDP is 1 return the end pointer ala
2931 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2935 expand_builtin_mempcpy (tree arglist, tree type, rtx target, enum machine_mode mode,
2938 if (!validate_arglist (arglist,
2939 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2941 /* If return value is ignored, transform mempcpy into memcpy. */
2942 else if (target == const0_rtx)
2944 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2949 return expand_expr (build_function_call_expr (fn, arglist),
2950 target, mode, EXPAND_NORMAL);
2954 tree dest = TREE_VALUE (arglist);
2955 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2956 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2957 const char *src_str;
2958 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2959 unsigned int dest_align
2960 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2961 rtx dest_mem, src_mem, len_rtx;
2962 tree result = fold_builtin_mempcpy (arglist, type, endp);
2965 return expand_expr (result, target, mode, EXPAND_NORMAL);
2967 /* If either SRC or DEST is not a pointer type, don't do this
2968 operation in-line. */
2969 if (dest_align == 0 || src_align == 0)
2972 /* If LEN is not constant, call the normal function. */
2973 if (! host_integerp (len, 1))
2976 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2977 src_str = c_getstr (src);
2979 /* If SRC is a string constant and block move would be done
2980 by pieces, we can avoid loading the string from memory
2981 and only stored the computed constants. */
2983 && GET_CODE (len_rtx) == CONST_INT
2984 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2985 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2986 (void *) src_str, dest_align))
2988 dest_mem = get_memory_rtx (dest, len);
2989 set_mem_align (dest_mem, dest_align);
2990 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2991 builtin_memcpy_read_str,
2992 (void *) src_str, dest_align, endp);
2993 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2994 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2998 if (GET_CODE (len_rtx) == CONST_INT
2999 && can_move_by_pieces (INTVAL (len_rtx),
3000 MIN (dest_align, src_align)))
3002 dest_mem = get_memory_rtx (dest, len);
3003 set_mem_align (dest_mem, dest_align);
3004 src_mem = get_memory_rtx (src, len);
3005 set_mem_align (src_mem, src_align);
3006 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3007 MIN (dest_align, src_align), endp);
3008 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3009 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3017 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
3018 if we failed; the caller should emit a normal call. */
3021 expand_builtin_memmove (tree arglist, tree type, rtx target,
3022 enum machine_mode mode, tree orig_exp)
3024 if (!validate_arglist (arglist,
3025 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3029 tree dest = TREE_VALUE (arglist);
3030 tree src = TREE_VALUE (TREE_CHAIN (arglist));
3031 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3033 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3034 unsigned int dest_align
3035 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3036 tree result = fold_builtin_memmove (arglist, type);
3039 return expand_expr (result, target, mode, EXPAND_NORMAL);
3041 /* If DEST is not a pointer type, call the normal function. */
3042 if (dest_align == 0)
3045 /* If either SRC is not a pointer type, don't do this
3046 operation in-line. */
3050 /* If src is categorized for a readonly section we can use
3052 if (readonly_data_expr (src))
3054 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3057 fn = build_function_call_expr (fn, arglist);
3058 if (TREE_CODE (fn) == CALL_EXPR)
3059 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3060 return expand_expr (fn, target, mode, EXPAND_NORMAL);
3063 /* If length is 1 and we can expand memcpy call inline,
3064 it is ok to use memcpy as well. */
3065 if (integer_onep (len))
3067 rtx ret = expand_builtin_mempcpy (arglist, type, target, mode,
3073 /* Otherwise, call the normal function. */
3078 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
3079 if we failed the caller should emit a normal call. */
3082 expand_builtin_bcopy (tree exp)
3084 tree arglist = TREE_OPERAND (exp, 1);
3085 tree type = TREE_TYPE (exp);
3086 tree src, dest, size, newarglist;
3088 if (!validate_arglist (arglist,
3089 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3092 src = TREE_VALUE (arglist);
3093 dest = TREE_VALUE (TREE_CHAIN (arglist));
3094 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3096 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
3097 memmove(ptr y, ptr x, size_t z). This is done this way
3098 so that if it isn't expanded inline, we fallback to
3099 calling bcopy instead of memmove. */
3101 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3102 newarglist = tree_cons (NULL_TREE, src, newarglist);
3103 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3105 return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode, exp);
3109 # define HAVE_movstr 0
3110 # define CODE_FOR_movstr CODE_FOR_nothing
3113 /* Expand into a movstr instruction, if one is available. Return 0 if
3114 we failed, the caller should emit a normal call, otherwise try to
3115 get the result in TARGET, if convenient. If ENDP is 0 return the
3116 destination pointer, if ENDP is 1 return the end pointer ala
3117 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3121 expand_movstr (tree dest, tree src, rtx target, int endp)
3127 const struct insn_data * data;
3132 dest_mem = get_memory_rtx (dest, NULL);
3133 src_mem = get_memory_rtx (src, NULL);
3136 target = force_reg (Pmode, XEXP (dest_mem, 0));
3137 dest_mem = replace_equiv_address (dest_mem, target);
3138 end = gen_reg_rtx (Pmode);
3142 if (target == 0 || target == const0_rtx)
3144 end = gen_reg_rtx (Pmode);
3152 data = insn_data + CODE_FOR_movstr;
3154 if (data->operand[0].mode != VOIDmode)
3155 end = gen_lowpart (data->operand[0].mode, end);
3157 insn = data->genfun (end, dest_mem, src_mem);
3163 /* movstr is supposed to set end to the address of the NUL
3164 terminator. If the caller requested a mempcpy-like return value,
3166 if (endp == 1 && target != const0_rtx)
3168 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3169 emit_move_insn (target, force_operand (tem, NULL_RTX));
3175 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
3176 if we failed the caller should emit a normal call, otherwise try to get
3177 the result in TARGET, if convenient (and in mode MODE if that's
3181 expand_builtin_strcpy (tree fndecl, tree arglist, rtx target, enum machine_mode mode)
3183 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3185 tree result = fold_builtin_strcpy (fndecl, arglist, 0);
3187 return expand_expr (result, target, mode, EXPAND_NORMAL);
3189 return expand_movstr (TREE_VALUE (arglist),
3190 TREE_VALUE (TREE_CHAIN (arglist)),
3191 target, /*endp=*/0);
3196 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
3197 Return 0 if we failed the caller should emit a normal call,
3198 otherwise try to get the result in TARGET, if convenient (and in
3199 mode MODE if that's convenient). */
3202 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3204 tree arglist = TREE_OPERAND (exp, 1);
3205 /* If return value is ignored, transform stpcpy into strcpy. */
3206 if (target == const0_rtx)
3208 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3212 return expand_expr (build_function_call_expr (fn, arglist),
3213 target, mode, EXPAND_NORMAL);
3216 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3220 tree dst, src, len, lenp1;
3224 /* Ensure we get an actual string whose length can be evaluated at
3225 compile-time, not an expression containing a string. This is
3226 because the latter will potentially produce pessimized code
3227 when used to produce the return value. */
3228 src = TREE_VALUE (TREE_CHAIN (arglist));
3229 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3230 return expand_movstr (TREE_VALUE (arglist),
3231 TREE_VALUE (TREE_CHAIN (arglist)),
3232 target, /*endp=*/2);
3234 dst = TREE_VALUE (arglist);
3235 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3236 narglist = build_tree_list (NULL_TREE, lenp1);
3237 narglist = tree_cons (NULL_TREE, src, narglist);
3238 narglist = tree_cons (NULL_TREE, dst, narglist);
3239 ret = expand_builtin_mempcpy (narglist, TREE_TYPE (exp),
3240 target, mode, /*endp=*/2);
3245 if (TREE_CODE (len) == INTEGER_CST)
3247 rtx len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3249 if (GET_CODE (len_rtx) == CONST_INT)
3251 ret = expand_builtin_strcpy (get_callee_fndecl (exp),
3252 arglist, target, mode);
3258 if (mode != VOIDmode)
3259 target = gen_reg_rtx (mode);
3261 target = gen_reg_rtx (GET_MODE (ret));
3263 if (GET_MODE (target) != GET_MODE (ret))
3264 ret = gen_lowpart (GET_MODE (target), ret);
3266 ret = plus_constant (ret, INTVAL (len_rtx));
3267 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3275 return expand_movstr (TREE_VALUE (arglist),
3276 TREE_VALUE (TREE_CHAIN (arglist)),
3277 target, /*endp=*/2);
3281 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3282 bytes from constant string DATA + OFFSET and return it as target
3286 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3287 enum machine_mode mode)
3289 const char *str = (const char *) data;
3291 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3294 return c_readstr (str + offset, mode);
3297 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
3298 if we failed the caller should emit a normal call. */
3301 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3303 tree fndecl = get_callee_fndecl (exp);
3304 tree arglist = TREE_OPERAND (exp, 1);
3305 if (validate_arglist (arglist,
3306 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3308 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
3309 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3310 tree result = fold_builtin_strncpy (fndecl, arglist, slen);
3313 return expand_expr (result, target, mode, EXPAND_NORMAL);
3315 /* We must be passed a constant len and src parameter. */
3316 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3319 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3321 /* We're required to pad with trailing zeros if the requested
3322 len is greater than strlen(s2)+1. In that case try to
3323 use store_by_pieces, if it fails, punt. */
3324 if (tree_int_cst_lt (slen, len))
3326 tree dest = TREE_VALUE (arglist);
3327 unsigned int dest_align
3328 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3329 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
3332 if (!p || dest_align == 0 || !host_integerp (len, 1)
3333 || !can_store_by_pieces (tree_low_cst (len, 1),
3334 builtin_strncpy_read_str,
3335 (void *) p, dest_align))
3338 dest_mem = get_memory_rtx (dest, len);
3339 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3340 builtin_strncpy_read_str,
3341 (void *) p, dest_align, 0);
3342 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3343 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3350 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3351 bytes from constant string DATA + OFFSET and return it as target
3355 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3356 enum machine_mode mode)
3358 const char *c = (const char *) data;
3359 char *p = alloca (GET_MODE_SIZE (mode));
3361 memset (p, *c, GET_MODE_SIZE (mode));
3363 return c_readstr (p, mode);
3366 /* Callback routine for store_by_pieces. Return the RTL of a register
3367 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3368 char value given in the RTL register data. For example, if mode is
3369 4 bytes wide, return the RTL for 0x01010101*data. */
3372 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3373 enum machine_mode mode)
3379 size = GET_MODE_SIZE (mode);
3384 memset (p, 1, size);
3385 coeff = c_readstr (p, mode);
3387 target = convert_to_mode (mode, (rtx) data, 1);
3388 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3389 return force_reg (mode, target);
3392 /* Expand expression EXP, which is a call to the memset builtin. Return 0
3393 if we failed the caller should emit a normal call, otherwise try to get
3394 the result in TARGET, if convenient (and in mode MODE if that's
3398 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode,
3401 if (!validate_arglist (arglist,
3402 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3406 tree dest = TREE_VALUE (arglist);
3407 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3408 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3411 unsigned int dest_align
3412 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3413 rtx dest_mem, dest_addr, len_rtx;
3415 /* If DEST is not a pointer type, don't do this
3416 operation in-line. */
3417 if (dest_align == 0)
3420 /* If the LEN parameter is zero, return DEST. */
3421 if (integer_zerop (len))
3423 /* Evaluate and ignore VAL in case it has side-effects. */
3424 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3425 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3428 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3429 dest_mem = get_memory_rtx (dest, len);
3431 if (TREE_CODE (val) != INTEGER_CST)
3435 val = fold_build1 (CONVERT_EXPR, unsigned_char_type_node, val);
3436 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3438 /* Assume that we can memset by pieces if we can store the
3439 * the coefficients by pieces (in the required modes).
3440 * We can't pass builtin_memset_gen_str as that emits RTL. */
3442 if (host_integerp (len, 1)
3443 && !(optimize_size && tree_low_cst (len, 1) > 1)
3444 && can_store_by_pieces (tree_low_cst (len, 1),
3445 builtin_memset_read_str, &c, dest_align))
3447 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3449 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3450 builtin_memset_gen_str, val_rtx, dest_align, 0);
3452 else if (!set_storage_via_setmem(dest_mem, len_rtx, val_rtx,
3456 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3457 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3461 if (target_char_cast (val, &c))
3466 if (host_integerp (len, 1)
3467 && !(optimize_size && tree_low_cst (len, 1) > 1)
3468 && can_store_by_pieces (tree_low_cst (len, 1),
3469 builtin_memset_read_str, &c, dest_align))
3470 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3471 builtin_memset_read_str, &c, dest_align, 0);
3472 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3476 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3477 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3481 set_mem_align (dest_mem, dest_align);
3482 dest_addr = clear_storage (dest_mem, len_rtx,
3483 CALL_EXPR_TAILCALL (orig_exp)
3484 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
3488 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3489 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3496 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3497 if we failed the caller should emit a normal call. */
3500 expand_builtin_bzero (tree exp)
3502 tree arglist = TREE_OPERAND (exp, 1);
3503 tree dest, size, newarglist;
3505 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3508 dest = TREE_VALUE (arglist);
3509 size = TREE_VALUE (TREE_CHAIN (arglist));
3511 /* New argument list transforming bzero(ptr x, int y) to
3512 memset(ptr x, int 0, size_t y). This is done this way
3513 so that if it isn't expanded inline, we fallback to
3514 calling bzero instead of memset. */
3516 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3517 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3518 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3520 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode, exp);
3523 /* Expand expression EXP, which is a call to the memcmp built-in function.
3524 ARGLIST is the argument list for this call. Return 0 if we failed and the
3525 caller should emit a normal call, otherwise try to get the result in
3526 TARGET, if convenient (and in mode MODE, if that's convenient). */
3529 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3530 enum machine_mode mode)
3532 if (!validate_arglist (arglist,
3533 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3537 tree result = fold_builtin_memcmp (arglist);
3539 return expand_expr (result, target, mode, EXPAND_NORMAL);
3542 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3544 tree arg1 = TREE_VALUE (arglist);
3545 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3546 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3547 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3552 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3554 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3555 enum machine_mode insn_mode;
3557 #ifdef HAVE_cmpmemsi
3559 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3562 #ifdef HAVE_cmpstrnsi
3564 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;