1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
41 #include "typeclass.h"
46 #include "langhooks.h"
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
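/* Illustrative note (not part of the original source): the test is purely
   textual, so it holds for any declaration whose name begins with
   "__builtin_", including one the user declared directly.  A minimal
   stand-alone sketch of the same check:

     static int called_as_built_in_name (const char *name)
     {
       return strncmp (name, "__builtin_", 10) == 0;   /* 10 == strlen ("__builtin_") */
     }
*/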
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
68 const char *const built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Set up an array of _DECL trees, making sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance the runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 /* Trigonometric and mathematical constants used in builtin folding. */
83 static bool builtin_dconsts_init = false;
84 static REAL_VALUE_TYPE dconstpi;
85 static REAL_VALUE_TYPE dconste;
87 static int get_pointer_alignment (tree, unsigned int);
88 static tree c_strlen (tree, int);
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree);
93 static tree build_string_literal (int, const char *);
94 static int apply_args_size (void);
95 static int apply_result_size (void);
96 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
97 static rtx result_vector (int, rtx);
99 static rtx expand_builtin_setjmp (tree, rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static void expand_errno_check (tree, rtx);
108 static rtx expand_builtin_mathfn (tree, rtx, rtx);
109 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
110 static rtx expand_builtin_constant_p (tree, enum machine_mode);
111 static rtx expand_builtin_args_info (tree);
112 static rtx expand_builtin_next_arg (tree);
113 static rtx expand_builtin_va_start (tree);
114 static rtx expand_builtin_va_end (tree);
115 static rtx expand_builtin_va_copy (tree);
116 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
126 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_bcopy (tree);
128 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
130 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
132 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
134 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
141 static rtx expand_builtin_alloca (tree, rtx);
142 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
143 static rtx expand_builtin_frame_address (tree, tree);
144 static rtx expand_builtin_fputs (tree, rtx, bool);
145 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
146 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
147 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
148 static tree stabilize_va_list (tree, int);
149 static rtx expand_builtin_expect (tree, rtx);
150 static tree fold_builtin_constant_p (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_inf (tree, int);
153 static tree fold_builtin_nan (tree, tree, int);
154 static int validate_arglist (tree, ...);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_cabs (tree, rtx);
160 static void init_builtin_dconsts (void);
161 static tree fold_builtin_cabs (tree, tree, tree);
162 static tree fold_builtin_trunc (tree);
163 static tree fold_builtin_floor (tree);
164 static tree fold_builtin_ceil (tree);
165 static tree fold_builtin_bitop (tree);
167 /* Initialize mathematical constants for constant folding builtins.
168 These constants need to be given to at least 160 bits of precision. */
171 init_builtin_dconsts (void)
173 real_from_string (&dconstpi,
174 "3.1415926535897932384626433832795028841971693993751058209749445923078");
175 real_from_string (&dconste,
176 "2.7182818284590452353602874713526624977572470936999595749669676277241");
178 builtin_dconsts_init = true;
181 /* Return the alignment in bits of EXP, a pointer valued expression.
182 But don't return more than MAX_ALIGN no matter what.
183 The alignment returned is, by default, the alignment of the thing that
184 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
186 Otherwise, look at the expression to see if we can do better, i.e., if the
187 expression is actually pointing at an object whose alignment is tighter. */
190 get_pointer_alignment (tree exp, unsigned int max_align)
192 unsigned int align, inner;
194 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
197 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
198 align = MIN (align, max_align);
202 switch (TREE_CODE (exp))
206 case NON_LVALUE_EXPR:
207 exp = TREE_OPERAND (exp, 0);
208 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
211 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
212 align = MIN (inner, max_align);
216 /* If sum of pointer + int, restrict our maximum alignment to that
217 imposed by the integer. If not, we can't do any better than
219 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
222 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
223 & (max_align / BITS_PER_UNIT - 1))
227 exp = TREE_OPERAND (exp, 0);
231 /* See what we are pointing at and look at its alignment. */
232 exp = TREE_OPERAND (exp, 0);
233 if (TREE_CODE (exp) == FUNCTION_DECL)
234 align = FUNCTION_BOUNDARY;
235 else if (DECL_P (exp))
236 align = DECL_ALIGN (exp);
237 #ifdef CONSTANT_ALIGNMENT
238 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
239 align = CONSTANT_ALIGNMENT (exp, align);
241 return MIN (align, max_align);
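/* Illustrative example (an assumption, not from the original source): given

     static int counter __attribute__ ((aligned (16)));

   a call such as get_pointer_alignment (<&counter>, 128) reaches the
   ADDR_EXPR handling above, picks up DECL_ALIGN (counter) == 128 bits, and
   returns MIN (128, max_align).  When nothing better is known, the result
   is just the TYPE_ALIGN of the pointed-to type, and 0 when EXP is not of
   POINTER_TYPE at all.  */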
249 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
250 way, because it could contain a zero byte in the middle.
251 TREE_STRING_LENGTH is the size of the character array, not the string.
253 ONLY_VALUE should be nonzero if the result is not going to be emitted
254 into the instruction stream and zero if it is going to be expanded.
255 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
256 is returned, otherwise NULL, since
257 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
258 evaluate the side-effects.
260 The value returned is of type `ssizetype'.
262 Unfortunately, string_constant can't access the values of const char
263 arrays with initializers, so neither can we do so here. */
266 c_strlen (tree src, int only_value)
269 HOST_WIDE_INT offset;
274 if (TREE_CODE (src) == COND_EXPR
275 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
279 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
280 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
281 if (tree_int_cst_equal (len1, len2))
285 if (TREE_CODE (src) == COMPOUND_EXPR
286 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
287 return c_strlen (TREE_OPERAND (src, 1), only_value);
289 src = string_constant (src, &offset_node);
293 max = TREE_STRING_LENGTH (src) - 1;
294 ptr = TREE_STRING_POINTER (src);
296 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
298 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
299 compute the offset to the following null if we don't know where to
300 start searching for it. */
303 for (i = 0; i < max; i++)
307 /* We don't know the starting offset, but we do know that the string
308 has no internal zero bytes. We can assume that the offset falls
309 within the bounds of the string; otherwise, the programmer deserves
310 what he gets. Subtract the offset from the length of the string,
311 and return that. This would perhaps not be valid if we were dealing
312 with named arrays in addition to literal string constants. */
314 return size_diffop (size_int (max), offset_node);
317 /* We have a known offset into the string. Start searching there for
318 a null character if we can represent it as a single HOST_WIDE_INT. */
319 if (offset_node == 0)
321 else if (! host_integerp (offset_node, 0))
324 offset = tree_low_cst (offset_node, 0);
326 /* If the offset is known to be out of bounds, warn, and call strlen at
328 if (offset < 0 || offset > max)
330 warning ("offset outside bounds of constant string");
334 /* Use strlen to search for the first zero byte. Since any strings
335 constructed with build_string will have nulls appended, we win even
336 if we get handed something like (char[4])"abcd".
338 Since OFFSET is our starting index into the string, no further
339 calculation is needed. */
340 return ssize_int (strlen (ptr + offset));
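/* Illustrative examples (assumptions, not from the original source):

     c_strlen (<"hello world">, 0)    returns ssize_int (11)
     c_strlen (<"hello" + 2>, 0)      returns ssize_int (3)
     c_strlen (<"foo\0bar" + i>, 0)   returns NULL (unknown offset into a
                                      string with an embedded NUL)

   where <...> stands for the corresponding tree expression.  */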
343 /* Return a char pointer for a C string if it is a string constant
344 or sum of string constant and integer constant. */
351 src = string_constant (src, &offset_node);
355 if (offset_node == 0)
356 return TREE_STRING_POINTER (src);
357 else if (!host_integerp (offset_node, 1)
358 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
361 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
364 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
365 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
368 c_readstr (const char *str, enum machine_mode mode)
374 if (GET_MODE_CLASS (mode) != MODE_INT)
379 for (i = 0; i < GET_MODE_SIZE (mode); i++)
382 if (WORDS_BIG_ENDIAN)
383 j = GET_MODE_SIZE (mode) - i - 1;
384 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
385 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
386 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
388 if (j > 2 * HOST_BITS_PER_WIDE_INT)
391 ch = (unsigned char) str[i];
392 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
394 return immed_double_const (c[0], c[1], mode);
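/* Illustrative example (not part of the original source): on a 32-bit
   little-endian target, c_readstr ("abcd", SImode) produces the CONST_INT
   0x64636261 ('a' in the least significant byte), while a big-endian
   target yields 0x61626364 -- i.e. the value a target load of the word at
   the start of the string would see.  */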
397 /* Cast a target constant CST to target CHAR and if that value fits into
398 host char type, return zero and put that value into the variable pointed to by P.  */
402 target_char_cast (tree cst, char *p)
404 unsigned HOST_WIDE_INT val, hostval;
406 if (!host_integerp (cst, 1)
407 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
410 val = tree_low_cst (cst, 1);
411 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
412 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
415 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
416 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
425 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
426 times to get the address of either a higher stack frame, or a return
427 address located within it (depending on FNDECL_CODE). */
430 expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
435 /* Some machines need special handling before we can access
436 arbitrary frames. For example, on the sparc, we must first flush
437 all register windows to the stack. */
438 #ifdef SETUP_FRAME_ADDRESSES
440 SETUP_FRAME_ADDRESSES ();
443 /* On the sparc, the return address is not in the frame, it is in a
444 register. There is no way to access it off of the current frame
445 pointer, but it can be accessed off the previous frame pointer by
446 reading the value from the register window save area. */
447 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
448 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
452 /* Scan back COUNT frames to the specified frame. */
453 for (i = 0; i < count; i++)
455 /* Assume the dynamic chain pointer is in the word that the
456 frame address points to, unless otherwise specified. */
457 #ifdef DYNAMIC_CHAIN_ADDRESS
458 tem = DYNAMIC_CHAIN_ADDRESS (tem);
460 tem = memory_address (Pmode, tem);
461 tem = gen_rtx_MEM (Pmode, tem);
462 set_mem_alias_set (tem, get_frame_alias_set ());
463 tem = copy_to_reg (tem);
466 /* For __builtin_frame_address, return what we've got. */
467 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
470 /* For __builtin_return_address, get the return address from that
472 #ifdef RETURN_ADDR_RTX
473 tem = RETURN_ADDR_RTX (count, tem);
475 tem = memory_address (Pmode,
476 plus_constant (tem, GET_MODE_SIZE (Pmode)));
477 tem = gen_rtx_MEM (Pmode, tem);
478 set_mem_alias_set (tem, get_frame_alias_set ());
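/* Illustrative usage (user code, not part of this file): the expander above
   implements both

     void *ra = __builtin_return_address (0);   // address we will return to
     void *fp = __builtin_frame_address (1);    // frame of our caller

   COUNT must be a compile-time constant; nonzero values walk COUNT frames up
   the dynamic chain first, which is only reliable on targets whose frame
   layout supports it.  */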
483 /* Alias set used for setjmp buffer. */
484 static HOST_WIDE_INT setjmp_alias_set = -1;
486 /* Construct the leading half of a __builtin_setjmp call. Control will
487 return to RECEIVER_LABEL. This is used directly by sjlj exception
491 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
493 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
497 if (setjmp_alias_set == -1)
498 setjmp_alias_set = new_alias_set ();
500 #ifdef POINTERS_EXTEND_UNSIGNED
501 if (GET_MODE (buf_addr) != Pmode)
502 buf_addr = convert_memory_address (Pmode, buf_addr);
505 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
509 /* We store the frame pointer and the address of receiver_label in
510 the buffer and use the rest of it for the stack save area, which
511 is machine-dependent. */
513 #ifndef BUILTIN_SETJMP_FRAME_VALUE
514 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
517 mem = gen_rtx_MEM (Pmode, buf_addr);
518 set_mem_alias_set (mem, setjmp_alias_set);
519 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
521 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
522 set_mem_alias_set (mem, setjmp_alias_set);
524 emit_move_insn (validize_mem (mem),
525 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
527 stack_save = gen_rtx_MEM (sa_mode,
528 plus_constant (buf_addr,
529 2 * GET_MODE_SIZE (Pmode)));
530 set_mem_alias_set (stack_save, setjmp_alias_set);
531 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
533 /* If there is further processing to do, do it. */
534 #ifdef HAVE_builtin_setjmp_setup
535 if (HAVE_builtin_setjmp_setup)
536 emit_insn (gen_builtin_setjmp_setup (buf_addr));
539 /* Tell optimize_save_area_alloca that extra work is going to
540 need to go on during alloca. */
541 current_function_calls_setjmp = 1;
543 /* Set this so all the registers get saved in our frame; we need to be
544 able to copy the saved values for any registers from frames we unwind. */
545 current_function_has_nonlocal_label = 1;
548 /* Construct the trailing part of a __builtin_setjmp call.
549 This is used directly by sjlj exception handling code. */
552 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
554 /* Clobber the FP when we get here, so we have to make sure it's
555 marked as used by this function. */
556 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
558 /* Mark the static chain as clobbered here so life information
559 doesn't get messed up for it. */
560 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
562 /* Now put in the code to restore the frame pointer, and argument
563 pointer, if needed. The code below is from expand_end_bindings
564 in stmt.c; see detailed documentation there. */
565 #ifdef HAVE_nonlocal_goto
566 if (! HAVE_nonlocal_goto)
568 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
570 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
571 if (fixed_regs[ARG_POINTER_REGNUM])
573 #ifdef ELIMINABLE_REGS
575 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
577 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
578 if (elim_regs[i].from == ARG_POINTER_REGNUM
579 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
582 if (i == ARRAY_SIZE (elim_regs))
585 /* Now restore our arg pointer from the address at which it
586 was saved in our stack frame. */
587 emit_move_insn (virtual_incoming_args_rtx,
588 copy_to_reg (get_arg_pointer_save_area (cfun)));
593 #ifdef HAVE_builtin_setjmp_receiver
594 if (HAVE_builtin_setjmp_receiver)
595 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
598 #ifdef HAVE_nonlocal_goto_receiver
599 if (HAVE_nonlocal_goto_receiver)
600 emit_insn (gen_nonlocal_goto_receiver ());
605 /* @@@ This is a kludge. Not all machine descriptions define a blockage
606 insn, but we must not allow the code we just generated to be reordered
607 by scheduling. Specifically, the update of the frame pointer must
608 happen immediately, not later. So emit an ASM_INPUT to act as blockage
610 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
613 /* __builtin_setjmp is passed a pointer to an array of five words (not
614 all will be used on all machines). It operates similarly to the C
615 library function of the same name, but is more efficient. Much of
616 the code below (and for longjmp) is copied from the handling of
619 NOTE: This is intended for use by GNAT and the exception handling
620 scheme in the compiler and will only work in the method used by
624 expand_builtin_setjmp (tree arglist, rtx target)
626 rtx buf_addr, next_lab, cont_lab;
628 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
631 if (target == 0 || GET_CODE (target) != REG
632 || REGNO (target) < FIRST_PSEUDO_REGISTER)
633 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
635 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
637 next_lab = gen_label_rtx ();
638 cont_lab = gen_label_rtx ();
640 expand_builtin_setjmp_setup (buf_addr, next_lab);
642 /* Set TARGET to zero and branch to the continue label. */
643 emit_move_insn (target, const0_rtx);
644 emit_jump_insn (gen_jump (cont_lab));
646 emit_label (next_lab);
648 expand_builtin_setjmp_receiver (next_lab);
650 /* Set TARGET to one. */
651 emit_move_insn (target, const1_rtx);
652 emit_label (cont_lab);
654 /* Tell flow about the strange goings-on. Putting `next_lab' on
655 `nonlocal_goto_handler_labels' indicates that function
656 calls may traverse the arc back to this label. */
658 current_function_has_nonlocal_label = 1;
659 nonlocal_goto_handler_labels
660 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
665 /* __builtin_longjmp is passed a pointer to an array of five words (not
666 all will be used on all machines). It operates similarly to the C
667 library function of the same name, but is more efficient. Much of
668 the code below is copied from the handling of non-local gotos.
670 NOTE: This is intended for use by GNAT and the exception handling
671 scheme in the compiler and will only work in the method used by
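/* Illustrative usage (user/EH code, not part of this file):

     void *buf[5];                        // the five-word buffer both builtins expect
     if (__builtin_setjmp (buf) == 0)
       ...  normal path; may eventually call __builtin_longjmp (buf, 1);
     else
       ...  control resumes here after the longjmp, with setjmp returning 1

   As checked in the expander below, the second argument to __builtin_longjmp
   must be the constant 1.  */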
675 expand_builtin_longjmp (rtx buf_addr, rtx value)
677 rtx fp, lab, stack, insn, last;
678 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
680 if (setjmp_alias_set == -1)
681 setjmp_alias_set = new_alias_set ();
683 #ifdef POINTERS_EXTEND_UNSIGNED
684 if (GET_MODE (buf_addr) != Pmode)
685 buf_addr = convert_memory_address (Pmode, buf_addr);
688 buf_addr = force_reg (Pmode, buf_addr);
690 /* We used to store value in static_chain_rtx, but that fails if pointers
691 are smaller than integers. We instead require that the user must pass
692 a second argument of 1, because that is what builtin_setjmp will
693 return. This also makes EH slightly more efficient, since we are no
694 longer copying around a value that we don't care about. */
695 if (value != const1_rtx)
698 current_function_calls_longjmp = 1;
700 last = get_last_insn ();
701 #ifdef HAVE_builtin_longjmp
702 if (HAVE_builtin_longjmp)
703 emit_insn (gen_builtin_longjmp (buf_addr));
707 fp = gen_rtx_MEM (Pmode, buf_addr);
708 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
709 GET_MODE_SIZE (Pmode)));
711 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
712 2 * GET_MODE_SIZE (Pmode)));
713 set_mem_alias_set (fp, setjmp_alias_set);
714 set_mem_alias_set (lab, setjmp_alias_set);
715 set_mem_alias_set (stack, setjmp_alias_set);
717 /* Pick up FP, label, and SP from the block and jump. This code is
718 from expand_goto in stmt.c; see there for detailed comments. */
719 #if HAVE_nonlocal_goto
720 if (HAVE_nonlocal_goto)
721 /* We have to pass a value to the nonlocal_goto pattern that will
722 get copied into the static_chain pointer, but it does not matter
723 what that value is, because builtin_setjmp does not use it. */
724 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
728 lab = copy_to_reg (lab);
730 emit_move_insn (hard_frame_pointer_rtx, fp);
731 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
733 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
734 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
735 emit_indirect_jump (lab);
739 /* Search backwards and mark the jump insn as a non-local goto.
740 Note that this precludes the use of __builtin_longjmp to a
741 __builtin_setjmp target in the same function. However, we've
742 already cautioned the user that these functions are for
743 internal exception handling use only. */
744 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
748 if (GET_CODE (insn) == JUMP_INSN)
750 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
754 else if (GET_CODE (insn) == CALL_INSN)
759 /* Expand a call to __builtin_prefetch. For a target that does not support
760 data prefetch, evaluate the memory address argument in case it has side
764 expand_builtin_prefetch (tree arglist)
766 tree arg0, arg1, arg2;
769 if (!validate_arglist (arglist, POINTER_TYPE, 0))
772 arg0 = TREE_VALUE (arglist);
773 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
774 zero (read) and argument 2 (locality) defaults to 3 (high degree of
776 if (TREE_CHAIN (arglist))
778 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
779 if (TREE_CHAIN (TREE_CHAIN (arglist)))
780 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
782 arg2 = build_int_2 (3, 0);
786 arg1 = integer_zero_node;
787 arg2 = build_int_2 (3, 0);
790 /* Argument 0 is an address. */
791 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
793 /* Argument 1 (read/write flag) must be a compile-time constant int. */
794 if (TREE_CODE (arg1) != INTEGER_CST)
796 error ("second arg to `__builtin_prefetch' must be a constant");
797 arg1 = integer_zero_node;
799 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
800 /* Argument 1 must be either zero or one. */
801 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
803 warning ("invalid second arg to __builtin_prefetch; using zero");
807 /* Argument 2 (locality) must be a compile-time constant int. */
808 if (TREE_CODE (arg2) != INTEGER_CST)
810 error ("third arg to `__builtin_prefetch' must be a constant");
811 arg2 = integer_zero_node;
813 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
814 /* Argument 2 must be 0, 1, 2, or 3. */
815 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
817 warning ("invalid third arg to __builtin_prefetch; using zero");
824 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
826 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
827 || (GET_MODE (op0) != Pmode))
829 #ifdef POINTERS_EXTEND_UNSIGNED
830 if (GET_MODE (op0) != Pmode)
831 op0 = convert_memory_address (Pmode, op0);
833 op0 = force_reg (Pmode, op0);
835 emit_insn (gen_prefetch (op0, op1, op2));
839 op0 = protect_from_queue (op0, 0);
840 /* Don't do anything with direct references to volatile memory, but
841 generate code to handle other side effects. */
842 if (GET_CODE (op0) != MEM && side_effects_p (op0))
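/* Illustrative usage (user code, not part of this file): the expander above
   handles calls such as

     for (i = 0; i < n; i++)
       {
         __builtin_prefetch (&a[i + 16], 0, 3);   // rw = 0 (read), locality = 3
         sum += a[i];
       }

   Both the read/write flag and the locality argument must be compile-time
   constants; invalid values are diagnosed and replaced with zero.  */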
846 /* Get a MEM rtx for expression EXP which is the address of an operand
847 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
850 get_memory_rtx (tree exp)
852 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
855 #ifdef POINTERS_EXTEND_UNSIGNED
856 if (GET_MODE (addr) != Pmode)
857 addr = convert_memory_address (Pmode, addr);
860 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
862 /* Get an expression we can use to find the attributes to assign to MEM.
863 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
864 we can. First remove any nops. */
865 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
866 || TREE_CODE (exp) == NON_LVALUE_EXPR)
867 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
868 exp = TREE_OPERAND (exp, 0);
870 if (TREE_CODE (exp) == ADDR_EXPR)
872 exp = TREE_OPERAND (exp, 0);
873 set_mem_attributes (mem, exp, 0);
875 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
877 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
878 /* memcpy, memset and other builtin stringops can alias with anything. */
879 set_mem_alias_set (mem, 0);
885 /* Built-in functions to perform an untyped call and return. */
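/* Illustrative usage (user code, not part of this file): the machinery below
   backs the __builtin_apply_args, __builtin_apply and __builtin_return
   builtins, which let a wrapper forward a call whose arguments it never
   inspects, e.g.

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) target_fn, args, 128);
     __builtin_return (res);

   Here `target_fn' and the 128-byte argument-size guess are made up for the
   example.  */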
887 /* For each register that may be used for calling a function, this
888 gives a mode used to copy the register's value. VOIDmode indicates
889 the register is not used for calling a function. If the machine
890 has register windows, this gives only the outbound registers.
891 INCOMING_REGNO gives the corresponding inbound register. */
892 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
894 /* For each register that may be used for returning values, this gives
895 a mode used to copy the register's value. VOIDmode indicates the
896 register is not used for returning values. If the machine has
897 register windows, this gives only the outbound registers.
898 INCOMING_REGNO gives the corresponding inbound register. */
899 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
901 /* For each register that may be used for calling a function, this
902 gives the offset of that register into the block returned by
903 __builtin_apply_args. 0 indicates that the register is not
904 used for calling a function. */
905 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
907 /* Return the offset of register REGNO into the block returned by
908 __builtin_apply_args. This is not declared static, since it is
909 needed in objc-act.c. */
912 apply_args_register_offset (int regno)
916 /* Arguments are always put in outgoing registers (in the argument
917 block) if such make sense. */
918 #ifdef OUTGOING_REGNO
919 regno = OUTGOING_REGNO (regno);
921 return apply_args_reg_offset[regno];
924 /* Return the size required for the block returned by __builtin_apply_args,
925 and initialize apply_args_mode. */
928 apply_args_size (void)
930 static int size = -1;
933 enum machine_mode mode;
935 /* The values computed by this function never change. */
938 /* The first value is the incoming arg-pointer. */
939 size = GET_MODE_SIZE (Pmode);
941 /* The second value is the structure value address unless this is
942 passed as an "invisible" first argument. */
943 if (struct_value_rtx)
944 size += GET_MODE_SIZE (Pmode);
946 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
947 if (FUNCTION_ARG_REGNO_P (regno))
949 /* Search for the proper mode for copying this register's
950 value. I'm not sure this is right, but it works so far. */
951 enum machine_mode best_mode = VOIDmode;
953 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
955 mode = GET_MODE_WIDER_MODE (mode))
956 if (HARD_REGNO_MODE_OK (regno, mode)
957 && HARD_REGNO_NREGS (regno, mode) == 1)
960 if (best_mode == VOIDmode)
961 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
963 mode = GET_MODE_WIDER_MODE (mode))
964 if (HARD_REGNO_MODE_OK (regno, mode)
965 && have_insn_for (SET, mode))
968 if (best_mode == VOIDmode)
969 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
971 mode = GET_MODE_WIDER_MODE (mode))
972 if (HARD_REGNO_MODE_OK (regno, mode)
973 && have_insn_for (SET, mode))
976 if (best_mode == VOIDmode)
977 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
979 mode = GET_MODE_WIDER_MODE (mode))
980 if (HARD_REGNO_MODE_OK (regno, mode)
981 && have_insn_for (SET, mode))
985 if (mode == VOIDmode)
988 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
989 if (size % align != 0)
990 size = CEIL (size, align) * align;
991 apply_args_reg_offset[regno] = size;
992 size += GET_MODE_SIZE (mode);
993 apply_args_mode[regno] = mode;
997 apply_args_mode[regno] = VOIDmode;
998 apply_args_reg_offset[regno] = 0;
1004 /* Return the size required for the block returned by __builtin_apply,
1005 and initialize apply_result_mode. */
1008 apply_result_size (void)
1010 static int size = -1;
1012 enum machine_mode mode;
1014 /* The values computed by this function never change. */
1019 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1020 if (FUNCTION_VALUE_REGNO_P (regno))
1022 /* Search for the proper mode for copying this register's
1023 value. I'm not sure this is right, but it works so far. */
1024 enum machine_mode best_mode = VOIDmode;
1026 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1028 mode = GET_MODE_WIDER_MODE (mode))
1029 if (HARD_REGNO_MODE_OK (regno, mode))
1032 if (best_mode == VOIDmode)
1033 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1035 mode = GET_MODE_WIDER_MODE (mode))
1036 if (HARD_REGNO_MODE_OK (regno, mode)
1037 && have_insn_for (SET, mode))
1040 if (best_mode == VOIDmode)
1041 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1043 mode = GET_MODE_WIDER_MODE (mode))
1044 if (HARD_REGNO_MODE_OK (regno, mode)
1045 && have_insn_for (SET, mode))
1048 if (best_mode == VOIDmode)
1049 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1051 mode = GET_MODE_WIDER_MODE (mode))
1052 if (HARD_REGNO_MODE_OK (regno, mode)
1053 && have_insn_for (SET, mode))
1057 if (mode == VOIDmode)
1060 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1061 if (size % align != 0)
1062 size = CEIL (size, align) * align;
1063 size += GET_MODE_SIZE (mode);
1064 apply_result_mode[regno] = mode;
1067 apply_result_mode[regno] = VOIDmode;
1069 /* Allow targets that use untyped_call and untyped_return to override
1070 the size so that machine-specific information can be stored here. */
1071 #ifdef APPLY_RESULT_SIZE
1072 size = APPLY_RESULT_SIZE;
1078 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1079 /* Create a vector describing the result block RESULT. If SAVEP is true,
1080 the result block is used to save the values; otherwise it is used to
1081 restore the values. */
1084 result_vector (int savep, rtx result)
1086 int regno, size, align, nelts;
1087 enum machine_mode mode;
1089 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1092 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1093 if ((mode = apply_result_mode[regno]) != VOIDmode)
1095 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1096 if (size % align != 0)
1097 size = CEIL (size, align) * align;
1098 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1099 mem = adjust_address (result, mode, size);
1100 savevec[nelts++] = (savep
1101 ? gen_rtx_SET (VOIDmode, mem, reg)
1102 : gen_rtx_SET (VOIDmode, reg, mem));
1103 size += GET_MODE_SIZE (mode);
1105 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1107 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1109 /* Save the state required to perform an untyped call with the same
1110 arguments as were passed to the current function. */
1113 expand_builtin_apply_args_1 (void)
1116 int size, align, regno;
1117 enum machine_mode mode;
1119 /* Create a block where the arg-pointer, structure value address,
1120 and argument registers can be saved. */
1121 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1123 /* Walk past the arg-pointer and structure value address. */
1124 size = GET_MODE_SIZE (Pmode);
1125 if (struct_value_rtx)
1126 size += GET_MODE_SIZE (Pmode);
1128 /* Save each register used in calling a function to the block. */
1129 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1130 if ((mode = apply_args_mode[regno]) != VOIDmode)
1134 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1135 if (size % align != 0)
1136 size = CEIL (size, align) * align;
1138 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1140 emit_move_insn (adjust_address (registers, mode, size), tem);
1141 size += GET_MODE_SIZE (mode);
1144 /* Save the arg pointer to the block. */
1145 emit_move_insn (adjust_address (registers, Pmode, 0),
1146 copy_to_reg (virtual_incoming_args_rtx));
1147 size = GET_MODE_SIZE (Pmode);
1149 /* Save the structure value address unless this is passed as an
1150 "invisible" first argument. */
1151 if (struct_value_incoming_rtx)
1153 emit_move_insn (adjust_address (registers, Pmode, size),
1154 copy_to_reg (struct_value_incoming_rtx));
1155 size += GET_MODE_SIZE (Pmode);
1158 /* Return the address of the block. */
1159 return copy_addr_to_reg (XEXP (registers, 0));
1162 /* __builtin_apply_args returns block of memory allocated on
1163 the stack into which is stored the arg pointer, structure
1164 value address, static chain, and all the registers that might
1165 possibly be used in performing a function call. The code is
1166 moved to the start of the function so the incoming values are
1170 expand_builtin_apply_args (void)
1172 /* Don't do __builtin_apply_args more than once in a function.
1173 Save the result of the first call and reuse it. */
1174 if (apply_args_value != 0)
1175 return apply_args_value;
1177 /* When this function is called, it means that registers must be
1178 saved on entry to this function. So we migrate the
1179 call to the first insn of this function. */
1184 temp = expand_builtin_apply_args_1 ();
1188 apply_args_value = temp;
1190 /* Put the insns after the NOTE that starts the function.
1191 If this is inside a start_sequence, make the outer-level insn
1192 chain current, so the code is placed at the start of the
1194 push_topmost_sequence ();
1195 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1196 pop_topmost_sequence ();
1201 /* Perform an untyped call and save the state required to perform an
1202 untyped return of whatever value was returned by the given function. */
1205 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1207 int size, align, regno;
1208 enum machine_mode mode;
1209 rtx incoming_args, result, reg, dest, src, call_insn;
1210 rtx old_stack_level = 0;
1211 rtx call_fusage = 0;
1213 #ifdef POINTERS_EXTEND_UNSIGNED
1214 if (GET_MODE (arguments) != Pmode)
1215 arguments = convert_memory_address (Pmode, arguments);
1218 /* Create a block where the return registers can be saved. */
1219 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1221 /* Fetch the arg pointer from the ARGUMENTS block. */
1222 incoming_args = gen_reg_rtx (Pmode);
1223 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1224 #ifndef STACK_GROWS_DOWNWARD
1225 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1226 incoming_args, 0, OPTAB_LIB_WIDEN);
1229 /* Perform postincrements before actually calling the function. */
1232 /* Push a new argument block and copy the arguments. Do not allow
1233 the (potential) memcpy call below to interfere with our stack
1235 do_pending_stack_adjust ();
1238 /* Save the stack with nonlocal if available. */
1239 #ifdef HAVE_save_stack_nonlocal
1240 if (HAVE_save_stack_nonlocal)
1241 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1244 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1246 /* Push a block of memory onto the stack to store the memory arguments.
1247 Save the address in a register, and copy the memory arguments. ??? I
1248 haven't figured out how the calling convention macros affect this,
1249 but it's likely that the source and/or destination addresses in
1250 the block copy will need updating in machine specific ways. */
1251 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1252 dest = gen_rtx_MEM (BLKmode, dest);
1253 set_mem_align (dest, PARM_BOUNDARY);
1254 src = gen_rtx_MEM (BLKmode, incoming_args);
1255 set_mem_align (src, PARM_BOUNDARY);
1256 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1258 /* Refer to the argument block. */
1260 arguments = gen_rtx_MEM (BLKmode, arguments);
1261 set_mem_align (arguments, PARM_BOUNDARY);
1263 /* Walk past the arg-pointer and structure value address. */
1264 size = GET_MODE_SIZE (Pmode);
1265 if (struct_value_rtx)
1266 size += GET_MODE_SIZE (Pmode);
1268 /* Restore each of the registers previously saved. Make USE insns
1269 for each of these registers for use in making the call. */
1270 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1271 if ((mode = apply_args_mode[regno]) != VOIDmode)
1273 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1274 if (size % align != 0)
1275 size = CEIL (size, align) * align;
1276 reg = gen_rtx_REG (mode, regno);
1277 emit_move_insn (reg, adjust_address (arguments, mode, size));
1278 use_reg (&call_fusage, reg);
1279 size += GET_MODE_SIZE (mode);
1282 /* Restore the structure value address unless this is passed as an
1283 "invisible" first argument. */
1284 size = GET_MODE_SIZE (Pmode);
1285 if (struct_value_rtx)
1287 rtx value = gen_reg_rtx (Pmode);
1288 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1289 emit_move_insn (struct_value_rtx, value);
1290 if (GET_CODE (struct_value_rtx) == REG)
1291 use_reg (&call_fusage, struct_value_rtx);
1292 size += GET_MODE_SIZE (Pmode);
1295 /* All arguments and registers used for the call are set up by now! */
1296 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1298 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1299 and we don't want to load it into a register as an optimization,
1300 because prepare_call_address already did it if it should be done. */
1301 if (GET_CODE (function) != SYMBOL_REF)
1302 function = memory_address (FUNCTION_MODE, function);
1304 /* Generate the actual call instruction and save the return value. */
1305 #ifdef HAVE_untyped_call
1306 if (HAVE_untyped_call)
1307 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1308 result, result_vector (1, result)));
1311 #ifdef HAVE_call_value
1312 if (HAVE_call_value)
1316 /* Locate the unique return register. It is not possible to
1317 express a call that sets more than one return register using
1318 call_value; use untyped_call for that. In fact, untyped_call
1319 only needs to save the return registers in the given block. */
1320 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1321 if ((mode = apply_result_mode[regno]) != VOIDmode)
1324 abort (); /* HAVE_untyped_call required. */
1325 valreg = gen_rtx_REG (mode, regno);
1328 emit_call_insn (GEN_CALL_VALUE (valreg,
1329 gen_rtx_MEM (FUNCTION_MODE, function),
1330 const0_rtx, NULL_RTX, const0_rtx));
1332 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1338 /* Find the CALL insn we just emitted, and attach the register usage
1340 call_insn = last_call_insn ();
1341 add_function_usage_to (call_insn, call_fusage);
1343 /* Restore the stack. */
1344 #ifdef HAVE_save_stack_nonlocal
1345 if (HAVE_save_stack_nonlocal)
1346 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1349 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1353 /* Return the address of the result block. */
1354 return copy_addr_to_reg (XEXP (result, 0));
1357 /* Perform an untyped return. */
1360 expand_builtin_return (rtx result)
1362 int size, align, regno;
1363 enum machine_mode mode;
1365 rtx call_fusage = 0;
1367 #ifdef POINTERS_EXTEND_UNSIGNED
1368 if (GET_MODE (result) != Pmode)
1369 result = convert_memory_address (Pmode, result);
1372 apply_result_size ();
1373 result = gen_rtx_MEM (BLKmode, result);
1375 #ifdef HAVE_untyped_return
1376 if (HAVE_untyped_return)
1378 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1384 /* Restore the return value and note that each value is used. */
1386 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1387 if ((mode = apply_result_mode[regno]) != VOIDmode)
1389 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1390 if (size % align != 0)
1391 size = CEIL (size, align) * align;
1392 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1393 emit_move_insn (reg, adjust_address (result, mode, size));
1395 push_to_sequence (call_fusage);
1396 emit_insn (gen_rtx_USE (VOIDmode, reg));
1397 call_fusage = get_insns ();
1399 size += GET_MODE_SIZE (mode);
1402 /* Put the USE insns before the return. */
1403 emit_insn (call_fusage);
1405 /* Return whatever values were restored by jumping directly to the end
1407 expand_null_return ();
1410 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1412 static enum type_class
1413 type_to_class (tree type)
1415 switch (TREE_CODE (type))
1417 case VOID_TYPE: return void_type_class;
1418 case INTEGER_TYPE: return integer_type_class;
1419 case CHAR_TYPE: return char_type_class;
1420 case ENUMERAL_TYPE: return enumeral_type_class;
1421 case BOOLEAN_TYPE: return boolean_type_class;
1422 case POINTER_TYPE: return pointer_type_class;
1423 case REFERENCE_TYPE: return reference_type_class;
1424 case OFFSET_TYPE: return offset_type_class;
1425 case REAL_TYPE: return real_type_class;
1426 case COMPLEX_TYPE: return complex_type_class;
1427 case FUNCTION_TYPE: return function_type_class;
1428 case METHOD_TYPE: return method_type_class;
1429 case RECORD_TYPE: return record_type_class;
1431 case QUAL_UNION_TYPE: return union_type_class;
1432 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1433 ? string_type_class : array_type_class);
1434 case SET_TYPE: return set_type_class;
1435 case FILE_TYPE: return file_type_class;
1436 case LANG_TYPE: return lang_type_class;
1437 default: return no_type_class;
1441 /* Expand a call to __builtin_classify_type with arguments found in
1445 expand_builtin_classify_type (tree arglist)
1448 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1449 return GEN_INT (no_type_class);
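/* Illustrative note (not part of the original source): in user code,
   __builtin_classify_type (EXPR) evaluates to the type_class of EXPR's
   type, so for instance __builtin_classify_type (1.0) yields
   real_type_class and __builtin_classify_type ((int *) 0) yields
   pointer_type_class.  */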
1452 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1455 expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
1461 arglist = TREE_VALUE (arglist);
1463 /* We have taken care of the easy cases during constant folding. This
1464 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1465 get a chance to see if it can deduce whether ARGLIST is constant.
1466 If CSE isn't going to run, of course, don't bother waiting. */
1468 if (cse_not_expected)
1471 current_function_calls_constant_p = 1;
1473 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1474 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1478 /* This helper macro, meant to be used in mathfn_built_in below,
1479 determines which among a set of three builtin math functions is
1480 appropriate for a given type mode. The `F' and `L' cases are
1481 automatically generated from the `double' case. */
1482 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1483 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1484 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1485 fcodel = BUILT_IN_MATHFN##L ; break;
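/* For example (expansion shown for illustration only), CASE_MATHFN
   (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so each entry in the switch below covers the double, float and long
   double variants of one math function.  */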
1487 /* Return the mathematical function equivalent to FN but operating directly
1488 on TYPE, if available. If we can't do the conversion, return zero. */
1490 mathfn_built_in (tree type, enum built_in_function fn)
1492 const enum machine_mode type_mode = TYPE_MODE (type);
1493 enum built_in_function fcode, fcodef, fcodel;
1497 CASE_MATHFN (BUILT_IN_ACOS)
1498 CASE_MATHFN (BUILT_IN_ACOSH)
1499 CASE_MATHFN (BUILT_IN_ASIN)
1500 CASE_MATHFN (BUILT_IN_ASINH)
1501 CASE_MATHFN (BUILT_IN_ATAN)
1502 CASE_MATHFN (BUILT_IN_ATAN2)
1503 CASE_MATHFN (BUILT_IN_ATANH)
1504 CASE_MATHFN (BUILT_IN_CBRT)
1505 CASE_MATHFN (BUILT_IN_CEIL)
1506 CASE_MATHFN (BUILT_IN_COPYSIGN)
1507 CASE_MATHFN (BUILT_IN_COS)
1508 CASE_MATHFN (BUILT_IN_COSH)
1509 CASE_MATHFN (BUILT_IN_DREM)
1510 CASE_MATHFN (BUILT_IN_ERF)
1511 CASE_MATHFN (BUILT_IN_ERFC)
1512 CASE_MATHFN (BUILT_IN_EXP)
1513 CASE_MATHFN (BUILT_IN_EXP10)
1514 CASE_MATHFN (BUILT_IN_EXP2)
1515 CASE_MATHFN (BUILT_IN_EXPM1)
1516 CASE_MATHFN (BUILT_IN_FABS)
1517 CASE_MATHFN (BUILT_IN_FDIM)
1518 CASE_MATHFN (BUILT_IN_FLOOR)
1519 CASE_MATHFN (BUILT_IN_FMA)
1520 CASE_MATHFN (BUILT_IN_FMAX)
1521 CASE_MATHFN (BUILT_IN_FMIN)
1522 CASE_MATHFN (BUILT_IN_FMOD)
1523 CASE_MATHFN (BUILT_IN_FREXP)
1524 CASE_MATHFN (BUILT_IN_GAMMA)
1525 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1526 CASE_MATHFN (BUILT_IN_HYPOT)
1527 CASE_MATHFN (BUILT_IN_ILOGB)
1528 CASE_MATHFN (BUILT_IN_INF)
1529 CASE_MATHFN (BUILT_IN_J0)
1530 CASE_MATHFN (BUILT_IN_J1)
1531 CASE_MATHFN (BUILT_IN_JN)
1532 CASE_MATHFN (BUILT_IN_LDEXP)
1533 CASE_MATHFN (BUILT_IN_LGAMMA)
1534 CASE_MATHFN (BUILT_IN_LLRINT)
1535 CASE_MATHFN (BUILT_IN_LLROUND)
1536 CASE_MATHFN (BUILT_IN_LOG)
1537 CASE_MATHFN (BUILT_IN_LOG10)
1538 CASE_MATHFN (BUILT_IN_LOG1P)
1539 CASE_MATHFN (BUILT_IN_LOG2)
1540 CASE_MATHFN (BUILT_IN_LOGB)
1541 CASE_MATHFN (BUILT_IN_LRINT)
1542 CASE_MATHFN (BUILT_IN_LROUND)
1543 CASE_MATHFN (BUILT_IN_MODF)
1544 CASE_MATHFN (BUILT_IN_NAN)
1545 CASE_MATHFN (BUILT_IN_NANS)
1546 CASE_MATHFN (BUILT_IN_NEARBYINT)
1547 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1548 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1549 CASE_MATHFN (BUILT_IN_POW)
1550 CASE_MATHFN (BUILT_IN_POW10)
1551 CASE_MATHFN (BUILT_IN_REMAINDER)
1552 CASE_MATHFN (BUILT_IN_REMQUO)
1553 CASE_MATHFN (BUILT_IN_RINT)
1554 CASE_MATHFN (BUILT_IN_ROUND)
1555 CASE_MATHFN (BUILT_IN_SCALB)
1556 CASE_MATHFN (BUILT_IN_SCALBLN)
1557 CASE_MATHFN (BUILT_IN_SCALBN)
1558 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1559 CASE_MATHFN (BUILT_IN_SIN)
1560 CASE_MATHFN (BUILT_IN_SINCOS)
1561 CASE_MATHFN (BUILT_IN_SINH)
1562 CASE_MATHFN (BUILT_IN_SQRT)
1563 CASE_MATHFN (BUILT_IN_TAN)
1564 CASE_MATHFN (BUILT_IN_TANH)
1565 CASE_MATHFN (BUILT_IN_TGAMMA)
1566 CASE_MATHFN (BUILT_IN_TRUNC)
1567 CASE_MATHFN (BUILT_IN_Y0)
1568 CASE_MATHFN (BUILT_IN_Y1)
1569 CASE_MATHFN (BUILT_IN_YN)
1575 if (type_mode == TYPE_MODE (double_type_node))
1576 return implicit_built_in_decls[fcode];
1577 else if (type_mode == TYPE_MODE (float_type_node))
1578 return implicit_built_in_decls[fcodef];
1579 else if (type_mode == TYPE_MODE (long_double_type_node))
1580 return implicit_built_in_decls[fcodel];
1585 /* If errno must be maintained, expand the RTL to check if the result,
1586 TARGET, of a built-in function call, EXP, is NaN, and if so set errno to EDOM.  */
1590 expand_errno_check (tree exp, rtx target)
1592 rtx lab = gen_label_rtx ();
1594 /* Test the result; if it is NaN, set errno=EDOM because
1595 the argument was not in the domain. */
1596 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1600 /* If this built-in doesn't throw an exception, set errno directly. */
1601 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1603 #ifdef GEN_ERRNO_RTX
1604 rtx errno_rtx = GEN_ERRNO_RTX;
1607 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1609 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1615 /* We can't set errno=EDOM directly; let the library call do it.
1616 Pop the arguments right away in case the call gets deleted. */
1618 expand_call (exp, target, 0);
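/* Illustrative note (not part of the original source): the callers below
   clear their errno_set flag unless flag_errno_math is set and NaNs are
   honored, so with -fno-math-errno this check is omitted entirely.
   Otherwise an inline-expanded call such as sqrt (-1.0) still leaves
   errno == EDOM, matching the C library's behavior.  */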
1624 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1625 Return 0 if a normal call should be emitted rather than expanding the
1626 function in-line. EXP is the expression that is a call to the builtin
1627 function; if convenient, the result should be placed in TARGET.
1628 SUBTARGET may be used as the target for computing one of EXP's operands. */
1631 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1633 optab builtin_optab;
1634 rtx op0, insns, before_call;
1635 tree fndecl = get_callee_fndecl (exp);
1636 tree arglist = TREE_OPERAND (exp, 1);
1637 enum machine_mode mode;
1638 bool errno_set = false;
1641 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1644 arg = TREE_VALUE (arglist);
1646 switch (DECL_FUNCTION_CODE (fndecl))
1651 builtin_optab = sin_optab; break;
1655 builtin_optab = cos_optab; break;
1657 case BUILT_IN_SQRTF:
1658 case BUILT_IN_SQRTL:
1659 errno_set = ! tree_expr_nonnegative_p (arg);
1660 builtin_optab = sqrt_optab;
1665 errno_set = true; builtin_optab = exp_optab; break;
1669 errno_set = true; builtin_optab = log_optab; break;
1673 builtin_optab = tan_optab; break;
1675 case BUILT_IN_ATANF:
1676 case BUILT_IN_ATANL:
1677 builtin_optab = atan_optab; break;
1678 case BUILT_IN_FLOOR:
1679 case BUILT_IN_FLOORF:
1680 case BUILT_IN_FLOORL:
1681 builtin_optab = floor_optab; break;
1683 case BUILT_IN_CEILF:
1684 case BUILT_IN_CEILL:
1685 builtin_optab = ceil_optab; break;
1686 case BUILT_IN_TRUNC:
1687 case BUILT_IN_TRUNCF:
1688 case BUILT_IN_TRUNCL:
1689 builtin_optab = trunc_optab; break;
1690 case BUILT_IN_ROUND:
1691 case BUILT_IN_ROUNDF:
1692 case BUILT_IN_ROUNDL:
1693 builtin_optab = round_optab; break;
1694 case BUILT_IN_NEARBYINT:
1695 case BUILT_IN_NEARBYINTF:
1696 case BUILT_IN_NEARBYINTL:
1697 builtin_optab = nearbyint_optab; break;
1702 /* Make a suitable register to place result in. */
1703 mode = TYPE_MODE (TREE_TYPE (exp));
1705 if (! flag_errno_math || ! HONOR_NANS (mode))
1708 /* Before working hard, check whether the instruction is available. */
1709 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1711 target = gen_reg_rtx (mode);
1713 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1714 need to expand the argument again. This way, we will not perform
1715 side-effects more than once. */
1716 narg = save_expr (arg);
1719 arglist = build_tree_list (NULL_TREE, arg);
1720 exp = build_function_call_expr (fndecl, arglist);
1723 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1728 /* Compute into TARGET.
1729 Set TARGET to wherever the result comes back. */
1730 target = expand_unop (mode, builtin_optab, op0, target, 0);
1735 expand_errno_check (exp, target);
1737 /* Output the entire sequence. */
1738 insns = get_insns ();
1744 /* If we were unable to expand via the builtin, stop the sequence
1745 (without outputting the insns) and call the library function
1746 with the stabilized argument list. */
1750 before_call = get_last_insn ();
1752 target = expand_call (exp, target, target == const0_rtx);
1754 /* If this is a sqrt operation and we don't care about errno, try to
1755 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1756 This allows the semantics of the libcall to be visible to the RTL
1758 if (builtin_optab == sqrt_optab && !errno_set)
1760 /* Search backwards through the insns emitted by expand_call looking
1761 for the instruction with the REG_RETVAL note. */
1762 rtx last = get_last_insn ();
1763 while (last != before_call)
1765 if (find_reg_note (last, REG_RETVAL, NULL))
1767 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1768 /* Check that the REG_EQUAL note is an EXPR_LIST with
1769 two elements, i.e. symbol_ref(sqrt) and the operand. */
1771 && GET_CODE (note) == EXPR_LIST
1772 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1773 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1774 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1776 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1777 /* Check operand is a register with expected mode. */
1779 && GET_CODE (operand) == REG
1780 && GET_MODE (operand) == mode)
1782 /* Replace the REG_EQUAL note with a SQRT rtx. */
1783 rtx equiv = gen_rtx_SQRT (mode, operand);
1784 set_unique_reg_note (last, REG_EQUAL, equiv);
1789 last = PREV_INSN (last);
1796 /* Expand a call to the builtin binary math functions (pow and atan2).
1797 Return 0 if a normal call should be emitted rather than expanding the
1798 function in-line. EXP is the expression that is a call to the builtin
1799 function; if convenient, the result should be placed in TARGET.
1800 SUBTARGET may be used as the target for computing one of EXP's
1804 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1806 optab builtin_optab;
1807 rtx op0, op1, insns;
1808 tree fndecl = get_callee_fndecl (exp);
1809 tree arglist = TREE_OPERAND (exp, 1);
1810 tree arg0, arg1, temp, narg;
1811 enum machine_mode mode;
1812 bool errno_set = true;
1815 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1818 arg0 = TREE_VALUE (arglist);
1819 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1821 switch (DECL_FUNCTION_CODE (fndecl))
1826 builtin_optab = pow_optab; break;
1827 case BUILT_IN_ATAN2:
1828 case BUILT_IN_ATAN2F:
1829 case BUILT_IN_ATAN2L:
1830 builtin_optab = atan2_optab; break;
1835 /* Make a suitable register to place result in. */
1836 mode = TYPE_MODE (TREE_TYPE (exp));
1838 /* Before working hard, check whether the instruction is available. */
1839 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1842 target = gen_reg_rtx (mode);
1844 if (! flag_errno_math || ! HONOR_NANS (mode))
1847 /* Always stabilize the argument list. */
1848 narg = save_expr (arg1);
1851 temp = build_tree_list (NULL_TREE, narg);
1855 temp = TREE_CHAIN (arglist);
1857 narg = save_expr (arg0);
1860 arglist = tree_cons (NULL_TREE, narg, temp);
1864 arglist = tree_cons (NULL_TREE, arg0, temp);
1867 exp = build_function_call_expr (fndecl, arglist);
1869 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1870 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1875 /* Compute into TARGET.
1876 Set TARGET to wherever the result comes back. */
1877 target = expand_binop (mode, builtin_optab, op0, op1,
1878 target, 0, OPTAB_DIRECT);
1880 /* If we were unable to expand via the builtin, stop the sequence
1881 (without outputting the insns) and call the library function
1882 with the stabilized argument list. */
1886 return expand_call (exp, target, target == const0_rtx);
1890 expand_errno_check (exp, target);
1892 /* Output the entire sequence. */
1893 insns = get_insns ();
1900 /* To evaluate powi(x,n), the floating point value x raised to the
1901 constant integer exponent n, we use a hybrid algorithm that
1902 combines the "window method" with look-up tables. For an
1903 introduction to exponentiation algorithms and "addition chains",
1904 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
1905 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
1906 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
1907 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
1909 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
1910 multiplications to inline before calling the system library's pow
1911 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
1912 so this default never requires calling pow, powf or powl. */
1914 #ifndef POWI_MAX_MULTS
1915 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
1918 /* The size of the "optimal power tree" lookup table. All
1919 exponents less than this value are simply looked up in the
1920 powi_table below. This threshold is also used to size the
1921 cache of pseudo registers that hold intermediate results. */
1922 #define POWI_TABLE_SIZE 256
1924 /* The size, in bits, of the window used in the "window method"
1925 exponentiation algorithm. This is equivalent to a radix of
1926 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
1927 #define POWI_WINDOW_SIZE 3
1929 /* The following table is an efficient representation of an
1930 "optimal power tree". For each value, i, the corresponding
1931 value, j, in the table states that an optimal evaluation
1932 sequence for calculating pow(x,i) can be found by evaluating
1933 pow(x,j)*pow(x,i-j). An optimal power tree for the first
1934 100 integers is given in Knuth's "Seminumerical Algorithms". */
1936 static const unsigned char powi_table[POWI_TABLE_SIZE] =
1938 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
1939 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
1940 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
1941 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
1942 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
1943 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
1944 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
1945 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
1946 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
1947 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
1948 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
1949 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
1950 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
1951 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
1952 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
1953 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
1954 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
1955 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
1956 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
1957 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
1958 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
1959 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
1960 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
1961 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
1962 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
1963 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
1964 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
1965 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
1966 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
1967 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
1968 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
1969 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
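/* A minimal reading aid (not part of GCC; names are hypothetical): the
   first 16 entries of powi_table, copied from above, drive the recurrence
   pow(x,i) = pow(x,j) * pow(x,i-j) with j = powi_table[i].  For i == 13
   the powers computed are x^2, x^3, x^5, x^10 and finally x^13; the real
   expander additionally caches each intermediate power so it is built
   only once.  Valid for i < 16 only.  */

static const unsigned char powi_table_head[16] =
{
  0, 1, 1, 2, 2, 3, 3, 4, 4, 6, 5, 6, 6, 10, 7, 9
};

static double
small_powi (double x, unsigned int i)
{
  if (i == 0)
    return 1.0;
  if (i == 1)
    return x;
  return small_powi (x, powi_table_head[i])
	 * small_powi (x, i - powi_table_head[i]);
}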
1973 /* Return the number of multiplications required to calculate
1974 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
1975 subroutine of powi_cost. CACHE is an array indicating
1976 which exponents have already been calculated. */
1979 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
1981 /* If we've already calculated this exponent, then this evaluation
1982 doesn't require any additional multiplications. */
1987 return powi_lookup_cost (n - powi_table[n], cache)
1988 + powi_lookup_cost (powi_table[n], cache) + 1;
1991 /* Return the number of multiplications required to calculate
1992 powi(x,n) for an arbitrary x, given the exponent N. This
1993 function needs to be kept in sync with expand_powi below. */
1996 powi_cost (HOST_WIDE_INT n)
1998 bool cache[POWI_TABLE_SIZE];
1999 unsigned HOST_WIDE_INT digit;
2000 unsigned HOST_WIDE_INT val;
2006 /* Ignore the reciprocal when calculating the cost. */
2007 val = (n < 0) ? -n : n;
2009 /* Initialize the exponent cache. */
2010 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2015 while (val >= POWI_TABLE_SIZE)
2019 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2020 result += powi_lookup_cost (digit, cache)
2021 + POWI_WINDOW_SIZE + 1;
2022 val >>= POWI_WINDOW_SIZE;
2031 return result + powi_lookup_cost (val, cache);
2034 /* Recursive subroutine of expand_powi. This function takes the array,
2035 CACHE, of already calculated exponents and an exponent N and returns
2036 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2039 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2041 unsigned HOST_WIDE_INT digit;
2045 if (n < POWI_TABLE_SIZE)
2050 target = gen_reg_rtx (mode);
2053 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2054 op1 = expand_powi_1 (mode, powi_table[n], cache);
2058 target = gen_reg_rtx (mode);
2059 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2060 op0 = expand_powi_1 (mode, n - digit, cache);
2061 op1 = expand_powi_1 (mode, digit, cache);
2065 target = gen_reg_rtx (mode);
2066 op0 = expand_powi_1 (mode, n >> 1, cache);
2070 result = expand_mult (mode, op0, op1, target, 0);
2071 if (result != target)
2072 emit_move_insn (target, result);
2076 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2077 floating point operand in mode MODE, and N is the exponent. This
2078 function needs to be kept in sync with powi_cost above. */
2081 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2083 unsigned HOST_WIDE_INT val;
2084 rtx cache[POWI_TABLE_SIZE];
2088 return CONST1_RTX (mode);
2090 val = (n < 0) ? -n : n;
2092 memset (cache, 0, sizeof (cache));
2095 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2097 /* If the original exponent was negative, reciprocate the result. */
2099 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2100 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2105 /* Expand a call to the pow built-in mathematical function. Return 0 if
2106 a normal call should be emitted rather than expanding the function
2107 in-line. EXP is the expression that is a call to the builtin
2108 function; if convenient, the result should be placed in TARGET. */
2111 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2113 tree arglist = TREE_OPERAND (exp, 1);
2116 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2119 arg0 = TREE_VALUE (arglist);
2120 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2122 if (TREE_CODE (arg1) == REAL_CST
2123 && ! TREE_CONSTANT_OVERFLOW (arg1))
2125 REAL_VALUE_TYPE cint;
2129 c = TREE_REAL_CST (arg1);
2130 n = real_to_integer (&c);
2131 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2132 if (real_identical (&c, &cint))
2134 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2135 Otherwise, check the number of multiplications required.
2136 Note that pow never sets errno for an integer exponent. */
2137 if ((n >= -1 && n <= 2)
2138 || (flag_unsafe_math_optimizations
2140 && powi_cost (n) <= POWI_MAX_MULTS))
2142 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2143 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2144 op = force_reg (mode, op);
2145 return expand_powi (op, mode, n);
2149 return expand_builtin_mathfn_2 (exp, target, NULL_RTX);
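/* Illustrative only (not GCC code; the powi_cost bound and range/NaN
   checks are omitted): the constant-exponent decision above, restated in
   plain C.  pow (x, 2.0) is always turned into x*x, while something like
   pow (x, 16.0) becomes repeated multiplications only under
   -funsafe-math-optimizations and only while powi_cost stays within
   POWI_MAX_MULTS.  */

static int
pow_const_exponent_inlinable (double exponent, int unsafe_math_optimizations)
{
  long n = (long) exponent;

  /* Only whole-number exponents are candidates for expand_powi.  */
  if ((double) n != exponent)
    return 0;

  /* -1, 0, 1 and 2 are always exact and never set errno.  */
  if (n >= -1 && n <= 2)
    return 1;

  /* Anything else additionally needs the unsafe-math flag (and, in the
     real code, an acceptable powi_cost).  */
  return unsafe_math_optimizations;
}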
2152 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
2153 if we failed; the caller should emit a normal call. Otherwise
2154 try to get the result in TARGET, if convenient. */
2157 expand_builtin_strlen (tree arglist, rtx target,
2158 enum machine_mode target_mode)
2160 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2165 tree len, src = TREE_VALUE (arglist);
2166 rtx result, src_reg, char_rtx, before_strlen;
2167 enum machine_mode insn_mode = target_mode, char_mode;
2168 enum insn_code icode = CODE_FOR_nothing;
2171 /* If the length can be computed at compile-time, return it. */
2172 len = c_strlen (src, 0);
2174 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2176 /* If the length can be computed at compile-time and is a constant
2177 integer, but there are side-effects in src, evaluate
2178 src for side-effects, then return len.
2179 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2180 can be optimized into: i++; x = 3; */
2181 len = c_strlen (src, 1);
2182 if (len && TREE_CODE (len) == INTEGER_CST)
2184 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2185 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2188 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2190 /* If SRC is not a pointer type, don't do this operation inline. */
2194 /* Bail out if we can't compute strlen in the right mode. */
2195 while (insn_mode != VOIDmode)
2197 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2198 if (icode != CODE_FOR_nothing)
2201 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2203 if (insn_mode == VOIDmode)
2206 /* Make a place to write the result of the instruction. */
2209 && GET_CODE (result) == REG
2210 && GET_MODE (result) == insn_mode
2211 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2212 result = gen_reg_rtx (insn_mode);
2214 /* Make a place to hold the source address. We will not expand
2215 the actual source until we are sure that the expansion will
2216 not fail -- there are trees that cannot be expanded twice. */
2217 src_reg = gen_reg_rtx (Pmode);
2219 /* Mark the beginning of the strlen sequence so we can emit the
2220 source operand later. */
2221 before_strlen = get_last_insn ();
2223 char_rtx = const0_rtx;
2224 char_mode = insn_data[(int) icode].operand[2].mode;
2225 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2227 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2229 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2230 char_rtx, GEN_INT (align));
2235 /* Now that we are assured of success, expand the source. */
2237 pat = memory_address (BLKmode,
2238 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
2240 emit_move_insn (src_reg, pat);
2245 emit_insn_after (pat, before_strlen);
2247 emit_insn_before (pat, get_insns ());
2249 /* Return the value in the proper mode for this function. */
2250 if (GET_MODE (result) == target_mode)
2252 else if (target != 0)
2253 convert_move (target, result, 0);
2255 target = convert_to_mode (target_mode, result, 0);
2261 /* Expand a call to the strstr builtin. Return 0 if we failed; the
2262 caller should emit a normal call. Otherwise try to get the result
2263 in TARGET, if convenient (and in mode MODE if that's convenient). */
2266 expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
2268 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2272 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2274 const char *p1, *p2;
2283 const char *r = strstr (p1, p2);
2288 /* Return an offset into the constant string argument. */
2289 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2290 s1, ssize_int (r - p1))),
2291 target, mode, EXPAND_NORMAL);
2295 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2300 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2304 /* New argument list transforming strstr(s1, s2) to
2305 strchr(s1, s2[0]). */
2307 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2308 arglist = tree_cons (NULL_TREE, s1, arglist);
2309 return expand_expr (build_function_call_expr (fn, arglist),
2310 target, mode, EXPAND_NORMAL);
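/* Standalone illustration (not part of GCC; the helper name is
   hypothetical, include shown only for self-containment): the
   source-level identities the strstr expander above relies on.
   Returns nonzero when they hold.  */

#include <string.h>

static int
strstr_identities_demo (void)
{
  static const char s[] = "haystack";

  return strstr (s, "") == s			/* empty needle -> s itself */
	 && strstr (s, "y") == strchr (s, 'y')	/* one-char needle -> strchr */
	 && strstr (s, "stack") == s + 3;	/* both constant -> fixed offset */
}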
2314 /* Expand a call to the strchr builtin. Return 0 if we failed; the
2315 caller should emit a normal call. Otherwise try to get the result
2316 in TARGET, if convenient (and in mode MODE if that's convenient). */
2319 expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
2321 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2325 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2328 if (TREE_CODE (s2) != INTEGER_CST)
2337 if (target_char_cast (s2, &c))
2345 /* Return an offset into the constant string argument. */
2346 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2347 s1, ssize_int (r - p1))),
2348 target, mode, EXPAND_NORMAL);
2351 /* FIXME: Should use the strchrM optab here so that ports can optimize
2357 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
2358 caller should emit a normal call. Otherwise try to get the result
2359 in TARGET, if convenient (and in mode MODE if that's convenient). */
2362 expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
2364 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2368 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2372 if (TREE_CODE (s2) != INTEGER_CST)
2381 if (target_char_cast (s2, &c))
2384 r = strrchr (p1, c);
2389 /* Return an offset into the constant string argument. */
2390 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2391 s1, ssize_int (r - p1))),
2392 target, mode, EXPAND_NORMAL);
2395 if (! integer_zerop (s2))
2398 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2402 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2403 return expand_expr (build_function_call_expr (fn, arglist),
2404 target, mode, EXPAND_NORMAL);
2408 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
2409 caller should emit a normal call. Otherwise try to get the result
2410 in TARGET, if convenient (and in mode MODE if that's convenient). */
2413 expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
2415 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2419 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2421 const char *p1, *p2;
2430 const char *r = strpbrk (p1, p2);
2435 /* Return an offset into the constant string argument. */
2436 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2437 s1, ssize_int (r - p1))),
2438 target, mode, EXPAND_NORMAL);
2443 /* strpbrk(x, "") == NULL.
2444 Evaluate and ignore the arguments in case they had side-effects. */
2446 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2451 return 0; /* Really call strpbrk. */
2453 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2457 /* New argument list transforming strpbrk(s1, s2) to
2458 strchr(s1, s2[0]). */
2460 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2461 arglist = tree_cons (NULL_TREE, s1, arglist);
2462 return expand_expr (build_function_call_expr (fn, arglist),
2463 target, mode, EXPAND_NORMAL);
2467 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2468 bytes from constant string DATA + OFFSET and return it as a target constant. */
2472 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2473 enum machine_mode mode)
2475 const char *str = (const char *) data;
2478 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2479 > strlen (str) + 1))
2480 abort (); /* Attempt to read past the end of constant string. */
2482 return c_readstr (str + offset, mode);
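/* A standalone sketch (not part of GCC; the helper is hypothetical) of
   what "reading part of a constant string as a target constant" means:
   SIZE bytes of STR starting at OFFSET are packed into an integer.  The
   sketch packs least-significant byte first, a little-endian assumption
   made only for illustration; the real c_readstr honours the target's
   byte and word order.  Like the callback above, it assumes
   OFFSET + SIZE stays within the string.  */

static unsigned long
read_str_as_int_sketch (const char *str, int offset, int size)
{
  unsigned long value = 0;
  int i;

  for (i = size - 1; i >= 0; i--)
    value = (value << 8) | (unsigned char) str[offset + i];
  return value;
}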
2485 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2486 Return 0 if we failed; the caller should emit a normal call.
2487 Otherwise try to get the result in TARGET, if convenient (and in
2488 mode MODE if that's convenient). */
2490 expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
2492 if (!validate_arglist (arglist,
2493 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2497 tree dest = TREE_VALUE (arglist);
2498 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2499 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2500 const char *src_str;
2501 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2502 unsigned int dest_align
2503 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2504 rtx dest_mem, src_mem, dest_addr, len_rtx;
2506 /* If DEST is not a pointer type, call the normal function. */
2507 if (dest_align == 0)
2510 /* If the LEN parameter is zero, return DEST. */
2511 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2513 /* Evaluate and ignore SRC in case it has side-effects. */
2514 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2515 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2518 /* If SRC is not a pointer type, don't do this
2519 operation in-line. */
2523 dest_mem = get_memory_rtx (dest);
2524 set_mem_align (dest_mem, dest_align);
2525 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2526 src_str = c_getstr (src);
2528 /* If SRC is a string constant and block move would be done
2529 by pieces, we can avoid loading the string from memory
2530 and only store the computed constants. */
2532 && GET_CODE (len_rtx) == CONST_INT
2533 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2534 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2535 (void *) src_str, dest_align))
2537 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2538 builtin_memcpy_read_str,
2539 (void *) src_str, dest_align, 0);
2540 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2541 #ifdef POINTERS_EXTEND_UNSIGNED
2542 if (GET_MODE (dest_mem) != ptr_mode)
2543 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2548 src_mem = get_memory_rtx (src);
2549 set_mem_align (src_mem, src_align);
2551 /* Copy word part most expediently. */
2552 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2557 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2558 #ifdef POINTERS_EXTEND_UNSIGNED
2559 if (GET_MODE (dest_addr) != ptr_mode)
2560 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2567 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2568 Return 0 if we failed; the caller should emit a normal call.
2569 Otherwise try to get the result in TARGET, if convenient (and in
2570 mode MODE if that's convenient). If ENDP is 0 return the
2571 destination pointer, if ENDP is 1 return the end pointer ala
2572 mempcpy, and if ENDP is 2 return the end pointer minus one ala stpcpy. */
2576 expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
2579 if (!validate_arglist (arglist,
2580 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2582 /* If return value is ignored, transform mempcpy into memcpy. */
2583 else if (target == const0_rtx)
2585 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2590 return expand_expr (build_function_call_expr (fn, arglist),
2591 target, mode, EXPAND_NORMAL);
2595 tree dest = TREE_VALUE (arglist);
2596 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2597 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2598 const char *src_str;
2599 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2600 unsigned int dest_align
2601 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2602 rtx dest_mem, src_mem, len_rtx;
2604 /* If DEST is not a pointer type or LEN is not constant,
2605 call the normal function. */
2606 if (dest_align == 0 || !host_integerp (len, 1))
2609 /* If the LEN parameter is zero, return DEST. */
2610 if (tree_low_cst (len, 1) == 0)
2612 /* Evaluate and ignore SRC in case it has side-effects. */
2613 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2614 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2617 /* If SRC is not a pointer type, don't do this
2618 operation in-line. */
2622 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2623 src_str = c_getstr (src);
2625 /* If SRC is a string constant and block move would be done
2626 by pieces, we can avoid loading the string from memory
2627 and only store the computed constants. */
2629 && GET_CODE (len_rtx) == CONST_INT
2630 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2631 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2632 (void *) src_str, dest_align))
2634 dest_mem = get_memory_rtx (dest);
2635 set_mem_align (dest_mem, dest_align);
2636 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2637 builtin_memcpy_read_str,
2638 (void *) src_str, dest_align, endp);
2639 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2640 #ifdef POINTERS_EXTEND_UNSIGNED
2641 if (GET_MODE (dest_mem) != ptr_mode)
2642 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2647 if (GET_CODE (len_rtx) == CONST_INT
2648 && can_move_by_pieces (INTVAL (len_rtx),
2649 MIN (dest_align, src_align)))
2651 dest_mem = get_memory_rtx (dest);
2652 set_mem_align (dest_mem, dest_align);
2653 src_mem = get_memory_rtx (src);
2654 set_mem_align (src_mem, src_align);
2655 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2656 MIN (dest_align, src_align), endp);
2657 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2658 #ifdef POINTERS_EXTEND_UNSIGNED
2659 if (GET_MODE (dest_mem) != ptr_mode)
2660 dest_mem = convert_memory_address (ptr_mode, dest_mem);
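/* Standalone illustration (not part of GCC; assumes glibc so that mempcpy
   and stpcpy are declared, and that _GNU_SOURCE is defined before the
   first libc header) of the three ENDP conventions documented above.
   Returns nonzero when the expected pointers come back.  */

#define _GNU_SOURCE 1
#include <string.h>

static int
endp_conventions_demo (void)
{
  char buf[8];
  size_t n = sizeof "abc";			/* 4, including the NUL */

  char *p0 = memcpy (buf, "abc", n);		/* ENDP == 0: DEST */
  char *p1 = mempcpy (buf, "abc", n);		/* ENDP == 1: DEST + N */
  char *p2 = stpcpy (buf, "abc");		/* ENDP == 2: DEST + N - 1 */

  return p0 == buf && p1 == buf + n && p2 == buf + n - 1;
}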
2669 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2670 if we failed; the caller should emit a normal call. */
2673 expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
2675 if (!validate_arglist (arglist,
2676 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2680 tree dest = TREE_VALUE (arglist);
2681 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2682 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2684 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2685 unsigned int dest_align
2686 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2688 /* If DEST is not a pointer type, call the normal function. */
2689 if (dest_align == 0)
2692 /* If the LEN parameter is zero, return DEST. */
2693 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2695 /* Evaluate and ignore SRC in case it has side-effects. */
2696 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2697 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2700 /* If SRC is not a pointer type, don't do this
2701 operation in-line. */
2705 /* If src is categorized for a readonly section we can use normal memcpy. */
2707 if (readonly_data_expr (src))
2709 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2712 return expand_expr (build_function_call_expr (fn, arglist),
2713 target, mode, EXPAND_NORMAL);
2716 /* Otherwise, call the normal function. */
2721 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2722 if we failed; the caller should emit a normal call. */
2725 expand_builtin_bcopy (tree arglist)
2727 tree src, dest, size, newarglist;
2729 if (!validate_arglist (arglist,
2730 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2733 src = TREE_VALUE (arglist);
2734 dest = TREE_VALUE (TREE_CHAIN (arglist));
2735 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2737 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2738 memmove(ptr y, ptr x, size_t z). This is done this way
2739 so that if it isn't expanded inline, we fall back to
2740 calling bcopy instead of memmove. */
2742 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2743 newarglist = tree_cons (NULL_TREE, src, newarglist);
2744 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2746 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
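/* A plain-C picture (not GCC code; the wrapper is hypothetical) of the
   rewrite built above: bcopy's (src, dest, len) argument order becomes
   memmove's (dest, src, len), with the length converted to size_t.  */

#include <string.h>

static void
bcopy_as_memmove (const void *src, void *dest, unsigned long len)
{
  memmove (dest, src, (size_t) len);
}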
2749 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2750 if we failed; the caller should emit a normal call. Otherwise try to get
2751 the result in TARGET, if convenient (and in mode MODE if that's convenient). */
2755 expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
2757 tree fn, len, src, dst;
2759 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2762 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2766 src = TREE_VALUE (TREE_CHAIN (arglist));
2767 len = c_strlen (src, 1);
2768 if (len == 0 || TREE_SIDE_EFFECTS (len))
2771 dst = TREE_VALUE (arglist);
2772 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2773 arglist = build_tree_list (NULL_TREE, len);
2774 arglist = tree_cons (NULL_TREE, src, arglist);
2775 arglist = tree_cons (NULL_TREE, dst, arglist);
2776 return expand_expr (build_function_call_expr (fn, arglist),
2777 target, mode, EXPAND_NORMAL);
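/* Illustrative only (not GCC code; the helper is hypothetical): with a
   source string whose length is known at compile time, the rewrite above
   amounts to replacing strcpy (dest, "hi") with memcpy (dest, "hi", 3),
   i.e. strlen + 1 bytes so the terminating NUL is copied as well.  */

#include <string.h>

static void
strcpy_as_memcpy_demo (char *dest)
{
  memcpy (dest, "hi", sizeof "hi");	/* sizeof "hi" == strlen ("hi") + 1 == 3 */
}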
2780 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2781 Return 0 if we failed; the caller should emit a normal call.
2782 Otherwise try to get the result in TARGET, if convenient (and in
2783 mode MODE if that's convenient). */
2786 expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
2788 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2794 /* If return value is ignored, transform stpcpy into strcpy. */
2795 if (target == const0_rtx)
2797 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2801 return expand_expr (build_function_call_expr (fn, arglist),
2802 target, mode, EXPAND_NORMAL);
2805 /* Ensure we get an actual string whose length can be evaluated at
2806 compile-time, not an expression containing a string. This is
2807 because the latter will potentially produce pessimized code
2808 when used to produce the return value. */
2809 src = TREE_VALUE (TREE_CHAIN (arglist));
2810 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
2813 dst = TREE_VALUE (arglist);
2814 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2815 arglist = build_tree_list (NULL_TREE, len);
2816 arglist = tree_cons (NULL_TREE, src, arglist);
2817 arglist = tree_cons (NULL_TREE, dst, arglist);
2818 return expand_builtin_mempcpy (arglist, target, mode, /*endp=*/2);
2822 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2823 bytes from constant string DATA + OFFSET and return it as a target constant. */
2827 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
2828 enum machine_mode mode)
2830 const char *str = (const char *) data;
2832 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2835 return c_readstr (str + offset, mode);
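/* A standalone analogue (not GCC code; helper and packing order are
   illustrative only) of the callback above: any byte at or beyond the
   terminating NUL of STR reads as zero, which is exactly the trailing
   padding strncpy is required to produce.  Little-endian packing is
   assumed purely for the sketch.  */

#include <string.h>

static unsigned long
strncpy_read_sketch (const char *str, unsigned long offset, int size)
{
  unsigned long len = strlen (str);
  unsigned long value = 0;
  int i;

  for (i = size - 1; i >= 0; i--)
    {
      unsigned char byte = 0;

      if (offset + (unsigned long) i < len)
	byte = (unsigned char) str[offset + i];
      value = (value << 8) | byte;
    }
  return value;
}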
2838 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2839 if we failed; the caller should emit a normal call. */
2842 expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
2844 if (!validate_arglist (arglist,
2845 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2849 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
2850 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2853 /* We must be passed a constant len parameter. */
2854 if (TREE_CODE (len) != INTEGER_CST)
2857 /* If the len parameter is zero, return the dst parameter. */
2858 if (integer_zerop (len))
2860 /* Evaluate and ignore the src argument in case it has side-effects. */
2862 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2863 VOIDmode, EXPAND_NORMAL);
2864 /* Return the dst parameter. */
2865 return expand_expr (TREE_VALUE (arglist), target, mode,
2869 /* Now, we must be passed a constant src ptr parameter. */
2870 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2873 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2875 /* We're required to pad with trailing zeros if the requested
2876 len is greater than strlen(s2)+1. In that case try to
2877 use store_by_pieces; if it fails, punt. */
2878 if (tree_int_cst_lt (slen, len))
2880 tree dest = TREE_VALUE (arglist);
2881 unsigned int dest_align
2882 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2883 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2886 if (!p || dest_align == 0 || !host_integerp (len, 1)
2887 || !can_store_by_pieces (tree_low_cst (len, 1),
2888 builtin_strncpy_read_str,
2889 (void *) p, dest_align))
2892 dest_mem = get_memory_rtx (dest);
2893 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2894 builtin_strncpy_read_str,
2895 (void *) p, dest_align, 0);
2896 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2897 #ifdef POINTERS_EXTEND_UNSIGNED
2898 if (GET_MODE (dest_mem) != ptr_mode)
2899 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2904 /* OK, transform into builtin memcpy. */
2905 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2908 return expand_expr (build_function_call_expr (fn, arglist),
2909 target, mode, EXPAND_NORMAL);
2913 /* Callback routine for store_by_pieces. Return a target constant of mode
2914 MODE whose GET_MODE_SIZE (MODE) bytes all equal the char value pointed to by DATA. */
2918 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2919 enum machine_mode mode)
2921 const char *c = (const char *) data;
2922 char *p = alloca (GET_MODE_SIZE (mode));
2924 memset (p, *c, GET_MODE_SIZE (mode));
2926 return c_readstr (p, mode);
2929 /* Callback routine for store_by_pieces. Return the RTL of a register
2930 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2931 char value given in the RTL register data. For example, if mode is
2932 4 bytes wide, return the RTL for 0x01010101*data. */
2935 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2936 enum machine_mode mode)
2942 size = GET_MODE_SIZE (mode);
2947 memset (p, 1, size);
2948 coeff = c_readstr (p, mode);
2950 target = convert_to_mode (mode, (rtx) data, 1);
2951 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2952 return force_reg (mode, target);
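/* Standalone illustration (not GCC code; assumes a 32-bit unsigned int)
   of the trick used above: multiplying a byte value by a constant whose
   bytes are all 0x01 replicates that byte across the whole word.  */

static unsigned int
replicate_byte_sketch (unsigned char c)
{
  return (unsigned int) c * 0x01010101u;	/* e.g. 0xab -> 0xabababab */
}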
2955 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2956 if we failed; the caller should emit a normal call. Otherwise try to get
2957 the result in TARGET, if convenient (and in mode MODE if that's convenient). */
2961 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
2963 if (!validate_arglist (arglist,
2964 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2968 tree dest = TREE_VALUE (arglist);
2969 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2970 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2973 unsigned int dest_align
2974 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2975 rtx dest_mem, dest_addr, len_rtx;
2977 /* If DEST is not a pointer type, don't do this
2978 operation in-line. */
2979 if (dest_align == 0)
2982 /* If the LEN parameter is zero, return DEST. */
2983 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2985 /* Evaluate and ignore VAL in case it has side-effects. */
2986 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2987 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2990 if (TREE_CODE (val) != INTEGER_CST)
2994 if (!host_integerp (len, 1))
2997 if (optimize_size && tree_low_cst (len, 1) > 1)
3000 /* Assume that we can memset by pieces if we can store
3001 the coefficients by pieces (in the required modes).
3002 We can't pass builtin_memset_gen_str as that emits RTL. */
3004 if (!can_store_by_pieces (tree_low_cst (len, 1),
3005 builtin_memset_read_str,
3009 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
3010 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3011 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3013 dest_mem = get_memory_rtx (dest);
3014 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3015 builtin_memset_gen_str,
3016 val_rtx, dest_align, 0);
3017 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3018 #ifdef POINTERS_EXTEND_UNSIGNED
3019 if (GET_MODE (dest_mem) != ptr_mode)
3020 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3025 if (target_char_cast (val, &c))
3030 if (!host_integerp (len, 1))
3032 if (!can_store_by_pieces (tree_low_cst (len, 1),
3033 builtin_memset_read_str, &c,
3037 dest_mem = get_memory_rtx (dest);
3038 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3039 builtin_memset_read_str,
3041 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3042 #ifdef POINTERS_EXTEND_UNSIGNED
3043 if (GET_MODE (dest_mem) != ptr_mode)
3044 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3049 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3051 dest_mem = get_memory_rtx (dest);
3052 set_mem_align (dest_mem, dest_align);
3053 dest_addr = clear_storage (dest_mem, len_rtx);
3057 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3058 #ifdef POINTERS_EXTEND_UNSIGNED
3059 if (GET_MODE (dest_addr) != ptr_mode)
3060 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3068 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3069 if we failed; the caller should emit a normal call. */
3072 expand_builtin_bzero (tree arglist)
3074 tree dest, size, newarglist;
3076 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3079 dest = TREE_VALUE (arglist);
3080 size = TREE_VALUE (TREE_CHAIN (arglist));
3082 /* New argument list transforming bzero(ptr x, int y) to
3083 memset(ptr x, int 0, size_t y). This is done this way
3084 so that if it isn't expanded inline, we fall back to
3085 calling bzero instead of memset. */
3087 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
3088 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3089 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3091 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
3094 /* Expand expression EXP, which is a call to the memcmp built-in function.
3095 ARGLIST is the argument list for this call. Return 0 if we failed and the
3096 caller should emit a normal call, otherwise try to get the result in
3097 TARGET, if convenient (and in mode MODE, if that's convenient). */
3100 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3101 enum machine_mode mode)
3103 tree arg1, arg2, len;
3104 const char *p1, *p2;
3106 if (!validate_arglist (arglist,
3107 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3110 arg1 = TREE_VALUE (arglist);
3111 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3112 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3114 /* If the len parameter is zero, return zero. */
3115 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
3117 /* Evaluate and ignore arg1 and arg2 in case they have side-effects. */
3119 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3120 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3124 p1 = c_getstr (arg1);
3125 p2 = c_getstr (arg2);
3127 /* If all arguments are constant, and the value of len is not greater
3128 than the lengths of arg1 and arg2, evaluate at compile-time. */
3129 if (host_integerp (len, 1) && p1 && p2
3130 && compare_tree_int (len, strlen (p1) + 1) <= 0
3131 && compare_tree_int (len, strlen (p2) + 1) <= 0)
3133 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
3135 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3138 /* If len parameter is one, return an expression corresponding to
3139 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
3140 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
3142 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3143 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3145 fold (build1 (CONVERT_EXPR, integer_type_node,
3146 build1 (INDIRECT_REF, cst_uchar_node,
3147 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3149 fold (build1 (CONVERT_EXPR, integer_type_node,
3150 build1 (INDIRECT_REF, cst_uchar_node,
3151 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3152 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3153 return expand_expr (result, target, mode, EXPAND_NORMAL);
3156 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrsi
3158 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3163 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3165 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3166 enum machine_mode insn_mode;
3168 #ifdef HAVE_cmpmemsi
3170 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3173 #ifdef HAVE_cmpstrsi
3175 insn_mode = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3180 /* If we don't have POINTER_TYPE, call the function. */
3181 if (arg1_align == 0 || arg2_align == 0)
3184 /* Make a place to write the result of the instruction. */
3187 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3188 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3189 result = gen_reg_rtx (insn_mode);
3191 arg1_rtx = get_memory_rtx (arg1);
3192 arg2_rtx = get_memory_rtx (arg2);
3193 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3194 #ifdef HAVE_cmpmemsi
3196 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3197 GEN_INT (MIN (arg1_align, arg2_align)));
3200 #ifdef HAVE_cmpstrsi
3202 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3203 GEN_INT (MIN (arg1_align, arg2_align)));
3211 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3212 TYPE_MODE (integer_type_node), 3,
3213 XEXP (arg1_rtx, 0), Pmode,
3214 XEXP (arg2_rtx, 0), Pmode,
3215 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3216 TREE_UNSIGNED (sizetype)),
3217 TYPE_MODE (sizetype));
3219 /* Return the value in the proper mode for this function. */
3220 mode = TYPE_MODE (TREE_TYPE (exp));
3221 if (GET_MODE (result) == mode)
3223 else if (target != 0)
3225 convert_move (target, result, 0);
3229 return convert_to_mode (mode, result, 0);
3236 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3237 if we failed; the caller should emit a normal call. Otherwise try to get
3238 the result in TARGET, if convenient. */
3241 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3243 tree arglist = TREE_OPERAND (exp, 1);
3245 const char *p1, *p2;
3247 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3250 arg1 = TREE_VALUE (arglist);
3251 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3253 p1 = c_getstr (arg1);
3254 p2 = c_getstr (arg2);
3258 const int i = strcmp (p1, p2);
3259 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
3262 /* If either arg is "", return an expression corresponding to
3263 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
3264 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3266 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3267 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3269 fold (build1 (CONVERT_EXPR, integer_type_node,
3270 build1 (INDIRECT_REF, cst_uchar_node,
3271 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3273 fold (build1 (CONVERT_EXPR, integer_type_node,
3274 build1 (INDIRECT_REF, cst_uchar_node,
3275 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3276 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3277 return expand_expr (result, target, mode, EXPAND_NORMAL);
3280 #ifdef HAVE_cmpstrsi
3283 tree len, len1, len2;
3284 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3288 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3290 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3291 enum machine_mode insn_mode
3292 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3294 len1 = c_strlen (arg1, 1);
3295 len2 = c_strlen (arg2, 1);
3298 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3300 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3302 /* If we don't have a constant length for the first, use the length
3303 of the second, if we know it. We don't require a constant for
3304 this case; some cost analysis could be done if both are available
3305 but neither is constant. For now, assume they're equally cheap,
3306 unless one has side effects. If both strings have constant lengths, use the smaller. */
3313 else if (TREE_SIDE_EFFECTS (len1))
3315 else if (TREE_SIDE_EFFECTS (len2))
3317 else if (TREE_CODE (len1) != INTEGER_CST)
3319 else if (TREE_CODE (len2) != INTEGER_CST)
3321 else if (tree_int_cst_lt (len1, len2))
3326 /* If both arguments have side effects, we cannot optimize. */
3327 if (!len || TREE_SIDE_EFFECTS (len))
3330 /* If we don't have POINTER_TYPE, call the function. */
3331 if (arg1_align == 0 || arg2_align == 0)
3334 /* Make a place to write the result of the instruction. */
3337 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3338 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3339 result = gen_reg_rtx (insn_mode);
3341 arg1_rtx = get_memory_rtx (arg1);
3342 arg2_rtx = get_memory_rtx (arg2);
3343 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3344 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3345 GEN_INT (MIN (arg1_align, arg2_align)));
3351 /* Return the value in the proper mode for this function. */
3352 mode = TYPE_MODE (TREE_TYPE (exp));
3353 if (GET_MODE (result) == mode)
3356 return convert_to_mode (mode, result, 0);
3357 convert_move (target, result, 0);
3364 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3365 if we failed; the caller should emit a normal call. Otherwise try to get
3366 the result in TARGET, if convenient. */
3369 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3371 tree arglist = TREE_OPERAND (exp, 1);
3372 tree arg1, arg2, arg3;
3373 const char *p1, *p2;
3375 if (!validate_arglist (arglist,
3376 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3379 arg1 = TREE_VALUE (arglist);
3380 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3381 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3383 /* If the len parameter is zero, return zero. */
3384 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
3386 /* Evaluate and ignore arg1 and arg2 in case they have side-effects. */
3388 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3389 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3393 p1 = c_getstr (arg1);
3394 p2 = c_getstr (arg2);
3396 /* If all arguments are constant, evaluate at compile-time. */
3397 if (host_integerp (arg3, 1) && p1 && p2)
3399 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3400 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3403 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3404 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3405 if (host_integerp (arg3, 1)
3406 && (tree_low_cst (arg3, 1) == 1
3407 || (tree_low_cst (arg3, 1) > 1
3408 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3410 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3411 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3413 fold (build1 (CONVERT_EXPR, integer_type_node,
3414 build1 (INDIRECT_REF, cst_uchar_node,
3415 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3417 fold (build1 (CONVERT_EXPR, integer_type_node,
3418 build1 (INDIRECT_REF, cst_uchar_node,
3419 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3420 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3421 return expand_expr (result, target, mode, EXPAND_NORMAL);
3424 /* If c_strlen can determine an expression for one of the string
3425 lengths, and it doesn't have side effects, then emit cmpstrsi
3426 using length MIN(strlen(string)+1, arg3). */
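/* A plain-C reference (not GCC's implementation; the sign convention is
   simplified to -1/0/1) making the point explicit: strncmp never looks
   past the first NUL of either string, so MIN (strlen (s1) + 1, n) bytes
   are always enough for the comparison.  */

static int
strncmp_sketch (const char *s1, const char *s2, unsigned long n)
{
  unsigned long i;

  for (i = 0; i < n; i++)
    {
      unsigned char c1 = (unsigned char) s1[i];
      unsigned char c2 = (unsigned char) s2[i];

      if (c1 != c2)
	return c1 < c2 ? -1 : 1;
      if (c1 == '\0')
	return 0;
    }
  return 0;
}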
3427 #ifdef HAVE_cmpstrsi
3430 tree len, len1, len2;
3431 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3435 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3437 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3438 enum machine_mode insn_mode
3439 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3441 len1 = c_strlen (arg1, 1);
3442 len2 = c_strlen (arg2, 1);
3445 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3447 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3449 /* If we don't have a constant length for the first, use the length
3450 of the second, if we know it. We don't require a constant for
3451 this case; some cost analysis could be done if both are available
3452 but neither is constant. For now, assume they're equally cheap,
3453 unless one has side effects. If both strings have constant lengths, use the smaller. */
3460 else if (TREE_SIDE_EFFECTS (len1))
3462 else if (TREE_SIDE_EFFECTS (len2))
3464 else if (TREE_CODE (len1) != INTEGER_CST)
3466 else if (TREE_CODE (len2) != INTEGER_CST)
3468 else if (tree_int_cst_lt (len1, len2))
3473 /* If both arguments have side effects, we cannot optimize. */
3474 if (!len || TREE_SIDE_EFFECTS (len))
3477 /* The actual new length parameter is MIN(len,arg3). */
3478 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3480 /* If we don't have POINTER_TYPE, call the function. */
3481 if (arg1_align == 0 || arg2_align == 0)
3484 /* Make a place to write the result of the instruction. */
3487 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3488 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3489 result = gen_reg_rtx (insn_mode);
3491 arg1_rtx = get_memory_rtx (arg1);
3492 arg2_rtx = get_memory_rtx (arg2);
3493 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3494 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3495 GEN_INT (MIN (arg1_align, arg2_align)));
3501 /* Return the value in the proper mode for this function. */
3502 mode = TYPE_MODE (TREE_TYPE (exp));
3503 if (GET_MODE (result) == mode)
3506 return convert_to_mode (mode, result, 0);
3507 convert_move (target, result, 0);
3514 /* Expand expression EXP, which is a call to the strcat builtin.
3515 Return 0 if we failed; the caller should emit a normal call.
3516 Otherwise try to get the result in TARGET, if convenient. */
3519 expand_builtin_strcat (tree arglist, rtx target, enum machine_mode mode)
3521 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3525 tree dst = TREE_VALUE (arglist),
3526 src = TREE_VALUE (TREE_CHAIN (arglist));
3527 const char *p = c_getstr (src);
3531 /* If the string length is zero, return the dst parameter. */
3533 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3534 else if (!optimize_size)
3536 /* Otherwise if !optimize_size, see if we can store by
3537 pieces into (dst + strlen(dst)). */
3538 tree newdst, arglist,
3539 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3541 /* This is the length argument. */
3542 arglist = build_tree_list (NULL_TREE,
3543 fold (size_binop (PLUS_EXPR,
3546 /* Prepend src argument. */
3547 arglist = tree_cons (NULL_TREE, src, arglist);
3549 /* We're going to use dst more than once. */
3550 dst = save_expr (dst);
3552 /* Create strlen (dst). */
3554 fold (build_function_call_expr (strlen_fn,