/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
50 #define CALLED_AS_BUILT_IN(NODE) \
51 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
53 #ifndef PAD_VARARGS_DOWN
54 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 /* Define the names of the builtin function types and codes. */
58 const char *const built_in_class_names[4]
59 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
61 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
62 const char *const built_in_names[(int) END_BUILTINS] =
64 #include "builtins.def"
68 /* Setup an array of _DECL trees, make sure each element is
69 initialized to NULL_TREE. */
70 tree built_in_decls[(int) END_BUILTINS];
71 /* Declarations used when constructing the builtin implicitly in the compiler.
72 It may be NULL_TREE when this is invalid (for instance runtime is not
73 required to implement the function call in all cases). */
74 tree implicit_built_in_decls[(int) END_BUILTINS];
76 static int get_pointer_alignment (tree, unsigned int);
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree);
81 static tree build_string_literal (int, const char *);
82 static int apply_args_size (void);
83 static int apply_result_size (void);
84 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
85 static rtx result_vector (int, rtx);
87 static rtx expand_builtin_setjmp (tree, rtx);
88 static void expand_builtin_update_setjmp_buf (rtx);
89 static void expand_builtin_prefetch (tree);
90 static rtx expand_builtin_apply_args (void);
91 static rtx expand_builtin_apply_args_1 (void);
92 static rtx expand_builtin_apply (rtx, rtx, rtx);
93 static void expand_builtin_return (rtx);
94 static enum type_class type_to_class (tree);
95 static rtx expand_builtin_classify_type (tree);
96 static void expand_errno_check (tree, rtx);
97 static rtx expand_builtin_mathfn (tree, rtx, rtx);
98 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
99 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
100 static rtx expand_builtin_args_info (tree);
101 static rtx expand_builtin_next_arg (tree);
102 static rtx expand_builtin_va_start (tree);
103 static rtx expand_builtin_va_end (tree);
104 static rtx expand_builtin_va_copy (tree);
105 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
106 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
107 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
108 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
109 static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
115 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_bcopy (tree);
117 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
119 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
121 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
123 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_bzero (tree);
125 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static rtx expand_builtin_fputs (tree, rtx, bool);
134 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
135 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
136 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
137 static tree stabilize_va_list (tree, int);
138 static rtx expand_builtin_expect (tree, rtx);
139 static tree fold_builtin_constant_p (tree);
140 static tree fold_builtin_classify_type (tree);
141 static tree fold_builtin_inf (tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static int validate_arglist (tree, ...);
144 static bool integer_valued_real_p (tree);
145 static tree fold_trunc_transparent_mathfn (tree);
146 static bool readonly_data_expr (tree);
147 static rtx expand_builtin_fabs (tree, rtx, rtx);
148 static rtx expand_builtin_signbit (tree, rtx);
149 static tree fold_builtin_cabs (tree, tree);
150 static tree fold_builtin_trunc (tree);
151 static tree fold_builtin_floor (tree);
152 static tree fold_builtin_ceil (tree);
153 static tree fold_builtin_round (tree);
154 static tree fold_builtin_bitop (tree);
155 static tree fold_builtin_memcpy (tree);
156 static tree fold_builtin_mempcpy (tree);
157 static tree fold_builtin_memmove (tree);
158 static tree fold_builtin_strchr (tree, bool);
159 static tree fold_builtin_memcmp (tree);
160 static tree fold_builtin_strcmp (tree);
161 static tree fold_builtin_strncmp (tree);
162 static tree fold_builtin_signbit (tree);
163 static tree fold_builtin_copysign (tree, tree);
164 static tree fold_builtin_isascii (tree);
165 static tree fold_builtin_toascii (tree);
166 static tree fold_builtin_isdigit (tree);
167 static tree fold_builtin_fabs (tree, tree);
168 static tree fold_builtin_abs (tree, tree);
169 static tree fold_builtin_unordered_cmp (tree, enum tree_code, enum tree_code);
170 static tree fold_builtin_1 (tree, bool);
172 static tree simplify_builtin_strpbrk (tree);
173 static tree simplify_builtin_strstr (tree);
174 static tree simplify_builtin_strchr (tree);
175 static tree simplify_builtin_strrchr (tree);
176 static tree simplify_builtin_strcat (tree);
177 static tree simplify_builtin_strncat (tree);
178 static tree simplify_builtin_strspn (tree);
179 static tree simplify_builtin_strcspn (tree);
180 static void simplify_builtin_next_arg (tree);
181 static void simplify_builtin_va_start (tree);
182 static tree simplify_builtin_sprintf (tree, int);
185 /* Return the alignment in bits of EXP, a pointer valued expression.
186 But don't return more than MAX_ALIGN no matter what.
187 The alignment returned is, by default, the alignment of the thing that
188 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
190 Otherwise, look at the expression to see if we can do better, i.e., if the
191 expression is actually pointing at an object whose alignment is tighter. */
194 get_pointer_alignment (tree exp, unsigned int max_align)
196 unsigned int align, inner;
198 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
201 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
202 align = MIN (align, max_align);
206 switch (TREE_CODE (exp))
210 case NON_LVALUE_EXPR:
211 exp = TREE_OPERAND (exp, 0);
212 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
215 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
216 align = MIN (inner, max_align);
220 /* If sum of pointer + int, restrict our maximum alignment to that
221 imposed by the integer. If not, we can't do any better than
223 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
226 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
227 & (max_align / BITS_PER_UNIT - 1))
231 exp = TREE_OPERAND (exp, 0);
235 /* See what we are pointing at and look at its alignment. */
236 exp = TREE_OPERAND (exp, 0);
237 if (TREE_CODE (exp) == FUNCTION_DECL)
238 align = FUNCTION_BOUNDARY;
239 else if (DECL_P (exp))
240 align = DECL_ALIGN (exp);
241 #ifdef CONSTANT_ALIGNMENT
242 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
243 align = CONSTANT_ALIGNMENT (exp, align);
245 return MIN (align, max_align);
253 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
254 way, because it could contain a zero byte in the middle.
255 TREE_STRING_LENGTH is the size of the character array, not the string.
257 ONLY_VALUE should be nonzero if the result is not going to be emitted
258 into the instruction stream and zero if it is going to be expanded.
259 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
260 is returned, otherwise NULL, since
261 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
262 evaluate the side-effects.
264 The value returned is of type `ssizetype'.
266 Unfortunately, string_constant can't access the values of const char
267 arrays with initializers, so neither can we do so here. */
270 c_strlen (tree src, int only_value)
273 HOST_WIDE_INT offset;
278 if (TREE_CODE (src) == COND_EXPR
279 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
283 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
284 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
285 if (tree_int_cst_equal (len1, len2))
289 if (TREE_CODE (src) == COMPOUND_EXPR
290 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
291 return c_strlen (TREE_OPERAND (src, 1), only_value);
293 src = string_constant (src, &offset_node);
297 max = TREE_STRING_LENGTH (src) - 1;
298 ptr = TREE_STRING_POINTER (src);
300 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
302 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
303 compute the offset to the following null if we don't know where to
304 start searching for it. */
307 for (i = 0; i < max; i++)
311 /* We don't know the starting offset, but we do know that the string
312 has no internal zero bytes. We can assume that the offset falls
313 within the bounds of the string; otherwise, the programmer deserves
314 what he gets. Subtract the offset from the length of the string,
315 and return that. This would perhaps not be valid if we were dealing
316 with named arrays in addition to literal string constants. */
318 return size_diffop (size_int (max), offset_node);
321 /* We have a known offset into the string. Start searching there for
322 a null character if we can represent it as a single HOST_WIDE_INT. */
323 if (offset_node == 0)
325 else if (! host_integerp (offset_node, 0))
328 offset = tree_low_cst (offset_node, 0);
330 /* If the offset is known to be out of bounds, warn, and call strlen at
332 if (offset < 0 || offset > max)
334 warning ("offset outside bounds of constant string");
338 /* Use strlen to search for the first zero byte. Since any strings
339 constructed with build_string will have nulls appended, we win even
340 if we get handed something like (char[4])"abcd".
342 Since OFFSET is our starting index into the string, no further
343 calculation is needed. */
344 return ssize_int (strlen (ptr + offset));
347 /* Return a char pointer for a C string if it is a string constant
348 or sum of string constant and integer constant. */
355 src = string_constant (src, &offset_node);
359 if (offset_node == 0)
360 return TREE_STRING_POINTER (src);
361 else if (!host_integerp (offset_node, 1)
362 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
365 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
368 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
369 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
372 c_readstr (const char *str, enum machine_mode mode)
378 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
383 for (i = 0; i < GET_MODE_SIZE (mode); i++)
386 if (WORDS_BIG_ENDIAN)
387 j = GET_MODE_SIZE (mode) - i - 1;
388 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
389 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
390 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
392 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
395 ch = (unsigned char) str[i];
396 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
398 return immed_double_const (c[0], c[1], mode);
401 /* Cast a target constant CST to target CHAR and if that value fits into
402 host char type, return zero and put that value into variable pointed by
406 target_char_cast (tree cst, char *p)
408 unsigned HOST_WIDE_INT val, hostval;
410 if (!host_integerp (cst, 1)
411 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
414 val = tree_low_cst (cst, 1);
415 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
416 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
419 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
420 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
429 /* Similar to save_expr, but assumes that arbitrary code is not executed
430 in between the multiple evaluations. In particular, we assume that a
431 non-addressable local variable will not be modified. */
434 builtin_save_expr (tree exp)
436 if (TREE_ADDRESSABLE (exp) == 0
437 && (TREE_CODE (exp) == PARM_DECL
438 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
441 return save_expr (exp);
444 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
445 times to get the address of either a higher stack frame, or a return
446 address located within it (depending on FNDECL_CODE). */
449 expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
454 /* Some machines need special handling before we can access
455 arbitrary frames. For example, on the sparc, we must first flush
456 all register windows to the stack. */
457 #ifdef SETUP_FRAME_ADDRESSES
459 SETUP_FRAME_ADDRESSES ();
462 /* On the sparc, the return address is not in the frame, it is in a
463 register. There is no way to access it off of the current frame
464 pointer, but it can be accessed off the previous frame pointer by
465 reading the value from the register window save area. */
466 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
467 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
471 /* Scan back COUNT frames to the specified frame. */
472 for (i = 0; i < count; i++)
474 /* Assume the dynamic chain pointer is in the word that the
475 frame address points to, unless otherwise specified. */
476 #ifdef DYNAMIC_CHAIN_ADDRESS
477 tem = DYNAMIC_CHAIN_ADDRESS (tem);
479 tem = memory_address (Pmode, tem);
480 tem = gen_rtx_MEM (Pmode, tem);
481 set_mem_alias_set (tem, get_frame_alias_set ());
482 tem = copy_to_reg (tem);
485 /* For __builtin_frame_address, return what we've got. */
486 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
489 /* For __builtin_return_address, Get the return address from that
491 #ifdef RETURN_ADDR_RTX
492 tem = RETURN_ADDR_RTX (count, tem);
494 tem = memory_address (Pmode,
495 plus_constant (tem, GET_MODE_SIZE (Pmode)));
496 tem = gen_rtx_MEM (Pmode, tem);
497 set_mem_alias_set (tem, get_frame_alias_set ());
502 /* Alias set used for setjmp buffer. */
503 static HOST_WIDE_INT setjmp_alias_set = -1;
505 /* Construct the leading half of a __builtin_setjmp call. Control will
506 return to RECEIVER_LABEL. This is used directly by sjlj exception
510 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
512 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
516 if (setjmp_alias_set == -1)
517 setjmp_alias_set = new_alias_set ();
519 buf_addr = convert_memory_address (Pmode, buf_addr);
521 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
523 /* We store the frame pointer and the address of receiver_label in
524 the buffer and use the rest of it for the stack save area, which
525 is machine-dependent. */
527 mem = gen_rtx_MEM (Pmode, buf_addr);
528 set_mem_alias_set (mem, setjmp_alias_set);
529 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
531 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
532 set_mem_alias_set (mem, setjmp_alias_set);
534 emit_move_insn (validize_mem (mem),
535 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
537 stack_save = gen_rtx_MEM (sa_mode,
538 plus_constant (buf_addr,
539 2 * GET_MODE_SIZE (Pmode)));
540 set_mem_alias_set (stack_save, setjmp_alias_set);
541 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
543 /* If there is further processing to do, do it. */
544 #ifdef HAVE_builtin_setjmp_setup
545 if (HAVE_builtin_setjmp_setup)
546 emit_insn (gen_builtin_setjmp_setup (buf_addr));
549 /* Tell optimize_save_area_alloca that extra work is going to
550 need to go on during alloca. */
551 current_function_calls_setjmp = 1;
553 /* Set this so all the registers get saved in our frame; we need to be
554 able to copy the saved values for any registers from frames we unwind. */
555 current_function_has_nonlocal_label = 1;
558 /* Construct the trailing part of a __builtin_setjmp call.
559 This is used directly by sjlj exception handling code. */
562 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
564 /* Clobber the FP when we get here, so we have to make sure it's
565 marked as used by this function. */
566 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
568 /* Mark the static chain as clobbered here so life information
569 doesn't get messed up for it. */
570 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
572 /* Now put in the code to restore the frame pointer, and argument
573 pointer, if needed. */
574 #ifdef HAVE_nonlocal_goto
575 if (! HAVE_nonlocal_goto)
577 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
579 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
580 if (fixed_regs[ARG_POINTER_REGNUM])
582 #ifdef ELIMINABLE_REGS
584 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
586 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
587 if (elim_regs[i].from == ARG_POINTER_REGNUM
588 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
591 if (i == ARRAY_SIZE (elim_regs))
594 /* Now restore our arg pointer from the address at which it
595 was saved in our stack frame. */
596 emit_move_insn (virtual_incoming_args_rtx,
597 copy_to_reg (get_arg_pointer_save_area (cfun)));
602 #ifdef HAVE_builtin_setjmp_receiver
603 if (HAVE_builtin_setjmp_receiver)
604 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
607 #ifdef HAVE_nonlocal_goto_receiver
608 if (HAVE_nonlocal_goto_receiver)
609 emit_insn (gen_nonlocal_goto_receiver ());
614 /* @@@ This is a kludge. Not all machine descriptions define a blockage
615 insn, but we must not allow the code we just generated to be reordered
616 by scheduling. Specifically, the update of the frame pointer must
617 happen immediately, not later. So emit an ASM_INPUT to act as blockage
619 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
622 /* __builtin_setjmp is passed a pointer to an array of five words (not
623 all will be used on all machines). It operates similarly to the C
624 library function of the same name, but is more efficient. Much of
625 the code below (and for longjmp) is copied from the handling of
628 NOTE: This is intended for use by GNAT and the exception handling
629 scheme in the compiler and will only work in the method used by
633 expand_builtin_setjmp (tree arglist, rtx target)
635 rtx buf_addr, next_lab, cont_lab;
637 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
640 if (target == 0 || !REG_P (target)
641 || REGNO (target) < FIRST_PSEUDO_REGISTER)
642 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
644 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
646 next_lab = gen_label_rtx ();
647 cont_lab = gen_label_rtx ();
649 expand_builtin_setjmp_setup (buf_addr, next_lab);
651 /* Set TARGET to zero and branch to the continue label. Use emit_jump to
652 ensure that pending stack adjustments are flushed. */
653 emit_move_insn (target, const0_rtx);
654 emit_jump (cont_lab);
656 emit_label (next_lab);
658 expand_builtin_setjmp_receiver (next_lab);
660 /* Set TARGET to one. */
661 emit_move_insn (target, const1_rtx);
662 emit_label (cont_lab);
664 /* Tell flow about the strange goings on. Putting `next_lab' on
665 `nonlocal_goto_handler_labels' to indicates that function
666 calls may traverse the arc back to this label. */
668 current_function_has_nonlocal_label = 1;
669 nonlocal_goto_handler_labels
670 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
675 /* __builtin_longjmp is passed a pointer to an array of five words (not
676 all will be used on all machines). It operates similarly to the C
677 library function of the same name, but is more efficient. Much of
678 the code below is copied from the handling of non-local gotos.
680 NOTE: This is intended for use by GNAT and the exception handling
681 scheme in the compiler and will only work in the method used by
685 expand_builtin_longjmp (rtx buf_addr, rtx value)
687 rtx fp, lab, stack, insn, last;
688 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
690 if (setjmp_alias_set == -1)
691 setjmp_alias_set = new_alias_set ();
693 buf_addr = convert_memory_address (Pmode, buf_addr);
695 buf_addr = force_reg (Pmode, buf_addr);
697 /* We used to store value in static_chain_rtx, but that fails if pointers
698 are smaller than integers. We instead require that the user must pass
699 a second argument of 1, because that is what builtin_setjmp will
700 return. This also makes EH slightly more efficient, since we are no
701 longer copying around a value that we don't care about. */
702 gcc_assert (value == const1_rtx);
704 current_function_calls_longjmp = 1;
706 last = get_last_insn ();
707 #ifdef HAVE_builtin_longjmp
708 if (HAVE_builtin_longjmp)
709 emit_insn (gen_builtin_longjmp (buf_addr));
713 fp = gen_rtx_MEM (Pmode, buf_addr);
714 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
715 GET_MODE_SIZE (Pmode)));
717 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
718 2 * GET_MODE_SIZE (Pmode)));
719 set_mem_alias_set (fp, setjmp_alias_set);
720 set_mem_alias_set (lab, setjmp_alias_set);
721 set_mem_alias_set (stack, setjmp_alias_set);
723 /* Pick up FP, label, and SP from the block and jump. This code is
724 from expand_goto in stmt.c; see there for detailed comments. */
725 #if HAVE_nonlocal_goto
726 if (HAVE_nonlocal_goto)
727 /* We have to pass a value to the nonlocal_goto pattern that will
728 get copied into the static_chain pointer, but it does not matter
729 what that value is, because builtin_setjmp does not use it. */
730 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
734 lab = copy_to_reg (lab);
736 emit_insn (gen_rtx_CLOBBER (VOIDmode,
737 gen_rtx_MEM (BLKmode,
738 gen_rtx_SCRATCH (VOIDmode))));
739 emit_insn (gen_rtx_CLOBBER (VOIDmode,
740 gen_rtx_MEM (BLKmode,
741 hard_frame_pointer_rtx)));
743 emit_move_insn (hard_frame_pointer_rtx, fp);
744 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
746 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
747 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
748 emit_indirect_jump (lab);
752 /* Search backwards and mark the jump insn as a non-local goto.
753 Note that this precludes the use of __builtin_longjmp to a
754 __builtin_setjmp target in the same function. However, we've
755 already cautioned the user that these functions are for
756 internal exception handling use only. */
757 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
759 gcc_assert (insn != last);
763 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
767 else if (CALL_P (insn))
772 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
773 and the address of the save area. */
776 expand_builtin_nonlocal_goto (tree arglist)
778 tree t_label, t_save_area;
779 rtx r_label, r_save_area, r_fp, r_sp, insn;
781 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
784 t_label = TREE_VALUE (arglist);
785 arglist = TREE_CHAIN (arglist);
786 t_save_area = TREE_VALUE (arglist);
788 r_label = expand_expr (t_label, NULL_RTX, VOIDmode, 0);
789 r_label = convert_memory_address (Pmode, r_label);
790 r_save_area = expand_expr (t_save_area, NULL_RTX, VOIDmode, 0);
791 r_save_area = convert_memory_address (Pmode, r_save_area);
792 r_fp = gen_rtx_MEM (Pmode, r_save_area);
793 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
794 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
796 current_function_has_nonlocal_goto = 1;
798 #if HAVE_nonlocal_goto
799 /* ??? We no longer need to pass the static chain value, afaik. */
800 if (HAVE_nonlocal_goto)
801 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
805 r_label = copy_to_reg (r_label);
807 emit_insn (gen_rtx_CLOBBER (VOIDmode,
808 gen_rtx_MEM (BLKmode,
809 gen_rtx_SCRATCH (VOIDmode))));
811 emit_insn (gen_rtx_CLOBBER (VOIDmode,
812 gen_rtx_MEM (BLKmode,
813 hard_frame_pointer_rtx)));
815 /* Restore frame pointer for containing function.
816 This sets the actual hard register used for the frame pointer
817 to the location of the function's incoming static chain info.
818 The non-local goto handler will then adjust it to contain the
819 proper value and reload the argument pointer, if needed. */
820 emit_move_insn (hard_frame_pointer_rtx, r_fp);
821 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
823 /* USE of hard_frame_pointer_rtx added for consistency;
824 not clear if really needed. */
825 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
826 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
827 emit_indirect_jump (r_label);
830 /* Search backwards to the jump insn and mark it as a
832 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
836 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
837 const0_rtx, REG_NOTES (insn));
840 else if (CALL_P (insn))
847 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
848 (not all will be used on all machines) that was passed to __builtin_setjmp.
849 It updates the stack pointer in that block to correspond to the current
853 expand_builtin_update_setjmp_buf (rtx buf_addr)
855 enum machine_mode sa_mode = Pmode;
859 #ifdef HAVE_save_stack_nonlocal
860 if (HAVE_save_stack_nonlocal)
861 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
863 #ifdef STACK_SAVEAREA_MODE
864 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
868 = gen_rtx_MEM (sa_mode,
871 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
875 emit_insn (gen_setjmp ());
878 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
881 /* Expand a call to __builtin_prefetch. For a target that does not support
882 data prefetch, evaluate the memory address argument in case it has side
886 expand_builtin_prefetch (tree arglist)
888 tree arg0, arg1, arg2;
891 if (!validate_arglist (arglist, POINTER_TYPE, 0))
894 arg0 = TREE_VALUE (arglist);
895 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
896 zero (read) and argument 2 (locality) defaults to 3 (high degree of
898 if (TREE_CHAIN (arglist))
900 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
901 if (TREE_CHAIN (TREE_CHAIN (arglist)))
902 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
904 arg2 = build_int_cst (NULL_TREE, 3);
908 arg1 = integer_zero_node;
909 arg2 = build_int_cst (NULL_TREE, 3);
912 /* Argument 0 is an address. */
913 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
915 /* Argument 1 (read/write flag) must be a compile-time constant int. */
916 if (TREE_CODE (arg1) != INTEGER_CST)
918 error ("second arg to `__builtin_prefetch' must be a constant");
919 arg1 = integer_zero_node;
921 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
922 /* Argument 1 must be either zero or one. */
923 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
925 warning ("invalid second arg to __builtin_prefetch; using zero");
929 /* Argument 2 (locality) must be a compile-time constant int. */
930 if (TREE_CODE (arg2) != INTEGER_CST)
932 error ("third arg to `__builtin_prefetch' must be a constant");
933 arg2 = integer_zero_node;
935 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
936 /* Argument 2 must be 0, 1, 2, or 3. */
937 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
939 warning ("invalid third arg to __builtin_prefetch; using zero");
946 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
948 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
949 || (GET_MODE (op0) != Pmode))
951 op0 = convert_memory_address (Pmode, op0);
952 op0 = force_reg (Pmode, op0);
954 emit_insn (gen_prefetch (op0, op1, op2));
958 /* Don't do anything with direct references to volatile memory, but
959 generate code to handle other side effects. */
960 if (!MEM_P (op0) && side_effects_p (op0))
964 /* Get a MEM rtx for expression EXP which is the address of an operand
965 to be used to be used in a string instruction (cmpstrsi, movmemsi, ..). */
968 get_memory_rtx (tree exp)
970 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
973 addr = convert_memory_address (Pmode, addr);
975 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
977 /* Get an expression we can use to find the attributes to assign to MEM.
978 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
979 we can. First remove any nops. */
980 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
981 || TREE_CODE (exp) == NON_LVALUE_EXPR)
982 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
983 exp = TREE_OPERAND (exp, 0);
985 if (TREE_CODE (exp) == ADDR_EXPR)
987 exp = TREE_OPERAND (exp, 0);
988 set_mem_attributes (mem, exp, 0);
990 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
992 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
993 /* memcpy, memset and other builtin stringops can alias with anything. */
994 set_mem_alias_set (mem, 0);
1000 /* Built-in functions to perform an untyped call and return. */
1002 /* For each register that may be used for calling a function, this
1003 gives a mode used to copy the register's value. VOIDmode indicates
1004 the register is not used for calling a function. If the machine
1005 has register windows, this gives only the outbound registers.
1006 INCOMING_REGNO gives the corresponding inbound register. */
1007 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1009 /* For each register that may be used for returning values, this gives
1010 a mode used to copy the register's value. VOIDmode indicates the
1011 register is not used for returning values. If the machine has
1012 register windows, this gives only the outbound registers.
1013 INCOMING_REGNO gives the corresponding inbound register. */
1014 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1016 /* For each register that may be used for calling a function, this
1017 gives the offset of that register into the block returned by
1018 __builtin_apply_args. 0 indicates that the register is not
1019 used for calling a function. */
1020 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1022 /* Return the size required for the block returned by __builtin_apply_args,
1023 and initialize apply_args_mode. */
/* NOTE(review): this extract elides interior lines of the function (the
   embedded numbering skips, e.g. 1026 -> 1028 -> 1031); the visible code
   is kept byte-for-byte.  */
1026 apply_args_size (void)
/* Cached across calls: computed once, reused thereafter (size starts at
   -1 as the "not yet computed" sentinel).  */
1028 static int size = -1;
1031 enum machine_mode mode;
1033 /* The values computed by this function never change. */
1036 /* The first value is the incoming arg-pointer. */
1037 size = GET_MODE_SIZE (Pmode);
1039 /* The second value is the structure value address unless this is
1040 passed as an "invisible" first argument. */
1041 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1042 size += GET_MODE_SIZE (Pmode);
/* For each hard register usable for argument passing, record its raw
   mode and its (mode-aligned) offset within the save block.  */
1044 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1045 if (FUNCTION_ARG_REGNO_P (regno))
1047 mode = reg_raw_mode[regno];
1049 gcc_assert (mode != VOIDmode);
/* Round SIZE up to this register's natural alignment before
   assigning its offset.  */
1051 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1052 if (size % align != 0)
1053 size = CEIL (size, align) * align;
1054 apply_args_reg_offset[regno] = size;
1055 size += GET_MODE_SIZE (mode);
1056 apply_args_mode[regno] = mode;
/* Non-argument registers: mark unused in both tables.  */
1060 apply_args_mode[regno] = VOIDmode;
1061 apply_args_reg_offset[regno] = 0;
1067 /* Return the size required for the block returned by __builtin_apply,
1068 and initialize apply_result_mode. */
/* NOTE(review): interior lines are elided from this extract (numbering
   gaps); visible code kept byte-for-byte.  Mirrors apply_args_size but
   walks value-return registers instead of argument registers.  */
1071 apply_result_size (void)
/* Cached result; -1 means "not yet computed".  */
1073 static int size = -1;
1075 enum machine_mode mode;
1077 /* The values computed by this function never change. */
1082 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1083 if (FUNCTION_VALUE_REGNO_P (regno))
1085 mode = reg_raw_mode[regno];
1087 gcc_assert (mode != VOIDmode);
/* Align SIZE to the register mode before accounting for it.  */
1089 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1090 if (size % align != 0)
1091 size = CEIL (size, align) * align;
1092 size += GET_MODE_SIZE (mode);
1093 apply_result_mode[regno] = mode;
1096 apply_result_mode[regno] = VOIDmode;
1098 /* Allow targets that use untyped_call and untyped_return to override
1099 the size so that machine-specific information can be stored here. */
1100 #ifdef APPLY_RESULT_SIZE
1101 size = APPLY_RESULT_SIZE;
1107 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1108 /* Create a vector describing the result block RESULT. If SAVEP is true,
1109 the result block is used to save the values; otherwise it is used to
1110 restore the values. */
/* NOTE(review): declarations/braces are elided in this extract; the code
   below is kept byte-for-byte.  Builds a PARALLEL of SETs, one per live
   result register, moving register->memory (save) or memory->register
   (restore).  */
1113 result_vector (int savep, rtx result)
1115 int regno, size, align, nelts;
1116 enum machine_mode mode;
/* Worst case: one SET per hard register.  */
1118 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1121 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1122 if ((mode = apply_result_mode[regno]) != VOIDmode)
1124 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1125 if (size % align != 0)
1126 size = CEIL (size, align) * align;
/* On restore, map outbound regno to its inbound counterpart
   (register-window targets).  */
1127 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1128 mem = adjust_address (result, mode, size);
1129 savevec[nelts++] = (savep
1130 ? gen_rtx_SET (VOIDmode, mem, reg)
1131 : gen_rtx_SET (VOIDmode, reg, mem));
1132 size += GET_MODE_SIZE (mode);
1134 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec))(
1136 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1138 /* Save the state required to perform an untyped call with the same
1139 arguments as were passed to the current function. */
/* NOTE(review): interior lines elided (numbering gaps); code kept
   byte-for-byte.  Lays out the block in the same order apply_args_size
   computed: arg pointer, optional struct-value address, then each
   argument register at its aligned offset.  */
1142 expand_builtin_apply_args_1 (void)
1145 int size, align, regno;
1146 enum machine_mode mode;
1147 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1149 /* Create a block where the arg-pointer, structure value address,
1150 and argument registers can be saved. */
1151 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1153 /* Walk past the arg-pointer and structure value address. */
1154 size = GET_MODE_SIZE (Pmode);
1155 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1156 size += GET_MODE_SIZE (Pmode);
1158 /* Save each register used in calling a function to the block. */
1159 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1160 if ((mode = apply_args_mode[regno]) != VOIDmode)
1162 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1163 if (size % align != 0)
1164 size = CEIL (size, align) * align;
/* Read the incoming (callee-side) register for this slot.  */
1166 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1168 emit_move_insn (adjust_address (registers, mode, size), tem);
1169 size += GET_MODE_SIZE (mode);
1172 /* Save the arg pointer to the block. */
1173 tem = copy_to_reg (virtual_incoming_args_rtx);
1174 #ifdef STACK_GROWS_DOWNWARD
1175 /* We need the pointer as the caller actually passed them to us, not
1176 as we might have pretended they were passed. Make sure it's a valid
1177 operand, as emit_move_insn isn't expected to handle a PLUS. */
1179 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1182 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1184 size = GET_MODE_SIZE (Pmode);
1186 /* Save the structure value address unless this is passed as an
1187 "invisible" first argument. */
1188 if (struct_incoming_value)
1190 emit_move_insn (adjust_address (registers, Pmode, size),
1191 copy_to_reg (struct_incoming_value));
1192 size += GET_MODE_SIZE (Pmode);
1195 /* Return the address of the block. */
1196 return copy_addr_to_reg (XEXP (registers, 0));
1199 /* __builtin_apply_args returns block of memory allocated on
1200 the stack into which is stored the arg pointer, structure
1201 value address, static chain, and all the registers that might
1202 possibly be used in performing a function call. The code is
1203 moved to the start of the function so the incoming values are
/* NOTE(review): interior lines elided (numbering gaps); code kept
   byte-for-byte.  */
1207 expand_builtin_apply_args (void)
1209 /* Don't do __builtin_apply_args more than once in a function.
1210 Save the result of the first call and reuse it. */
1211 if (apply_args_value != 0)
1212 return apply_args_value;
1214 /* When this function is called, it means that registers must be
1215 saved on entry to this function. So we migrate the
1216 call to the first insn of this function. */
1221 temp = expand_builtin_apply_args_1 ();
1225 apply_args_value = temp;
1227 /* Put the insns after the NOTE that starts the function.
1228 If this is inside a start_sequence, make the outer-level insn
1229 chain current, so the code is placed at the start of the
/* Hoist the generated save sequence to the function entry so incoming
   register values are captured before anything clobbers them.  */
1231 push_topmost_sequence ();
1232 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1233 pop_topmost_sequence ();
1238 /* Perform an untyped call and save the state required to perform an
1239 untyped return of whatever value was returned by the given function. */
/* NOTE(review): this is __builtin_apply's expander.  Interior lines are
   elided from this extract (numbering gaps, e.g. missing else-arms after
   several #ifdef branches); visible code kept byte-for-byte.  Overall
   flow: copy the caller's argument block onto a fresh outgoing-arg area,
   reload the saved argument registers, emit the call, and save the
   return registers into a result block whose address is returned.  */
1242 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1244 int size, align, regno;
1245 enum machine_mode mode;
1246 rtx incoming_args, result, reg, dest, src, call_insn;
1247 rtx old_stack_level = 0;
1248 rtx call_fusage = 0;
1249 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1251 arguments = convert_memory_address (Pmode, arguments);
1253 /* Create a block where the return registers can be saved. */
1254 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1256 /* Fetch the arg pointer from the ARGUMENTS block. */
1257 incoming_args = gen_reg_rtx (Pmode);
1258 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1259 #ifndef STACK_GROWS_DOWNWARD
1260 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1261 incoming_args, 0, OPTAB_LIB_WIDEN);
1264 /* Push a new argument block and copy the arguments. Do not allow
1265 the (potential) memcpy call below to interfere with our stack
1267 do_pending_stack_adjust ();
1270 /* Save the stack with nonlocal if available. */
1271 #ifdef HAVE_save_stack_nonlocal
1272 if (HAVE_save_stack_nonlocal)
1273 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1276 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1278 /* Allocate a block of memory onto the stack and copy the memory
1279 arguments to the outgoing arguments address. */
1280 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1281 dest = virtual_outgoing_args_rtx;
1282 #ifndef STACK_GROWS_DOWNWARD
/* Upward-growing stack: the block starts ARGSIZE bytes below the
   outgoing-args pointer.  */
1283 if (GET_CODE (argsize) == CONST_INT)
1284 dest = plus_constant (dest, -INTVAL (argsize));
1286 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1288 dest = gen_rtx_MEM (BLKmode, dest);
1289 set_mem_align (dest, PARM_BOUNDARY);
1290 src = gen_rtx_MEM (BLKmode, incoming_args);
1291 set_mem_align (src, PARM_BOUNDARY);
1292 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1294 /* Refer to the argument block. */
1296 arguments = gen_rtx_MEM (BLKmode, arguments);
1297 set_mem_align (arguments, PARM_BOUNDARY);
1299 /* Walk past the arg-pointer and structure value address. */
1300 size = GET_MODE_SIZE (Pmode);
1302 size += GET_MODE_SIZE (Pmode);
1304 /* Restore each of the registers previously saved. Make USE insns
1305 for each of these registers for use in making the call. */
1306 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1307 if ((mode = apply_args_mode[regno]) != VOIDmode)
1309 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1310 if (size % align != 0)
1311 size = CEIL (size, align) * align;
1312 reg = gen_rtx_REG (mode, regno);
1313 emit_move_insn (reg, adjust_address (arguments, mode, size));
1314 use_reg (&call_fusage, reg);
1315 size += GET_MODE_SIZE (mode);
1318 /* Restore the structure value address unless this is passed as an
1319 "invisible" first argument. */
1320 size = GET_MODE_SIZE (Pmode);
1323 rtx value = gen_reg_rtx (Pmode);
1324 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1325 emit_move_insn (struct_value, value);
1326 if (REG_P (struct_value))
1327 use_reg (&call_fusage, struct_value);
1328 size += GET_MODE_SIZE (Pmode);
1331 /* All arguments and registers used for the call are set up by now! */
1332 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1334 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1335 and we don't want to load it into a register as an optimization,
1336 because prepare_call_address already did it if it should be done. */
1337 if (GET_CODE (function) != SYMBOL_REF)
1338 function = memory_address (FUNCTION_MODE, function);
1340 /* Generate the actual call instruction and save the return value. */
1341 #ifdef HAVE_untyped_call
1342 if (HAVE_untyped_call)
1343 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1344 result, result_vector (1, result)));
1347 #ifdef HAVE_call_value
1348 if (HAVE_call_value)
1352 /* Locate the unique return register. It is not possible to
1353 express a call that sets more than one return register using
1354 call_value; use untyped_call for that. In fact, untyped_call
1355 only needs to save the return registers in the given block. */
1356 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1357 if ((mode = apply_result_mode[regno]) != VOIDmode)
1359 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1361 valreg = gen_rtx_REG (mode, regno);
1364 emit_call_insn (GEN_CALL_VALUE (valreg,
1365 gen_rtx_MEM (FUNCTION_MODE, function),
1366 const0_rtx, NULL_RTX, const0_rtx));
1368 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1374 /* Find the CALL insn we just emitted, and attach the register usage
1376 call_insn = last_call_insn ();
1377 add_function_usage_to (call_insn, call_fusage);
1379 /* Restore the stack. */
1380 #ifdef HAVE_save_stack_nonlocal
1381 if (HAVE_save_stack_nonlocal)
1382 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1385 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1389 /* Return the address of the result block. */
1390 result = copy_addr_to_reg (XEXP (result, 0));
1391 return convert_memory_address (ptr_mode, result);
1394 /* Perform an untyped return. */
/* NOTE(review): __builtin_return expander.  Interior lines elided
   (numbering gaps); code kept byte-for-byte.  RESULT is the address of
   a block produced by __builtin_apply; its saved return registers are
   reloaded before returning.  */
1397 expand_builtin_return (rtx result)
1399 int size, align, regno;
1400 enum machine_mode mode;
1402 rtx call_fusage = 0;
1404 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1406 apply_result_size ();
1407 result = gen_rtx_MEM (BLKmode, result);
1409 #ifdef HAVE_untyped_return
1410 if (HAVE_untyped_return)
1412 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1418 /* Restore the return value and note that each value is used. */
1420 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1421 if ((mode = apply_result_mode[regno]) != VOIDmode)
1423 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1424 if (size % align != 0)
1425 size = CEIL (size, align) * align;
1426 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1427 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate a USE for each restored register so the return does
   not appear to clobber it.  */
1429 push_to_sequence (call_fusage);
1430 emit_insn (gen_rtx_USE (VOIDmode, reg));
1431 call_fusage = get_insns ();
1433 size += GET_MODE_SIZE (mode);
1436 /* Put the USE insns before the return. */
1437 emit_insn (call_fusage);
1439 /* Return whatever values was restored by jumping directly to the end
1441 expand_naked_return ();
1444 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a front-end type node onto the enum type_class value that
   __builtin_classify_type reports.  NOTE(review): at least one case
   line (UNION_TYPE, between RECORD_TYPE and QUAL_UNION_TYPE) appears
   elided from this extract; code kept byte-for-byte.  */
1446 static enum type_class
1447 type_to_class (tree type)
1449 switch (TREE_CODE (type))
1451 case VOID_TYPE: return void_type_class;
1452 case INTEGER_TYPE: return integer_type_class;
1453 case CHAR_TYPE: return char_type_class;
1454 case ENUMERAL_TYPE: return enumeral_type_class;
1455 case BOOLEAN_TYPE: return boolean_type_class;
1456 case POINTER_TYPE: return pointer_type_class;
1457 case REFERENCE_TYPE: return reference_type_class;
1458 case OFFSET_TYPE: return offset_type_class;
1459 case REAL_TYPE: return real_type_class;
1460 case COMPLEX_TYPE: return complex_type_class;
1461 case FUNCTION_TYPE: return function_type_class;
1462 case METHOD_TYPE: return method_type_class;
1463 case RECORD_TYPE: return record_type_class;
1465 case QUAL_UNION_TYPE: return union_type_class;
/* Arrays flagged as strings classify separately.  */
1466 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1467 ? string_type_class : array_type_class);
1468 case SET_TYPE: return set_type_class;
1469 case FILE_TYPE: return file_type_class;
1470 case LANG_TYPE: return lang_type_class;
1471 default: return no_type_class;
1475 /* Expand a call to __builtin_classify_type with arguments found in
/* NOTE(review): lines elided here (numbering gap); presumably the first
   return is guarded by an arglist check — confirm against the full
   source.  Code kept byte-for-byte.  */
1479 expand_builtin_classify_type (tree arglist)
1482 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
/* Fallback when no argument is available.  */
1483 return GEN_INT (no_type_class);
1486 /* This helper macro, meant to be used in mathfn_built_in below,
1487 determines which among a set of three builtin math functions is
1488 appropriate for a given type mode. The `F' and `L' cases are
1489 automatically generated from the `double' case. */
/* Expands to three case labels (double/float/long double variants) and
   records the corresponding built_in_function codes in fcode/fcodef/
   fcodel, which mathfn_built_in consults after the switch.  */
1490 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1491 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1492 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1493 fcodel = BUILT_IN_MATHFN##L ; break;
1495 /* Return mathematic function equivalent to FN but operating directly
1496 on TYPE, if available. If we can't do the conversion, return zero. */
/* NOTE(review): return-type line, braces and the switch header/default
   are elided from this extract (numbering gaps); code kept
   byte-for-byte.  */
1498 mathfn_built_in (tree type, enum built_in_function fn)
1500 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN entry matches FN against the double/float/long-double
   triple and records all three codes; the type check below then picks
   the variant matching TYPE.  */
1504 CASE_MATHFN (BUILT_IN_ACOS)
1505 CASE_MATHFN (BUILT_IN_ACOSH)
1506 CASE_MATHFN (BUILT_IN_ASIN)
1507 CASE_MATHFN (BUILT_IN_ASINH)
1508 CASE_MATHFN (BUILT_IN_ATAN)
1509 CASE_MATHFN (BUILT_IN_ATAN2)
1510 CASE_MATHFN (BUILT_IN_ATANH)
1511 CASE_MATHFN (BUILT_IN_CBRT)
1512 CASE_MATHFN (BUILT_IN_CEIL)
1513 CASE_MATHFN (BUILT_IN_COPYSIGN)
1514 CASE_MATHFN (BUILT_IN_COS)
1515 CASE_MATHFN (BUILT_IN_COSH)
1516 CASE_MATHFN (BUILT_IN_DREM)
1517 CASE_MATHFN (BUILT_IN_ERF)
1518 CASE_MATHFN (BUILT_IN_ERFC)
1519 CASE_MATHFN (BUILT_IN_EXP)
1520 CASE_MATHFN (BUILT_IN_EXP10)
1521 CASE_MATHFN (BUILT_IN_EXP2)
1522 CASE_MATHFN (BUILT_IN_EXPM1)
1523 CASE_MATHFN (BUILT_IN_FABS)
1524 CASE_MATHFN (BUILT_IN_FDIM)
1525 CASE_MATHFN (BUILT_IN_FLOOR)
1526 CASE_MATHFN (BUILT_IN_FMA)
1527 CASE_MATHFN (BUILT_IN_FMAX)
1528 CASE_MATHFN (BUILT_IN_FMIN)
1529 CASE_MATHFN (BUILT_IN_FMOD)
1530 CASE_MATHFN (BUILT_IN_FREXP)
1531 CASE_MATHFN (BUILT_IN_GAMMA)
1532 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1533 CASE_MATHFN (BUILT_IN_HYPOT)
1534 CASE_MATHFN (BUILT_IN_ILOGB)
1535 CASE_MATHFN (BUILT_IN_INF)
1536 CASE_MATHFN (BUILT_IN_J0)
1537 CASE_MATHFN (BUILT_IN_J1)
1538 CASE_MATHFN (BUILT_IN_JN)
1539 CASE_MATHFN (BUILT_IN_LDEXP)
1540 CASE_MATHFN (BUILT_IN_LGAMMA)
1541 CASE_MATHFN (BUILT_IN_LLRINT)
1542 CASE_MATHFN (BUILT_IN_LLROUND)
1543 CASE_MATHFN (BUILT_IN_LOG)
1544 CASE_MATHFN (BUILT_IN_LOG10)
1545 CASE_MATHFN (BUILT_IN_LOG1P)
1546 CASE_MATHFN (BUILT_IN_LOG2)
1547 CASE_MATHFN (BUILT_IN_LOGB)
1548 CASE_MATHFN (BUILT_IN_LRINT)
1549 CASE_MATHFN (BUILT_IN_LROUND)
1550 CASE_MATHFN (BUILT_IN_MODF)
1551 CASE_MATHFN (BUILT_IN_NAN)
1552 CASE_MATHFN (BUILT_IN_NANS)
1553 CASE_MATHFN (BUILT_IN_NEARBYINT)
1554 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1555 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1556 CASE_MATHFN (BUILT_IN_POW)
1557 CASE_MATHFN (BUILT_IN_POW10)
1558 CASE_MATHFN (BUILT_IN_REMAINDER)
1559 CASE_MATHFN (BUILT_IN_REMQUO)
1560 CASE_MATHFN (BUILT_IN_RINT)
1561 CASE_MATHFN (BUILT_IN_ROUND)
1562 CASE_MATHFN (BUILT_IN_SCALB)
1563 CASE_MATHFN (BUILT_IN_SCALBLN)
1564 CASE_MATHFN (BUILT_IN_SCALBN)
1565 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1566 CASE_MATHFN (BUILT_IN_SIN)
1567 CASE_MATHFN (BUILT_IN_SINCOS)
1568 CASE_MATHFN (BUILT_IN_SINH)
1569 CASE_MATHFN (BUILT_IN_SQRT)
1570 CASE_MATHFN (BUILT_IN_TAN)
1571 CASE_MATHFN (BUILT_IN_TANH)
1572 CASE_MATHFN (BUILT_IN_TGAMMA)
1573 CASE_MATHFN (BUILT_IN_TRUNC)
1574 CASE_MATHFN (BUILT_IN_Y0)
1575 CASE_MATHFN (BUILT_IN_Y1)
1576 CASE_MATHFN (BUILT_IN_YN)
/* Select the decl for the variant whose type matches TYPE; only
   implicitly-usable decls are returned.  */
1582 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1583 return implicit_built_in_decls[fcode];
1584 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1585 return implicit_built_in_decls[fcodef];
1586 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1587 return implicit_built_in_decls[fcodel];
1592 /* If errno must be maintained, expand the RTL to check if the result,
1593 TARGET, of a built-in function call, EXP, is NaN, and if so set
/* NOTE(review): interior lines elided (numbering gaps); code kept
   byte-for-byte.  */
1597 expand_errno_check (tree exp, rtx target)
1599 rtx lab = gen_label_rtx ();
1601 /* Test the result; if it is NaN, set errno=EDOM because
1602 the argument was not in the domain. */
/* TARGET == TARGET is false only for NaN, so the EQ branch skips the
   errno-setting code for ordinary results.  */
1603 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1607 /* If this built-in doesn't throw an exception, set errno directly. */
1608 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1610 #ifdef GEN_ERRNO_RTX
1611 rtx errno_rtx = GEN_ERRNO_RTX;
/* Fallback when the target does not define GEN_ERRNO_RTX: address
   errno through its symbol.  */
1614 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1616 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1622 /* We can't set errno=EDOM directly; let the library call do it.
1623 Pop the arguments right away in case the call gets deleted. */
1625 expand_call (exp, target, 0);
1631 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1632 Return 0 if a normal call should be emitted rather than expanding the
1633 function in-line. EXP is the expression that is a call to the builtin
1634 function; if convenient, the result should be placed in TARGET.
1635 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): interior lines are elided from this extract (numbering
   gaps — e.g. several leading case labels of the switch, the
   start_sequence call, and some closing braces are missing); the
   visible code is kept byte-for-byte.  */
1638 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1640 optab builtin_optab;
1641 rtx op0, insns, before_call;
1642 tree fndecl = get_callee_fndecl (exp);
1643 tree arglist = TREE_OPERAND (exp, 1);
1644 enum machine_mode mode;
1645 bool errno_set = false;
/* Exactly one REAL_TYPE argument is required.  */
1648 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1651 arg = TREE_VALUE (arglist);
/* Select the optab for this builtin; errno_set marks builtins whose
   library semantics may set errno on a domain error.  */
1653 switch (DECL_FUNCTION_CODE (fndecl))
1656 case BUILT_IN_SQRTF:
1657 case BUILT_IN_SQRTL:
/* sqrt only sets errno for negative arguments; skip the check when
   the argument is provably nonnegative.  */
1658 errno_set = ! tree_expr_nonnegative_p (arg);
1659 builtin_optab = sqrt_optab;
1664 errno_set = true; builtin_optab = exp_optab; break;
1665 case BUILT_IN_EXP10:
1666 case BUILT_IN_EXP10F:
1667 case BUILT_IN_EXP10L:
1668 case BUILT_IN_POW10:
1669 case BUILT_IN_POW10F:
1670 case BUILT_IN_POW10L:
1671 errno_set = true; builtin_optab = exp10_optab; break;
1673 case BUILT_IN_EXP2F:
1674 case BUILT_IN_EXP2L:
1675 errno_set = true; builtin_optab = exp2_optab; break;
1676 case BUILT_IN_EXPM1:
1677 case BUILT_IN_EXPM1F:
1678 case BUILT_IN_EXPM1L:
1679 errno_set = true; builtin_optab = expm1_optab; break;
1681 case BUILT_IN_LOGBF:
1682 case BUILT_IN_LOGBL:
1683 errno_set = true; builtin_optab = logb_optab; break;
1684 case BUILT_IN_ILOGB:
1685 case BUILT_IN_ILOGBF:
1686 case BUILT_IN_ILOGBL:
1687 errno_set = true; builtin_optab = ilogb_optab; break;
1691 errno_set = true; builtin_optab = log_optab; break;
1692 case BUILT_IN_LOG10:
1693 case BUILT_IN_LOG10F:
1694 case BUILT_IN_LOG10L:
1695 errno_set = true; builtin_optab = log10_optab; break;
1697 case BUILT_IN_LOG2F:
1698 case BUILT_IN_LOG2L:
1699 errno_set = true; builtin_optab = log2_optab; break;
1700 case BUILT_IN_LOG1P:
1701 case BUILT_IN_LOG1PF:
1702 case BUILT_IN_LOG1PL:
1703 errno_set = true; builtin_optab = log1p_optab; break;
1705 case BUILT_IN_ASINF:
1706 case BUILT_IN_ASINL:
1707 builtin_optab = asin_optab; break;
1709 case BUILT_IN_ACOSF:
1710 case BUILT_IN_ACOSL:
1711 builtin_optab = acos_optab; break;
1715 builtin_optab = tan_optab; break;
1717 case BUILT_IN_ATANF:
1718 case BUILT_IN_ATANL:
1719 builtin_optab = atan_optab; break;
1720 case BUILT_IN_FLOOR:
1721 case BUILT_IN_FLOORF:
1722 case BUILT_IN_FLOORL:
1723 builtin_optab = floor_optab; break;
1725 case BUILT_IN_CEILF:
1726 case BUILT_IN_CEILL:
1727 builtin_optab = ceil_optab; break;
1728 case BUILT_IN_TRUNC:
1729 case BUILT_IN_TRUNCF:
1730 case BUILT_IN_TRUNCL:
1731 builtin_optab = btrunc_optab; break;
1732 case BUILT_IN_ROUND:
1733 case BUILT_IN_ROUNDF:
1734 case BUILT_IN_ROUNDL:
1735 builtin_optab = round_optab; break;
1736 case BUILT_IN_NEARBYINT:
1737 case BUILT_IN_NEARBYINTF:
1738 case BUILT_IN_NEARBYINTL:
1739 builtin_optab = nearbyint_optab; break;
1741 case BUILT_IN_RINTF:
1742 case BUILT_IN_RINTL:
1743 builtin_optab = rint_optab; break;
1748 /* Make a suitable register to place result in. */
1749 mode = TYPE_MODE (TREE_TYPE (exp));
/* errno handling is only needed when -fno-errno-math is not in effect
   and the mode honors NaNs.  */
1751 if (! flag_errno_math || ! HONOR_NANS (mode))
1754 /* Before working hard, check whether the instruction is available. */
1755 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1757 target = gen_reg_rtx (mode);
1759 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1760 need to expand the argument again. This way, we will not perform
1761 side-effects more the once. */
1762 narg = builtin_save_expr (arg);
1765 arglist = build_tree_list (NULL_TREE, arg);
1766 exp = build_function_call_expr (fndecl, arglist);
1769 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1773 /* Compute into TARGET.
1774 Set TARGET to wherever the result comes back. */
1775 target = expand_unop (mode, builtin_optab, op0, target, 0);
1780 expand_errno_check (exp, target);
1782 /* Output the entire sequence. */
1783 insns = get_insns ();
1789 /* If we were unable to expand via the builtin, stop the sequence
1790 (without outputting the insns) and call to the library function
1791 with the stabilized argument list. */
1795 before_call = get_last_insn ();
1797 target = expand_call (exp, target, target == const0_rtx);
1799 /* If this is a sqrt operation and we don't care about errno, try to
1800 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1801 This allows the semantics of the libcall to be visible to the RTL
1803 if (builtin_optab == sqrt_optab && !errno_set)
1805 /* Search backwards through the insns emitted by expand_call looking
1806 for the instruction with the REG_RETVAL note. */
1807 rtx last = get_last_insn ();
1808 while (last != before_call)
1810 if (find_reg_note (last, REG_RETVAL, NULL))
1812 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1813 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1814 two elements, i.e. symbol_ref(sqrt) and the operand. */
1816 && GET_CODE (note) == EXPR_LIST
1817 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1818 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1819 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1821 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1822 /* Check operand is a register with expected mode. */
1825 && GET_MODE (operand) == mode)
1827 /* Replace the REG_EQUAL note with a SQRT rtx. */
1828 rtx equiv = gen_rtx_SQRT (mode, operand);
1829 set_unique_reg_note (last, REG_EQUAL, equiv);
1834 last = PREV_INSN (last);
1841 /* Expand a call to the builtin binary math functions (pow and atan2).
1842 Return 0 if a normal call should be emitted rather than expanding the
1843 function in-line. EXP is the expression that is a call to the builtin
1844 function; if convenient, the result should be placed in TARGET.
1845 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): interior lines elided (numbering gaps — return type,
   braces, some case labels and conditionals are missing); visible code
   kept byte-for-byte.  */
1849 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1851 optab builtin_optab;
1852 rtx op0, op1, insns;
1853 tree fndecl = get_callee_fndecl (exp);
1854 tree arglist = TREE_OPERAND (exp, 1);
1855 tree arg0, arg1, temp, narg;
1856 enum machine_mode mode;
/* Unlike the unary expander, errno checking defaults ON here.  */
1857 bool errno_set = true;
/* Exactly two REAL_TYPE arguments required.  */
1860 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1863 arg0 = TREE_VALUE (arglist);
1864 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1866 switch (DECL_FUNCTION_CODE (fndecl))
1871 builtin_optab = pow_optab; break;
1872 case BUILT_IN_ATAN2:
1873 case BUILT_IN_ATAN2F:
1874 case BUILT_IN_ATAN2L:
1875 builtin_optab = atan2_optab; break;
1877 case BUILT_IN_FMODF:
1878 case BUILT_IN_FMODL:
1879 builtin_optab = fmod_optab; break;
1881 case BUILT_IN_DREMF:
1882 case BUILT_IN_DREML:
1883 builtin_optab = drem_optab; break;
1888 /* Make a suitable register to place result in. */
1889 mode = TYPE_MODE (TREE_TYPE (exp));
1891 /* Before working hard, check whether the instruction is available. */
1892 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1895 target = gen_reg_rtx (mode);
1897 if (! flag_errno_math || ! HONOR_NANS (mode))
1900 /* Always stabilize the argument list. */
/* builtin_save_expr wraps each arg so re-expansion for the fallback
   libcall will not re-run side effects.  */
1901 narg = builtin_save_expr (arg1);
1904 temp = build_tree_list (NULL_TREE, narg);
1908 temp = TREE_CHAIN (arglist);
1910 narg = builtin_save_expr (arg0);
1913 arglist = tree_cons (NULL_TREE, narg, temp);
1917 arglist = tree_cons (NULL_TREE, arg0, temp);
1920 exp = build_function_call_expr (fndecl, arglist);
1922 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1923 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1927 /* Compute into TARGET.
1928 Set TARGET to wherever the result comes back. */
1929 target = expand_binop (mode, builtin_optab, op0, op1,
1930 target, 0, OPTAB_DIRECT);
1932 /* If we were unable to expand via the builtin, stop the sequence
1933 (without outputting the insns) and call to the library function
1934 with the stabilized argument list. */
1938 return expand_call (exp, target, target == const0_rtx);
1942 expand_errno_check (exp, target);
1944 /* Output the entire sequence. */
1945 insns = get_insns ();
1952 /* Expand a call to the builtin sin and cos math functions.
1953 Return 0 if a normal call should be emitted rather than expanding the
1954 function in-line. EXP is the expression that is a call to the builtin
1955 function; if convenient, the result should be placed in TARGET.
1956 SUBTARGET may be used as the target for computing one of EXP's
/* NOTE(review): interior lines elided (numbering gaps — case labels for
   SIN/COS, braces, and some statements are missing); visible code kept
   byte-for-byte.  */
1960 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
1962 optab builtin_optab;
1963 rtx op0, insns, before_call;
1964 tree fndecl = get_callee_fndecl (exp);
1965 tree arglist = TREE_OPERAND (exp, 1);
1966 enum machine_mode mode;
1967 bool errno_set = false;
1970 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1973 arg = TREE_VALUE (arglist);
1975 switch (DECL_FUNCTION_CODE (fndecl))
/* First preference: the combined sincos expander.  */
1983 builtin_optab = sincos_optab; break;
1988 /* Make a suitable register to place result in. */
1989 mode = TYPE_MODE (TREE_TYPE (exp));
1991 if (! flag_errno_math || ! HONOR_NANS (mode))
1994 /* Check if sincos insn is available, otherwise fallback
1995 to sin or cos insn. */
1996 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) {
1997 switch (DECL_FUNCTION_CODE (fndecl))
2002 builtin_optab = sin_optab; break;
2006 builtin_optab = cos_optab; break;
2012 /* Before working hard, check whether the instruction is available. */
2013 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2015 target = gen_reg_rtx (mode);
2017 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2018 need to expand the argument again. This way, we will not perform
2019 side-effects more the once. */
2020 narg = save_expr (arg);
2023 arglist = build_tree_list (NULL_TREE, arg)(
2024 exp = build_function_call_expr (fndecl, arglist);
2027 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2031 /* Compute into TARGET.
2032 Set TARGET to wherever the result comes back. */
2033 if (builtin_optab == sincos_optab)
/* sincos produces two values; depending on which builtin was called
   only one of the two output operands is requested (the other is 0).  */
2037 switch (DECL_FUNCTION_CODE (fndecl))
2042 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2047 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2052 gcc_assert (result);
2056 target = expand_unop (mode, builtin_optab, op0, target, 0);
2062 expand_errno_check (exp, target);
2064 /* Output the entire sequence. */
2065 insns = get_insns ();
2071 /* If we were unable to expand via the builtin, stop the sequence
2072 (without outputting the insns) and call to the library function
2073 with the stabilized argument list. */
2077 before_call = get_last_insn ();
2079 target = expand_call (exp, target, target == const0_rtx);
2084 /* To evaluate powi(x,n), the floating point value x raised to the
2085 constant integer exponent n, we use a hybrid algorithm that
2086 combines the "window method" with look-up tables. For an
2087 introduction to exponentiation algorithms and "addition chains",
2088 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2089 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2090 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2091 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2093 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2094 multiplications to inline before calling the system library's pow
2095 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2096 so this default never requires calling pow, powf or powl. */
2098 #ifndef POWI_MAX_MULTS
2099 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2102 /* The size of the "optimal power tree" lookup table. All
2103 exponents less than this value are simply looked up in the
2104 powi_table below. This threshold is also used to size the
2105 cache of pseudo registers that hold intermediate results. */
2106 #define POWI_TABLE_SIZE 256
2108 /* The size, in bits of the window, used in the "window method"
2109 exponentiation algorithm. This is equivalent to a radix of
2110 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2111 #define POWI_WINDOW_SIZE 3
/* These three constants are consumed by powi_cost / powi_lookup_cost
   below and by the powi expansion code that follows them.  */
2113 /* The following table is an efficient representation of an
2114 "optimal power tree". For each value, i, the corresponding
2115 value, j, in the table states than an optimal evaluation
2116 sequence for calculating pow(x,i) can be found by evaluating
2117 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2118 100 integers is given in Knuth's "Seminumerical algorithms". */
/* NOTE(review): the initializer's opening and closing braces appear
   elided from this extract; data rows kept byte-for-byte.  */
2120 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2122 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2123 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2124 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2125 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2126 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2127 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2128 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2129 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2130 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2131 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2132 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2133 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2134 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2135 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2136 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2137 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2138 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2139 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2140 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2141 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2142 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2143 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2144 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2145 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2146 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2147 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2148 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2149 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2150 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2151 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2152 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2153 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2157 /* Return the number of multiplications required to calculate
2158 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2159 subroutine of powi_cost. CACHE is an array indicating
2160 which exponents have already been calculated. */
2163 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2165 /* If we've already calculated this exponent, then this evaluation
2166 doesn't require any additional multiplications. */
/* Recurse on the optimal split from powi_table: the cost is both halves
   plus the single multiply that combines them.  NOTE(review): the
   cache-hit early return for this function is not visible in this
   excerpt.  */
2171 return powi_lookup_cost (n - powi_table[n], cache)
2172 + powi_lookup_cost (powi_table[n], cache) + 1;
2175 /* Return the number of multiplications required to calculate
2176 powi(x,n) for an arbitrary x, given the exponent N. This
2177 function needs to be kept in sync with expand_powi below. */
2180 powi_cost (HOST_WIDE_INT n)
2182 bool cache[POWI_TABLE_SIZE];
2183 unsigned HOST_WIDE_INT digit;
2184 unsigned HOST_WIDE_INT val;
2190 /* Ignore the reciprocal when calculating the cost. */
2191 val = (n < 0) ? -n : n;
2193 /* Initialize the exponent cache. */
2194 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel off POWI_WINDOW_SIZE exponent bits per iteration
   until the remaining exponent fits in the lookup table.  */
2199 while (val >= POWI_TABLE_SIZE)
2203 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2204 result += powi_lookup_cost (digit, cache)
2205 + POWI_WINDOW_SIZE + 1;
2206 val >>= POWI_WINDOW_SIZE;
/* Whatever is left is looked up via the optimal power tree.  */
2215 return result + powi_lookup_cost (val, cache);
2218 /* Recursive subroutine of expand_powi. This function takes the array,
2219 CACHE, of already calculated exponents and an exponent N and returns
2220 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2223 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2225 unsigned HOST_WIDE_INT digit;
/* Small exponent: split according to the optimal power tree.  */
2229 if (n < POWI_TABLE_SIZE)
2234 target = gen_reg_rtx (mode);
2237 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2238 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Exponent outside the table: strip off the low window bits
   (mirrors the window method in powi_cost).  */
2242 target = gen_reg_rtx (mode);
2243 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2244 op0 = expand_powi_1 (mode, n - digit, cache);
2245 op1 = expand_powi_1 (mode, digit, cache);
/* Remaining case squares pow(x, n/2).  NOTE(review): the op1
   assignment for this path is not visible in this excerpt.  */
2249 target = gen_reg_rtx (mode);
2250 op0 = expand_powi_1 (mode, n >> 1, cache);
/* Combine the two halves with a single multiplication.  */
2254 result = expand_mult (mode, op0, op1, target, 0);
2255 if (result != target)
2256 emit_move_insn (target, result);
2260 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2261 floating point operand in mode MODE, and N is the exponent. This
2262 function needs to be kept in sync with powi_cost above. */
2265 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2267 unsigned HOST_WIDE_INT val;
2268 rtx cache[POWI_TABLE_SIZE];
/* powi(x,0) is the constant 1.0 (the guarding test is not visible in
   this excerpt).  */
2272 return CONST1_RTX (mode);
2274 val = (n < 0) ? -n : n;
2276 memset (cache, 0, sizeof (cache));
/* Expand |n|; the sign is handled by the reciprocal below.  */
2279 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2281 /* If the original exponent was negative, reciprocate the result. */
2283 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2284 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2289 /* Expand a call to the pow built-in mathematical function. Return 0 if
2290 a normal call should be emitted rather than expanding the function
2291 in-line. EXP is the expression that is a call to the builtin
2292 function; if convenient, the result should be placed in TARGET. */
2295 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2297 tree arglist = TREE_OPERAND (exp, 1);
2300 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2303 arg0 = TREE_VALUE (arglist);
2304 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2306 if (TREE_CODE (arg1) == REAL_CST
2307 && ! TREE_CONSTANT_OVERFLOW (arg1))
2309 REAL_VALUE_TYPE cint;
2313 c = TREE_REAL_CST (arg1);
2314 n = real_to_integer (&c);
/* Round-trip the constant through an integer: if the value survives
   unchanged, the exponent is an exact integer and powi applies.  */
2315 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2316 if (real_identical (&c, &cint))
2318 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2319 Otherwise, check the number of multiplications required.
2320 Note that pow never sets errno for an integer exponent. */
2321 if ((n >= -1 && n <= 2)
2322 || (flag_unsafe_math_optimizations
2324 && powi_cost (n) <= POWI_MAX_MULTS))
2326 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2327 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2328 op = force_reg (mode, op);
2329 return expand_powi (op, mode, n);
/* Non-integer exponents are only expanded under
   -funsafe-math-optimizations.  */
2334 if (! flag_unsafe_math_optimizations)
2336 return expand_builtin_mathfn_2 (exp, target, subtarget);
2339 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2340 if we failed the caller should emit a normal call, otherwise
2341 try to get the result in TARGET, if convenient. */
2344 expand_builtin_strlen (tree arglist, rtx target,
2345 enum machine_mode target_mode)
2347 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2352 tree len, src = TREE_VALUE (arglist);
2353 rtx result, src_reg, char_rtx, before_strlen;
2354 enum machine_mode insn_mode = target_mode, char_mode;
2355 enum insn_code icode = CODE_FOR_nothing;
2358 /* If the length can be computed at compile-time, return it. */
2359 len = c_strlen (src, 0);
2361 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2363 /* If the length can be computed at compile-time and is constant
2364 integer, but there are side-effects in src, evaluate
2365 src for side-effects, then return len.
2366 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2367 can be optimized into: i++; x = 3; */
2368 len = c_strlen (src, 1);
2369 if (len && TREE_CODE (len) == INTEGER_CST)
2371 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2372 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2375 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2377 /* If SRC is not a pointer type, don't do this operation inline. */
2381 /* Bail out if we can't compute strlen in the right mode. */
/* Walk from TARGET_MODE through successively wider modes looking for a
   target strlen pattern.  */
2382 while (insn_mode != VOIDmode)
2384 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2385 if (icode != CODE_FOR_nothing)
2388 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2390 if (insn_mode == VOIDmode)
2393 /* Make a place to write the result of the instruction. */
2397 && GET_MODE (result) == insn_mode
2398 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2399 result = gen_reg_rtx (insn_mode);
2401 /* Make a place to hold the source address. We will not expand
2402 the actual source until we are sure that the expansion will
2403 not fail -- there are trees that cannot be expanded twice. */
2404 src_reg = gen_reg_rtx (Pmode);
2406 /* Mark the beginning of the strlen sequence so we can emit the
2407 source operand later. */
2408 before_strlen = get_last_insn ();
2410 char_rtx = const0_rtx;
2411 char_mode = insn_data[(int) icode].operand[2].mode;
2412 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2414 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
/* Generate the strlen pattern; operands are the result register, the
   source memory, the search character and the alignment.  */
2416 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2417 char_rtx, GEN_INT (align));
2422 /* Now that we are assured of success, expand the source. */
2424 pat = memory_address (BLKmode,
2425 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
2427 emit_move_insn (src_reg, pat);
/* Splice the source-address computation in ahead of the strlen insn
   that was marked by BEFORE_STRLEN.  */
2432 emit_insn_after (pat, before_strlen);
2434 emit_insn_before (pat, get_insns ());
2436 /* Return the value in the proper mode for this function. */
2437 if (GET_MODE (result) == target_mode)
2439 else if (target != 0)
2440 convert_move (target, result, 0);
2442 target = convert_to_mode (target_mode, result, 0);
2448 /* Expand a call to the strstr builtin. Return 0 if we failed the
2449 caller should emit a normal call, otherwise try to get the result
2450 in TARGET, if convenient (and in mode MODE if that's convenient). */
2453 expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
2455 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2459 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2461 const char *p1, *p2;
/* When both arguments are constant strings, perform the search at
   compile time and return an offset into S1.  */
2470 const char *r = strstr (p1, p2);
2475 /* Return an offset into the constant string argument. */
2476 tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (s1), s1,
2477 fold_convert (TREE_TYPE (s1),
2478 ssize_int (r - p1))));
2479 return expand_expr (tmp, target, mode, EXPAND_NORMAL);
/* strstr(s1, "") folds to s1 itself.  */
2483 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2488 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2492 /* New argument list transforming strstr(s1, s2) to
2493 strchr(s1, s2[0]). */
2494 arglist = build_tree_list (NULL_TREE,
2495 build_int_cst (NULL_TREE, p2[0]));
2496 arglist = tree_cons (NULL_TREE, s1, arglist);
2497 return expand_expr (build_function_call_expr (fn, arglist),
2498 target, mode, EXPAND_NORMAL);
2502 /* Expand a call to the strchr builtin. Return 0 if we failed the
2503 caller should emit a normal call, otherwise try to get the result
2504 in TARGET, if convenient (and in mode MODE if that's convenient). */
2507 expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
2509 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2513 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
/* Only a compile-time constant search character can be folded.  */
2516 if (TREE_CODE (s2) != INTEGER_CST)
/* Convert the constant to a host character; a nonzero return means it
   is not representable on the target (failure path not visible in
   this excerpt).  */
2526 if (target_char_cast (s2, &c))
2534 /* Return an offset into the constant string argument. */
2535 tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (s1), s1,
2536 fold_convert (TREE_TYPE (s1),
2537 ssize_int (r - p1))));
2538 return expand_expr (tmp, target, mode, EXPAND_NORMAL);
2541 /* FIXME: Should use here strchrM optab so that ports can optimize
2547 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2548 caller should emit a normal call, otherwise try to get the result
2549 in TARGET, if convenient (and in mode MODE if that's convenient). */
2552 expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
2554 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2558 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist))
2562 if (TREE_CODE (s2) != INTEGER_CST)
2571 if (target_char_cast (s2, &c))
/* Search the constant string from the right at compile time.  */
2574 r = strrchr (p1, c);
2579 /* Return an offset into the constant string argument. */
2580 tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (s1), s1,
2581 fold_convert (TREE_TYPE (s1),
2582 ssize_int (r - p1))));
2583 return expand_expr (tmp, target, mode, EXPAND_NORMAL);
/* Without a constant string only the s2 == '\0' case reduces to
   strchr: the first and last occurrence of NUL coincide.  */
2586 if (! integer_zerop (s2))
2589 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2593 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2594 return expand_expr (build_function_call_expr (fn, arglist),
2595 target, mode, EXPAND_NORMAL);
2599 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2600 caller should emit a normal call, otherwise try to get the result
2601 in TARGET, if convenient (and in mode MODE if that's convenient). */
2604 expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
2606 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2610 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2612 const char *p1, *p2;
/* With both strings constant, fold the search at compile time.  */
2621 const char *r = strpbrk (p1, p2);
2626 /* Return an offset into the constant string argument. */
2627 tmp = fold (build2 (PLUS_EXPR, TREE_TYPE (s1), s1,
2628 fold_convert (TREE_TYPE (s1),
2629 ssize_int (r - p1))));
2630 return expand_expr (tmp, target, mode, EXPAND_NORMAL);
2635 /* strpbrk(x, "") == NULL.
2636 Evaluate and ignore the arguments in case they had
2638 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Multi-character accept sets cannot be simplified; emit the call.  */
2643 return 0; /* Really call strpbrk. */
2645 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2649 /* New argument list transforming strpbrk(s1, s2) to
2650 strchr(s1, s2[0]). */
2651 arglist = build_tree_list (NULL_TREE,
2652 build_int_cst (NULL_TREE, p2[0]));
2653 arglist = tree_cons (NULL_TREE, s1, arglist);
2654 return expand_expr (build_function_call_expr (fn, arglist),
2655 target, mode, EXPAND_NORMAL);
2659 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2660 bytes from constant string DATA + OFFSET and return it as target
2664 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2665 enum machine_mode mode)
2667 const char *str = (const char *) data;
/* The assertion guarantees we never read past the string's NUL.  */
2669 gcc_assert (offset >= 0
2670 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2671 <= strlen (str) + 1));
2673 return c_readstr (str + offset, mode);
2676 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2677 Return 0 if we failed, the caller should emit a normal call,
2678 otherwise try to get the result in TARGET, if convenient (and in
2679 mode MODE if that's convenient). */
2681 expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
2683 if (!validate_arglist (arglist,
2684 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2688 tree dest = TREE_VALUE (arglist);
2689 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2690 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2691 const char *src_str;
2692 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2693 unsigned int dest_align
2694 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2695 rtx dest_mem, src_mem, dest_addr, len_rtx;
2697 /* If DEST is not a pointer type, call the normal function. */
2698 if (dest_align == 0)
2701 /* If the LEN parameter is zero, return DEST. */
2702 if (integer_zerop (len))
2704 /* Evaluate and ignore SRC in case it has side-effects. */
2705 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2706 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2709 /* If SRC and DEST are the same (and not volatile), return DEST. */
2710 if (operand_equal_p (src, dest, 0))
2712 /* Evaluate and ignore LEN in case it has side-effects. */
2713 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2714 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2717 /* If either SRC is not a pointer type, don't do this
2718 operation in-line. */
2722 dest_mem = get_memory_rtx (dest);
2723 set_mem_align (dest_mem, dest_align);
2724 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2725 src_str = c_getstr (src);
2727 /* If SRC is a string constant and block move would be done
2728 by pieces, we can avoid loading the string from memory
2729 and only stored the computed constants. */
2731 && GET_CODE (len_rtx) == CONST_INT
2732 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2733 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2734 (void *) src_str, dest_align)
2736 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2737 builtin_memcpy_read_str,
2738 (void *) src_str, dest_align, 0);
/* Return the destination address as a ptr_mode value.  */
2739 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2740 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2744 src_mem = get_memory_rtx (src);
2745 set_mem_align (src_mem, src_align);
2747 /* Copy word part most expediently. */
2748 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2753 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2754 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2760 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2761 Return 0 if we failed the caller should emit a normal call,
2762 otherwise try to get the result in TARGET, if convenient (and in
2763 mode MODE if that's convenient). If ENDP is 0 return the
2764 destination pointer, if ENDP is 1 return the end pointer ala
2765 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2769 expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
2772 if (!validate_arglist (arglist,
2773 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2775 /* If return value is ignored, transform mempcpy into memcpy. */
2776 else if (target == const0_rtx)
2778 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2783 return expand_expr (build_function_call_expr (fn, arglist),
2784 target, mode, EXPAND_NORMAL);
2788 tree dest = TREE_VALUE (arglist);
2789 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2790 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2791 const char *src_str;
2792 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2793 unsigned int dest_align
2794 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2795 rtx dest_mem, src_mem, len_rtx;
2797 /* If DEST is not a pointer type, call the normal function. */
2798 if (dest_align == 0)
2801 /* If SRC and DEST are the same (and not volatile), do nothing. */
2802 if (operand_equal_p (src, dest, 0))
2808 /* Evaluate and ignore LEN in case it has side-effects. */
2809 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2810 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* NOTE(review): this arm appears to compute the end-pointer result
   DEST + (LEN - ...) directly; its controlling conditions are not
   visible in this excerpt — confirm against the full source.  */
2814 len = fold (build2 (MINUS_EXPR, TREE_TYPE (len), len,
2816 len = fold_convert (TREE_TYPE (dest), len);
2817 expr = fold (build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len));
2818 return expand_expr (expr, target, mode, EXPAND_NORMAL);
2821 /* If LEN is not constant, call the normal function. */
2822 if (! host_integerp (len, 1))
2825 /* If the LEN parameter is zero, return DEST. */
2826 if (tree_low_cst (len, 1) == 0)
2828 /* Evaluate and ignore SRC in case it has side-effects. */
2829 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2830 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2833 /* If either SRC is not a pointer type, don't do this
2834 operation in-line. */
2838 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2839 src_str = c_getstr (src);
2841 /* If SRC is a string constant and block move would be done
2842 by pieces, we can avoid loading the string from memory
2843 and only stored the computed constants. */
2845 && GET_CODE (len_rtx) == CONST_INT
2846 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2847 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2848 (void *) src_str, dest_align))
2850 dest_mem = get_memory_rtx (dest);
2851 set_mem_align (dest_mem, dest_align);
2852 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2853 builtin_memcpy_read_str,
2854 (void *) src_str, dest_align, endp);
2855 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2856 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise fall back to an explicit move-by-pieces when the length
   is a small enough constant for the given alignment.  */
2860 if (GET_CODE (len_rtx) == CONST_INT
2861 && can_move_by_pieces (INTVAL (len_rtx),
2862 MIN (dest_align, src_align)))
2864 dest_mem = get_memory_rtx (dest);
2865 set_mem_align (dest_mem, dest_align);
2866 src_mem = get_memory_rtx (src);
2867 set_mem_align (src_mem, src_align);
2868 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2869 MIN (dest_align, src_align), endp);
2870 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2871 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2879 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2880 if we failed the caller should emit a normal call. */
2883 expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
2885 if (!validate_arglist (arglist,
2886 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2890 tree dest = TREE_VALUE (arglist);
2891 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2892 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2894 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2895 unsigned int dest_align
2896 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2898 /* If DEST is not a pointer type, call the normal function. */
2899 if (dest_align == 0)
2902 /* If the LEN parameter is zero, return DEST. */
2903 if (integer_zerop (len))
2905 /* Evaluate and ignore SRC in case it has side-effects. */
2906 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2907 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2910 /* If SRC and DEST are the same (and not volatile), return DEST. */
2911 if (operand_equal_p (src, dest, 0))
2913 /* Evaluate and ignore LEN in case it has side-effects. */
2914 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2915 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2918 /* If either SRC is not a pointer type, don't do this
2919 operation in-line. */
2923 /* If src is categorized for a readonly section we can use
/* A read-only source cannot be written through DEST, so memcpy
   semantics suffice and the cheaper expander can be reused.  */
2925 if (readonly_data_expr (src))
2927 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2930 return expand_expr (build_function_call_expr (fn, arglist),
2931 target, mode, EXPAND_NORMAL);
2934 /* Otherwise, call the normal function. */
2939 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2940 if we failed the caller should emit a normal call. */
2943 expand_builtin_bcopy (tree arglist)
2945 tree src, dest, size, newarglist;
2947 if (!validate_arglist (arglist,
2948 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2951 src = TREE_VALUE (arglist);
2952 dest = TREE_VALUE (TREE_CHAIN (arglist));
2953 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2955 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2956 memmove(ptr y, ptr x, size_t z). This is done this way
2957 so that if it isn't expanded inline, we fallback to
2958 calling bcopy instead of memmove. */
2960 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
2961 newarglist = tree_cons (NULL_TREE, src, newarglist);
2962 newarglist = tree_cons (NULL_TREE, dest, newarglist);
/* const0_rtx discards memmove's return value, matching bcopy's void
   result.  */
2964 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
/* Fallback definitions for targets that provide no movstr pattern.  */
2968 # define HAVE_movstr 0
2969 # define CODE_FOR_movstr CODE_FOR_nothing
2972 /* Expand into a movstr instruction, if one is available. Return 0 if
2973 we failed, the caller should emit a normal call, otherwise try to
2974 get the result in TARGET, if convenient. If ENDP is 0 return the
2975 destination pointer, if ENDP is 1 return the end pointer ala
2976 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2980 expand_movstr (tree dest, tree src, rtx target, int endp)
2986 const struct insn_data * data;
2991 dest_mem = get_memory_rtx (dest);
2992 src_mem = get_memory_rtx (src);
2995 target = force_reg (Pmode, XEXP (dest_mem, 0));
2996 dest_mem = replace_equiv_address (dest_mem, target);
2997 end = gen_reg_rtx (Pmode);
3001 if (target == 0 || target == const0_rtx)
3003 end = gen_reg_rtx (Pmode);
/* Fetch the movstr pattern's operand constraints so the end-pointer
   operand can be given the mode the pattern expects.  */
3011 data = insn_data + CODE_FOR_movstr;
3013 if (data->operand[0].mode != VOIDmode)
3014 end = gen_lowpart (data->operand[0].mode, end);
3016 insn = data->genfun (end, dest_mem, src_mem);
3022 /* movstr is supposed to set end to the address of the NUL
3023 terminator. If the caller requested a mempcpy-like return value,
3025 if (endp == 1 && target != const0_rtx)
3026 emit_move_insn (target, plus_constant (gen_lowpart (GET_MODE (target),
3032 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
3033 if we failed the caller should emit a normal call, otherwise try to get
3034 the result in TARGET, if convenient (and in mode MODE if that's
3038 expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
3040 tree fn, len, src, dst;
3042 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3045 src = TREE_VALUE (TREE_CHAIN (arglist));
3046 dst = TREE_VALUE (arglist);
3048 /* If SRC and DST are equal (and not volatile), return DST. */
3049 if (operand_equal_p (src, dst, 0))
3050 return expand_expr (dst, target, mode, EXPAND_NORMAL);
/* Without a compile-time length — or when computing it would have
   side effects — use the target's movstr expander instead.  */
3052 len = c_strlen (src, 1);
3053 if (len == 0 || TREE_SIDE_EFFECTS (len))
3054 return expand_movstr (TREE_VALUE (arglist),
3055 TREE_VALUE (TREE_CHAIN (arglist)),
3056 target, /*endp=*/0);
3058 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
/* Known length: lower strcpy to memcpy of LEN + 1 bytes, including
   the terminating NUL.  */
3062 len = size_binop (PLUS_EXPR, len, ssize_int (1));
3063 arglist = build_tree_list (NULL_TREE, len);
3064 arglist = tree_cons (NULL_TREE, src, arglist);
3065 arglist = tree_cons (NULL_TREE, dst, arglist);
3066 return expand_expr (build_function_call_expr (fn, arglist),
3067 target, mode, EXPAND_NORMAL);
3070 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
3071 Return 0 if we failed the caller should emit a normal call,
3072 otherwise try to get the result in TARGET, if convenient (and in
3073 mode MODE if that's convenient). */
3076 expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
3078 /* If return value is ignored, transform stpcpy into strcpy. */
3079 if (target == const0_rtx)
3081 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3085 return expand_expr (build_function_call_expr (fn, arglist),
3086 target, mode, EXPAND_NORMAL);
3089 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3093 tree dst, src, len, lenp1;
3097 /* Ensure we get an actual string whose length can be evaluated at
3098 compile-time, not an expression containing a string. This is
3099 because the latter will potentially produce pessimized code
3100 when used to produce the return value. */
3101 src = TREE_VALUE (TREE_CHAIN (arglist));
3102 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3103 return expand_movstr (TREE_VALUE (arglist),
3104 TREE_VALUE (TREE_CHAIN (arglist)),
3105 target, /*endp=*/2);
/* Lower to mempcpy of LEN + 1 bytes; endp == 2 requests the
   end-pointer-minus-one result that stpcpy returns.  */
3107 dst = TREE_VALUE (arglist);
3108 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3109 narglist = build_tree_list (NULL_TREE, lenp1);
3110 narglist = tree_cons (NULL_TREE, src, narglist);
3111 narglist = tree_cons (NULL_TREE, dst, narglist);
3112 ret = expand_builtin_mempcpy (narglist, target, mode, /*endp=*/2);
/* NOTE(review): the remainder appears to fall back to strcpy and then
   materialize the DST + LEN return value by hand; several lines of
   this path are not visible in this excerpt — confirm against the
   full source.  */
3117 if (TREE_CODE (len) == INTEGER_CST)
3119 rtx len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3121 if (GET_CODE (len_rtx) == CONST_INT)
3123 ret = expand_builtin_strcpy (arglist, target, mode);
3129 if (mode != VOIDmode)
3130 target = gen_reg_rtx (mode);
3132 target = gen_reg_rtx (GET_MODE (ret));
3134 if (GET_MODE (target) != GET_MODE (ret))
3135 ret = gen_lowpart (GET_MODE (target), ret);
3137 ret = emit_move_insn (target,
3147 return expand_movstr (TREE_VALUE (arglist),
3148 TREE_VALUE (TREE_CHAIN (arglist)),
3149 target, /*endp=*/2);
3153 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3154 bytes from constant string DATA + OFFSET and return it as target
3158 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3159 enum machine_mode mode)
3161 const char *str = (const char *) data;
/* Reads beyond the source string yield zero padding, per strncpy
   semantics (the zero-value return for this case is not visible in
   this excerpt).  */
3163 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3166 return c_readstr (str + offset, mode);
3169 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
3170 if we failed the caller should emit a normal call. */
3173 expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
3175 if (!validate_arglist (arglist,
3176 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3180 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
3181 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3184 /* We must be passed a constant len parameter. */
3185 if (TREE_CODE (len) != INTEGER_CST)
3188 /* If the len parameter is zero, return the dst parameter. */
3189 if (integer_zerop (len))
3191 /* Evaluate and ignore the src argument in case it has
3193 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3194 VOIDmode, EXPAND_NORMAL);
3195 /* Return the dst parameter. */
3196 return expand_expr (TREE_VALUE (arglist), target, mode,
3200 /* Now, we must be passed a constant src ptr parameter. */
3201 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* SLEN + 1 counts the terminating NUL of the source.  */
3204 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3206 /* We're required to pad with trailing zeros if the requested
3207 len is greater than strlen(s2)+1. In that case try to
3208 use store_by_pieces, if it fails, punt. */
3209 if (tree_int_cst_lt (slen, len))
3211 tree dest = TREE_VALUE (arglist);
3212 unsigned int dest_align
3213 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3214 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
3217 if (!p || dest_align == 0 || !host_integerp (len, 1)
3218 || !can_store_by_pieces (tree_low_cst (len, 1),
3219 builtin_strncpy_read_str,
3220 (void *) p, dest_align))
3223 dest_mem = get_memory_rtx (dest);
3224 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3225 builtin_strncpy_read_str,
3226 (void *) p, dest_align, 0);
3227 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3228 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3232 /* OK transform into builtin memcpy. */
3233 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3236 return expand_expr (build_function_call_expr (fn, arglist),
3237 target, mode, EXPAND_NORMAL);
3241 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3242 bytes from constant string DATA + OFFSET and return it as target
3246 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3247 enum machine_mode mode)
3249 const char *c = (const char *) data;
3250 char *p = alloca (GET_MODE_SIZE (mode));
/* Replicate the single fill byte *C across a mode-sized buffer.  */
3252 memset (p, *c, GET_MODE_SIZE (mode));
3254 return c_readstr (p, mode);
3257 /* Callback routine for store_by_pieces. Return the RTL of a register
3258 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3259 char value given in the RTL register data. For example, if mode is
3260 4 bytes wide, return the RTL for 0x01010101*data. */
3263 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3264 enum machine_mode mode)
3270 size = GET_MODE_SIZE (mode);
/* Build the 0x0101...01 coefficient from a buffer of 1-bytes, then
   multiply by the runtime byte value to splat it across the word.  */
3275 memset (p, 1, size);
3276 coeff = c_readstr (p, mode);
3278 target = convert_to_mode (mode, (rtx) data, 1);
3279 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3280 return force_reg (mode, target);
3283 /* Expand expression EXP, which is a call to the memset builtin. Return 0
3284 if we failed the caller should emit a normal call, otherwise try to get
3285 the result in TARGET, if convenient (and in mode MODE if that's
3289 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
3291 if (!validate_arglist (arglist,
3292 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3296 tree dest = TREE_VALUE (arglist);
3297 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3298 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3301 unsigned int dest_align
3302 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3303 rtx dest_mem, dest_addr, len_rtx;
3305 /* If DEST is not a pointer type, don't do this
3306 operation in-line. */
3307 if (dest_align == 0)
3310 /* If the LEN parameter is zero, return DEST. */
3311 if (integer_zerop (len))
3313 /* Evaluate and ignore VAL in case it has side-effects. */
3314 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3315 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3318 if (TREE_CODE (val) != INTEGER_CST)
3322 if (!host_integerp (len, 1))
3325 if (optimize_size && tree_low_cst (len, 1) > 1)
3328 /* Assume that we can memset by pieces if we can store the
3329 * the coefficients by pieces (in the required modes).
3330 * We can't pass builtin_memset_gen_str as that emits RTL. */
3332 if (!can_store_by_pieces (tree_low_cst (len, 1),
3333 builtin_memset_read_str,
/* Non-constant fill byte: splat it at run time via
   builtin_memset_gen_str.  */
3337 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
3338 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3339 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3341 dest_mem = get_memory_rtx (dest);
3342 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3343 builtin_memset_gen_str,
3344 val_rtx, dest_align, 0);
3345 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3346 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3350 if (target_char_cast (val, &c))
3355 if (!host_integerp (len, 1))
3357 if (!can_store_by_pieces (tree_low_cst (len, 1),
3358 builtin_memset_read_str, &c,
3362 dest_mem = get_memory_rtx (dest);
3363 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3364 builtin_memset_read_str,
3366 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3367 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* General case: use the target's block-clear expander (presumably
   reached when the fill byte is zero — the guard is not visible in
   this excerpt).  */
3371 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3373 dest_mem = get_memory_rtx (dest);
3374 set_mem_align (dest_mem, dest_align);
3375 dest_addr = clear_storage (dest_mem, len_rtx);
3379 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3380 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3387 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3388 if we failed the caller should emit a normal call. */
3391 expand_builtin_bzero (tree arglist)
3393 tree dest, size, newarglist;
3395 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3398 dest = TREE_VALUE (arglist);
3399 size = TREE_VALUE (TREE_CHAIN (arglist));
3401 /* New argument list transforming bzero(ptr x, int y) to
3402 memset(ptr x, int 0, size_t y). This is done this way
3403 so that if it isn't expanded inline, we fallback to
3404 calling bzero instead of memset. */
3406 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3407 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3408 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3410 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
3413 /* Expand expression EXP, which is a call to the memcmp built-in function.
3414 ARGLIST is the argument list for this call. Return 0 if we failed and the
3415 caller should emit a normal call, otherwise try to get the result in
3416 TARGET, if convenient (and in mode MODE, if that's convenient). */
3419 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3420 enum machine_mode mode)
3422 tree arg1, arg2, len;
3423 const char *p1, *p2;
3425 if (!validate_arglist (arglist,
3426 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3429 arg1 = TREE_VALUE (arglist);
3430 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3431 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3433 /* If the len parameter is zero, return zero. */
3434 if (integer_zerop (len))
3436 /* Evaluate and ignore arg1 and arg2 in case they have
3438 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3439 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3443 /* If both arguments are equal (and not volatile), return zero. */
3444 if (operand_equal_p (arg1, arg2, 0))
3446 /* Evaluate and ignore len in case it has side-effects. */
3447 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3451 p1 = c_getstr (arg1);
3452 p2 = c_getstr (arg2);
3454 /* If all arguments are constant, and the value of len is not greater
3455 than the lengths of arg1 and arg2, evaluate at compile-time. */
3456 if (host_integerp (len, 1) && p1 && p2
3457 && compare_tree_int (len, strlen (p1) + 1) <= 0
3458 && compare_tree_int (len, strlen (p2) + 1) <= 0)
3460 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
3462 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3465 /* If len parameter is one, return an expression corresponding to
3466 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3467 if (integer_onep (len))
3469 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3470 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3472 fold (build1 (CONVERT_EXPR, integer_type_node,
3473 build1 (INDIRECT_REF, cst_uchar_node,
3474 fold_convert (cst_uchar_ptr_node, arg1))));
3476 fold (build1 (CONVERT_EXPR, integer_type_node,
3477 build1 (INDIRECT_REF, cst_uchar_node,
3478 fold_convert (cst_uchar_ptr_node, arg2))));
3479 tree result = fold (build2 (MINUS_EXPR, integer_type_node, ind1, ind2));
3480 return expand_expr (result, target, mode, EXPAND_NORMAL);
3483 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrsi
3485 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3490 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3492 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3493 enum machine_mode insn_mode;
3495 #ifdef HAVE_cmpmemsi
3497 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3500 #ifdef HAVE_cmpstrsi
3502 insn_mode = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3507 /* If we don't have POINTER_TYPE, call the function. */
3508 if (arg1_align == 0 || arg2_align == 0)
3511 /* Make a place to write the result of the instruction. */
3514 && REG_P (result) && GET_MODE (result) == insn_mode
3515 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3516 result = gen_reg_rtx (insn_mode);
3518 arg1_rtx = get_memory_rtx (arg1);
3519 arg2_rtx = get_memory_rtx (arg2);
3520 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3521 #ifdef HAVE_cmpmemsi
3523 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3524 GEN_INT (MIN (arg1_align, arg2_align)));
3527 #ifdef HAVE_cmpstrsi
3529 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3530 GEN_INT (MIN (arg1_align, arg2_align)));
3538 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3539 TYPE_MODE (integer_type_node), 3,
3540 XEXP (arg1_rtx, 0), Pmode,
3541 XEXP (arg2_rtx, 0), Pmode,
3542 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3543 TYPE_UNSIGNED (sizetype)),
3544 TYPE_MODE (sizetype));
3546 /* Return the value in the proper mode for this function. */
3547 mode = TYPE_MODE (TREE_TYPE (exp));
3548 if (GET_MODE (result) == mode)
3550 else if (target != 0)
3552 convert_move (target, result, 0);
3556 return convert_to_mode (mode, result, 0);