1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
51 #ifndef PAD_VARARGS_DOWN
52 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
55 /* Define the names of the builtin function types and codes. */
56 const char *const built_in_class_names[4]
57 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* Stringize the enum name of each builtin; builtins.def expands DEF_BUILTIN
   once per builtin, producing one string per END_BUILTINS entry.
   NOTE(review): the array's surrounding braces appear elided in this
   excerpt — verify against the complete file.  */
59 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
60 const char * built_in_names[(int) END_BUILTINS] =
62 #include "builtins.def"
66 /* Setup an array of _DECL trees, make sure each element is
67 initialized to NULL_TREE. */
68 tree built_in_decls[(int) END_BUILTINS];
69 /* Declarations used when constructing the builtin implicitly in the compiler.
70 It may be NULL_TREE when this is invalid (for instance runtime is not
71 required to implement the function call in all cases). */
72 tree implicit_built_in_decls[(int) END_BUILTINS];
/* Forward declarations for the static helpers defined later in this file.
   The expand_builtin_* functions emit RTL for a builtin call; the
   fold_builtin_* functions fold a builtin call to a tree at the GIMPLE
   level.  NOTE(review): several declarations appear elided from this
   excerpt (numbering gaps) — verify against the complete file.  */
74 static int get_pointer_alignment (tree, unsigned int);
75 static const char *c_getstr (tree);
76 static rtx c_readstr (const char *, enum machine_mode);
77 static int target_char_cast (tree, char *);
78 static rtx get_memory_rtx (tree, tree);
79 static int apply_args_size (void);
80 static int apply_result_size (void);
81 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
82 static rtx result_vector (int, rtx);
84 static rtx expand_builtin_setjmp (tree, rtx);
85 static void expand_builtin_update_setjmp_buf (rtx);
86 static void expand_builtin_prefetch (tree);
87 static rtx expand_builtin_apply_args (void);
88 static rtx expand_builtin_apply_args_1 (void);
89 static rtx expand_builtin_apply (rtx, rtx, rtx);
90 static void expand_builtin_return (rtx);
91 static enum type_class type_to_class (tree);
92 static rtx expand_builtin_classify_type (tree);
93 static void expand_errno_check (tree, rtx);
94 static rtx expand_builtin_mathfn (tree, rtx, rtx);
95 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
97 static rtx expand_builtin_sincos (tree);
98 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
99 static rtx expand_builtin_args_info (tree);
100 static rtx expand_builtin_next_arg (void);
101 static rtx expand_builtin_va_start (tree);
102 static rtx expand_builtin_va_end (tree);
103 static rtx expand_builtin_va_copy (tree);
104 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
105 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
106 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
107 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
108 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
109 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
112 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int);
114 static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode, tree);
115 static rtx expand_builtin_bcopy (tree);
116 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
117 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
118 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
120 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
121 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memset (tree, rtx, enum machine_mode, tree);
123 static rtx expand_builtin_bzero (tree);
124 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
125 static rtx expand_builtin_strstr (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strpbrk (tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_strchr (tree, tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strrchr (tree, tree, rtx, enum machine_mode);
129 static rtx expand_builtin_alloca (tree, rtx);
130 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
131 static rtx expand_builtin_frame_address (tree, tree);
132 static rtx expand_builtin_fputs (tree, rtx, bool);
133 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
134 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
135 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
136 static tree stabilize_va_list (tree, int);
137 static rtx expand_builtin_expect (tree, rtx);
138 static tree fold_builtin_constant_p (tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (tree);
141 static tree fold_builtin_inf (tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static int validate_arglist (tree, ...);
144 static bool integer_valued_real_p (tree);
145 static tree fold_trunc_transparent_mathfn (tree, tree);
146 static bool readonly_data_expr (tree);
147 static rtx expand_builtin_fabs (tree, rtx, rtx);
148 static rtx expand_builtin_signbit (tree, rtx);
149 static tree fold_builtin_sqrt (tree, tree);
150 static tree fold_builtin_cbrt (tree, tree);
151 static tree fold_builtin_pow (tree, tree, tree);
152 static tree fold_builtin_powi (tree, tree, tree);
153 static tree fold_builtin_sin (tree);
154 static tree fold_builtin_cos (tree, tree, tree);
155 static tree fold_builtin_tan (tree);
156 static tree fold_builtin_atan (tree, tree);
157 static tree fold_builtin_trunc (tree, tree);
158 static tree fold_builtin_floor (tree, tree);
159 static tree fold_builtin_ceil (tree, tree);
160 static tree fold_builtin_round (tree, tree);
161 static tree fold_builtin_int_roundingfn (tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (tree, tree, bool, int);
164 static tree fold_builtin_strchr (tree, tree);
165 static tree fold_builtin_memcmp (tree);
166 static tree fold_builtin_strcmp (tree);
167 static tree fold_builtin_strncmp (tree);
168 static tree fold_builtin_signbit (tree, tree);
169 static tree fold_builtin_copysign (tree, tree, tree);
170 static tree fold_builtin_isascii (tree);
171 static tree fold_builtin_toascii (tree);
172 static tree fold_builtin_isdigit (tree);
173 static tree fold_builtin_fabs (tree, tree);
174 static tree fold_builtin_abs (tree, tree);
175 static tree fold_builtin_unordered_cmp (tree, tree, enum tree_code,
177 static tree fold_builtin_1 (tree, tree, bool);
179 static tree fold_builtin_strpbrk (tree, tree);
180 static tree fold_builtin_strstr (tree, tree);
181 static tree fold_builtin_strrchr (tree, tree);
182 static tree fold_builtin_strcat (tree);
183 static tree fold_builtin_strncat (tree);
184 static tree fold_builtin_strspn (tree);
185 static tree fold_builtin_strcspn (tree);
186 static tree fold_builtin_sprintf (tree, int);
/* Helpers for the object-size checking (_chk) builtins.  */
188 static rtx expand_builtin_object_size (tree);
189 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
190 enum built_in_function);
191 static void maybe_emit_chk_warning (tree, enum built_in_function);
192 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
193 static tree fold_builtin_object_size (tree);
194 static tree fold_builtin_strcat_chk (tree, tree);
195 static tree fold_builtin_strncat_chk (tree, tree);
196 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
197 static tree fold_builtin_printf (tree, tree, bool, enum built_in_function);
198 static tree fold_builtin_fprintf (tree, tree, bool, enum built_in_function);
199 static bool init_target_chars (void);
/* Cached target-charset values of '\n', '%', 'c', 's' and the format
   fragments "%c", "%s", "%s\n", filled in by init_target_chars.  */
201 static unsigned HOST_WIDE_INT target_newline;
202 static unsigned HOST_WIDE_INT target_percent;
203 static unsigned HOST_WIDE_INT target_c;
204 static unsigned HOST_WIDE_INT target_s;
205 static char target_percent_c[3];
206 static char target_percent_s[3];
207 static char target_percent_s_newline[4];
209 /* Return true if NODE should be considered for inline expansion regardless
210 of the optimization level. This means whenever a function is invoked with
211 its "internal" name, which normally contains the prefix "__builtin". */
/* NOTE(review): the function's braces and return statements are elided from
   this excerpt; presumably each matching prefix returns true and the fall
   through returns false — verify against the complete file.  */
213 static bool called_as_built_in (tree node)
215 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
/* Both the "__builtin_" and "__sync_" name prefixes are tested.  */
216 if (strncmp (name, "__builtin_", 10) == 0)
218 if (strncmp (name, "__sync_", 7) == 0)
223 /* Return the alignment in bits of EXP, a pointer valued expression.
224 But don't return more than MAX_ALIGN no matter what.
225 The alignment returned is, by default, the alignment of the thing that
226 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
228 Otherwise, look at the expression to see if we can do better, i.e., if the
229 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): this excerpt is missing lines (braces, several case labels
   of the switch, and some statements); verify structural details against
   the complete file.  */
232 get_pointer_alignment (tree exp, unsigned int max_align)
234 unsigned int align, inner;
236 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
/* Start from the declared alignment of the pointed-to type, clamped.  */
239 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
240 align = MIN (align, max_align);
/* Peel conversions and pointer arithmetic to find a tighter alignment.  */
244 switch (TREE_CODE (exp))
248 case NON_LVALUE_EXPR:
249 exp = TREE_OPERAND (exp, 0);
250 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
253 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
254 align = MIN (inner, max_align);
258 /* If sum of pointer + int, restrict our maximum alignment to that
259 imposed by the integer. If not, we can't do any better than
261 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
/* Halve max_align until it divides the constant addend.  */
264 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
265 & (max_align / BITS_PER_UNIT - 1))
269 exp = TREE_OPERAND (exp, 0);
273 /* See what we are pointing at and look at its alignment. */
274 exp = TREE_OPERAND (exp, 0);
276 if (handled_component_p (exp))
278 HOST_WIDE_INT bitsize, bitpos;
280 enum machine_mode mode;
281 int unsignedp, volatilep;
283 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
284 &mode, &unsignedp, &volatilep, true);
/* bitpos & -bitpos isolates the lowest set bit: the alignment the
   constant bit offset can guarantee.  */
286 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
287 if (offset && TREE_CODE (offset) == PLUS_EXPR
288 && host_integerp (TREE_OPERAND (offset, 1), 1))
290 /* Any overflow in calculating offset_bits won't change
293 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
297 inner = MIN (inner, (offset_bits & -offset_bits));
298 offset = TREE_OPERAND (offset, 0);
300 if (offset && TREE_CODE (offset) == MULT_EXPR
301 && host_integerp (TREE_OPERAND (offset, 1), 1))
303 /* Any overflow in calculating offset_factor won't change
305 unsigned offset_factor
306 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
310 inner = MIN (inner, (offset_factor & -offset_factor));
/* A variable offset with unknown structure can only promise byte
   alignment.  */
313 inner = MIN (inner, BITS_PER_UNIT);
315 if (TREE_CODE (exp) == FUNCTION_DECL)
316 align = FUNCTION_BOUNDARY;
317 else if (DECL_P (exp))
318 align = MIN (inner, DECL_ALIGN (exp));
319 #ifdef CONSTANT_ALIGNMENT
320 else if (CONSTANT_CLASS_P (exp))
321 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
323 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
324 || TREE_CODE (exp) == INDIRECT_REF)
325 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
327 align = MIN (align, inner);
328 return MIN (align, max_align);
336 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
337 way, because it could contain a zero byte in the middle.
338 TREE_STRING_LENGTH is the size of the character array, not the string.
340 ONLY_VALUE should be nonzero if the result is not going to be emitted
341 into the instruction stream and zero if it is going to be expanded.
342 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
343 is returned, otherwise NULL, since
344 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
345 evaluate the side-effects.
347 The value returned is of type `ssizetype'.
349 Unfortunately, string_constant can't access the values of const char
350 arrays with initializers, so neither can we do so here. */
/* NOTE(review): some lines (return type line, braces, a few statements) are
   elided from this excerpt — verify against the complete file.  */
353 c_strlen (tree src, int only_value)
356 HOST_WIDE_INT offset;
/* A COND_EXPR folds only if both arms have equal known lengths, and only
   if the condition has no side effects we would fail to evaluate.  */
361 if (TREE_CODE (src) == COND_EXPR
362 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
366 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
367 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
368 if (tree_int_cst_equal (len1, len2))
372 if (TREE_CODE (src) == COMPOUND_EXPR
373 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
374 return c_strlen (TREE_OPERAND (src, 1), only_value);
376 src = string_constant (src, &offset_node);
/* MAX excludes the trailing nul appended by build_string.  */
380 max = TREE_STRING_LENGTH (src) - 1;
381 ptr = TREE_STRING_POINTER (src);
383 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
385 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
386 compute the offset to the following null if we don't know where to
387 start searching for it. */
390 for (i = 0; i < max; i++)
394 /* We don't know the starting offset, but we do know that the string
395 has no internal zero bytes. We can assume that the offset falls
396 within the bounds of the string; otherwise, the programmer deserves
397 what he gets. Subtract the offset from the length of the string,
398 and return that. This would perhaps not be valid if we were dealing
399 with named arrays in addition to literal string constants. */
401 return size_diffop (size_int (max), offset_node);
404 /* We have a known offset into the string. Start searching there for
405 a null character if we can represent it as a single HOST_WIDE_INT. */
406 if (offset_node == 0)
408 else if (! host_integerp (offset_node, 0))
411 offset = tree_low_cst (offset_node, 0);
413 /* If the offset is known to be out of bounds, warn, and call strlen at
415 if (offset < 0 || offset > max)
417 warning (0, "offset outside bounds of constant string");
421 /* Use strlen to search for the first zero byte. Since any strings
422 constructed with build_string will have nulls appended, we win even
423 if we get handed something like (char[4])"abcd".
425 Since OFFSET is our starting index into the string, no further
426 calculation is needed. */
427 return ssize_int (strlen (ptr + offset));
430 /* Return a char pointer for a C string if it is a string constant
431 or sum of string constant and integer constant. */
/* NOTE(review): the function signature and NULL-return paths are elided
   from this excerpt (presumably `static const char *c_getstr (tree src)`
   per the forward declaration) — verify against the complete file.  */
438 src = string_constant (src, &offset_node);
442 if (offset_node == 0)
443 return TREE_STRING_POINTER (src);
/* Reject non-constant or out-of-bounds offsets.  */
444 else if (!host_integerp (offset_node, 1)
445 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
448 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
451 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
452 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
/* NOTE(review): declarations of c[], i, j, ch and some loop lines are
   elided from this excerpt — verify against the complete file.  */
455 c_readstr (const char *str, enum machine_mode mode)
461 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
/* Place byte I of STR at the bit position the target's byte/word
   endianness dictates.  */
466 for (i = 0; i < GET_MODE_SIZE (mode); i++)
469 if (WORDS_BIG_ENDIAN)
470 j = GET_MODE_SIZE (mode) - i - 1;
/* When byte and word order differ, swap byte position within its word.  */
471 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
472 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
473 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
475 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT)
478 ch = (unsigned char) str[i];
479 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
481 return immed_double_const (c[0], c[1], mode);
484 /* Cast a target constant CST to target CHAR and if that value fits into
485 host char type, return zero and put that value into variable pointed to by
/* NOTE(review): the tail of this function (hostval assignment, the
   mismatch check, store through P and returns) is elided from this
   excerpt — verify against the complete file.  */
489 target_char_cast (tree cst, char *p)
491 unsigned HOST_WIDE_INT val, hostval;
/* Bail out if CST is not a constant or a target char exceeds a
   HOST_WIDE_INT.  */
493 if (!host_integerp (cst, 1)
494 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
497 val = tree_low_cst (cst, 1);
/* Truncate to the target's char width.  */
498 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
499 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
/* Truncate to the host's char width for comparison.  */
502 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
503 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
512 /* Similar to save_expr, but assumes that arbitrary code is not executed
513 in between the multiple evaluations. In particular, we assume that a
514 non-addressable local variable will not be modified. */
/* NOTE(review): the early `return exp;` for the cheap case and the braces
   appear elided from this excerpt.  */
517 builtin_save_expr (tree exp)
/* Non-addressable parms and non-static locals cannot change between
   evaluations here, so no SAVE_EXPR wrapper is needed for them.  */
519 if (TREE_ADDRESSABLE (exp) == 0
520 && (TREE_CODE (exp) == PARM_DECL
521 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
524 return save_expr (exp);
527 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
528 times to get the address of either a higher stack frame, or a return
529 address located within it (depending on FNDECL_CODE). */
/* NOTE(review): braces, the #else branches of several conditionals, and
   the final return are elided from this excerpt — verify against the
   complete file.  */
532 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
/* A target may supply the frame base directly.  */
536 #ifdef INITIAL_FRAME_ADDRESS_RTX
537 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
541 /* For a zero count with __builtin_return_address, we don't care what
542 frame address we return, because target-specific definitions will
543 override us. Therefore frame pointer elimination is OK, and using
544 the soft frame pointer is OK.
546 For a non-zero count, or a zero count with __builtin_frame_address,
547 we require a stable offset from the current frame pointer to the
548 previous one, so we must use the hard frame pointer, and
549 we must disable frame pointer elimination. */
550 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
551 tem = frame_pointer_rtx;
554 tem = hard_frame_pointer_rtx;
556 /* Tell reload not to eliminate the frame pointer. */
557 current_function_accesses_prior_frames = 1;
561 /* Some machines need special handling before we can access
562 arbitrary frames. For example, on the sparc, we must first flush
563 all register windows to the stack. */
564 #ifdef SETUP_FRAME_ADDRESSES
566 SETUP_FRAME_ADDRESSES ();
569 /* On the sparc, the return address is not in the frame, it is in a
570 register. There is no way to access it off of the current frame
571 pointer, but it can be accessed off the previous frame pointer by
572 reading the value from the register window save area. */
573 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
574 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
578 /* Scan back COUNT frames to the specified frame. */
579 for (i = 0; i < count; i++)
581 /* Assume the dynamic chain pointer is in the word that the
582 frame address points to, unless otherwise specified. */
583 #ifdef DYNAMIC_CHAIN_ADDRESS
584 tem = DYNAMIC_CHAIN_ADDRESS (tem);
/* Load the previous frame pointer from the current frame.  */
586 tem = memory_address (Pmode, tem);
587 tem = gen_frame_mem (Pmode, tem);
588 tem = copy_to_reg (tem);
591 /* For __builtin_frame_address, return what we've got. */
592 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
595 /* For __builtin_return_address, Get the return address from that
597 #ifdef RETURN_ADDR_RTX
598 tem = RETURN_ADDR_RTX (count, tem);
/* Default: return address lives one word past the frame pointer.  */
600 tem = memory_address (Pmode,
601 plus_constant (tem, GET_MODE_SIZE (Pmode)));
602 tem = gen_frame_mem (Pmode, tem);
607 /* Alias set used for setjmp buffer. */
/* -1 means "not yet allocated"; lazily created below.  */
608 static HOST_WIDE_INT setjmp_alias_set = -1;
610 /* Construct the leading half of a __builtin_setjmp call. Control will
611 return to RECEIVER_LABEL. This is used directly by sjlj exception
/* NOTE(review): braces and a few lines (e.g. the rest of the header
   comment, local declarations of mem/stack_save) are elided from this
   excerpt — verify against the complete file.  */
615 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
617 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
621 if (setjmp_alias_set == -1)
622 setjmp_alias_set = new_alias_set ();
624 buf_addr = convert_memory_address (Pmode, buf_addr);
626 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
628 /* We store the frame pointer and the address of receiver_label in
629 the buffer and use the rest of it for the stack save area, which
630 is machine-dependent. */
/* Word 0: the frame value the target wants saved for setjmp.  */
632 mem = gen_rtx_MEM (Pmode, buf_addr);
633 set_mem_alias_set (mem, setjmp_alias_set);
634 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
/* Word 1: the receiver label's address.  (Note the comma operator at the
   end of this line is in the original source.)  */
636 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
637 set_mem_alias_set (mem, setjmp_alias_set);
639 emit_move_insn (validize_mem (mem),
640 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
/* Words 2+: the nonlocal stack save area.  */
642 stack_save = gen_rtx_MEM (sa_mode,
643 plus_constant (buf_addr,
644 2 * GET_MODE_SIZE (Pmode)));
645 set_mem_alias_set (stack_save, setjmp_alias_set);
646 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
648 /* If there is further processing to do, do it. */
649 #ifdef HAVE_builtin_setjmp_setup
650 if (HAVE_builtin_setjmp_setup)
651 emit_insn (gen_builtin_setjmp_setup (buf_addr));
654 /* Tell optimize_save_area_alloca that extra work is going to
655 need to go on during alloca. */
656 current_function_calls_setjmp = 1;
658 /* Set this so all the registers get saved in our frame; we need to be
659 able to copy the saved values for any registers from frames we unwind. */
660 current_function_has_nonlocal_label = 1;
663 /* Construct the trailing part of a __builtin_setjmp call.
664 This is used directly by sjlj exception handling code. */
/* NOTE(review): braces, #endif lines and some declarations are elided
   from this excerpt — verify against the complete file.  */
667 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
669 /* Clobber the FP when we get here, so we have to make sure it's
670 marked as used by this function. */
671 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
673 /* Mark the static chain as clobbered here so life information
674 doesn't get messed up for it. */
675 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
677 /* Now put in the code to restore the frame pointer, and argument
678 pointer, if needed. */
679 #ifdef HAVE_nonlocal_goto
680 if (! HAVE_nonlocal_goto)
681 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
684 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
685 if (fixed_regs[ARG_POINTER_REGNUM])
687 #ifdef ELIMINABLE_REGS
/* If the arg pointer can be eliminated in favor of the frame pointer,
   no restore is needed.  */
689 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
691 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
692 if (elim_regs[i].from == ARG_POINTER_REGNUM
693 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
696 if (i == ARRAY_SIZE (elim_regs))
699 /* Now restore our arg pointer from the address at which it
700 was saved in our stack frame. */
701 emit_move_insn (virtual_incoming_args_rtx,
702 copy_to_reg (get_arg_pointer_save_area (cfun)))
707 #ifdef HAVE_builtin_setjmp_receiver
708 if (HAVE_builtin_setjmp_receiver)
709 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
712 #ifdef HAVE_nonlocal_goto_receiver
713 if (HAVE_nonlocal_goto_receiver)
714 emit_insn (gen_nonlocal_goto_receiver ());
719 /* @@@ This is a kludge. Not all machine descriptions define a blockage
720 insn, but we must not allow the code we just generated to be reordered
721 by scheduling. Specifically, the update of the frame pointer must
722 happen immediately, not later. So emit an ASM_INPUT to act as blockage
724 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
727 /* __builtin_setjmp is passed a pointer to an array of five words (not
728 all will be used on all machines). It operates similarly to the C
729 library function of the same name, but is more efficient. Much of
730 the code below (and for longjmp) is copied from the handling of
733 NOTE: This is intended for use by GNAT and the exception handling
734 scheme in the compiler and will only work in the method used by
/* NOTE(review): the return-type line, braces, the NULL_RTX bailout for a
   bad arglist, and the final `return target;` are elided from this
   excerpt — verify against the complete file.  */
738 expand_builtin_setjmp (tree arglist, rtx target)
740 rtx buf_addr, next_lab, cont_lab;
742 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
/* TARGET must be a pseudo register we can store 0/1 into.  */
745 if (target == 0 || !REG_P (target)
746 || REGNO (target) < FIRST_PSEUDO_REGISTER)
747 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
749 buf_addr = expand_normal (TREE_VALUE (arglist));
751 next_lab = gen_label_rtx ();
752 cont_lab = gen_label_rtx ();
754 expand_builtin_setjmp_setup (buf_addr, next_lab);
756 /* Set TARGET to zero and branch to the continue label. Use emit_jump to
757 ensure that pending stack adjustments are flushed. */
758 emit_move_insn (target, const0_rtx);
759 emit_jump (cont_lab);
/* Control reaches next_lab only via a longjmp back here.  */
761 emit_label (next_lab);
763 expand_builtin_setjmp_receiver (next_lab);
765 /* Set TARGET to one. */
766 emit_move_insn (target, const1_rtx);
767 emit_label (cont_lab);
769 /* Tell flow about the strange goings on. Putting `next_lab' on
770 `nonlocal_goto_handler_labels' to indicates that function
771 calls may traverse the arc back to this label. */
773 current_function_has_nonlocal_label = 1;
774 nonlocal_goto_handler_labels
775 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
780 /* __builtin_longjmp is passed a pointer to an array of five words (not
781 all will be used on all machines). It operates similarly to the C
782 library function of the same name, but is more efficient. Much of
783 the code below is copied from the handling of non-local gotos.
785 NOTE: This is intended for use by GNAT and the exception handling
786 scheme in the compiler and will only work in the method used by
/* NOTE(review): braces, #else/#endif lines and parts of the trailing
   insn-scanning loop are elided from this excerpt — verify against the
   complete file.  */
790 expand_builtin_longjmp (rtx buf_addr, rtx value)
792 rtx fp, lab, stack, insn, last;
793 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
795 if (setjmp_alias_set == -1)
796 setjmp_alias_set = new_alias_set ();
798 buf_addr = convert_memory_address (Pmode, buf_addr);
800 buf_addr = force_reg (Pmode, buf_addr);
802 /* We used to store value in static_chain_rtx, but that fails if pointers
803 are smaller than integers. We instead require that the user must pass
804 a second argument of 1, because that is what builtin_setjmp will
805 return. This also makes EH slightly more efficient, since we are no
806 longer copying around a value that we don't care about. */
807 gcc_assert (value == const1_rtx);
809 last = get_last_insn ();
810 #ifdef HAVE_builtin_longjmp
811 if (HAVE_builtin_longjmp)
812 emit_insn (gen_builtin_longjmp (buf_addr));
/* The buffer layout here mirrors expand_builtin_setjmp_setup: word 0 is
   the frame pointer, word 1 the label, words 2+ the stack save area.  */
816 fp = gen_rtx_MEM (Pmode, buf_addr);
817 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
818 GET_MODE_SIZE (Pmode)));
820 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
821 2 * GET_MODE_SIZE (Pmode)));
822 set_mem_alias_set (fp, setjmp_alias_set);
823 set_mem_alias_set (lab, setjmp_alias_set);
824 set_mem_alias_set (stack, setjmp_alias_set);
826 /* Pick up FP, label, and SP from the block and jump. This code is
827 from expand_goto in stmt.c; see there for detailed comments. */
828 #ifdef HAVE_nonlocal_goto
829 if (HAVE_nonlocal_goto)
830 /* We have to pass a value to the nonlocal_goto pattern that will
831 get copied into the static_chain pointer, but it does not matter
832 what that value is, because builtin_setjmp does not use it. */
833 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
/* Copy LAB before clobbering the frame pointer it may depend on.  */
837 lab = copy_to_reg (lab);
839 emit_insn (gen_rtx_CLOBBER (VOIDmode,
840 gen_rtx_MEM (BLKmode,
841 gen_rtx_SCRATCH (VOIDmode))));
842 emit_insn (gen_rtx_CLOBBER (VOIDmode,
843 gen_rtx_MEM (BLKmode,
844 hard_frame_pointer_rtx)));
846 emit_move_insn (hard_frame_pointer_rtx, fp);
847 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
849 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
850 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
851 emit_indirect_jump (lab);
855 /* Search backwards and mark the jump insn as a non-local goto.
856 Note that this precludes the use of __builtin_longjmp to a
857 __builtin_setjmp target in the same function. However, we've
858 already cautioned the user that these functions are for
859 internal exception handling use only. */
860 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
862 gcc_assert (insn != last);
866 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
870 else if (CALL_P (insn))
875 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
876 and the address of the save area. */
/* NOTE(review): braces, the bad-arglist bailout and the loop's break/body
   details are elided from this excerpt — verify against the complete
   file.  */
879 expand_builtin_nonlocal_goto (tree arglist)
881 tree t_label, t_save_area;
882 rtx r_label, r_save_area, r_fp, r_sp, insn;
884 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
887 t_label = TREE_VALUE (arglist);
888 arglist = TREE_CHAIN (arglist);
889 t_save_area = TREE_VALUE (arglist);
891 r_label = expand_normal (t_label);
892 r_label = convert_memory_address (Pmode, r_label);
893 r_save_area = expand_normal (t_save_area);
894 r_save_area = convert_memory_address (Pmode, r_save_area);
/* Save area layout: word 0 = frame pointer, word 1+ = stack pointer.  */
895 r_fp = gen_rtx_MEM (Pmode, r_save_area);
896 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
897 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
899 current_function_has_nonlocal_goto = 1;
901 #ifdef HAVE_nonlocal_goto
902 /* ??? We no longer need to pass the static chain value, afaik. */
903 if (HAVE_nonlocal_goto)
904 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
/* Fallback expansion when there is no nonlocal_goto pattern.  */
908 r_label = copy_to_reg (r_label);
910 emit_insn (gen_rtx_CLOBBER (VOIDmode,
911 gen_rtx_MEM (BLKmode,
912 gen_rtx_SCRATCH (VOIDmode))));
914 emit_insn (gen_rtx_CLOBBER (VOIDmode,
915 gen_rtx_MEM (BLKmode,
916 hard_frame_pointer_rtx)));
918 /* Restore frame pointer for containing function.
919 This sets the actual hard register used for the frame pointer
920 to the location of the function's incoming static chain info.
921 The non-local goto handler will then adjust it to contain the
922 proper value and reload the argument pointer, if needed. */
923 emit_move_insn (hard_frame_pointer_rtx, r_fp);
924 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
926 /* USE of hard_frame_pointer_rtx added for consistency;
927 not clear if really needed. */
928 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
929 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
930 emit_indirect_jump (r_label);
933 /* Search backwards to the jump insn and mark it as a
935 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
939 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
940 const0_rtx, REG_NOTES (insn));
943 else if (CALL_P (insn))
950 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
951 (not all will be used on all machines) that was passed to __builtin_setjmp.
952 It updates the stack pointer in that block to correspond to the current
/* NOTE(review): braces, #else/#endif lines and the stack_save declaration
   are elided from this excerpt — verify against the complete file.  */
956 expand_builtin_update_setjmp_buf (rtx buf_addr)
/* Default save-area mode; a target hook or pattern may override it.  */
958 enum machine_mode sa_mode = Pmode;
962 #ifdef HAVE_save_stack_nonlocal
963 if (HAVE_save_stack_nonlocal)
964 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
966 #ifdef STACK_SAVEAREA_MODE
967 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
/* The stack save area starts at word 2 of the setjmp buffer.  */
971 = gen_rtx_MEM (sa_mode,
974 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
978 emit_insn (gen_setjmp ());
981 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
984 /* Expand a call to __builtin_prefetch. For a target that does not support
985 data prefetch, evaluate the memory address argument in case it has side
/* NOTE(review): braces, declarations of op0/op1/op2, and a few statements
   (e.g. resetting a bad op1/op2 to zero) are elided from this excerpt —
   verify against the complete file.  */
989 expand_builtin_prefetch (tree arglist)
991 tree arg0, arg1, arg2;
994 if (!validate_arglist (arglist, POINTER_TYPE, 0))
997 arg0 = TREE_VALUE (arglist);
998 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
999 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1001 if (TREE_CHAIN (arglist))
1003 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1004 if (TREE_CHAIN (TREE_CHAIN (arglist)))
1005 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1007 arg2 = build_int_cst (NULL_TREE, 3);
1011 arg1 = integer_zero_node;
1012 arg2 = build_int_cst (NULL_TREE, 3);
1015 /* Argument 0 is an address. */
1016 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1018 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1019 if (TREE_CODE (arg1) != INTEGER_CST)
1021 error ("second argument to %<__builtin_prefetch%> must be a constant");
1022 arg1 = integer_zero_node;
1024 op1 = expand_normal (arg1);
1025 /* Argument 1 must be either zero or one. */
1026 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1028 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1033 /* Argument 2 (locality) must be a compile-time constant int. */
1034 if (TREE_CODE (arg2) != INTEGER_CST)
1036 error ("third argument to %<__builtin_prefetch%> must be a constant");
1037 arg2 = integer_zero_node;
1039 op2 = expand_normal (arg2);
1040 /* Argument 2 must be 0, 1, 2, or 3. */
1041 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1043 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1047 #ifdef HAVE_prefetch
/* Force the address into a Pmode register if the prefetch pattern's
   predicate rejects it as-is.  */
1050 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1052 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1053 || (GET_MODE (op0) != Pmode))
1055 op0 = convert_memory_address (Pmode, op0);
1056 op0 = force_reg (Pmode, op0);
1058 emit_insn (gen_prefetch (op0, op1, op2));
1062 /* Don't do anything with direct references to volatile memory, but
1063 generate code to handle other side effects. */
1064 if (!MEM_P (op0) && side_effects_p (op0))
/* NOTE(review): excerpt with gaps — some conditions and braces of this
   function are missing from view; the MEM_EXPR-narrowing loop below is
   order-sensitive, so only comments are added.  */
1068 /* Get a MEM rtx for expression EXP which is the address of an operand
1069 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1070 the maximum length of the block of memory that might be accessed or
1074 get_memory_rtx (tree exp, tree len)
1076 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1077 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1079 /* Get an expression we can use to find the attributes to assign to MEM.
1080 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1081 we can. First remove any nops. */
1082 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1083 || TREE_CODE (exp) == NON_LVALUE_EXPR)
1084 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1085 exp = TREE_OPERAND (exp, 0);
1087 if (TREE_CODE (exp) == ADDR_EXPR)
1088 exp = TREE_OPERAND (exp, 0);
1089 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1090 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1094 /* Honor attributes derived from exp, except for the alias set
1095 (as builtin stringops may alias with anything) and the size
1096 (as stringops may access multiple array elements). */
1099 set_mem_attributes (mem, exp, 0);
1101 /* Allow the string and memory builtins to overflow from one
1102 field into another, see http://gcc.gnu.org/PR23561.
1103 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1104 memory accessed by the string or memory builtin will fit
1105 within the field. */
1106 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1108 tree mem_expr = MEM_EXPR (mem);
1109 HOST_WIDE_INT offset = -1, length = -1;
/* Strip wrappers that do not change the referenced object.  */
1112 while (TREE_CODE (inner) == ARRAY_REF
1113 || TREE_CODE (inner) == NOP_EXPR
1114 || TREE_CODE (inner) == CONVERT_EXPR
1115 || TREE_CODE (inner) == NON_LVALUE_EXPR
1116 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1117 || TREE_CODE (inner) == SAVE_EXPR)
1118 inner = TREE_OPERAND (inner, 0);
1120 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
/* Pick up a known constant offset and access length, if any.  */
1122 if (MEM_OFFSET (mem)
1123 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1124 offset = INTVAL (MEM_OFFSET (mem));
1126 if (offset >= 0 && len && host_integerp (len, 0))
1127 length = tree_low_cst (len, 0);
/* Walk outward through the COMPONENT_REF chain until the access
   provably fits inside a field, or we run out of COMPONENT_REFs.  */
1129 while (TREE_CODE (inner) == COMPONENT_REF)
1131 tree field = TREE_OPERAND (inner, 1);
1132 gcc_assert (! DECL_BIT_FIELD (field));
1133 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1134 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1137 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1138 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1141 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1142 /* If we can prove the memory starting at XEXP (mem, 0)
1143 and ending at XEXP (mem, 0) + LENGTH will fit into
1144 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1147 && offset + length <= size)
1152 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1153 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1154 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1162 mem_expr = TREE_OPERAND (mem_expr, 0);
1163 inner = TREE_OPERAND (inner, 0);
/* No enclosing object proved safe: drop the expr/offset attributes
   so aliasing machinery makes no unwarranted assumptions.  */
1166 if (mem_expr == NULL)
1168 if (mem_expr != MEM_EXPR (mem))
1170 set_mem_expr (mem, mem_expr);
1171 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1174 set_mem_alias_set (mem, 0);
1175 set_mem_size (mem, NULL_RTX);
1181 /* Built-in functions to perform an untyped call and return. */
1183 /* For each register that may be used for calling a function, this
1184 gives a mode used to copy the register's value. VOIDmode indicates
1185 the register is not used for calling a function. If the machine
1186 has register windows, this gives only the outbound registers.
1187 INCOMING_REGNO gives the corresponding inbound register. */
1188 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1190 /* For each register that may be used for returning values, this gives
1191 a mode used to copy the register's value. VOIDmode indicates the
1192 register is not used for returning values. If the machine has
1193 register windows, this gives only the outbound registers.
1194 INCOMING_REGNO gives the corresponding inbound register. */
1195 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1197 /* For each register that may be used for calling a function, this
1198 gives the offset of that register into the block returned by
1199 __builtin_apply_args. 0 indicates that the register is not
1200 used for calling a function. */
1201 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
/* These three tables are filled lazily by apply_args_size /
   apply_result_size below and then cached for the whole compilation.  */
/* NOTE(review): excerpt with gaps — the memoization guard that makes
   "size" computed only once is not visible here, but the static cache
   and the comment at 1214 imply it.  */
1203 /* Return the size required for the block returned by __builtin_apply_args,
1204 and initialize apply_args_mode. */
1207 apply_args_size (void)
1209 static int size = -1;
1212 enum machine_mode mode;
1214 /* The values computed by this function never change. */
1217 /* The first value is the incoming arg-pointer. */
1218 size = GET_MODE_SIZE (Pmode);
1220 /* The second value is the structure value address unless this is
1221 passed as an "invisible" first argument. */
1222 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1223 size += GET_MODE_SIZE (Pmode);
/* Reserve a naturally-aligned slot for every argument register.  */
1225 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1226 if (FUNCTION_ARG_REGNO_P (regno))
1228 mode = reg_raw_mode[regno];
1230 gcc_assert (mode != VOIDmode);
1232 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1233 if (size % align != 0)
1234 size = CEIL (size, align) * align;
1235 apply_args_reg_offset[regno] = size;
1236 size += GET_MODE_SIZE (mode);
1237 apply_args_mode[regno] = mode;
/* Non-argument registers are marked unused.  */
1241 apply_args_mode[regno] = VOIDmode;
1242 apply_args_reg_offset[regno] = 0;
1248 /* Return the size required for the block returned by __builtin_apply,
1249 and initialize apply_result_mode. */
1252 apply_result_size (void)
1254 static int size = -1;
1256 enum machine_mode mode;
1258 /* The values computed by this function never change. */
/* Reserve a naturally-aligned slot for every possible value register;
   mirrors apply_args_size above but keyed on FUNCTION_VALUE_REGNO_P.  */
1263 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1264 if (FUNCTION_VALUE_REGNO_P (regno))
1266 mode = reg_raw_mode[regno];
1268 gcc_assert (mode != VOIDmode);
1270 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1271 if (size % align != 0)
1272 size = CEIL (size, align) * align;
1273 size += GET_MODE_SIZE (mode);
1274 apply_result_mode[regno] = mode;
1277 apply_result_mode[regno] = VOIDmode;
1279 /* Allow targets that use untyped_call and untyped_return to override
1280 the size so that machine-specific information can be stored here. */
1281 #ifdef APPLY_RESULT_SIZE
1282 size = APPLY_RESULT_SIZE;
1288 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1289 /* Create a vector describing the result block RESULT. If SAVEP is true,
1290 the result block is used to save the values; otherwise it is used to
1291 restore the values. */
1294 result_vector (int savep, rtx result)
1296 int regno, size, align, nelts;
1297 enum machine_mode mode;
/* Worst case: one SET per hard register.  */
1299 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1302 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1303 if ((mode = apply_result_mode[regno]) != VOIDmode)
1305 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1306 if (size % align != 0)
1307 size = CEIL (size, align) * align;
/* When restoring (savep == 0), map the outbound regno back to its
   inbound counterpart via INCOMING_REGNO.  */
1308 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1309 mem = adjust_address (result, mode, size);
1310 savevec[nelts++] = (savep
1311 ? gen_rtx_SET (VOIDmode, mem, reg)
1312 : gen_rtx_SET (VOIDmode, reg, mem));
1313 size += GET_MODE_SIZE (mode);
1315 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1317 #endif /* HAVE_untyped_call or HAVE_untyped_return */
/* NOTE(review): excerpt with gaps — some declarations and braces of this
   function are not visible here.  */
1319 /* Save the state required to perform an untyped call with the same
1320 arguments as were passed to the current function. */
1323 expand_builtin_apply_args_1 (void)
1326 int size, align, regno;
1327 enum machine_mode mode;
1328 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1330 /* Create a block where the arg-pointer, structure value address,
1331 and argument registers can be saved. */
1332 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1334 /* Walk past the arg-pointer and structure value address. */
1335 size = GET_MODE_SIZE (Pmode);
1336 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1337 size += GET_MODE_SIZE (Pmode);
1339 /* Save each register used in calling a function to the block. */
1340 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1341 if ((mode = apply_args_mode[regno]) != VOIDmode)
1343 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1344 if (size % align != 0)
1345 size = CEIL (size, align) * align;
1347 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1349 emit_move_insn (adjust_address (registers, mode, size), tem);
1350 size += GET_MODE_SIZE (mode);
1353 /* Save the arg pointer to the block. */
1354 tem = copy_to_reg (virtual_incoming_args_rtx);
1355 #ifdef STACK_GROWS_DOWNWARD
1356 /* We need the pointer as the caller actually passed them to us, not
1357 as we might have pretended they were passed. Make sure it's a valid
1358 operand, as emit_move_insn isn't expected to handle a PLUS. */
1360 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1363 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1365 size = GET_MODE_SIZE (Pmode);
1367 /* Save the structure value address unless this is passed as an
1368 "invisible" first argument. */
1369 if (struct_incoming_value)
1371 emit_move_insn (adjust_address (registers, Pmode, size),
1372 copy_to_reg (struct_incoming_value));
1373 size += GET_MODE_SIZE (Pmode);
1376 /* Return the address of the block. */
1377 return copy_addr_to_reg (XEXP (registers, 0));
1380 /* __builtin_apply_args returns block of memory allocated on
1381 the stack into which is stored the arg pointer, structure
1382 value address, static chain, and all the registers that might
1383 possibly be used in performing a function call. The code is
1384 moved to the start of the function so the incoming values are
1388 expand_builtin_apply_args (void)
1390 /* Don't do __builtin_apply_args more than once in a function.
1391 Save the result of the first call and reuse it. */
1392 if (apply_args_value != 0)
1393 return apply_args_value;
1395 /* When this function is called, it means that registers must be
1396 saved on entry to this function. So we migrate the
1397 call to the first insn of this function. */
/* The save code is built in a detached sequence, cached, then spliced
   in at the function's entry below.  */
1402 temp = expand_builtin_apply_args_1 ();
1406 apply_args_value = temp;
1408 /* Put the insns after the NOTE that starts the function.
1409 If this is inside a start_sequence, make the outer-level insn
1410 chain current, so the code is placed at the start of the
1412 push_topmost_sequence ();
1413 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1414 pop_topmost_sequence ();
/* NOTE(review): excerpt with gaps — several statements (braces, some
   conditions, #else/#endif lines) of this function are not visible.  */
1419 /* Perform an untyped call and save the state required to perform an
1420 untyped return of whatever value was returned by the given function. */
1423 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1425 int size, align, regno;
1426 enum machine_mode mode;
1427 rtx incoming_args, result, reg, dest, src, call_insn;
1428 rtx old_stack_level = 0;
1429 rtx call_fusage = 0;
1430 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1432 arguments = convert_memory_address (Pmode, arguments);
1434 /* Create a block where the return registers can be saved. */
1435 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1437 /* Fetch the arg pointer from the ARGUMENTS block. */
1438 incoming_args = gen_reg_rtx (Pmode);
1439 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1440 #ifndef STACK_GROWS_DOWNWARD
1441 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1442 incoming_args, 0, OPTAB_LIB_WIDEN);
1445 /* Push a new argument block and copy the arguments. Do not allow
1446 the (potential) memcpy call below to interfere with our stack
1448 do_pending_stack_adjust ();
1451 /* Save the stack with nonlocal if available. */
1452 #ifdef HAVE_save_stack_nonlocal
1453 if (HAVE_save_stack_nonlocal)
1454 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1457 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1459 /* Allocate a block of memory onto the stack and copy the memory
1460 arguments to the outgoing arguments address. */
1461 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1462 dest = virtual_outgoing_args_rtx;
1463 #ifndef STACK_GROWS_DOWNWARD
1464 if (GET_CODE (argsize) == CONST_INT)
1465 dest = plus_constant (dest, -INTVAL (argsize));
1467 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1469 dest = gen_rtx_MEM (BLKmode, dest);
1470 set_mem_align (dest, PARM_BOUNDARY);
1471 src = gen_rtx_MEM (BLKmode, incoming_args);
1472 set_mem_align (src, PARM_BOUNDARY);
1473 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1475 /* Refer to the argument block. */
1477 arguments = gen_rtx_MEM (BLKmode, arguments);
1478 set_mem_align (arguments, PARM_BOUNDARY);
1480 /* Walk past the arg-pointer and structure value address. */
1481 size = GET_MODE_SIZE (Pmode);
1483 size += GET_MODE_SIZE (Pmode);
1485 /* Restore each of the registers previously saved. Make USE insns
1486 for each of these registers for use in making the call. */
1487 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1488 if ((mode = apply_args_mode[regno]) != VOIDmode)
1490 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1491 if (size % align != 0)
1492 size = CEIL (size, align) * align;
1493 reg = gen_rtx_REG (mode, regno);
1494 emit_move_insn (reg, adjust_address (arguments, mode, size));
1495 use_reg (&call_fusage, reg);
1496 size += GET_MODE_SIZE (mode);
1499 /* Restore the structure value address unless this is passed as an
1500 "invisible" first argument. */
1501 size = GET_MODE_SIZE (Pmode);
1504 rtx value = gen_reg_rtx (Pmode);
1505 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1506 emit_move_insn (struct_value, value);
1507 if (REG_P (struct_value))
1508 use_reg (&call_fusage, struct_value);
1509 size += GET_MODE_SIZE (Pmode);
1512 /* All arguments and registers used for the call are set up by now! */
1513 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1515 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1516 and we don't want to load it into a register as an optimization,
1517 because prepare_call_address already did it if it should be done. */
1518 if (GET_CODE (function) != SYMBOL_REF)
1519 function = memory_address (FUNCTION_MODE, function);
1521 /* Generate the actual call instruction and save the return value. */
1522 #ifdef HAVE_untyped_call
1523 if (HAVE_untyped_call)
1524 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1525 result, result_vector (1, result)));
1528 #ifdef HAVE_call_value
1529 if (HAVE_call_value)
1533 /* Locate the unique return register. It is not possible to
1534 express a call that sets more than one return register using
1535 call_value; use untyped_call for that. In fact, untyped_call
1536 only needs to save the return registers in the given block. */
1537 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1538 if ((mode = apply_result_mode[regno]) != VOIDmode)
1540 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1542 valreg = gen_rtx_REG (mode, regno);
1545 emit_call_insn (GEN_CALL_VALUE (valreg,
1546 gen_rtx_MEM (FUNCTION_MODE, function),
1547 const0_rtx, NULL_RTX, const0_rtx));
1549 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1555 /* Find the CALL insn we just emitted, and attach the register usage
1557 call_insn = last_call_insn ();
1558 add_function_usage_to (call_insn, call_fusage);
1560 /* Restore the stack. */
1561 #ifdef HAVE_save_stack_nonlocal
1562 if (HAVE_save_stack_nonlocal)
1563 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX)
1566 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1570 /* Return the address of the result block. */
1571 result = copy_addr_to_reg (XEXP (result, 0));
1572 return convert_memory_address (ptr_mode, result);
/* NOTE(review): excerpt with gaps — declarations and some braces of this
   function are not visible here.  */
1575 /* Perform an untyped return. */
1578 expand_builtin_return (rtx result)
1580 int size, align, regno;
1581 enum machine_mode mode;
1583 rtx call_fusage = 0;
1585 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode[].  */
1587 apply_result_size ();
1588 result = gen_rtx_MEM (BLKmode, result);
1590 #ifdef HAVE_untyped_return
1591 if (HAVE_untyped_return)
1593 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1599 /* Restore the return value and note that each value is used. */
1601 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1602 if ((mode = apply_result_mode[regno]) != VOIDmode)
1604 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1605 if (size % align != 0)
1606 size = CEIL (size, align) * align;
1607 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1608 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USE insns in a side sequence so they can all be emitted
   just before the return, below.  */
1610 push_to_sequence (call_fusage);
1611 emit_insn (gen_rtx_USE (VOIDmode, reg));
1612 call_fusage = get_insns ();
1614 size += GET_MODE_SIZE (mode);
1617 /* Put the USE insns before the return. */
1618 emit_insn (call_fusage);
1620 /* Return whatever values was restored by jumping directly to the end
1622 expand_naked_return ();
/* NOTE(review): excerpt with gaps — at least one case line (presumably
   UNION_TYPE, sharing the union_type_class return) is missing from view.  */
1625 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1627 static enum type_class
1628 type_to_class (tree type)
1630 switch (TREE_CODE (type))
1632 case VOID_TYPE: return void_type_class;
1633 case INTEGER_TYPE: return integer_type_class;
1634 case ENUMERAL_TYPE: return enumeral_type_class;
1635 case BOOLEAN_TYPE: return boolean_type_class;
1636 case POINTER_TYPE: return pointer_type_class;
1637 case REFERENCE_TYPE: return reference_type_class;
1638 case OFFSET_TYPE: return offset_type_class;
1639 case REAL_TYPE: return real_type_class;
1640 case COMPLEX_TYPE: return complex_type_class;
1641 case FUNCTION_TYPE: return function_type_class;
1642 case METHOD_TYPE: return method_type_class;
1643 case RECORD_TYPE: return record_type_class;
1645 case QUAL_UNION_TYPE: return union_type_class;
1646 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1647 ? string_type_class : array_type_class);
1648 case LANG_TYPE: return lang_type_class;
1649 default: return no_type_class;
1653 /* Expand a call to __builtin_classify_type with arguments found in
/* Returns the type class of the single argument as a CONST_INT, or
   no_type_class when no argument is present (guard not visible here).  */
1657 expand_builtin_classify_type (tree arglist)
1660 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1661 return GEN_INT (no_type_class);
1664 /* This helper macro, meant to be used in mathfn_built_in below,
1665 determines which among a set of three builtin math functions is
1666 appropriate for a given type mode. The `F' and `L' cases are
1667 automatically generated from the `double' case. */
1668 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1669 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1670 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1671 fcodel = BUILT_IN_MATHFN##L ; break;
/* The macro expands to three case labels and records the double/float/
   long-double codes into fcode/fcodef/fcodel in the enclosing switch.  */
/* NOTE(review): excerpt with gaps — the switch opening and the default
   case are not visible here.  */
1673 /* Return mathematic function equivalent to FN but operating directly
1674 on TYPE, if available. If we can't do the conversion, return zero. */
1676 mathfn_built_in (tree type, enum built_in_function fn)
1678 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN records the double/float/long-double codes for one
   math builtin family; the actual pick happens after the switch.  */
1682 CASE_MATHFN (BUILT_IN_ACOS)
1683 CASE_MATHFN (BUILT_IN_ACOSH)
1684 CASE_MATHFN (BUILT_IN_ASIN)
1685 CASE_MATHFN (BUILT_IN_ASINH)
1686 CASE_MATHFN (BUILT_IN_ATAN)
1687 CASE_MATHFN (BUILT_IN_ATAN2)
1688 CASE_MATHFN (BUILT_IN_ATANH)
1689 CASE_MATHFN (BUILT_IN_CBRT)
1690 CASE_MATHFN (BUILT_IN_CEIL)
1691 CASE_MATHFN (BUILT_IN_COPYSIGN)
1692 CASE_MATHFN (BUILT_IN_COS)
1693 CASE_MATHFN (BUILT_IN_COSH)
1694 CASE_MATHFN (BUILT_IN_DREM)
1695 CASE_MATHFN (BUILT_IN_ERF)
1696 CASE_MATHFN (BUILT_IN_ERFC)
1697 CASE_MATHFN (BUILT_IN_EXP)
1698 CASE_MATHFN (BUILT_IN_EXP10)
1699 CASE_MATHFN (BUILT_IN_EXP2)
1700 CASE_MATHFN (BUILT_IN_EXPM1)
1701 CASE_MATHFN (BUILT_IN_FABS)
1702 CASE_MATHFN (BUILT_IN_FDIM)
1703 CASE_MATHFN (BUILT_IN_FLOOR)
1704 CASE_MATHFN (BUILT_IN_FMA)
1705 CASE_MATHFN (BUILT_IN_FMAX)
1706 CASE_MATHFN (BUILT_IN_FMIN)
1707 CASE_MATHFN (BUILT_IN_FMOD)
1708 CASE_MATHFN (BUILT_IN_FREXP)
1709 CASE_MATHFN (BUILT_IN_GAMMA)
1710 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1711 CASE_MATHFN (BUILT_IN_HYPOT)
1712 CASE_MATHFN (BUILT_IN_ILOGB)
1713 CASE_MATHFN (BUILT_IN_INF)
1714 CASE_MATHFN (BUILT_IN_J0)
1715 CASE_MATHFN (BUILT_IN_J1)
1716 CASE_MATHFN (BUILT_IN_JN)
1717 CASE_MATHFN (BUILT_IN_LCEIL)
1718 CASE_MATHFN (BUILT_IN_LDEXP)
1719 CASE_MATHFN (BUILT_IN_LFLOOR)
1720 CASE_MATHFN (BUILT_IN_LGAMMA)
1721 CASE_MATHFN (BUILT_IN_LLCEIL)
1722 CASE_MATHFN (BUILT_IN_LLFLOOR)
1723 CASE_MATHFN (BUILT_IN_LLRINT)
1724 CASE_MATHFN (BUILT_IN_LLROUND)
1725 CASE_MATHFN (BUILT_IN_LOG)
1726 CASE_MATHFN (BUILT_IN_LOG10)
1727 CASE_MATHFN (BUILT_IN_LOG1P)
1728 CASE_MATHFN (BUILT_IN_LOG2)
1729 CASE_MATHFN (BUILT_IN_LOGB)
1730 CASE_MATHFN (BUILT_IN_LRINT)
1731 CASE_MATHFN (BUILT_IN_LROUND)
1732 CASE_MATHFN (BUILT_IN_MODF)
1733 CASE_MATHFN (BUILT_IN_NAN)
1734 CASE_MATHFN (BUILT_IN_NANS)
1735 CASE_MATHFN (BUILT_IN_NEARBYINT)
1736 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1737 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1738 CASE_MATHFN (BUILT_IN_POW)
1739 CASE_MATHFN (BUILT_IN_POWI)
1740 CASE_MATHFN (BUILT_IN_POW10)
1741 CASE_MATHFN (BUILT_IN_REMAINDER)
1742 CASE_MATHFN (BUILT_IN_REMQUO)
1743 CASE_MATHFN (BUILT_IN_RINT)
1744 CASE_MATHFN (BUILT_IN_ROUND)
1745 CASE_MATHFN (BUILT_IN_SCALB)
1746 CASE_MATHFN (BUILT_IN_SCALBLN)
1747 CASE_MATHFN (BUILT_IN_SCALBN)
1748 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1749 CASE_MATHFN (BUILT_IN_SIN)
1750 CASE_MATHFN (BUILT_IN_SINCOS)
1751 CASE_MATHFN (BUILT_IN_SINH)
1752 CASE_MATHFN (BUILT_IN_SQRT)
1753 CASE_MATHFN (BUILT_IN_TAN)
1754 CASE_MATHFN (BUILT_IN_TANH)
1755 CASE_MATHFN (BUILT_IN_TGAMMA)
1756 CASE_MATHFN (BUILT_IN_TRUNC)
1757 CASE_MATHFN (BUILT_IN_Y0)
1758 CASE_MATHFN (BUILT_IN_Y1)
1759 CASE_MATHFN (BUILT_IN_YN)
/* Select the precision variant matching TYPE; only implicitly-usable
   decls are returned.  */
1765 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1766 return implicit_built_in_decls[fcode];
1767 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1768 return implicit_built_in_decls[fcodef];
1769 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1770 return implicit_built_in_decls[fcodel];
1775 /* If errno must be maintained, expand the RTL to check if the result,
1776 TARGET, of a built-in function call, EXP, is NaN, and if so set
1780 expand_errno_check (tree exp, rtx target)
1782 rtx lab = gen_label_rtx ();
1784 /* Test the result; if it is NaN, set errno=EDOM because
1785 the argument was not in the domain. */
/* A NaN compares unequal to itself, so target == target fails only for
   NaN; jump over the errno store otherwise.  */
1786 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1790 /* If this built-in doesn't throw an exception, set errno directly. */
1791 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1793 #ifdef GEN_ERRNO_RTX
1794 rtx errno_rtx = GEN_ERRNO_RTX;
/* Fallback when the target does not define a dedicated errno rtx.  */
1797 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1799 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1805 /* We can't set errno=EDOM directly; let the library call do it.
1806 Pop the arguments right away in case the call gets deleted. */
1808 expand_call (exp, target, 0);
/* NOTE(review): excerpt with gaps — braces, some declarations (arg, narg)
   and a few statements of this function are not visible here.  */
1814 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1815 Return 0 if a normal call should be emitted rather than expanding the
1816 function in-line. EXP is the expression that is a call to the builtin
1817 function; if convenient, the result should be placed in TARGET.
1818 SUBTARGET may be used as the target for computing one of EXP's operands. */
1821 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1823 optab builtin_optab;
1824 rtx op0, insns, before_call;
1825 tree fndecl = get_callee_fndecl (exp);
1826 tree arglist = TREE_OPERAND (exp, 1);
1827 enum machine_mode mode;
1828 bool errno_set = false;
1831 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1834 arg = TREE_VALUE (arglist);
/* Map the builtin to its optab; errno_set marks functions whose library
   semantics require errno handling when flag_errno_math is on.  */
1836 switch (DECL_FUNCTION_CODE (fndecl))
1838 CASE_FLT_FN (BUILT_IN_SQRT):
1839 errno_set = ! tree_expr_nonnegative_p (arg);
1840 builtin_optab = sqrt_optab;
1842 CASE_FLT_FN (BUILT_IN_EXP):
1843 errno_set = true; builtin_optab = exp_optab; break;
1844 CASE_FLT_FN (BUILT_IN_EXP10):
1845 CASE_FLT_FN (BUILT_IN_POW10):
1846 errno_set = true; builtin_optab = exp10_optab; break;
1847 CASE_FLT_FN (BUILT_IN_EXP2):
1848 errno_set = true; builtin_optab = exp2_optab; break;
1849 CASE_FLT_FN (BUILT_IN_EXPM1):
1850 errno_set = true; builtin_optab = expm1_optab; break;
1851 CASE_FLT_FN (BUILT_IN_LOGB):
1852 errno_set = true; builtin_optab = logb_optab; break;
1853 CASE_FLT_FN (BUILT_IN_ILOGB):
1854 errno_set = true; builtin_optab = ilogb_optab; break;
1855 CASE_FLT_FN (BUILT_IN_LOG):
1856 errno_set = true; builtin_optab = log_optab; break;
1857 CASE_FLT_FN (BUILT_IN_LOG10):
1858 errno_set = true; builtin_optab = log10_optab; break;
1859 CASE_FLT_FN (BUILT_IN_LOG2):
1860 errno_set = true; builtin_optab = log2_optab; break;
1861 CASE_FLT_FN (BUILT_IN_LOG1P):
1862 errno_set = true; builtin_optab = log1p_optab; break;
1863 CASE_FLT_FN (BUILT_IN_ASIN):
1864 builtin_optab = asin_optab; break;
1865 CASE_FLT_FN (BUILT_IN_ACOS):
1866 builtin_optab = acos_optab; break;
1867 CASE_FLT_FN (BUILT_IN_TAN):
1868 builtin_optab = tan_optab; break;
1869 CASE_FLT_FN (BUILT_IN_ATAN):
1870 builtin_optab = atan_optab; break;
1871 CASE_FLT_FN (BUILT_IN_FLOOR):
1872 builtin_optab = floor_optab; break;
1873 CASE_FLT_FN (BUILT_IN_CEIL):
1874 builtin_optab = ceil_optab; break;
1875 CASE_FLT_FN (BUILT_IN_TRUNC):
1876 builtin_optab = btrunc_optab; break;
1877 CASE_FLT_FN (BUILT_IN_ROUND):
1878 builtin_optab = round_optab; break;
1879 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1880 builtin_optab = nearbyint_optab; break;
1881 CASE_FLT_FN (BUILT_IN_RINT):
1882 builtin_optab = rint_optab; break;
1883 CASE_FLT_FN (BUILT_IN_LRINT):
1884 CASE_FLT_FN (BUILT_IN_LLRINT):
1885 builtin_optab = lrint_optab; break;
1890 /* Make a suitable register to place result in. */
1891 mode = TYPE_MODE (TREE_TYPE (exp));
/* No errno handling needed when -fno-math-errno or the mode cannot
   represent NaN.  */
1893 if (! flag_errno_math || ! HONOR_NANS (mode))
1896 /* Before working hard, check whether the instruction is available. */
1897 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1899 target = gen_reg_rtx (mode);
1901 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1902 need to expand the argument again. This way, we will not perform
1903 side-effects more the once. */
1904 narg = builtin_save_expr (arg);
1908 arglist = build_tree_list (NULL_TREE, arg);
1909 exp = build_function_call_expr (fndecl, arglist);
1912 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1916 /* Compute into TARGET.
1917 Set TARGET to wherever the result comes back. */
1918 target = expand_unop (mode, builtin_optab, op0, target, 0);
1923 expand_errno_check (exp, target);
1925 /* Output the entire sequence. */
1926 insns = get_insns ();
1932 /* If we were unable to expand via the builtin, stop the sequence
1933 (without outputting the insns) and call to the library function
1934 with the stabilized argument list. */
1938 before_call = get_last_insn ();
1940 target = expand_call (exp, target, target == const0_rtx);
1942 /* If this is a sqrt operation and we don't care about errno, try to
1943 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1944 This allows the semantics of the libcall to be visible to the RTL
1946 if (builtin_optab == sqrt_optab && !errno_set)
1948 /* Search backwards through the insns emitted by expand_call looking
1949 for the instruction with the REG_RETVAL note. */
1950 rtx last = get_last_insn ();
1951 while (last != before_call)
1953 if (find_reg_note (last, REG_RETVAL, NULL))
1955 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1956 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1957 two elements, i.e. symbol_ref(sqrt) and the operand. */
1959 && GET_CODE (note) == EXPR_LIST
1960 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1961 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1962 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1964 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1965 /* Check operand is a register with expected mode. */
1968 && GET_MODE (operand) == mode)
1970 /* Replace the REG_EQUAL note with a SQRT rtx. */
1971 rtx equiv = gen_rtx_SQRT (mode, operand);
1972 set_unique_reg_note (last, REG_EQUAL, equiv);
1977 last = PREV_INSN (last);
/* NOTE(review): excerpt with gaps — braces and some statements of this
   function are not visible here.  */
1984 /* Expand a call to the builtin binary math functions (pow and atan2).
1985 Return 0 if a normal call should be emitted rather than expanding the
1986 function in-line. EXP is the expression that is a call to the builtin
1987 function; if convenient, the result should be placed in TARGET.
1988 SUBTARGET may be used as the target for computing one of EXP's
1992 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1994 optab builtin_optab;
1995 rtx op0, op1, insns;
1996 int op1_type = REAL_TYPE;
1997 tree fndecl = get_callee_fndecl (exp);
1998 tree arglist = TREE_OPERAND (exp, 1);
1999 tree arg0, arg1, temp, narg;
2000 enum machine_mode mode;
2001 bool errno_set = true;
/* ldexp is the one family here whose second argument is an int.  */
2004 if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXP)
2005 || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPF)
2006 || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPL)
2007 op1_type = INTEGER_TYPE;
2009 if (!validate_arglist (arglist, REAL_TYPE, op1_type, VOID_TYPE))
2012 arg0 = TREE_VALUE (arglist);
2013 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2015 switch (DECL_FUNCTION_CODE (fndecl))
2017 CASE_FLT_FN (BUILT_IN_POW):
2018 builtin_optab = pow_optab; break;
2019 CASE_FLT_FN (BUILT_IN_ATAN2):
2020 builtin_optab = atan2_optab; break;
2021 CASE_FLT_FN (BUILT_IN_LDEXP):
2022 builtin_optab = ldexp_optab; break;
2023 CASE_FLT_FN (BUILT_IN_FMOD):
2024 builtin_optab = fmod_optab; break;
2025 CASE_FLT_FN (BUILT_IN_DREM):
2026 builtin_optab = drem_optab; break;
2031 /* Make a suitable register to place result in. */
2032 mode = TYPE_MODE (TREE_TYPE (exp));
2034 /* Before working hard, check whether the instruction is available. */
2035 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2038 target = gen_reg_rtx (mode);
2040 if (! flag_errno_math || ! HONOR_NANS (mode))
2043 /* Always stabilize the argument list. */
/* builtin_save_expr keeps side effects from being expanded twice, since
   the call may be re-expanded as a libcall below.  */
2044 narg = builtin_save_expr (arg1);
2048 temp = build_tree_list (NULL_TREE, narg);
2052 temp = TREE_CHAIN (arglist);
2054 narg = builtin_save_expr (arg0);
2058 arglist = tree_cons (NULL_TREE, narg, temp);
2062 arglist = tree_cons (NULL_TREE, arg0, temp);
2065 exp = build_function_call_expr (fndecl, arglist);
2067 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2068 op1 = expand_normal (arg1);
2072 /* Compute into TARGET.
2073 Set TARGET to wherever the result comes back. */
2074 target = expand_binop (mode, builtin_optab, op0, op1,
2075 target, 0, OPTAB_DIRECT);
2077 /* If we were unable to expand via the builtin, stop the sequence
2078 (without outputting the insns) and call to the library function
2079 with the stabilized argument list. */
2083 return expand_call (exp, target, target == const0_rtx);
2087 expand_errno_check (exp, target);
2089 /* Output the entire sequence. */
2090 insns = get_insns ();
2097 /* Expand a call to the builtin sin and cos math functions.
2098 Return 0 if a normal call should be emitted rather than expanding the
2099 function in-line. EXP is the expression that is a call to the builtin
2100 function; if convenient, the result should be placed in TARGET.
2101 SUBTARGET may be used as the target for computing one of EXP's
/* Expand a call to the builtin sin or cos function.  Prefers the
   two-result sincos optab when available, otherwise falls back to the
   single-result sin/cos optabs, and finally to a library call.  */
2105 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2107 optab builtin_optab;
2109 tree fndecl = get_callee_fndecl (exp);
2110 tree arglist = TREE_OPERAND (exp, 1);
2111 enum machine_mode mode;
2114 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2117 arg = TREE_VALUE (arglist);
2119 switch (DECL_FUNCTION_CODE (fndecl))
2121 CASE_FLT_FN (BUILT_IN_SIN):
2122 CASE_FLT_FN (BUILT_IN_COS):
2123 builtin_optab = sincos_optab; break;
2128 /* Make a suitable register to place result in. */
2129 mode = TYPE_MODE (TREE_TYPE (exp));
2131 /* Check if sincos insn is available, otherwise fallback
2132 to sin or cos insn. */
2133 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) {
2134 switch (DECL_FUNCTION_CODE (fndecl))
2136 CASE_FLT_FN (BUILT_IN_SIN):
2137 builtin_optab = sin_optab; break;
2138 CASE_FLT_FN (BUILT_IN_COS):
2139 builtin_optab = cos_optab; break;
2145 /* Before working hard, check whether the instruction is available. */
2146 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2148 target = gen_reg_rtx (mode);
2150 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2151 need to expand the argument again. This way, we will not perform
2152 side-effects more than once. */
2153 narg = save_expr (arg);
2157 arglist = build_tree_list (NULL_TREE, arg);
2158 exp = build_function_call_expr (fndecl, arglist);
2161 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2165 /* Compute into TARGET.
2166 Set TARGET to wherever the result comes back. */
/* sincos produces both results at once; request only the one we need
   by passing 0 for the unused output.  */
2167 if (builtin_optab == sincos_optab)
2171 switch (DECL_FUNCTION_CODE (fndecl))
2173 CASE_FLT_FN (BUILT_IN_SIN):
2174 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2176 CASE_FLT_FN (BUILT_IN_COS):
2177 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2182 gcc_assert (result);
2186 target = expand_unop (mode, builtin_optab, op0, target, 0);
2191 /* Output the entire sequence. */
2192 insns = get_insns ();
2198 /* If we were unable to expand via the builtin, stop the sequence
2199 (without outputting the insns) and call to the library function
2200 with the stabilized argument list. */
2204 target = expand_call (exp, target, target == const0_rtx);
2209 /* Expand a call to the builtin sincos math function.
2210 Return 0 if a normal call should be emitted rather than expanding the
2211 function in-line. EXP is the expression that is a call to the builtin
/* Expand a call to the builtin sincos function: sincos (arg, sinp,
   cosp) computes both sin and cos of ARG and stores them through the
   SINP and COSP pointer arguments.  Requires the target to provide a
   sincos insn for the argument's mode.  */
2215 expand_builtin_sincos (tree exp)
2217 rtx op0, op1, op2, target1, target2;
2218 tree arglist = TREE_OPERAND (exp, 1);
2219 enum machine_mode mode;
2220 tree arg, sinp, cosp;
2223 if (!validate_arglist (arglist, REAL_TYPE,
2224 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2227 arg = TREE_VALUE (arglist);
2228 sinp = TREE_VALUE (TREE_CHAIN (arglist));
2229 cosp = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2231 /* Make a suitable register to place result in. */
2232 mode = TYPE_MODE (TREE_TYPE (arg));
2234 /* Check if sincos insn is available, otherwise emit the call. */
2235 if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2238 target1 = gen_reg_rtx (mode);
2239 target2 = gen_reg_rtx (mode);
2241 op0 = expand_normal (arg);
/* op1/op2 are the lvalues *sinp and *cosp that receive the results.  */
2242 op1 = expand_normal (build_fold_indirect_ref (sinp));
2243 op2 = expand_normal (build_fold_indirect_ref (cosp));
2245 /* Compute into target1 and target2.
2246 Set TARGET to wherever the result comes back. */
2247 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2248 gcc_assert (result);
2250 /* Move target1 and target2 to the memory locations indicated
2252 emit_move_insn (op1, target1);
2253 emit_move_insn (op2, target2);
2258 /* Expand a call to one of the builtin rounding functions (lfloor).
2259 If expanding via optab fails, lower expression to (int)(floor(x)).
2260 EXP is the expression that is a call to the builtin function;
2261 if convenient, the result should be placed in TARGET. SUBTARGET may
2262 be used as the target for computing one of EXP's operands. */
/* Expand a call to one of the integer rounding builtins lceil/llceil
   or lfloor/llfloor.  Try the l{ceil,floor} optab first; if that is
   unavailable, lower to the floating point ceil/floor builtin
   followed by a float-to-integer conversion via expand_fix.  */
2265 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2267 optab builtin_optab;
2268 rtx op0, insns, tmp;
2269 tree fndecl = get_callee_fndecl (exp);
2270 tree arglist = TREE_OPERAND (exp, 1);
2271 enum built_in_function fallback_fn;
2272 tree fallback_fndecl;
2273 enum machine_mode mode;
2276 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2279 arg = TREE_VALUE (arglist);
/* Pick the optab and the floating point builtin used as fallback.  */
2281 switch (DECL_FUNCTION_CODE (fndecl))
2283 CASE_FLT_FN (BUILT_IN_LCEIL):
2284 CASE_FLT_FN (BUILT_IN_LLCEIL):
2285 builtin_optab = lceil_optab;
2286 fallback_fn = BUILT_IN_CEIL;
2289 CASE_FLT_FN (BUILT_IN_LFLOOR):
2290 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2291 builtin_optab = lfloor_optab;
2292 fallback_fn = BUILT_IN_FLOOR;
2299 /* Make a suitable register to place result in. */
2300 mode = TYPE_MODE (TREE_TYPE (exp));
2302 /* Before working hard, check whether the instruction is available. */
2303 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2305 target = gen_reg_rtx (mode);
2307 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2308 need to expand the argument again. This way, we will not perform
2309 side-effects more than once. */
2310 narg = builtin_save_expr (arg);
2314 arglist = build_tree_list (NULL_TREE, arg);
2315 exp = build_function_call_expr (fndecl, arglist);
2318 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2322 /* Compute into TARGET.
2323 Set TARGET to wherever the result comes back. */
2324 target = expand_unop (mode, builtin_optab, op0, target, 0);
2328 /* Output the entire sequence. */
2329 insns = get_insns ();
2335 /* If we were unable to expand via the builtin, stop the sequence
2336 (without outputting the insns). */
2340 /* Fall back to floating point rounding optab. */
2341 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2342 /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
2343 ??? Perhaps convert (int)floorf(x) into (int)floor((double)x). */
2344 gcc_assert (fallback_fndecl != NULL_TREE);
2345 exp = build_function_call_expr (fallback_fndecl, arglist);
2347 tmp = expand_normal (exp);
2349 /* Truncate the result of floating point optab to integer
2350 via expand_fix (). */
2351 target = gen_reg_rtx (mode);
2352 expand_fix (target, tmp, 0);
2357 /* To evaluate powi(x,n), the floating point value x raised to the
2358 constant integer exponent n, we use a hybrid algorithm that
2359 combines the "window method" with look-up tables. For an
2360 introduction to exponentiation algorithms and "addition chains",
2361 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2362 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2363 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2364 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2366 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2367 multiplications to inline before calling the system library's pow
2368 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2369 so this default never requires calling pow, powf or powl. */
2371 #ifndef POWI_MAX_MULTS
2372 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2375 /* The size of the "optimal power tree" lookup table. All
2376 exponents less than this value are simply looked up in the
2377 powi_table below. This threshold is also used to size the
2378 cache of pseudo registers that hold intermediate results. */
2379 #define POWI_TABLE_SIZE 256
2381 /* The size, in bits of the window, used in the "window method"
2382 exponentiation algorithm. This is equivalent to a radix of
2383 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2384 #define POWI_WINDOW_SIZE 3
2386 /* The following table is an efficient representation of an
2387 "optimal power tree". For each value, i, the corresponding
2388 value, j, in the table states that an optimal evaluation
2389 sequence for calculating pow(x,i) can be found by evaluating
2390 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2391 100 integers is given in Knuth's "Seminumerical algorithms". */
/* powi_table[i] holds a j such that pow(x,i) is best computed as
   pow(x,j) * pow(x,i-j); see the "optimal power tree" comment above.  */
2393 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2395 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2396 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2397 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2398 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2399 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2400 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2401 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2402 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2403 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2404 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2405 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2406 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2407 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2408 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2409 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2410 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2411 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2412 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2413 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2414 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2415 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2416 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2417 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2418 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2419 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2420 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2421 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2422 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2423 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2424 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2425 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2426 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2430 /* Return the number of multiplications required to calculate
2431 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2432 subroutine of powi_cost. CACHE is an array indicating
2433 which exponents have already been calculated. */
/* Return the number of multiplications required for powi(x,N) with
   N < POWI_TABLE_SIZE.  CACHE marks exponents already computed;
   cached exponents contribute no additional multiplications.  */
2436 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2438 /* If we've already calculated this exponent, then this evaluation
2439 doesn't require any additional multiplications. */
/* Split N as powi_table[n] + (n - powi_table[n]); the "+ 1" is the
   single multiply combining the two partial powers.  */
2444 return powi_lookup_cost (n - powi_table[n], cache)
2445 + powi_lookup_cost (powi_table[n], cache) + 1;
2448 /* Return the number of multiplications required to calculate
2449 powi(x,n) for an arbitrary x, given the exponent N. This
2450 function needs to be kept in sync with expand_powi below. */
/* Return the number of multiplications required for powi(x,N) for an
   arbitrary x.  Must be kept in sync with expand_powi below: it uses
   the same window decomposition and lookup table.  */
2453 powi_cost (HOST_WIDE_INT n)
2455 bool cache[POWI_TABLE_SIZE];
2456 unsigned HOST_WIDE_INT digit;
2457 unsigned HOST_WIDE_INT val;
2463 /* Ignore the reciprocal when calculating the cost. */
2464 val = (n < 0) ? -n : n;
2466 /* Initialize the exponent cache. */
2467 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Consume the exponent one POWI_WINDOW_SIZE-bit window at a time
   until the remainder fits in the lookup table.  */
2472 while (val >= POWI_TABLE_SIZE)
2476 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2477 result += powi_lookup_cost (digit, cache)
2478 + POWI_WINDOW_SIZE + 1;
2479 val >>= POWI_WINDOW_SIZE;
/* Whatever is left is looked up directly.  */
2488 return result + powi_lookup_cost (val, cache);
2491 /* Recursive subroutine of expand_powi. This function takes the array,
2492 CACHE, of already calculated exponents and an exponent N and returns
2493 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
/* Recursive worker for expand_powi: return an RTX computing
   CACHE[1]**N in mode MODE.  CACHE holds the RTXs of exponents
   already materialized.  Three cases: small N split via powi_table,
   odd-window N split off its low POWI_WINDOW_SIZE bits, otherwise
   square the half power.  */
2496 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2498 unsigned HOST_WIDE_INT digit;
2502 if (n < POWI_TABLE_SIZE)
2507 target = gen_reg_rtx (mode);
/* Optimal-table split: x**n = x**(n - powi_table[n]) * x**powi_table[n].  */
2510 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2511 op1 = expand_powi_1 (mode, powi_table[n], cache);
2515 target = gen_reg_rtx (mode);
/* Peel off the low window bits: x**n = x**(n - digit) * x**digit.  */
2516 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2517 op0 = expand_powi_1 (mode, n - digit, cache);
2518 op1 = expand_powi_1 (mode, digit, cache);
2522 target = gen_reg_rtx (mode);
/* Even case: x**n = (x**(n/2))**2.  */
2523 op0 = expand_powi_1 (mode, n >> 1, cache);
2527 result = expand_mult (mode, op0, op1, target, 0);
2528 if (result != target)
2529 emit_move_insn (target, result);
2533 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2534 floating point operand in mode MODE, and N is the exponent. This
2535 function needs to be kept in sync with powi_cost above. */
/* Emit RTL computing powi(X,N) in mode MODE, where X is a floating
   point operand and N a compile-time integer exponent.  Must be kept
   in sync with powi_cost above.  A negative N is handled by computing
   x**|n| and taking the reciprocal at the end.  */
2538 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2540 unsigned HOST_WIDE_INT val;
2541 rtx cache[POWI_TABLE_SIZE];
/* x**0 == 1 for any x.  */
2545 return CONST1_RTX (mode);
2547 val = (n < 0) ? -n : n;
2549 memset (cache, 0, sizeof (cache));
2552 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2554 /* If the original exponent was negative, reciprocate the result. */
2556 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2557 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2562 /* Expand a call to the pow built-in mathematical function. Return 0 if
2563 a normal call should be emitted rather than expanding the function
2564 in-line. EXP is the expression that is a call to the builtin
2565 function; if convenient, the result should be placed in TARGET. */
/* Expand a call to the pow builtin.  When the exponent is a real
   constant with an exact integer value, expand as powi via repeated
   multiplication; otherwise defer to expand_builtin_mathfn_2 (which
   may use the pow optab or a library call).  */
2568 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2570 tree arglist = TREE_OPERAND (exp, 1);
2573 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2576 arg0 = TREE_VALUE (arglist);
2577 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2579 if (TREE_CODE (arg1) == REAL_CST
2580 && ! TREE_CONSTANT_OVERFLOW (arg1))
2582 REAL_VALUE_TYPE cint;
/* Check whether the real exponent is exactly an integer by
   round-tripping it through real_to_integer/real_from_integer.  */
2586 c = TREE_REAL_CST (arg1);
2587 n = real_to_integer (&c);
2588 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2589 if (real_identical (&c, &cint))
2591 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2592 Otherwise, check the number of multiplications required.
2593 Note that pow never sets errno for an integer exponent. */
2594 if ((n >= -1 && n <= 2)
2595 || (flag_unsafe_math_optimizations
2597 && powi_cost (n) <= POWI_MAX_MULTS))
2599 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2600 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2601 op = force_reg (mode, op);
2602 return expand_powi (op, mode, n);
2607 if (! flag_unsafe_math_optimizations)
2609 return expand_builtin_mathfn_2 (exp, target, subtarget);
2612 /* Expand a call to the powi built-in mathematical function. Return 0 if
2613 a normal call should be emitted rather than expanding the function
2614 in-line. EXP is the expression that is a call to the builtin
2615 function; if convenient, the result should be placed in TARGET. */
/* Expand a call to the powi builtin (real base, integer exponent).
   A small or cheap constant exponent is expanded inline via
   expand_powi; otherwise a libgcc __powi* libcall is emitted.  */
2618 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2620 tree arglist = TREE_OPERAND (exp, 1);
2623 enum machine_mode mode;
2624 enum machine_mode mode2;
2626 if (! validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2629 arg0 = TREE_VALUE (arglist);
2630 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2631 mode = TYPE_MODE (TREE_TYPE (exp));
2633 /* Handle constant power. */
2635 if (TREE_CODE (arg1) == INTEGER_CST
2636 && ! TREE_CONSTANT_OVERFLOW (arg1))
2638 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
2640 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2641 Otherwise, check the number of multiplications required. */
/* The TREE_INT_CST_HIGH test ensures the exponent fits in a single
   HOST_WIDE_INT (no high-part bits beyond sign extension).  */
2642 if ((TREE_INT_CST_HIGH (arg1) == 0
2643 || TREE_INT_CST_HIGH (arg1) == -1)
2644 && ((n >= -1 && n <= 2)
2646 && powi_cost (n) <= POWI_MAX_MULTS)))
2648 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
2649 op0 = force_reg (mode, op0);
2650 return expand_powi (op0, mode, n);
2654 /* Emit a libcall to libgcc. */
2656 /* Mode of the 2nd argument must match that of an int. */
2657 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2659 if (target == NULL_RTX)
2660 target = gen_reg_rtx (mode);
/* Convert both operands to the modes the libcall expects.  */
2662 op0 = expand_expr (arg0, subtarget, mode, 0);
2663 if (GET_MODE (op0) != mode)
2664 op0 = convert_to_mode (mode, op0, 0)
2665 op1 = expand_expr (arg1, 0, mode2, 0);
2666 if (GET_MODE (op1) != mode2)
2667 op1 = convert_to_mode (mode2, op1, 0);
2669 target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
2670 target, LCT_CONST_MAKE_BLOCK, mode, 2,
2671 op0, mode, op1, mode2);
2676 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2677 if we failed, the caller should emit a normal call; otherwise
2678 try to get the result in TARGET, if convenient. */
/* Expand a call to strlen.  First try to fold the length at compile
   time (c_strlen), then try the target's strlen<mode> insn pattern;
   the caller emits a normal library call if neither applies.  The
   result is returned converted to TARGET_MODE.  */
2681 expand_builtin_strlen (tree arglist, rtx target,
2682 enum machine_mode target_mode)
2684 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2689 tree len, src = TREE_VALUE (arglist);
2690 rtx result, src_reg, char_rtx, before_strlen;
2691 enum machine_mode insn_mode = target_mode, char_mode;
2692 enum insn_code icode = CODE_FOR_nothing;
2695 /* If the length can be computed at compile-time, return it. */
2696 len = c_strlen (src, 0);
2698 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2700 /* If the length can be computed at compile-time and is constant
2701 integer, but there are side-effects in src, evaluate
2702 src for side-effects, then return len.
2703 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2704 can be optimized into: i++; x = 3; */
2705 len = c_strlen (src, 1);
2706 if (len && TREE_CODE (len) == INTEGER_CST)
2708 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2709 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2712 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2714 /* If SRC is not a pointer type, don't do this operation inline. */
2718 /* Bail out if we can't compute strlen in the right mode. */
/* Search successively wider integer modes for a strlen pattern.  */
2719 while (insn_mode != VOIDmode)
2721 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2722 if (icode != CODE_FOR_nothing)
2725 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2727 if (insn_mode == VOIDmode)
2730 /* Make a place to write the result of the instruction. */
2734 && GET_MODE (result) == insn_mode
2735 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2736 result = gen_reg_rtx (insn_mode);
2738 /* Make a place to hold the source address. We will not expand
2739 the actual source until we are sure that the expansion will
2740 not fail -- there are trees that cannot be expanded twice. */
2741 src_reg = gen_reg_rtx (Pmode);
2743 /* Mark the beginning of the strlen sequence so we can emit the
2744 source operand later. */
2745 before_strlen = get_last_insn ();
/* Operand 2 of the strlen pattern is the character to search for;
   strlen proper always searches for NUL.  */
2747 char_rtx = const0_rtx;
2748 char_mode = insn_data[(int) icode].operand[2].mode;
2749 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2751 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2753 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2754 char_rtx, GEN_INT (align));
2759 /* Now that we are assured of success, expand the source. */
2761 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
2763 emit_move_insn (src_reg, pat);
/* Insert the source-address computation before the strlen insn.  */
2768 emit_insn_after (pat, before_strlen);
2770 emit_insn_before (pat, get_insns ());
2772 /* Return the value in the proper mode for this function. */
2773 if (GET_MODE (result) == target_mode)
2775 else if (target != 0)
2776 convert_move (target, result, 0);
2778 target = convert_to_mode (target_mode, result, 0);
2784 /* Expand a call to the strstr builtin. Return 0 if we failed the
2785 caller should emit a normal call, otherwise try to get the result
2786 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Expand a strstr call by folding it to a tree (fold_builtin_strstr)
   and expanding the folded result; the caller emits a normal call
   when folding fails.  TYPE is the call's result type.  */
2789 expand_builtin_strstr (tree arglist, tree type, rtx target, enum machine_mode mode)
2791 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2793 tree result = fold_builtin_strstr (arglist, type);
2795 return expand_expr (result, target, mode, EXPAND_NORMAL);
2800 /* Expand a call to the strchr builtin. Return 0 if we failed the
2801 caller should emit a normal call, otherwise try to get the result
2802 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Expand a strchr call by folding it (fold_builtin_strchr) and
   expanding the folded result; the caller emits a normal call when
   folding fails.  TYPE is the call's result type.  */
2805 expand_builtin_strchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2807 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2809 tree result = fold_builtin_strchr (arglist, type);
2811 return expand_expr (result, target, mode, EXPAND_NORMAL);
2813 /* FIXME: Should use strchrM optab so that ports can optimize this. */
2818 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2819 caller should emit a normal call, otherwise try to get the result
2820 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Expand a strrchr call by folding it (fold_builtin_strrchr) and
   expanding the folded result; the caller emits a normal call when
   folding fails.  TYPE is the call's result type.  */
2823 expand_builtin_strrchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2825 if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2827 tree result = fold_builtin_strrchr (arglist, type);
2829 return expand_expr (result, target, mode, EXPAND_NORMAL);
2834 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2835 caller should emit a normal call, otherwise try to get the result
2836 in TARGET, if convenient (and in mode MODE if that's convenient). */
/* Expand a strpbrk call by folding it (fold_builtin_strpbrk) and
   expanding the folded result; the caller emits a normal call when
   folding fails.  TYPE is the call's result type.  */
2839 expand_builtin_strpbrk (tree arglist, tree type, rtx target, enum machine_mode mode)
2841 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2843 tree result = fold_builtin_strpbrk (arglist, type);
2845 return expand_expr (result, target, mode, EXPAND_NORMAL);
2850 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2851 bytes from constant string DATA + OFFSET and return it as target
/* Callback for store_by_pieces: DATA is a NUL-terminated C string;
   read GET_MODE_BITSIZE (MODE) bits starting at byte OFFSET and
   return them as an rtx constant via c_readstr.  The assert checks
   the read stays within the string including its terminator.  */
2855 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2856 enum machine_mode mode)
2858 const char *str = (const char *) data;
2860 gcc_assert (offset >= 0
2861 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2862 <= strlen (str) + 1));
2864 return c_readstr (str + offset, mode);
2867 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2868 Return 0 if we failed, the caller should emit a normal call,
2869 otherwise try to get the result in TARGET, if convenient (and in
2870 mode MODE if that's convenient). */
/* Expand a call to memcpy.  Strategy, in order: fold the whole call
   to a tree (fold_builtin_memory_op); copy a constant source string
   with store_by_pieces; otherwise emit a block move.  Returns the
   destination pointer (or 0 so the caller emits a normal call).  */
2872 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
2874 tree fndecl = get_callee_fndecl (exp);
2875 tree arglist = TREE_OPERAND (exp, 1);
2876 if (!validate_arglist (arglist,
2877 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2881 tree dest = TREE_VALUE (arglist);
2882 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2883 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2884 const char *src_str;
2885 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2886 unsigned int dest_align
2887 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2888 rtx dest_mem, src_mem, dest_addr, len_rtx;
2889 tree result = fold_builtin_memory_op (arglist, TREE_TYPE (TREE_TYPE (fndecl)),
/* The folded result may be a COMPOUND_EXPR chain; evaluate the
   side-effect halves for effect and expand the final value.  */
2894 while (TREE_CODE (result) == COMPOUND_EXPR)
2896 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
2898 result = TREE_OPERAND (result, 1);
2900 return expand_expr (result, target, mode, EXPAND_NORMAL);
2903 /* If DEST is not a pointer type, call the normal function. */
2904 if (dest_align == 0)
2907 /* If either SRC is not a pointer type, don't do this
2908 operation in-line. */
2912 dest_mem = get_memory_rtx (dest, len);
2913 set_mem_align (dest_mem, dest_align);
2914 len_rtx = expand_normal (len);
2915 src_str = c_getstr (src);
2917 /* If SRC is a string constant and block move would be done
2918 by pieces, we can avoid loading the string from memory
2919 and only store the computed constants. */
2921 && GET_CODE (len_rtx) == CONST_INT
2922 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2923 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2924 (void *) src_str, dest_align))
2926 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2927 builtin_memcpy_read_str,
2928 (void *) src_str, dest_align, 0);
2929 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2930 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2934 src_mem = get_memory_rtx (src, len);
2935 set_mem_align (src_mem, src_align);
2937 /* Copy word part most expediently. */
2938 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2939 CALL_EXPR_TAILCALL (exp)
2940 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
2944 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2945 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2951 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2952 Return 0 if we failed; the caller should emit a normal call,
2953 otherwise try to get the result in TARGET, if convenient (and in
2954 mode MODE if that's convenient). If ENDP is 0 return the
2955 destination pointer, if ENDP is 1 return the end pointer ala
2956 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* Expand a call to mempcpy (also used as a helper for memmove with
   length 1).  ENDP selects the returned pointer: 0 = destination,
   1 = end (mempcpy), 2 = end minus one (stpcpy-style).  When the
   result is ignored, fall back to plain memcpy.  */
2960 expand_builtin_mempcpy (tree arglist, tree type, rtx target, enum machine_mode mode,
2963 if (!validate_arglist (arglist,
2964 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2966 /* If return value is ignored, transform mempcpy into memcpy. */
2967 else if (target == const0_rtx)
2969 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2974 return expand_expr (build_function_call_expr (fn, arglist),
2975 target, mode, EXPAND_NORMAL);
2979 tree dest = TREE_VALUE (arglist);
2980 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2981 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2982 const char *src_str;
2983 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2984 unsigned int dest_align
2985 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2986 rtx dest_mem, src_mem, len_rtx;
2987 tree result = fold_builtin_memory_op (arglist, type, false, endp);
/* Evaluate side-effect halves of a folded COMPOUND_EXPR chain, then
   expand the final value.  */
2991 while (TREE_CODE (result) == COMPOUND_EXPR)
2993 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
2995 result = TREE_OPERAND (result, 1);
2997 return expand_expr (result, target, mode, EXPAND_NORMAL);
3000 /* If either SRC or DEST is not a pointer type, don't do this
3001 operation in-line. */
3002 if (dest_align == 0 || src_align == 0)
3005 /* If LEN is not constant, call the normal function. */
3006 if (! host_integerp (len, 1))
3009 len_rtx = expand_normal (len);
3010 src_str = c_getstr (src);
3012 /* If SRC is a string constant and block move would be done
3013 by pieces, we can avoid loading the string from memory
3014 and only store the computed constants. */
3016 && GET_CODE (len_rtx) == CONST_INT
3017 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3018 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3019 (void *) src_str, dest_align))
3021 dest_mem = get_memory_rtx (dest, len);
3022 set_mem_align (dest_mem, dest_align);
3023 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3024 builtin_memcpy_read_str,
3025 (void *) src_str, dest_align, endp);
3026 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3027 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise try an inline move-by-pieces for a small constant LEN.  */
3031 if (GET_CODE (len_rtx) == CONST_INT
3032 && can_move_by_pieces (INTVAL (len_rtx),
3033 MIN (dest_align, src_align)))
3035 dest_mem = get_memory_rtx (dest, len);
3036 set_mem_align (dest_mem, dest_align);
3037 src_mem = get_memory_rtx (src, len);
3038 set_mem_align (src_mem, src_align);
3039 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3040 MIN (dest_align, src_align), endp);
3041 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3042 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3050 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
3051 if we failed; the caller should emit a normal call. */
/* Expand a call to memmove.  Inline expansion is only safe when the
   regions cannot overlap in a harmful way: source in read-only data,
   or length 1 (handled via mempcpy).  Otherwise return 0 so the
   caller emits a normal call.  ORIG_EXP carries the tail-call flag.  */
3054 expand_builtin_memmove (tree arglist, tree type, rtx target,
3055 enum machine_mode mode, tree orig_exp)
3057 if (!validate_arglist (arglist,
3058 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3062 tree dest = TREE_VALUE (arglist);
3063 tree src = TREE_VALUE (TREE_CHAIN (arglist));
3064 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3066 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3067 unsigned int dest_align
3068 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3069 tree result = fold_builtin_memory_op (arglist, type, false, /*endp=*/3);
/* Evaluate side-effect halves of a folded COMPOUND_EXPR chain, then
   expand the final value.  */
3073 while (TREE_CODE (result) == COMPOUND_EXPR)
3075 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3077 result = TREE_OPERAND (result, 1);
3079 return expand_expr (result, target, mode, EXPAND_NORMAL);
3082 /* If DEST is not a pointer type, call the normal function. */
3083 if (dest_align == 0)
3086 /* If either SRC is not a pointer type, don't do this
3087 operation in-line. */
3091 /* If src is categorized for a readonly section we can use
3093 if (readonly_data_expr (src))
3095 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3098 fn = build_function_call_expr (fn, arglist);
/* Propagate the tail-call flag from the original memmove call.  */
3099 if (TREE_CODE (fn) == CALL_EXPR)
3100 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3101 return expand_expr (fn, target, mode, EXPAND_NORMAL);
3104 /* If length is 1 and we can expand memcpy call inline,
3105 it is ok to use memcpy as well. */
3106 if (integer_onep (len))
3108 rtx ret = expand_builtin_mempcpy (arglist, type, target, mode,
3114 /* Otherwise, call the normal function. */
3119 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
3120 if we failed the caller should emit a normal call. */
/* Expand a call to bcopy by rewriting it as memmove with the first
   two arguments swapped and the size converted to size_t, then
   expanding that.  EXP is the bcopy CALL_EXPR.  */
3123 expand_builtin_bcopy (tree exp)
3125 tree arglist = TREE_OPERAND (exp, 1);
3126 tree type = TREE_TYPE (exp);
3127 tree src, dest, size, newarglist;
3129 if (!validate_arglist (arglist,
3130 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3133 src = TREE_VALUE (arglist);
3134 dest = TREE_VALUE (TREE_CHAIN (arglist));
3135 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3137 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
3138 memmove(ptr y, ptr x, size_t z). This is done this way
3139 so that if it isn't expanded inline, we fallback to
3140 calling bcopy instead of memmove. */
3142 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3143 newarglist = tree_cons (NULL_TREE, src, newarglist);
3144 newarglist = tree_cons (NULL_TREE, dest, newarglist);
/* const0_rtx as the target: bcopy's result is always ignored.  */
3146 return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode, exp);
/* Fallback definitions for targets that provide no movstr pattern.  */
3150 # define HAVE_movstr 0
3151 # define CODE_FOR_movstr CODE_FOR_nothing
3154 /* Expand into a movstr instruction, if one is available. Return 0 if
3155 we failed, the caller should emit a normal call, otherwise try to
3156 get the result in TARGET, if convenient. If ENDP is 0 return the
3157 destination pointer, if ENDP is 1 return the end pointer ala
3158 mempcpy, and if ENDP is 2 return the end pointer minus one ala
/* Expand a string copy into the target's movstr instruction, if any.
   DEST and SRC are the pointer trees; ENDP selects the returned
   pointer: 0 = destination, 1 = end (mempcpy-style), 2 = end minus
   one (stpcpy-style).  */
3162 expand_movstr (tree dest, tree src, rtx target, int endp)
3168 const struct insn_data * data;
3173 dest_mem = get_memory_rtx (dest, NULL);
3174 src_mem = get_memory_rtx (src, NULL);
3177 target = force_reg (Pmode, XEXP (dest_mem, 0));
3178 dest_mem = replace_equiv_address (dest_mem, target);
3179 end = gen_reg_rtx (Pmode);
3183 if (target == 0 || target == const0_rtx)
3185 end = gen_reg_rtx (Pmode);
3193 data = insn_data + CODE_FOR_movstr;
/* Narrow END to the mode the movstr pattern's first operand wants.  */
3195 if (data->operand[0].mode != VOIDmode)
3196 end = gen_lowpart (data->operand[0].mode, end);
3198 insn = data->genfun (end, dest_mem, src_mem);
3204 /* movstr is supposed to set end to the address of the NUL
3205 terminator. If the caller requested a mempcpy-like return value,
3207 if (endp == 1 && target != const0_rtx)
3209 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3210 emit_move_insn (target, force_operand (tem, NULL_RTX));
3216 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
3217 if we failed the caller should emit a normal call, otherwise try to get
3218 the result in TARGET, if convenient (and in mode MODE if that's
3222 expand_builtin_strcpy (tree fndecl, tree arglist, rtx target, enum machine_mode mode)
3224 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Try to simplify the call at the tree level first.  */
3226 tree result = fold_builtin_strcpy (fndecl, arglist, 0);
/* A COMPOUND_EXPR chain carries side effects in operand 0: expand and
   discard each one, then keep the value operand.  */
3229 while (TREE_CODE (result) == COMPOUND_EXPR)
3231 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3233 result = TREE_OPERAND (result, 1);
3235 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* Folding failed; fall back to the target's movstr pattern.  endp == 0
   requests the destination pointer, matching strcpy semantics.  */
3238 return expand_movstr (TREE_VALUE (arglist),
3239 TREE_VALUE (TREE_CHAIN (arglist)),
3240 target, /*endp=*/0);
3245 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
3246 Return 0 if we failed the caller should emit a normal call,
3247 otherwise try to get the result in TARGET, if convenient (and in
3248 mode MODE if that's convenient). */
3251 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3253 tree arglist = TREE_OPERAND (exp, 1);
3254 /* If return value is ignored, transform stpcpy into strcpy. */
3255 if (target == const0_rtx)
3257 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3261 return expand_expr (build_function_call_expr (fn, arglist),
3262 target, mode, EXPAND_NORMAL);
3265 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3269 tree dst, src, len, lenp1;
3273 /* Ensure we get an actual string whose length can be evaluated at
3274 compile-time, not an expression containing a string. This is
3275 because the latter will potentially produce pessimized code
3276 when used to produce the return value. */
3277 src = TREE_VALUE (TREE_CHAIN (arglist));
3278 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
/* Unknown source length: use movstr with endp == 2 (end pointer minus
   one, i.e. the address of the NUL -- stpcpy semantics).  */
3279 return expand_movstr (TREE_VALUE (arglist),
3280 TREE_VALUE (TREE_CHAIN (arglist)),
3281 target, /*endp=*/2);
/* Known constant length: rewrite stpcpy (d, s) as
   mempcpy (d, s, strlen (s) + 1) and expand that.  */
3283 dst = TREE_VALUE (arglist);
3284 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3285 narglist = build_tree_list (NULL_TREE, lenp1);
3286 narglist = tree_cons (NULL_TREE, src, narglist);
3287 narglist = tree_cons (NULL_TREE, dst, narglist);
3288 ret = expand_builtin_mempcpy (narglist, TREE_TYPE (exp),
3289 target, mode, /*endp=*/2);
/* mempcpy expansion failed: if the length folded to a CONST_INT we can
   still expand as strcpy and compute the return value by hand.  */
3294 if (TREE_CODE (len) == INTEGER_CST)
3296 rtx len_rtx = expand_normal (len);
3298 if (GET_CODE (len_rtx) == CONST_INT)
3300 ret = expand_builtin_strcpy (get_callee_fndecl (exp),
3301 arglist, target, mode);
3307 if (mode != VOIDmode)
3308 target = gen_reg_rtx (mode);
3310 target = gen_reg_rtx (GET_MODE (ret));
3312 if (GET_MODE (target) != GET_MODE (ret))
3313 ret = gen_lowpart (GET_MODE (target), ret);
/* stpcpy returns DST advanced by the string length, i.e. the address
   of the terminating NUL.  */
3315 ret = plus_constant (ret, INTVAL (len_rtx));
3316 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
/* Everything else failed: final fallback to the movstr pattern.  */
3324 return expand_movstr (TREE_VALUE (arglist),
3325 TREE_VALUE (TREE_CHAIN (arglist)),
3326 target, /*endp=*/2);
3330 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3331 bytes from constant string DATA + OFFSET and return it as target
3335 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3336 enum machine_mode mode)
3338 const char *str = (const char *) data;
/* Reads past the terminating NUL yield zeros: strncpy is required to
   pad the destination with trailing zero bytes.  */
3340 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3343 return c_readstr (str + offset, mode);
3346 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
3347 if we failed the caller should emit a normal call. */
3350 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3352 tree fndecl = get_callee_fndecl (exp);
3353 tree arglist = TREE_OPERAND (exp, 1);
3354 if (validate_arglist (arglist,
3355 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* SLEN is the constant length of the source string, if known; LEN is
   the requested copy count (third argument).  */
3357 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
3358 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3359 tree result = fold_builtin_strncpy (fndecl, arglist, slen);
/* Expand and discard side effects from each COMPOUND_EXPR operand 0,
   then expand the remaining value.  */
3363 while (TREE_CODE (result) == COMPOUND_EXPR)
3365 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3367 result = TREE_OPERAND (result, 1);
3369 return expand_expr (result, target, mode, EXPAND_NORMAL);
3372 /* We must be passed a constant len and src parameter. */
3373 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
/* Count the terminating NUL as part of the source string.  */
3376 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3378 /* We're required to pad with trailing zeros if the requested
3379 len is greater than strlen(s2)+1. In that case try to
3380 use store_by_pieces, if it fails, punt. */
3381 if (tree_int_cst_lt (slen, len))
3383 tree dest = TREE_VALUE (arglist);
3384 unsigned int dest_align
3385 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3386 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
/* Give up unless the source is a constant string, the destination
   alignment is known, and the target can store LEN bytes by pieces.  */
3389 if (!p || dest_align == 0 || !host_integerp (len, 1)
3390 || !can_store_by_pieces (tree_low_cst (len, 1),
3391 builtin_strncpy_read_str,
3392 (void *) p, dest_align))
/* Emit the piecewise stores; builtin_strncpy_read_str supplies the
   string bytes followed by zero padding.  */
3395 dest_mem = get_memory_rtx (dest, len);
3396 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3397 builtin_strncpy_read_str,
3398 (void *) p, dest_align, 0);
/* Return the destination pointer, converted to the pointer mode.  */
3399 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3400 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3407 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3408 bytes from constant string DATA + OFFSET and return it as target
3412 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3413 enum machine_mode mode)
3415 const char *c = (const char *) data;
/* Build a MODE-sized buffer filled with the single memset byte; OFFSET
   is irrelevant because every position holds the same value.  */
3416 char *p = alloca (GET_MODE_SIZE (mode));
3418 memset (p, *c, GET_MODE_SIZE (mode));
3420 return c_readstr (p, mode);
3423 /* Callback routine for store_by_pieces. Return the RTL of a register
3424 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3425 char value given in the RTL register data. For example, if mode is
3426 4 bytes wide, return the RTL for 0x01010101*data. */
3429 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3430 enum machine_mode mode)
3436 size = GET_MODE_SIZE (mode);
/* COEFF is the constant 0x0101...01 in MODE; multiplying the byte
   value by it replicates that byte into every byte position.  */
3441 memset (p, 1, size);
3442 coeff = c_readstr (p, mode);
/* DATA is really an rtx holding the fill byte; widen it (zero-extend)
   to MODE before multiplying.  */
3444 target = convert_to_mode (mode, (rtx) data, 1);
3445 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3446 return force_reg (mode, target);
3449 /* Expand expression EXP, which is a call to the memset builtin. Return 0
3450 if we failed the caller should emit a normal call, otherwise try to get
3451 the result in TARGET, if convenient (and in mode MODE if that's
3455 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode,
3458 if (!validate_arglist (arglist,
3459 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3463 tree dest = TREE_VALUE (arglist);
3464 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3465 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3467 enum built_in_function fcode;
3469 unsigned int dest_align;
3470 rtx dest_mem, dest_addr, len_rtx;
3472 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3474 /* If DEST is not a pointer type, don't do this
3475 operation in-line. */
3476 if (dest_align == 0)
3479 /* If the LEN parameter is zero, return DEST. */
3480 if (integer_zerop (len))
3482 /* Evaluate and ignore VAL in case it has side-effects. */
3483 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3484 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3487 /* Stabilize the arguments in case we fail. */
3488 dest = builtin_save_expr (dest);
3489 val = builtin_save_expr (val);
3490 len = builtin_save_expr (len);
3492 len_rtx = expand_normal (len);
3493 dest_mem = get_memory_rtx (dest, len);
/* Non-constant fill value: reduce it to an unsigned char and either
   store by pieces (replicating the byte via builtin_memset_gen_str)
   or use the target's setmem pattern.  */
3495 if (TREE_CODE (val) != INTEGER_CST)
3499 val_rtx = expand_normal (val);
3500 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3503 /* Assume that we can memset by pieces if we can store the
3504 * the coefficients by pieces (in the required modes).
3505 * We can't pass builtin_memset_gen_str as that emits RTL. */
3507 if (host_integerp (len, 1)
3508 && !(optimize_size && tree_low_cst (len, 1) > 1)
3509 && can_store_by_pieces (tree_low_cst (len, 1),
3510 builtin_memset_read_str, &c, dest_align))
3512 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3514 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3515 builtin_memset_gen_str, val_rtx, dest_align, 0);
3517 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
/* Success: return the destination pointer in ptr_mode.  */
3521 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3522 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value: fold it to a host char C, punting to the
   library call if that is not possible.  */
3526 if (target_char_cast (val, &c))
3531 if (host_integerp (len, 1)
3532 && !(optimize_size && tree_low_cst (len, 1) > 1)
3533 && can_store_by_pieces (tree_low_cst (len, 1),
3534 builtin_memset_read_str, &c, dest_align))
3535 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3536 builtin_memset_read_str, &c, dest_align, 0);
3537 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3541 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3542 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Fill byte is zero: this is a block clear; preserve the tail-call
   status of the original call expression.  */
3546 set_mem_align (dest_mem, dest_align);
3547 dest_addr = clear_storage (dest_mem, len_rtx,
3548 CALL_EXPR_TAILCALL (orig_exp)
3549 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
3553 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3554 dest_addr = convert_memory_address (ptr_mode, dest_addr);
/* Inline expansion failed: rebuild the call with the stabilized
   arguments -- as memset or bzero, matching the original callee so a
   bzero call falls back to bzero -- and emit it as a real call.  */
3560 fndecl = get_callee_fndecl (orig_exp);
3561 fcode = DECL_FUNCTION_CODE (fndecl);
3562 gcc_assert (fcode == BUILT_IN_MEMSET || fcode == BUILT_IN_BZERO);
3563 arglist = build_tree_list (NULL_TREE, len);
3564 if (fcode == BUILT_IN_MEMSET)
3565 arglist = tree_cons (NULL_TREE, val, arglist);
3566 arglist = tree_cons (NULL_TREE, dest, arglist);
3567 fn = build_function_call_expr (fndecl, arglist);
3568 if (TREE_CODE (fn) == CALL_EXPR)
3569 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3570 return expand_call (fn, target, target == const0_rtx);
3574 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3575 if we failed the caller should emit a normal call. */
3578 expand_builtin_bzero (tree exp)
3580 tree arglist = TREE_OPERAND (exp, 1);
3581 tree dest, size, newarglist;
3583 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3586 dest = TREE_VALUE (arglist);
3587 size = TREE_VALUE (TREE_CHAIN (arglist));
3589 /* New argument list transforming bzero(ptr x, int y) to
3590 memset(ptr x, int 0, size_t y). This is done this way
3591 so that if it isn't expanded inline, we fallback to
3592 calling bzero instead of memset. */
3594 newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3595 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3596 newarglist = tree_cons (NULL_TREE, dest, newarglist);
/* Pass EXP as the original call so memset expansion can recover the
   real callee (bzero) and its tail-call flag; const0_rtx marks the
   return value as unused.  */
3598 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode, exp);
3601 /* Expand expression EXP, which is a call to the memcmp built-in function.
3602 ARGLIST is the argument list for this call. Return 0 if we failed and the
3603 caller should emit a normal call, otherwise try to get the result in
3604 TARGET, if convenient (and in mode MODE, if that's convenient). */
3607 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3608 enum machine_mode mode)
3610 if (!validate_arglist (arglist,
3611 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Try to simplify the call at the tree level first.  */
3615 tree result = fold_builtin_memcmp (arglist);
3617 return expand_expr (result, target, mode, EXPAND_NORMAL);
3620 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3622 tree arg1 = TREE_VALUE (arglist);
3623 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3624 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3625 rtx arg1_rtx, arg2_rtx, arg3_rtx;
/* Known alignments of the two blocks, in bytes (0 if unknown).  */
3630 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3632 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3633 enum machine_mode insn_mode;
/* Prefer the cmpmem pattern when available; otherwise fall back to
   cmpstrn.  */
3635 #ifdef HAVE_cmpmemsi
3637 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3640 #ifdef HAVE_cmpstrnsi
3642 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3647 /* If we don't have POINTER_TYPE, call the function. */
3648 if (arg1_align == 0 || arg2_align == 0)
3651 /* Make a place to write the result of the instruction. */
3654 && REG_P (result) && GET_MODE (result) == insn_mode
3655 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3656 result = gen_reg_rtx (insn_mode);
3658 arg1_rtx = get_memory_rtx (arg1, len);
3659 arg2_rtx = get_memory_rtx (arg2, len);
3660 arg3_rtx = expand_normal (len);
3662 /* Set MEM_SIZE as appropriate. */
3663 if (GET_CODE (arg3_rtx) == CONST_INT)
3665 set_mem_size (arg1_rtx, arg3_rtx);
3666 set_mem_size (arg2_rtx, arg3_rtx);
/* The final operand tells the pattern the worst-case alignment of
   either block.  */
3669 #ifdef HAVE_cmpmemsi
3671 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3672 GEN_INT (MIN (arg1_align, arg2_align)));
3675 #ifdef HAVE_cmpstrnsi
3677 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3678 GEN_INT (MIN (arg1_align, arg2_align)));
/* No compare pattern produced an insn: emit an explicit library call
   to memcmp instead.  */
3686 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3687 TYPE_MODE (integer_type_node), 3,
3688 XEXP (arg1_rtx, 0), Pmode,
3689 XEXP (arg2_rtx, 0), Pmode,
3690 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3691 TYPE_UNSIGNED (sizetype)),
3692 TYPE_MODE (sizetype));
3694 /* Return the value in the proper mode for this function. */
3695 mode = TYPE_MODE (TREE_TYPE (exp));
3696 if (GET_MODE (result) == mode)
3698 else if (target != 0)
3700 convert_move (target, result, 0);
3704 return convert_to_mode (mode, result, 0);
3711 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3712 if we failed the caller should emit a normal call, otherwise try to get
3713 the result in TARGET, if convenient. */
3716 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3718 tree arglist = TREE_OPERAND (exp, 1);
3720 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Try to simplify the call at the tree level first.  */
3724 tree result = fold_builtin_strcmp (arglist);
3726 return expand_expr (result, target, mode, EXPAND_NORMAL);
3729 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
/* Only attempt inline expansion if the target actually provides an
   SImode string-compare pattern.  */
3730 if (cmpstr_optab[SImode] != CODE_FOR_nothing
3731 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
3733 rtx arg1_rtx, arg2_rtx;
3734 rtx result, insn = NULL_RTX;
3737 tree arg1 = TREE_VALUE (arglist);
3738 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
/* Known alignments of the two strings, in bytes (0 if unknown).  */
3740 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3742 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3744 /* If we don't have POINTER_TYPE, call the function. */
3745 if (arg1_align == 0 || arg2_align == 0)
3748 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3749 arg1 = builtin_save_expr (arg1);
3750 arg2 = builtin_save_expr (arg2);
3752 arg1_rtx = get_memory_rtx (arg1, NULL);
3753 arg2_rtx = get_memory_rtx (arg2, NULL);
3755 #ifdef HAVE_cmpstrsi
3756 /* Try to call cmpstrsi. */
3759 enum machine_mode insn_mode
3760 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3762 /* Make a place to write the result of the instruction. */
3765 && REG_P (result) && GET_MODE (result) == insn_mode
3766 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3767 result = gen_reg_rtx (insn_mode);
3769 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3770 GEN_INT (MIN (arg1_align, arg2_align)));
3773 #ifdef HAVE_cmpstrnsi
3774 /* Try to determine at least one length and call cmpstrnsi. */
3775 if (!insn && HAVE_cmpstrnsi)
3780 enum machine_mode insn_mode
3781 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Compile-time lengths of the strings, if known; the +1 below extends
   each length to include the terminating NUL.  */
3782 tree len1 = c_strlen (arg1, 1);
3783 tree len2 = c_strlen (arg2, 1);
3786 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3788 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3790 /* If we don't have a constant length for the first, use the length
3791 of the second, if we know it. We don't require a constant for
3792 this case; some cost analysis could be done if both are available
3793 but neither is constant. For now, assume they're equally cheap,
3794 unless one has side effects. If both strings have constant lengths,
3801 else if (TREE_SIDE_EFFECTS (len1))
3803 else if (TREE_SIDE_EFFECTS (len2))
3805 else if (TREE_CODE (len1) != INTEGER_CST)
3807 else if (TREE_CODE (len2) != INTEGER_CST)
3809 else if (tree_int_cst_lt (len1, len2))
3814 /* If both arguments have side effects, we cannot optimize. */
3815 if (!len || TREE_SIDE_EFFECTS (len))
3818 arg3_rtx = expand_normal (len);
3820 /* Make a place to write the result of the instruction. */
3823 && REG_P (result) && GET_MODE (result) == insn_mode
3824 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3825 result = gen_reg_rtx (insn_mode);
3827 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3828 GEN_INT (MIN (arg1_align, arg2_align)));
3836 /* Return the value in the proper mode for this function. */
3837 mode = TYPE_MODE (TREE_TYPE (exp));
3838 if (GET_MODE (result) == mode)
3841 return convert_to_mode (mode, result, 0);
3842 convert_move (target, result, 0);
3846 /* Expand the library call ourselves using a stabilized argument
3847 list to avoid re-evaluating the function's arguments twice. */
3848 #ifdef HAVE_cmpstrnsi
/* Rebuild the call with the saved arguments, carrying over the
   tail-call flag from the original expression.  */
3851 arglist = build_tree_list (NULL_TREE, arg2);
3852 arglist = tree_cons (NULL_TREE, arg1, arglist);
3853 fndecl = get_callee_fndecl (exp);
3854 fn = build_function_call_expr (fndecl, arglist);
3855 if (TREE_CODE (fn) == CALL_EXPR)
3856 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3857 return expand_call (fn, target, target == const0_rtx);
3863 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3864 if we failed the caller should emit a normal call, otherwise try to get
3865 the result in TARGET, if convenient. */
3868 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3870 tree arglist = TREE_OPERAND (exp, 1);
3872 if (!validate_arglist (arglist,
3873 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Try to simplify the call at the tree level first.  */
3877 tree result = fold_builtin_strncmp (arglist);
3879 return expand_expr (result, target, mode, EXPAND_NORMAL);
3882 /* If c_strlen can determine an expression for one of the string
3883 lengths, and it doesn't have side effects, then emit cmpstrnsi
3884 using length MIN(strlen(string)+1, arg3). */
3885 #ifdef HAVE_cmpstrnsi
3888 tree arg1 = TREE_VALUE (arglist);
3889 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3890 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3891 tree len, len1, len2;
3892 rtx arg1_rtx, arg2_rtx, arg3_rtx;
/* Known alignments of the two strings, in bytes (0 if unknown).  */
3897 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3899 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3900 enum machine_mode insn_mode
3901 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Compile-time lengths of the strings, if known; the +1 below extends
   each length to include the terminating NUL.  */
3903 len1 = c_strlen (arg1, 1);
3904 len2 = c_strlen (arg2, 1);
3907 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3909 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3911 /* If we don't have a constant length for the first, use the length
3912 of the second, if we know it. We don't require a constant for
3913 this case; some cost analysis could be done if both are available
3914 but neither is constant. For now, assume they're equally cheap,
3915 unless one has side effects. If both strings have constant lengths,
3922 else if (TREE_SIDE_EFFECTS (len1))
3924 else if (TREE_SIDE_EFFECTS (len2))
3926 else if (TREE_CODE (len1) != INTEGER_CST)
3928 else if (TREE_CODE (len2) != INTEGER_CST)
3930 else if (tree_int_cst_lt (len1, len2))
3935 /* If both arguments have side effects, we cannot optimize. */
3936 if (!len || TREE_SIDE_EFFECTS (len))
3939 /* The actual new length parameter is MIN(len,arg3). */
3940 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
3941 fold_convert (TREE_TYPE (len), arg3));
3943 /* If we don't have POINTER_TYPE, call the function. */
3944 if (arg1_align == 0 || arg2_align == 0)
3947 /* Make a place to write the result of the instruction. */
3950 && REG_P (result) && GET_MODE (result) == insn_mode
3951 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3952 result = gen_reg_rtx (insn_mode);
3954 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3955 arg1 = builtin_save_expr (arg1);
3956 arg2 = builtin_save_expr (arg2);
3957 len = builtin_save_expr (len);
3959 arg1_rtx = get_memory_rtx (arg1, len);
3960 arg2_rtx = get_memory_rtx (arg2, len);
3961 arg3_rtx = expand_normal (len);
3962 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3963 GEN_INT (MIN (arg1_align, arg2_align)));
3968 /* Return the value in the proper mode for this function. */
3969 mode = TYPE_MODE (TREE_TYPE (exp));
3970 if (GET_MODE (result) == mode)
3973 return convert_to_mode (mode, result, 0);
3974 convert_move (target, result, 0);
3978 /* Expand the library call ourselves using a stabilized argument
3979 list to avoid re-evaluating the function's arguments twice. */
/* Rebuild the call with the saved arguments, carrying over the
   tail-call flag from the original expression.  */
3980 arglist = build_tree_list (NULL_TREE, len);
3981 arglist = tree_cons (NULL_TREE, arg2, arglist);
3982 arglist = tree_cons (NULL_TREE, arg1, arglist);
3983 fndecl = get_callee_fndecl (exp);
3984 fn = build_function_call_expr (fndecl, arglist);
3985 if (TREE_CODE (fn) == CALL_EXPR)
3986 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3987 return expand_call (fn, target, target == const0_rtx);
3993 /* Expand expression EXP, which is a call to the strcat builtin.
3994 Return 0 if we failed the caller should emit a normal call,
3995 otherwise try to get the result in TARGET, if convenient. */
3998 expand_builtin_strcat (tree fndecl, tree arglist, rtx target, enum machine_mode mode)
4000 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4004 tree dst = TREE_VALUE (arglist),
4005 src = TREE_VALUE (TREE_CHAIN (arglist));
4006 const char *p = c_getstr (src);
4008 /* If the string length is zero, return the dst parameter. */
4009 if (p && *p == '\0')
4010 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4014 /* See if we can store by pieces into (dst + strlen(dst)). */
4015 tree newsrc, newdst,
4016 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4019 /* Stabilize the argument list. */
4020 newsrc = builtin_save_expr (src);
4022 arglist = build_tree_list (NULL_TREE, newsrc);
4024 arglist = TREE_CHAIN (arglist); /* Reusing arglist if safe. */
4026 dst = builtin_save_expr (dst);
4030 /* Create strlen (dst). */
4032 build_function_call_expr (strlen_fn,
4033 build_tree_list (NULL_TREE, dst));
4034 /* Create (dst + (cast) strlen (dst)). */
4035 newdst = fold_convert (TREE_TYPE (dst), newdst);
4036 newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4038 newdst = builtin_save_expr (newdst);
4039 arglist = tree_cons (NULL_TREE, newdst, arglist);
/* Expand strcpy (dst + strlen (dst), src).  On failure discard the
   whole recorded insn sequence so no partial RTL is emitted.  */
4041 if (!expand_builtin_strcpy (fndecl, arglist, target, mode))
4043 end_sequence (); /* Stop sequence. */
4047 /* Output the entire sequence. */
4048 insns = get_insns ();
/* strcat returns its first argument.  */
4052 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4059 /* Expand expression EXP, which is a call to the strncat builtin.
4060 Return 0 if we failed the caller should emit a normal call,
4061 otherwise try to get the result in TARGET, if convenient. */
4064 expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
4066 if (validate_arglist (arglist,
4067 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Inline expansion is attempted only via tree-level folding; if the
   fold fails the caller emits a normal call.  */
4069 tree result = fold_builtin_strncat (arglist);
4071 return expand_expr (result, target, mode, EXPAND_NORMAL);
4076 /* Expand expression EXP, which is a call to the strspn builtin.
4077 Return 0 if we failed the caller should emit a normal call,
4078 otherwise try to get the result in TARGET, if convenient. */
4081 expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
4083 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Inline expansion is attempted only via tree-level folding; if the
   fold fails the caller emits a normal call.  */
4085 tree result = fold_builtin_strspn (arglist);
4087 return expand_expr (result, target, mode, EXPAND_NORMAL);
4092 /* Expand expression EXP, which is a call to the strcspn builtin.
4093 Return 0 if we failed the caller should emit a normal call,
4094 otherwise try to get the result in TARGET, if convenient. */
4097 expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
4099 if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Inline expansion is attempted only via tree-level folding; if the
   fold fails the caller emits a normal call.  */
4101 tree result = fold_builtin_strcspn (arglist);
4103 return expand_expr (result, target, mode, EXPAND_NORMAL);
4108 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4109 if that's convenient. */
4112 expand_builtin_saveregs (void)
4116 /* Don't do __builtin_saveregs more than once in a function.
4117 Save the result of the first call and reuse it. */
4118 if (saveregs_value != 0)
4119 return saveregs_value;
4121 /* When this function is called, it means that registers must be
4122 saved on entry to this function. So we migrate the call to the
4123 first insn of this function. */
4127 /* Do whatever the machine needs done in this case. */
4128 val = targetm.calls.expand_builtin_saveregs ();
/* Cache the result so later calls in this function reuse it.  */
4133 saveregs_value = val;
4135 /* Put the insns after the NOTE that starts the function. If this
4136 is inside a start_sequence, make the outer-level insn chain current, so
4137 the code is placed at the start of the function. */
4138 push_topmost_sequence ();
4139 emit_insn_after (seq, entry_of_function ());
4140 pop_topmost_sequence ();
4145 /* __builtin_args_info (N) returns word N of the arg space info
4146 for the current function. The number and meanings of words
4147 is controlled by the definition of CUMULATIVE_ARGS. */
4150 expand_builtin_args_info (tree arglist)
4152 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4153 int *word_ptr = (int *) ¤t_function_args_info;
4155 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4159 if (!host_integerp (TREE_VALUE (arglist), 0))
4160 error ("argument of %<__builtin_args_info%> must be constant");
4163 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
4165 if (wordnum < 0 || wordnum >= nwords)
4166 error ("argument of %<__builtin_args_info%> out of range");
4168 return GEN_INT (word_ptr[wordnum]);