1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
41 #include "typeclass.h"
46 #include "langhooks.h"
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
68 const char *const built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases. */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 /* Trigonometric and mathematical constants used in builtin folding. */
83 static bool builtin_dconsts_init = 0;
84 static REAL_VALUE_TYPE dconstpi;
85 static REAL_VALUE_TYPE dconste;
87 static int get_pointer_alignment (tree, unsigned int);
88 static tree c_strlen (tree, int);
89 static const char *c_getstr (tree);
90 static rtx c_readstr (const char *, enum machine_mode);
91 static int target_char_cast (tree, char *);
92 static rtx get_memory_rtx (tree);
93 static int apply_args_size (void);
94 static int apply_result_size (void);
95 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
96 static rtx result_vector (int, rtx);
98 static rtx expand_builtin_setjmp (tree, rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static void expand_errno_check (tree, rtx);
107 static rtx expand_builtin_mathfn (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
109 static rtx expand_builtin_constant_p (tree, enum machine_mode);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (tree);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
123 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
125 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_bcopy (tree);
127 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
131 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_bzero (tree);
135 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
139 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
140 static rtx expand_builtin_alloca (tree, rtx);
141 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static rtx expand_builtin_fputs (tree, int, int);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_classify_type (tree);
149 static tree fold_builtin_inf (tree, int);
150 static tree fold_builtin_nan (tree, tree, int);
151 static int validate_arglist (tree, ...);
152 static tree fold_trunc_transparent_mathfn (tree);
153 static bool readonly_data_expr (tree);
154 static rtx expand_builtin_fabs (tree, rtx, rtx);
155 static rtx expand_builtin_cabs (tree, rtx);
156 static void init_builtin_dconsts (void);
157 static tree fold_builtin_cabs (tree, tree, tree);
159 /* Initialize mathematical constants for constant folding builtins.
160 These constants need to be given to at least 160 bits precision. */
163 init_builtin_dconsts (void)
165 real_from_string (&dconstpi,
166 "3.1415926535897932384626433832795028841971693993751058209749445923078");
167 real_from_string (&dconste,
168 "2.7182818284590452353602874713526624977572470936999595749669676277241");
170 builtin_dconsts_init = true;
173 /* Return the alignment in bits of EXP, a pointer valued expression.
174 But don't return more than MAX_ALIGN no matter what.
175 The alignment returned is, by default, the alignment of the thing that
176 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
178 Otherwise, look at the expression to see if we can do better, i.e., if the
179 expression is actually pointing at an object whose alignment is tighter. */
182 get_pointer_alignment (tree exp, unsigned int max_align)
184 unsigned int align, inner;
186 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
189 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
190 align = MIN (align, max_align);
194 switch (TREE_CODE (exp))
198 case NON_LVALUE_EXPR:
199 exp = TREE_OPERAND (exp, 0);
200 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
203 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
204 align = MIN (inner, max_align);
208 /* If sum of pointer + int, restrict our maximum alignment to that
209 imposed by the integer. If not, we can't do any better than
211 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
214 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
215 & (max_align / BITS_PER_UNIT - 1))
219 exp = TREE_OPERAND (exp, 0);
223 /* See what we are pointing at and look at its alignment. */
224 exp = TREE_OPERAND (exp, 0);
225 if (TREE_CODE (exp) == FUNCTION_DECL)
226 align = FUNCTION_BOUNDARY;
227 else if (DECL_P (exp))
228 align = DECL_ALIGN (exp);
229 #ifdef CONSTANT_ALIGNMENT
230 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
231 align = CONSTANT_ALIGNMENT (exp, align);
233 return MIN (align, max_align);
241 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
242 way, because it could contain a zero byte in the middle.
243 TREE_STRING_LENGTH is the size of the character array, not the string.
245 ONLY_VALUE should be nonzero if the result is not going to be emitted
246 into the instruction stream and zero if it is going to be expanded.
247 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
248 is returned, otherwise NULL, since
249 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
250 evaluate the side-effects.
252 The value returned is of type `ssizetype'.
254 Unfortunately, string_constant can't access the values of const char
255 arrays with initializers, so neither can we do so here. */
258 c_strlen (tree src, int only_value)
261 HOST_WIDE_INT offset;
266 if (TREE_CODE (src) == COND_EXPR
267 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
271 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
272 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
273 if (tree_int_cst_equal (len1, len2))
277 if (TREE_CODE (src) == COMPOUND_EXPR
278 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
279 return c_strlen (TREE_OPERAND (src, 1), only_value);
281 src = string_constant (src, &offset_node);
285 max = TREE_STRING_LENGTH (src) - 1;
286 ptr = TREE_STRING_POINTER (src);
288 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
290 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
291 compute the offset to the following null if we don't know where to
292 start searching for it. */
295 for (i = 0; i < max; i++)
299 /* We don't know the starting offset, but we do know that the string
300 has no internal zero bytes. We can assume that the offset falls
301 within the bounds of the string; otherwise, the programmer deserves
302 what he gets. Subtract the offset from the length of the string,
303 and return that. This would perhaps not be valid if we were dealing
304 with named arrays in addition to literal string constants. */
306 return size_diffop (size_int (max), offset_node);
309 /* We have a known offset into the string. Start searching there for
310 a null character if we can represent it as a single HOST_WIDE_INT. */
311 if (offset_node == 0)
313 else if (! host_integerp (offset_node, 0))
316 offset = tree_low_cst (offset_node, 0);
318 /* If the offset is known to be out of bounds, warn, and call strlen at
320 if (offset < 0 || offset > max)
322 warning ("offset outside bounds of constant string");
326 /* Use strlen to search for the first zero byte. Since any strings
327 constructed with build_string will have nulls appended, we win even
328 if we get handed something like (char[4])"abcd".
330 Since OFFSET is our starting index into the string, no further
331 calculation is needed. */
332 return ssize_int (strlen (ptr + offset));
335 /* Return a char pointer for a C string if it is a string constant
336 or sum of string constant and integer constant. */
343 src = string_constant (src, &offset_node);
347 if (offset_node == 0)
348 return TREE_STRING_POINTER (src);
349 else if (!host_integerp (offset_node, 1)
350 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
353 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
356 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
357 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
360 c_readstr (const char *str, enum machine_mode mode)
366 if (GET_MODE_CLASS (mode) != MODE_INT)
371 for (i = 0; i < GET_MODE_SIZE (mode); i++)
374 if (WORDS_BIG_ENDIAN)
375 j = GET_MODE_SIZE (mode) - i - 1;
376 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
377 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
378 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
380 if (j > 2 * HOST_BITS_PER_WIDE_INT)
383 ch = (unsigned char) str[i];
384 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
386 return immed_double_const (c[0], c[1], mode);
389 /* Cast a target constant CST to target CHAR and if that value fits into
390 host char type, return zero and put that value into variable pointed by
394 target_char_cast (tree cst, char *p)
396 unsigned HOST_WIDE_INT val, hostval;
398 if (!host_integerp (cst, 1)
399 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
402 val = tree_low_cst (cst, 1);
403 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
404 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
407 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
408 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
417 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
418 times to get the address of either a higher stack frame, or a return
419 address located within it (depending on FNDECL_CODE). */
422 expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
427 /* Some machines need special handling before we can access
428 arbitrary frames. For example, on the sparc, we must first flush
429 all register windows to the stack. */
430 #ifdef SETUP_FRAME_ADDRESSES
432 SETUP_FRAME_ADDRESSES ();
435 /* On the sparc, the return address is not in the frame, it is in a
436 register. There is no way to access it off of the current frame
437 pointer, but it can be accessed off the previous frame pointer by
438 reading the value from the register window save area. */
439 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
440 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
444 /* Scan back COUNT frames to the specified frame. */
445 for (i = 0; i < count; i++)
447 /* Assume the dynamic chain pointer is in the word that the
448 frame address points to, unless otherwise specified. */
449 #ifdef DYNAMIC_CHAIN_ADDRESS
450 tem = DYNAMIC_CHAIN_ADDRESS (tem);
452 tem = memory_address (Pmode, tem);
453 tem = gen_rtx_MEM (Pmode, tem);
454 set_mem_alias_set (tem, get_frame_alias_set ());
455 tem = copy_to_reg (tem);
458 /* For __builtin_frame_address, return what we've got. */
459 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
462 /* For __builtin_return_address, Get the return address from that
464 #ifdef RETURN_ADDR_RTX
465 tem = RETURN_ADDR_RTX (count, tem);
467 tem = memory_address (Pmode,
468 plus_constant (tem, GET_MODE_SIZE (Pmode)));
469 tem = gen_rtx_MEM (Pmode, tem);
470 set_mem_alias_set (tem, get_frame_alias_set ());
475 /* Alias set used for setjmp buffer. */
476 static HOST_WIDE_INT setjmp_alias_set = -1;
478 /* Construct the leading half of a __builtin_setjmp call. Control will
479 return to RECEIVER_LABEL. This is used directly by sjlj exception
483 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
485 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
489 if (setjmp_alias_set == -1)
490 setjmp_alias_set = new_alias_set ();
492 #ifdef POINTERS_EXTEND_UNSIGNED
493 if (GET_MODE (buf_addr) != Pmode)
494 buf_addr = convert_memory_address (Pmode, buf_addr);
497 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
501 /* We store the frame pointer and the address of receiver_label in
502 the buffer and use the rest of it for the stack save area, which
503 is machine-dependent. */
505 #ifndef BUILTIN_SETJMP_FRAME_VALUE
506 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
509 mem = gen_rtx_MEM (Pmode, buf_addr);
510 set_mem_alias_set (mem, setjmp_alias_set);
511 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
513 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
514 set_mem_alias_set (mem, setjmp_alias_set);
516 emit_move_insn (validize_mem (mem),
517 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
519 stack_save = gen_rtx_MEM (sa_mode,
520 plus_constant (buf_addr,
521 2 * GET_MODE_SIZE (Pmode)));
522 set_mem_alias_set (stack_save, setjmp_alias_set);
523 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
525 /* If there is further processing to do, do it. */
526 #ifdef HAVE_builtin_setjmp_setup
527 if (HAVE_builtin_setjmp_setup)
528 emit_insn (gen_builtin_setjmp_setup (buf_addr));
531 /* Tell optimize_save_area_alloca that extra work is going to
532 need to go on during alloca. */
533 current_function_calls_setjmp = 1;
535 /* Set this so all the registers get saved in our frame; we need to be
536 able to copy the saved values for any registers from frames we unwind. */
537 current_function_has_nonlocal_label = 1;
540 /* Construct the trailing part of a __builtin_setjmp call.
541 This is used directly by sjlj exception handling code. */
544 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
546 /* Clobber the FP when we get here, so we have to make sure it's
547 marked as used by this function. */
548 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
550 /* Mark the static chain as clobbered here so life information
551 doesn't get messed up for it. */
552 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
554 /* Now put in the code to restore the frame pointer, and argument
555 pointer, if needed. The code below is from expand_end_bindings
556 in stmt.c; see detailed documentation there. */
557 #ifdef HAVE_nonlocal_goto
558 if (! HAVE_nonlocal_goto)
560 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
562 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
563 if (fixed_regs[ARG_POINTER_REGNUM])
565 #ifdef ELIMINABLE_REGS
567 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
569 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
570 if (elim_regs[i].from == ARG_POINTER_REGNUM
571 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
574 if (i == ARRAY_SIZE (elim_regs))
577 /* Now restore our arg pointer from the address at which it
578 was saved in our stack frame. */
579 emit_move_insn (virtual_incoming_args_rtx,
580 copy_to_reg (get_arg_pointer_save_area (cfun)));
585 #ifdef HAVE_builtin_setjmp_receiver
586 if (HAVE_builtin_setjmp_receiver)
587 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
590 #ifdef HAVE_nonlocal_goto_receiver
591 if (HAVE_nonlocal_goto_receiver)
592 emit_insn (gen_nonlocal_goto_receiver ());
597 /* @@@ This is a kludge. Not all machine descriptions define a blockage
598 insn, but we must not allow the code we just generated to be reordered
599 by scheduling. Specifically, the update of the frame pointer must
600 happen immediately, not later. So emit an ASM_INPUT to act as blockage
602 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
605 /* __builtin_setjmp is passed a pointer to an array of five words (not
606 all will be used on all machines). It operates similarly to the C
607 library function of the same name, but is more efficient. Much of
608 the code below (and for longjmp) is copied from the handling of
611 NOTE: This is intended for use by GNAT and the exception handling
612 scheme in the compiler and will only work in the method used by
616 expand_builtin_setjmp (tree arglist, rtx target)
618 rtx buf_addr, next_lab, cont_lab;
620 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
623 if (target == 0 || GET_CODE (target) != REG
624 || REGNO (target) < FIRST_PSEUDO_REGISTER)
625 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
627 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
629 next_lab = gen_label_rtx ();
630 cont_lab = gen_label_rtx ();
632 expand_builtin_setjmp_setup (buf_addr, next_lab);
634 /* Set TARGET to zero and branch to the continue label. */
635 emit_move_insn (target, const0_rtx);
636 emit_jump_insn (gen_jump (cont_lab));
638 emit_label (next_lab);
640 expand_builtin_setjmp_receiver (next_lab);
642 /* Set TARGET to one. */
643 emit_move_insn (target, const1_rtx);
644 emit_label (cont_lab);
646 /* Tell flow about the strange goings on. Putting `next_lab' on
647 `nonlocal_goto_handler_labels' to indicates that function
648 calls may traverse the arc back to this label. */
650 current_function_has_nonlocal_label = 1;
651 nonlocal_goto_handler_labels
652 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
657 /* __builtin_longjmp is passed a pointer to an array of five words (not
658 all will be used on all machines). It operates similarly to the C
659 library function of the same name, but is more efficient. Much of
660 the code below is copied from the handling of non-local gotos.
662 NOTE: This is intended for use by GNAT and the exception handling
663 scheme in the compiler and will only work in the method used by
667 expand_builtin_longjmp (rtx buf_addr, rtx value)
669 rtx fp, lab, stack, insn, last;
670 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
672 if (setjmp_alias_set == -1)
673 setjmp_alias_set = new_alias_set ();
675 #ifdef POINTERS_EXTEND_UNSIGNED
676 if (GET_MODE (buf_addr) != Pmode)
677 buf_addr = convert_memory_address (Pmode, buf_addr);
680 buf_addr = force_reg (Pmode, buf_addr);
682 /* We used to store value in static_chain_rtx, but that fails if pointers
683 are smaller than integers. We instead require that the user must pass
684 a second argument of 1, because that is what builtin_setjmp will
685 return. This also makes EH slightly more efficient, since we are no
686 longer copying around a value that we don't care about. */
687 if (value != const1_rtx)
690 current_function_calls_longjmp = 1;
692 last = get_last_insn ();
693 #ifdef HAVE_builtin_longjmp
694 if (HAVE_builtin_longjmp)
695 emit_insn (gen_builtin_longjmp (buf_addr));
699 fp = gen_rtx_MEM (Pmode, buf_addr);
700 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
701 GET_MODE_SIZE (Pmode)));
703 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
704 2 * GET_MODE_SIZE (Pmode)));
705 set_mem_alias_set (fp, setjmp_alias_set);
706 set_mem_alias_set (lab, setjmp_alias_set);
707 set_mem_alias_set (stack, setjmp_alias_set);
709 /* Pick up FP, label, and SP from the block and jump. This code is
710 from expand_goto in stmt.c; see there for detailed comments. */
711 #if HAVE_nonlocal_goto
712 if (HAVE_nonlocal_goto)
713 /* We have to pass a value to the nonlocal_goto pattern that will
714 get copied into the static_chain pointer, but it does not matter
715 what that value is, because builtin_setjmp does not use it. */
716 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
720 lab = copy_to_reg (lab);
722 emit_move_insn (hard_frame_pointer_rtx, fp);
723 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
725 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
726 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
727 emit_indirect_jump (lab);
731 /* Search backwards and mark the jump insn as a non-local goto.
732 Note that this precludes the use of __builtin_longjmp to a
733 __builtin_setjmp target in the same function. However, we've
734 already cautioned the user that these functions are for
735 internal exception handling use only. */
736 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
740 if (GET_CODE (insn) == JUMP_INSN)
742 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
746 else if (GET_CODE (insn) == CALL_INSN)
751 /* Expand a call to __builtin_prefetch. For a target that does not support
752 data prefetch, evaluate the memory address argument in case it has side
756 expand_builtin_prefetch (tree arglist)
758 tree arg0, arg1, arg2;
761 if (!validate_arglist (arglist, POINTER_TYPE, 0))
764 arg0 = TREE_VALUE (arglist);
765 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
766 zero (read) and argument 2 (locality) defaults to 3 (high degree of
768 if (TREE_CHAIN (arglist))
770 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
771 if (TREE_CHAIN (TREE_CHAIN (arglist)))
772 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
774 arg2 = build_int_2 (3, 0);
778 arg1 = integer_zero_node;
779 arg2 = build_int_2 (3, 0);
782 /* Argument 0 is an address. */
783 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
785 /* Argument 1 (read/write flag) must be a compile-time constant int. */
786 if (TREE_CODE (arg1) != INTEGER_CST)
788 error ("second arg to `__builtin_prefetch' must be a constant");
789 arg1 = integer_zero_node;
791 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
792 /* Argument 1 must be either zero or one. */
793 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
795 warning ("invalid second arg to __builtin_prefetch; using zero");
799 /* Argument 2 (locality) must be a compile-time constant int. */
800 if (TREE_CODE (arg2) != INTEGER_CST)
802 error ("third arg to `__builtin_prefetch' must be a constant");
803 arg2 = integer_zero_node;
805 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
806 /* Argument 2 must be 0, 1, 2, or 3. */
807 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
809 warning ("invalid third arg to __builtin_prefetch; using zero");
816 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
818 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
819 || (GET_MODE(op0) != Pmode))
821 #ifdef POINTERS_EXTEND_UNSIGNED
822 if (GET_MODE(op0) != Pmode)
823 op0 = convert_memory_address (Pmode, op0);
825 op0 = force_reg (Pmode, op0);
827 emit_insn (gen_prefetch (op0, op1, op2));
831 op0 = protect_from_queue (op0, 0);
832 /* Don't do anything with direct references to volatile memory, but
833 generate code to handle other side effects. */
834 if (GET_CODE (op0) != MEM && side_effects_p (op0))
838 /* Get a MEM rtx for expression EXP which is the address of an operand
839 to be used to be used in a string instruction (cmpstrsi, movstrsi, ..). */
842 get_memory_rtx (tree exp)
844 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
847 #ifdef POINTERS_EXTEND_UNSIGNED
848 if (GET_MODE (addr) != Pmode)
849 addr = convert_memory_address (Pmode, addr);
852 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
854 /* Get an expression we can use to find the attributes to assign to MEM.
855 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
856 we can. First remove any nops. */
857 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
858 || TREE_CODE (exp) == NON_LVALUE_EXPR)
859 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
860 exp = TREE_OPERAND (exp, 0);
862 if (TREE_CODE (exp) == ADDR_EXPR)
864 exp = TREE_OPERAND (exp, 0);
865 set_mem_attributes (mem, exp, 0);
867 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
869 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
870 /* memcpy, memset and other builtin stringops can alias with anything. */
871 set_mem_alias_set (mem, 0);
877 /* Built-in functions to perform an untyped call and return. */
879 /* For each register that may be used for calling a function, this
880 gives a mode used to copy the register's value. VOIDmode indicates
881 the register is not used for calling a function. If the machine
882 has register windows, this gives only the outbound registers.
883 INCOMING_REGNO gives the corresponding inbound register. */
884 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
886 /* For each register that may be used for returning values, this gives
887 a mode used to copy the register's value. VOIDmode indicates the
888 register is not used for returning values. If the machine has
889 register windows, this gives only the outbound registers.
890 INCOMING_REGNO gives the corresponding inbound register. */
891 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
893 /* For each register that may be used for calling a function, this
894 gives the offset of that register into the block returned by
895 __builtin_apply_args. 0 indicates that the register is not
896 used for calling a function. */
897 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
899 /* Return the offset of register REGNO into the block returned by
900 __builtin_apply_args. This is not declared static, since it is
901 needed in objc-act.c. */
904 apply_args_register_offset (int regno)
908 /* Arguments are always put in outgoing registers (in the argument
909 block) if such make sense. */
910 #ifdef OUTGOING_REGNO
911 regno = OUTGOING_REGNO (regno);
913 return apply_args_reg_offset[regno];
916 /* Return the size required for the block returned by __builtin_apply_args,
917 and initialize apply_args_mode. */
920 apply_args_size (void)
922 static int size = -1;
925 enum machine_mode mode;
927 /* The values computed by this function never change. */
930 /* The first value is the incoming arg-pointer. */
931 size = GET_MODE_SIZE (Pmode);
933 /* The second value is the structure value address unless this is
934 passed as an "invisible" first argument. */
935 if (struct_value_rtx)
936 size += GET_MODE_SIZE (Pmode);
938 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
939 if (FUNCTION_ARG_REGNO_P (regno))
941 /* Search for the proper mode for copying this register's
942 value. I'm not sure this is right, but it works so far. */
943 enum machine_mode best_mode = VOIDmode;
945 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
947 mode = GET_MODE_WIDER_MODE (mode))
948 if (HARD_REGNO_MODE_OK (regno, mode)
949 && HARD_REGNO_NREGS (regno, mode) == 1)
952 if (best_mode == VOIDmode)
953 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
955 mode = GET_MODE_WIDER_MODE (mode))
956 if (HARD_REGNO_MODE_OK (regno, mode)
957 && have_insn_for (SET, mode))
960 if (best_mode == VOIDmode)
961 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
963 mode = GET_MODE_WIDER_MODE (mode))
964 if (HARD_REGNO_MODE_OK (regno, mode)
965 && have_insn_for (SET, mode))
968 if (best_mode == VOIDmode)
969 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
971 mode = GET_MODE_WIDER_MODE (mode))
972 if (HARD_REGNO_MODE_OK (regno, mode)
973 && have_insn_for (SET, mode))
977 if (mode == VOIDmode)
980 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
981 if (size % align != 0)
982 size = CEIL (size, align) * align;
983 apply_args_reg_offset[regno] = size;
984 size += GET_MODE_SIZE (mode);
985 apply_args_mode[regno] = mode;
989 apply_args_mode[regno] = VOIDmode;
990 apply_args_reg_offset[regno] = 0;
996 /* Return the size required for the block returned by __builtin_apply,
997 and initialize apply_result_mode. */
1000 apply_result_size (void)
1002 static int size = -1;
1004 enum machine_mode mode;
1006 /* The values computed by this function never change. */
1011 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1012 if (FUNCTION_VALUE_REGNO_P (regno))
1014 /* Search for the proper mode for copying this register's
1015 value. I'm not sure this is right, but it works so far. */
1016 enum machine_mode best_mode = VOIDmode;
1018 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1020 mode = GET_MODE_WIDER_MODE (mode))
1021 if (HARD_REGNO_MODE_OK (regno, mode))
1024 if (best_mode == VOIDmode)
1025 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1027 mode = GET_MODE_WIDER_MODE (mode))
1028 if (HARD_REGNO_MODE_OK (regno, mode)
1029 && have_insn_for (SET, mode))
1032 if (best_mode == VOIDmode)
1033 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1035 mode = GET_MODE_WIDER_MODE (mode))
1036 if (HARD_REGNO_MODE_OK (regno, mode)
1037 && have_insn_for (SET, mode))
1040 if (best_mode == VOIDmode)
1041 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1043 mode = GET_MODE_WIDER_MODE (mode))
1044 if (HARD_REGNO_MODE_OK (regno, mode)
1045 && have_insn_for (SET, mode))
1049 if (mode == VOIDmode)
1052 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1053 if (size % align != 0)
1054 size = CEIL (size, align) * align;
1055 size += GET_MODE_SIZE (mode);
1056 apply_result_mode[regno] = mode;
1059 apply_result_mode[regno] = VOIDmode;
1061 /* Allow targets that use untyped_call and untyped_return to override
1062 the size so that machine-specific information can be stored here. */
1063 #ifdef APPLY_RESULT_SIZE
1064 size = APPLY_RESULT_SIZE;
1070 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1071 /* Create a vector describing the result block RESULT. If SAVEP is true,
1072 the result block is used to save the values; otherwise it is used to
1073 restore the values. */
1076 result_vector (int savep, rtx result)
1078 int regno, size, align, nelts;
1079 enum machine_mode mode;
1081 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1084 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1085 if ((mode = apply_result_mode[regno]) != VOIDmode)
1087 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1088 if (size % align != 0)
1089 size = CEIL (size, align) * align;
1090 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1091 mem = adjust_address (result, mode, size);
1092 savevec[nelts++] = (savep
1093 ? gen_rtx_SET (VOIDmode, mem, reg)
1094 : gen_rtx_SET (VOIDmode, reg, mem));
1095 size += GET_MODE_SIZE (mode);
1097 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1099 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1101 /* Save the state required to perform an untyped call with the same
1102 arguments as were passed to the current function. */
1105 expand_builtin_apply_args_1 (void)
1108 int size, align, regno;
1109 enum machine_mode mode;
1111 /* Create a block where the arg-pointer, structure value address,
1112 and argument registers can be saved. */
1113 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1115 /* Walk past the arg-pointer and structure value address. */
1116 size = GET_MODE_SIZE (Pmode);
1117 if (struct_value_rtx)
1118 size += GET_MODE_SIZE (Pmode);
1120 /* Save each register used in calling a function to the block. */
1121 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1122 if ((mode = apply_args_mode[regno]) != VOIDmode)
1126 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1127 if (size % align != 0)
1128 size = CEIL (size, align) * align;
1130 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1132 emit_move_insn (adjust_address (registers, mode, size), tem);
1133 size += GET_MODE_SIZE (mode);
1136 /* Save the arg pointer to the block. */
1137 emit_move_insn (adjust_address (registers, Pmode, 0),
1138 copy_to_reg (virtual_incoming_args_rtx));
1139 size = GET_MODE_SIZE (Pmode);
1141 /* Save the structure value address unless this is passed as an
1142 "invisible" first argument. */
1143 if (struct_value_incoming_rtx)
1145 emit_move_insn (adjust_address (registers, Pmode, size),
1146 copy_to_reg (struct_value_incoming_rtx));
1147 size += GET_MODE_SIZE (Pmode);
1150 /* Return the address of the block. */
1151 return copy_addr_to_reg (XEXP (registers, 0));
1154 /* __builtin_apply_args returns block of memory allocated on
1155 the stack into which is stored the arg pointer, structure
1156 value address, static chain, and all the registers that might
1157 possibly be used in performing a function call. The code is
1158 moved to the start of the function so the incoming values are
1162 expand_builtin_apply_args (void)
1164 /* Don't do __builtin_apply_args more than once in a function.
1165 Save the result of the first call and reuse it. */
1166 if (apply_args_value != 0)
1167 return apply_args_value;
1169 /* When this function is called, it means that registers must be
1170 saved on entry to this function. So we migrate the
1171 call to the first insn of this function. */
1176 temp = expand_builtin_apply_args_1 ();
1180 apply_args_value = temp;
1182 /* Put the insns after the NOTE that starts the function.
1183 If this is inside a start_sequence, make the outer-level insn
1184 chain current, so the code is placed at the start of the
1186 push_topmost_sequence ();
1187 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1188 pop_topmost_sequence ();
1193 /* Perform an untyped call and save the state required to perform an
1194 untyped return of whatever value was returned by the given function. */
1197 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1199 int size, align, regno;
1200 enum machine_mode mode;
1201 rtx incoming_args, result, reg, dest, src, call_insn;
1202 rtx old_stack_level = 0;
1203 rtx call_fusage = 0;
1205 #ifdef POINTERS_EXTEND_UNSIGNED
1206 if (GET_MODE (arguments) != Pmode)
1207 arguments = convert_memory_address (Pmode, arguments);
1210 /* Create a block where the return registers can be saved. */
1211 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1213 /* Fetch the arg pointer from the ARGUMENTS block. */
1214 incoming_args = gen_reg_rtx (Pmode);
1215 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1216 #ifndef STACK_GROWS_DOWNWARD
1217 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1218 incoming_args, 0, OPTAB_LIB_WIDEN);
1221 /* Perform postincrements before actually calling the function. */
1224 /* Push a new argument block and copy the arguments. Do not allow
1225 the (potential) memcpy call below to interfere with our stack
1227 do_pending_stack_adjust ();
1230 /* Save the stack with nonlocal if available. */
1231 #ifdef HAVE_save_stack_nonlocal
1232 if (HAVE_save_stack_nonlocal)
1233 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1236 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1238 /* Push a block of memory onto the stack to store the memory arguments.
1239 Save the address in a register, and copy the memory arguments. ??? I
1240 haven't figured out how the calling convention macros effect this,
1241 but it's likely that the source and/or destination addresses in
1242 the block copy will need updating in machine specific ways. */
1243 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1244 dest = gen_rtx_MEM (BLKmode, dest);
1245 set_mem_align (dest, PARM_BOUNDARY);
1246 src = gen_rtx_MEM (BLKmode, incoming_args);
1247 set_mem_align (src, PARM_BOUNDARY);
1248 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1250 /* Refer to the argument block. */
1252 arguments = gen_rtx_MEM (BLKmode, arguments);
1253 set_mem_align (arguments, PARM_BOUNDARY);
1255 /* Walk past the arg-pointer and structure value address. */
1256 size = GET_MODE_SIZE (Pmode);
1257 if (struct_value_rtx)
1258 size += GET_MODE_SIZE (Pmode);
1260 /* Restore each of the registers previously saved. Make USE insns
1261 for each of these registers for use in making the call. */
1262 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1263 if ((mode = apply_args_mode[regno]) != VOIDmode)
1265 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1266 if (size % align != 0)
1267 size = CEIL (size, align) * align;
1268 reg = gen_rtx_REG (mode, regno);
1269 emit_move_insn (reg, adjust_address (arguments, mode, size));
1270 use_reg (&call_fusage, reg);
1271 size += GET_MODE_SIZE (mode);
1274 /* Restore the structure value address unless this is passed as an
1275 "invisible" first argument. */
1276 size = GET_MODE_SIZE (Pmode);
1277 if (struct_value_rtx)
1279 rtx value = gen_reg_rtx (Pmode);
1280 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1281 emit_move_insn (struct_value_rtx, value);
1282 if (GET_CODE (struct_value_rtx) == REG)
1283 use_reg (&call_fusage, struct_value_rtx);
1284 size += GET_MODE_SIZE (Pmode);
1287 /* All arguments and registers used for the call are set up by now! */
1288 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1290 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1291 and we don't want to load it into a register as an optimization,
1292 because prepare_call_address already did it if it should be done. */
1293 if (GET_CODE (function) != SYMBOL_REF)
1294 function = memory_address (FUNCTION_MODE, function);
1296 /* Generate the actual call instruction and save the return value. */
1297 #ifdef HAVE_untyped_call
1298 if (HAVE_untyped_call)
1299 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1300 result, result_vector (1, result)));
1303 #ifdef HAVE_call_value
1304 if (HAVE_call_value)
1308 /* Locate the unique return register. It is not possible to
1309 express a call that sets more than one return register using
1310 call_value; use untyped_call for that. In fact, untyped_call
1311 only needs to save the return registers in the given block. */
1312 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1313 if ((mode = apply_result_mode[regno]) != VOIDmode)
1316 abort (); /* HAVE_untyped_call required. */
1317 valreg = gen_rtx_REG (mode, regno);
1320 emit_call_insn (GEN_CALL_VALUE (valreg,
1321 gen_rtx_MEM (FUNCTION_MODE, function),
1322 const0_rtx, NULL_RTX, const0_rtx));
1324 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1330 /* Find the CALL insn we just emitted, and attach the register usage
1332 call_insn = last_call_insn ();
1333 add_function_usage_to (call_insn, call_fusage);
1335 /* Restore the stack. */
1336 #ifdef HAVE_save_stack_nonlocal
1337 if (HAVE_save_stack_nonlocal)
1338 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1341 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1345 /* Return the address of the result block. */
1346 return copy_addr_to_reg (XEXP (result, 0));
1349 /* Perform an untyped return. */
1352 expand_builtin_return (rtx result)
1354 int size, align, regno;
1355 enum machine_mode mode;
1357 rtx call_fusage = 0;
1359 #ifdef POINTERS_EXTEND_UNSIGNED
1360 if (GET_MODE (result) != Pmode)
1361 result = convert_memory_address (Pmode, result);
1364 apply_result_size ();
1365 result = gen_rtx_MEM (BLKmode, result);
1367 #ifdef HAVE_untyped_return
1368 if (HAVE_untyped_return)
1370 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1376 /* Restore the return value and note that each value is used. */
1378 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1379 if ((mode = apply_result_mode[regno]) != VOIDmode)
1381 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1382 if (size % align != 0)
1383 size = CEIL (size, align) * align;
1384 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1385 emit_move_insn (reg, adjust_address (result, mode, size));
1387 push_to_sequence (call_fusage);
1388 emit_insn (gen_rtx_USE (VOIDmode, reg));
1389 call_fusage = get_insns ();
1391 size += GET_MODE_SIZE (mode);
1394 /* Put the USE insns before the return. */
1395 emit_insn (call_fusage);
1397 /* Return whatever values was restored by jumping directly to the end
1399 expand_null_return ();
1402 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1404 static enum type_class
1405 type_to_class (tree type)
1407 switch (TREE_CODE (type))
1409 case VOID_TYPE: return void_type_class;
1410 case INTEGER_TYPE: return integer_type_class;
1411 case CHAR_TYPE: return char_type_class;
1412 case ENUMERAL_TYPE: return enumeral_type_class;
1413 case BOOLEAN_TYPE: return boolean_type_class;
1414 case POINTER_TYPE: return pointer_type_class;
1415 case REFERENCE_TYPE: return reference_type_class;
1416 case OFFSET_TYPE: return offset_type_class;
1417 case REAL_TYPE: return real_type_class;
1418 case COMPLEX_TYPE: return complex_type_class;
1419 case FUNCTION_TYPE: return function_type_class;
1420 case METHOD_TYPE: return method_type_class;
1421 case RECORD_TYPE: return record_type_class;
1423 case QUAL_UNION_TYPE: return union_type_class;
1424 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1425 ? string_type_class : array_type_class);
1426 case SET_TYPE: return set_type_class;
1427 case FILE_TYPE: return file_type_class;
1428 case LANG_TYPE: return lang_type_class;
1429 default: return no_type_class;
1433 /* Expand a call to __builtin_classify_type with arguments found in
1437 expand_builtin_classify_type (tree arglist)
1440 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1441 return GEN_INT (no_type_class);
1444 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1447 expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
1453 arglist = TREE_VALUE (arglist);
1455 /* We have taken care of the easy cases during constant folding. This
1456 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1457 get a chance to see if it can deduce whether ARGLIST is constant. */
1459 current_function_calls_constant_p = 1;
1461 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1462 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1466 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1469 mathfn_built_in (tree type, enum built_in_function fn)
1471 enum built_in_function fcode = NOT_BUILT_IN;
1472 if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
1476 case BUILT_IN_SQRTF:
1477 case BUILT_IN_SQRTL:
1478 fcode = BUILT_IN_SQRT;
1483 fcode = BUILT_IN_SIN;
1488 fcode = BUILT_IN_COS;
1493 fcode = BUILT_IN_EXP;
1498 fcode = BUILT_IN_LOG;
1503 fcode = BUILT_IN_TAN;
1506 case BUILT_IN_ATANF:
1507 case BUILT_IN_ATANL:
1508 fcode = BUILT_IN_ATAN;
1510 case BUILT_IN_FLOOR:
1511 case BUILT_IN_FLOORF:
1512 case BUILT_IN_FLOORL:
1513 fcode = BUILT_IN_FLOOR;
1516 case BUILT_IN_CEILF:
1517 case BUILT_IN_CEILL:
1518 fcode = BUILT_IN_CEIL;
1520 case BUILT_IN_TRUNC:
1521 case BUILT_IN_TRUNCF:
1522 case BUILT_IN_TRUNCL:
1523 fcode = BUILT_IN_TRUNC;
1525 case BUILT_IN_ROUND:
1526 case BUILT_IN_ROUNDF:
1527 case BUILT_IN_ROUNDL:
1528 fcode = BUILT_IN_ROUND;
1530 case BUILT_IN_NEARBYINT:
1531 case BUILT_IN_NEARBYINTF:
1532 case BUILT_IN_NEARBYINTL:
1533 fcode = BUILT_IN_NEARBYINT;
1538 else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
1542 case BUILT_IN_SQRTF:
1543 case BUILT_IN_SQRTL:
1544 fcode = BUILT_IN_SQRTF;
1549 fcode = BUILT_IN_SINF;
1554 fcode = BUILT_IN_COSF;
1559 fcode = BUILT_IN_EXPF;
1564 fcode = BUILT_IN_LOGF;
1569 fcode = BUILT_IN_TANF;
1572 case BUILT_IN_ATANF:
1573 case BUILT_IN_ATANL:
1574 fcode = BUILT_IN_ATANF;
1576 case BUILT_IN_FLOOR:
1577 case BUILT_IN_FLOORF:
1578 case BUILT_IN_FLOORL:
1579 fcode = BUILT_IN_FLOORF;
1582 case BUILT_IN_CEILF:
1583 case BUILT_IN_CEILL:
1584 fcode = BUILT_IN_CEILF;
1586 case BUILT_IN_TRUNC:
1587 case BUILT_IN_TRUNCF:
1588 case BUILT_IN_TRUNCL:
1589 fcode = BUILT_IN_TRUNCF;
1591 case BUILT_IN_ROUND:
1592 case BUILT_IN_ROUNDF:
1593 case BUILT_IN_ROUNDL:
1594 fcode = BUILT_IN_ROUNDF;
1596 case BUILT_IN_NEARBYINT:
1597 case BUILT_IN_NEARBYINTF:
1598 case BUILT_IN_NEARBYINTL:
1599 fcode = BUILT_IN_NEARBYINTF;
1604 else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
1608 case BUILT_IN_SQRTF:
1609 case BUILT_IN_SQRTL:
1610 fcode = BUILT_IN_SQRTL;
1615 fcode = BUILT_IN_SINL;
1620 fcode = BUILT_IN_COSL;
1625 fcode = BUILT_IN_EXPL;
1630 fcode = BUILT_IN_LOGL;
1635 fcode = BUILT_IN_TANL;
1638 case BUILT_IN_ATANF:
1639 case BUILT_IN_ATANL:
1640 fcode = BUILT_IN_ATANL;
1642 case BUILT_IN_FLOOR:
1643 case BUILT_IN_FLOORF:
1644 case BUILT_IN_FLOORL:
1645 fcode = BUILT_IN_FLOORL;
1648 case BUILT_IN_CEILF:
1649 case BUILT_IN_CEILL:
1650 fcode = BUILT_IN_CEILL;
1652 case BUILT_IN_TRUNC:
1653 case BUILT_IN_TRUNCF:
1654 case BUILT_IN_TRUNCL:
1655 fcode = BUILT_IN_TRUNCL;
1657 case BUILT_IN_ROUND:
1658 case BUILT_IN_ROUNDF:
1659 case BUILT_IN_ROUNDL:
1660 fcode = BUILT_IN_ROUNDL;
1662 case BUILT_IN_NEARBYINT:
1663 case BUILT_IN_NEARBYINTF:
1664 case BUILT_IN_NEARBYINTL:
1665 fcode = BUILT_IN_NEARBYINTL;
1670 return implicit_built_in_decls[fcode];
1673 /* If errno must be maintained, expand the RTL to check if the result,
1674 TARGET, of a built-in function call, EXP, is NaN, and if so set
1678 expand_errno_check (tree exp, rtx target)
1680 rtx lab = gen_label_rtx ();
1682 /* Test the result; if it is NaN, set errno=EDOM because
1683 the argument was not in the domain. */
1684 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1688 /* If this built-in doesn't throw an exception, set errno directly. */
1689 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1691 #ifdef GEN_ERRNO_RTX
1692 rtx errno_rtx = GEN_ERRNO_RTX;
1695 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1697 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1703 /* We can't set errno=EDOM directly; let the library call do it.
1704 Pop the arguments right away in case the call gets deleted. */
1706 expand_call (exp, target, 0);
1712 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1713 Return 0 if a normal call should be emitted rather than expanding the
1714 function in-line. EXP is the expression that is a call to the builtin
1715 function; if convenient, the result should be placed in TARGET.
1716 SUBTARGET may be used as the target for computing one of EXP's operands. */
1719 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1721 optab builtin_optab;
1723 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1724 tree arglist = TREE_OPERAND (exp, 1);
1725 enum machine_mode mode;
1726 bool errno_set = false;
1729 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1732 arg = TREE_VALUE (arglist);
1734 switch (DECL_FUNCTION_CODE (fndecl))
1739 builtin_optab = sin_optab; break;
1743 builtin_optab = cos_optab; break;
1745 case BUILT_IN_SQRTF:
1746 case BUILT_IN_SQRTL:
1747 errno_set = ! tree_expr_nonnegative_p (arg);
1748 builtin_optab = sqrt_optab;
1753 errno_set = true; builtin_optab = exp_optab; break;
1757 errno_set = true; builtin_optab = log_optab; break;
1761 builtin_optab = tan_optab; break;
1763 case BUILT_IN_ATANF:
1764 case BUILT_IN_ATANL:
1765 builtin_optab = atan_optab; break;
1766 case BUILT_IN_FLOOR:
1767 case BUILT_IN_FLOORF:
1768 case BUILT_IN_FLOORL:
1769 builtin_optab = floor_optab; break;
1771 case BUILT_IN_CEILF:
1772 case BUILT_IN_CEILL:
1773 builtin_optab = ceil_optab; break;
1774 case BUILT_IN_TRUNC:
1775 case BUILT_IN_TRUNCF:
1776 case BUILT_IN_TRUNCL:
1777 builtin_optab = trunc_optab; break;
1778 case BUILT_IN_ROUND:
1779 case BUILT_IN_ROUNDF:
1780 case BUILT_IN_ROUNDL:
1781 builtin_optab = round_optab; break;
1782 case BUILT_IN_NEARBYINT:
1783 case BUILT_IN_NEARBYINTF:
1784 case BUILT_IN_NEARBYINTL:
1785 builtin_optab = nearbyint_optab; break;
1790 /* Make a suitable register to place result in. */
1791 mode = TYPE_MODE (TREE_TYPE (exp));
1793 /* Before working hard, check whether the instruction is available. */
1794 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1796 target = gen_reg_rtx (mode);
1798 if (! flag_errno_math || ! HONOR_NANS (mode))
1801 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1802 need to expand the argument again. This way, we will not perform
1803 side-effects more the once. */
1804 narg = save_expr (arg);
1807 arglist = build_tree_list (NULL_TREE, arg);
1808 exp = build_function_call_expr (fndecl, arglist);
1811 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1816 /* Compute into TARGET.
1817 Set TARGET to wherever the result comes back. */
1818 target = expand_unop (mode, builtin_optab, op0, target, 0);
1820 /* If we were unable to expand via the builtin, stop the sequence
1821 (without outputting the insns) and call to the library function
1822 with the stabilized argument list. */
1826 return expand_call (exp, target, target == const0_rtx);
1830 expand_errno_check (exp, target);
1832 /* Output the entire sequence. */
1833 insns = get_insns ();
1840 /* Expand a call to the builtin binary math functions (pow and atan2).
1841 Return 0 if a normal call should be emitted rather than expanding the
1842 function in-line. EXP is the expression that is a call to the builtin
1843 function; if convenient, the result should be placed in TARGET.
1844 SUBTARGET may be used as the target for computing one of EXP's
1848 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1850 optab builtin_optab;
1851 rtx op0, op1, insns;
1852 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1853 tree arglist = TREE_OPERAND (exp, 1);
1854 tree arg0, arg1, temp, narg;
1855 enum machine_mode mode;
1856 bool errno_set = true;
1859 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1862 arg0 = TREE_VALUE (arglist);
1863 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1865 switch (DECL_FUNCTION_CODE (fndecl))
1870 builtin_optab = pow_optab; break;
1871 case BUILT_IN_ATAN2:
1872 case BUILT_IN_ATAN2F:
1873 case BUILT_IN_ATAN2L:
1874 builtin_optab = atan2_optab; break;
1879 /* Make a suitable register to place result in. */
1880 mode = TYPE_MODE (TREE_TYPE (exp));
1882 /* Before working hard, check whether the instruction is available. */
1883 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1886 target = gen_reg_rtx (mode);
1888 if (! flag_errno_math || ! HONOR_NANS (mode))
1891 /* Alway stabilize the argument list. */
1892 narg = save_expr (arg1);
1895 temp = build_tree_list (NULL_TREE, narg);
1899 temp = TREE_CHAIN (arglist);
1901 narg = save_expr (arg0);
1904 arglist = tree_cons (NULL_TREE, narg, temp);
1908 arglist = tree_cons (NULL_TREE, arg0, temp);
1911 exp = build_function_call_expr (fndecl, arglist);
1913 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1914 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1919 /* Compute into TARGET.
1920 Set TARGET to wherever the result comes back. */
1921 target = expand_binop (mode, builtin_optab, op0, op1,
1922 target, 0, OPTAB_DIRECT);
1924 /* If we were unable to expand via the builtin, stop the sequence
1925 (without outputting the insns) and call to the library function
1926 with the stabilized argument list. */
1930 return expand_call (exp, target, target == const0_rtx);
1934 expand_errno_check (exp, target);
1936 /* Output the entire sequence. */
1937 insns = get_insns ();
1944 /* To evaluate powi(x,n), the floating point value x raised to the
1945 constant integer exponent n, we use a hybrid algorithm that
1946 combines the "window method" with look-up tables. For an
1947 introduction to exponentiation algorithms and "addition chains",
1948 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
1949 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
1950 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
1951 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
1953 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
1954 multiplications to inline before calling the system library's pow
1955 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
1956 so this default never requires calling pow, powf or powl. */
1958 #ifndef POWI_MAX_MULTS
1959 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
1962 /* The size of the "optimal power tree" lookup table. All
1963 exponents less than this value are simply looked up in the
1964 powi_table below. This threshold is also used to size the
1965 cache of pseudo registers that hold intermediate results. */
1966 #define POWI_TABLE_SIZE 256
1968 /* The size, in bits of the window, used in the "window method"
1969 exponentiation algorithm. This is equivalent to a radix of
1970 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
1971 #define POWI_WINDOW_SIZE 3
1973 /* The following table is an efficient representation of an
1974 "optimal power tree". For each value, i, the corresponding
1975 value, j, in the table states than an optimal evaluation
1976 sequence for calculating pow(x,i) can be found by evaluating
1977 pow(x,j)*pow(x,i-j). An optimal power tree for the first
1978 100 integers is given in Knuth's "Seminumerical algorithms". */
1980 static const unsigned char powi_table[POWI_TABLE_SIZE] =
1982 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
1983 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
1984 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
1985 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
1986 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
1987 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
1988 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
1989 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
1990 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
1991 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
1992 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
1993 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
1994 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
1995 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
1996 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
1997 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
1998 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
1999 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2000 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2001 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2002 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2003 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2004 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2005 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2006 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2007 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2008 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2009 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2010 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2011 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2012 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2013 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2017 /* Return the number of multiplications required to calculate
2018 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2019 subroutine of powi_cost. CACHE is an array indicating
2020 which exponents have already been calculated. */
2023 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2025 /* If we've already calculated this exponent, then this evaluation
2026 doesn't require any additional multiplications. */
2031 return powi_lookup_cost (n - powi_table[n], cache)
2032 + powi_lookup_cost (powi_table[n], cache) + 1;
2035 /* Return the number of multiplications required to calculate
2036 powi(x,n) for an arbitrary x, given the exponent N. This
2037 function needs to be kept in sync with expand_powi below. */
2040 powi_cost (HOST_WIDE_INT n)
2042 bool cache[POWI_TABLE_SIZE];
2043 unsigned HOST_WIDE_INT digit;
2044 unsigned HOST_WIDE_INT val;
2050 /* Ignore the reciprocal when calculating the cost. */
2051 val = (n < 0) ? -n : n;
2053 /* Initialize the exponent cache. */
2054 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2059 while (val >= POWI_TABLE_SIZE)
2063 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2064 result += powi_lookup_cost (digit, cache)
2065 + POWI_WINDOW_SIZE + 1;
2066 val >>= POWI_WINDOW_SIZE;
2075 return result + powi_lookup_cost (val, cache);
2078 /* Recursive subroutine of expand_powi. This function takes the array,
2079 CACHE, of already calculated exponents and an exponent N and returns
2080 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2083 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2085 unsigned HOST_WIDE_INT digit;
2089 if (n < POWI_TABLE_SIZE)
2094 target = gen_reg_rtx (mode);
2097 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2098 op1 = expand_powi_1 (mode, powi_table[n], cache);
2102 target = gen_reg_rtx (mode);
2103 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2104 op0 = expand_powi_1 (mode, n - digit, cache);
2105 op1 = expand_powi_1 (mode, digit, cache);
2109 target = gen_reg_rtx (mode);
2110 op0 = expand_powi_1 (mode, n >> 1, cache);
2114 result = expand_mult (mode, op0, op1, target, 0);
2115 if (result != target)
2116 emit_move_insn (target, result);
2120 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2121 floating point operand in mode MODE, and N is the exponent. This
2122 function needs to be kept in sync with powi_cost above. */
2125 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2127 unsigned HOST_WIDE_INT val;
2128 rtx cache[POWI_TABLE_SIZE];
2132 return CONST1_RTX (mode);
2134 val = (n < 0) ? -n : n;
2136 memset (cache, 0, sizeof(cache));
2139 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2141 /* If the original exponent was negative, reciprocate the result. */
2143 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2144 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2149 /* Expand a call to the pow built-in mathematical function. Return 0 if
2150 a normal call should be emitted rather than expanding the function
2151 in-line. EXP is the expression that is a call to the builtin
2152 function; if convenient, the result should be placed in TARGET. */
2155 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2157 tree arglist = TREE_OPERAND (exp, 1);
2160 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2163 arg0 = TREE_VALUE (arglist);
2164 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2166 if (flag_unsafe_math_optimizations
2167 && ! flag_errno_math
2169 && TREE_CODE (arg1) == REAL_CST
2170 && ! TREE_CONSTANT_OVERFLOW (arg1))
2172 REAL_VALUE_TYPE cint;
2176 c = TREE_REAL_CST (arg1);
2177 n = real_to_integer (&c);
2178 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2179 if (real_identical (&c, &cint)
2180 && powi_cost (n) <= POWI_MAX_MULTS)
2182 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2183 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2184 op = force_reg (mode, op);
2185 return expand_powi (op, mode, n);
2188 return expand_builtin_mathfn_2 (exp, target, NULL_RTX);
2191 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2192 if we failed the caller should emit a normal call, otherwise
2193 try to get the result in TARGET, if convenient. */
2196 expand_builtin_strlen (tree arglist, rtx target,
2197 enum machine_mode target_mode)
2199 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2204 tree len, src = TREE_VALUE (arglist);
2205 rtx result, src_reg, char_rtx, before_strlen;
2206 enum machine_mode insn_mode = target_mode, char_mode;
2207 enum insn_code icode = CODE_FOR_nothing;
2210 /* If the length can be computed at compile-time, return it. */
2211 len = c_strlen (src, 0);
2213 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2215 /* If the length can be computed at compile-time and is constant
2216 integer, but there are side-effects in src, evaluate
2217 src for side-effects, then return len.
2218 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2219 can be optimized into: i++; x = 3; */
2220 len = c_strlen (src, 1);
2221 if (len && TREE_CODE (len) == INTEGER_CST)
2223 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2224 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2227 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2229 /* If SRC is not a pointer type, don't do this operation inline. */
2233 /* Bail out if we can't compute strlen in the right mode. */
2234 while (insn_mode != VOIDmode)
2236 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2237 if (icode != CODE_FOR_nothing)
2240 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2242 if (insn_mode == VOIDmode)
2245 /* Make a place to write the result of the instruction. */
2248 && GET_CODE (result) == REG
2249 && GET_MODE (result) == insn_mode
2250 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2251 result = gen_reg_rtx (insn_mode);
2253 /* Make a place to hold the source address. We will not expand
2254 the actual source until we are sure that the expansion will
2255 not fail -- there are trees that cannot be expanded twice. */
2256 src_reg = gen_reg_rtx (Pmode);
2258 /* Mark the beginning of the strlen sequence so we can emit the
2259 source operand later. */
2260 before_strlen = get_last_insn ();
2262 char_rtx = const0_rtx;
2263 char_mode = insn_data[(int) icode].operand[2].mode;
2264 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2266 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2268 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2269 char_rtx, GEN_INT (align));
2274 /* Now that we are assured of success, expand the source. */
2276 pat = memory_address (BLKmode,
2277 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
2279 emit_move_insn (src_reg, pat);
2284 emit_insn_after (pat, before_strlen);
2286 emit_insn_before (pat, get_insns ());
2288 /* Return the value in the proper mode for this function. */
2289 if (GET_MODE (result) == target_mode)
2291 else if (target != 0)
2292 convert_move (target, result, 0);
2294 target = convert_to_mode (target_mode, result, 0);
2300 /* Expand a call to the strstr builtin. Return 0 if we failed the
2301 caller should emit a normal call, otherwise try to get the result
2302 in TARGET, if convenient (and in mode MODE if that's convenient). */
2305 expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
2307 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2311 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2313 const char *p1, *p2;
2322 const char *r = strstr (p1, p2);
2327 /* Return an offset into the constant string argument. */
2328 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2329 s1, ssize_int (r - p1))),
2330 target, mode, EXPAND_NORMAL);
2334 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2339 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2343 /* New argument list transforming strstr(s1, s2) to
2344 strchr(s1, s2[0]). */
2346 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2347 arglist = tree_cons (NULL_TREE, s1, arglist);
2348 return expand_expr (build_function_call_expr (fn, arglist),
2349 target, mode, EXPAND_NORMAL);
2353 /* Expand a call to the strchr builtin. Return 0 if we failed the
2354 caller should emit a normal call, otherwise try to get the result
2355 in TARGET, if convenient (and in mode MODE if that's convenient). */
2358 expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
2360 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2364 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2367 if (TREE_CODE (s2) != INTEGER_CST)
2376 if (target_char_cast (s2, &c))
2384 /* Return an offset into the constant string argument. */
2385 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2386 s1, ssize_int (r - p1))),
2387 target, mode, EXPAND_NORMAL);
2390 /* FIXME: Should use here strchrM optab so that ports can optimize
2396 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2397 caller should emit a normal call, otherwise try to get the result
2398 in TARGET, if convenient (and in mode MODE if that's convenient). */
2401 expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
2403 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2407 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2411 if (TREE_CODE (s2) != INTEGER_CST)
2420 if (target_char_cast (s2, &c))
2423 r = strrchr (p1, c);
2428 /* Return an offset into the constant string argument. */
2429 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2430 s1, ssize_int (r - p1))),
2431 target, mode, EXPAND_NORMAL);
2434 if (! integer_zerop (s2))
2437 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2441 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2442 return expand_expr (build_function_call_expr (fn, arglist),
2443 target, mode, EXPAND_NORMAL);
2447 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2448 caller should emit a normal call, otherwise try to get the result
2449 in TARGET, if convenient (and in mode MODE if that's convenient). */
2452 expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
2454 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2458 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2460 const char *p1, *p2;
2469 const char *r = strpbrk (p1, p2);
2474 /* Return an offset into the constant string argument. */
2475 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2476 s1, ssize_int (r - p1))),
2477 target, mode, EXPAND_NORMAL);
2482 /* strpbrk(x, "") == NULL.
2483 Evaluate and ignore the arguments in case they had
2485 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2490 return 0; /* Really call strpbrk. */
2492 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2496 /* New argument list transforming strpbrk(s1, s2) to
2497 strchr(s1, s2[0]). */
2499 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2500 arglist = tree_cons (NULL_TREE, s1, arglist);
2501 return expand_expr (build_function_call_expr (fn, arglist),
2502 target, mode, EXPAND_NORMAL);
2506 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2507 bytes from constant string DATA + OFFSET and return it as target
2511 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2512 enum machine_mode mode)
2514 const char *str = (const char *) data;
2517 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2518 > strlen (str) + 1))
2519 abort (); /* Attempt to read past the end of constant string. */
2521 return c_readstr (str + offset, mode);
2524 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2525 Return 0 if we failed, the caller should emit a normal call,
2526 otherwise try to get the result in TARGET, if convenient (and in
2527 mode MODE if that's convenient). */
2529 expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
2531 if (!validate_arglist (arglist,
2532 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2536 tree dest = TREE_VALUE (arglist);
2537 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2538 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2539 const char *src_str;
2540 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2541 unsigned int dest_align
2542 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2543 rtx dest_mem, src_mem, dest_addr, len_rtx;
2545 /* If DEST is not a pointer type, call the normal function. */
2546 if (dest_align == 0)
2549 /* If the LEN parameter is zero, return DEST. */
2550 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2552 /* Evaluate and ignore SRC in case it has side-effects. */
2553 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2554 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2557 /* If either SRC is not a pointer type, don't do this
2558 operation in-line. */
2562 dest_mem = get_memory_rtx (dest);
2563 set_mem_align (dest_mem, dest_align);
2564 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2565 src_str = c_getstr (src);
2567 /* If SRC is a string constant and block move would be done
2568 by pieces, we can avoid loading the string from memory
2569 and only stored the computed constants. */
2571 && GET_CODE (len_rtx) == CONST_INT
2572 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2573 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2574 (void *) src_str, dest_align))
2576 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2577 builtin_memcpy_read_str,
2578 (void *) src_str, dest_align, 0);
2579 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2580 #ifdef POINTERS_EXTEND_UNSIGNED
2581 if (GET_MODE (dest_mem) != ptr_mode)
2582 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2587 src_mem = get_memory_rtx (src);
2588 set_mem_align (src_mem, src_align);
2590 /* Copy word part most expediently. */
2591 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2596 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2597 #ifdef POINTERS_EXTEND_UNSIGNED
2598 if (GET_MODE (dest_addr) != ptr_mode)
2599 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2606 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2607 Return 0 if we failed the caller should emit a normal call,
2608 otherwise try to get the result in TARGET, if convenient (and in
2609 mode MODE if that's convenient). If ENDP is 0 return the
2610 destination pointer, if ENDP is 1 return the end pointer ala
2611 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2615 expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
2618 if (!validate_arglist (arglist,
2619 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2621 /* If return value is ignored, transform mempcpy into memcpy. */
2622 else if (target == const0_rtx)
2624 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2629 return expand_expr (build_function_call_expr (fn, arglist),
2630 target, mode, EXPAND_NORMAL);
2634 tree dest = TREE_VALUE (arglist);
2635 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2636 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2637 const char *src_str;
2638 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2639 unsigned int dest_align
2640 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2641 rtx dest_mem, src_mem, len_rtx;
2643 /* If DEST is not a pointer type or LEN is not constant,
2644 call the normal function. */
2645 if (dest_align == 0 || !host_integerp (len, 1))
2648 /* If the LEN parameter is zero, return DEST. */
2649 if (tree_low_cst (len, 1) == 0)
2651 /* Evaluate and ignore SRC in case it has side-effects. */
2652 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2653 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2656 /* If either SRC is not a pointer type, don't do this
2657 operation in-line. */
2661 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2662 src_str = c_getstr (src);
2664 /* If SRC is a string constant and block move would be done
2665 by pieces, we can avoid loading the string from memory
2666 and only stored the computed constants. */
2668 && GET_CODE (len_rtx) == CONST_INT
2669 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2670 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2671 (void *) src_str, dest_align))
2673 dest_mem = get_memory_rtx (dest);
2674 set_mem_align (dest_mem, dest_align);
2675 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2676 builtin_memcpy_read_str,
2677 (void *) src_str, dest_align, endp);
2678 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2679 #ifdef POINTERS_EXTEND_UNSIGNED
2680 if (GET_MODE (dest_mem) != ptr_mode)
2681 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2686 if (GET_CODE (len_rtx) == CONST_INT
2687 && can_move_by_pieces (INTVAL (len_rtx),
2688 MIN (dest_align, src_align)))
2690 dest_mem = get_memory_rtx (dest);
2691 set_mem_align (dest_mem, dest_align);
2692 src_mem = get_memory_rtx (src);
2693 set_mem_align (src_mem, src_align);
2694 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2695 MIN (dest_align, src_align), endp);
2696 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2697 #ifdef POINTERS_EXTEND_UNSIGNED
2698 if (GET_MODE (dest_mem) != ptr_mode)
2699 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2708 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2709 if we failed the caller should emit a normal call. */
2712 expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
2714 if (!validate_arglist (arglist,
2715 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2719 tree dest = TREE_VALUE (arglist);
2720 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2721 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2723 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2724 unsigned int dest_align
2725 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2727 /* If DEST is not a pointer type, call the normal function. */
2728 if (dest_align == 0)
2731 /* If the LEN parameter is zero, return DEST. */
2732 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2734 /* Evaluate and ignore SRC in case it has side-effects. */
2735 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2736 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2739 /* If either SRC is not a pointer type, don't do this
2740 operation in-line. */
2744 /* If src is categorized for a readonly section we can use
2746 if (readonly_data_expr (src))
2748 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2751 return expand_expr (build_function_call_expr (fn, arglist),
2752 target, mode, EXPAND_NORMAL);
2755 /* Otherwise, call the normal function. */
2760 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2761 if we failed the caller should emit a normal call. */
2764 expand_builtin_bcopy (tree arglist)
2766 tree src, dest, size, newarglist;
2768 if (!validate_arglist (arglist,
2769 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2772 src = TREE_VALUE (arglist);
2773 dest = TREE_VALUE (TREE_CHAIN (arglist));
2774 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2776 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2777 memmove(ptr y, ptr x, size_t z). This is done this way
2778 so that if it isn't expanded inline, we fallback to
2779 calling bcopy instead of memmove. */
2781 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2782 newarglist = tree_cons (NULL_TREE, src, newarglist);
2783 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2785 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2788 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2789 if we failed the caller should emit a normal call, otherwise try to get
2790 the result in TARGET, if convenient (and in mode MODE if that's
2794 expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
2796 tree fn, len, src, dst;
2798 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2801 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2805 src = TREE_VALUE (TREE_CHAIN (arglist));
2806 len = c_strlen (src, 1);
2807 if (len == 0 || TREE_SIDE_EFFECTS (len))
2810 dst = TREE_VALUE (arglist);
2811 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2812 arglist = build_tree_list (NULL_TREE, len);
2813 arglist = tree_cons (NULL_TREE, src, arglist);
2814 arglist = tree_cons (NULL_TREE, dst, arglist);
2815 return expand_expr (build_function_call_expr (fn, arglist),
2816 target, mode, EXPAND_NORMAL);
2819 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2820 Return 0 if we failed the caller should emit a normal call,
2821 otherwise try to get the result in TARGET, if convenient (and in
2822 mode MODE if that's convenient). */
2825 expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
2827 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2833 /* If return value is ignored, transform stpcpy into strcpy. */
2834 if (target == const0_rtx)
2836 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2840 return expand_expr (build_function_call_expr (fn, arglist),
2841 target, mode, EXPAND_NORMAL);
2844 /* Ensure we get an actual string whose length can be evaluated at
2845 compile-time, not an expression containing a string. This is
2846 because the latter will potentially produce pessimized code
2847 when used to produce the return value. */
2848 src = TREE_VALUE (TREE_CHAIN (arglist));
2849 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
2852 dst = TREE_VALUE (arglist);
2853 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2854 arglist = build_tree_list (NULL_TREE, len);
2855 arglist = tree_cons (NULL_TREE, src, arglist);
2856 arglist = tree_cons (NULL_TREE, dst, arglist);
2857 return expand_builtin_mempcpy (arglist, target, mode, /*endp=*/2);
2861 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2862 bytes from constant string DATA + OFFSET and return it as target
2866 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
2867 enum machine_mode mode)
2869 const char *str = (const char *) data;
2871 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2874 return c_readstr (str + offset, mode);
2877 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2878 if we failed the caller should emit a normal call. */
2881 expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
2883 if (!validate_arglist (arglist,
2884 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2888 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
2889 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2892 /* We must be passed a constant len parameter. */
2893 if (TREE_CODE (len) != INTEGER_CST)
2896 /* If the len parameter is zero, return the dst parameter. */
2897 if (integer_zerop (len))
2899 /* Evaluate and ignore the src argument in case it has
2901 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2902 VOIDmode, EXPAND_NORMAL);
2903 /* Return the dst parameter. */
2904 return expand_expr (TREE_VALUE (arglist), target, mode,
2908 /* Now, we must be passed a constant src ptr parameter. */
2909 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2912 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2914 /* We're required to pad with trailing zeros if the requested
2915 len is greater than strlen(s2)+1. In that case try to
2916 use store_by_pieces, if it fails, punt. */
2917 if (tree_int_cst_lt (slen, len))
2919 tree dest = TREE_VALUE (arglist);
2920 unsigned int dest_align
2921 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2922 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2925 if (!p || dest_align == 0 || !host_integerp (len, 1)
2926 || !can_store_by_pieces (tree_low_cst (len, 1),
2927 builtin_strncpy_read_str,
2928 (void *) p, dest_align))
2931 dest_mem = get_memory_rtx (dest);
2932 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2933 builtin_strncpy_read_str,
2934 (void *) p, dest_align, 0);
2935 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2936 #ifdef POINTERS_EXTEND_UNSIGNED
2937 if (GET_MODE (dest_mem) != ptr_mode)
2938 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2943 /* OK transform into builtin memcpy. */
2944 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2947 return expand_expr (build_function_call_expr (fn, arglist),
2948 target, mode, EXPAND_NORMAL);
2952 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2953 bytes from constant string DATA + OFFSET and return it as target
2957 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2958 enum machine_mode mode)
2960 const char *c = (const char *) data;
2961 char *p = alloca (GET_MODE_SIZE (mode));
2963 memset (p, *c, GET_MODE_SIZE (mode));
2965 return c_readstr (p, mode);
2968 /* Callback routine for store_by_pieces. Return the RTL of a register
2969 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2970 char value given in the RTL register data. For example, if mode is
2971 4 bytes wide, return the RTL for 0x01010101*data. */
2974 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2975 enum machine_mode mode)
2981 size = GET_MODE_SIZE (mode);
2986 memset (p, 1, size);
2987 coeff = c_readstr (p, mode);
2989 target = convert_to_mode (mode, (rtx) data, 1);
2990 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2991 return force_reg (mode, target);
2994 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2995 if we failed the caller should emit a normal call, otherwise try to get
2996 the result in TARGET, if convenient (and in mode MODE if that's
3000 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
3002 if (!validate_arglist (arglist,
3003 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3007 tree dest = TREE_VALUE (arglist);
3008 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3009 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3012 unsigned int dest_align
3013 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3014 rtx dest_mem, dest_addr, len_rtx;
3016 /* If DEST is not a pointer type, don't do this
3017 operation in-line. */
3018 if (dest_align == 0)
3021 /* If the LEN parameter is zero, return DEST. */
3022 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
3024 /* Evaluate and ignore VAL in case it has side-effects. */
3025 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3026 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3029 if (TREE_CODE (val) != INTEGER_CST)
3033 if (!host_integerp (len, 1))
3036 if (optimize_size && tree_low_cst (len, 1) > 1)
3039 /* Assume that we can memset by pieces if we can store the
3040 * the coefficients by pieces (in the required modes).
3041 * We can't pass builtin_memset_gen_str as that emits RTL. */
3043 if (!can_store_by_pieces (tree_low_cst (len, 1),
3044 builtin_memset_read_str,
3048 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
3049 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3050 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3052 dest_mem = get_memory_rtx (dest);
3053 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3054 builtin_memset_gen_str,
3055 val_rtx, dest_align, 0);
3056 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3057 #ifdef POINTERS_EXTEND_UNSIGNED
3058 if (GET_MODE (dest_mem) != ptr_mode)
3059 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3064 if (target_char_cast (val, &c))
3069 if (!host_integerp (len, 1))
3071 if (!can_store_by_pieces (tree_low_cst (len, 1),
3072 builtin_memset_read_str, &c,
3076 dest_mem = get_memory_rtx (dest);
3077 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3078 builtin_memset_read_str,
3080 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3081 #ifdef POINTERS_EXTEND_UNSIGNED
3082 if (GET_MODE (dest_mem) != ptr_mode)
3083 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3088 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3090 dest_mem = get_memory_rtx (dest);
3091 set_mem_align (dest_mem, dest_align);
3092 dest_addr = clear_storage (dest_mem, len_rtx);
3096 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3097 #ifdef POINTERS_EXTEND_UNSIGNED
3098 if (GET_MODE (dest_addr) != ptr_mode)
3099 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3107 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3108 if we failed the caller should emit a normal call. */
3111 expand_builtin_bzero (tree arglist)
3113 tree dest, size, newarglist;
3115 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3118 dest = TREE_VALUE (arglist);
3119 size = TREE_VALUE (TREE_CHAIN (arglist));
3121 /* New argument list transforming bzero(ptr x, int y) to
3122 memset(ptr x, int 0, size_t y). This is done this way
3123 so that if it isn't expanded inline, we fallback to
3124 calling bzero instead of memset. */
3126 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
3127 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3128 newarglist = tree_cons (NULL_TREE, dest, newarglist);
3130 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
3133 /* Expand expression EXP, which is a call to the memcmp built-in function.
3134 ARGLIST is the argument list for this call. Return 0 if we failed and the
3135 caller should emit a normal call, otherwise try to get the result in
3136 TARGET, if convenient (and in mode MODE, if that's convenient). */
3139 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3140 enum machine_mode mode)
3142 tree arg1, arg2, len;
3143 const char *p1, *p2;
3145 if (!validate_arglist (arglist,
3146 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3149 arg1 = TREE_VALUE (arglist);
3150 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3151 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3153 /* If the len parameter is zero, return zero. */
3154 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
3156 /* Evaluate and ignore arg1 and arg2 in case they have
3158 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3159 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3163 p1 = c_getstr (arg1);
3164 p2 = c_getstr (arg2);
3166 /* If all arguments are constant, and the value of len is not greater
3167 than the lengths of arg1 and arg2, evaluate at compile-time. */
3168 if (host_integerp (len, 1) && p1 && p2
3169 && compare_tree_int (len, strlen (p1) + 1) <= 0
3170 && compare_tree_int (len, strlen (p2) + 1) <= 0)
3172 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
3174 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3177 /* If len parameter is one, return an expression corresponding to
3178 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3179 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
3181 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3182 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3184 fold (build1 (CONVERT_EXPR, integer_type_node,
3185 build1 (INDIRECT_REF, cst_uchar_node,
3186 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3188 fold (build1 (CONVERT_EXPR, integer_type_node,
3189 build1 (INDIRECT_REF, cst_uchar_node,
3190 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3191 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3192 return expand_expr (result, target, mode, EXPAND_NORMAL);
3195 #ifdef HAVE_cmpstrsi
3197 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3202 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3204 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3205 enum machine_mode insn_mode
3206 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3208 /* If we don't have POINTER_TYPE, call the function. */
3209 if (arg1_align == 0 || arg2_align == 0)
3212 /* Make a place to write the result of the instruction. */
3215 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3216 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3217 result = gen_reg_rtx (insn_mode);
3219 arg1_rtx = get_memory_rtx (arg1);
3220 arg2_rtx = get_memory_rtx (arg2);
3221 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3225 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3226 GEN_INT (MIN (arg1_align, arg2_align)));
3231 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3232 TYPE_MODE (integer_type_node), 3,
3233 XEXP (arg1_rtx, 0), Pmode,
3234 XEXP (arg2_rtx, 0), Pmode,
3235 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3236 TREE_UNSIGNED (sizetype)),
3237 TYPE_MODE (sizetype));
3239 /* Return the value in the proper mode for this function. */
3240 mode = TYPE_MODE (TREE_TYPE (exp));
3241 if (GET_MODE (result) == mode)
3243 else if (target != 0)
3245 convert_move (target, result, 0);
3249 return convert_to_mode (mode, result, 0);
3256 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3257 if we failed the caller should emit a normal call, otherwise try to get
3258 the result in TARGET, if convenient. */
3261 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3263 tree arglist = TREE_OPERAND (exp, 1);
3265 const char *p1, *p2;
3267 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3270 arg1 = TREE_VALUE (arglist);
3271 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3273 p1 = c_getstr (arg1);
3274 p2 = c_getstr (arg2);
3278 const int i = strcmp (p1, p2);
3279 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
3282 /* If either arg is "", return an expression corresponding to
3283 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3284 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3286 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3287 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3289 fold (build1 (CONVERT_EXPR, integer_type_node,
3290 build1 (INDIRECT_REF, cst_uchar_node,
3291 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3293 fold (build1 (CONVERT_EXPR, integer_type_node,
3294 build1 (INDIRECT_REF, cst_uchar_node,
3295 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3296 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3297 return expand_expr (result, target, mode, EXPAND_NORMAL);
3300 #ifdef HAVE_cmpstrsi
3303 tree len, len1, len2;
3304 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3308 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3310 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3311 enum machine_mode insn_mode
3312 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3314 len1 = c_strlen (arg1, 1);
3315 len2 = c_strlen (arg2, 1);
3318 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3320 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3322 /* If we don't have a constant length for the first, use the length
3323 of the second, if we know it. We don't require a constant for
3324 this case; some cost analysis could be done if both are available
3325 but neither is constant. For now, assume they're equally cheap,
3326 unless one has side effects. If both strings have constant lengths,
3333 else if (TREE_SIDE_EFFECTS (len1))
3335 else if (TREE_SIDE_EFFECTS (len2))
3337 else if (TREE_CODE (len1) != INTEGER_CST)
3339 else if (TREE_CODE (len2) != INTEGER_CST)
3341 else if (tree_int_cst_lt (len1, len2))
3346 /* If both arguments have side effects, we cannot optimize. */
3347 if (!len || TREE_SIDE_EFFECTS (len))
3350 /* If we don't have POINTER_TYPE, call the function. */
3351 if (arg1_align == 0 || arg2_align == 0)
3354 /* Make a place to write the result of the instruction. */
3357 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3358 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3359 result = gen_reg_rtx (insn_mode);
3361 arg1_rtx = get_memory_rtx (arg1);
3362 arg2_rtx = get_memory_rtx (arg2);
3363 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3364 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3365 GEN_INT (MIN (arg1_align, arg2_align)));
3371 /* Return the value in the proper mode for this function. */
3372 mode = TYPE_MODE (TREE_TYPE (exp));
3373 if (GET_MODE (result) == mode)
3376 return convert_to_mode (mode, result, 0);
3377 convert_move (target, result, 0);
3384 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3385 if we failed the caller should emit a normal call, otherwise try to get
3386 the result in TARGET, if convenient. */
3389 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3391 tree arglist = TREE_OPERAND (exp, 1);
3392 tree arg1, arg2, arg3;
3393 const char *p1, *p2;
3395 if (!validate_arglist (arglist,
3396 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3399 arg1 = TREE_VALUE (arglist);
3400 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3401 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3403 /* If the len parameter is zero, return zero. */
3404 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
3406 /* Evaluate and ignore arg1 and arg2 in case they have
3408 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3409 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3413 p1 = c_getstr (arg1);
3414 p2 = c_getstr (arg2);
3416 /* If all arguments are constant, evaluate at compile-time. */
3417 if (host_integerp (arg3, 1) && p1 && p2)
3419 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3420 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3423 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3424 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3425 if (host_integerp (arg3, 1)
3426 && (tree_low_cst (arg3, 1) == 1
3427 || (tree_low_cst (arg3, 1) > 1
3428 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3430 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3431 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3433 fold (build1 (CONVERT_EXPR, integer_type_node,
3434 build1 (INDIRECT_REF, cst_uchar_node,
3435 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3437 fold (build1 (CONVERT_EXPR, integer_type_node,
3438 build1 (INDIRECT_REF, cst_uchar_node,
3439 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3440 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3441 return expand_expr (result, target, mode, EXPAND_NORMAL);
3444 /* If c_strlen can determine an expression for one of the string
3445 lengths, and it doesn't have side effects, then emit cmpstrsi
3446 using length MIN(strlen(string)+1, arg3). */
3447 #ifdef HAVE_cmpstrsi
3450 tree len, len1, len2;
3451 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3455 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3457 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3458 enum machine_mode insn_mode
3459 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3461 len1 = c_strlen (arg1, 1);
3462 len2 = c_strlen (arg2, 1);
3465 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3467 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3469 /* If we don't have a constant length for the first, use the length
3470 of the second, if we know it. We don't require a constant for
3471 this case; some cost analysis could be done if both are available
3472 but neither is constant. For now, assume they're equally cheap,
3473 unless one has side effects. If both strings have constant lengths,
3480 else if (TREE_SIDE_EFFECTS (len1))
3482 else if (TREE_SIDE_EFFECTS (len2))
3484 else if (TREE_CODE (len1) != INTEGER_CST)
3486 else if (TREE_CODE (len2) != INTEGER_CST)
3488 else if (tree_int_cst_lt (len1, len2))
3493 /* If both arguments have side effects, we cannot optimize. */
3494 if (!len || TREE_SIDE_EFFECTS (len))
3497 /* The actual new length parameter is MIN(len,arg3). */
3498 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3500 /* If we don't have POINTER_TYPE, call the function. */
3501 if (arg1_align == 0 || arg2_align == 0)
3504 /* Make a place to write the result of the instruction. */
3507 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3508 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3509 result = gen_reg_rtx (insn_mode);
3511 arg1_rtx = get_memory_rtx (arg1);
3512 arg2_rtx = get_memory_rtx (arg2);
3513 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3514 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3515 GEN_INT (MIN (arg1_align, arg2_align)));
3521 /* Return the value in the proper mode for this function. */
3522 mode = TYPE_MODE (TREE_TYPE (exp));
3523 if (GET_MODE (result) == mode)
3526 return convert_to_mode (mode, result, 0);
3527 convert_move (target, result, 0);
3534 /* Expand expression EXP, which is a call to the strcat builtin.
3535 Return 0 if we failed the caller should emit a normal call,
3536 otherwise try to get the result in TARGET, if convenient. */
3539 expand_builtin_strcat (tree arglist, rtx target, enum machine_mode mode)
3541 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3545 tree dst = TREE_VALUE (arglist),
3546 src = TREE_VALUE (TREE_CHAIN (arglist));
3547 const char *p = c_getstr (src);
3549 /* If the string length is zero, return the dst parameter. */
3550 if (p && *p == '\0')
3551 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3557 /* Expand expression EXP, which is a call to the strncat builtin.
3558 Return 0 if we failed the caller should emit a normal call,
3559 otherwise try to get the result in TARGET, if convenient. */
3562 expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
3564 if (!validate_arglist (arglist,
3565 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3569 tree dst = TREE_VALUE (arglist),
3570 src = TREE_VALUE (TREE_CHAIN (arglist)),
3571 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3572 const char *p = c_getstr (src);
3574 /* If the requested length is zero, or the src parameter string
3575 length is zero, return the dst parameter. */
3576 if (integer_zerop (len) || (p && *p == '\0'))
3578 /* Evaluate and ignore the src and len parameters in case
3579 they have side-effects. */
3580 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3581 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3582 return expand_expr (dst, target, mode, EXPAND_NORMAL);
3585 /* If the requested len is greater than or equal to the string
3586 length, call strcat. */
3587 if (TREE_CODE (len) == INTEGER_CST && p
3588 && compare_tree_int (len, strlen (p)) >= 0)
3591 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
3592 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
3594 /* If the replacement _DECL isn't initialized, don't do the
3599 return expand_expr (build_function_call_expr (fn, newarglist),
3600 target, mode, EXPAND_NORMAL);
3606 /* Expand expression EXP, which is a call to the strspn builtin.
3607 Return 0 if we failed the caller should emit a normal call,
3608 otherwise try to get the result in TARGET, if convenient. */
3611 expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
3613 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3617 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3618 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3620 /* If both arguments are constants, evaluate at compile-time. */
3623 const size_t r = strspn (p1, p2);
3624 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3627 /* If either argument is "", return 0. */
3628 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3630 /* Evaluate and ignore both arguments in case either one has
3632 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3633 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3640 /* Expand expression EXP, which is a call to the strcspn builtin.
3641 Return 0 if we failed the caller should emit a normal call,
3642 otherwise try to get the result in TARGET, if convenient. */
3645 expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
3647 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3651 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
3652 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
3654 /* If both arguments are constants, evaluate at compile-time. */
3657 const size_t r = strcspn (p1, p2);
3658 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
3661 /* If the first argument is "", return 0. */
3662 if (p1 && *p1 == '\0')
3664 /* Evaluate and ignore argument s2 in case it has
3666 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3670 /* If the second argument is "", return __builtin_strlen(s1). */
3671 if (p2 && *p2 == '\0')
3673 tree newarglist = build_tree_list (NULL_TREE, s1),
3674 fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3676 /* If the replacement _DECL isn't initialized, don't do the
3681 return expand_expr (build_function_call_expr (fn, newarglist),
3682 target, mode, EXPAND_NORMAL);
3688 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3689 if that's convenient. */
3692 expand_builtin_saveregs (void)
3696 /* Don't do __builtin_saveregs more than once in a function.
3697 Save the result of the first call and reuse it. */
3698 if (saveregs_value != 0)
3699 return saveregs_value;
3701 /* When this function is called, it means that registers must be
3702 saved on entry to this function. So we migrate the call to the
3703 first insn of this function. */
3707 #ifdef EXPAND_BUILTIN_SAVEREGS
3708 /* Do whatever the machine needs done in this case. */
3709 val = EXPAND_BUILTIN_SAVEREGS ();
3711 /* ??? We used to try and build up a call to the out of line function,
3712 guessing about what registers needed saving etc. This became much
3713 harder with __builtin_va_start, since we don't have a tree for a
3714 call to __builtin_saveregs to fall back on. There was exactly one
3715 port (i860) that used this code, and I'm unconvinced it could actually
3716 handle the general case. So we no longer try to handle anything
3717 weird and make the backend absorb the evil. */
3719 error ("__builtin_saveregs not supported by this target");
3726 saveregs_value = val;
3728 /* Put the insns after the NOTE that starts the function. If this
3729 is inside a start_sequence, make the outer-level insn chain current, so
3730 the code is placed at the start of the function. */
3731 push_topmost_sequence ();
3732 emit_insn_after (seq, get_insns ());
3733 pop_topmost_sequence ();
3738 /* __builtin_args_info (N) returns word N of the arg space info
3739 for the current function. The number and meanings of words
3740 is controlled by the definition of CUMULATIVE_ARGS. */
3743 expand_builtin_args_info (tree arglist)
3745 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
3746 int *word_ptr = (int *) ¤t_function_args_info;
3748 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3753 if (!host_integerp (TREE_VALUE (arglist), 0))
3754 error ("argument of `__builtin_args_info' must be constant");
3757 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3759 if (wordnum < 0 || wordnum >= nwords)
3760 error ("argument of `__builtin_args_info' out of range");
3762 return GEN_INT (word_ptr[wordnum]);
3766 error ("missing argument in `__builtin_args_info'");
3771 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3774 expand_builtin_next_arg (tree arglist)
3776 tree fntype = TREE_TYPE (current_function_decl);
3778 if (TYPE_ARG_TYPES (fntype) == 0
3779 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3782 error ("`va_start' used in function with fixed args");
3788 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3789 tree arg = TREE_VALUE (arglist);
3791 /* Strip off all nops for the sake of the comparison. This
3792 is not quite the same as STRIP_NOPS. It does more.
3793 We must also strip off INDIRECT_EXPR for C++ reference
3795 while (TREE_CODE (arg) == NOP_EXPR
3796 || TREE_CODE (arg) == CONVERT_EXPR
3797 || TREE_CODE (arg) == NON_LVALUE_EXPR
3798 || TREE_CODE (arg) == INDIRECT_REF)
3799 arg = TREE_OPERAND (arg, 0);
3800 if (arg != last_parm)
3801 warning ("second parameter of `va_start' not last named argument");
3804 /* Evidently an out of date version of <stdarg.h>; can't validate
3805 va_start's second argument, but can still work as intended. */
3806 warning ("`__builtin_next_arg' called without an argument");
3808 return expand_binop (Pmode, add_optab,
3809 current_function_internal_arg_pointer,
3810 current_function_arg_offset_rtx,
3811 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3814 /* Make it easier for the backends by protecting the valist argument
3815 from multiple evaluations. */
3818 stabilize_va_list (tree valist, int needs_lvalue)
3820 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3822 if (TREE_SIDE_EFFECTS (valist))
3823 valist = save_expr (valist);
3825 /* For this case, the backends will be expecting a pointer to
3826 TREE_TYPE (va_list_type_node), but it's possible we've
3827 actually been given an array (an actual va_list_type_node).
3829 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3831 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3832 tree p2 = build_pointer_type (va_list_type_node);
3834 valist = build1 (ADDR_EXPR, p2, valist);
3835 valist = fold (build1 (NOP_EXPR, p1, valist));
3844 if (! TREE_SIDE_EFFECTS (valist))
3847 pt = build_pointer_type (va_list_type_node);
3848 valist = fold (build1 (ADDR_EXPR, pt, valist));
3849 TREE_SIDE_EFFECTS (valist) = 1;
3852 if (TREE_SIDE_EFFECTS (valist))
3853 valist = save_expr (valist);
3854 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3861 /* The "standard" implementation of va_start: just assign `nextarg' to
3865 std_expand_builtin_va_start (tree valist, rtx nextarg)
3869 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3870 make_tree (ptr_type_node, nextarg));
3871 TREE_SIDE_EFFECTS (t) = 1;
3873 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3876 /* Expand ARGLIST, from a call to __builtin_va_start. */
3879 expand_builtin_va_start (tree arglist)
3884 chain = TREE_CHAIN (arglist);
3886 if (TREE_CHAIN (chain))
3887 error ("too many arguments to function `va_start'");
3889 nextarg = expand_builtin_next_arg (chain);
3890 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3892 #ifdef EXPAND_BUILTIN_VA_START
3893 EXPAND_BUILTIN_VA_START (valist, nextarg);
3895 std_expand_builtin_va_start (valist, nextarg);
3901 /* The "standard" implementation of va_arg: read the value from the
3902 current (padded) address and increment by the (padded) size. */
3905 std_expand_builtin_va_arg (tree valist, tree type)
3907 tree addr_tree, t, type_size = NULL;
3908 tree align, alignm1;
3912 /* Compute the rounded size of the type. */
3913 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3914 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3915 if (type == error_mark_node
3916 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3917 || TREE_OVERFLOW (type_size))
3918 rounded_size = size_zero_node;
3920 rounded_size = fold (build (MULT_EXPR, sizetype,
3921 fold (build (TRUNC_DIV_EXPR, sizetype,
3922 fold (build (PLUS_EXPR, sizetype,
3923 type_size, alignm1)),
3929 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3931 /* Small args are padded downward. */
3932 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3933 fold (build (COND_EXPR, sizetype,
3934 fold (build (GT_EXPR, sizetype,
3938 fold (build (MINUS_EXPR, sizetype,
3943 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3944 addr = copy_to_reg (addr);
3946 /* Compute new value for AP. */
3947 if (! integer_zerop (rounded_size))
3949 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3950 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3952 TREE_SIDE_EFFECTS (t) = 1;
3953 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3959 /* Expand __builtin_va_arg, which is not really a builtin function, but
3960 a very special sort of operator. */
3963 expand_builtin_va_arg (tree valist, tree type)
3966 tree promoted_type, want_va_type, have_va_type;
3968 /* Verify that valist is of the proper type. */
3970 want_va_type = va_list_type_node;
3971 have_va_type = TREE_TYPE (valist);
3972 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3974 /* If va_list is an array type, the argument may have decayed
3975 to a pointer type, e.g. by being passed to another function.
3976 In that case, unwrap both types so that we can compare the
3977 underlying records. */
3978 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3979 || TREE_CODE (have_va_type) == POINTER_TYPE)
3981 want_va_type = TREE_TYPE (want_va_type);
3982 have_va_type = TREE_TYPE (have_va_type);
3985 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3987 error ("first argument to `va_arg' not of type `va_list'");
3991 /* Generate a diagnostic for requesting data of a type that cannot
3992 be passed through `...' due to type promotion at the call site. */
3993 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3996 const char *name = "<anonymous type>", *pname = 0;
3997 static bool gave_help;
3999 if (TYPE_NAME (type))
4001 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
4002 name = IDENTIFIER_POINTER (TYPE_NAME (type));
4003 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
4004 && DECL_NAME (TYPE_NAME (type)))
4005 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
4007 if (TYPE_NAME (promoted_type))
4009 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
4010 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
4011 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
4012 && DECL_NAME (TYPE_NAME (promoted_type)))
4013 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
4016 /* Unfortunately, this is merely undefined, rather than a constraint
4017 violation, so we cannot make this an error. If this call is never
4018 executed, the program is still strictly conforming. */
4019 warning ("`%s' is promoted to `%s' when passed through `...'",
4024 warning ("(so you should pass `%s' not `%s' to `va_arg')",
4028 /* We can, however, treat "undefined" any way we please.
4029 Call abort to encourage the user to fix the program. */
4030 expand_builtin_trap ();
4032 /* This is dead code, but go ahead and finish so that the
4033 mode of the result comes out right. */
4038 /* Make it easier for the backends by protecting the valist argument
4039 from multiple evaluations. */
4040 valist = stabilize_va_list (valist, 0);
4042 #ifdef EXPAND_BUILTIN_VA_ARG
4043 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
4045 addr = std_expand_builtin_va_arg (valist, type);
4049 #ifdef POINTERS_EXTEND_UNSIGNED
4050 if (GET_MODE (addr) != Pmode)
4051 addr = convert_memory_address (Pmode, addr);
4054 result = gen_rtx_MEM (TYPE_MODE (type), addr);
4055 set_mem_alias_set (result, get_varargs_alias_set ());
4060 /* Expand ARGLIST, from a call to __builtin_va_end. */
4063 expand_builtin_va_end (tree arglist)
4065 tree valist = TREE_VALUE (arglist);
4067 #ifdef EXPAND_BUILTIN_VA_END
4068 valist = stabilize_va_list (valist, 0);
4069 EXPAND_BUILTIN_VA_END (arglist);
4071 /* Evaluate for side effects, if needed. I hate macros that don't
4073 if (TREE_SIDE_EFFECTS (valist))
4074 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4080 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
4081 builtin rather than just as an assignment in stdarg.h because of the
4082 nastiness of array-type va_list types. */
4085 expand_builtin_va_copy (tree arglist)
4089 dst = TREE_VALUE (arglist);
4090 src = TREE_VALUE (TREE_CHAIN (arglist));
4092 dst = stabilize_va_list (dst, 1);
4093 src = stabilize_va_list (src, 0);
4095 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4097 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
4098 TREE_SIDE_EFFECTS (t) = 1;
4099 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4103 rtx dstb, srcb, size;
4105 /* Evaluate to pointers. */
4106 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4107 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4108 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4109 VOIDmode, EXPAND_NORMAL);
4111 #ifdef POINTERS_EXTEND_UNSIGNED
4112 if (GET_MODE (dstb) != Pmode)
4113 dstb = convert_memory_address (Pmode, dstb);
4115 if (GET_MODE (srcb) != Pmode)
4116 srcb = convert_memory_address (Pmode, srcb);
4119 /* "Dereference" to BLKmode memories. */
4120 dstb = gen_rtx_MEM (BLKmode, dstb);
4121 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4122 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4123 srcb = gen_rtx_MEM (BLKmode, srcb);
4124 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4125 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4128 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4134 /* Expand a call to one of the builtin functions __builtin_frame_address or
4135 __builtin_return_address. */
4138 expand_builtin_frame_address (tree fndecl, tree arglist)
4140 /* The argument must be a nonnegative integer constant.
4141 It counts the number of frames to scan up the stack.
4142 The value is the return address saved in that frame. */
4144 /* Warning about missing arg was already issued. */
4146 else if (! host_integerp (TREE_VALUE (arglist), 1))
4148 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4149 error ("invalid arg to `__builtin_frame_address'");
4151 error ("invalid arg to `__builtin_return_address'");
4157 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4158 tree_low_cst (TREE_VALUE (arglist), 1),
4159 hard_frame_pointer_rtx);
4161 /* Some ports cannot access arbitrary stack frames. */
4164 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4165 warning ("unsupported arg to `__builtin_frame_address'");
4167 warning ("unsupported arg to `__builtin_return_address'");
4171 /* For __builtin_frame_address, return what we've got. */
4172 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4175 if (GET_CODE (tem) != REG
4176 && ! CONSTANT_P (tem))
4177 tem = copy_to_mode_reg (Pmode, tem);
4182 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4183 we failed and the caller should emit a normal call, otherwise try to get
4184 the result in TARGET, if convenient. */
4187 expand_builtin_alloca (tree arglist, rtx target)
4192 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4195 /* Compute the argument. */
4196 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
4198 /* Allocate the desired space. */
4199 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4201 #ifdef POINTERS_EXTEND_UNSIGNED
4202 if (GET_MODE (result) != ptr_mode)
4203 result = convert_memory_address (ptr_mode, result);
4209 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4210 Return 0 if a normal call should be emitted rather than expanding the
4211 function in-line. If convenient, the result should be placed in TARGET.
4212 SUBTARGET may be used as the target for computing one of EXP's operands. */
4215 expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
4216 rtx subtarget, optab op_optab)
4219 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4222 /* Compute the argument. */
4223 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4224 /* Compute op, into TARGET if possible.
4225 Set TARGET to wherever the result comes back. */
4226 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4227 op_optab, op0, target, 1);
4231 return convert_to_mode (target_mode, target, 0);
4234 /* If the string passed to fputs is a constant and is one character
4235 long, we attempt to transform this call into __builtin_fputc(). */
4238 expand_builtin_fputs (tree arglist, int ignore, int unlocked)
4241 tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
4242 : implicit_built_in_decls[BUILT_IN_FPUTC];
4243 tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
4244 : implicit_built_in_decls[BUILT_IN_FWRITE];
4246 /* If the return value is used, or the replacement _DECL isn't
4247 initialized, don't do the transformation. */
4248 if (!ignore || !fn_fputc || !fn_fwrite)
4251 /* Verify the arguments in the original call. */
4252 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4255 /* Get the length of the string passed to fputs. If the length
4256 can't be determined, punt. */
4257 if (!(len = c_strlen (TREE_VALUE (arglist), 1))
4258 || TREE_CODE (len) != INTEGER_CST)
4261 switch (compare_tree_int (len, 1))
4263 case -1: /* length is 0, delete the call entirely . */
4265 /* Evaluate and ignore the argument in case it has
4267 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
4268 VOIDmode, EXPAND_NORMAL);
4271 case 0: /* length is 1, call fputc. */
4273 const char *p = c_getstr (TREE_VALUE (arglist));
4277 /* New argument list transforming fputs(string, stream) to
4278 fputc(string[0], stream). */
4280 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4282 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
4288 case 1: /* length is greater than 1, call fwrite. */
4292 /* If optimizing for size keep fputs. */
4295 string_arg = TREE_VALUE (arglist);
4296 /* New argument list transforming fputs(string, stream) to
4297 fwrite(string, 1, len, stream). */
4298 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
4299 arglist = tree_cons (NULL_TREE, len, arglist);
4300 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
4301 arglist = tree_cons (NULL_TREE, string_arg, arglist);
4309 return expand_expr (build_function_call_expr (fn, arglist),
4310 (ignore ? const0_rtx : NULL_RTX),
4311 VOIDmode, EXPAND_NORMAL);
4314 /* Expand a call to __builtin_expect. We return our argument and emit a
4315 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4316 a non-jump context. */
4319 expand_builtin_expect (tree arglist, rtx target)
4324 if (arglist == NULL_TREE
4325 || TREE_CHAIN (arglist) == NULL_TREE)
4327 exp = TREE_VALUE (arglist);
4328 c = TREE_VALUE (TREE_CHAIN (arglist));
4330 if (TREE_CODE (c) != INTEGER_CST)
4332 error ("second arg to `__builtin_expect' must be a constant");
4333 c = integer_zero_node;
4336 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4338 /* Don't bother with expected value notes for integral constants. */
4339 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4341 /* We do need to force this into a register so that we can be
4342 moderately sure to be able to correctly interpret the branch
4344 target = force_reg (GET_MODE (target), target);
4346 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4348 note = emit_note (NOTE_INSN_EXPECTED_VALUE);
4349 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4355 /* Like expand_builtin_expect, except do this in a jump context. This is
4356 called from do_jump if the conditional is a __builtin_expect. Return either
4357 a list of insns to emit the jump or NULL if we cannot optimize
4358 __builtin_expect. We need to optimize this at jump time so that machines
4359 like the PowerPC don't turn the test into a SCC operation, and then jump
4360 based on the test being 0/1. */
4363 expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
4365 tree arglist = TREE_OPERAND (exp, 1);
4366 tree arg0 = TREE_VALUE (arglist);
4367 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4370 /* Only handle __builtin_expect (test, 0) and
4371 __builtin_expect (test, 1). */
4372 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4373 && (integer_zerop (arg1) || integer_onep (arg1)))
4378 /* If we fail to locate an appropriate conditional jump, we'll
4379 fall back to normal evaluation. Ensure that the expression
4380 can be re-evaluated. */
4381 switch (unsafe_for_reeval (arg0))
4386 case 1: /* Mildly unsafe. */
4387 arg0 = unsave_expr (arg0);
4390 case 2: /* Wildly unsafe. */
4394 /* Expand the jump insns. */
4396 do_jump (arg0, if_false_label, if_true_label);
4400 /* Now that the __builtin_expect has been validated, go through and add
4401 the expect's to each of the conditional jumps. If we run into an
4402 error, just give up and generate the 'safe' code of doing a SCC
4403 operation and then doing a branch on that. */
4405 while (insn != NULL_RTX)
4407 rtx next = NEXT_INSN (insn);
4409 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
4411 rtx ifelse = SET_SRC (pc_set (insn));
4415 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
4418 label = XEXP (XEXP (ifelse, 1), 0);
4420 /* An inverted jump reverses the probabilities. */
4421 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
4424 label = XEXP (XEXP (ifelse, 2), 0);
4426 /* We shouldn't have to worry about conditional returns during
4427 the expansion stage, but handle it gracefully anyway. */
4428 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
4433 /* An inverted return reverses the probabilities. */
4434 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
4442 /* If the test is expected to fail, reverse the
4444 if (integer_zerop (arg1))
4447 /* If we are jumping to the false label, reverse the
4449 if (label == NULL_RTX)
4450 ; /* conditional return */
4451 else if (label == if_false_label)
4453 else if (label != if_true_label)
4457 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4464 /* If no jumps were modified, fail and do __builtin_expect the normal
4474 expand_builtin_trap (void)
4478 emit_insn (gen_trap ());
4481 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4485 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4486 Return 0 if a normal call should be emitted rather than expanding
4487 the function inline. If convenient, the result should be placed
4488 in TARGET. SUBTARGET may be used as the target for computing
4492 expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
4494 enum machine_mode mode;
4498 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4501 arg = TREE_VALUE (arglist);
4502 mode = TYPE_MODE (TREE_TYPE (arg));
4503 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4504 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4507 /* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
4508 Return 0 if a normal call should be emitted rather than expanding
4509 the function inline. If convenient, the result should be placed
4513 expand_builtin_cabs (tree arglist, rtx target)
4515 enum machine_mode mode;
4519 if (arglist == 0 || TREE_CHAIN (arglist))
4521 arg = TREE_VALUE (arglist);
4522 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
4523 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
4526 mode = TYPE_MODE (TREE_TYPE (arg));
4527 op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
4528 return expand_complex_abs (mode, op0, target, 0);
4531 /* Expand a call to sprintf with argument list ARGLIST. Return 0 if
4532 a normal call should be emitted rather than expanding the function
4533 inline. If convenient, the result should be placed in TARGET with
4537 expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)
4539 tree orig_arglist, dest, fmt;
4540 const char *fmt_str;
4542 orig_arglist = arglist;
4544 /* Verify the required arguments in the original call. */
4547 dest = TREE_VALUE (arglist);
4548 if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
4550 arglist = TREE_CHAIN (arglist);
4553 fmt = TREE_VALUE (arglist);
4554 if (TREE_CODE (TREE_TYPE (dest)) != POINTER_TYPE)
4556 arglist = TREE_CHAIN (arglist);
4558 /* Check whether the format is a literal string constant. */
4559 fmt_str = c_getstr (fmt);
4560 if (fmt_str == NULL)
4563 /* If the format doesn't contain % args or %%, use strcpy. */
4564 if (strchr (fmt_str, '%') == 0)
4566 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4569 if (arglist || ! fn)
4571 expand_expr (build_function_call_expr (fn, orig_arglist),
4572 const0_rtx, VOIDmode, EXPAND_NORMAL);
4573 if (target == const0_rtx)
4575 exp = build_int_2 (strlen (fmt_str), 0);
4576 exp = fold (build1 (NOP_EXPR, integer_type_node, exp));
4577 return expand_expr (exp, target, mode, EXPAND_NORMAL);
4579 /* If the format is "%s", use strcpy if the result isn't used. */
4580 else if (strcmp (fmt_str, "%s") == 0)
4583 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4588 if (! arglist || TREE_CHAIN (arglist))
4590 arg = TREE_VALUE (arglist);
4591 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
4594 if (target != const0_rtx)
4596 len = c_strlen (arg, 1);
4597 if (! len || TREE_CODE (len) != INTEGER_CST)
4603 arglist = build_tree_list (NULL_TREE, arg);
4604 arglist = tree_cons (NULL_TREE, dest, arglist);
4605 expand_expr (build_function_call_expr (fn, arglist),
4606 const0_rtx, VOIDmode, EXPAND_NORMAL);
4608 if (target == const0_rtx)
4610 return expand_expr (len, target, mode, EXPAND_NORMAL);
4616 /* Expand an expression EXP that calls a built-in function,
4617 with result going to TARGET if that's convenient
4618 (and in mode MODE if that's convenient).
4619 SUBTARGET may be used as the target for computing one of EXP's operands.
4620 IGNORE is nonzero if the value is to be ignored. */
4623 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
4626 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4627 tree arglist = TREE_OPERAND (exp, 1);
4628 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4629 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
4631 /* Perform postincrements before expanding builtin functions. Â */
4634 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4635 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
4637 /* When not optimizing, generate calls to library functions for a certain
4639 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
4643 case BUILT_IN_SQRTF:
4644 case BUILT_IN_SQRTL:
4661 case BUILT_IN_ATANF:
4662 case BUILT_IN_ATANL:
4666 case BUILT_IN_ATAN2:
4667 case BUILT_IN_ATAN2F:
4668 case BUILT_IN_ATAN2L:
4669 case BUILT_IN_MEMSET:
4670 case BUILT_IN_MEMCPY:
4671 case BUILT_IN_MEMCMP:
4672 case BUILT_IN_MEMPCPY:
4673 case BUILT_IN_MEMMOVE:
4675 case BUILT_IN_BZERO:
4676 case BUILT_IN_BCOPY:
4677 case BUILT_IN_INDEX:
4678 case BUILT_IN_RINDEX:
4679 case BUILT_IN_SPRINTF:
4680 case BUILT_IN_STPCPY:
4681 case BUILT_IN_STRCHR:
4682 case BUILT_IN_STRRCHR:
4683 case BUILT_IN_STRLEN:
4684 case BUILT_IN_STRCPY:
4685 case BUILT_IN_STRNCPY:
4686 case BUILT_IN_STRNCMP:
4687 case BUILT_IN_STRSTR:
4688 case BUILT_IN_STRPBRK:
4689 case BUILT_IN_STRCAT:
4690 case BUILT_IN_STRNCAT:
4691 case BUILT_IN_STRSPN:
4692 case BUILT_IN_STRCSPN:
4693 case BUILT_IN_STRCMP:
4695 case BUILT_IN_PUTCHAR:
4697 case BUILT_IN_PRINTF:
4698 case BUILT_IN_FPUTC:
4699 case BUILT_IN_FPUTS:
4700 case BUILT_IN_FWRITE:
4701 case BUILT_IN_PUTCHAR_UNLOCKED:
4702 case BUILT_IN_PUTS_UNLOCKED:
4703 case BUILT_IN_PRINTF_UNLOCKED:
4704 case BUILT_IN_FPUTC_UNLOCKED:
4705 case BUILT_IN_FPUTS_UNLOCKED:
4706 case BUILT_IN_FWRITE_UNLOCKED:
4707 case BUILT_IN_FLOOR:
4708 case BUILT_IN_FLOORF:
4709 case BUILT_IN_FLOORL:
4711 case BUILT_IN_CEILF:
4712 case BUILT_IN_CEILL:
4713 case BUILT_IN_TRUNC:
4714 case BUILT_IN_TRUNCF:
4715 case BUILT_IN_TRUNCL:
4716 case BUILT_IN_ROUND:
4717 case BUILT_IN_ROUNDF:
4718 case BUILT_IN_ROUNDL:
4719 case BUILT_IN_NEARBYINT:
4720 case BUILT_IN_NEARBYINTF:
4721 case BUILT_IN_NEARBYINTL:
4722 return expand_call (exp, target, ignore);
4728 /* The built-in function expanders test for target == const0_rtx
4729 to determine whether the function's result will be ignored. */
4731 target = const0_rtx;
4733 /* If the result of a pure or const built-in function is ignored, and
4734 none of its arguments are volatile, we can avoid expanding the
4735 built-in call and just evaluate the arguments for side-effects. */
4736 if (target == const0_rtx
4737 && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
4739 bool volatilep = false;
4742 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4743 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
4751 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
4752 expand_expr (TREE_VALUE (arg), const0_rtx,
4753 VOIDmode, EXPAND_NORMAL);
4762 case BUILT_IN_LLABS:
4763 case BUILT_IN_IMAXABS:
4764 /* build_function_call changes these into ABS_EXPR. */
4768 case BUILT_IN_FABSF:
4769 case BUILT_IN_FABSL:
4770 target = expand_builtin_fabs (arglist, target, subtarget);
4776 case BUILT_IN_CABSF:
4777 case BUILT_IN_CABSL:
4778 if (flag_unsafe_math_optimizations)
4780 target = expand_builtin_cabs (arglist, target);
4787 case BUILT_IN_CONJF:
4788 case BUILT_IN_CONJL:
4789 case BUILT_IN_CREAL:
4790 case BUILT_IN_CREALF:
4791 case BUILT_IN_CREALL:
4792 case BUILT_IN_CIMAG:
4793 case BUILT_IN_CIMAGF:
4794 case BUILT_IN_CIMAGL:
4795 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
4796 and IMAGPART_EXPR. */
4815 case BUILT_IN_ATANF:
4816 case BUILT_IN_ATANL:
4817 /* Treat these like sqrt only if unsafe math optimizations are allowed,
4818 because of possible accuracy problems. */
4819 if (! flag_unsafe_math_optimizations)
4822 case BUILT_IN_SQRTF:
4823 case BUILT_IN_SQRTL:
4824 case BUILT_IN_FLOOR:
4825 case BUILT_IN_FLOORF:
4826 case BUILT_IN_FLOORL:
4828 case BUILT_IN_CEILF:
4829 case BUILT_IN_CEILL:
4830 case BUILT_IN_TRUNC:
4831 case BUILT_IN_TRUNCF:
4832 case BUILT_IN_TRUNCL:
4833 case BUILT_IN_ROUND:
4834 case BUILT_IN_ROUNDF:
4835 case BUILT_IN_ROUNDL:
4836 case BUILT_IN_NEARBYINT:
4837 case BUILT_IN_NEARBYINTF:
4838 case BUILT_IN_NEARBYINTL:
4839 target = expand_builtin_mathfn (exp, target, subtarget);
4847 if (! flag_unsafe_math_optimizations)
4849 target = expand_builtin_pow (exp, target, subtarget);
4854 case BUILT_IN_ATAN2:
4855 case BUILT_IN_ATAN2F:
4856 case BUILT_IN_ATAN2L:
4857 if (! flag_unsafe_math_optimizations)
4859 target = expand_builtin_mathfn_2 (exp, target, subtarget);
4864 case BUILT_IN_APPLY_ARGS:
4865 return expand_builtin_apply_args ();
4867 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
4868 FUNCTION with a copy of the parameters described by
4869 ARGUMENTS, and ARGSIZE. It returns a block of memory
4870 allocated on the stack into which is stored all the registers
4871 that might possibly be used for returning the result of a
4872 function. ARGUMENTS is the value returned by
4873 __builtin_apply_args. ARGSIZE is the number of bytes of
4874 arguments that must be copied. ??? How should this value be
4875 computed? We'll also need a safe worst case value for varargs
4877 case BUILT_IN_APPLY:
4878 if (!validate_arglist (arglist, POINTER_TYPE,
4879 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4880 && !validate_arglist (arglist, REFERENCE_TYPE,
4881 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4889 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
4890 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
4892 return expand_builtin_apply (ops[0], ops[1], ops[2]);
4895 /* __builtin_return (RESULT) causes the function to return the
4896 value described by RESULT. RESULT is address of the block of
4897 memory returned by __builtin_apply. */
4898 case BUILT_IN_RETURN:
4899 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4900 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
4901 NULL_RTX, VOIDmode, 0));
4904 case BUILT_IN_SAVEREGS:
4905 return expand_builtin_saveregs ();
4907 case BUILT_IN_ARGS_INFO:
4908 return expand_builtin_args_info (arglist);
4910 /* Return the address of the first anonymous stack arg. */
4911 case BUILT_IN_NEXT_ARG:
4912 return expand_builtin_next_arg (arglist);
4914 case BUILT_IN_CLASSIFY_TYPE:
4915 return expand_builtin_classify_type (arglist);
4917 case BUILT_IN_CONSTANT_P:
4918 return expand_builtin_constant_p (arglist, target_mode);
4920 case BUILT_IN_FRAME_ADDRESS:
4921 case BUILT_IN_RETURN_ADDRESS:
4922 return expand_builtin_frame_address (fndecl, arglist);
4924 /* Returns the address of the area where the structure is returned.
4926 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
4928 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
4929 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
4932 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
4934 case BUILT_IN_ALLOCA:
4935 target = expand_builtin_alloca (arglist, target);
4942 case BUILT_IN_FFSLL:
4943 target = expand_builtin_unop (target_mode, arglist, target,
4944 subtarget, ffs_optab);
4951 case BUILT_IN_CLZLL:
4952 target = expand_builtin_unop (target_mode, arglist, target,
4953 subtarget, clz_optab);
4960 case BUILT_IN_CTZLL:
4961 target = expand_builtin_unop (target_mode, arglist, target,
4962 subtarget, ctz_optab);
4967 case BUILT_IN_POPCOUNT:
4968 case BUILT_IN_POPCOUNTL:
4969 case BUILT_IN_POPCOUNTLL:
4970 target = expand_builtin_unop (target_mode, arglist, target,
4971 subtarget, popcount_optab);
4976 case BUILT_IN_PARITY:
4977 case BUILT_IN_PARITYL:
4978 case BUILT_IN_PARITYLL:
4979 target = expand_builtin_unop (target_mode, arglist, target,
4980 subtarget, parity_optab);
4985 case BUILT_IN_STRLEN:
4986 target = expand_builtin_strlen (arglist, target, target_mode);
4991 case BUILT_IN_STRCPY:
4992 target = expand_builtin_strcpy (arglist, target, mode);
4997 case BUILT_IN_STRNCPY:
4998 target = expand_builtin_strncpy (arglist, target, mode);
5003 case BUILT_IN_STPCPY:
5004 target = expand_builtin_stpcpy (arglist, target, mode);
5009 case BUILT_IN_STRCAT:
5010 target = expand_builtin_strcat (arglist, target, mode);
5015 case BUILT_IN_STRNCAT:
5016 target = expand_builtin_strncat (arglist, target, mode);
5021 case BUILT_IN_STRSPN:
5022 target = expand_builtin_strspn (arglist, target, mode);
5027 case BUILT_IN_STRCSPN:
5028 target = expand_builtin_strcspn (arglist, target, mode);
5033 case BUILT_IN_STRSTR:
5034 target = expand_builtin_strstr (arglist, target, mode);
5039 case BUILT_IN_STRPBRK:
5040 target = expand_builtin_strpbrk (arglist, target, mode);
5045 case BUILT_IN_INDEX:
5046 case BUILT_IN_STRCHR:
5047 target = expand_builtin_strchr (arglist, target, mode);
5052 case BUILT_IN_RINDEX:
5053 case BUILT_IN_STRRCHR:
5054 target = expand_builtin_strrchr (arglist, target, mode);
5059 case BUILT_IN_MEMCPY:
5060 target = expand_builtin_memcpy (arglist, target, mode);
5065 case BUILT_IN_MEMPCPY:
5066 target = expand_builtin_mempcpy (arglist, target, mode, /*endp=*/ 1);
5071 case BUILT_IN_MEMMOVE:
5072 target = expand_builtin_memmove (arglist, target, mode);
5077 case BUILT_IN_BCOPY:
5078 target = expand_builtin_bcopy (arglist);
5083 case BUILT_IN_MEMSET:
5084 target = expand_builtin_memset (arglist, target, mode);
5089 case BUILT_IN_BZERO:
5090 target = expand_builtin_bzero (arglist);
5095 case BUILT_IN_STRCMP:
5096 target = expand_builtin_strcmp (exp, target, mode);
5101 case BUILT_IN_STRNCMP:
5102 target = expand_builtin_strncmp (exp, target, mode);
5108 case BUILT_IN_MEMCMP:
5109 target = expand_builtin_memcmp (exp, arglist, target, mode);
5114 case BUILT_IN_SETJMP:
5115 target = expand_builtin_setjmp (arglist, target);
5120 /* __builtin_longjmp is passed a pointer to an array of five words.
5121 It's similar to the C library longjmp function but works with
5122 __builtin_setjmp above. */
5123 case BUILT_IN_LONGJMP:
5124 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5128 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
5130 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
5131 NULL_RTX, VOIDmode, 0);
5133 if (value != const1_rtx)
5135 error ("__builtin_longjmp second argument must be 1");
5139 expand_builtin_longjmp (buf_addr, value);
5144 expand_builtin_trap ();
5147 case BUILT_IN_FPUTS:
5148 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
5152 case BUILT_IN_FPUTS_UNLOCKED:
5153 target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
5158 case BUILT_IN_SPRINTF:
5159 target = expand_builtin_sprintf (arglist, target, mode);
5164 /* Various hooks for the DWARF 2 __throw routine. */
5165 case BUILT_IN_UNWIND_INIT:
5166 expand_builtin_unwind_init ();
5168 case BUILT_IN_DWARF_CFA:
5169 return virtual_cfa_rtx;
5170 #ifdef DWARF2_UNWIND_INFO
5171 case BUILT_IN_DWARF_SP_COLUMN:
5172 return expand_builtin_dwarf_sp_column ();
5173 case BUILT_IN_INIT_DWARF_REG_SIZES:
5174 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
5177 case BUILT_IN_FROB_RETURN_ADDR:
5178 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
5179 case BUILT_IN_EXTRACT_RETURN_ADDR:
5180 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
5181 case BUILT_IN_EH_RETURN:
5182 expand_builtin_eh_return (TREE_VALUE (arglist),
5183 TREE_VALUE (TREE_CHAIN (arglist)));
5185 #ifdef EH_RETURN_DATA_REGNO
5186 case BUILT_IN_EH_RETURN_DATA_REGNO:
5187 return expand_builtin_eh_return_data_regno (arglist);
5189 case BUILT_IN_VA_START:
5190 case BUILT_IN_STDARG_START:
5191 return expand_builtin_va_start (arglist);
5192 case BUILT_IN_VA_END:
5193 return expand_builtin_va_end (arglist);
5194 case BUILT_IN_VA_COPY:
5195 return expand_builtin_va_copy (arglist);
5196 case BUILT_IN_EXPECT:
5197 return expand_builtin_expect (arglist, target);
5198 case BUILT_IN_PREFETCH:
5199 expand_builtin_prefetch (arglist);
5203 default: /* just do library call, if unknown builtin */
5204 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
5205 error ("built-in function `%s' not currently supported",
5206 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
5209 /* The switch statement above can drop through to cause the function
5210 to be called normally. */
5211 return expand_call (exp, target, ignore);
5214 /* Determine whether a tree node represents a call to a built-in
5215 math function. If the tree T is a call to a built-in function
5216 taking a single real argument, then the return value is the
5217 DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT. Otherwise
5218 the return value is END_BUILTINS. */
5220 enum built_in_function
5221 builtin_mathfn_code (tree t)
5223 tree fndecl, arglist;
5225 if (TREE_CODE (t) != CALL_EXPR
5226 || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
5227 return END_BUILTINS;
5229 fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
5230 if (TREE_CODE (fndecl) != FUNCTION_DECL
5231 || ! DECL_BUILT_IN (fndecl)
5232 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5233 return END_BUILTINS;
5235 arglist = TREE_OPERAND (t, 1);
5237 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5238 return END_BUILTINS;
5240 arglist = TREE_CHAIN (arglist);
5241 switch (DECL_FUNCTION_CODE (fndecl))
5246 case BUILT_IN_ATAN2:
5247 case BUILT_IN_ATAN2F:
5248 case BUILT_IN_ATAN2L:
5250 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
5251 || TREE_CHAIN (arglist))
5252 return END_BUILTINS;
5257 return END_BUILTINS;
5261 return DECL_FUNCTION_CODE (fndecl);
5264 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
5265 constant. ARGLIST is the argument list of the call. */
5268 fold_builtin_constant_p (tree arglist)
5273 arglist = TREE_VALUE (arglist);
5275 /* We return 1 for a numeric type that's known to be a constant
5276 value at compile-time or for an aggregate type that's a
5277 literal constant. */
5278 STRIP_NOPS (arglist);
5280 /* If we know this is a constant, emit the constant of one. */
5281 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
5282 || (TREE_CODE (arglist) == CONSTRUCTOR
5283 && TREE_CONSTANT (arglist))
5284 || (TREE_CODE (arglist) == ADDR_EXPR
5285 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
5286 return integer_one_node;
5288 /* If we aren't going to be running CSE or this expression
5289 has side effects, show we don't know it to be a constant.
5290 Likewise if it's a pointer or aggregate type since in those
5291 case we only want literals, since those are only optimized
5292 when generating RTL, not later.
5293 And finally, if we are compiling an initializer, not code, we
5294 need to return a definite result now; there's not going to be any
5295 more optimization done. */
5296 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
5297 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
5298 || POINTER_TYPE_P (TREE_TYPE (arglist))
5300 return integer_zero_node;
5305 /* Fold a call to __builtin_classify_type. */
5308 fold_builtin_classify_type (tree arglist)
5311 return build_int_2 (no_type_class, 0);
5313 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
5316 /* Fold a call to __builtin_inf or __builtin_huge_val. */
5319 fold_builtin_inf (tree type, int warn)
5321 REAL_VALUE_TYPE real;
5323 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
5324 warning ("target format does not support infinity");
5327 return build_real (type, real);
5330 /* Fold a call to __builtin_nan or __builtin_nans. */
5333 fold_builtin_nan (tree arglist, tree type, int quiet)
5335 REAL_VALUE_TYPE real;
5338 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5340 str = c_getstr (TREE_VALUE (arglist));
5344 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
5347 return build_real (type, real);
5350 /* EXP is assumed to me builtin call where truncation can be propagated
5351 across (for instance floor((double)f) == (double)floorf (f).
5352 Do the transformation. */
5354 fold_trunc_transparent_mathfn (tree exp)
5356 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5357 tree arglist = TREE_OPERAND (exp, 1);
5358 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5360 if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5362 tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
5363 tree ftype = TREE_TYPE (exp);
5364 tree newtype = TREE_TYPE (arg0);
5367 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
5368 && (decl = mathfn_built_in (newtype, fcode)))
5371 build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
5372 return convert (ftype,
5373 build_function_call_expr (decl, arglist));
5379 /* Fold function call to builtin cabs, cabsf or cabsl. FNDECL is the
5380 function's DECL, ARGLIST is the argument list and TYPE is the return
5381 type. Return NULL_TREE if no simplification can be made. */
5384 fold_builtin_cabs (tree fndecl, tree arglist, tree type)
5388 if (!arglist || TREE_CHAIN (arglist))
5391 arg = TREE_VALUE (arglist);
5392 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
5393 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
5396 /* Evaluate cabs of a constant at compile-time. */
5397 if (flag_unsafe_math_optimizations
5398 && TREE_CODE (arg) == COMPLEX_CST
5399 && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
5400 && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
5401 && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
5402 && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
5404 REAL_VALUE_TYPE r, i;
5406 r = TREE_REAL_CST (TREE_REALPART (arg));
5407 i = TREE_REAL_CST (TREE_IMAGPART (arg));
5409 real_arithmetic (&r, MULT_EXPR, &r, &r);
5410 real_arithmetic (&i, MULT_EXPR, &i, &i);
5411 real_arithmetic (&r, PLUS_EXPR, &r, &i);
5412 if (real_sqrt (&r, TYPE_MODE (type), &r)
5413 || ! flag_trapping_math)
5414 return build_real (type, r);
5417 /* If either part is zero, cabs is fabs of the other. */
5418 if (TREE_CODE (arg) == COMPLEX_EXPR
5419 && real_zerop (TREE_OPERAND (arg, 0)))
5420 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
5421 if (TREE_CODE (arg) == COMPLEX_EXPR
5422 && real_zerop (TREE_OPERAND (arg, 1)))
5423 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));
5425 if (flag_unsafe_math_optimizations)
5427 enum built_in_function fcode;
5430 fcode = DECL_FUNCTION_CODE (fndecl);
5431 if (fcode == BUILT_IN_CABS)
5432 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5433 else if (fcode == BUILT_IN_CABSF)
5434 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5435 else if (fcode == BUILT_IN_CABSL)
5436 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5440 if (sqrtfn != NULL_TREE)
5442 tree rpart, ipart, result, arglist;
5444 rpart = fold (build1 (REALPART_EXPR, type, arg));
5445 ipart = fold (build1 (IMAGPART_EXPR, type, arg));
5447 rpart = save_expr (rpart);
5448 ipart = save_expr (ipart);
5450 result = fold (build (PLUS_EXPR, type,
5451 fold (build (MULT_EXPR, type,
5453 fold (build (MULT_EXPR, type,
5456 arglist = build_tree_list (NULL_TREE, result);
5457 return build_function_call_expr (sqrtfn, arglist);
5464 /* Used by constant folding to eliminate some builtin calls early. EXP is
5465 the CALL_EXPR of a call to a builtin function. */
5468 fold_builtin (tree exp)
5470 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5471 tree arglist = TREE_OPERAND (exp, 1);
5472 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5474 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5477 switch (DECL_FUNCTION_CODE (fndecl))
5479 case BUILT_IN_CONSTANT_P:
5480 return fold_builtin_constant_p (arglist);
5482 case BUILT_IN_CLASSIFY_TYPE:
5483 return fold_builtin_classify_type (arglist);
5485 case BUILT_IN_STRLEN:
5486 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5488 tree len = c_strlen (TREE_VALUE (arglist), 0);
5491 /* Convert from the internal "sizetype" type to "size_t". */
5493 len = convert (size_type_node, len);
5500 case BUILT_IN_FABSF:
5501 case BUILT_IN_FABSL:
5502 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5503 return fold (build1 (ABS_EXPR, type, TREE_VALUE (arglist)));
5507 case BUILT_IN_CABSF:
5508 case BUILT_IN_CABSL:
5509 return fold_builtin_cabs (fndecl, arglist, type);
5512 case BUILT_IN_SQRTF:
5513 case BUILT_IN_SQRTL:
5514 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5516 enum built_in_function fcode;
5517 tree arg = TREE_VALUE (arglist);
5519 /* Optimize sqrt of constant value. */
5520 if (TREE_CODE (arg) == REAL_CST
5521 && ! TREE_CONSTANT_OVERFLOW (arg))
5523 REAL_VALUE_TYPE r, x;
5525 x = TREE_REAL_CST (arg);
5526 if (real_sqrt (&r, TYPE_MODE (type), &x)
5527 || (!flag_trapping_math && !flag_errno_math))
5528 return build_real (type, r);
5531 /* Optimize sqrt(exp(x)) = exp(x*0.5). */
5532 fcode = builtin_mathfn_code (arg);
5533 if (flag_unsafe_math_optimizations
5534 && (fcode == BUILT_IN_EXP
5535 || fcode == BUILT_IN_EXPF
5536 || fcode == BUILT_IN_EXPL))
5538 tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5539 arg = fold (build (MULT_EXPR, type,
5540 TREE_VALUE (TREE_OPERAND (arg, 1)),
5541 build_real (type, dconsthalf)));
5542 arglist = build_tree_list (NULL_TREE, arg);
5543 return build_function_call_expr (expfn, arglist);
5546 /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5). */
5547 if (flag_unsafe_math_optimizations
5548 && (fcode == BUILT_IN_POW
5549 || fcode == BUILT_IN_POWF
5550 || fcode == BUILT_IN_POWL))
5552 tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
5553 tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5554 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5555 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5556 build_real (type, dconsthalf)));
5557 arglist = tree_cons (NULL_TREE, arg0,
5558 build_tree_list (NULL_TREE, narg1));
5559 return build_function_call_expr (powfn, arglist);
5567 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5569 tree arg = TREE_VALUE (arglist);
5571 /* Optimize sin(0.0) = 0.0. */
5572 if (real_zerop (arg))
5580 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5582 tree arg = TREE_VALUE (arglist);
5584 /* Optimize cos(0.0) = 1.0. */
5585 if (real_zerop (arg))
5586 return build_real (type, dconst1);
5588 /* Optimize cos(-x) into cos(x). */
5589 if (TREE_CODE (arg) == NEGATE_EXPR)
5591 tree arglist = build_tree_list (NULL_TREE,
5592 TREE_OPERAND (arg, 0));
5593 return build_function_call_expr (fndecl, arglist);
5601 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5603 enum built_in_function fcode;
5604 tree arg = TREE_VALUE (arglist);
5606 /* Optimize exp(0.0) = 1.0. */
5607 if (real_zerop (arg))
5608 return build_real (type, dconst1);
5610 /* Optimize exp(1.0) = e. */
5611 if (real_onep (arg))
5613 REAL_VALUE_TYPE cst;
5615 if (! builtin_dconsts_init)
5616 init_builtin_dconsts ();
5617 real_convert (&cst, TYPE_MODE (type), &dconste);
5618 return build_real (type, cst);
5621 /* Attempt to evaluate exp at compile-time. */
5622 if (flag_unsafe_math_optimizations
5623 && TREE_CODE (arg) == REAL_CST
5624 && ! TREE_CONSTANT_OVERFLOW (arg))
5626 REAL_VALUE_TYPE cint;
5630 c = TREE_REAL_CST (arg);
5631 n = real_to_integer (&c);
5632 real_from_integer (&cint, VOIDmode, n,
5634 if (real_identical (&c, &cint))
5638 if (! builtin_dconsts_init)
5639 init_builtin_dconsts ();
5640 real_powi (&x, TYPE_MODE (type), &dconste, n);
5641 return build_real (type, x);
5645 /* Optimize exp(log(x)) = x. */
5646 fcode = builtin_mathfn_code (arg);
5647 if (flag_unsafe_math_optimizations
5648 && (fcode == BUILT_IN_LOG
5649 || fcode == BUILT_IN_LOGF
5650 || fcode == BUILT_IN_LOGL))
5651 return TREE_VALUE (TREE_OPERAND (arg, 1));
5658 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5660 enum built_in_function fcode;
5661 tree arg = TREE_VALUE (arglist);
5663 /* Optimize log(1.0) = 0.0. */
5664 if (real_onep (arg))
5665 return build_real (type, dconst0);
5667 /* Optimize log(exp(x)) = x. */
5668 fcode = builtin_mathfn_code (arg);
5669 if (flag_unsafe_math_optimizations
5670 && (fcode == BUILT_IN_EXP
5671 || fcode == BUILT_IN_EXPF
5672 || fcode == BUILT_IN_EXPL))
5673 return TREE_VALUE (TREE_OPERAND (arg, 1));
5675 /* Optimize log(sqrt(x)) = log(x)*0.5. */
5676 if (flag_unsafe_math_optimizations
5677 && (fcode == BUILT_IN_SQRT
5678 || fcode == BUILT_IN_SQRTF
5679 || fcode == BUILT_IN_SQRTL))
5681 tree logfn = build_function_call_expr (fndecl,
5682 TREE_OPERAND (arg, 1));
5683 return fold (build (MULT_EXPR, type, logfn,
5684 build_real (type, dconsthalf)));
5687 /* Optimize log(pow(x,y)) = y*log(x). */
5688 if (flag_unsafe_math_optimizations
5689 && (fcode == BUILT_IN_POW
5690 || fcode == BUILT_IN_POWF
5691 || fcode == BUILT_IN_POWL))
5693 tree arg0, arg1, logfn;
5695 arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
5696 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
5697 arglist = build_tree_list (NULL_TREE, arg0);
5698 logfn = build_function_call_expr (fndecl, arglist);
5699 return fold (build (MULT_EXPR, type, arg1, logfn));
5707 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5709 enum built_in_function fcode;
5710 tree arg = TREE_VALUE (arglist);
5712 /* Optimize tan(0.0) = 0.0. */
5713 if (real_zerop (arg))
5716 /* Optimize tan(atan(x)) = x. */
5717 fcode = builtin_mathfn_code (arg);
5718 if (flag_unsafe_math_optimizations
5719 && (fcode == BUILT_IN_ATAN
5720 || fcode == BUILT_IN_ATANF
5721 || fcode == BUILT_IN_ATANL))
5722 return TREE_VALUE (TREE_OPERAND (arg, 1));
5727 case BUILT_IN_ATANF:
5728 case BUILT_IN_ATANL:
5729 if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5731 tree arg = TREE_VALUE (arglist);
5733 /* Optimize atan(0.0) = 0.0. */
5734 if (real_zerop (arg))
5737 /* Optimize atan(1.0) = pi/4. */
5738 if (real_onep (arg))
5740 REAL_VALUE_TYPE cst;
5742 if (! builtin_dconsts_init)
5743 init_builtin_dconsts ();
5744 real_convert (&cst, TYPE_MODE (type), &dconstpi);
5746 return build_real (type, cst);
5754 if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5756 enum built_in_function fcode;
5757 tree arg0 = TREE_VALUE (arglist);
5758 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5760 /* Optimize pow(1.0,y) = 1.0. */
5761 if (real_onep (arg0))
5762 return omit_one_operand (type, build_real (type, dconst1), arg1);
5764 if (TREE_CODE (arg1) == REAL_CST
5765 && ! TREE_CONSTANT_OVERFLOW (arg1))
5768 c = TREE_REAL_CST (arg1);
5770 /* Optimize pow(x,0.0) = 1.0. */
5771 if (REAL_VALUES_EQUAL (c, dconst0))
5772 return omit_one_operand (type, build_real (type, dconst1),
5775 /* Optimize pow(x,1.0) = x. */
5776 if (REAL_VALUES_EQUAL (c, dconst1))
5779 /* Optimize pow(x,-1.0) = 1.0/x. */
5780 if (REAL_VALUES_EQUAL (c, dconstm1))
5781 return fold (build (RDIV_EXPR, type,
5782 build_real (type, dconst1),
5785 /* Optimize pow(x,2.0) = x*x. */
5786 if (REAL_VALUES_EQUAL (c, dconst2)
5787 && (*lang_hooks.decls.global_bindings_p) () == 0
5788 && ! CONTAINS_PLACEHOLDER_P (arg0))
5790 arg0 = save_expr (arg0);
5791 return fold (build (MULT_EXPR, type, arg0, arg0));
5794 /* Optimize pow(x,-2.0) = 1.0/(x*x). */
5795 if (flag_unsafe_math_optimizations
5796 && REAL_VALUES_EQUAL (c, dconstm2)
5797 && (*lang_hooks.decls.global_bindings_p) () == 0
5798 && ! CONTAINS_PLACEHOLDER_P (arg0))
5800 arg0 = save_expr (arg0);
5801 return fold (build (RDIV_EXPR, type,
5802 build_real (type, dconst1),
5803 fold (build (MULT_EXPR, type,
5807 /* Optimize pow(x,0.5) = sqrt(x). */
5808 if (flag_unsafe_math_optimizations
5809 && REAL_VALUES_EQUAL (c, dconsthalf))
5813 fcode = DECL_FUNCTION_CODE (fndecl);
5814 if (fcode == BUILT_IN_POW)
5815 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
5816 else if (fcode == BUILT_IN_POWF)
5817 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
5818 else if (fcode == BUILT_IN_POWL)
5819 sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
5823 if (sqrtfn != NULL_TREE)
5825 tree arglist = build_tree_list (NULL_TREE, arg0);
5826 return build_function_call_expr (sqrtfn, arglist);
5830 /* Attempt to evaluate pow at compile-time. */
5831 if (TREE_CODE (arg0) == REAL_CST
5832 && ! TREE_CONSTANT_OVERFLOW (arg0))
5834 REAL_VALUE_TYPE cint;
5837 n = real_to_integer (&c);
5838 real_from_integer (&cint, VOIDmode, n,
5840 if (real_identical (&c, &cint))
5845 x = TREE_REAL_CST (arg0);
5846 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
5847 if (flag_unsafe_math_optimizations || !inexact)
5848 return build_real (type, x);
5853 /* Optimize pow(exp(x),y) = exp(x*y). */
5854 fcode = builtin_mathfn_code (arg0);
5855 if (flag_unsafe_math_optimizations
5856 && (fcode == BUILT_IN_EXP
5857 || fcode == BUILT_IN_EXPF
5858 || fcode == BUILT_IN_EXPL))
5860 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
5861 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5862 arg = fold (build (MULT_EXPR, type, arg, arg1));
5863 arglist = build_tree_list (NULL_TREE, arg);
5864 return build_function_call_expr (expfn, arglist);
5867 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
5868 if (flag_unsafe_math_optimizations
5869 && (fcode == BUILT_IN_SQRT
5870 || fcode == BUILT_IN_SQRTF
5871 || fcode == BUILT_IN_SQRTL))
5873 tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5874 tree narg1 = fold (build (MULT_EXPR, type, arg1,
5875 build_real (type, dconsthalf)));
5877 arglist = tree_cons (NULL_TREE, narg0,
5878 build_tree_list (NULL_TREE, narg1));
5879 return build_function_call_expr (fndecl, arglist);
5882 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
5883 if (flag_unsafe_math_optimizations
5884 && (fcode == BUILT_IN_POW
5885 || fcode == BUILT_IN_POWF
5886 || fcode == BUILT_IN_POWL))
5888 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
5889 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
5890 tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
5891 arglist = tree_cons (NULL_TREE, arg00,
5892 build_tree_list (NULL_TREE, narg1));
5893 return build_function_call_expr (fndecl, arglist);
5901 return fold_builtin_inf (type, true);
5903 case BUILT_IN_HUGE_VAL:
5904 case BUILT_IN_HUGE_VALF:
5905 case BUILT_IN_HUGE_VALL:
5906 return fold_builtin_inf (type, false);
5911 return fold_builtin_nan (arglist, type, true);
5914 case BUILT_IN_NANSF:
5915 case BUILT_IN_NANSL:
5916 return fold_builtin_nan (arglist, type, false);
5918 case BUILT_IN_FLOOR:
5919 case BUILT_IN_FLOORF:
5920 case BUILT_IN_FLOORL:
5922 case BUILT_IN_CEILF:
5923 case BUILT_IN_CEILL:
5924 case BUILT_IN_TRUNC:
5925 case BUILT_IN_TRUNCF:
5926 case BUILT_IN_TRUNCL:
5927 case BUILT_IN_ROUND:
5928 case BUILT_IN_ROUNDF:
5929 case BUILT_IN_ROUNDL:
5930 case BUILT_IN_NEARBYINT:
5931 case BUILT_IN_NEARBYINTF:
5932 case BUILT_IN_NEARBYINTL:
5933 return fold_trunc_transparent_mathfn (exp);
5942 /* Conveniently construct a function call expression. */
5945 build_function_call_expr (tree fn, tree arglist)
5949 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
5950 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
5951 call_expr, arglist);
5952 TREE_SIDE_EFFECTS (call_expr) = 1;
5953 return fold (call_expr);
5956 /* This function validates the types of a function call argument list
5957 represented as a tree chain of parameters against a specified list
5958 of tree_codes. If the last specifier is a 0, that represents an
5959 ellipses, otherwise the last specifier must be a VOID_TYPE. */
5962 validate_arglist (tree arglist, ...)
5964 enum tree_code code;
5968 va_start (ap, arglist);
5972 code = va_arg (ap, enum tree_code);
5976 /* This signifies an ellipses, any further arguments are all ok. */
5980 /* This signifies an endlink, if no arguments remain, return
5981 true, otherwise return false. */
5985 /* If no parameters remain or the parameter's code does not
5986 match the specified code, return false. Otherwise continue
5987 checking any remaining arguments. */
5989 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
5993 arglist = TREE_CHAIN (arglist);
5997 /* We need gotos here since we can only have one VA_CLOSE in a
6005 /* Default version of target-specific builtin setup that does nothing. */
6008 default_init_builtins (void)
6012 /* Default target-specific builtin expander that does nothing. */
6015 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
6016 rtx target ATTRIBUTE_UNUSED,
6017 rtx subtarget ATTRIBUTE_UNUSED,
6018 enum machine_mode mode ATTRIBUTE_UNUSED,
6019 int ignore ATTRIBUTE_UNUSED)
6024 /* Instantiate all remaining CONSTANT_P_RTX nodes. */
6027 purge_builtin_constant_p (void)
6029 rtx insn, set, arg, new, note;
6031 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6033 && (set = single_set (insn)) != NULL_RTX
6034 && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
6035 || (GET_CODE (arg) == SUBREG
6036 && (GET_CODE (arg = SUBREG_REG (arg))
6037 == CONSTANT_P_RTX))))
6039 arg = XEXP (arg, 0);
6040 new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
6041 validate_change (insn, &SET_SRC (set), new, 0);
6043 /* Remove the REG_EQUAL note from the insn. */
6044 if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
6045 remove_note (insn, note);
6049 /* Returns true is EXP represents data that would potentially reside
6050 in a readonly section. */
6053 readonly_data_expr (tree exp)
6057 if (TREE_CODE (exp) == ADDR_EXPR)
6058 return decl_readonly_section (TREE_OPERAND (exp, 0), 0);