1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
41 #include "typeclass.h"
46 #include "langhooks.h"
48 #define CALLED_AS_BUILT_IN(NODE) \
49 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
51 /* Register mappings for target machines without register windows. */
52 #ifndef INCOMING_REGNO
53 #define INCOMING_REGNO(OUT) (OUT)
55 #ifndef OUTGOING_REGNO
56 #define OUTGOING_REGNO(IN) (IN)
59 #ifndef PAD_VARARGS_DOWN
60 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
68 const char *const built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases. */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static int get_pointer_alignment (tree, unsigned int);
83 static tree c_strlen (tree, int);
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree);
88 static tree build_string_literal (int, const char *);
89 static int apply_args_size (void);
90 static int apply_result_size (void);
91 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
92 static rtx result_vector (int, rtx);
94 static rtx expand_builtin_setjmp (tree, rtx);
95 static void expand_builtin_prefetch (tree);
96 static rtx expand_builtin_apply_args (void);
97 static rtx expand_builtin_apply_args_1 (void);
98 static rtx expand_builtin_apply (rtx, rtx, rtx);
99 static void expand_builtin_return (rtx);
100 static enum type_class type_to_class (tree);
101 static rtx expand_builtin_classify_type (tree);
102 static void expand_errno_check (tree, rtx);
103 static rtx expand_builtin_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
105 static rtx expand_builtin_constant_p (tree, enum machine_mode);
106 static rtx expand_builtin_args_info (tree);
107 static rtx expand_builtin_next_arg (tree);
108 static rtx expand_builtin_va_start (tree);
109 static rtx expand_builtin_va_end (tree);
110 static rtx expand_builtin_va_copy (tree);
111 static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
112 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
113 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
114 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
115 static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
122 static rtx expand_builtin_bcopy (tree);
123 static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
125 static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
127 static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
129 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
133 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_alloca (tree, rtx);
137 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
138 static rtx expand_builtin_frame_address (tree, tree);
139 static rtx expand_builtin_fputs (tree, rtx, bool);
140 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
141 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
142 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
143 static tree stabilize_va_list (tree, int);
144 static rtx expand_builtin_expect (tree, rtx);
145 static tree fold_builtin_constant_p (tree);
146 static tree fold_builtin_classify_type (tree);
147 static tree fold_builtin_inf (tree, int);
148 static tree fold_builtin_nan (tree, tree, int);
149 static int validate_arglist (tree, ...);
150 static bool integer_valued_real_p (tree);
151 static tree fold_trunc_transparent_mathfn (tree);
152 static bool readonly_data_expr (tree);
153 static rtx expand_builtin_fabs (tree, rtx, rtx);
154 static rtx expand_builtin_cabs (tree, rtx);
155 static rtx expand_builtin_signbit (tree, rtx);
156 static tree fold_builtin_cabs (tree, tree, tree);
157 static tree fold_builtin_trunc (tree);
158 static tree fold_builtin_floor (tree);
159 static tree fold_builtin_ceil (tree);
160 static tree fold_builtin_round (tree);
161 static tree fold_builtin_bitop (tree);
162 static tree fold_builtin_memcpy (tree);
163 static tree fold_builtin_mempcpy (tree);
164 static tree fold_builtin_memmove (tree);
165 static tree fold_builtin_strcpy (tree);
166 static tree fold_builtin_strncpy (tree);
167 static tree fold_builtin_memcmp (tree);
168 static tree fold_builtin_strcmp (tree);
169 static tree fold_builtin_strncmp (tree);
170 static tree fold_builtin_signbit (tree);
172 /* Return the alignment in bits of EXP, a pointer valued expression.
173 But don't return more than MAX_ALIGN no matter what.
174 The alignment returned is, by default, the alignment of the thing that
175 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
177 Otherwise, look at the expression to see if we can do better, i.e., if the
178 expression is actually pointing at an object whose alignment is tighter. */
181 get_pointer_alignment (tree exp, unsigned int max_align)
183 unsigned int align, inner;
185 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
188 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
189 align = MIN (align, max_align);
193 switch (TREE_CODE (exp))
197 case NON_LVALUE_EXPR:
198 exp = TREE_OPERAND (exp, 0);
199 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
202 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
203 align = MIN (inner, max_align);
207 /* If sum of pointer + int, restrict our maximum alignment to that
208 imposed by the integer. If not, we can't do any better than
210 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
213 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
214 & (max_align / BITS_PER_UNIT - 1))
218 exp = TREE_OPERAND (exp, 0);
222 /* See what we are pointing at and look at its alignment. */
223 exp = TREE_OPERAND (exp, 0);
224 if (TREE_CODE (exp) == FUNCTION_DECL)
225 align = FUNCTION_BOUNDARY;
226 else if (DECL_P (exp))
227 align = DECL_ALIGN (exp);
228 #ifdef CONSTANT_ALIGNMENT
229 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
230 align = CONSTANT_ALIGNMENT (exp, align);
232 return MIN (align, max_align);
240 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
241 way, because it could contain a zero byte in the middle.
242 TREE_STRING_LENGTH is the size of the character array, not the string.
244 ONLY_VALUE should be nonzero if the result is not going to be emitted
245 into the instruction stream and zero if it is going to be expanded.
246 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
247 is returned, otherwise NULL, since
248 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
249 evaluate the side-effects.
251 The value returned is of type `ssizetype'.
253 Unfortunately, string_constant can't access the values of const char
254 arrays with initializers, so neither can we do so here. */
257 c_strlen (tree src, int only_value)
260 HOST_WIDE_INT offset;
265 if (TREE_CODE (src) == COND_EXPR
266 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
270 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
271 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
272 if (tree_int_cst_equal (len1, len2))
276 if (TREE_CODE (src) == COMPOUND_EXPR
277 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
278 return c_strlen (TREE_OPERAND (src, 1), only_value);
280 src = string_constant (src, &offset_node);
284 max = TREE_STRING_LENGTH (src) - 1;
285 ptr = TREE_STRING_POINTER (src);
287 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
289 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
290 compute the offset to the following null if we don't know where to
291 start searching for it. */
294 for (i = 0; i < max; i++)
298 /* We don't know the starting offset, but we do know that the string
299 has no internal zero bytes. We can assume that the offset falls
300 within the bounds of the string; otherwise, the programmer deserves
301 what he gets. Subtract the offset from the length of the string,
302 and return that. This would perhaps not be valid if we were dealing
303 with named arrays in addition to literal string constants. */
305 return size_diffop (size_int (max), offset_node);
308 /* We have a known offset into the string. Start searching there for
309 a null character if we can represent it as a single HOST_WIDE_INT. */
310 if (offset_node == 0)
312 else if (! host_integerp (offset_node, 0))
315 offset = tree_low_cst (offset_node, 0);
317 /* If the offset is known to be out of bounds, warn, and call strlen at
319 if (offset < 0 || offset > max)
321 warning ("offset outside bounds of constant string");
325 /* Use strlen to search for the first zero byte. Since any strings
326 constructed with build_string will have nulls appended, we win even
327 if we get handed something like (char[4])"abcd".
329 Since OFFSET is our starting index into the string, no further
330 calculation is needed. */
331 return ssize_int (strlen (ptr + offset));
334 /* Return a char pointer for a C string if it is a string constant
335 or sum of string constant and integer constant. */
342 src = string_constant (src, &offset_node);
346 if (offset_node == 0)
347 return TREE_STRING_POINTER (src);
348 else if (!host_integerp (offset_node, 1)
349 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
352 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
355 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
356 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
359 c_readstr (const char *str, enum machine_mode mode)
365 if (GET_MODE_CLASS (mode) != MODE_INT)
370 for (i = 0; i < GET_MODE_SIZE (mode); i++)
373 if (WORDS_BIG_ENDIAN)
374 j = GET_MODE_SIZE (mode) - i - 1;
375 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
376 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
377 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
379 if (j > 2 * HOST_BITS_PER_WIDE_INT)
382 ch = (unsigned char) str[i];
383 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
385 return immed_double_const (c[0], c[1], mode);
388 /* Cast a target constant CST to target CHAR and if that value fits into
389 host char type, return zero and put that value into variable pointed by
393 target_char_cast (tree cst, char *p)
395 unsigned HOST_WIDE_INT val, hostval;
397 if (!host_integerp (cst, 1)
398 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
401 val = tree_low_cst (cst, 1);
402 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
403 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
406 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
407 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
416 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
417 times to get the address of either a higher stack frame, or a return
418 address located within it (depending on FNDECL_CODE). */
421 expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
426 /* Some machines need special handling before we can access
427 arbitrary frames. For example, on the sparc, we must first flush
428 all register windows to the stack. */
429 #ifdef SETUP_FRAME_ADDRESSES
431 SETUP_FRAME_ADDRESSES ();
434 /* On the sparc, the return address is not in the frame, it is in a
435 register. There is no way to access it off of the current frame
436 pointer, but it can be accessed off the previous frame pointer by
437 reading the value from the register window save area. */
438 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
439 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
443 /* Scan back COUNT frames to the specified frame. */
444 for (i = 0; i < count; i++)
446 /* Assume the dynamic chain pointer is in the word that the
447 frame address points to, unless otherwise specified. */
448 #ifdef DYNAMIC_CHAIN_ADDRESS
449 tem = DYNAMIC_CHAIN_ADDRESS (tem);
451 tem = memory_address (Pmode, tem);
452 tem = gen_rtx_MEM (Pmode, tem);
453 set_mem_alias_set (tem, get_frame_alias_set ());
454 tem = copy_to_reg (tem);
457 /* For __builtin_frame_address, return what we've got. */
458 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
461 /* For __builtin_return_address, Get the return address from that
463 #ifdef RETURN_ADDR_RTX
464 tem = RETURN_ADDR_RTX (count, tem);
466 tem = memory_address (Pmode,
467 plus_constant (tem, GET_MODE_SIZE (Pmode)));
468 tem = gen_rtx_MEM (Pmode, tem);
469 set_mem_alias_set (tem, get_frame_alias_set ());
474 /* Alias set used for setjmp buffer. */
475 static HOST_WIDE_INT setjmp_alias_set = -1;
477 /* Construct the leading half of a __builtin_setjmp call. Control will
478 return to RECEIVER_LABEL. This is used directly by sjlj exception
482 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
484 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
488 if (setjmp_alias_set == -1)
489 setjmp_alias_set = new_alias_set ();
491 buf_addr = convert_memory_address (Pmode, buf_addr);
493 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
497 /* We store the frame pointer and the address of receiver_label in
498 the buffer and use the rest of it for the stack save area, which
499 is machine-dependent. */
501 #ifndef BUILTIN_SETJMP_FRAME_VALUE
502 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
505 mem = gen_rtx_MEM (Pmode, buf_addr);
506 set_mem_alias_set (mem, setjmp_alias_set);
507 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
509 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
510 set_mem_alias_set (mem, setjmp_alias_set);
512 emit_move_insn (validize_mem (mem),
513 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
515 stack_save = gen_rtx_MEM (sa_mode,
516 plus_constant (buf_addr,
517 2 * GET_MODE_SIZE (Pmode)));
518 set_mem_alias_set (stack_save, setjmp_alias_set);
519 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
521 /* If there is further processing to do, do it. */
522 #ifdef HAVE_builtin_setjmp_setup
523 if (HAVE_builtin_setjmp_setup)
524 emit_insn (gen_builtin_setjmp_setup (buf_addr));
527 /* Tell optimize_save_area_alloca that extra work is going to
528 need to go on during alloca. */
529 current_function_calls_setjmp = 1;
531 /* Set this so all the registers get saved in our frame; we need to be
532 able to copy the saved values for any registers from frames we unwind. */
533 current_function_has_nonlocal_label = 1;
536 /* Construct the trailing part of a __builtin_setjmp call.
537 This is used directly by sjlj exception handling code. */
540 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
542 /* Clobber the FP when we get here, so we have to make sure it's
543 marked as used by this function. */
544 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
546 /* Mark the static chain as clobbered here so life information
547 doesn't get messed up for it. */
548 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
550 /* Now put in the code to restore the frame pointer, and argument
551 pointer, if needed. The code below is from expand_end_bindings
552 in stmt.c; see detailed documentation there. */
553 #ifdef HAVE_nonlocal_goto
554 if (! HAVE_nonlocal_goto)
556 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
558 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
559 if (fixed_regs[ARG_POINTER_REGNUM])
561 #ifdef ELIMINABLE_REGS
563 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
565 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
566 if (elim_regs[i].from == ARG_POINTER_REGNUM
567 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
570 if (i == ARRAY_SIZE (elim_regs))
573 /* Now restore our arg pointer from the address at which it
574 was saved in our stack frame. */
575 emit_move_insn (virtual_incoming_args_rtx,
576 copy_to_reg (get_arg_pointer_save_area (cfun)));
581 #ifdef HAVE_builtin_setjmp_receiver
582 if (HAVE_builtin_setjmp_receiver)
583 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
586 #ifdef HAVE_nonlocal_goto_receiver
587 if (HAVE_nonlocal_goto_receiver)
588 emit_insn (gen_nonlocal_goto_receiver ());
593 /* @@@ This is a kludge. Not all machine descriptions define a blockage
594 insn, but we must not allow the code we just generated to be reordered
595 by scheduling. Specifically, the update of the frame pointer must
596 happen immediately, not later. So emit an ASM_INPUT to act as blockage
598 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
601 /* __builtin_setjmp is passed a pointer to an array of five words (not
602 all will be used on all machines). It operates similarly to the C
603 library function of the same name, but is more efficient. Much of
604 the code below (and for longjmp) is copied from the handling of
607 NOTE: This is intended for use by GNAT and the exception handling
608 scheme in the compiler and will only work in the method used by
612 expand_builtin_setjmp (tree arglist, rtx target)
614 rtx buf_addr, next_lab, cont_lab;
616 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
619 if (target == 0 || GET_CODE (target) != REG
620 || REGNO (target) < FIRST_PSEUDO_REGISTER)
621 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
623 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
625 next_lab = gen_label_rtx ();
626 cont_lab = gen_label_rtx ();
628 expand_builtin_setjmp_setup (buf_addr, next_lab);
630 /* Set TARGET to zero and branch to the continue label. Use emit_jump to
631 ensure that pending stack adjustments are flushed. */
632 emit_move_insn (target, const0_rtx);
633 emit_jump (cont_lab);
635 emit_label (next_lab);
637 expand_builtin_setjmp_receiver (next_lab);
639 /* Set TARGET to one. */
640 emit_move_insn (target, const1_rtx);
641 emit_label (cont_lab);
643 /* Tell flow about the strange goings on. Putting `next_lab' on
644 `nonlocal_goto_handler_labels' to indicates that function
645 calls may traverse the arc back to this label. */
647 current_function_has_nonlocal_label = 1;
648 nonlocal_goto_handler_labels
649 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
654 /* __builtin_longjmp is passed a pointer to an array of five words (not
655 all will be used on all machines). It operates similarly to the C
656 library function of the same name, but is more efficient. Much of
657 the code below is copied from the handling of non-local gotos.
659 NOTE: This is intended for use by GNAT and the exception handling
660 scheme in the compiler and will only work in the method used by
664 expand_builtin_longjmp (rtx buf_addr, rtx value)
666 rtx fp, lab, stack, insn, last;
667 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
669 if (setjmp_alias_set == -1)
670 setjmp_alias_set = new_alias_set ();
672 buf_addr = convert_memory_address (Pmode, buf_addr);
674 buf_addr = force_reg (Pmode, buf_addr);
676 /* We used to store value in static_chain_rtx, but that fails if pointers
677 are smaller than integers. We instead require that the user must pass
678 a second argument of 1, because that is what builtin_setjmp will
679 return. This also makes EH slightly more efficient, since we are no
680 longer copying around a value that we don't care about. */
681 if (value != const1_rtx)
684 current_function_calls_longjmp = 1;
686 last = get_last_insn ();
687 #ifdef HAVE_builtin_longjmp
688 if (HAVE_builtin_longjmp)
689 emit_insn (gen_builtin_longjmp (buf_addr));
693 fp = gen_rtx_MEM (Pmode, buf_addr);
694 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
695 GET_MODE_SIZE (Pmode)));
697 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
698 2 * GET_MODE_SIZE (Pmode)));
699 set_mem_alias_set (fp, setjmp_alias_set);
700 set_mem_alias_set (lab, setjmp_alias_set);
701 set_mem_alias_set (stack, setjmp_alias_set);
703 /* Pick up FP, label, and SP from the block and jump. This code is
704 from expand_goto in stmt.c; see there for detailed comments. */
705 #if HAVE_nonlocal_goto
706 if (HAVE_nonlocal_goto)
707 /* We have to pass a value to the nonlocal_goto pattern that will
708 get copied into the static_chain pointer, but it does not matter
709 what that value is, because builtin_setjmp does not use it. */
710 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
714 lab = copy_to_reg (lab);
716 emit_insn (gen_rtx_CLOBBER (VOIDmode,
717 gen_rtx_MEM (BLKmode,
718 gen_rtx_SCRATCH (VOIDmode))));
719 emit_insn (gen_rtx_CLOBBER (VOIDmode,
720 gen_rtx_MEM (BLKmode,
721 hard_frame_pointer_rtx)));
723 emit_move_insn (hard_frame_pointer_rtx, fp);
724 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
726 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
727 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
728 emit_indirect_jump (lab);
732 /* Search backwards and mark the jump insn as a non-local goto.
733 Note that this precludes the use of __builtin_longjmp to a
734 __builtin_setjmp target in the same function. However, we've
735 already cautioned the user that these functions are for
736 internal exception handling use only. */
737 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
741 if (GET_CODE (insn) == JUMP_INSN)
743 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
747 else if (GET_CODE (insn) == CALL_INSN)
752 /* Expand a call to __builtin_prefetch. For a target that does not support
753 data prefetch, evaluate the memory address argument in case it has side
757 expand_builtin_prefetch (tree arglist)
759 tree arg0, arg1, arg2;
762 if (!validate_arglist (arglist, POINTER_TYPE, 0))
765 arg0 = TREE_VALUE (arglist);
766 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
767 zero (read) and argument 2 (locality) defaults to 3 (high degree of
769 if (TREE_CHAIN (arglist))
771 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
772 if (TREE_CHAIN (TREE_CHAIN (arglist)))
773 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
775 arg2 = build_int_2 (3, 0);
779 arg1 = integer_zero_node;
780 arg2 = build_int_2 (3, 0);
783 /* Argument 0 is an address. */
784 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
786 /* Argument 1 (read/write flag) must be a compile-time constant int. */
787 if (TREE_CODE (arg1) != INTEGER_CST)
789 error ("second arg to `__builtin_prefetch' must be a constant");
790 arg1 = integer_zero_node;
792 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
793 /* Argument 1 must be either zero or one. */
794 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
796 warning ("invalid second arg to __builtin_prefetch; using zero");
800 /* Argument 2 (locality) must be a compile-time constant int. */
801 if (TREE_CODE (arg2) != INTEGER_CST)
803 error ("third arg to `__builtin_prefetch' must be a constant");
804 arg2 = integer_zero_node;
806 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
807 /* Argument 2 must be 0, 1, 2, or 3. */
808 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
810 warning ("invalid third arg to __builtin_prefetch; using zero");
817 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
819 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
820 || (GET_MODE (op0) != Pmode))
822 op0 = convert_memory_address (Pmode, op0);
823 op0 = force_reg (Pmode, op0);
825 emit_insn (gen_prefetch (op0, op1, op2));
829 op0 = protect_from_queue (op0, 0);
830 /* Don't do anything with direct references to volatile memory, but
831 generate code to handle other side effects. */
832 if (GET_CODE (op0) != MEM && side_effects_p (op0))
836 /* Get a MEM rtx for expression EXP which is the address of an operand
837 to be used to be used in a string instruction (cmpstrsi, movstrsi, ..). */
840 get_memory_rtx (tree exp)
842 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
845 addr = convert_memory_address (Pmode, addr);
847 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
849 /* Get an expression we can use to find the attributes to assign to MEM.
850 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
851 we can. First remove any nops. */
852 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
853 || TREE_CODE (exp) == NON_LVALUE_EXPR)
854 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
855 exp = TREE_OPERAND (exp, 0);
857 if (TREE_CODE (exp) == ADDR_EXPR)
859 exp = TREE_OPERAND (exp, 0);
860 set_mem_attributes (mem, exp, 0);
862 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
864 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
865 /* memcpy, memset and other builtin stringops can alias with anything. */
866 set_mem_alias_set (mem, 0);
872 /* Built-in functions to perform an untyped call and return. */
874 /* For each register that may be used for calling a function, this
875 gives a mode used to copy the register's value. VOIDmode indicates
876 the register is not used for calling a function. If the machine
877 has register windows, this gives only the outbound registers.
878 INCOMING_REGNO gives the corresponding inbound register. */
879 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
881 /* For each register that may be used for returning values, this gives
882 a mode used to copy the register's value. VOIDmode indicates the
883 register is not used for returning values. If the machine has
884 register windows, this gives only the outbound registers.
885 INCOMING_REGNO gives the corresponding inbound register. */
886 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
888 /* For each register that may be used for calling a function, this
889 gives the offset of that register into the block returned by
890 __builtin_apply_args. 0 indicates that the register is not
891 used for calling a function. */
892 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
894 /* Return the offset of register REGNO into the block returned by
895 __builtin_apply_args. This is not declared static, since it is
896 needed in objc-act.c. */
899 apply_args_register_offset (int regno)
903 /* Arguments are always put in outgoing registers (in the argument
904 block) if such make sense. */
905 #ifdef OUTGOING_REGNO
906 regno = OUTGOING_REGNO (regno);
908 return apply_args_reg_offset[regno];
911 /* Return the size required for the block returned by __builtin_apply_args,
912 and initialize apply_args_mode. */
915 apply_args_size (void)
917 static int size = -1;
920 enum machine_mode mode;
922 /* The values computed by this function never change. */
925 /* The first value is the incoming arg-pointer. */
926 size = GET_MODE_SIZE (Pmode);
928 /* The second value is the structure value address unless this is
929 passed as an "invisible" first argument. */
930 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
931 size += GET_MODE_SIZE (Pmode);
933 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
934 if (FUNCTION_ARG_REGNO_P (regno))
936 /* Search for the proper mode for copying this register's
937 value. I'm not sure this is right, but it works so far. */
938 enum machine_mode best_mode = VOIDmode;
940 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
942 mode = GET_MODE_WIDER_MODE (mode))
943 if (HARD_REGNO_MODE_OK (regno, mode)
944 && hard_regno_nregs[regno][mode] == 1)
947 if (best_mode == VOIDmode)
948 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
950 mode = GET_MODE_WIDER_MODE (mode))
951 if (HARD_REGNO_MODE_OK (regno, mode)
952 && have_insn_for (SET, mode))
955 if (best_mode == VOIDmode)
956 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
958 mode = GET_MODE_WIDER_MODE (mode))
959 if (HARD_REGNO_MODE_OK (regno, mode)
960 && have_insn_for (SET, mode))
963 if (best_mode == VOIDmode)
964 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
966 mode = GET_MODE_WIDER_MODE (mode))
967 if (HARD_REGNO_MODE_OK (regno, mode)
968 && have_insn_for (SET, mode))
972 if (mode == VOIDmode)
975 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
976 if (size % align != 0)
977 size = CEIL (size, align) * align;
978 apply_args_reg_offset[regno] = size;
979 size += GET_MODE_SIZE (mode);
980 apply_args_mode[regno] = mode;
984 apply_args_mode[regno] = VOIDmode;
985 apply_args_reg_offset[regno] = 0;
991 /* Return the size required for the block returned by __builtin_apply,
992 and initialize apply_result_mode. */
/* NOTE(review): this listing elides interior lines (original numbering is
   non-contiguous), so some declarations, braces and the final return are
   not visible here; comments describe only the visible code.  */
995 apply_result_size (void)
997 static int size = -1;
999 enum machine_mode mode;
1001 /* The values computed by this function never change. */
/* Scan every hard register that may carry a function return value.  */
1006 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1007 if (FUNCTION_VALUE_REGNO_P (regno))
1009 /* Search for the proper mode for copying this register's
1010 value. I'm not sure this is right, but it works so far. */
1011 enum machine_mode best_mode = VOIDmode;
/* Try integer modes first, widest usable one wins.  */
1013 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1015 mode = GET_MODE_WIDER_MODE (mode))
1016 if (HARD_REGNO_MODE_OK (regno, mode))
/* Fall back to float, vector-float, then vector-int modes; each
   fallback additionally requires a move (SET) pattern for the mode.  */
1019 if (best_mode == VOIDmode)
1020 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1022 mode = GET_MODE_WIDER_MODE (mode))
1023 if (HARD_REGNO_MODE_OK (regno, mode)
1024 && have_insn_for (SET, mode))
1027 if (best_mode == VOIDmode)
1028 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1030 mode = GET_MODE_WIDER_MODE (mode))
1031 if (HARD_REGNO_MODE_OK (regno, mode)
1032 && have_insn_for (SET, mode))
1035 if (best_mode == VOIDmode)
1036 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1038 mode = GET_MODE_WIDER_MODE (mode))
1039 if (HARD_REGNO_MODE_OK (regno, mode)
1040 && have_insn_for (SET, mode))
1044 if (mode == VOIDmode)
/* Round the running block size up to this mode's alignment before
   accounting for the register's slot.  */
1047 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1048 if (size % align != 0)
1049 size = CEIL (size, align) * align;
1050 size += GET_MODE_SIZE (mode);
1051 apply_result_mode[regno] = mode;
/* Registers that never hold a return value are marked VOIDmode.  */
1054 apply_result_mode[regno] = VOIDmode;
1056 /* Allow targets that use untyped_call and untyped_return to override
1057 the size so that machine-specific information can be stored here. */
1058 #ifdef APPLY_RESULT_SIZE
1059 size = APPLY_RESULT_SIZE;
1065 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1066 /* Create a vector describing the result block RESULT. If SAVEP is true,
1067 the result block is used to save the values; otherwise it is used to
1068 restore the values. */
/* Returns a PARALLEL of SETs, one per result register, with offsets laid
   out the same way apply_result_size computed them.  (Some interior lines
   of this function are elided in this listing.)  */
1071 result_vector (int savep, rtx result)
1073 int regno, size, align, nelts;
1074 enum machine_mode mode;
1076 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1079 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1080 if ((mode = apply_result_mode[regno]) != VOIDmode)
1082 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1083 if (size % align != 0)
1084 size = CEIL (size, align) * align;
/* When restoring, map back to the incoming register number, which can
   differ on register-window targets.  */
1085 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1086 mem = adjust_address (result, mode, size);
/* Saving copies reg -> mem; restoring copies mem -> reg.  */
1087 savevec[nelts++] = (savep
1088 ? gen_rtx_SET (VOIDmode, mem, reg)
1089 : gen_rtx_SET (VOIDmode, reg, mem))
1090 size += GET_MODE_SIZE (mode);
1092 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1094 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1096 /* Save the state required to perform an untyped call with the same
1097 arguments as were passed to the current function. */
/* Allocates a stack block and fills it with: the incoming arg pointer,
   the structure-value address (if any), and every register that might
   carry an argument.  Returns the block's address in a pseudo.
   (Some interior lines are elided in this listing.)  */
1100 expand_builtin_apply_args_1 (void)
1103 int size, align, regno;
1104 enum machine_mode mode;
1105 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1107 /* Create a block where the arg-pointer, structure value address,
1108 and argument registers can be saved. */
1109 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1111 /* Walk past the arg-pointer and structure value address. */
1112 size = GET_MODE_SIZE (Pmode);
1113 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1114 size += GET_MODE_SIZE (Pmode);
1116 /* Save each register used in calling a function to the block. */
1117 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1118 if ((mode = apply_args_mode[regno]) != VOIDmode)
1120 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1121 if (size % align != 0)
1122 size = CEIL (size, align) * align;
/* Read the value from the register it actually arrived in, which may
   differ from REGNO on register-window targets.  */
1124 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1126 emit_move_insn (adjust_address (registers, mode, size), tem);
1127 size += GET_MODE_SIZE (mode);
1130 /* Save the arg pointer to the block. */
1131 tem = copy_to_reg (virtual_incoming_args_rtx);
1132 #ifdef STACK_GROWS_DOWNWARD
1133 /* We need the pointer as the caller actually passed them to us, not
1134 as we might have pretended they were passed. Make sure it's a valid
1135 operand, as emit_move_insn isn't expected to handle a PLUS. */
1137 = force_operand (plus_constant (tem, current_function_pretend_args_size),
1140 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1142 size = GET_MODE_SIZE (Pmode);
1144 /* Save the structure value address unless this is passed as an
1145 "invisible" first argument. */
1146 if (struct_incoming_value)
1148 emit_move_insn (adjust_address (registers, Pmode, size),
1149 copy_to_reg (struct_incoming_value));
1150 size += GET_MODE_SIZE (Pmode);
1153 /* Return the address of the block. */
1154 return copy_addr_to_reg (XEXP (registers, 0));
1157 /* __builtin_apply_args returns block of memory allocated on
1158 the stack into which is stored the arg pointer, structure
1159 value address, static chain, and all the registers that might
1160 possibly be used in performing a function call. The code is
1161 moved to the start of the function so the incoming values are
/* Wrapper that memoizes expand_builtin_apply_args_1 per function and
   hoists its insns to the function start.  (Some interior lines are
   elided in this listing.)  */
1165 expand_builtin_apply_args (void)
1167 /* Don't do __builtin_apply_args more than once in a function.
1168 Save the result of the first call and reuse it. */
1169 if (apply_args_value != 0)
1170 return apply_args_value;
1172 /* When this function is called, it means that registers must be
1173 saved on entry to this function. So we migrate the
1174 call to the first insn of this function. */
1179 temp = expand_builtin_apply_args_1 ();
/* Cache the block address for subsequent uses in this function.  */
1183 apply_args_value = temp;
1185 /* Put the insns after the NOTE that starts the function.
1186 If this is inside a start_sequence, make the outer-level insn
1187 chain current, so the code is placed at the start of the
1189 push_topmost_sequence ();
1190 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1191 pop_topmost_sequence ();
1196 /* Perform an untyped call and save the state required to perform an
1197 untyped return of whatever value was returned by the given function. */
/* FUNCTION is the callee address, ARGUMENTS the block produced by
   __builtin_apply_args, ARGSIZE the number of argument bytes to copy.
   Returns the address of a stack block holding the callee's result
   registers.  (Some interior lines are elided in this listing.)  */
1200 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1202 int size, align, regno;
1203 enum machine_mode mode;
1204 rtx incoming_args, result, reg, dest, src, call_insn;
1205 rtx old_stack_level = 0;
1206 rtx call_fusage = 0;
1207 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1209 arguments = convert_memory_address (Pmode, arguments);
1211 /* Create a block where the return registers can be saved. */
1212 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1214 /* Fetch the arg pointer from the ARGUMENTS block. */
1215 incoming_args = gen_reg_rtx (Pmode);
1216 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1217 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved pointer is past the arguments;
   back it up by ARGSIZE.  */
1218 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1219 incoming_args, 0, OPTAB_LIB_WIDEN);
1222 /* Perform postincrements before actually calling the function. */
1225 /* Push a new argument block and copy the arguments. Do not allow
1226 the (potential) memcpy call below to interfere with our stack
1228 do_pending_stack_adjust ();
1231 /* Save the stack with nonlocal if available. */
1232 #ifdef HAVE_save_stack_nonlocal
1233 if (HAVE_save_stack_nonlocal)
1234 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1237 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1239 /* Allocate a block of memory onto the stack and copy the memory
1240 arguments to the outgoing arguments address. */
1241 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1242 dest = virtual_outgoing_args_rtx;
1243 #ifndef STACK_GROWS_DOWNWARD
1244 if (GET_CODE (argsize) == CONST_INT)
1245 dest = plus_constant (dest, -INTVAL (argsize));
1247 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1249 dest = gen_rtx_MEM (BLKmode, dest);
1250 set_mem_align (dest, PARM_BOUNDARY);
1251 src = gen_rtx_MEM (BLKmode, incoming_args);
1252 set_mem_align (src, PARM_BOUNDARY);
1253 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1255 /* Refer to the argument block. */
1257 arguments = gen_rtx_MEM (BLKmode, arguments);
1258 set_mem_align (arguments, PARM_BOUNDARY);
1260 /* Walk past the arg-pointer and structure value address. */
1261 size = GET_MODE_SIZE (Pmode);
1263 size += GET_MODE_SIZE (Pmode);
1265 /* Restore each of the registers previously saved. Make USE insns
1266 for each of these registers for use in making the call. */
1267 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1268 if ((mode = apply_args_mode[regno]) != VOIDmode)
1270 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1271 if (size % align != 0)
1272 size = CEIL (size, align) * align;
1273 reg = gen_rtx_REG (mode, regno);
1274 emit_move_insn (reg, adjust_address (arguments, mode, size));
/* Record the register in CALL_FUSAGE so the call is known to use it.  */
1275 use_reg (&call_fusage, reg);
1276 size += GET_MODE_SIZE (mode);
1279 /* Restore the structure value address unless this is passed as an
1280 "invisible" first argument. */
1281 size = GET_MODE_SIZE (Pmode);
1284 rtx value = gen_reg_rtx (Pmode);
1285 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1286 emit_move_insn (struct_value, value);
1287 if (GET_CODE (struct_value) == REG)
1288 use_reg (&call_fusage, struct_value);
1289 size += GET_MODE_SIZE (Pmode);
1292 /* All arguments and registers used for the call are set up by now! */
1293 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1295 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1296 and we don't want to load it into a register as an optimization,
1297 because prepare_call_address already did it if it should be done. */
1298 if (GET_CODE (function) != SYMBOL_REF)
1299 function = memory_address (FUNCTION_MODE, function);
1301 /* Generate the actual call instruction and save the return value. */
1302 #ifdef HAVE_untyped_call
1303 if (HAVE_untyped_call)
1304 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1305 result, result_vector (1, result)));
1308 #ifdef HAVE_call_value
1309 if (HAVE_call_value)
1313 /* Locate the unique return register. It is not possible to
1314 express a call that sets more than one return register using
1315 call_value; use untyped_call for that. In fact, untyped_call
1316 only needs to save the return registers in the given block. */
1317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1318 if ((mode = apply_result_mode[regno]) != VOIDmode)
1321 abort (); /* HAVE_untyped_call required. */
1322 valreg = gen_rtx_REG (mode, regno);
1325 emit_call_insn (GEN_CALL_VALUE (valreg,
1326 gen_rtx_MEM (FUNCTION_MODE, function),
1327 const0_rtx, NULL_RTX, const0_rtx));
1329 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1335 /* Find the CALL insn we just emitted, and attach the register usage
1337 call_insn = last_call_insn ();
1338 add_function_usage_to (call_insn, call_fusage);
1340 /* Restore the stack. */
1341 #ifdef HAVE_save_stack_nonlocal
1342 if (HAVE_save_stack_nonlocal)
1343 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1346 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1350 /* Return the address of the result block. */
1351 result = copy_addr_to_reg (XEXP (result, 0));
1352 return convert_memory_address (ptr_mode, result);
1355 /* Perform an untyped return. */
/* RESULT is the address of a block produced by __builtin_apply; reload
   the saved return registers from it and return to the caller.
   (Some interior lines are elided in this listing.)  */
1358 expand_builtin_return (rtx result)
1360 int size, align, regno;
1361 enum machine_mode mode;
1363 rtx call_fusage = 0;
1365 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1367 apply_result_size ();
1368 result = gen_rtx_MEM (BLKmode, result);
1370 #ifdef HAVE_untyped_return
1371 if (HAVE_untyped_return)
1373 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1379 /* Restore the return value and note that each value is used. */
1381 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1382 if ((mode = apply_result_mode[regno]) != VOIDmode)
1384 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1385 if (size % align != 0)
1386 size = CEIL (size, align) * align;
1387 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1388 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USE insns so the registers stay live up to the return.  */
1390 push_to_sequence (call_fusage);
1391 emit_insn (gen_rtx_USE (VOIDmode, reg));
1392 call_fusage = get_insns ();
1394 size += GET_MODE_SIZE (mode);
1397 /* Put the USE insns before the return. */
1398 emit_insn (call_fusage);
1400 /* Return whatever values was restored by jumping directly to the end
1402 expand_naked_return ();
1405 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a tree type code to the corresponding enum type_class value.
   NOTE(review): this listing elides interior lines; at least the
   UNION_TYPE case (original line 1425, preceding QUAL_UNION_TYPE) and the
   switch braces are not visible here.  */
1407 static enum type_class
1408 type_to_class (tree type)
1410 switch (TREE_CODE (type))
1412 case VOID_TYPE: return void_type_class;
1413 case INTEGER_TYPE: return integer_type_class;
1414 case CHAR_TYPE: return char_type_class;
1415 case ENUMERAL_TYPE: return enumeral_type_class;
1416 case BOOLEAN_TYPE: return boolean_type_class;
1417 case POINTER_TYPE: return pointer_type_class;
1418 case REFERENCE_TYPE: return reference_type_class;
1419 case OFFSET_TYPE: return offset_type_class;
1420 case REAL_TYPE: return real_type_class;
1421 case COMPLEX_TYPE: return complex_type_class;
1422 case FUNCTION_TYPE: return function_type_class;
1423 case METHOD_TYPE: return method_type_class;
1424 case RECORD_TYPE: return record_type_class;
1426 case QUAL_UNION_TYPE: return union_type_class;
/* String-flagged arrays classify as strings, other arrays as arrays.  */
1427 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1428 ? string_type_class : array_type_class);
1429 case SET_TYPE: return set_type_class;
1430 case FILE_TYPE: return file_type_class;
1431 case LANG_TYPE: return lang_type_class;
1432 default: return no_type_class;
1436 /* Expand a call to __builtin_classify_type with arguments found in
/* Returns a CONST_INT holding the type class of the first argument, or
   no_type_class when no argument is present.  (Interior lines elided.)  */
1440 expand_builtin_classify_type (tree arglist)
1443 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1444 return GEN_INT (no_type_class);
1447 /* Expand expression EXP, which is a call to __builtin_constant_p. */
/* Emits a CONSTANT_P_RTX marker so CSE can later decide whether the
   argument is constant; requires CSE to still run.
   (Some interior lines are elided in this listing.)  */
1450 expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
1456 arglist = TREE_VALUE (arglist);
1458 /* We have taken care of the easy cases during constant folding. This
1459 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
1460 get a chance to see if it can deduce whether ARGLIST is constant.
1461 If CSE isn't going to run, of course, don't bother waiting. */
1463 if (cse_not_expected)
/* Record that this function uses __builtin_constant_p, so later passes
   know CONSTANT_P_RTX nodes may be present.  */
1466 current_function_calls_constant_p = 1;
1468 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1469 tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
1473 /* This helper macro, meant to be used in mathfn_built_in below,
1474 determines which among a set of three builtin math functions is
1475 appropriate for a given type mode. The `F' and `L' cases are
1476 automatically generated from the `double' case. */
1477 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1478 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1479 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1480 fcodel = BUILT_IN_MATHFN##L ; break;
1482 /* Return mathematic function equivalent to FN but operating directly
1483 on TYPE, if available. If we can't do the conversion, return zero. */
/* Normalizes FN (any of the double/float/long-double variants) to the
   three candidate codes, then picks the one whose type mode matches
   TYPE.  (Some interior lines are elided in this listing.)  */
1485 mathfn_built_in (tree type, enum built_in_function fn)
1487 const enum machine_mode type_mode = TYPE_MODE (type);
1488 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN expands to the three variants of one math builtin.  */
1492 CASE_MATHFN (BUILT_IN_ACOS)
1493 CASE_MATHFN (BUILT_IN_ACOSH)
1494 CASE_MATHFN (BUILT_IN_ASIN)
1495 CASE_MATHFN (BUILT_IN_ASINH)
1496 CASE_MATHFN (BUILT_IN_ATAN)
1497 CASE_MATHFN (BUILT_IN_ATAN2)
1498 CASE_MATHFN (BUILT_IN_ATANH)
1499 CASE_MATHFN (BUILT_IN_CBRT)
1500 CASE_MATHFN (BUILT_IN_CEIL)
1501 CASE_MATHFN (BUILT_IN_COPYSIGN)
1502 CASE_MATHFN (BUILT_IN_COS)
1503 CASE_MATHFN (BUILT_IN_COSH)
1504 CASE_MATHFN (BUILT_IN_DREM)
1505 CASE_MATHFN (BUILT_IN_ERF)
1506 CASE_MATHFN (BUILT_IN_ERFC)
1507 CASE_MATHFN (BUILT_IN_EXP)
1508 CASE_MATHFN (BUILT_IN_EXP10)
1509 CASE_MATHFN (BUILT_IN_EXP2)
1510 CASE_MATHFN (BUILT_IN_EXPM1)
1511 CASE_MATHFN (BUILT_IN_FABS)
1512 CASE_MATHFN (BUILT_IN_FDIM)
1513 CASE_MATHFN (BUILT_IN_FLOOR)
1514 CASE_MATHFN (BUILT_IN_FMA)
1515 CASE_MATHFN (BUILT_IN_FMAX)
1516 CASE_MATHFN (BUILT_IN_FMIN)
1517 CASE_MATHFN (BUILT_IN_FMOD)
1518 CASE_MATHFN (BUILT_IN_FREXP)
1519 CASE_MATHFN (BUILT_IN_GAMMA)
1520 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1521 CASE_MATHFN (BUILT_IN_HYPOT)
1522 CASE_MATHFN (BUILT_IN_ILOGB)
1523 CASE_MATHFN (BUILT_IN_INF)
1524 CASE_MATHFN (BUILT_IN_J0)
1525 CASE_MATHFN (BUILT_IN_J1)
1526 CASE_MATHFN (BUILT_IN_JN)
1527 CASE_MATHFN (BUILT_IN_LDEXP)
1528 CASE_MATHFN (BUILT_IN_LGAMMA)
1529 CASE_MATHFN (BUILT_IN_LLRINT)
1530 CASE_MATHFN (BUILT_IN_LLROUND)
1531 CASE_MATHFN (BUILT_IN_LOG)
1532 CASE_MATHFN (BUILT_IN_LOG10)
1533 CASE_MATHFN (BUILT_IN_LOG1P)
1534 CASE_MATHFN (BUILT_IN_LOG2)
1535 CASE_MATHFN (BUILT_IN_LOGB)
1536 CASE_MATHFN (BUILT_IN_LRINT)
1537 CASE_MATHFN (BUILT_IN_LROUND)
1538 CASE_MATHFN (BUILT_IN_MODF)
1539 CASE_MATHFN (BUILT_IN_NAN)
1540 CASE_MATHFN (BUILT_IN_NANS)
1541 CASE_MATHFN (BUILT_IN_NEARBYINT)
1542 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1543 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1544 CASE_MATHFN (BUILT_IN_POW)
1545 CASE_MATHFN (BUILT_IN_POW10)
1546 CASE_MATHFN (BUILT_IN_REMAINDER)
1547 CASE_MATHFN (BUILT_IN_REMQUO)
1548 CASE_MATHFN (BUILT_IN_RINT)
1549 CASE_MATHFN (BUILT_IN_ROUND)
1550 CASE_MATHFN (BUILT_IN_SCALB)
1551 CASE_MATHFN (BUILT_IN_SCALBLN)
1552 CASE_MATHFN (BUILT_IN_SCALBN)
1553 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1554 CASE_MATHFN (BUILT_IN_SIN)
1555 CASE_MATHFN (BUILT_IN_SINCOS)
1556 CASE_MATHFN (BUILT_IN_SINH)
1557 CASE_MATHFN (BUILT_IN_SQRT)
1558 CASE_MATHFN (BUILT_IN_TAN)
1559 CASE_MATHFN (BUILT_IN_TANH)
1560 CASE_MATHFN (BUILT_IN_TGAMMA)
1561 CASE_MATHFN (BUILT_IN_TRUNC)
1562 CASE_MATHFN (BUILT_IN_Y0)
1563 CASE_MATHFN (BUILT_IN_Y1)
1564 CASE_MATHFN (BUILT_IN_YN)
/* Select the variant matching TYPE's machine mode; modes are compared
   rather than the type nodes themselves.  */
1570 if (type_mode == TYPE_MODE (double_type_node))
1571 return implicit_built_in_decls[fcode];
1572 else if (type_mode == TYPE_MODE (float_type_node))
1573 return implicit_built_in_decls[fcodef];
1574 else if (type_mode == TYPE_MODE (long_double_type_node))
1575 return implicit_built_in_decls[fcodel];
1580 /* If errno must be maintained, expand the RTL to check if the result,
1581 TARGET, of a built-in function call, EXP, is NaN, and if so set
/* Uses the self-comparison TARGET == TARGET, which fails only for NaN,
   to branch over the errno store.  (Some interior lines are elided.)  */
1585 expand_errno_check (tree exp, rtx target)
1587 rtx lab = gen_label_rtx ();
1589 /* Test the result; if it is NaN, set errno=EDOM because
1590 the argument was not in the domain. */
1591 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1595 /* If this built-in doesn't throw an exception, set errno directly. */
1596 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1598 #ifdef GEN_ERRNO_RTX
1599 rtx errno_rtx = GEN_ERRNO_RTX;
/* Default errno location when the target supplies no GEN_ERRNO_RTX.  */
1602 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1604 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1610 /* We can't set errno=EDOM directly; let the library call do it.
1611 Pop the arguments right away in case the call gets deleted. */
1613 expand_call (exp, target, 0);
1619 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1620 Return 0 if a normal call should be emitted rather than expanding the
1621 function in-line. EXP is the expression that is a call to the builtin
1622 function; if convenient, the result should be placed in TARGET.
1623 SUBTARGET may be used as the target for computing one of EXP's operands. */
/* NOTE(review): this listing elides interior lines (original numbering is
   non-contiguous), so several case labels, braces and control statements
   are not visible; comments describe only the visible code.  */
1626 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1628 optab builtin_optab;
1629 rtx op0, insns, before_call;
1630 tree fndecl = get_callee_fndecl (exp);
1631 tree arglist = TREE_OPERAND (exp, 1);
1632 enum machine_mode mode;
1633 bool errno_set = false;
1636 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1639 arg = TREE_VALUE (arglist);
/* Pick the optab for the builtin; some cases also decide whether an
   errno check is needed.  */
1641 switch (DECL_FUNCTION_CODE (fndecl))
1646 builtin_optab = sin_optab; break;
1650 builtin_optab = cos_optab; break;
1652 case BUILT_IN_SQRTF:
1653 case BUILT_IN_SQRTL:
/* sqrt only sets errno when the argument may be negative.  */
1654 errno_set = ! tree_expr_nonnegative_p (arg);
1655 builtin_optab = sqrt_optab;
1660 errno_set = true; builtin_optab = exp_optab; break;
1664 errno_set = true; builtin_optab = log_optab; break;
1668 builtin_optab = tan_optab; break;
1670 case BUILT_IN_ATANF:
1671 case BUILT_IN_ATANL:
1672 builtin_optab = atan_optab; break;
1673 case BUILT_IN_FLOOR:
1674 case BUILT_IN_FLOORF:
1675 case BUILT_IN_FLOORL:
1676 builtin_optab = floor_optab; break;
1678 case BUILT_IN_CEILF:
1679 case BUILT_IN_CEILL:
1680 builtin_optab = ceil_optab; break;
1681 case BUILT_IN_TRUNC:
1682 case BUILT_IN_TRUNCF:
1683 case BUILT_IN_TRUNCL:
1684 builtin_optab = btrunc_optab; break;
1685 case BUILT_IN_ROUND:
1686 case BUILT_IN_ROUNDF:
1687 case BUILT_IN_ROUNDL:
1688 builtin_optab = round_optab; break;
1689 case BUILT_IN_NEARBYINT:
1690 case BUILT_IN_NEARBYINTF:
1691 case BUILT_IN_NEARBYINTL:
1692 builtin_optab = nearbyint_optab; break;
1697 /* Make a suitable register to place result in. */
1698 mode = TYPE_MODE (TREE_TYPE (exp));
/* No errno handling needed if errno-math is off or NaNs are absent.  */
1700 if (! flag_errno_math || ! HONOR_NANS (mode))
1703 /* Before working hard, check whether the instruction is available. */
1704 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1706 target = gen_reg_rtx (mode);
1708 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1709 need to expand the argument again. This way, we will not perform
1710 side-effects more the once. */
1711 narg = save_expr (arg);
1714 arglist = build_tree_list (NULL_TREE, arg);
1715 exp = build_function_call_expr (fndecl, arglist);
1718 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1723 /* Compute into TARGET.
1724 Set TARGET to wherever the result comes back. */
1725 target = expand_unop (mode, builtin_optab, op0, target, 0);
1730 expand_errno_check (exp, target);
1732 /* Output the entire sequence. */
1733 insns = get_insns ();
1739 /* If we were unable to expand via the builtin, stop the sequence
1740 (without outputting the insns) and call to the library function
1741 with the stabilized argument list. */
1745 before_call = get_last_insn ();
1747 target = expand_call (exp, target, target == const0_rtx);
1749 /* If this is a sqrt operation and we don't care about errno, try to
1750 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1751 This allows the semantics of the libcall to be visible to the RTL
1753 if (builtin_optab == sqrt_optab && !errno_set)
1755 /* Search backwards through the insns emitted by expand_call looking
1756 for the instruction with the REG_RETVAL note. */
1757 rtx last = get_last_insn ();
1758 while (last != before_call)
1760 if (find_reg_note (last, REG_RETVAL, NULL))
1762 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1763 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1764 two elements, i.e. symbol_ref(sqrt) and the operand. */
1766 && GET_CODE (note) == EXPR_LIST
1767 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1768 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1769 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1771 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1772 /* Check operand is a register with expected mode. */
1774 && GET_CODE (operand) == REG
1775 && GET_MODE (operand) == mode)
1777 /* Replace the REG_EQUAL note with a SQRT rtx. */
1778 rtx equiv = gen_rtx_SQRT (mode, operand);
1779 set_unique_reg_note (last, REG_EQUAL, equiv);
1784 last = PREV_INSN (last);
1791 /* Expand a call to the builtin binary math functions (pow and atan2).
1792 Return 0 if a normal call should be emitted rather than expanding the
1793 function in-line. EXP is the expression that is a call to the builtin
1794 function; if convenient, the result should be placed in TARGET.
1795 SUBTARGET may be used as the target for computing one of EXP's
/* (Some interior lines are elided in this listing; comments describe only
   the visible code.)  */
1799 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1801 optab builtin_optab;
1802 rtx op0, op1, insns;
1803 tree fndecl = get_callee_fndecl (exp);
1804 tree arglist = TREE_OPERAND (exp, 1);
1805 tree arg0, arg1, temp, narg;
1806 enum machine_mode mode;
1807 bool errno_set = true;
1810 if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1813 arg0 = TREE_VALUE (arglist);
1814 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1816 switch (DECL_FUNCTION_CODE (fndecl))
1821 builtin_optab = pow_optab; break;
1822 case BUILT_IN_ATAN2:
1823 case BUILT_IN_ATAN2F:
1824 case BUILT_IN_ATAN2L:
1825 builtin_optab = atan2_optab; break;
1830 /* Make a suitable register to place result in. */
1831 mode = TYPE_MODE (TREE_TYPE (exp));
1833 /* Before working hard, check whether the instruction is available. */
1834 if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1837 target = gen_reg_rtx (mode);
1839 if (! flag_errno_math || ! HONOR_NANS (mode))
1842 /* Alway stabilize the argument list. */
/* Wrap both arguments in SAVE_EXPRs so side effects run only once even
   if the call has to be re-expanded.  */
1843 narg = save_expr (arg1);
1846 temp = build_tree_list (NULL_TREE, narg);
1850 temp = TREE_CHAIN (arglist);
1852 narg = save_expr (arg0);
1855 arglist = tree_cons (NULL_TREE, narg, temp);
1859 arglist = tree_cons (NULL_TREE, arg0, temp);
1862 exp = build_function_call_expr (fndecl, arglist);
1864 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
1865 op1 = expand_expr (arg1, 0, VOIDmode, 0);
1870 /* Compute into TARGET.
1871 Set TARGET to wherever the result comes back. */
1872 target = expand_binop (mode, builtin_optab, op0, op1,
1873 target, 0, OPTAB_DIRECT);
1875 /* If we were unable to expand via the builtin, stop the sequence
1876 (without outputting the insns) and call to the library function
1877 with the stabilized argument list. */
1881 return expand_call (exp, target, target == const0_rtx);
1885 expand_errno_check (exp, target);
1887 /* Output the entire sequence. */
1888 insns = get_insns ();
1895 /* To evaluate powi(x,n), the floating point value x raised to the
1896 constant integer exponent n, we use a hybrid algorithm that
1897 combines the "window method" with look-up tables. For an
1898 introduction to exponentiation algorithms and "addition chains",
1899 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
1900 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
1901 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
1902 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
1904 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
1905 multiplications to inline before calling the system library's pow
1906 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
1907 so this default never requires calling pow, powf or powl. */
1909 #ifndef POWI_MAX_MULTS
1910 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
1913 /* The size of the "optimal power tree" lookup table. All
1914 exponents less than this value are simply looked up in the
1915 powi_table below. This threshold is also used to size the
1916 cache of pseudo registers that hold intermediate results. */
1917 #define POWI_TABLE_SIZE 256
1919 /* The size, in bits of the window, used in the "window method"
1920 exponentiation algorithm. This is equivalent to a radix of
1921 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
1922 #define POWI_WINDOW_SIZE 3
1924 /* The following table is an efficient representation of an
1925 "optimal power tree". For each value, i, the corresponding
1926 value, j, in the table states than an optimal evaluation
1927 sequence for calculating pow(x,i) can be found by evaluating
1928 pow(x,j)*pow(x,i-j). An optimal power tree for the first
1929 100 integers is given in Knuth's "Seminumerical algorithms". */
/* Entries are split factors j for exponent i: pow(x,i) = pow(x,j) *
   pow(x,i-j).  Consumed by powi_lookup_cost and expand_powi_1 below.  */
1931 static const unsigned char powi_table[POWI_TABLE_SIZE] =
1933 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
1934 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
1935 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
1936 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
1937 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
1938 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
1939 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
1940 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
1941 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
1942 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
1943 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
1944 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
1945 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
1946 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
1947 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
1948 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
1949 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
1950 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
1951 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
1952 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
1953 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
1954 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
1955 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
1956 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
1957 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
1958 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
1959 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
1960 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
1961 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
1962 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
1963 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
1964 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
1968 /* Return the number of multiplications required to calculate
1969 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
1970 subroutine of powi_cost. CACHE is an array indicating
1971 which exponents have already been calculated. */
/* Recurses over the powi_table split of N; each split adds one multiply.
   (Some interior lines, including the cached-exponent early return, are
   elided in this listing.)  */
1974 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
1976 /* If we've already calculated this exponent, then this evaluation
1977 doesn't require any additional multiplications. */
1982 return powi_lookup_cost (n - powi_table[n], cache)
1983 + powi_lookup_cost (powi_table[n], cache) + 1;
1986 /* Return the number of multiplications required to calculate
1987 powi(x,n) for an arbitrary x, given the exponent N. This
1988 function needs to be kept in sync with expand_powi below. */
/* (Some interior lines are elided in this listing.)  */
1991 powi_cost (HOST_WIDE_INT n)
1993 bool cache[POWI_TABLE_SIZE];
1994 unsigned HOST_WIDE_INT digit;
1995 unsigned HOST_WIDE_INT val;
2001 /* Ignore the reciprocal when calculating the cost. */
2002 val = (n < 0) ? -n : n;
2004 /* Initialize the exponent cache. */
2005 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
/* Window method: peel off POWI_WINDOW_SIZE bits at a time until the
   remaining exponent fits in the lookup table.  */
2010 while (val >= POWI_TABLE_SIZE)
2014 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2015 result += powi_lookup_cost (digit, cache)
2016 + POWI_WINDOW_SIZE + 1;
2017 val >>= POWI_WINDOW_SIZE;
2026 return result + powi_lookup_cost (val, cache);
2029 /* Recursive subroutine of expand_powi. This function takes the array,
2030 CACHE, of already calculated exponents and an exponent N and returns
2031 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
/* Mirrors powi_cost's decomposition: table split below POWI_TABLE_SIZE,
   window split above it.  (Some interior lines are elided.)  */
2034 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2036 unsigned HOST_WIDE_INT digit;
2040 if (n < POWI_TABLE_SIZE)
2045 target = gen_reg_rtx (mode);
/* Split per the optimal power tree: x^n = x^(n-j) * x^j.  */
2048 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2049 op1 = expand_powi_1 (mode, powi_table[n], cache);
2053 target = gen_reg_rtx (mode);
/* Window method for large exponents: peel the low bits off N.  */
2054 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2055 op0 = expand_powi_1 (mode, n - digit, cache);
2056 op1 = expand_powi_1 (mode, digit, cache);
2060 target = gen_reg_rtx (mode);
2061 op0 = expand_powi_1 (mode, n >> 1, cache);
2065 result = expand_mult (mode, op0, op1, target, 0);
2066 if (result != target)
2067 emit_move_insn (target, result);
2071 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2072 floating point operand in mode MODE, and N is the exponent. This
2073 function needs to be kept in sync with powi_cost above. */
/* (Some interior lines are elided in this listing.)  */
2076 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2078 unsigned HOST_WIDE_INT val;
2079 rtx cache[POWI_TABLE_SIZE];
/* x^0 is 1 regardless of X.  */
2083 return CONST1_RTX (mode);
2085 val = (n < 0) ? -n : n;
2087 memset (cache, 0, sizeof (cache));
2090 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2092 /* If the original exponent was negative, reciprocate the result. */
2094 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2095 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2100 /* Expand a call to the pow built-in mathematical function. Return 0 if
2101 a normal call should be emitted rather than expanding the function
2102 in-line. EXP is the expression that is a call to the builtin
2103 function; if convenient, the result should be placed in TARGET. */
/* Tries the powi expansion when the exponent is an exact integer
   constant; otherwise defers to expand_builtin_mathfn_2.
   (Some interior lines are elided in this listing.)  */
2106 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2108 tree arglist = TREE_OPERAND (exp, 1);
2111 if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2114 arg0 = TREE_VALUE (arglist);
2115 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2117 if (TREE_CODE (arg1) == REAL_CST
2118 && ! TREE_CONSTANT_OVERFLOW (arg1))
2120 REAL_VALUE_TYPE cint;
2124 c = TREE_REAL_CST (arg1);
2125 n = real_to_integer (&c);
/* Round-trip through an integer; identical values mean the exponent
   is exactly integral.  */
2126 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2127 if (real_identical (&c, &cint))
2129 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2130 Otherwise, check the number of multiplications required.
2131 Note that pow never sets errno for an integer exponent. */
2132 if ((n >= -1 && n <= 2)
2133 || (flag_unsafe_math_optimizations
2135 && powi_cost (n) <= POWI_MAX_MULTS))
2137 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2138 rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2139 op = force_reg (mode, op);
2140 return expand_powi (op, mode, n);
2145 if (! flag_unsafe_math_optimizations)
2147 return expand_builtin_mathfn_2 (exp, target, subtarget);
2150 /* Expand expression EXP which is a call to the strlen builtin. Return 0
2151 if we failed the caller should emit a normal call, otherwise
2152 try to get the result in TARGET, if convenient. */
2155 expand_builtin_strlen (tree arglist, rtx target,
2156 enum machine_mode target_mode)
/* A single pointer argument is required; otherwise emit a real call.  */
2158 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2163 tree len, src = TREE_VALUE (arglist);
2164 rtx result, src_reg, char_rtx, before_strlen;
2165 enum machine_mode insn_mode = target_mode, char_mode;
2166 enum insn_code icode = CODE_FOR_nothing;
2169 /* If the length can be computed at compile-time, return it. */
2170 len = c_strlen (src, 0);
2172 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2174 /* If the length can be computed at compile-time and is constant
2175 integer, but there are side-effects in src, evaluate
2176 src for side-effects, then return len.
2177 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2178 can be optimized into: i++; x = 3; */
2179 len = c_strlen (src, 1);
2180 if (len && TREE_CODE (len) == INTEGER_CST)
2182 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2183 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
/* Alignment in bytes, needed as an operand of the strlen pattern.  */
2186 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2188 /* If SRC is not a pointer type, don't do this operation inline. */
2192 /* Bail out if we can't compute strlen in the right mode. */
/* Search successively wider modes for a target strlen insn.  */
2193 while (insn_mode != VOIDmode)
2195 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2196 if (icode != CODE_FOR_nothing)
2199 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2201 if (insn_mode == VOIDmode)
2204 /* Make a place to write the result of the instruction. */
2207 && GET_CODE (result) == REG
2208 && GET_MODE (result) == insn_mode
2209 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2210 result = gen_reg_rtx (insn_mode);
2212 /* Make a place to hold the source address. We will not expand
2213 the actual source until we are sure that the expansion will
2214 not fail -- there are trees that cannot be expanded twice. */
2215 src_reg = gen_reg_rtx (Pmode);
2217 /* Mark the beginning of the strlen sequence so we can emit the
2218 source operand later. */
2219 before_strlen = get_last_insn ();
/* Operand 2 of the pattern is the (zero) terminator character.  */
2221 char_rtx = const0_rtx;
2222 char_mode = insn_data[(int) icode].operand[2].mode;
2223 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2225 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2227 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2228 char_rtx, GEN_INT (align));
2233 /* Now that we are assured of success, expand the source. */
2235 pat = memory_address (BLKmode,
2236 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM))
2238 emit_move_insn (src_reg, pat);
/* Insert the source-address setup before the strlen insns emitted
   above (after BEFORE_STRLEN, or at the very start if it was null).  */
2243 emit_insn_after (pat, before_strlen);
2245 emit_insn_before (pat, get_insns ());
2247 /* Return the value in the proper mode for this function. */
2248 if (GET_MODE (result) == target_mode)
2250 else if (target != 0)
2251 convert_move (target, result, 0);
2253 target = convert_to_mode (target_mode, result, 0);
2259 /* Expand a call to the strstr builtin. Return 0 if we failed the
2260 caller should emit a normal call, otherwise try to get the result
2261 in TARGET, if convenient (and in mode MODE if that's convenient). */
2264 expand_builtin_strstr (tree arglist, rtx target, enum machine_mode mode)
2266 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2270 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2272 const char *p1, *p2;
/* Both strings constant: fold the search at compile time using the
   host strstr.  */
2281 const char *r = strstr (p1, p2);
2286 /* Return an offset into the constant string argument. */
2287 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2288 s1, convert (TREE_TYPE (s1),
2289 ssize_int (r - p1)))),
2290 target, mode, EXPAND_NORMAL);
/* strstr(s1, "") == s1 — just return the first argument.  */
2294 return expand_expr (s1, target, mode, EXPAND_NORMAL);
2299 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2303 /* New argument list transforming strstr(s1, s2) to
2304 strchr(s1, s2[0]). */
2306 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2307 arglist = tree_cons (NULL_TREE, s1, arglist);
2308 return expand_expr (build_function_call_expr (fn, arglist),
2309 target, mode, EXPAND_NORMAL);
2313 /* Expand a call to the strchr builtin. Return 0 if we failed the
2314 caller should emit a normal call, otherwise try to get the result
2315 in TARGET, if convenient (and in mode MODE if that's convenient). */
2318 expand_builtin_strchr (tree arglist, rtx target, enum machine_mode mode)
2320 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2324 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
/* Only a constant search character can be folded here.  */
2327 if (TREE_CODE (s2) != INTEGER_CST)
/* Punt if the character doesn't fit in the target's char.  */
2336 if (target_char_cast (s2, &c))
2344 /* Return an offset into the constant string argument. */
2345 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2346 s1, convert (TREE_TYPE (s1),
2347 ssize_int (r - p1)))),
2348 target, mode, EXPAND_NORMAL);
2351 /* FIXME: Should use here strchrM optab so that ports can optimize
2357 /* Expand a call to the strrchr builtin. Return 0 if we failed the
2358 caller should emit a normal call, otherwise try to get the result
2359 in TARGET, if convenient (and in mode MODE if that's convenient). */
2362 expand_builtin_strrchr (tree arglist, rtx target, enum machine_mode mode)
2364 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2368 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
/* Only a constant search character can be folded here.  */
2372 if (TREE_CODE (s2) != INTEGER_CST)
2381 if (target_char_cast (s2, &c))
/* Fold the search at compile time with the host strrchr.  */
2384 r = strrchr (p1, c);
2389 /* Return an offset into the constant string argument. */
2390 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2391 s1, convert (TREE_TYPE (s1),
2392 ssize_int (r - p1)))),
2393 target, mode, EXPAND_NORMAL);
/* A non-nul character requires a real backwards search — punt.  */
2396 if (! integer_zerop (s2))
2399 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2403 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2404 return expand_expr (build_function_call_expr (fn, arglist),
2405 target, mode, EXPAND_NORMAL);
2409 /* Expand a call to the strpbrk builtin. Return 0 if we failed the
2410 caller should emit a normal call, otherwise try to get the result
2411 in TARGET, if convenient (and in mode MODE if that's convenient). */
2414 expand_builtin_strpbrk (tree arglist, rtx target, enum machine_mode mode)
2416 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2420 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2422 const char *p1, *p2;
/* Both strings constant: fold with the host strpbrk.  */
2431 const char *r = strpbrk (p1, p2);
2436 /* Return an offset into the constant string argument. */
2437 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
2438 s1, convert (TREE_TYPE (s1),
2439 ssize_int (r - p1)))),
2440 target, mode, EXPAND_NORMAL);
2445 /* strpbrk(x, "") == NULL.
2446 Evaluate and ignore the arguments in case they had
2448 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Accept-set longer than one character: no cheap transform exists.  */
2453 return 0; /* Really call strpbrk. */
2455 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
2459 /* New argument list transforming strpbrk(s1, s2) to
2460 strchr(s1, s2[0]). */
2462 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
2463 arglist = tree_cons (NULL_TREE, s1, arglist);
2464 return expand_expr (build_function_call_expr (fn, arglist),
2465 target, mode, EXPAND_NORMAL);
2469 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2470 bytes from constant string DATA + OFFSET and return it as target
2474 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2475 enum machine_mode mode)
2477 const char *str = (const char *) data;
/* A negative offset, or one that runs past the string's terminating
   nul, means the caller asked for bytes outside the constant.  */
2480 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2481 > strlen (str) + 1))
2482 abort (); /* Attempt to read past the end of constant string. */
/* Package the requested bytes as a target-format constant RTX.  */
2484 return c_readstr (str + offset, mode);
2487 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2488 Return 0 if we failed, the caller should emit a normal call,
2489 otherwise try to get the result in TARGET, if convenient (and in
2490 mode MODE if that's convenient). */
2492 expand_builtin_memcpy (tree arglist, rtx target, enum machine_mode mode)
2494 if (!validate_arglist (arglist,
2495 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2499 tree dest = TREE_VALUE (arglist);
2500 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2501 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2502 const char *src_str;
2503 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2504 unsigned int dest_align
2505 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2506 rtx dest_mem, src_mem, dest_addr, len_rtx;
2508 /* If DEST is not a pointer type, call the normal function. */
2509 if (dest_align == 0)
2512 /* If the LEN parameter is zero, return DEST. */
2513 if (integer_zerop (len))
2515 /* Evaluate and ignore SRC in case it has side-effects. */
2516 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2517 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2520 /* If SRC and DEST are the same (and not volatile), return DEST. */
2521 if (operand_equal_p (src, dest, 0))
2523 /* Evaluate and ignore LEN in case it has side-effects. */
2524 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2525 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2528 /* If either SRC is not a pointer type, don't do this
2529 operation in-line. */
2533 dest_mem = get_memory_rtx (dest);
2534 set_mem_align (dest_mem, dest_align);
2535 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2536 src_str = c_getstr (src);
2538 /* If SRC is a string constant and block move would be done
2539 by pieces, we can avoid loading the string from memory
2540 and only stored the computed constants. */
2542 && GET_CODE (len_rtx) == CONST_INT
2543 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2544 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2545 (void *) src_str, dest_align))
2547 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2548 builtin_memcpy_read_str,
2549 (void *) src_str, dest_align, 0);
/* The value of memcpy is DEST, as a pointer in ptr_mode.  */
2550 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2551 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2555 src_mem = get_memory_rtx (src);
2556 set_mem_align (src_mem, src_align);
2558 /* Copy word part most expediently. */
2559 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
/* emit_block_move didn't hand back the address; compute it ourselves.  */
2564 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2565 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2571 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2572 Return 0 if we failed the caller should emit a normal call,
2573 otherwise try to get the result in TARGET, if convenient (and in
2574 mode MODE if that's convenient). If ENDP is 0 return the
2575 destination pointer, if ENDP is 1 return the end pointer ala
2576 mempcpy, and if ENDP is 2 return the end pointer minus one ala
2580 expand_builtin_mempcpy (tree arglist, rtx target, enum machine_mode mode,
2583 if (!validate_arglist (arglist,
2584 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2586 /* If return value is ignored, transform mempcpy into memcpy. */
2587 else if (target == const0_rtx)
2589 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2594 return expand_expr (build_function_call_expr (fn, arglist),
2595 target, mode, EXPAND_NORMAL);
2599 tree dest = TREE_VALUE (arglist);
2600 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2601 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2602 const char *src_str;
2603 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2604 unsigned int dest_align
2605 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2606 rtx dest_mem, src_mem, len_rtx;
2608 /* If DEST is not a pointer type, call the normal function. */
2609 if (dest_align == 0)
2612 /* If SRC and DEST are the same (and not volatile), do nothing. */
2613 if (operand_equal_p (src, dest, 0))
2619 /* Evaluate and ignore LEN in case it has side-effects. */
2620 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2621 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* ENDP != 0: the result is DEST + LEN (minus one for ENDP == 2),
   built as a folded tree and expanded.  */
2625 len = fold (build (MINUS_EXPR, TREE_TYPE (len), dest,
2627 len = convert (TREE_TYPE (dest), len);
2628 expr = fold (build (PLUS_EXPR, TREE_TYPE (dest), dest, len));
2629 return expand_expr (expr, target, mode, EXPAND_NORMAL);
2632 /* If LEN is not constant, call the normal function. */
2633 if (! host_integerp (len, 1))
2636 /* If the LEN parameter is zero, return DEST. */
2637 if (tree_low_cst (len, 1) == 0)
2639 /* Evaluate and ignore SRC in case it has side-effects. */
2640 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2641 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2644 /* If either SRC is not a pointer type, don't do this
2645 operation in-line. */
2649 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2650 src_str = c_getstr (src);
2652 /* If SRC is a string constant and block move would be done
2653 by pieces, we can avoid loading the string from memory
2654 and only stored the computed constants. */
2656 && GET_CODE (len_rtx) == CONST_INT
2657 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2658 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2659 (void *) src_str, dest_align))
2661 dest_mem = get_memory_rtx (dest);
2662 set_mem_align (dest_mem, dest_align);
/* ENDP is forwarded so store_by_pieces returns the right pointer.  */
2663 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2664 builtin_memcpy_read_str,
2665 (void *) src_str, dest_align, endp);
2666 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2667 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Otherwise move by pieces if the (constant) length permits.  */
2671 if (GET_CODE (len_rtx) == CONST_INT
2672 && can_move_by_pieces (INTVAL (len_rtx),
2673 MIN (dest_align, src_align)))
2675 dest_mem = get_memory_rtx (dest);
2676 set_mem_align (dest_mem, dest_align);
2677 src_mem = get_memory_rtx (src);
2678 set_mem_align (src_mem, src_align);
2679 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2680 MIN (dest_align, src_align), endp);
2681 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2682 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2690 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2691 if we failed the caller should emit a normal call. */
2694 expand_builtin_memmove (tree arglist, rtx target, enum machine_mode mode)
2696 if (!validate_arglist (arglist,
2697 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2701 tree dest = TREE_VALUE (arglist);
2702 tree src = TREE_VALUE (TREE_CHAIN (arglist));
2703 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2705 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2706 unsigned int dest_align
2707 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2709 /* If DEST is not a pointer type, call the normal function. */
2710 if (dest_align == 0)
2713 /* If the LEN parameter is zero, return DEST. */
2714 if (integer_zerop (len))
2716 /* Evaluate and ignore SRC in case it has side-effects. */
2717 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2718 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2721 /* If SRC and DEST are the same (and not volatile), return DEST. */
2722 if (operand_equal_p (src, dest, 0))
2724 /* Evaluate and ignore LEN in case it has side-effects. */
2725 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2726 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2729 /* If either SRC is not a pointer type, don't do this
2730 operation in-line. */
2734 /* If src is categorized for a readonly section we can use
/* Read-only SRC cannot overlap a writable DEST, so memmove degrades
   to memcpy safely.  */
2736 if (readonly_data_expr (src))
2738 tree const fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2741 return expand_expr (build_function_call_expr (fn, arglist),
2742 target, mode, EXPAND_NORMAL);
2745 /* Otherwise, call the normal function. */
2745 /* Otherwise, call the normal function. */
2750 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2751 if we failed the caller should emit a normal call. */
2754 expand_builtin_bcopy (tree arglist)
2756 tree src, dest, size, newarglist;
2758 if (!validate_arglist (arglist,
2759 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
/* Note the bcopy argument order: source first, destination second.  */
2762 src = TREE_VALUE (arglist);
2763 dest = TREE_VALUE (TREE_CHAIN (arglist));
2764 size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2766 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2767 memmove(ptr y, ptr x, size_t z). This is done this way
2768 so that if it isn't expanded inline, we fallback to
2769 calling bcopy instead of memmove. */
2771 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2772 newarglist = tree_cons (NULL_TREE, src, newarglist);
2773 newarglist = tree_cons (NULL_TREE, dest, newarglist);
/* bcopy has no return value, hence const0_rtx as the target.  */
2775 return expand_builtin_memmove (newarglist, const0_rtx, VOIDmode);
2778 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2779 if we failed the caller should emit a normal call, otherwise try to get
2780 the result in TARGET, if convenient (and in mode MODE if that's
2784 expand_builtin_strcpy (tree arglist, rtx target, enum machine_mode mode)
2786 tree fn, len, src, dst;
2788 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2791 src = TREE_VALUE (TREE_CHAIN (arglist));
2792 dst = TREE_VALUE (arglist);
2794 /* If SRC and DST are equal (and not volatile), return DST. */
2795 if (operand_equal_p (src, dst, 0))
2796 return expand_expr (dst, target, mode, EXPAND_NORMAL);
/* Plan: rewrite strcpy(d, s) as memcpy(d, s, strlen(s) + 1) when the
   source length is a compile-time constant with no side effects.  */
2798 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2802 len = c_strlen (src, 1);
2803 if (len == 0 || TREE_SIDE_EFFECTS (len))
/* +1 so the terminating nul is copied too.  */
2806 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2807 arglist = build_tree_list (NULL_TREE, len);
2808 arglist = tree_cons (NULL_TREE, src, arglist);
2809 arglist = tree_cons (NULL_TREE, dst, arglist);
2810 return expand_expr (build_function_call_expr (fn, arglist),
2811 target, mode, EXPAND_NORMAL);
2814 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2815 Return 0 if we failed the caller should emit a normal call,
2816 otherwise try to get the result in TARGET, if convenient (and in
2817 mode MODE if that's convenient). */
2820 expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
2822 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2828 /* If return value is ignored, transform stpcpy into strcpy. */
2829 if (target == const0_rtx)
2831 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
2835 return expand_expr (build_function_call_expr (fn, arglist),
2836 target, mode, EXPAND_NORMAL);
2839 /* Ensure we get an actual string whose length can be evaluated at
2840 compile-time, not an expression containing a string. This is
2841 because the latter will potentially produce pessimized code
2842 when used to produce the return value. */
2843 src = TREE_VALUE (TREE_CHAIN (arglist));
2844 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
2847 dst = TREE_VALUE (arglist);
/* Copy strlen(src) + 1 bytes including the nul terminator.  */
2848 len = fold (size_binop (PLUS_EXPR, len, ssize_int (1)));
2849 arglist = build_tree_list (NULL_TREE, len);
2850 arglist = tree_cons (NULL_TREE, src, arglist);
2851 arglist = tree_cons (NULL_TREE, dst, arglist);
/* endp == 2: stpcpy returns a pointer to the copied nul, i.e. the
   end pointer minus one.  */
2852 return expand_builtin_mempcpy (arglist, target, mode, /*endp=*/2);
2856 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2857 bytes from constant string DATA + OFFSET and return it as target
2861 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
2862 enum machine_mode mode)
2864 const char *str = (const char *) data;
/* Past the end of the source string, strncpy pads with zeros, so
   reads beyond strlen yield a zero constant (return elided here).  */
2866 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2869 return c_readstr (str + offset, mode);
2872 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2873 if we failed the caller should emit a normal call. */
2876 expand_builtin_strncpy (tree arglist, rtx target, enum machine_mode mode)
2878 if (!validate_arglist (arglist,
2879 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2883 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
2884 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2887 /* We must be passed a constant len parameter. */
2888 if (TREE_CODE (len) != INTEGER_CST)
2891 /* If the len parameter is zero, return the dst parameter. */
2892 if (integer_zerop (len))
2894 /* Evaluate and ignore the src argument in case it has
2896 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2897 VOIDmode, EXPAND_NORMAL);
2898 /* Return the dst parameter. */
2899 return expand_expr (TREE_VALUE (arglist), target, mode,
2903 /* Now, we must be passed a constant src ptr parameter. */
2904 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
/* SLEN becomes strlen(src) + 1, the number of meaningful source
   bytes including the terminating nul.  */
2907 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2909 /* We're required to pad with trailing zeros if the requested
2910 len is greater than strlen(s2)+1. In that case try to
2911 use store_by_pieces, if it fails, punt. */
2912 if (tree_int_cst_lt (slen, len))
2914 tree dest = TREE_VALUE (arglist);
2915 unsigned int dest_align
2916 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2917 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2920 if (!p || dest_align == 0 || !host_integerp (len, 1)
2921 || !can_store_by_pieces (tree_low_cst (len, 1),
2922 builtin_strncpy_read_str,
2923 (void *) p, dest_align))
2926 dest_mem = get_memory_rtx (dest);
/* builtin_strncpy_read_str supplies zero bytes past the string,
   implementing strncpy's mandatory zero padding.  */
2927 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2928 builtin_strncpy_read_str,
2929 (void *) p, dest_align, 0);
2930 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2931 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2935 /* OK transform into builtin memcpy. */
2936 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2939 return expand_expr (build_function_call_expr (fn, arglist),
2940 target, mode, EXPAND_NORMAL);
2944 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2945 bytes from constant string DATA + OFFSET and return it as target
2949 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2950 enum machine_mode mode)
2952 const char *c = (const char *) data;
/* Build a temporary buffer of GET_MODE_SIZE copies of the fill byte
   and read it back as a target-format constant; OFFSET is irrelevant
   because every byte is identical.  */
2953 char *p = alloca (GET_MODE_SIZE (mode));
2955 memset (p, *c, GET_MODE_SIZE (mode));
2957 return c_readstr (p, mode);
2960 /* Callback routine for store_by_pieces. Return the RTL of a register
2961 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2962 char value given in the RTL register data. For example, if mode is
2963 4 bytes wide, return the RTL for 0x01010101*data. */
2966 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2967 enum machine_mode mode)
2973 size = GET_MODE_SIZE (mode);
/* COEFF is the constant 0x0101...01 of the right width, built by
   reading a buffer of 1-bytes.  */
2978 memset (p, 1, size);
2979 coeff = c_readstr (p, mode);
/* Replicate the byte: zero-extend DATA to MODE, multiply by COEFF.  */
2981 target = convert_to_mode (mode, (rtx) data, 1);
2982 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2983 return force_reg (mode, target);
2986 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2987 if we failed the caller should emit a normal call, otherwise try to get
2988 the result in TARGET, if convenient (and in mode MODE if that's
2992 expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode)
2994 if (!validate_arglist (arglist,
2995 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2999 tree dest = TREE_VALUE (arglist);
3000 tree val = TREE_VALUE (TREE_CHAIN (arglist));
3001 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3004 unsigned int dest_align
3005 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3006 rtx dest_mem, dest_addr, len_rtx;
3008 /* If DEST is not a pointer type, don't do this
3009 operation in-line. */
3010 if (dest_align == 0)
3013 /* If the LEN parameter is zero, return DEST. */
3014 if (integer_zerop (len))
3016 /* Evaluate and ignore VAL in case it has side-effects. */
3017 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3018 return expand_expr (dest, target, mode, EXPAND_NORMAL);
/* Non-constant fill value: replicate it at run time via
   builtin_memset_gen_str, but only for constant, small lengths.  */
3021 if (TREE_CODE (val) != INTEGER_CST)
3025 if (!host_integerp (len, 1))
3028 if (optimize_size && tree_low_cst (len, 1) > 1)
3031 /* Assume that we can memset by pieces if we can store the
3032 * the coefficients by pieces (in the required modes).
3033 * We can't pass builtin_memset_gen_str as that emits RTL. */
3035 if (!can_store_by_pieces (tree_low_cst (len, 1),
3036 builtin_memset_read_str,
3040 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
3041 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
3042 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3044 dest_mem = get_memory_rtx (dest);
3045 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3046 builtin_memset_gen_str,
3047 val_rtx, dest_align, 0);
3048 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3049 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Constant fill value that doesn't fit a target char: punt.  */
3053 if (target_char_cast (val, &c))
/* Constant nonzero byte: store by pieces when the length permits.  */
3058 if (!host_integerp (len, 1))
3060 if (!can_store_by_pieces (tree_low_cst (len, 1),
3061 builtin_memset_read_str, &c,
3065 dest_mem = get_memory_rtx (dest);
3066 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3067 builtin_memset_read_str,
3069 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3070 dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Fill byte is zero: use the generic block-clear expander.  */
3074 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3076 dest_mem = get_memory_rtx (dest);
3077 set_mem_align (dest_mem, dest_align);
3078 dest_addr = clear_storage (dest_mem, len_rtx);
3082 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3083 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3090 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3091 if we failed the caller should emit a normal call. */
3094 expand_builtin_bzero (tree arglist)
3096 tree dest, size, newarglist;
3098 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3101 dest = TREE_VALUE (arglist);
3102 size = TREE_VALUE (TREE_CHAIN (arglist));
3104 /* New argument list transforming bzero(ptr x, int y) to
3105 memset(ptr x, int 0, size_t y). This is done this way
3106 so that if it isn't expanded inline, we fallback to
3107 calling bzero instead of memset. */
3109 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
3110 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3111 newarglist = tree_cons (NULL_TREE, dest, newarglist);
/* bzero returns nothing, hence const0_rtx as the target.  */
3113 return expand_builtin_memset (newarglist, const0_rtx, VOIDmode);
3116 /* Expand expression EXP, which is a call to the memcmp built-in function.
3117 ARGLIST is the argument list for this call. Return 0 if we failed and the
3118 caller should emit a normal call, otherwise try to get the result in
3119 TARGET, if convenient (and in mode MODE, if that's convenient). */
3122 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3123 enum machine_mode mode)
3125 tree arg1, arg2, len;
3126 const char *p1, *p2;
3128 if (!validate_arglist (arglist,
3129 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3132 arg1 = TREE_VALUE (arglist);
3133 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3134 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3136 /* If the len parameter is zero, return zero. */
3137 if (integer_zerop (len))
3139 /* Evaluate and ignore arg1 and arg2 in case they have
3141 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3142 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3146 /* If both arguments are equal (and not volatile), return zero. */
3147 if (operand_equal_p (arg1, arg2, 0))
3149 /* Evaluate and ignore len in case it has side-effects. */
3150 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
3154 p1 = c_getstr (arg1);
3155 p2 = c_getstr (arg2);
3157 /* If all arguments are constant, and the value of len is not greater
3158 than the lengths of arg1 and arg2, evaluate at compile-time. */
3159 if (host_integerp (len, 1) && p1 && p2
3160 && compare_tree_int (len, strlen (p1) + 1) <= 0
3161 && compare_tree_int (len, strlen (p2) + 1) <= 0)
3163 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
/* Normalize the host memcmp result to -1/0/1.  */
3165 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3168 /* If len parameter is one, return an expression corresponding to
3169 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3170 if (integer_onep (len))
3172 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3173 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3175 fold (build1 (CONVERT_EXPR, integer_type_node,
3176 build1 (INDIRECT_REF, cst_uchar_node,
3177 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3179 fold (build1 (CONVERT_EXPR, integer_type_node,
3180 build1 (INDIRECT_REF, cst_uchar_node,
3181 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3182 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3183 return expand_expr (result, target, mode, EXPAND_NORMAL);
/* General case: use a target cmpmem/cmpstr insn when available.  */
3186 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrsi
3188 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3193 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3195 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3196 enum machine_mode insn_mode;
3198 #ifdef HAVE_cmpmemsi
3200 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3203 #ifdef HAVE_cmpstrsi
3205 insn_mode = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3210 /* If we don't have POINTER_TYPE, call the function. */
3211 if (arg1_align == 0 || arg2_align == 0)
3214 /* Make a place to write the result of the instruction. */
3217 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3218 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3219 result = gen_reg_rtx (insn_mode);
3221 arg1_rtx = get_memory_rtx (arg1);
3222 arg2_rtx = get_memory_rtx (arg2);
3223 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3224 #ifdef HAVE_cmpmemsi
3226 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3227 GEN_INT (MIN (arg1_align, arg2_align)));
3230 #ifdef HAVE_cmpstrsi
3232 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3233 GEN_INT (MIN (arg1_align, arg2_align)));
/* No usable insn: fall back to an out-of-line memcmp library call.  */
3241 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3242 TYPE_MODE (integer_type_node), 3,
3243 XEXP (arg1_rtx, 0), Pmode,
3244 XEXP (arg2_rtx, 0), Pmode,
3245 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3246 TREE_UNSIGNED (sizetype)),
3247 TYPE_MODE (sizetype));
3249 /* Return the value in the proper mode for this function. */
3250 mode = TYPE_MODE (TREE_TYPE (exp));
3251 if (GET_MODE (result) == mode)
3253 else if (target != 0)
3255 convert_move (target, result, 0);
3259 return convert_to_mode (mode, result, 0);
3266 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3267 if we failed the caller should emit a normal call, otherwise try to get
3268 the result in TARGET, if convenient. */
3271 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3273 tree arglist = TREE_OPERAND (exp, 1);
3275 const char *p1, *p2;
3277 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3280 arg1 = TREE_VALUE (arglist);
3281 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3283 /* If both arguments are equal (and not volatile), return zero. */
3284 if (operand_equal_p (arg1, arg2, 0))
3287 p1 = c_getstr (arg1);
3288 p2 = c_getstr (arg2);
3292 const int i = strcmp (p1, p2);
3293 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
3296 /* If either arg is "", return an expression corresponding to
3297 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
3298 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
3300 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3301 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3303 fold (build1 (CONVERT_EXPR, integer_type_node,
3304 build1 (INDIRECT_REF, cst_uchar_node,
3305 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3307 fold (build1 (CONVERT_EXPR, integer_type_node,
3308 build1 (INDIRECT_REF, cst_uchar_node,
3309 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3310 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3311 return expand_expr (result, target, mode, EXPAND_NORMAL);
3314 #ifdef HAVE_cmpstrsi
3317 tree len, len1, len2;
3318 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3323 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3325 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3326 enum machine_mode insn_mode
3327 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3329 len1 = c_strlen (arg1, 1);
3330 len2 = c_strlen (arg2, 1);
3333 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3335 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3337 /* If we don't have a constant length for the first, use the length
3338 of the second, if we know it. We don't require a constant for
3339 this case; some cost analysis could be done if both are available
3340 but neither is constant. For now, assume they're equally cheap,
3341 unless one has side effects. If both strings have constant lengths,
3348 else if (TREE_SIDE_EFFECTS (len1))
3350 else if (TREE_SIDE_EFFECTS (len2))
3352 else if (TREE_CODE (len1) != INTEGER_CST)
3354 else if (TREE_CODE (len2) != INTEGER_CST)
3356 else if (tree_int_cst_lt (len1, len2))
3361 /* If both arguments have side effects, we cannot optimize. */
3362 if (!len || TREE_SIDE_EFFECTS (len))
3365 /* If we don't have POINTER_TYPE, call the function. */
3366 if (arg1_align == 0 || arg2_align == 0)
3369 /* Make a place to write the result of the instruction. */
3372 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
3373 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3374 result = gen_reg_rtx (insn_mode);
3376 /* Stabilize the arguments in case gen_cmpstrsi fails. */
3377 arg1 = save_expr (arg1);
3378 arg2 = save_expr (arg2);
3380 arg1_rtx = get_memory_rtx (arg1);
3381 arg2_rtx = get_memory_rtx (arg2);
3382 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
3383 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3384 GEN_INT (MIN (arg1_align, arg2_align)));
3389 /* Return the value in the proper mode for this function. */
3390 mode = TYPE_MODE (TREE_TYPE (exp));
3391 if (GET_MODE (result) == mode)
3394 return convert_to_mode (mode, result, 0);
3395 convert_move (target, result, 0);
3399 /* Expand the library call ourselves using a stabilized argument
3400 list to avoid re-evaluating the function's arguments twice. */
3401 arglist = build_tree_list (NULL_TREE, arg2);
3402 arglist = tree_cons (NULL_TREE, arg1, arglist);
3403 fndecl = get_callee_fndecl (exp);
3404 exp = build_function_call_expr (fndecl, arglist);
3405 return expand_call (exp, target, target == const0_rtx);
3411 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3412 if we failed the caller should emit a normal call, otherwise try to get
3413 the result in TARGET, if convenient. */
3416 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3418 tree arglist = TREE_OPERAND (exp, 1);
3419 tree arg1, arg2, arg3;
3420 const char *p1, *p2;
3422 if (!validate_arglist (arglist,
3423 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3426 arg1 = TREE_VALUE (arglist);
3427 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3428 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3430 /* If the len parameter is zero, return zero. */
3431 if (integer_zerop (arg3))
3433 /* Evaluate and ignore arg1 and arg2 in case they have
3435 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
3436 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
3440 /* If arg1 and arg2 are equal (and not volatile), return zero. */
3441 if (operand_equal_p (arg1, arg2, 0))
3443 /* Evaluate and ignore arg3 in case it has side-effects. */
3444 expand_expr (arg3, const0_rtx, VOIDmode, EXPAND_NORMAL);
3448 p1 = c_getstr (arg1);
3449 p2 = c_getstr (arg2);
3451 /* If all arguments are constant, evaluate at compile-time. */
3452 if (host_integerp (arg3, 1) && p1 && p2)
3454 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
3455 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
3458 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3459 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3460 if (host_integerp (arg3, 1)
3461 && (tree_low_cst (arg3, 1) == 1
3462 || (tree_low_cst (arg3, 1) > 1
3463 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
3465 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
3466 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
3468 fold (build1 (CONVERT_EXPR, integer_type_node,
3469 build1 (INDIRECT_REF, cst_uchar_node,
3470 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
3472 fold (build1 (CONVERT_EXPR, integer_type_node,
3473 build1 (INDIRECT_REF, cst_uchar_node,
3474 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
3475 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
3476 return expand_expr (result, target, mode, EXPAND_NORMAL);
3479 /* If c_strlen can determine an expression for one of the string
3480 lengths, and it doesn't have side effects, then emit cmpstrsi
3481 using length MIN(strlen(string)+1, arg3). */
3482 #ifdef HAVE_cmpstrsi
3485 tree len, len1, len2;
3486 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3491 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3493 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3494 enum machine_mode insn_mode
3495 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3497 len1 = c_strlen (arg1, 1);
3498 len2 = c_strlen (arg2, 1);
3501 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3503 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3505 /* If we don't have a constant length for the first, use the length
3506 of the second, if we know it. We don't require a constant for
3507 this case; some cost analysis could be done if both are available
3508 but neither is constant. For now, assume they're equally cheap,
3509 unless one has side effects. If both strings have constant lengths,
3516 else if (TREE_SIDE_EFFECTS (len1))
3518 else if (TREE_SIDE_EFFECTS (len2))
3520 else if (TREE_CODE (len1) != INTEGER_CST)
3522 else if (TREE_CODE (len2) != INTEGER_CST)
3524 else if (tree_int_cst_lt (len1, len2))
3529 /* If both arguments have side effects, we cannot optimize. */
3530 if (!len || TREE_SIDE_EFFECTS (len))
3533 /* The actual new length parameter is MIN(len,arg3). */
3534 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
3536 /* If we don't have POINTER_TYPE, call the function. */
3537 if (arg1_align == 0 || arg2_align == 0)
3540 /* Make a place to write the result of the instruction. */
3543 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode