/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "typeclass.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what.  */

unsigned int
get_object_alignment (tree exp, unsigned int max_align)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align, inner;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = DECL_ALIGN (exp);
  else if (CONSTANT_CLASS_P (exp))
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
#endif
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == INDIRECT_REF)
    align = TYPE_ALIGN (TREE_TYPE (exp));
  else if (TREE_CODE (exp) == MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      struct ptr_info_def *pi;
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
						  max_align));
      bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == TARGET_MEM_REF)
    {
      struct ptr_info_def *pi;
      tree addr = TMR_BASE (exp);
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}
      else
	align = BITS_PER_UNIT;
      if (TREE_CODE (addr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (addr)))
	{
	  bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
	  align = MAX (pi->align * BITS_PER_UNIT, align);
	}
      else if (TREE_CODE (addr) == ADDR_EXPR)
	align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
						  max_align));
      if (TMR_OFFSET (exp))
	bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
      if (TMR_INDEX (exp) && TMR_STEP (exp))
	{
	  unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
	  align = MIN (align, (step & -step) * BITS_PER_UNIT);
	}
      else if (TMR_INDEX (exp))
	align = BITS_PER_UNIT;
      if (TMR_INDEX2 (exp))
	align = BITS_PER_UNIT;
    }
  else
    align = BITS_PER_UNIT;

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  inner = max_align;
  while (offset)
    {
      tree next_offset;

      if (TREE_CODE (offset) == PLUS_EXPR)
	{
	  next_offset = TREE_OPERAND (offset, 0);
	  offset = TREE_OPERAND (offset, 1);
	}
      else
	next_offset = NULL;
      if (host_integerp (offset, 1))
	{
	  /* Any overflow in calculating offset_bits won't change
	     the alignment.  */
	  unsigned offset_bits
	    = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	  if (offset_bits)
	    inner = MIN (inner, (offset_bits & -offset_bits));
	}
      else if (TREE_CODE (offset) == MULT_EXPR
	       && host_integerp (TREE_OPERAND (offset, 1), 1))
	{
	  /* Any overflow in calculating offset_factor won't change
	     the alignment.  */
	  unsigned offset_factor
	    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
	       * BITS_PER_UNIT);

	  if (offset_factor)
	    inner = MIN (inner, (offset_factor & -offset_factor));
	}
      else
	{
	  inner = MIN (inner, BITS_PER_UNIT);
	  break;
	}
      offset = next_offset;
    }

  /* Alignment is innermost object alignment adjusted by the constant
     and non-constant offset parts.  */
  align = MIN (align, inner);
  bitpos = bitpos & (align - 1);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return MIN (align, max_align);
}
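
/* A minimal self-contained sketch of the trailing computation above,
   illustrative only: once ALIGN and BITPOS describe the known low bits
   of a pointer (ptr & (align - 1) == bitpos), any nonzero misalignment
   caps the provable alignment at its lowest set bit.  */
#if 0
static unsigned int
known_alignment_example (unsigned int align, unsigned int bitpos)
{
  bitpos &= align - 1;
  if (bitpos != 0)
    align = bitpos & -bitpos;
  return align;	/* e.g. known_alignment_example (128, 20) == 4 */
}
#endif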
/* Returns true iff we can trust that alignment information has been
   calculated properly.  */

bool
can_trust_pointer_alignment (void)
{
  /* We rely on TER to compute accurate alignment information.  */
  return (optimize && flag_tree_ter);
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
      unsigned align;
      if (!pi)
	return BITS_PER_UNIT;
      if (pi->misalign != 0)
	align = (pi->misalign & -pi->misalign);
      else
	align = pi->align;
      return MIN (max_align, align * BITS_PER_UNIT);
    }

  return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_HERE (src);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
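
/* Illustrative only (user-level C): folds driven by c_strlen.  Both
   calls below typically become compile-time constants.  */
#if 0
int len_example_1 (void) { return __builtin_strlen ("hello"); }	/* 5 */
int len_example_2 (void) { return __builtin_strlen ("hello" + 2); }	/* 3 */
#endif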
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
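
/* Illustrative only: the little-endian case of the packing above, as a
   self-contained sketch.  Reading "abcd" in a 32-bit integer mode on a
   little-endian target yields 0x64636261.  */
#if 0
static unsigned int
readstr_le32_example (const char *str)
{
  unsigned int v = 0;
  unsigned int i;
  for (i = 0; i < 4; i++)
    v |= (unsigned int) (unsigned char) str[i] << (i * 8);
  return v;	/* readstr_le32_example ("abcd") == 0x64636261 */
}
#endif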
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
	  || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
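
/* Illustrative only (user-level C): the two builtins expanded here.
   The argument must be a constant; 0 names the current frame.  */
#if 0
void *return_addr_example (void) { return __builtin_return_address (0); }
void *frame_addr_example (void)  { return __builtin_frame_address (0); }
#endif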
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  cfun->calls_setjmp = 1;

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
	/* Nothing */
      }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
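
/* Illustrative only (user-level C): the five-word buffer described
   above.  Word 0 receives the frame pointer, word 1 the receiver
   label, and words 2.. the machine-dependent stack save area.  */
#if 0
static void *jmp_buf_example[5];

static int
setjmp_longjmp_example (void)
{
  if (__builtin_setjmp (jmp_buf_example))
    return 1;				/* Reached via the longjmp below.  */
  __builtin_longjmp (jmp_buf_example, 1);	/* Second argument must be 1.  */
}
#endif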
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;


#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
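
/* Illustrative only (user-level C, hypothetical names): the calls this
   routine expands.  The second and third arguments must be constants.  */
#if 0
static void
prefetch_example (const double *a, double *out, int n)
{
  int i;
  for (i = 0; i < n; i++)
    {
      /* Read prefetch (rw = 0) with maximum temporal locality (3),
	 issued a few iterations ahead of the use.  */
      __builtin_prefetch (&a[i + 8], 0, 3);
      out[i] = a[i] * 2.0;
    }
}
#endif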
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && CONST_INT_P (MEM_OFFSET (mem)))
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */
static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  apply_args_mode[regno] = VOIDmode;
    }
  return size;
}
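
/* A minimal self-contained sketch of the rounding step used throughout
   these size computations, illustrative only: each slot is aligned up
   before being appended to the block.  */
#if 0
static int
round_up_example (int size, int align)
{
  if (size % align != 0)
    size = (size + align - 1) / align * align;	/* CEIL (size, align) * align */
  return size;		/* round_up_example (2, 4) == 4 */
}
#endif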
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
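
/* Illustrative only (user-level C): forwarding a call with the untyped
   apply machinery expanded above and in expand_builtin_apply_args.
   The callee and the guessed argument-block size (64) are hypothetical.  */
#if 0
extern double target_fn (int, double);

double
forward_example ()
{
  void *args = __builtin_apply_args ();
  void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
  __builtin_return (res);
}
#endif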
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
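
/* Illustrative only (user-level C): the classification exposed by
   __builtin_classify_type, whose values mirror enum type_class in
   typeclass.h (e.g. integer_type_class == 1, pointer_type_class == 5,
   real_type_class == 8).  */
#if 0
int classify_int (void)     { return __builtin_classify_type (0); }		/* 1 */
int classify_pointer (void) { return __builtin_classify_type ((void *) 0); }	/* 5 */
int classify_real (void)    { return __builtin_classify_type (0.0); }		/* 8 */
#endif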
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
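
/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */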
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
      }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1908 /* Like mathfn_built_in_1(), but always use the implicit array. */
1911 mathfn_built_in (tree type, enum built_in_function fn)
1913 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
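/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) yields
   the implicit declaration of sinf, while passing long_double_type_node
   yields that of sinl; for any other TYPE the result is NULL_TREE.  */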
1916 /* If errno must be maintained, expand the RTL to check if the result,
1917 TARGET, of a built-in function call, EXP, is NaN, and if so set
1918 errno to EDOM.  */
1921 expand_errno_check (tree exp, rtx target)
1923 rtx lab = gen_label_rtx ();
1925 /* Test the result; if it is NaN, set errno=EDOM because
1926 the argument was not in the domain. */
1927 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1928 NULL_RTX, NULL_RTX, lab,
1929 /* The jump is very likely. */
1930 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
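/* Note that the comparison above tests TARGET against itself: an EQ
   self-comparison is true for every value except a NaN, so the branch
   to LAB is taken for all normal results and execution falls through
   to the errno handling below only when the result is a NaN.  */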
1933 /* If this built-in doesn't throw an exception, set errno directly. */
1934 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1936 #ifdef GEN_ERRNO_RTX
1937 rtx errno_rtx = GEN_ERRNO_RTX;
1938 #else
1939 rtx errno_rtx
1940 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1941 #endif
1942 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1943 emit_label (lab);
1944 return;
1948 /* Make sure the library call isn't expanded as a tail call. */
1949 CALL_EXPR_TAILCALL (exp) = 0;
1951 /* We can't set errno=EDOM directly; let the library call do it.
1952 Pop the arguments right away in case the call gets deleted. */
1953 NO_DEFER_POP;
1954 expand_call (exp, target, 0);
1955 OK_DEFER_POP;
1956 emit_label (lab);
1959 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1960 Return NULL_RTX if a normal call should be emitted rather than expanding
1961 the function in-line. EXP is the expression that is a call to the builtin
1962 function; if convenient, the result should be placed in TARGET.
1963 SUBTARGET may be used as the target for computing one of EXP's operands. */
1966 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1968 optab builtin_optab;
1969 rtx op0, insns;
1970 tree fndecl = get_callee_fndecl (exp);
1971 enum machine_mode mode;
1972 bool errno_set = false;
1973 tree arg;
1975 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1976 return NULL_RTX;
1978 arg = CALL_EXPR_ARG (exp, 0);
1980 switch (DECL_FUNCTION_CODE (fndecl))
1982 CASE_FLT_FN (BUILT_IN_SQRT):
1983 errno_set = ! tree_expr_nonnegative_p (arg);
1984 builtin_optab = sqrt_optab;
1985 break;
1986 CASE_FLT_FN (BUILT_IN_EXP):
1987 errno_set = true; builtin_optab = exp_optab; break;
1988 CASE_FLT_FN (BUILT_IN_EXP10):
1989 CASE_FLT_FN (BUILT_IN_POW10):
1990 errno_set = true; builtin_optab = exp10_optab; break;
1991 CASE_FLT_FN (BUILT_IN_EXP2):
1992 errno_set = true; builtin_optab = exp2_optab; break;
1993 CASE_FLT_FN (BUILT_IN_EXPM1):
1994 errno_set = true; builtin_optab = expm1_optab; break;
1995 CASE_FLT_FN (BUILT_IN_LOGB):
1996 errno_set = true; builtin_optab = logb_optab; break;
1997 CASE_FLT_FN (BUILT_IN_LOG):
1998 errno_set = true; builtin_optab = log_optab; break;
1999 CASE_FLT_FN (BUILT_IN_LOG10):
2000 errno_set = true; builtin_optab = log10_optab; break;
2001 CASE_FLT_FN (BUILT_IN_LOG2):
2002 errno_set = true; builtin_optab = log2_optab; break;
2003 CASE_FLT_FN (BUILT_IN_LOG1P):
2004 errno_set = true; builtin_optab = log1p_optab; break;
2005 CASE_FLT_FN (BUILT_IN_ASIN):
2006 builtin_optab = asin_optab; break;
2007 CASE_FLT_FN (BUILT_IN_ACOS):
2008 builtin_optab = acos_optab; break;
2009 CASE_FLT_FN (BUILT_IN_TAN):
2010 builtin_optab = tan_optab; break;
2011 CASE_FLT_FN (BUILT_IN_ATAN):
2012 builtin_optab = atan_optab; break;
2013 CASE_FLT_FN (BUILT_IN_FLOOR):
2014 builtin_optab = floor_optab; break;
2015 CASE_FLT_FN (BUILT_IN_CEIL):
2016 builtin_optab = ceil_optab; break;
2017 CASE_FLT_FN (BUILT_IN_TRUNC):
2018 builtin_optab = btrunc_optab; break;
2019 CASE_FLT_FN (BUILT_IN_ROUND):
2020 builtin_optab = round_optab; break;
2021 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2022 builtin_optab = nearbyint_optab;
2023 if (flag_trapping_math)
2024 break;
2025 /* Else fall through and expand as rint.  */
2026 CASE_FLT_FN (BUILT_IN_RINT):
2027 builtin_optab = rint_optab; break;
2028 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2029 builtin_optab = significand_optab; break;
2034 /* Make a suitable register to place result in. */
2035 mode = TYPE_MODE (TREE_TYPE (exp));
2037 if (! flag_errno_math || ! HONOR_NANS (mode))
2038 errno_set = false;
2040 /* Before working hard, check whether the instruction is available. */
2041 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2042 && (!errno_set || !optimize_insn_for_size_p ()))
2044 target = gen_reg_rtx (mode);
2046 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2047 need to expand the argument again. This way, we will not perform
2048 side-effects more than once.  */
2049 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2051 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2053 start_sequence ();
2055 /* Compute into TARGET.
2056 Set TARGET to wherever the result comes back. */
2057 target = expand_unop (mode, builtin_optab, op0, target, 0);
2059 if (target != 0)
2060 {
2061 if (errno_set)
2062 expand_errno_check (exp, target);
2064 /* Output the entire sequence. */
2065 insns = get_insns ();
2066 end_sequence ();
2067 emit_insn (insns);
2068 return target;
2069 }
2071 /* If we were unable to expand via the builtin, stop the sequence
2072 (without outputting the insns) and call the library function
2073 with the stabilized argument list. */
2075 end_sequence ();
2077 return expand_call (exp, target, target == const0_rtx);
2080 /* Expand a call to the builtin binary math functions (pow and atan2).
2081 Return NULL_RTX if a normal call should be emitted rather than expanding the
2082 function in-line. EXP is the expression that is a call to the builtin
2083 function; if convenient, the result should be placed in TARGET.
2084 SUBTARGET may be used as the target for computing one of EXP's operands.  */
2088 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2090 optab builtin_optab;
2091 rtx op0, op1, insns;
2092 int op1_type = REAL_TYPE;
2093 tree fndecl = get_callee_fndecl (exp);
2094 tree arg0, arg1;
2095 enum machine_mode mode;
2096 bool errno_set = true;
2098 switch (DECL_FUNCTION_CODE (fndecl))
2100 CASE_FLT_FN (BUILT_IN_SCALBN):
2101 CASE_FLT_FN (BUILT_IN_SCALBLN):
2102 CASE_FLT_FN (BUILT_IN_LDEXP):
2103 op1_type = INTEGER_TYPE;
2104 break;
2105 default:
2106 break;
2108 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2109 return NULL_RTX;
2111 arg0 = CALL_EXPR_ARG (exp, 0);
2112 arg1 = CALL_EXPR_ARG (exp, 1);
2114 switch (DECL_FUNCTION_CODE (fndecl))
2116 CASE_FLT_FN (BUILT_IN_POW):
2117 builtin_optab = pow_optab; break;
2118 CASE_FLT_FN (BUILT_IN_ATAN2):
2119 builtin_optab = atan2_optab; break;
2120 CASE_FLT_FN (BUILT_IN_SCALB):
2121 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2122 return NULL_RTX;
2123 builtin_optab = scalb_optab; break;
2124 CASE_FLT_FN (BUILT_IN_SCALBN):
2125 CASE_FLT_FN (BUILT_IN_SCALBLN):
2126 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2127 return NULL_RTX;
2128 /* Fall through... */
2129 CASE_FLT_FN (BUILT_IN_LDEXP):
2130 builtin_optab = ldexp_optab; break;
2131 CASE_FLT_FN (BUILT_IN_FMOD):
2132 builtin_optab = fmod_optab; break;
2133 CASE_FLT_FN (BUILT_IN_REMAINDER):
2134 CASE_FLT_FN (BUILT_IN_DREM):
2135 builtin_optab = remainder_optab; break;
2140 /* Make a suitable register to place result in. */
2141 mode = TYPE_MODE (TREE_TYPE (exp));
2143 /* Before working hard, check whether the instruction is available. */
2144 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2145 return NULL_RTX;
2147 target = gen_reg_rtx (mode);
2149 if (! flag_errno_math || ! HONOR_NANS (mode))
2150 errno_set = false;
2152 if (errno_set && optimize_insn_for_size_p ())
2153 return NULL_RTX;
2155 /* Always stabilize the argument list. */
2156 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2157 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2159 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2160 op1 = expand_normal (arg1);
2162 start_sequence ();
2164 /* Compute into TARGET.
2165 Set TARGET to wherever the result comes back. */
2166 target = expand_binop (mode, builtin_optab, op0, op1,
2167 target, 0, OPTAB_DIRECT);
2169 /* If we were unable to expand via the builtin, stop the sequence
2170 (without outputting the insns) and call the library function
2171 with the stabilized argument list. */
2172 if (target == 0)
2173 {
2174 end_sequence ();
2175 return expand_call (exp, target, target == const0_rtx);
2176 }
2178 if (errno_set)
2179 expand_errno_check (exp, target);
2181 /* Output the entire sequence. */
2182 insns = get_insns ();
2183 end_sequence ();
2184 emit_insn (insns);
2186 return target;
2189 /* Expand a call to the builtin ternary math functions (fma).
2190 Return NULL_RTX if a normal call should be emitted rather than expanding the
2191 function in-line. EXP is the expression that is a call to the builtin
2192 function; if convenient, the result should be placed in TARGET.
2193 SUBTARGET may be used as the target for computing one of EXP's operands.  */
2197 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2199 optab builtin_optab;
2200 rtx op0, op1, op2, insns;
2201 tree fndecl = get_callee_fndecl (exp);
2202 tree arg0, arg1, arg2;
2203 enum machine_mode mode;
2205 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2206 return NULL_RTX;
2208 arg0 = CALL_EXPR_ARG (exp, 0);
2209 arg1 = CALL_EXPR_ARG (exp, 1);
2210 arg2 = CALL_EXPR_ARG (exp, 2);
2212 switch (DECL_FUNCTION_CODE (fndecl))
2214 CASE_FLT_FN (BUILT_IN_FMA):
2215 builtin_optab = fma_optab; break;
2220 /* Make a suitable register to place result in. */
2221 mode = TYPE_MODE (TREE_TYPE (exp));
2223 /* Before working hard, check whether the instruction is available. */
2224 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2225 return NULL_RTX;
2227 target = gen_reg_rtx (mode);
2229 /* Always stabilize the argument list. */
2230 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2231 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2232 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2234 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2235 op1 = expand_normal (arg1);
2236 op2 = expand_normal (arg2);
2238 start_sequence ();
2240 /* Compute into TARGET.
2241 Set TARGET to wherever the result comes back. */
2242 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2243 target, 0);
2245 /* If we were unable to expand via the builtin, stop the sequence
2246 (without outputting the insns) and call the library function
2247 with the stabilized argument list. */
2248 if (target == 0)
2249 {
2250 end_sequence ();
2251 return expand_call (exp, target, target == const0_rtx);
2252 }
2254 /* Output the entire sequence. */
2255 insns = get_insns ();
2256 end_sequence ();
2257 emit_insn (insns);
2259 return target;
2262 /* Expand a call to the builtin sin and cos math functions.
2263 Return NULL_RTX if a normal call should be emitted rather than expanding the
2264 function in-line. EXP is the expression that is a call to the builtin
2265 function; if convenient, the result should be placed in TARGET.
2266 SUBTARGET may be used as the target for computing one of EXP's operands.  */
2270 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2272 optab builtin_optab;
2273 rtx op0, insns;
2274 tree fndecl = get_callee_fndecl (exp);
2275 enum machine_mode mode;
2276 tree arg;
2278 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2279 return NULL_RTX;
2281 arg = CALL_EXPR_ARG (exp, 0);
2283 switch (DECL_FUNCTION_CODE (fndecl))
2285 CASE_FLT_FN (BUILT_IN_SIN):
2286 CASE_FLT_FN (BUILT_IN_COS):
2287 builtin_optab = sincos_optab; break;
2292 /* Make a suitable register to place result in. */
2293 mode = TYPE_MODE (TREE_TYPE (exp));
2295 /* Check if sincos insn is available, otherwise fall back
2296 to sin or cos insn. */
2297 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2298 switch (DECL_FUNCTION_CODE (fndecl))
2300 CASE_FLT_FN (BUILT_IN_SIN):
2301 builtin_optab = sin_optab; break;
2302 CASE_FLT_FN (BUILT_IN_COS):
2303 builtin_optab = cos_optab; break;
2308 /* Before working hard, check whether the instruction is available. */
2309 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2311 target = gen_reg_rtx (mode);
2313 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2314 need to expand the argument again. This way, we will not perform
2315 side-effects more than once.  */
2316 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2318 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2320 start_sequence ();
2322 /* Compute into TARGET.
2323 Set TARGET to wherever the result comes back. */
2324 if (builtin_optab == sincos_optab)
2325 {
2326 int result;
2328 switch (DECL_FUNCTION_CODE (fndecl))
2330 CASE_FLT_FN (BUILT_IN_SIN):
2331 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2332 break;
2333 CASE_FLT_FN (BUILT_IN_COS):
2334 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2335 break;
2336 default:
2337 gcc_unreachable ();
2339 gcc_assert (result);
2342 else
2343 target = expand_unop (mode, builtin_optab, op0, target, 0);
2345 if (target != 0)
2346 {
2348 /* Output the entire sequence.  */
2349 insns = get_insns ();
2350 end_sequence ();
2351 emit_insn (insns);
2353 return target;
2355 /* If we were unable to expand via the builtin, stop the sequence
2356 (without outputting the insns) and call the library function
2357 with the stabilized argument list. */
2358 end_sequence ();
2361 target = expand_call (exp, target, target == const0_rtx);
2363 return target;
2366 /* Given an interclass math builtin decl FNDECL and its argument ARG
2367 return an RTL instruction code that implements the functionality.
2368 If that isn't possible or available return CODE_FOR_nothing. */
2370 static enum insn_code
2371 interclass_mathfn_icode (tree arg, tree fndecl)
2373 bool errno_set = false;
2374 optab builtin_optab = 0;
2375 enum machine_mode mode;
2377 switch (DECL_FUNCTION_CODE (fndecl))
2379 CASE_FLT_FN (BUILT_IN_ILOGB):
2380 errno_set = true; builtin_optab = ilogb_optab; break;
2381 CASE_FLT_FN (BUILT_IN_ISINF):
2382 builtin_optab = isinf_optab; break;
2383 case BUILT_IN_ISNORMAL:
2384 case BUILT_IN_ISFINITE:
2385 CASE_FLT_FN (BUILT_IN_FINITE):
2386 case BUILT_IN_FINITED32:
2387 case BUILT_IN_FINITED64:
2388 case BUILT_IN_FINITED128:
2389 case BUILT_IN_ISINFD32:
2390 case BUILT_IN_ISINFD64:
2391 case BUILT_IN_ISINFD128:
2392 /* These builtins have no optabs (yet).  */
2393 break;
2394 default:
2395 gcc_unreachable ();
2398 /* There's no easy way to detect the case we need to set EDOM. */
2399 if (flag_errno_math && errno_set)
2400 return CODE_FOR_nothing;
2402 /* Optab mode depends on the mode of the input argument. */
2403 mode = TYPE_MODE (TREE_TYPE (arg));
2405 if (builtin_optab)
2406 return optab_handler (builtin_optab, mode);
2407 return CODE_FOR_nothing;
2410 /* Expand a call to one of the builtin math functions that operate on
2411 floating point argument and output an integer result (ilogb, isinf,
2412 isnan, etc).
2413 Return 0 if a normal call should be emitted rather than expanding the
2414 function in-line. EXP is the expression that is a call to the builtin
2415 function; if convenient, the result should be placed in TARGET. */
2418 expand_builtin_interclass_mathfn (tree exp, rtx target)
2420 enum insn_code icode = CODE_FOR_nothing;
2421 rtx op0;
2422 tree fndecl = get_callee_fndecl (exp);
2423 enum machine_mode mode;
2424 tree arg;
2426 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2427 return NULL_RTX;
2429 arg = CALL_EXPR_ARG (exp, 0);
2430 icode = interclass_mathfn_icode (arg, fndecl);
2431 mode = TYPE_MODE (TREE_TYPE (arg));
2433 if (icode != CODE_FOR_nothing)
2435 rtx last = get_last_insn ();
2436 tree orig_arg = arg;
2437 /* Make a suitable register to place result in. */
2438 if (!target
2439 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2440 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2441 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2443 gcc_assert (insn_data[icode].operand[0].predicate
2444 (target, GET_MODE (target)));
2446 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2447 need to expand the argument again. This way, we will not perform
2448 side-effects more than once.  */
2449 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2451 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2453 if (mode != GET_MODE (op0))
2454 op0 = convert_to_mode (mode, op0, 0);
2456 /* Compute into TARGET.
2457 Set TARGET to wherever the result comes back. */
2458 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2459 return target;
2460 delete_insns_since (last);
2461 CALL_EXPR_ARG (exp, 0) = orig_arg;
2462 }
2464 return NULL_RTX;
2467 /* Expand a call to the builtin sincos math function.
2468 Return NULL_RTX if a normal call should be emitted rather than expanding the
2469 function in-line.  EXP is the expression that is a call to the builtin function.  */
2473 expand_builtin_sincos (tree exp)
2475 rtx op0, op1, op2, target1, target2;
2476 enum machine_mode mode;
2477 tree arg, sinp, cosp;
2478 int result;
2479 location_t loc = EXPR_LOCATION (exp);
2480 tree alias_type, alias_off;
2482 if (!validate_arglist (exp, REAL_TYPE,
2483 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2484 return NULL_RTX;
2486 arg = CALL_EXPR_ARG (exp, 0);
2487 sinp = CALL_EXPR_ARG (exp, 1);
2488 cosp = CALL_EXPR_ARG (exp, 2);
2490 /* Make a suitable register to place result in. */
2491 mode = TYPE_MODE (TREE_TYPE (arg));
2493 /* Check if sincos insn is available, otherwise emit the call. */
2494 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2495 return NULL_RTX;
2497 target1 = gen_reg_rtx (mode);
2498 target2 = gen_reg_rtx (mode);
2500 op0 = expand_normal (arg);
2501 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2502 alias_off = build_int_cst (alias_type, 0);
2503 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2504 sinp, alias_off));
2505 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2506 cosp, alias_off));
2508 /* Compute into target1 and target2.
2509 Set TARGET to wherever the result comes back. */
2510 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2511 gcc_assert (result);
2513 /* Move target1 and target2 to the memory locations indicated
2514 by the pointers.  */
2515 emit_move_insn (op1, target1);
2516 emit_move_insn (op2, target2);
2518 return const0_rtx;
2521 /* Expand a call to the internal cexpi builtin to the sincos math function.
2522 EXP is the expression that is a call to the builtin function; if convenient,
2523 the result should be placed in TARGET. */
2526 expand_builtin_cexpi (tree exp, rtx target)
2528 tree fndecl = get_callee_fndecl (exp);
2529 tree arg, type;
2530 enum machine_mode mode;
2531 rtx op0, op1, op2;
2532 location_t loc = EXPR_LOCATION (exp);
2534 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2535 return NULL_RTX;
2537 arg = CALL_EXPR_ARG (exp, 0);
2538 type = TREE_TYPE (arg);
2539 mode = TYPE_MODE (TREE_TYPE (arg));
2541 /* Try expanding via a sincos optab; fall back to emitting a libcall
2542 to sincos or cexp.  We are sure to have sincos or cexp because cexpi
2543 is only generated from sincos or cexp, or when either of them is available.  */
2544 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2546 op1 = gen_reg_rtx (mode);
2547 op2 = gen_reg_rtx (mode);
2549 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2551 /* Compute into op1 and op2. */
2552 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2554 else if (TARGET_HAS_SINCOS)
2556 tree call, fn = NULL_TREE;
2557 tree top1, top2;
2558 rtx op1a, op2a;
2560 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2561 fn = built_in_decls[BUILT_IN_SINCOSF];
2562 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2563 fn = built_in_decls[BUILT_IN_SINCOS];
2564 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2565 fn = built_in_decls[BUILT_IN_SINCOSL];
2566 else
2567 gcc_unreachable ();
2569 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2570 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2571 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2572 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2573 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2574 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2576 /* Make sure not to fold the sincos call again. */
2577 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2578 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2579 call, 3, arg, top1, top2));
2581 else
2582 {
2583 tree call, fn = NULL_TREE, narg;
2584 tree ctype = build_complex_type (type);
2586 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2587 fn = built_in_decls[BUILT_IN_CEXPF];
2588 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2589 fn = built_in_decls[BUILT_IN_CEXP];
2590 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2591 fn = built_in_decls[BUILT_IN_CEXPL];
2595 /* If we don't have a decl for cexp create one. This is the
2596 friendliest fallback if the user calls __builtin_cexpi
2597 on a target without full C99 math function support.  */
2598 if (fn == NULL_TREE)
2600 tree fntype;
2601 const char *name = NULL;
2603 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2604 name = "cexpf";
2605 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2606 name = "cexp";
2607 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2608 name = "cexpl";
2610 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2611 fn = build_fn_decl (name, fntype);
2614 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2615 build_real (type, dconst0), arg);
2617 /* Make sure not to fold the cexp call again. */
2618 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2619 return expand_expr (build_call_nary (ctype, call, 1, narg),
2620 target, VOIDmode, EXPAND_NORMAL);
2623 /* Now build the proper return type. */
2624 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2625 make_tree (TREE_TYPE (arg), op2),
2626 make_tree (TREE_TYPE (arg), op1)),
2627 target, VOIDmode, EXPAND_NORMAL);
2630 /* Conveniently construct a function call expression. FNDECL names the
2631 function to be called, N is the number of arguments, and the "..."
2632 parameters are the argument expressions.  Unlike build_call_expr
2633 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2636 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2638 va_list ap;
2639 tree fntype = TREE_TYPE (fndecl);
2640 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2642 va_start (ap, n);
2643 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2644 va_end (ap);
2645 SET_EXPR_LOCATION (fn, loc);
2646 return fn;
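/* For example, build_call_nofold_loc (loc, sqrtfn, 1, arg0), as used by
   expand_builtin_pow_root below, always yields a genuine CALL_EXPR that
   can then be expanded, whereas the folding build_call_expr variants
   might simplify the call away.  */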
2649 /* Expand a call to one of the builtin rounding functions gcc defines
2650 as an extension (lfloor and lceil). As these are gcc extensions we
2651 do not need to worry about setting errno to EDOM.
2652 If expanding via optab fails, lower expression to (int)(floor(x)).
2653 EXP is the expression that is a call to the builtin function;
2654 if convenient, the result should be placed in TARGET. */
2657 expand_builtin_int_roundingfn (tree exp, rtx target)
2659 convert_optab builtin_optab;
2660 rtx op0, insns, tmp;
2661 tree fndecl = get_callee_fndecl (exp);
2662 enum built_in_function fallback_fn;
2663 tree fallback_fndecl;
2664 enum machine_mode mode;
2665 tree arg;
2667 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2668 return NULL_RTX;
2670 arg = CALL_EXPR_ARG (exp, 0);
2672 switch (DECL_FUNCTION_CODE (fndecl))
2674 CASE_FLT_FN (BUILT_IN_LCEIL):
2675 CASE_FLT_FN (BUILT_IN_LLCEIL):
2676 builtin_optab = lceil_optab;
2677 fallback_fn = BUILT_IN_CEIL;
2678 break;
2680 CASE_FLT_FN (BUILT_IN_LFLOOR):
2681 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2682 builtin_optab = lfloor_optab;
2683 fallback_fn = BUILT_IN_FLOOR;
2684 break;
2686 default:
2687 gcc_unreachable ();
2690 /* Make a suitable register to place result in. */
2691 mode = TYPE_MODE (TREE_TYPE (exp));
2693 target = gen_reg_rtx (mode);
2695 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2696 need to expand the argument again. This way, we will not perform
2697 side-effects more than once.  */
2698 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2700 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2702 start_sequence ();
2704 /* Compute into TARGET. */
2705 if (expand_sfix_optab (target, op0, builtin_optab))
2707 /* Output the entire sequence. */
2708 insns = get_insns ();
2709 end_sequence ();
2710 emit_insn (insns);
2711 return target;
2712 }
2714 /* If we were unable to expand via the builtin, stop the sequence
2715 (without outputting the insns).  */
2716 end_sequence ();
2718 /* Fall back to floating point rounding optab. */
2719 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2721 /* For non-C99 targets we may end up without a fallback fndecl here
2722 if the user called __builtin_lfloor directly. In this case emit
2723 a call to the floor/ceil variants nevertheless. This should result
2724 in the best user experience on targets lacking full C99 support.  */
2725 if (fallback_fndecl == NULL_TREE)
2728 const char *name = NULL;
2730 switch (DECL_FUNCTION_CODE (fndecl))
2732 case BUILT_IN_LCEIL:
2733 case BUILT_IN_LLCEIL:
2734 name = "ceil";
2735 break;
2736 case BUILT_IN_LCEILF:
2737 case BUILT_IN_LLCEILF:
2738 name = "ceilf";
2739 break;
2740 case BUILT_IN_LCEILL:
2741 case BUILT_IN_LLCEILL:
2742 name = "ceill";
2743 break;
2744 case BUILT_IN_LFLOOR:
2745 case BUILT_IN_LLFLOOR:
2746 name = "floor";
2747 break;
2748 case BUILT_IN_LFLOORF:
2749 case BUILT_IN_LLFLOORF:
2750 name = "floorf";
2751 break;
2752 case BUILT_IN_LFLOORL:
2753 case BUILT_IN_LLFLOORL:
2754 name = "floorl";
2755 break;
2756 default:
2757 gcc_unreachable ();
2760 fntype = build_function_type_list (TREE_TYPE (arg),
2761 TREE_TYPE (arg), NULL_TREE);
2762 fallback_fndecl = build_fn_decl (name, fntype);
2765 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2767 tmp = expand_normal (exp);
2769 /* Truncate the result of floating point optab to integer
2770 via expand_fix (). */
2771 target = gen_reg_rtx (mode);
2772 expand_fix (target, tmp, 0);
2774 return target;
2777 /* Expand a call to one of the builtin math functions doing integer
2778 conversion (lrint).
2779 Return 0 if a normal call should be emitted rather than expanding the
2780 function in-line. EXP is the expression that is a call to the builtin
2781 function; if convenient, the result should be placed in TARGET. */
2784 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2786 convert_optab builtin_optab;
2787 rtx op0, insns;
2788 tree fndecl = get_callee_fndecl (exp);
2789 tree arg;
2790 enum machine_mode mode;
2792 /* There's no easy way to detect the case we need to set EDOM. */
2793 if (flag_errno_math)
2794 return NULL_RTX;
2796 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2797 return NULL_RTX;
2799 arg = CALL_EXPR_ARG (exp, 0);
2801 switch (DECL_FUNCTION_CODE (fndecl))
2803 CASE_FLT_FN (BUILT_IN_LRINT):
2804 CASE_FLT_FN (BUILT_IN_LLRINT):
2805 builtin_optab = lrint_optab; break;
2806 CASE_FLT_FN (BUILT_IN_LROUND):
2807 CASE_FLT_FN (BUILT_IN_LLROUND):
2808 builtin_optab = lround_optab; break;
2813 /* Make a suitable register to place result in. */
2814 mode = TYPE_MODE (TREE_TYPE (exp));
2816 target = gen_reg_rtx (mode);
2818 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2819 need to expand the argument again. This way, we will not perform
2820 side-effects more than once.  */
2821 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2823 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2825 start_sequence ();
2827 if (expand_sfix_optab (target, op0, builtin_optab))
2829 /* Output the entire sequence. */
2830 insns = get_insns ();
2831 end_sequence ();
2832 emit_insn (insns);
2833 return target;
2834 }
2836 /* If we were unable to expand via the builtin, stop the sequence
2837 (without outputting the insns) and call the library function
2838 with the stabilized argument list.  */
2839 end_sequence ();
2841 target = expand_call (exp, target, target == const0_rtx);
2843 return target;
2846 /* To evaluate powi(x,n), the floating point value x raised to the
2847 constant integer exponent n, we use a hybrid algorithm that
2848 combines the "window method" with look-up tables. For an
2849 introduction to exponentiation algorithms and "addition chains",
2850 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2851 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2852 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2853 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2855 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2856 multiplications to inline before calling the system library's pow
2857 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2858 so this default never requires calling pow, powf or powl. */
2860 #ifndef POWI_MAX_MULTS
2861 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
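/* The bound 2*bits(n)-2 is that of the plain binary method: at most
   bits(n)-1 squarings plus at most bits(n)-1 additional multiplications,
   one for each set bit below the leading one.  */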
2864 /* The size of the "optimal power tree" lookup table. All
2865 exponents less than this value are simply looked up in the
2866 powi_table below. This threshold is also used to size the
2867 cache of pseudo registers that hold intermediate results. */
2868 #define POWI_TABLE_SIZE 256
2870 /* The size, in bits of the window, used in the "window method"
2871 exponentiation algorithm. This is equivalent to a radix of
2872 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2873 #define POWI_WINDOW_SIZE 3
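/* With POWI_WINDOW_SIZE == 3, an odd exponent contributes its low three
   bits as a table-lookup "digit" and is then shifted right by three,
   while an even exponent is simply halved at the cost of one squaring;
   see powi_cost and expand_powi_1 below.  */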
2875 /* The following table is an efficient representation of an
2876 "optimal power tree". For each value, i, the corresponding
2877 value, j, in the table states that an optimal evaluation
2878 sequence for calculating pow(x,i) can be found by evaluating
2879 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2880 100 integers is given in Knuth's "Seminumerical algorithms". */
2882 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2884 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2885 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2886 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2887 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2888 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2889 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2890 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2891 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2892 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2893 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2894 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2895 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2896 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2897 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2898 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2899 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2900 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2901 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2902 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2903 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2904 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2905 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2906 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2907 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2908 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2909 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2910 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2911 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2912 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2913 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2914 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2915 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2916 };
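/* Worked example: powi_table[15] is 9, so x**15 is evaluated as
   x**9 * x**6; recursively x**9 = x**6 * x**3, x**6 = x**3 * x**3 and
   x**3 = x**2 * x.  Re-using the cached x**2, x**3 and x**6 values this
   takes 5 multiplications, one fewer than plain square-and-multiply.  */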
2919 /* Return the number of multiplications required to calculate
2920 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2921 subroutine of powi_cost. CACHE is an array indicating
2922 which exponents have already been calculated. */
2925 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2927 /* If we've already calculated this exponent, then this evaluation
2928 doesn't require any additional multiplications.  */
2929 if (cache[n])
2930 return 0;
2932 cache[n] = true;
2933 return powi_lookup_cost (n - powi_table[n], cache)
2934 + powi_lookup_cost (powi_table[n], cache) + 1;
2937 /* Return the number of multiplications required to calculate
2938 powi(x,n) for an arbitrary x, given the exponent N. This
2939 function needs to be kept in sync with expand_powi below. */
2942 powi_cost (HOST_WIDE_INT n)
2944 bool cache[POWI_TABLE_SIZE];
2945 unsigned HOST_WIDE_INT digit;
2946 unsigned HOST_WIDE_INT val;
2947 int result;
2949 if (n == 0)
2950 return 0;
2952 /* Ignore the reciprocal when calculating the cost. */
2953 val = (n < 0) ? -n : n;
2955 /* Initialize the exponent cache. */
2956 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2957 cache[1] = true;
2959 result = 0;
2961 while (val >= POWI_TABLE_SIZE)
2962 {
2963 if (val & 1)
2964 {
2965 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2966 result += powi_lookup_cost (digit, cache)
2967 + POWI_WINDOW_SIZE + 1;
2968 val >>= POWI_WINDOW_SIZE;
2969 }
2970 else
2971 {
2972 val >>= 1;
2973 result++;
2974 }
2975 }
2977 return result + powi_lookup_cost (val, cache);
2980 /* Recursive subroutine of expand_powi. This function takes the array,
2981 CACHE, of already calculated exponents and an exponent N and returns
2982 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2985 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2987 unsigned HOST_WIDE_INT digit;
2988 rtx target, result;
2989 rtx op0, op1;
2991 if (n < POWI_TABLE_SIZE)
2992 {
2993 if (cache[n])
2994 return cache[n];
2996 target = gen_reg_rtx (mode);
2997 cache[n] = target;
2999 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
3000 op1 = expand_powi_1 (mode, powi_table[n], cache);
3001 }
3002 else if (n & 1)
3003 {
3004 target = gen_reg_rtx (mode);
3005 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
3006 op0 = expand_powi_1 (mode, n - digit, cache);
3007 op1 = expand_powi_1 (mode, digit, cache);
3008 }
3009 else
3010 {
3011 target = gen_reg_rtx (mode);
3012 op0 = expand_powi_1 (mode, n >> 1, cache);
3013 op1 = op0;
3014 }
3016 result = expand_mult (mode, op0, op1, target, 0);
3017 if (result != target)
3018 emit_move_insn (target, result);
3019 return result;
3022 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
3023 floating point operand in mode MODE, and N is the exponent. This
3024 function needs to be kept in sync with powi_cost above. */
3027 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
3029 rtx cache[POWI_TABLE_SIZE];
3030 rtx result;
3032 if (n == 0)
3033 return CONST1_RTX (mode);
3035 memset (cache, 0, sizeof (cache));
3036 cache[1] = x;
3038 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
3040 /* If the original exponent was negative, reciprocate the result.  */
3041 if (n < 0)
3042 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3043 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3045 return result;
3048 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
3049 cbrts.  Return NULL_RTX if no simplification can be made; otherwise
3050 expand the simplified expression and return the resulting rtx.  */
3052 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
3053 rtx subtarget)
3055 if (TREE_CODE (arg1) == REAL_CST
3056 && !TREE_OVERFLOW (arg1)
3057 && flag_unsafe_math_optimizations)
3059 enum machine_mode mode = TYPE_MODE (type);
3060 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3061 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3062 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3063 tree op = NULL_TREE;
3065 if (sqrtfn)
3066 {
3067 /* Optimize pow (x, 0.5) into sqrt.  */
3068 if (REAL_VALUES_EQUAL (c, dconsthalf))
3069 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3071 /* Don't do this optimization if we don't have a sqrt insn. */
3072 else if (optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
3074 REAL_VALUE_TYPE dconst1_4 = dconst1;
3075 REAL_VALUE_TYPE dconst3_4;
3076 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3078 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3079 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3081 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3082 machines that a builtin sqrt instruction is smaller than a
3083 call to pow with 0.25, so do this optimization even if
3084 -Os.  */
3085 if (REAL_VALUES_EQUAL (c, dconst1_4))
3087 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3088 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3091 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3092 are optimizing for space. */
3093 else if (optimize_insn_for_speed_p ()
3094 && !TREE_SIDE_EFFECTS (arg0)
3095 && REAL_VALUES_EQUAL (c, dconst3_4))
3097 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3098 tree sqrt2 = builtin_save_expr (sqrt1);
3099 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3100 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3105 /* Check whether we can do cbrt instead of pow (x, 1./3.) and
3106 cbrt/sqrts instead of pow (x, 1./6.). */
3107 if (cbrtfn
3108 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3110 /* First try 1/3. */
3111 REAL_VALUE_TYPE dconst1_3
3112 = real_value_truncate (mode, dconst_third ());
3114 if (REAL_VALUES_EQUAL (c, dconst1_3))
3115 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3118 else if (optimize_insn_for_speed_p ()
3119 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
3121 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3122 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3124 if (REAL_VALUES_EQUAL (c, dconst1_6))
3126 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3127 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3132 if (op)
3133 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3136 return NULL_RTX;
3139 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3140 a normal call should be emitted rather than expanding the function
3141 in-line. EXP is the expression that is a call to the builtin
3142 function; if convenient, the result should be placed in TARGET. */
3145 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3147 tree arg0, arg1;
3148 tree fn, narg0;
3149 tree type = TREE_TYPE (exp);
3150 REAL_VALUE_TYPE cint, c, c2;
3151 HOST_WIDE_INT n;
3152 rtx op, op2;
3153 enum machine_mode mode = TYPE_MODE (type);
3155 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3156 return NULL_RTX;
3158 arg0 = CALL_EXPR_ARG (exp, 0);
3159 arg1 = CALL_EXPR_ARG (exp, 1);
3161 if (TREE_CODE (arg1) != REAL_CST
3162 || TREE_OVERFLOW (arg1))
3163 return expand_builtin_mathfn_2 (exp, target, subtarget);
3165 /* Handle constant exponents. */
3167 /* For integer valued exponents we can expand to an optimal multiplication
3168 sequence using expand_powi. */
3169 c = TREE_REAL_CST (arg1);
3170 n = real_to_integer (&c);
3171 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3172 if (real_identical (&c, &cint)
3173 && ((n >= -1 && n <= 2)
3174 || (flag_unsafe_math_optimizations
3175 && optimize_insn_for_speed_p ()
3176 && powi_cost (n) <= POWI_MAX_MULTS)))
3178 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3179 if (n != 1)
3180 {
3181 op = force_reg (mode, op);
3182 op = expand_powi (op, mode, n);
3183 }
3184 return op;
3187 narg0 = builtin_save_expr (arg0);
3189 /* If the exponent is not integer valued, check if it is half of an integer.
3190 In this case we can expand to sqrt (x) * x**(n/2). */
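/* For instance, for pow (x, 3.5) we get c2 == 7 below, so the expansion
   computes sqrt (x) * x**3, which equals x**0.5 * x**3 == x**3.5.  */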
3191 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3192 if (fn != NULL_TREE)
3194 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3195 n = real_to_integer (&c2);
3196 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3197 if (real_identical (&c2, &cint)
3198 && ((flag_unsafe_math_optimizations
3199 && optimize_insn_for_speed_p ()
3200 && powi_cost (n/2) <= POWI_MAX_MULTS)
3201 /* Even the c == 0.5 case cannot be done unconditionally
3202 when we need to preserve signed zeros, as
3203 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3204 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3205 /* For c == 1.5 we can assume that x * sqrt (x) is always
3206 smaller than pow (x, 1.5) if sqrt will not be expanded
3207 as a call.  */
3208 || (n == 3
3209 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3211 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3212 narg0);
3213 /* Use expand_expr in case the newly built call expression
3214 was folded to a non-call. */
3215 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3216 if (n != 1)
3217 {
3218 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3219 op2 = force_reg (mode, op2);
3220 op2 = expand_powi (op2, mode, abs (n / 2));
3221 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3222 0, OPTAB_LIB_WIDEN);
3223 /* If the original exponent was negative, reciprocate the
3224 result.  */
3225 if (n < 0)
3226 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3227 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3229 return op;
3233 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3234 builtin.  */
3235 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3236 subtarget);
3237 if (op)
3238 return op;
3240 /* Try if the exponent is a third of an integer. In this case
3241 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3242 different from pow (x, 1./3.) due to rounding and behavior
3243 with negative x we need to constrain this transformation to
3244 unsafe math and positive x or finite math. */
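/* For instance, pow (x, 5./3.) yields n == 5 below: the result is
   computed as cbrt (x)**2 * x**1, i.e. x**(2/3) * x, using one cbrt
   call and a few multiplications.  */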
3245 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3246 if (fn != NULL_TREE
3247 && flag_unsafe_math_optimizations
3248 && (tree_expr_nonnegative_p (arg0)
3249 || !HONOR_NANS (mode)))
3251 REAL_VALUE_TYPE dconst3;
3252 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3253 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3254 real_round (&c2, mode, &c2);
3255 n = real_to_integer (&c2);
3256 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3257 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3258 real_convert (&c2, mode, &c2);
3259 if (real_identical (&c2, &c)
3260 && ((optimize_insn_for_speed_p ()
3261 && powi_cost (n/3) <= POWI_MAX_MULTS)
3262 || n == 1))
3264 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3265 narg0);
3266 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3267 if (abs (n) % 3 == 2)
3268 op = expand_simple_binop (mode, MULT, op, op, op,
3269 0, OPTAB_LIB_WIDEN);
3270 if (n != 1)
3271 {
3272 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3273 op2 = force_reg (mode, op2);
3274 op2 = expand_powi (op2, mode, abs (n / 3));
3275 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3276 0, OPTAB_LIB_WIDEN);
3277 /* If the original exponent was negative, reciprocate the
3278 result.  */
3279 if (n < 0)
3280 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3281 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3283 return op;
3287 /* Fall back to optab expansion. */
3288 return expand_builtin_mathfn_2 (exp, target, subtarget);
3291 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3292 a normal call should be emitted rather than expanding the function
3293 in-line. EXP is the expression that is a call to the builtin
3294 function; if convenient, the result should be placed in TARGET. */
3297 expand_builtin_powi (tree exp, rtx target)
3299 tree arg0, arg1;
3300 rtx op0, op1;
3301 enum machine_mode mode;
3302 enum machine_mode mode2;
3304 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3305 return NULL_RTX;
3307 arg0 = CALL_EXPR_ARG (exp, 0);
3308 arg1 = CALL_EXPR_ARG (exp, 1);
3309 mode = TYPE_MODE (TREE_TYPE (exp));
3311 /* Handle constant power. */
3313 if (TREE_CODE (arg1) == INTEGER_CST
3314 && !TREE_OVERFLOW (arg1))
3316 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3318 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3319 Otherwise, check the number of multiplications required. */
3320 if ((TREE_INT_CST_HIGH (arg1) == 0
3321 || TREE_INT_CST_HIGH (arg1) == -1)
3322 && ((n >= -1 && n <= 2)
3323 || (optimize_insn_for_speed_p ()
3324 && powi_cost (n) <= POWI_MAX_MULTS)))
3326 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3327 op0 = force_reg (mode, op0);
3328 return expand_powi (op0, mode, n);
3332 /* Emit a libcall to libgcc. */
3334 /* Mode of the 2nd argument must match that of an int. */
3335 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3337 if (target == NULL_RTX)
3338 target = gen_reg_rtx (mode);
3340 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3341 if (GET_MODE (op0) != mode)
3342 op0 = convert_to_mode (mode, op0, 0);
3343 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3344 if (GET_MODE (op1) != mode2)
3345 op1 = convert_to_mode (mode2, op1, 0);
3347 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3348 target, LCT_CONST, mode, 2,
3349 op0, mode, op1, mode2);
3351 return target;
3354 /* Expand expression EXP which is a call to the strlen builtin. Return
3355 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3356 try to get the result in TARGET, if convenient. */
3359 expand_builtin_strlen (tree exp, rtx target,
3360 enum machine_mode target_mode)
3362 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3363 return NULL_RTX;
3364 else
3365 {
3366 rtx pat;
3367 tree len;
3368 tree src = CALL_EXPR_ARG (exp, 0);
3369 rtx result, src_reg, char_rtx, before_strlen;
3370 enum machine_mode insn_mode = target_mode, char_mode;
3371 enum insn_code icode = CODE_FOR_nothing;
3372 unsigned int align;
3374 /* If the length can be computed at compile-time, return it. */
3375 len = c_strlen (src, 0);
3376 if (len)
3377 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3379 /* If the length can be computed at compile-time and is constant
3380 integer, but there are side-effects in src, evaluate
3381 src for side-effects, then return len.
3382 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3383 can be optimized into: i++; x = 3; */
3384 len = c_strlen (src, 1);
3385 if (len && TREE_CODE (len) == INTEGER_CST)
3387 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3388 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3391 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3393 /* If SRC is not a pointer type, don't do this operation inline.  */
3394 if (align == 0)
3395 return NULL_RTX;
3397 /* Bail out if we can't compute strlen in the right mode. */
3398 while (insn_mode != VOIDmode)
3400 icode = optab_handler (strlen_optab, insn_mode);
3401 if (icode != CODE_FOR_nothing)
3402 break;
3404 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3406 if (insn_mode == VOIDmode)
3407 return NULL_RTX;
3409 /* Make a place to write the result of the instruction. */
3410 result = target;
3411 if (! (result != 0
3412 && REG_P (result)
3413 && GET_MODE (result) == insn_mode
3414 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3415 result = gen_reg_rtx (insn_mode);
3417 /* Make a place to hold the source address. We will not expand
3418 the actual source until we are sure that the expansion will
3419 not fail -- there are trees that cannot be expanded twice. */
3420 src_reg = gen_reg_rtx (Pmode);
3422 /* Mark the beginning of the strlen sequence so we can emit the
3423 source operand later. */
3424 before_strlen = get_last_insn ();
3426 char_rtx = const0_rtx;
3427 char_mode = insn_data[(int) icode].operand[2].mode;
3428 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3429 char_mode))
3430 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3432 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3433 char_rtx, GEN_INT (align));
3434 if (! pat)
3435 return NULL_RTX;
3436 emit_insn (pat);
3438 /* Now that we are assured of success, expand the source.  */
3439 start_sequence ();
3440 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3441 if (pat != src_reg)
3442 emit_move_insn (src_reg, pat);
3443 pat = get_insns ();
3444 end_sequence ();
3446 if (before_strlen)
3447 emit_insn_after (pat, before_strlen);
3448 else
3449 emit_insn_before (pat, get_insns ());
3451 /* Return the value in the proper mode for this function. */
3452 if (GET_MODE (result) == target_mode)
3453 target = result;
3454 else if (target != 0)
3455 convert_move (target, result, 0);
3456 else
3457 target = convert_to_mode (target_mode, result, 0);
3459 return target;
3463 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3464 bytes from constant string DATA + OFFSET and return it as target
3465 constant.  */
3467 static rtx
3468 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3469 enum machine_mode mode)
3471 const char *str = (const char *) data;
3473 gcc_assert (offset >= 0
3474 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3475 <= strlen (str) + 1));
3477 return c_readstr (str + offset, mode);
3480 /* Expand a call EXP to the memcpy builtin.
3481 Return NULL_RTX if we failed, the caller should emit a normal call,
3482 otherwise try to get the result in TARGET, if convenient (and in
3483 mode MODE if that's convenient). */
3486 expand_builtin_memcpy (tree exp, rtx target)
3488 if (!validate_arglist (exp,
3489 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3490 return NULL_RTX;
3491 else
3492 {
3493 tree dest = CALL_EXPR_ARG (exp, 0);
3494 tree src = CALL_EXPR_ARG (exp, 1);
3495 tree len = CALL_EXPR_ARG (exp, 2);
3496 const char *src_str;
3497 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3498 unsigned int dest_align
3499 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3500 rtx dest_mem, src_mem, dest_addr, len_rtx;
3501 HOST_WIDE_INT expected_size = -1;
3502 unsigned int expected_align = 0;
3504 /* If DEST is not a pointer type, call the normal function. */
3505 if (dest_align == 0)
3506 return NULL_RTX;
3508 /* If SRC is not a pointer type, don't do this
3509 operation in-line.  */
3510 if (src_align == 0)
3511 return NULL_RTX;
3513 if (currently_expanding_gimple_stmt)
3514 stringop_block_profile (currently_expanding_gimple_stmt,
3515 &expected_align, &expected_size);
3517 if (expected_align < dest_align)
3518 expected_align = dest_align;
3519 dest_mem = get_memory_rtx (dest, len);
3520 set_mem_align (dest_mem, dest_align);
3521 len_rtx = expand_normal (len);
3522 src_str = c_getstr (src);
3524 /* If SRC is a string constant and block move would be done
3525 by pieces, we can avoid loading the string from memory
3526 and only store the computed constants.  */
3527 if (src_str
3528 && CONST_INT_P (len_rtx)
3529 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3530 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3531 CONST_CAST (char *, src_str),
3532 dest_align, false))
3533 {
3534 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3535 builtin_memcpy_read_str,
3536 CONST_CAST (char *, src_str),
3537 dest_align, false, 0);
3538 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3539 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3540 return dest_mem;
3541 }
3543 src_mem = get_memory_rtx (src, len);
3544 set_mem_align (src_mem, src_align);
3546 /* Copy word part most expediently. */
3547 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3548 CALL_EXPR_TAILCALL (exp)
3549 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3550 expected_align, expected_size);
3552 if (dest_addr == 0)
3553 {
3554 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3555 dest_addr = convert_memory_address (ptr_mode, dest_addr);