Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
#include "config.h"
#include "system.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
-#include "insn-flags.h"
-#include "insn-codes.h"
#include "insn-config.h"
#include "expr.h"
+#include "optabs.h"
+#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
+#include "predict.h"
#include "tm_p.h"
+#include "target.h"
#define CALLED_AS_BUILT_IN(NODE) \
(!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
const char *const built_in_class_names[4]
= {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
-#define DEF_BUILTIN(x) STRINGIFY(x),
+#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA) STRINGX(X),
const char *const built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
/* Setup an array of _DECL trees, make sure each element is
initialized to NULL_TREE. */
-#define DEF_BUILTIN(x) NULL_TREE,
-tree built_in_decls[(int) END_BUILTINS] =
-{
-#include "builtins.def"
-};
-#undef DEF_BUILTIN
+tree built_in_decls[(int) END_BUILTINS];
tree (*lang_type_promotes_to) PARAMS ((tree));
static rtx expand_builtin_apply_args_1 PARAMS ((void));
static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
static void expand_builtin_return PARAMS ((rtx));
+static enum type_class type_to_class PARAMS ((tree));
static rtx expand_builtin_classify_type PARAMS ((tree));
static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
static rtx expand_builtin_constant_p PARAMS ((tree));
static tree stabilize_va_list PARAMS ((tree, int));
static rtx expand_builtin_expect PARAMS ((tree, rtx));
static tree fold_builtin_constant_p PARAMS ((tree));
+static tree fold_builtin_classify_type PARAMS ((tree));
static tree build_function_call_expr PARAMS ((tree, tree));
static int validate_arglist PARAMS ((tree, ...));
if (! host_integerp (TREE_OPERAND (exp, 1), 1))
return align;
- while (((tree_low_cst (TREE_OPERAND (exp, 1), 1) * BITS_PER_UNIT)
- & (max_align - 1))
+ while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
+ & (max_align / BITS_PER_UNIT - 1))
!= 0)
max_align >>= 1;
#endif
tem = memory_address (Pmode, tem);
tem = gen_rtx_MEM (Pmode, tem);
- MEM_ALIAS_SET (tem) = get_frame_alias_set ();
+ set_mem_alias_set (tem, get_frame_alias_set ());
tem = copy_to_reg (tem);
}
tem = memory_address (Pmode,
plus_constant (tem, GET_MODE_SIZE (Pmode)));
tem = gen_rtx_MEM (Pmode, tem);
- MEM_ALIAS_SET (tem) = get_frame_alias_set ();
+ set_mem_alias_set (tem, get_frame_alias_set ());
#endif
return tem;
}
#endif
mem = gen_rtx_MEM (Pmode, buf_addr);
- MEM_ALIAS_SET (mem) = setjmp_alias_set;
+ set_mem_alias_set (mem, setjmp_alias_set);
emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
- MEM_ALIAS_SET (mem) = setjmp_alias_set;
+ set_mem_alias_set (mem, setjmp_alias_set);
emit_move_insn (validize_mem (mem),
force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
stack_save = gen_rtx_MEM (sa_mode,
plus_constant (buf_addr,
2 * GET_MODE_SIZE (Pmode)));
- MEM_ALIAS_SET (stack_save) = setjmp_alias_set;
+ set_mem_alias_set (stack_save, setjmp_alias_set);
emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
/* If there is further processing to do, do it. */
current_function_calls_setjmp = 1;
/* Set this so all the registers get saved in our frame; we need to be
- able to copy the saved values for any registers from frames we unwind. */
+ able to copy the saved values for any registers from frames we unwind. */
current_function_has_nonlocal_label = 1;
}
#endif
{
/* Now restore our arg pointer from the address at which it
- was saved in our stack frame.
- If there hasn't be space allocated for it yet, make
- some now. */
- if (arg_pointer_save_area == 0)
- arg_pointer_save_area
- = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
+ was saved in our stack frame. */
emit_move_insn (virtual_incoming_args_rtx,
- copy_to_reg (arg_pointer_save_area));
+ copy_to_reg (get_arg_pointer_save_area (cfun)));
}
}
#endif
stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
2 * GET_MODE_SIZE (Pmode)));
- MEM_ALIAS_SET (fp) = MEM_ALIAS_SET (lab) = MEM_ALIAS_SET (stack)
- = setjmp_alias_set;
+ set_mem_alias_set (fp, setjmp_alias_set);
+ set_mem_alias_set (lab, setjmp_alias_set);
+ set_mem_alias_set (stack, setjmp_alias_set);
/* Pick up FP, label, and SP from the block and jump. This code is
from expand_goto in stmt.c; see there for detailed comments. */
/* We have to pass a value to the nonlocal_goto pattern that will
get copied into the static_chain pointer, but it does not matter
what that value is, because builtin_setjmp does not use it. */
- emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
+ emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
else
#endif
{
__builtin_setjmp target in the same function. However, we've
already cautioned the user that these functions are for
internal exception handling use only. */
- for (insn = get_last_insn ();
- GET_CODE (insn) != JUMP_INSN;
- insn = PREV_INSN (insn))
- continue;
- REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
- REG_NOTES (insn));
+ for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
+ {
+ if (GET_CODE (insn) == JUMP_INSN)
+ {
+ REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
+ REG_NOTES (insn));
+ break;
+ }
+ else if (GET_CODE (insn) == CALL_INSN)
+ break;
+ }
}
/* Get a MEM rtx for expression EXP which is the address of an operand
return mem;
set_mem_attributes (mem, exp, 0);
-
- /* memcpy, memset and other builtin stringops can alias with anything. */
- MEM_ALIAS_SET (mem) = 0;
+ /* memcpy, memset and other builtin stringops can alias with anything. */
+ set_mem_alias_set (mem, 0);
return mem;
}
\f
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
if (HARD_REGNO_MODE_OK (regno, mode)
- && (mov_optab->handlers[(int) mode].insn_code
- != CODE_FOR_nothing))
+ && have_insn_for (SET, mode))
best_mode = mode;
mode = best_mode;
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
if (HARD_REGNO_MODE_OK (regno, mode)
- && (mov_optab->handlers[(int) mode].insn_code
- != CODE_FOR_nothing))
+ && have_insn_for (SET, mode))
best_mode = mode;
mode = best_mode;
if (size % align != 0)
size = CEIL (size, align) * align;
reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
- mem = change_address (result, mode,
- plus_constant (XEXP (result, 0), size));
+ mem = adjust_address (result, mode, size);
savevec[nelts++] = (savep
? gen_rtx_SET (VOIDmode, mem, reg)
: gen_rtx_SET (VOIDmode, reg, mem));
tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
- emit_move_insn (change_address (registers, mode,
- plus_constant (XEXP (registers, 0),
- size)),
- tem);
+ emit_move_insn (adjust_address (registers, mode, size), tem);
size += GET_MODE_SIZE (mode);
}
/* Save the arg pointer to the block. */
- emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
+ emit_move_insn (adjust_address (registers, Pmode, 0),
copy_to_reg (virtual_incoming_args_rtx));
size = GET_MODE_SIZE (Pmode);
"invisible" first argument. */
if (struct_value_incoming_rtx)
{
- emit_move_insn (change_address (registers, Pmode,
- plus_constant (XEXP (registers, 0),
- size)),
+ emit_move_insn (adjust_address (registers, Pmode, size),
copy_to_reg (struct_value_incoming_rtx));
size += GET_MODE_SIZE (Pmode);
}
emit_move_insn (incoming_args,
gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
- incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
- incoming_args, 0, OPTAB_LIB_WIDEN);
+ incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
+ incoming_args, 0, OPTAB_LIB_WIDEN);
#endif
/* Perform postincrements before actually calling the function. */
if (size % align != 0)
size = CEIL (size, align) * align;
reg = gen_rtx_REG (mode, regno);
- emit_move_insn (reg,
- change_address (arguments, mode,
- plus_constant (XEXP (arguments, 0),
- size)));
-
+ emit_move_insn (reg, adjust_address (arguments, mode, size));
use_reg (&call_fusage, reg);
size += GET_MODE_SIZE (mode);
}
if (struct_value_rtx)
{
rtx value = gen_reg_rtx (Pmode);
- emit_move_insn (value,
- change_address (arguments, Pmode,
- plus_constant (XEXP (arguments, 0),
- size)));
+ emit_move_insn (value, adjust_address (arguments, Pmode, size));
emit_move_insn (struct_value_rtx, value);
if (GET_CODE (struct_value_rtx) == REG)
use_reg (&call_fusage, struct_value_rtx);
}
/* All arguments and registers used for the call are set up by now! */
- function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
+ function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
/* Ensure address is valid. SYMBOL_REF is already valid, so no need,
and we don't want to load it into a register as an optimization,
gen_rtx_MEM (FUNCTION_MODE, function),
const0_rtx, NULL_RTX, const0_rtx));
- emit_move_insn (change_address (result, GET_MODE (valreg),
- XEXP (result, 0)),
- valreg);
+ emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
}
else
#endif
if (size % align != 0)
size = CEIL (size, align) * align;
reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
- emit_move_insn (reg,
- change_address (result, mode,
- plus_constant (XEXP (result, 0),
- size)));
+ emit_move_insn (reg, adjust_address (result, mode, size));
push_to_sequence (call_fusage);
emit_insn (gen_rtx_USE (VOIDmode, reg));
expand_null_return ();
}
+/* Used by expand_builtin_classify_type and fold_builtin_classify_type.
+   Map the tree code of TYPE onto the corresponding type_class value
+   returned by __builtin_classify_type; tree codes with no dedicated
+   class fall through to no_type_class.  */
+static enum type_class
+type_to_class (type)
+ tree type;
+{
+ switch (TREE_CODE (type))
+ {
+ case VOID_TYPE: return void_type_class;
+ case INTEGER_TYPE: return integer_type_class;
+ case CHAR_TYPE: return char_type_class;
+ case ENUMERAL_TYPE: return enumeral_type_class;
+ case BOOLEAN_TYPE: return boolean_type_class;
+ case POINTER_TYPE: return pointer_type_class;
+ case REFERENCE_TYPE: return reference_type_class;
+ case OFFSET_TYPE: return offset_type_class;
+ case REAL_TYPE: return real_type_class;
+ case COMPLEX_TYPE: return complex_type_class;
+ case FUNCTION_TYPE: return function_type_class;
+ case METHOD_TYPE: return method_type_class;
+ case RECORD_TYPE: return record_type_class;
+ /* A qualified union is classified like a plain union.  */
+ case UNION_TYPE:
+ case QUAL_UNION_TYPE: return union_type_class;
+ /* A string-flagged array is reported as a string, not an array.  */
+ case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
+ ? string_type_class : array_type_class);
+ case SET_TYPE: return set_type_class;
+ case FILE_TYPE: return file_type_class;
+ case LANG_TYPE: return lang_type_class;
+ default: return no_type_class;
+ }
+}
+
/* Expand a call to __builtin_classify_type with arguments found in
ARGLIST. */
static rtx
tree arglist;
{
if (arglist != 0)
- {
- tree type = TREE_TYPE (TREE_VALUE (arglist));
- enum tree_code code = TREE_CODE (type);
- if (code == VOID_TYPE)
- return GEN_INT (void_type_class);
- if (code == INTEGER_TYPE)
- return GEN_INT (integer_type_class);
- if (code == CHAR_TYPE)
- return GEN_INT (char_type_class);
- if (code == ENUMERAL_TYPE)
- return GEN_INT (enumeral_type_class);
- if (code == BOOLEAN_TYPE)
- return GEN_INT (boolean_type_class);
- if (code == POINTER_TYPE)
- return GEN_INT (pointer_type_class);
- if (code == REFERENCE_TYPE)
- return GEN_INT (reference_type_class);
- if (code == OFFSET_TYPE)
- return GEN_INT (offset_type_class);
- if (code == REAL_TYPE)
- return GEN_INT (real_type_class);
- if (code == COMPLEX_TYPE)
- return GEN_INT (complex_type_class);
- if (code == FUNCTION_TYPE)
- return GEN_INT (function_type_class);
- if (code == METHOD_TYPE)
- return GEN_INT (method_type_class);
- if (code == RECORD_TYPE)
- return GEN_INT (record_type_class);
- if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
- return GEN_INT (union_type_class);
- if (code == ARRAY_TYPE)
- {
- if (TYPE_STRING_FLAG (type))
- return GEN_INT (string_type_class);
- else
- return GEN_INT (array_type_class);
- }
- if (code == SET_TYPE)
- return GEN_INT (set_type_class);
- if (code == FILE_TYPE)
- return GEN_INT (file_type_class);
- if (code == LANG_TYPE)
- return GEN_INT (lang_type_class);
- }
+ return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
return GEN_INT (no_type_class);
}
switch (DECL_FUNCTION_CODE (fndecl))
{
- case BUILT_IN_SIN:
+ case BUILT_IN_SIN:
+ case BUILT_IN_SINF:
+ case BUILT_IN_SINL:
builtin_optab = sin_optab; break;
- case BUILT_IN_COS:
+ case BUILT_IN_COS:
+ case BUILT_IN_COSF:
+ case BUILT_IN_COSL:
builtin_optab = cos_optab; break;
- case BUILT_IN_FSQRT:
+ case BUILT_IN_FSQRT:
+ case BUILT_IN_SQRTF:
+ case BUILT_IN_SQRTL:
builtin_optab = sqrt_optab; break;
default:
abort ();
fn = built_in_decls[BUILT_IN_STRCAT];
/* If the replacement _DECL isn't initialized, don't do the
- transformation. */
+ transformation. */
if (!fn)
return 0;
fn = built_in_decls[BUILT_IN_STRLEN];
/* If the replacement _DECL isn't initialized, don't do the
- transformation. */
+ transformation. */
if (!fn)
return 0;
}
result = gen_rtx_MEM (TYPE_MODE (type), addr);
- MEM_ALIAS_SET (result) = get_varargs_alias_set ();
+ set_mem_alias_set (result, get_varargs_alias_set ());
return result;
}
/* "Dereference" to BLKmode memories. */
dstb = gen_rtx_MEM (BLKmode, dstb);
- MEM_ALIAS_SET (dstb) = get_alias_set (TREE_TYPE (TREE_TYPE (dst)));
+ set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
srcb = gen_rtx_MEM (BLKmode, srcb);
- MEM_ALIAS_SET (srcb) = get_alias_set (TREE_TYPE (TREE_TYPE (src)));
+ set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
/* Copy. */
emit_block_move (dstb, srcb, size, TYPE_ALIGN (va_list_type_node));
}
/* If the string passed to fputs is a constant and is one character
- long, we attempt to transform this call into __builtin_fputc(). */
+ long, we attempt to transform this call into __builtin_fputc(). */
static rtx
expand_builtin_fputs (arglist, ignore)
fn_fwrite = built_in_decls[BUILT_IN_FWRITE];
/* If the return value is used, or the replacement _DECL isn't
- initialized, don't do the transformation. */
+ initialized, don't do the transformation. */
if (!ignore || !fn_fputc || !fn_fwrite)
return 0;
- /* Verify the arguments in the original call. */
+ /* Verify the arguments in the original call. */
if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
|| current_function_check_memory_usage)
return 0;
VOIDmode, EXPAND_NORMAL);
}
-/* Expand a call to __builtin_expect. We return our argument and
- emit a NOTE_INSN_EXPECTED_VALUE note. */
+/* Expand a call to __builtin_expect. We return our argument and emit a
+ NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
+ a non-jump context. */
static rtx
expand_builtin_expect (arglist, target)
return target;
}
+
+/* Like expand_builtin_expect, except do this in a jump context. This is
+ called from do_jump if the conditional is a __builtin_expect. Return either
+ a SEQUENCE of insns to emit the jump or NULL if we cannot optimize
+ __builtin_expect. We need to optimize this at jump time so that machines
+ like the PowerPC don't turn the test into a SCC operation, and then jump
+ based on the test being 0/1.
+
+ EXP is the __builtin_expect CALL_EXPR; IF_FALSE_LABEL/IF_TRUE_LABEL are
+ the jump targets the enclosing do_jump would use. */
+
+rtx
+expand_builtin_expect_jump (exp, if_false_label, if_true_label)
+ tree exp;
+ rtx if_false_label;
+ rtx if_true_label;
+{
+ tree arglist = TREE_OPERAND (exp, 1);
+ tree arg0 = TREE_VALUE (arglist);
+ tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
+ rtx ret = NULL_RTX;
+
+ /* Only handle __builtin_expect (test, 0) and
+ __builtin_expect (test, 1). */
+ if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
+ && TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_INT_CST_LOW (arg1) == 0 || TREE_INT_CST_LOW (arg1) == 1)
+ && TREE_INT_CST_HIGH (arg1) == 0)
+ {
+ int j;
+ /* Number of conditional jumps we attached a prediction note to;
+ if it stays zero we abandon the optimization below. */
+ int num_jumps = 0;
+
+ /* If we fail to locate an appropriate conditional jump, we'll
+ fall back to normal evaluation. Ensure that the expression
+ can be re-evaluated. */
+ switch (unsafe_for_reeval (arg0))
+ {
+ case 0: /* Safe. */
+ break;
+
+ case 1: /* Mildly unsafe. */
+ arg0 = unsave_expr (arg0);
+ break;
+
+ case 2: /* Wildly unsafe. */
+ return NULL_RTX;
+ }
+
+ /* Expand the jump insns. */
+ start_sequence ();
+ do_jump (arg0, if_false_label, if_true_label);
+ ret = gen_sequence ();
+ end_sequence ();
+
+ /* Now that the __builtin_expect has been validated, go through and add
+ the expect's to each of the conditional jumps. If we run into an
+ error, just give up and generate the 'safe' code of doing a SCC
+ operation and then doing a branch on that. */
+ for (j = 0; j < XVECLEN (ret, 0); j++)
+ {
+ rtx insn = XVECEXP (ret, 0, j);
+ rtx pattern;
+
+ if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
+ && (pattern = pc_set (insn)) != NULL_RTX)
+ {
+ rtx ifelse = SET_SRC (pattern);
+ rtx label;
+ /* Nonzero when the branch in question is predicted taken. */
+ int taken;
+
+ if (GET_CODE (ifelse) != IF_THEN_ELSE)
+ continue;
+
+ if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
+ {
+ taken = 1;
+ label = XEXP (XEXP (ifelse, 1), 0);
+ }
+ /* An inverted jump reverses the probabilities. */
+ else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
+ {
+ taken = 0;
+ label = XEXP (XEXP (ifelse, 2), 0);
+ }
+ /* We shouldn't have to worry about conditional returns during
+ the expansion stage, but handle it gracefully anyway. */
+ else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
+ {
+ taken = 1;
+ label = NULL_RTX;
+ }
+ /* An inverted return reverses the probabilities. */
+ else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
+ {
+ taken = 0;
+ label = NULL_RTX;
+ }
+ else
+ continue;
+
+ /* If the test is expected to fail, reverse the
+ probabilities. */
+ if (TREE_INT_CST_LOW (arg1) == 0)
+ taken = 1 - taken;
+
+ /* If we are jumping to the false label, reverse the
+ probabilities. */
+ if (label == NULL_RTX)
+ ; /* conditional return */
+ else if (label == if_false_label)
+ taken = 1 - taken;
+ else if (label != if_true_label)
+ continue;
+
+ num_jumps++;
+ predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
+ }
+ }
+
+ /* If no jumps were modified, fail and do __builtin_expect the normal
+ way. */
+ if (num_jumps == 0)
+ ret = NULL_RTX;
+ }
+
+ return ret;
+}
+
\f
/* Expand an expression EXP that calls a built-in function,
with result going to TARGET if that's convenient
tree arglist = TREE_OPERAND (exp, 1);
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
-#ifdef MD_EXPAND_BUILTIN
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
- return MD_EXPAND_BUILTIN (exp, target, subtarget, mode, ignore);
-#endif
+ return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
/* When not optimizing, generate calls to library functions for a certain
set of builtins. */
if (! optimize && ! CALLED_AS_BUILT_IN (fndecl)
&& (fcode == BUILT_IN_SIN || fcode == BUILT_IN_COS
- || fcode == BUILT_IN_FSQRT || fcode == BUILT_IN_MEMSET
+ || fcode == BUILT_IN_FSQRT || fcode == BUILT_IN_SQRTF
+ || fcode == BUILT_IN_SQRTL || fcode == BUILT_IN_MEMSET
|| fcode == BUILT_IN_MEMCPY || fcode == BUILT_IN_MEMCMP
|| fcode == BUILT_IN_BCMP || fcode == BUILT_IN_BZERO
|| fcode == BUILT_IN_INDEX || fcode == BUILT_IN_RINDEX
switch (fcode)
{
case BUILT_IN_ABS:
+ case BUILT_IN_LABS:
+ case BUILT_IN_LLABS:
+ case BUILT_IN_IMAXABS:
case BUILT_IN_FABS:
+ case BUILT_IN_FABSF:
+ case BUILT_IN_FABSL:
/* build_function_call changes these into ABS_EXPR. */
abort ();
case BUILT_IN_CONJ:
+ case BUILT_IN_CONJF:
+ case BUILT_IN_CONJL:
case BUILT_IN_CREAL:
+ case BUILT_IN_CREALF:
+ case BUILT_IN_CREALL:
case BUILT_IN_CIMAG:
+ case BUILT_IN_CIMAGF:
+ case BUILT_IN_CIMAGL:
/* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
and IMAGPART_EXPR. */
abort ();
case BUILT_IN_SIN:
+ case BUILT_IN_SINF:
+ case BUILT_IN_SINL:
case BUILT_IN_COS:
+ case BUILT_IN_COSF:
+ case BUILT_IN_COSL:
/* Treat these like sqrt only if unsafe math optimizations are allowed,
because of possible accuracy problems. */
if (! flag_unsafe_math_optimizations)
break;
case BUILT_IN_FSQRT:
+ case BUILT_IN_SQRTF:
+ case BUILT_IN_SQRTL:
target = expand_builtin_mathfn (exp, target, subtarget);
if (target)
return target;
return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
case BUILT_IN_EH_RETURN:
expand_builtin_eh_return (TREE_VALUE (arglist),
- TREE_VALUE (TREE_CHAIN (arglist)),
- TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
+ TREE_VALUE (TREE_CHAIN (arglist)));
return const0_rtx;
+#ifdef EH_RETURN_DATA_REGNO
+ case BUILT_IN_EH_RETURN_DATA_REGNO:
+ return expand_builtin_eh_return_data_regno (arglist);
+#endif
case BUILT_IN_VARARGS_START:
return expand_builtin_va_start (0, arglist);
case BUILT_IN_STDARG_START:
has side effects, show we don't know it to be a constant.
Likewise if it's a pointer or aggregate type since in those
case we only want literals, since those are only optimized
- when generating RTL, not later. */
+ when generating RTL, not later.
+ And finally, if we are compiling an initializer, not code, we
+ need to return a definite result now; there's not going to be any
+ more optimization done. */
if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
|| AGGREGATE_TYPE_P (TREE_TYPE (arglist))
- || POINTER_TYPE_P (TREE_TYPE (arglist)))
+ || POINTER_TYPE_P (TREE_TYPE (arglist))
+ || cfun == 0)
return integer_zero_node;
return 0;
}
+/* Fold a call to __builtin_classify_type with argument list ARGLIST
+   into an INTEGER_CST node; with no argument the result is
+   no_type_class, mirroring expand_builtin_classify_type. */
+static tree
+fold_builtin_classify_type (arglist)
+ tree arglist;
+{
+ if (arglist == 0)
+ return build_int_2 (no_type_class, 0);
+
+ return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
+}
+
/* Used by constant folding to eliminate some builtin calls early. EXP is
the CALL_EXPR of a call to a builtin function. */
case BUILT_IN_CONSTANT_P:
return fold_builtin_constant_p (arglist);
+ case BUILT_IN_CLASSIFY_TYPE:
+ return fold_builtin_classify_type (arglist);
+
case BUILT_IN_STRLEN:
if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
{
static int
validate_arglist VPARAMS ((tree arglist, ...))
{
-#ifndef ANSI_PROTOTYPES
- tree arglist;
-#endif
enum tree_code code;
- va_list ap;
-
- VA_START (ap, arglist);
+ int res = 0;
-#ifndef ANSI_PROTOTYPES
- arglist = va_arg (ap, tree);
-#endif
+ VA_OPEN (ap, arglist);
+ VA_FIXEDARG (ap, tree, arglist);
do {
code = va_arg (ap, enum tree_code);
{
case 0:
/* This signifies an ellipses, any further arguments are all ok. */
- va_end (ap);
- return 1;
+ res = 1;
+ goto end;
case VOID_TYPE:
/* This signifies an endlink, if no arguments remain, return
true, otherwise return false. */
- va_end (ap);
- return arglist == 0;
+ res = arglist == 0;
+ goto end;
default:
/* If no parameters remain or the parameter's code does not
match the specified code, return false. Otherwise continue
checking any remaining arguments. */
if (arglist == 0 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
- {
- va_end (ap);
- return 0;
- }
+ goto end;
break;
}
arglist = TREE_CHAIN (arglist);
} while (1);
+
+ /* We need gotos here since we can only have one VA_CLOSE in a
+ function. */
+ end: ;
+ VA_CLOSE (ap);
+
+ return res;
+}
+
+/* Default version of target-specific builtin setup that does nothing.
+   Used as the targetm.init_builtins hook for targets that declare no
+   machine-specific builtins. */
+
+void
+default_init_builtins ()
+{
+}
+
+/* Default target-specific builtin expander that does nothing.
+   NOTE(review): returning NULL_RTX presumably tells the caller that no
+   expansion happened — confirm against targetm.expand_builtin's callers. */
+
+rtx
+default_expand_builtin (exp, target, subtarget, mode, ignore)
+ tree exp ATTRIBUTE_UNUSED;
+ rtx target ATTRIBUTE_UNUSED;
+ rtx subtarget ATTRIBUTE_UNUSED;
+ enum machine_mode mode ATTRIBUTE_UNUSED;
+ int ignore ATTRIBUTE_UNUSED;
+{
+ return NULL_RTX;
}