/* Expand builtin functions.
Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
Free Software Foundation, Inc.
This file is part of GCC.
#include "real.h"
#include "rtl.h"
#include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "value-prof.h"
#include "diagnostic.h"
+#ifndef SLOW_UNALIGNED_ACCESS
+#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
+#endif
+
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
+#ifdef HAVE_mpc
+static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
+#ifdef HAVE_mpc_pow
+static tree do_mpc_arg2 (tree, tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t));
+#endif
+#endif
/* Define the names of the builtin function types and codes. */
const char *const built_in_class_names[4]
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
-static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
-static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
+static rtx expand_builtin_int_roundingfn (tree, rtx);
+static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
-static tree stabilize_va_list (tree, int);
+static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
-static tree fold_builtin_expect (tree, tree);
+static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
-static tree fold_builtin_strlen (tree);
-static tree fold_builtin_inf (tree, int);
+static tree fold_builtin_strlen (location_t, tree);
+static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
-static tree rewrite_call_expr (tree, int, tree, int, ...);
+static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
-static tree fold_trunc_transparent_mathfn (tree, tree);
+static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
-static tree fold_builtin_sqrt (tree, tree);
-static tree fold_builtin_cbrt (tree, tree);
-static tree fold_builtin_pow (tree, tree, tree, tree);
-static tree fold_builtin_powi (tree, tree, tree, tree);
-static tree fold_builtin_cos (tree, tree, tree);
-static tree fold_builtin_cosh (tree, tree, tree);
+static tree fold_builtin_sqrt (location_t, tree, tree);
+static tree fold_builtin_cbrt (location_t, tree, tree);
+static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
+static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
+static tree fold_builtin_cos (location_t, tree, tree, tree);
+static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
-static tree fold_builtin_trunc (tree, tree);
-static tree fold_builtin_floor (tree, tree);
-static tree fold_builtin_ceil (tree, tree);
-static tree fold_builtin_round (tree, tree);
-static tree fold_builtin_int_roundingfn (tree, tree);
+static tree fold_builtin_trunc (location_t, tree, tree);
+static tree fold_builtin_floor (location_t, tree, tree);
+static tree fold_builtin_ceil (location_t, tree, tree);
+static tree fold_builtin_round (location_t, tree, tree);
+static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
-static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
-static tree fold_builtin_strchr (tree, tree, tree);
-static tree fold_builtin_memchr (tree, tree, tree, tree);
-static tree fold_builtin_memcmp (tree, tree, tree);
-static tree fold_builtin_strcmp (tree, tree);
-static tree fold_builtin_strncmp (tree, tree, tree);
-static tree fold_builtin_signbit (tree, tree);
-static tree fold_builtin_copysign (tree, tree, tree, tree);
-static tree fold_builtin_isascii (tree);
-static tree fold_builtin_toascii (tree);
-static tree fold_builtin_isdigit (tree);
-static tree fold_builtin_fabs (tree, tree);
-static tree fold_builtin_abs (tree, tree);
-static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
+static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
+static tree fold_builtin_strchr (location_t, tree, tree, tree);
+static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
+static tree fold_builtin_memcmp (location_t, tree, tree, tree);
+static tree fold_builtin_strcmp (location_t, tree, tree);
+static tree fold_builtin_strncmp (location_t, tree, tree, tree);
+static tree fold_builtin_signbit (location_t, tree, tree);
+static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
+static tree fold_builtin_isascii (location_t, tree);
+static tree fold_builtin_toascii (location_t, tree);
+static tree fold_builtin_isdigit (location_t, tree);
+static tree fold_builtin_fabs (location_t, tree, tree);
+static tree fold_builtin_abs (location_t, tree, tree);
+static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
enum tree_code);
-static tree fold_builtin_n (tree, tree *, int, bool);
-static tree fold_builtin_0 (tree, bool);
-static tree fold_builtin_1 (tree, tree, bool);
-static tree fold_builtin_2 (tree, tree, tree, bool);
-static tree fold_builtin_3 (tree, tree, tree, tree, bool);
-static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
-static tree fold_builtin_varargs (tree, tree, bool);
-
-static tree fold_builtin_strpbrk (tree, tree, tree);
-static tree fold_builtin_strstr (tree, tree, tree);
-static tree fold_builtin_strrchr (tree, tree, tree);
-static tree fold_builtin_strcat (tree, tree);
-static tree fold_builtin_strncat (tree, tree, tree);
-static tree fold_builtin_strspn (tree, tree);
-static tree fold_builtin_strcspn (tree, tree);
-static tree fold_builtin_sprintf (tree, tree, tree, int);
+static tree fold_builtin_n (location_t, tree, tree *, int, bool);
+static tree fold_builtin_0 (location_t, tree, bool);
+static tree fold_builtin_1 (location_t, tree, tree, bool);
+static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
+static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
+static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
+static tree fold_builtin_varargs (location_t, tree, tree, bool);
+
+static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
+static tree fold_builtin_strstr (location_t, tree, tree, tree);
+static tree fold_builtin_strrchr (location_t, tree, tree, tree);
+static tree fold_builtin_strcat (location_t, tree, tree);
+static tree fold_builtin_strncat (location_t, tree, tree, tree);
+static tree fold_builtin_strspn (location_t, tree, tree);
+static tree fold_builtin_strcspn (location_t, tree, tree);
+static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
+static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
-static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
-static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
-static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
-static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
-static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
+static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
+static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
+static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
+static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
+static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
enum built_in_function);
static bool init_target_chars (void);
static tree do_mpfr_arg3 (tree, tree, tree, tree,
int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
-#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
static tree do_mpfr_bessel_n (tree, tree, tree,
int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
-#endif
-
-/* Return true if NODE should be considered for inline expansion regardless
- of the optimization level. This means whenever a function is invoked with
- its "internal" name, which normally contains the prefix "__builtin". */
-static bool called_as_built_in (tree node)
+bool
+is_builtin_name (const char *name)
{
- const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
if (strncmp (name, "__builtin_", 10) == 0)
return true;
if (strncmp (name, "__sync_", 7) == 0)
return false;
}
+/* Return true if NODE should be considered for inline expansion regardless
+   of the optimization level.  This means whenever a function is invoked with
+   its "internal" name, which normally contains the prefix "__builtin".  */
+
+static bool
+called_as_built_in (tree node)
+{
+  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
+     we want the name used to call the function, not the name it
+     will have.  The prefix test itself is delegated to is_builtin_name.  */
+  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
+  return is_builtin_name (name);
+}
+
+/* Return the alignment in bits of EXP, an object.
+   Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
+ guessed alignment e.g. from type alignment. */
+
+int
+get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
+{
+  /* INNER tracks the alignment (in bits) implied by EXP's position within
+     its containing object; start from the most optimistic value and only
+     tighten it as offsets are examined.  */
+  unsigned int inner;
+
+  inner = max_align;
+  if (handled_component_p (exp))
+    {
+      HOST_WIDE_INT bitsize, bitpos;
+      tree offset;
+      enum machine_mode mode;
+      int unsignedp, volatilep;
+
+      /* Peel the component reference down to its base object, collecting
+	 the constant bit position and any variable byte offset.  */
+      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
+				 &mode, &unsignedp, &volatilep, true);
+      /* The lowest set bit of the constant bit position bounds the
+	 achievable alignment.  */
+      if (bitpos)
+	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
+      /* OFFSET may be a chain of PLUS_EXPRs; examine each term and keep
+	 tightening INNER from its provably-zero low-order bits.  */
+      while (offset)
+	{
+	  tree next_offset;
+
+	  if (TREE_CODE (offset) == PLUS_EXPR)
+	    {
+	      next_offset = TREE_OPERAND (offset, 0);
+	      offset = TREE_OPERAND (offset, 1);
+	    }
+	  else
+	    next_offset = NULL;
+	  if (host_integerp (offset, 1))
+	    {
+	      /* Any overflow in calculating offset_bits won't change
+		 the alignment.  */
+	      unsigned offset_bits
+		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
+
+	      if (offset_bits)
+		inner = MIN (inner, (offset_bits & -offset_bits));
+	    }
+	  else if (TREE_CODE (offset) == MULT_EXPR
+		   && host_integerp (TREE_OPERAND (offset, 1), 1))
+	    {
+	      /* Any overflow in calculating offset_factor won't change
+		 the alignment.  */
+	      unsigned offset_factor
+		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
+		   * BITS_PER_UNIT);
+
+	      if (offset_factor)
+		inner = MIN (inner, (offset_factor & -offset_factor));
+	    }
+	  else
+	    {
+	      /* An offset of unknown form guarantees only byte alignment;
+		 no point in looking at further terms.  */
+	      inner = MIN (inner, BITS_PER_UNIT);
+	      break;
+	    }
+	  offset = next_offset;
+	}
+    }
+  /* Combine the path-derived alignment with whatever the base object
+     itself guarantees (its DECL, constant, or type alignment).  */
+  if (DECL_P (exp))
+    align = MIN (inner, DECL_ALIGN (exp));
+#ifdef CONSTANT_ALIGNMENT
+  else if (CONSTANT_CLASS_P (exp))
+    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
+#endif
+  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
+	   || TREE_CODE (exp) == INDIRECT_REF)
+    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
+  else
+    align = MIN (align, inner);
+  return MIN (align, max_align);
+}
+
+/* Returns true iff we can trust that alignment information has been
+   calculated properly.  */
+
+bool
+can_trust_pointer_alignment (void)
+{
+  /* We rely on TER (temporary expression replacement) to compute accurate
+     alignment information, so this holds only when optimizing with the
+     tree TER pass enabled.  */
+  return (optimize && flag_tree_ter);
+}
+
/* Return the alignment in bits of EXP, a pointer valued expression.
But don't return more than MAX_ALIGN no matter what.
The alignment returned is, by default, the alignment of the thing that
{
unsigned int align, inner;
- /* We rely on TER to compute accurate alignment information. */
- if (!(optimize && flag_tree_ter))
+ if (!can_trust_pointer_alignment ())
return 0;
if (!POINTER_TYPE_P (TREE_TYPE (exp)))
{
switch (TREE_CODE (exp))
{
- case NOP_EXPR:
- case CONVERT_EXPR:
- case NON_LVALUE_EXPR:
+ CASE_CONVERT:
exp = TREE_OPERAND (exp, 0);
if (! POINTER_TYPE_P (TREE_TYPE (exp)))
return align;
case ADDR_EXPR:
/* See what we are pointing at and look at its alignment. */
- exp = TREE_OPERAND (exp, 0);
- inner = max_align;
- if (handled_component_p (exp))
- {
- HOST_WIDE_INT bitsize, bitpos;
- tree offset;
- enum machine_mode mode;
- int unsignedp, volatilep;
-
- exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, true);
- if (bitpos)
- inner = MIN (inner, (unsigned) (bitpos & -bitpos));
- if (offset && TREE_CODE (offset) == PLUS_EXPR
- && host_integerp (TREE_OPERAND (offset, 1), 1))
- {
- /* Any overflow in calculating offset_bits won't change
- the alignment. */
- unsigned offset_bits
- = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
- * BITS_PER_UNIT);
-
- if (offset_bits)
- inner = MIN (inner, (offset_bits & -offset_bits));
- offset = TREE_OPERAND (offset, 0);
- }
- if (offset && TREE_CODE (offset) == MULT_EXPR
- && host_integerp (TREE_OPERAND (offset, 1), 1))
- {
- /* Any overflow in calculating offset_factor won't change
- the alignment. */
- unsigned offset_factor
- = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
- * BITS_PER_UNIT);
-
- if (offset_factor)
- inner = MIN (inner, (offset_factor & -offset_factor));
- }
- else if (offset)
- inner = MIN (inner, BITS_PER_UNIT);
- }
- if (DECL_P (exp))
- align = MIN (inner, DECL_ALIGN (exp));
-#ifdef CONSTANT_ALIGNMENT
- else if (CONSTANT_CLASS_P (exp))
- align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
-#endif
- else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
- || TREE_CODE (exp) == INDIRECT_REF)
- align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
- else
- align = MIN (align, inner);
- return MIN (align, max_align);
+ return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
default:
return align;
and return that. This would perhaps not be valid if we were dealing
with named arrays in addition to literal string constants. */
- return size_diffop (size_int (max), offset_node);
+ return size_diffop_loc (input_location, size_int (max), offset_node);
}
/* We have a known offset into the string. Start searching there for
runtime. */
if (offset < 0 || offset > max)
{
- warning (0, "offset outside bounds of constant string");
+ /* Suppress multiple warnings for propagated constant strings. */
+ if (! TREE_NO_WARNING (src))
+ {
+ warning (0, "offset outside bounds of constant string");
+ TREE_NO_WARNING (src) = 1;
+ }
return NULL_TREE;
}
tem = hard_frame_pointer_rtx;
/* Tell reload not to eliminate the frame pointer. */
- current_function_accesses_prior_frames = 1;
+ crtl->accesses_prior_frames = 1;
}
#endif
/* Tell optimize_save_area_alloca that extra work is going to
need to go on during alloca. */
- current_function_calls_setjmp = 1;
+ cfun->calls_setjmp = 1;
/* We have a nonlocal label. */
- current_function_has_nonlocal_label = 1;
+ cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call. This is
{
/* Clobber the FP when we get here, so we have to make sure it's
marked as used by this function. */
- emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+ emit_use (hard_frame_pointer_rtx);
/* Mark the static chain as clobbered here so life information
doesn't get messed up for it. */
- emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
+ emit_clobber (static_chain_rtx);
/* Now put in the code to restore the frame pointer, and argument
pointer, if needed. */
emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
/* This might change the hard frame pointer in ways that aren't
apparent to early optimization passes, so force a clobber. */
- emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
+ emit_clobber (hard_frame_pointer_rtx);
}
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
{
/* Now restore our arg pointer from the address at which it
was saved in our stack frame. */
- emit_move_insn (virtual_incoming_args_rtx,
+ emit_move_insn (crtl->args.internal_arg_pointer,
copy_to_reg (get_arg_pointer_save_area ()));
}
}
rtx fp, lab, stack, insn, last;
enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
+ /* DRAP is needed for stack realign if longjmp is expanded to current
+ function */
+ if (SUPPORTS_STACK_ALIGNMENT)
+ crtl->need_drap = true;
+
if (setjmp_alias_set == -1)
setjmp_alias_set = new_alias_set ();
{
lab = copy_to_reg (lab);
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode,
- gen_rtx_SCRATCH (VOIDmode))));
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode,
- hard_frame_pointer_rtx)));
+ emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
+ emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
emit_move_insn (hard_frame_pointer_rtx, fp);
emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
- emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
- emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
+ emit_use (hard_frame_pointer_rtx);
+ emit_use (stack_pointer_rtx);
emit_indirect_jump (lab);
}
}
if (JUMP_P (insn))
{
- REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
- REG_NOTES (insn));
+ add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
break;
}
else if (CALL_P (insn))
r_label = convert_memory_address (Pmode, r_label);
r_save_area = expand_normal (t_save_area);
r_save_area = convert_memory_address (Pmode, r_save_area);
+ /* Copy the address of the save location to a register just in case it was based
+ on the frame pointer. */
+ r_save_area = copy_to_reg (r_save_area);
r_fp = gen_rtx_MEM (Pmode, r_save_area);
r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
- current_function_has_nonlocal_goto = 1;
+ crtl->has_nonlocal_goto = 1;
#ifdef HAVE_nonlocal_goto
/* ??? We no longer need to pass the static chain value, afaik. */
{
r_label = copy_to_reg (r_label);
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode,
- gen_rtx_SCRATCH (VOIDmode))));
-
- emit_insn (gen_rtx_CLOBBER (VOIDmode,
- gen_rtx_MEM (BLKmode,
- hard_frame_pointer_rtx)));
+ emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
+ emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
/* Restore frame pointer for containing function.
This sets the actual hard register used for the frame pointer
/* USE of hard_frame_pointer_rtx added for consistency;
not clear if really needed. */
- emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
- emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
+ emit_use (hard_frame_pointer_rtx);
+ emit_use (stack_pointer_rtx);
/* If the architecture is using a GP register, we must
conservatively assume that the target function makes use of it.
a no-op if the GP register is a global invariant.) */
if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
&& fixed_regs[PIC_OFFSET_TABLE_REGNUM])
- emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
+ emit_use (pic_offset_table_rtx);
emit_indirect_jump (r_label);
}
{
if (JUMP_P (insn))
{
- REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
- const0_rtx, REG_NOTES (insn));
+ add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
break;
}
else if (CALL_P (insn))
static rtx
get_memory_rtx (tree exp, tree len)
{
- rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
- rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
+ tree orig_exp = exp;
+ rtx addr, mem;
+ HOST_WIDE_INT off;
+
+ /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
+ from its expression, for expr->a.b only <variable>.a.b is recorded. */
+ if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
+ exp = TREE_OPERAND (exp, 0);
+
+ addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
+ mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
/* Get an expression we can use to find the attributes to assign to MEM.
If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
we can. First remove any nops. */
- while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
- || TREE_CODE (exp) == NON_LVALUE_EXPR)
+ while (CONVERT_EXPR_P (exp)
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
exp = TREE_OPERAND (exp, 0);
- if (TREE_CODE (exp) == ADDR_EXPR)
+ off = 0;
+ if (TREE_CODE (exp) == POINTER_PLUS_EXPR
+ && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
+ && host_integerp (TREE_OPERAND (exp, 1), 0)
+ && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
+ exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+ else if (TREE_CODE (exp) == ADDR_EXPR)
exp = TREE_OPERAND (exp, 0);
else if (POINTER_TYPE_P (TREE_TYPE (exp)))
exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
{
set_mem_attributes (mem, exp, 0);
+ if (off)
+ mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
+
/* Allow the string and memory builtins to overflow from one
field into another, see http://gcc.gnu.org/PR23561.
Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
tree inner = exp;
while (TREE_CODE (inner) == ARRAY_REF
- || TREE_CODE (inner) == NOP_EXPR
- || TREE_CODE (inner) == CONVERT_EXPR
- || TREE_CODE (inner) == NON_LVALUE_EXPR
+ || CONVERT_EXPR_P (inner)
|| TREE_CODE (inner) == VIEW_CONVERT_EXPR
|| TREE_CODE (inner) == SAVE_EXPR)
inner = TREE_OPERAND (inner, 0);
gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
if (MEM_OFFSET (mem)
- && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
+ && CONST_INT_P (MEM_OFFSET (mem)))
offset = INTVAL (MEM_OFFSET (mem));
if (offset >= 0 && len && host_integerp (len, 0))
while (TREE_CODE (inner) == COMPONENT_REF)
{
tree field = TREE_OPERAND (inner, 1);
- gcc_assert (! DECL_BIT_FIELD (field));
gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
gcc_assert (field == TREE_OPERAND (mem_expr, 1));
+ /* Bitfields are generally not byte-addressable. */
+ gcc_assert (!DECL_BIT_FIELD (field)
+ || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+ % BITS_PER_UNIT) == 0
+ && host_integerp (DECL_SIZE (field), 0)
+ && (TREE_INT_CST_LOW (DECL_SIZE (field))
+ % BITS_PER_UNIT) == 0));
+
+ /* If we can prove that the memory starting at XEXP (mem, 0) and
+ ending at XEXP (mem, 0) + LENGTH will fit into this field, we
+ can keep the COMPONENT_REF in MEM_EXPR. But be careful with
+ fields without DECL_SIZE_UNIT like flexible array members. */
if (length >= 0
- && TYPE_SIZE_UNIT (TREE_TYPE (inner))
- && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
+ && DECL_SIZE_UNIT (field)
+ && host_integerp (DECL_SIZE_UNIT (field), 0))
{
HOST_WIDE_INT size
- = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
- /* If we can prove the memory starting at XEXP (mem, 0)
- and ending at XEXP (mem, 0) + LENGTH will fit into
- this field, we can keep that COMPONENT_REF in MEM_EXPR. */
+ = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
if (offset <= size
&& length <= size
&& offset + length <= size)
if (offset >= 0
&& host_integerp (DECL_FIELD_OFFSET (field), 0))
- offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
+ offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
+ tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
/ BITS_PER_UNIT;
else
int regno, size, align, nelts;
enum machine_mode mode;
rtx reg, mem;
- rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
+ rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
size = nelts = 0;
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
}
/* Save the arg pointer to the block. */
- tem = copy_to_reg (virtual_incoming_args_rtx);
+ tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
/* We need the pointer as the caller actually passed them to us, not
as we might have pretended they were passed. Make sure it's a valid
operand, as emit_move_insn isn't expected to handle a PLUS. */
tem
- = force_operand (plus_constant (tem, current_function_pretend_args_size),
+ = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
NULL_RTX);
#endif
emit_move_insn (adjust_address (registers, Pmode, 0), tem);
/* Put the insns after the NOTE that starts the function.
If this is inside a start_sequence, make the outer-level insn
chain current, so the code is placed at the start of the
- function. */
+ function. If internal_arg_pointer is a non-virtual pseudo,
+ it needs to be placed after the function that initializes
+ that pseudo. */
push_topmost_sequence ();
- emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
+ if (REG_P (crtl->args.internal_arg_pointer)
+ && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
+ emit_insn_before (seq, parm_birth_insn);
+ else
+ emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
pop_topmost_sequence ();
return temp;
}
/* Allocate a block of memory onto the stack and copy the memory
arguments to the outgoing arguments address. */
allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
+
+ /* Set DRAP flag to true, even though allocate_dynamic_stack_space
+ may have already set current_function_calls_alloca to true.
+ current_function_calls_alloca won't be set if argsize is zero,
+ so we have to guarantee need_drap is true here. */
+ if (SUPPORTS_STACK_ALIGNMENT)
+ crtl->need_drap = true;
+
dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
- if (GET_CODE (argsize) == CONST_INT)
+ if (CONST_INT_P (argsize))
dest = plus_constant (dest, -INTVAL (argsize));
else
dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
emit_move_insn (reg, adjust_address (result, mode, size));
push_to_sequence (call_fusage);
- emit_insn (gen_rtx_USE (VOIDmode, reg));
+ emit_use (reg);
call_fusage = get_insns ();
end_sequence ();
size += GET_MODE_SIZE (mode);
fcodel = BUILT_IN_MATHFN##L_R ; break;
/* Return mathematic function equivalent to FN but operating directly
- on TYPE, if available. If we can't do the conversion, return zero. */
-tree
-mathfn_built_in (tree type, enum built_in_function fn)
+ on TYPE, if available. If IMPLICIT is true find the function in
+ implicit_built_in_decls[], otherwise use built_in_decls[]. If we
+ can't do the conversion, return zero. */
+
+static tree
+mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
+ tree const *const fn_arr
+ = implicit ? implicit_built_in_decls : built_in_decls;
enum built_in_function fcode, fcodef, fcodel;
switch (fn)
CASE_MATHFN (BUILT_IN_SCALB)
CASE_MATHFN (BUILT_IN_SCALBLN)
CASE_MATHFN (BUILT_IN_SCALBN)
+ CASE_MATHFN (BUILT_IN_SIGNBIT)
CASE_MATHFN (BUILT_IN_SIGNIFICAND)
CASE_MATHFN (BUILT_IN_SIN)
CASE_MATHFN (BUILT_IN_SINCOS)
}
if (TYPE_MAIN_VARIANT (type) == double_type_node)
- return implicit_built_in_decls[fcode];
+ return fn_arr[fcode];
else if (TYPE_MAIN_VARIANT (type) == float_type_node)
- return implicit_built_in_decls[fcodef];
+ return fn_arr[fcodef];
else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
- return implicit_built_in_decls[fcodel];
+ return fn_arr[fcodel];
else
return NULL_TREE;
}
+/* Like mathfn_built_in_1(), but always use the implicit array.
+   Returns NULL_TREE when TYPE is not one of float, double or
+   long double (see mathfn_built_in_1).  */
+
+tree
+mathfn_built_in (tree type, enum built_in_function fn)
+{
+  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
+}
+
/* If errno must be maintained, expand the RTL to check if the result,
TARGET, of a built-in function call, EXP, is NaN, and if so set
errno to EDOM. */
/* Test the result; if it is NaN, set errno=EDOM because
the argument was not in the domain. */
- emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
- 0, lab);
+ do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
+ NULL_RTX, NULL_RTX, lab);
#ifdef TARGET_EDOM
/* If this built-in doesn't throw an exception, set errno directly. */
}
#endif
+ /* Make sure the library call isn't expanded as a tail call. */
+ CALL_EXPR_TAILCALL (exp) = 0;
+
/* We can't set errno=EDOM directly; let the library call do it.
Pop the arguments right away in case the call gets deleted. */
NO_DEFER_POP;
/* Else fallthrough and expand as rint. */
CASE_FLT_FN (BUILT_IN_RINT):
builtin_optab = rint_optab; break;
+ CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
+ builtin_optab = significand_optab; break;
default:
gcc_unreachable ();
}
before_call = get_last_insn ();
- target = expand_call (exp, target, target == const0_rtx);
-
- /* If this is a sqrt operation and we don't care about errno, try to
- attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
- This allows the semantics of the libcall to be visible to the RTL
- optimizers. */
- if (builtin_optab == sqrt_optab && !errno_set)
- {
- /* Search backwards through the insns emitted by expand_call looking
- for the instruction with the REG_RETVAL note. */
- rtx last = get_last_insn ();
- while (last != before_call)
- {
- if (find_reg_note (last, REG_RETVAL, NULL))
- {
- rtx note = find_reg_note (last, REG_EQUAL, NULL);
- /* Check that the REQ_EQUAL note is an EXPR_LIST with
- two elements, i.e. symbol_ref(sqrt) and the operand. */
- if (note
- && GET_CODE (note) == EXPR_LIST
- && GET_CODE (XEXP (note, 0)) == EXPR_LIST
- && XEXP (XEXP (note, 0), 1) != NULL_RTX
- && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
- {
- rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
- /* Check operand is a register with expected mode. */
- if (operand
- && REG_P (operand)
- && GET_MODE (operand) == mode)
- {
- /* Replace the REG_EQUAL note with a SQRT rtx. */
- rtx equiv = gen_rtx_SQRT (mode, operand);
- set_unique_reg_note (last, REG_EQUAL, equiv);
- }
- }
- break;
- }
- last = PREV_INSN (last);
- }
- }
-
- return target;
+ return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the builtin binary math functions (pow and atan2).
enum machine_mode mode;
bool errno_set = false;
tree arg;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
return NULL_RTX;
get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
real_from_string (&r, buf);
result = build_call_expr (isgr_fn, 2,
- fold_build1 (ABS_EXPR, type, arg),
+ fold_build1_loc (loc, ABS_EXPR, type, arg),
build_real (type, r));
return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
}
get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
real_from_string (&r, buf);
result = build_call_expr (isle_fn, 2,
- fold_build1 (ABS_EXPR, type, arg),
+ fold_build1_loc (loc, ABS_EXPR, type, arg),
build_real (type, r));
return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
}
real_from_string (&rmax, buf);
sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
real_from_string (&rmin, buf);
- arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
+ arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
result = build_call_expr (isle_fn, 2, arg,
build_real (type, rmax));
result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
enum machine_mode mode;
tree arg, sinp, cosp;
int result;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, REAL_TYPE,
POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
target2 = gen_reg_rtx (mode);
op0 = expand_normal (arg);
- op1 = expand_normal (build_fold_indirect_ref (sinp));
- op2 = expand_normal (build_fold_indirect_ref (cosp));
+ op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
+ op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
/* Compute into target1 and target2.
Set TARGET to wherever the result comes back. */
tree arg, type;
enum machine_mode mode;
rtx op0, op1, op2;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
return NULL_RTX;
fn = build_fn_decl (name, fntype);
}
- narg = fold_build2 (COMPLEX_EXPR, ctype,
+ narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
build_real (type, dconst0), arg);
/* Make sure not to fold the cexp call again. */
do not need to worry about setting errno to EDOM.
If expanding via optab fails, lower expression to (int)(floor(x)).
EXP is the expression that is a call to the builtin function;
- if convenient, the result should be placed in TARGET. SUBTARGET may
- be used as the target for computing one of EXP's operands. */
+ if convenient, the result should be placed in TARGET. */
static rtx
-expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
+expand_builtin_int_roundingfn (tree exp, rtx target)
{
convert_optab builtin_optab;
rtx op0, insns, tmp;
side-effects more the once. */
CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
- op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
+ op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
start_sequence ();
conversion (lrint).
Return 0 if a normal call should be emitted rather than expanding the
function in-line. EXP is the expression that is a call to the builtin
- function; if convenient, the result should be placed in TARGET.
- SUBTARGET may be used as the target for computing one of EXP's operands. */
+ function; if convenient, the result should be placed in TARGET. */
static rtx
-expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
+expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
convert_optab builtin_optab;
rtx op0, insns;
side-effects more the once. */
CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
- op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
+ op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
start_sequence ();
if (real_identical (&c, &cint)
&& ((n >= -1 && n <= 2)
|| (flag_unsafe_math_optimizations
- && !optimize_size
+ && optimize_insn_for_speed_p ()
&& powi_cost (n) <= POWI_MAX_MULTS)))
{
op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
if (real_identical (&c2, &cint)
&& ((flag_unsafe_math_optimizations
- && !optimize_size
+ && optimize_insn_for_speed_p ()
&& powi_cost (n/2) <= POWI_MAX_MULTS)
|| n == 1))
{
real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
real_convert (&c2, mode, &c2);
if (real_identical (&c2, &c)
- && ((!optimize_size
+ && ((optimize_insn_for_speed_p ()
&& powi_cost (n/3) <= POWI_MAX_MULTS)
|| n == 1))
{
if ((TREE_INT_CST_HIGH (arg1) == 0
|| TREE_INT_CST_HIGH (arg1) == -1)
&& ((n >= -1 && n <= 2)
- || (! optimize_size
+ || (optimize_insn_for_speed_p ()
&& powi_cost (n) <= POWI_MAX_MULTS)))
{
op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
op1 = convert_to_mode (mode2, op1, 0);
target = emit_library_call_value (optab_libfunc (powi_optab, mode),
- target, LCT_CONST_MAKE_BLOCK, mode, 2,
+ target, LCT_CONST, mode, 2,
op0, mode, op1, mode2);
return target;
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strstr (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1), type);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strchr (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1), type);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strrchr (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1), type);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strpbrk (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1), type);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
unsigned int dest_align
= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
rtx dest_mem, src_mem, dest_addr, len_rtx;
- tree result = fold_builtin_memory_op (dest, src, len,
+ tree result = fold_builtin_memory_op (EXPR_LOCATION (exp),
+ dest, src, len,
TREE_TYPE (TREE_TYPE (fndecl)),
false, /*endp=*/0);
HOST_WIDE_INT expected_size = -1;
unsigned int expected_align = 0;
+ tree_ann_common_t ann;
if (result)
{
if (src_align == 0)
return NULL_RTX;
- stringop_block_profile (exp, &expected_align, &expected_size);
+ ann = tree_common_ann (exp);
+ if (ann)
+ stringop_block_profile (ann->stmt, &expected_align, &expected_size);
+
if (expected_align < dest_align)
expected_align = dest_align;
dest_mem = get_memory_rtx (dest, len);
by pieces, we can avoid loading the string from memory
and only stored the computed constants. */
if (src_str
- && GET_CODE (len_rtx) == CONST_INT
+ && CONST_INT_P (len_rtx)
&& (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
&& can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
- (void *) src_str, dest_align, false))
+ CONST_CAST (char *, src_str),
+ dest_align, false))
{
dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
builtin_memcpy_read_str,
- (void *) src_str, dest_align, false, 0);
+ CONST_CAST (char *, src_str),
+ dest_align, false, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
stpcpy. */
static rtx
-expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
+expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
{
if (!validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
rtx target, enum machine_mode mode, int endp)
{
/* If return value is ignored, transform mempcpy into memcpy. */
- if (target == const0_rtx)
+ if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
{
tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
+ tree result = build_call_expr (fn, 3, dest, src, len);
- if (!fn)
- return NULL_RTX;
-
- return expand_expr (build_call_expr (fn, 3, dest, src, len),
- target, mode, EXPAND_NORMAL);
+ while (TREE_CODE (result) == COMPOUND_EXPR)
+ {
+ expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
+ EXPAND_NORMAL);
+ result = TREE_OPERAND (result, 1);
+ }
+ return expand_expr (result, target, mode, EXPAND_NORMAL);
}
else
{
unsigned int dest_align
= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
rtx dest_mem, src_mem, len_rtx;
- tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
+ tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
+ dest, src, len, type, false, endp);
if (result)
{
by pieces, we can avoid loading the string from memory
and only stored the computed constants. */
if (src_str
- && GET_CODE (len_rtx) == CONST_INT
+ && CONST_INT_P (len_rtx)
&& (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
&& can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
- (void *) src_str, dest_align, false))
+ CONST_CAST (char *, src_str),
+ dest_align, false))
{
dest_mem = get_memory_rtx (dest, len);
set_mem_align (dest_mem, dest_align);
dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
builtin_memcpy_read_str,
- (void *) src_str, dest_align,
- false, endp);
+ CONST_CAST (char *, src_str),
+ dest_align, false, endp);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
}
- if (GET_CODE (len_rtx) == CONST_INT
+ if (CONST_INT_P (len_rtx)
&& can_move_by_pieces (INTVAL (len_rtx),
MIN (dest_align, src_align)))
{
tree type, rtx target, enum machine_mode mode,
int ignore)
{
- tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
+ tree result = fold_builtin_memory_op (UNKNOWN_LOCATION,
+ dest, src, len, type, ignore, /*endp=*/3);
if (result)
{
{
tree type = TREE_TYPE (exp);
tree src, dest, size;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
This is done this way so that if it isn't expanded inline, we fall
back to calling bcopy instead of memmove. */
return expand_builtin_memmove_args (dest, src,
- fold_convert (sizetype, size),
+ fold_convert_loc (loc, sizetype, size),
type, const0_rtx, VOIDmode,
ignore);
}
expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
rtx target, enum machine_mode mode)
{
- tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
+ tree result = fold_builtin_strcpy (UNKNOWN_LOCATION,
+ fndecl, dest, src, 0);
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
return expand_movstr (dest, src, target, /*endp=*/0);
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
tree dst, src;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return NULL_RTX;
src = CALL_EXPR_ARG (exp, 1);
/* If return value is ignored, transform stpcpy into strcpy. */
- if (target == const0_rtx)
+ if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
{
tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
- if (!fn)
- return NULL_RTX;
+ tree result = build_call_expr (fn, 2, dst, src);
- return expand_expr (build_call_expr (fn, 2, dst, src),
- target, mode, EXPAND_NORMAL);
+ STRIP_NOPS (result);
+ while (TREE_CODE (result) == COMPOUND_EXPR)
+ {
+ expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
+ EXPAND_NORMAL);
+ result = TREE_OPERAND (result, 1);
+ }
+ return expand_expr (result, target, mode, EXPAND_NORMAL);
}
else
{
if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
return expand_movstr (dst, src, target, /*endp=*/2);
- lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
+ lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
target, mode, /*endp=*/2);
{
rtx len_rtx = expand_normal (len);
- if (GET_CODE (len_rtx) == CONST_INT)
+ if (CONST_INT_P (len_rtx))
{
ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
dst, src, target, mode);
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
tree fndecl = get_callee_fndecl (exp);
+ location_t loc = EXPR_LOCATION (exp);
if (validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
tree src = CALL_EXPR_ARG (exp, 1);
tree len = CALL_EXPR_ARG (exp, 2);
tree slen = c_strlen (src, 1);
- tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
+ tree result = fold_builtin_strncpy (EXPR_LOCATION (exp),
+ fndecl, dest, src, len, slen);
if (result)
{
if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
return NULL_RTX;
- slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
+ slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
/* We're required to pad with trailing zeros if the requested
len is greater than strlen(s2)+1. In that case try to
if (!p || dest_align == 0 || !host_integerp (len, 1)
|| !can_store_by_pieces (tree_low_cst (len, 1),
builtin_strncpy_read_str,
- (void *) p, dest_align, false))
+ CONST_CAST (char *, p),
+ dest_align, false))
return NULL_RTX;
dest_mem = get_memory_rtx (dest, len);
store_by_pieces (dest_mem, tree_low_cst (len, 1),
builtin_strncpy_read_str,
- (void *) p, dest_align, false, 0);
+ CONST_CAST (char *, p), dest_align, false, 0);
dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
dest_mem = convert_memory_address (ptr_mode, dest_mem);
return dest_mem;
enum machine_mode mode)
{
const char *c = (const char *) data;
- char *p = alloca (GET_MODE_SIZE (mode));
+ char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
memset (p, *c, GET_MODE_SIZE (mode));
if (size == 1)
return (rtx) data;
- p = alloca (size);
+ p = XALLOCAVEC (char, size);
memset (p, 1, size);
coeff = c_readstr (p, mode);
rtx dest_mem, dest_addr, len_rtx;
HOST_WIDE_INT expected_size = -1;
unsigned int expected_align = 0;
+ tree_ann_common_t ann;
dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
if (dest_align == 0)
return NULL_RTX;
- stringop_block_profile (orig_exp, &expected_align, &expected_size);
+ ann = tree_common_ann (orig_exp);
+ if (ann)
+ stringop_block_profile (ann->stmt, &expected_align, &expected_size);
+
if (expected_align < dest_align)
expected_align = dest_align;
expand_builtin_bzero (tree exp)
{
tree dest, size;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
calling bzero instead of memset. */
return expand_builtin_memset_args (dest, integer_zero_node,
- fold_convert (sizetype, size),
+ fold_convert_loc (loc, sizetype, size),
const0_rtx, VOIDmode, exp);
}
INTEGER_TYPE, VOID_TYPE))
{
tree type = TREE_TYPE (exp);
- tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_memchr (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
CALL_EXPR_ARG (exp, 2), type);
if (result)
static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
{
+ location_t loc = EXPR_LOCATION (exp);
+
if (!validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
else
{
- tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_memcmp (loc,
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
CALL_EXPR_ARG (exp, 2));
if (result)
arg1_rtx = get_memory_rtx (arg1, len);
arg2_rtx = get_memory_rtx (arg2, len);
- arg3_rtx = expand_normal (len);
+ arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
/* Set MEM_SIZE as appropriate. */
- if (GET_CODE (arg3_rtx) == CONST_INT)
+ if (CONST_INT_P (arg3_rtx))
{
set_mem_size (arg1_rtx, arg3_rtx);
set_mem_size (arg2_rtx, arg3_rtx);
if (insn)
emit_insn (insn);
else
- emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
+ emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
TYPE_MODE (integer_type_node), 3,
XEXP (arg1_rtx, 0), Pmode,
XEXP (arg2_rtx, 0), Pmode,
static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
{
+ location_t loc = EXPR_LOCATION (exp);
+
if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return NULL_RTX;
else
{
- tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strcmp (loc,
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1));
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
{
+ location_t loc = EXPR_LOCATION (exp);
+
if (!validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
else
{
- tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strncmp (loc,
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
CALL_EXPR_ARG (exp, 2));
if (result)
len2 = c_strlen (arg2, 1);
if (len1)
- len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
+ len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
if (len2)
- len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
+ len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
/* If we don't have a constant length for the first, use the length
of the second, if we know it. We don't require a constant for
return NULL_RTX;
/* The actual new length parameter is MIN(len,arg3). */
- len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
- fold_convert (TREE_TYPE (len), arg3));
+ len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
+ fold_convert_loc (loc, TREE_TYPE (len), arg3));
/* If we don't have POINTER_TYPE, call the function. */
if (arg1_align == 0 || arg2_align == 0)
static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
+ location_t loc = EXPR_LOCATION (exp);
+
if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
return NULL_RTX;
else
if (p && *p == '\0')
return expand_expr (dst, target, mode, EXPAND_NORMAL);
- if (!optimize_size)
+ if (optimize_insn_for_speed_p ())
{
/* See if we can store by pieces into (dst + strlen(dst)). */
tree newsrc, newdst,
newdst = build_call_expr (strlen_fn, 1, dst);
/* Create (dst p+ strlen (dst)). */
- newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
+ newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
+ TREE_TYPE (dst), dst, newdst);
newdst = builtin_save_expr (newdst);
if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
if (validate_arglist (exp,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
{
- tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strncat (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
CALL_EXPR_ARG (exp, 2));
if (result)
{
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
- tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strspn (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1));
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
{
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
- tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_strcspn (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1));
if (result)
return expand_expr (result, target, mode, EXPAND_NORMAL);
expand_builtin_args_info (tree exp)
{
int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
- int *word_ptr = (int *) &current_function_args_info;
+ int *word_ptr = (int *) &crtl->args.info;
gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
/* Checking arguments is already done in fold_builtin_next_arg
that must be called before this function. */
return expand_binop (ptr_mode, add_optab,
- current_function_internal_arg_pointer,
- current_function_arg_offset_rtx,
+ crtl->args.internal_arg_pointer,
+ crtl->args.arg_offset_rtx,
NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
from multiple evaluations. */
static tree
-stabilize_va_list (tree valist, int needs_lvalue)
+stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
- if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
+ tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
+
+ gcc_assert (vatype != NULL_TREE);
+
+ if (TREE_CODE (vatype) == ARRAY_TYPE)
{
if (TREE_SIDE_EFFECTS (valist))
valist = save_expr (valist);
/* For this case, the backends will be expecting a pointer to
- TREE_TYPE (va_list_type_node), but it's possible we've
- actually been given an array (an actual va_list_type_node).
+ vatype, but it's possible we've actually been given an array
+ (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
So fix it. */
if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
{
- tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
- valist = build_fold_addr_expr_with_type (valist, p1);
+ tree p1 = build_pointer_type (TREE_TYPE (vatype));
+ valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
}
}
else
if (! TREE_SIDE_EFFECTS (valist))
return valist;
- pt = build_pointer_type (va_list_type_node);
- valist = fold_build1 (ADDR_EXPR, pt, valist);
+ pt = build_pointer_type (vatype);
+ valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
TREE_SIDE_EFFECTS (valist) = 1;
}
if (TREE_SIDE_EFFECTS (valist))
valist = save_expr (valist);
- valist = build_fold_indirect_ref (valist);
+ valist = build_fold_indirect_ref_loc (loc, valist);
}
return valist;
return ptr_type_node;
}
+/* The "standard" abi va_list is va_list_type_node. */
+
+tree
+std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
+{
+ return va_list_type_node;
+}
+
+/* The "standard" type of va_list is va_list_type_node. */
+
+tree
+std_canonical_va_list_type (tree type)
+{
+ tree wtype, htype;
+
+ if (INDIRECT_REF_P (type))
+ type = TREE_TYPE (type);
+ else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
+ type = TREE_TYPE (type);
+ wtype = va_list_type_node;
+ htype = type;
+ /* Treat structure va_list types. */
+ if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
+ htype = TREE_TYPE (htype);
+ else if (TREE_CODE (wtype) == ARRAY_TYPE)
+ {
+ /* If va_list is an array type, the argument may have decayed
+ to a pointer type, e.g. by being passed to another function.
+ In that case, unwrap both types so that we can compare the
+ underlying records. */
+ if (TREE_CODE (htype) == ARRAY_TYPE
+ || POINTER_TYPE_P (htype))
+ {
+ wtype = TREE_TYPE (wtype);
+ htype = TREE_TYPE (htype);
+ }
+ }
+ if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
+ return va_list_type_node;
+
+ return NULL_TREE;
+}
+
/* The "standard" implementation of va_start: just assign `nextarg' to
the variable. */
{
rtx nextarg;
tree valist;
+ location_t loc = EXPR_LOCATION (exp);
if (call_expr_nargs (exp) < 2)
{
- error ("too few arguments to function %<va_start%>");
+ error_at (loc, "too few arguments to function %<va_start%>");
return const0_rtx;
}
return const0_rtx;
nextarg = expand_builtin_next_arg ();
- valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
+ valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
if (targetm.expand_builtin_va_start)
targetm.expand_builtin_va_start (valist, nextarg);
current (padded) address and increment by the (padded) size. */
tree
-std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
+std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
+ gimple_seq *post_p)
{
tree addr, t, type_size, rounded_size, valist_tmp;
unsigned HOST_WIDE_INT align, boundary;
type = build_pointer_type (type);
align = PARM_BOUNDARY / BITS_PER_UNIT;
- boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
+ boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
+
+ /* When we align parameter on stack for caller, if the parameter
+ alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
+ aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
+ here with caller. */
+ if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
+ boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
+
+ boundary /= BITS_PER_UNIT;
/* Hoist the valist value into a temporary for the moment. */
valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
&& !integer_zerop (TYPE_SIZE (type)))
{
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
+ fold_build2 (POINTER_PLUS_EXPR,
+ TREE_TYPE (valist),
valist_tmp, size_int (boundary - 1)));
gimplify_and_add (t, pre_p);
/* If the actual alignment is less than the alignment of the type,
adjust the type accordingly so that we don't assume strict alignment
- when deferencing the pointer. */
+ when dereferencing the pointer. */
boundary *= BITS_PER_UNIT;
if (boundary < TYPE_ALIGN (type))
{
if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
{
/* Small args are padded downward. */
- t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
+ t = fold_build2_loc (input_location, GT_EXPR, sizetype,
+ rounded_size, size_int (align));
t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
size_binop (MINUS_EXPR, rounded_size, type_size));
- addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
+ addr = fold_build2 (POINTER_PLUS_EXPR,
+ TREE_TYPE (addr), addr, t);
}
/* Compute new value for AP. */
tree
build_va_arg_indirect_ref (tree addr)
{
- addr = build_fold_indirect_ref (addr);
+ addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
mf_mark (addr);
builtin function, but a very special sort of operator. */
enum gimplify_status
-gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
+gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
- tree promoted_type, want_va_type, have_va_type;
+ tree promoted_type, have_va_type;
tree valist = TREE_OPERAND (*expr_p, 0);
tree type = TREE_TYPE (*expr_p);
tree t;
+ location_t loc = EXPR_LOCATION (*expr_p);
/* Verify that valist is of the proper type. */
- want_va_type = va_list_type_node;
have_va_type = TREE_TYPE (valist);
-
if (have_va_type == error_mark_node)
return GS_ERROR;
+ have_va_type = targetm.canonical_va_list_type (have_va_type);
- if (TREE_CODE (want_va_type) == ARRAY_TYPE)
- {
- /* If va_list is an array type, the argument may have decayed
- to a pointer type, e.g. by being passed to another function.
- In that case, unwrap both types so that we can compare the
- underlying records. */
- if (TREE_CODE (have_va_type) == ARRAY_TYPE
- || POINTER_TYPE_P (have_va_type))
- {
- want_va_type = TREE_TYPE (want_va_type);
- have_va_type = TREE_TYPE (have_va_type);
- }
- }
-
- if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
+ if (have_va_type == NULL_TREE)
{
- error ("first argument to %<va_arg%> not of type %<va_list%>");
+ error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
return GS_ERROR;
}
/* Generate a diagnostic for requesting data of a type that cannot
be passed through `...' due to type promotion at the call site. */
- else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
+ if ((promoted_type = lang_hooks.types.type_promotes_to (type))
!= type)
{
static bool gave_help;
+ bool warned;
/* Unfortunately, this is merely undefined, rather than a constraint
violation, so we cannot make this an error. If this call is never
executed, the program is still strictly conforming. */
- warning (0, "%qT is promoted to %qT when passed through %<...%>",
- type, promoted_type);
- if (! gave_help)
+ warned = warning_at (loc, 0,
+ "%qT is promoted to %qT when passed through %<...%>",
+ type, promoted_type);
+ if (!gave_help && warned)
{
gave_help = true;
- inform ("(so you should pass %qT not %qT to %<va_arg%>)",
- promoted_type, type);
+ inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
+ promoted_type, type);
}
/* We can, however, treat "undefined" any way we please.
Call abort to encourage the user to fix the program. */
- inform ("if this code is reached, the program will abort");
- t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
- append_to_statement_list (t, pre_p);
+ if (warned)
+ inform (loc, "if this code is reached, the program will abort");
+ /* Before the abort, allow the evaluation of the va_list
+ expression to exit or longjmp. */
+ gimplify_and_add (valist, pre_p);
+ t = build_call_expr_loc (loc,
+ implicit_built_in_decls[BUILT_IN_TRAP], 0);
+ gimplify_and_add (t, pre_p);
/* This is dead code, but go ahead and finish so that the
mode of the result comes out right. */
{
/* Make it easier for the backends by protecting the valist argument
from multiple evaluations. */
- if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
+ if (TREE_CODE (have_va_type) == ARRAY_TYPE)
{
/* For this case, the backends will be expecting a pointer to
- TREE_TYPE (va_list_type_node), but it's possible we've
- actually been given an array (an actual va_list_type_node).
+ TREE_TYPE (abi), but it's possible we've
+ actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
So fix it. */
if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
{
- tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
- valist = build_fold_addr_expr_with_type (valist, p1);
+ tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
+ valist = fold_convert_loc (loc, p1,
+ build_fold_addr_expr_loc (loc, valist));
}
+
gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
}
else
gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
if (!targetm.gimplify_va_arg_expr)
- /* FIXME:Once most targets are converted we should merely
+ /* FIXME: Once most targets are converted we should merely
assert this is non-null. */
return GS_ALL_DONE;
*expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
+ SET_EXPR_LOCATION (*expr_p, loc);
return GS_OK;
}
}
expand_builtin_va_copy (tree exp)
{
tree dst, src, t;
+ location_t loc = EXPR_LOCATION (exp);
dst = CALL_EXPR_ARG (exp, 0);
src = CALL_EXPR_ARG (exp, 1);
- dst = stabilize_va_list (dst, 1);
- src = stabilize_va_list (src, 0);
+ dst = stabilize_va_list_loc (loc, dst, 1);
+ src = stabilize_va_list_loc (loc, src, 0);
- if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
+ gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
+
+ if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
{
- t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
+ t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Evaluate to pointers. */
dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
- size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
- VOIDmode, EXPAND_NORMAL);
+ size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
+ NULL_RTX, VOIDmode, EXPAND_NORMAL);
dstb = convert_memory_address (Pmode, dstb);
srcb = convert_memory_address (Pmode, srcb);
/* "Dereference" to BLKmode memories. */
dstb = gen_rtx_MEM (BLKmode, dstb);
set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
- set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
+ set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
srcb = gen_rtx_MEM (BLKmode, srcb);
set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
- set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
+ set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
/* Copy. */
emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
rtx op0;
rtx result;
- /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
- should always expand to function calls. These can be intercepted
- in libmudflap. */
- if (flag_mudflap)
+ /* Emit normal call if marked not-inlineable. */
+ if (CALL_CANNOT_INLINE_P (exp))
return NULL_RTX;
if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
/* Verify the arguments in the original call. */
if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
{
- tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
+ tree result = fold_builtin_fputs (EXPR_LOCATION (exp),
+ CALL_EXPR_ARG (exp, 0),
CALL_EXPR_ARG (exp, 1),
(target == const0_rtx),
unlocked, NULL_TREE);
emit_barrier ();
}
+/* Expand a call to __builtin_unreachable. We do nothing except emit
+ a barrier saying that control flow will not pass here.
+
+ It is the responsibility of the program being compiled to ensure
+ that control flow does never reach __builtin_unreachable. */
+static void
+expand_builtin_unreachable (void)
+{
+ emit_barrier ();
+}
+
/* Expand EXP, a call to fabs, fabsf or fabsl.
Return NULL_RTX if a normal call should be emitted rather than expanding
the function inline. If convenient, the result should be placed
t = build_string (len, str);
elem = build_type_variant (char_type_node, 1, 0);
- index = build_index_type (build_int_cst (NULL_TREE, len - 1));
+ index = build_index_type (size_int (len - 1));
type = build_array_type (elem, index);
TREE_TYPE (t) = type;
TREE_CONSTANT (t) = 1;
- TREE_INVARIANT (t) = 1;
TREE_READONLY (t) = 1;
TREE_STATIC (t) = 1;
- type = build_pointer_type (type);
- t = build1 (ADDR_EXPR, type, t);
-
type = build_pointer_type (elem);
- t = build1 (NOP_EXPR, type, t);
+ t = build1 (ADDR_EXPR, type,
+ build4 (ARRAY_REF, elem,
+ t, integer_zero_node, NULL_TREE, NULL_TREE));
return t;
}
{
/* Create a NUL-terminated string that's one char shorter
than the original, stripping off the trailing '\n'. */
- char *newstr = alloca (len);
+ char *newstr = XALLOCAVEC (char, len);
memcpy (newstr, fmt_str, len - 1);
newstr[len - 1] = 0;
arg = build_string_literal (len, newstr);
static rtx
expand_builtin_profile_func (bool exitp)
{
- rtx this, which;
+ rtx this_rtx, which;
- this = DECL_RTL (current_function_decl);
- gcc_assert (MEM_P (this));
- this = XEXP (this, 0);
+ this_rtx = DECL_RTL (current_function_decl);
+ gcc_assert (MEM_P (this_rtx));
+ this_rtx = XEXP (this_rtx, 0);
if (exitp)
which = profile_function_exit_libfunc;
else
which = profile_function_entry_libfunc;
- emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
+ emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
0),
Pmode);
int word, bitpos;
enum insn_code icode;
rtx temp;
+ location_t loc = EXPR_LOCATION (exp);
if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
return NULL_RTX;
if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
return NULL_RTX;
- arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
+ arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
build_real (TREE_TYPE (arg), dconst0));
return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
}
lo = 0;
}
- if (imode != rmode)
+ if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
temp = gen_lowpart (rmode, temp);
temp = expand_binop (rmode, and_optab, temp,
immed_double_const (lo, hi, rmode),
gcc_unreachable ();
}
- decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
+ decl = build_decl (DECL_SOURCE_LOCATION (fn),
+ FUNCTION_DECL, id, TREE_TYPE (fn));
DECL_EXTERNAL (decl) = 1;
TREE_PUBLIC (decl) = 1;
DECL_ARTIFICIAL (decl) = 1;
TREE_NOTHROW (decl) = 1;
DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
DECL_VISIBILITY_SPECIFIED (decl) = 1;
- call = rewrite_call_expr (exp, 0, decl, 0);
+ call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
return expand_call (call, target, ignore);
}
{
rtx val, mem;
enum machine_mode old_mode;
+ location_t loc = EXPR_LOCATION (exp);
+
+ if (code == NOT && warn_sync_nand)
+ {
+ tree fndecl = get_callee_fndecl (exp);
+ enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
+
+ static bool warned_f_a_n, warned_n_a_f;
+
+ switch (fcode)
+ {
+ case BUILT_IN_FETCH_AND_NAND_1:
+ case BUILT_IN_FETCH_AND_NAND_2:
+ case BUILT_IN_FETCH_AND_NAND_4:
+ case BUILT_IN_FETCH_AND_NAND_8:
+ case BUILT_IN_FETCH_AND_NAND_16:
+
+ if (warned_f_a_n)
+ break;
+
+ fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
+ inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
+ warned_f_a_n = true;
+ break;
+
+ case BUILT_IN_NAND_AND_FETCH_1:
+ case BUILT_IN_NAND_AND_FETCH_2:
+ case BUILT_IN_NAND_AND_FETCH_4:
+ case BUILT_IN_NAND_AND_FETCH_8:
+ case BUILT_IN_NAND_AND_FETCH_16:
+
+ if (warned_n_a_f)
+ break;
+
+ fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
+ inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
+ warned_n_a_f = true;
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+ }
/* Expand the operands. */
mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
}
#endif
+ if (synchronize_libfunc != NULL_RTX)
+ {
+ emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
+ return;
+ }
+
/* If no explicit memory barrier instruction is available, create an
empty asm stmt with a memory clobber. */
x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
if (!optimize
&& !called_as_built_in (fndecl)
&& DECL_ASSEMBLER_NAME_SET_P (fndecl)
- && fcode != BUILT_IN_ALLOCA)
+ && fcode != BUILT_IN_ALLOCA
+ && fcode != BUILT_IN_FREE)
return expand_call (exp, target, ignore);
/* The built-in function expanders test for target == const0_rtx
none of its arguments are volatile, we can avoid expanding the
built-in call and just evaluate the arguments for side-effects. */
if (target == const0_rtx
- && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
+ && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
{
bool volatilep = false;
tree arg;
CASE_FLT_FN (BUILT_IN_ASIN):
CASE_FLT_FN (BUILT_IN_ACOS):
CASE_FLT_FN (BUILT_IN_ATAN):
+ CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
/* Treat these like sqrt only if unsafe math optimizations are allowed,
because of possible accuracy problems. */
if (! flag_unsafe_math_optimizations)
CASE_FLT_FN (BUILT_IN_LLCEIL):
CASE_FLT_FN (BUILT_IN_LFLOOR):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
- target = expand_builtin_int_roundingfn (exp, target, subtarget);
+ target = expand_builtin_int_roundingfn (exp, target);
if (target)
return target;
break;
CASE_FLT_FN (BUILT_IN_LLRINT):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LLROUND):
- target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
+ target = expand_builtin_int_roundingfn_2 (exp, target);
if (target)
return target;
break;
expand_builtin_trap ();
return const0_rtx;
+ case BUILT_IN_UNREACHABLE:
+ expand_builtin_unreachable ();
+ return const0_rtx;
+
case BUILT_IN_PRINTF:
target = expand_builtin_printf (exp, target, mode, false);
if (target)
maybe_emit_sprintf_chk_warning (exp, fcode);
break;
+ case BUILT_IN_FREE:
+ maybe_emit_free_warning (exp);
+ break;
+
default: /* just do library call, if unknown builtin */
break;
}
return it as a truthvalue. */
static tree
-build_builtin_expect_predicate (tree pred, tree expected)
+build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
{
tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
pred_type = TREE_VALUE (arg_types);
expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
- pred = fold_convert (pred_type, pred);
- expected = fold_convert (expected_type, expected);
- call_expr = build_call_expr (fn, 2, pred, expected);
+ pred = fold_convert_loc (loc, pred_type, pred);
+ expected = fold_convert_loc (loc, expected_type, expected);
+ call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
build_int_cst (ret_type, 0));
NULL_TREE if no simplification is possible. */
static tree
-fold_builtin_expect (tree arg0, tree arg1)
+fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
tree inner, fndecl;
enum tree_code code;
tree op0 = TREE_OPERAND (inner, 0);
tree op1 = TREE_OPERAND (inner, 1);
- op0 = build_builtin_expect_predicate (op0, arg1);
- op1 = build_builtin_expect_predicate (op1, arg1);
+ op0 = build_builtin_expect_predicate (loc, op0, arg1);
+ op1 = build_builtin_expect_predicate (loc, op1, arg1);
inner = build2 (code, TREE_TYPE (inner), op0, op1);
- return fold_convert (TREE_TYPE (arg0), inner);
+ return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
}
/* If the argument isn't invariant then there's nothing else we can do. */
- if (!TREE_INVARIANT (arg0))
+ if (!TREE_CONSTANT (arg0))
return NULL_TREE;
/* If we expect that a comparison against the argument will fold to
}
while (TREE_CODE (inner) == COMPONENT_REF
|| TREE_CODE (inner) == ARRAY_REF);
- if (DECL_P (inner) && DECL_WEAK (inner))
+ if ((TREE_CODE (inner) == VAR_DECL
+ || TREE_CODE (inner) == FUNCTION_DECL)
+ && DECL_WEAK (inner))
return NULL_TREE;
}
/* Fold a call to __builtin_strlen with argument ARG. */
static tree
-fold_builtin_strlen (tree arg)
+fold_builtin_strlen (location_t loc, tree arg)
{
if (!validate_arg (arg, POINTER_TYPE))
return NULL_TREE;
{
/* Convert from the internal "sizetype" type to "size_t". */
if (size_type_node)
- len = fold_convert (size_type_node, len);
+ len = fold_convert_loc (loc, size_type_node, len);
return len;
}
/* Fold a call to __builtin_inf or __builtin_huge_val. */
static tree
-fold_builtin_inf (tree type, int warn)
+fold_builtin_inf (location_t loc, tree type, int warn)
{
REAL_VALUE_TYPE real;
Thus we pedwarn to ensure this constraint violation is
diagnosed. */
if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
- pedwarn ("target format does not support infinity");
+ pedwarn (loc, 0, "target format does not support infinity");
real_inf (&real);
return build_real (type, real);
case ABS_EXPR:
case SAVE_EXPR:
- case NON_LVALUE_EXPR:
return integer_valued_real_p (TREE_OPERAND (t, 0));
case COMPOUND_EXPR:
case MODIFY_EXPR:
case BIND_EXPR:
- return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
+ return integer_valued_real_p (TREE_OPERAND (t, 1));
case PLUS_EXPR:
case MINUS_EXPR:
Do the transformation for a call with argument ARG. */
static tree
-fold_trunc_transparent_mathfn (tree fndecl, tree arg)
+fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
&& (decl = mathfn_built_in (newtype, fcode)))
- return fold_convert (ftype,
- build_call_expr (decl, 1,
- fold_convert (newtype, arg0)));
+ return fold_convert_loc (loc, ftype,
+ build_call_expr_loc (loc, decl, 1,
+ fold_convert_loc (loc,
+ newtype,
+ arg0)));
}
return NULL_TREE;
}
Do the transformation for a call with argument ARG. */
static tree
-fold_fixed_mathfn (tree fndecl, tree arg)
+fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
/* If argument is already integer valued, and we don't need to worry
about setting errno, there's no need to perform rounding. */
if (! flag_errno_math && integer_valued_real_p (arg))
- return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
+ return fold_build1_loc (loc, FIX_TRUNC_EXPR,
+ TREE_TYPE (TREE_TYPE (fndecl)), arg);
if (optimize)
{
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
&& (decl = mathfn_built_in (newtype, fcode)))
- return build_call_expr (decl, 1, fold_convert (newtype, arg0));
+ return build_call_expr_loc (loc, decl, 1,
+ fold_convert_loc (loc, newtype, arg0));
}
/* Canonicalize llround (x) to lround (x) on LP64 targets where
if (newfn)
{
- tree newcall = build_call_expr(newfn, 1, arg);
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
+ tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
+ return fold_convert_loc (loc,
+ TREE_TYPE (TREE_TYPE (fndecl)), newcall);
}
}
return type. Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_cabs (tree arg, tree type, tree fndecl)
+fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
tree res;
/* If either part is zero, cabs is fabs of the other. */
if (real_zerop (real))
- return fold_build1 (ABS_EXPR, type, imag);
+ return fold_build1_loc (loc, ABS_EXPR, type, imag);
if (real_zerop (imag))
- return fold_build1 (ABS_EXPR, type, real);
+ return fold_build1_loc (loc, ABS_EXPR, type, real);
/* cabs(x+xi) -> fabs(x)*sqrt(2). */
if (flag_unsafe_math_optimizations
&& operand_equal_p (real, imag, OEP_PURE_SAME))
{
const REAL_VALUE_TYPE sqrt2_trunc
- = real_value_truncate (TYPE_MODE (type),
- *get_real_const (rv_sqrt2));
+ = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
STRIP_NOPS (real);
- return fold_build2 (MULT_EXPR, type,
- fold_build1 (ABS_EXPR, type, real),
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_build1_loc (loc, ABS_EXPR, type, real),
build_real (type, sqrt2_trunc));
}
}
/* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
if (TREE_CODE (arg) == NEGATE_EXPR
|| TREE_CODE (arg) == CONJ_EXPR)
- return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
+ return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
/* Don't do this when optimizing for size. */
if (flag_unsafe_math_optimizations
- && optimize && !optimize_size)
+ && optimize && optimize_function_for_speed_p (cfun))
{
tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
arg = builtin_save_expr (arg);
- rpart = fold_build1 (REALPART_EXPR, type, arg);
- ipart = fold_build1 (IMAGPART_EXPR, type, arg);
+ rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
+ ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
rpart = builtin_save_expr (rpart);
ipart = builtin_save_expr (ipart);
- result = fold_build2 (PLUS_EXPR, type,
- fold_build2 (MULT_EXPR, type,
+ result = fold_build2_loc (loc, PLUS_EXPR, type,
+ fold_build2_loc (loc, MULT_EXPR, type,
rpart, rpart),
- fold_build2 (MULT_EXPR, type,
+ fold_build2_loc (loc, MULT_EXPR, type,
ipart, ipart));
- return build_call_expr (sqrtfn, 1, result);
+ return build_call_expr_loc (loc, sqrtfn, 1, result);
}
}
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_sqrt (tree arg, tree type)
+fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
enum built_in_function fcode;
if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
{
tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
- arg = fold_build2 (MULT_EXPR, type,
+ arg = fold_build2_loc (loc, MULT_EXPR, type,
CALL_EXPR_ARG (arg, 0),
build_real (type, dconsthalf));
- return build_call_expr (expfn, 1, arg);
+ return build_call_expr_loc (loc, expfn, 1, arg);
}
/* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
tree arg0 = CALL_EXPR_ARG (arg, 0);
tree tree_root;
/* The inner root was either sqrt or cbrt. */
- REAL_VALUE_TYPE dconstroot =
- BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
+ /* This was a conditional expression but it triggered a bug
+ in Sun C 5.5. */
+ REAL_VALUE_TYPE dconstroot;
+ if (BUILTIN_SQRT_P (fcode))
+ dconstroot = dconsthalf;
+ else
+ dconstroot = dconst_third ();
/* Adjust for the outer root. */
SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
tree_root = build_real (type, dconstroot);
- return build_call_expr (powfn, 2, arg0, tree_root);
+ return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
}
}
tree narg1;
if (!tree_expr_nonnegative_p (arg0))
arg0 = build1 (ABS_EXPR, type, arg0);
- narg1 = fold_build2 (MULT_EXPR, type, arg1,
+ narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
build_real (type, dconsthalf));
- return build_call_expr (powfn, 2, arg0, narg1);
+ return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
}
return NULL_TREE;
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_cbrt (tree arg, tree type)
+fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
const enum built_in_function fcode = builtin_mathfn_code (arg);
tree res;
{
tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
const REAL_VALUE_TYPE third_trunc =
- real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
- arg = fold_build2 (MULT_EXPR, type,
+ real_value_truncate (TYPE_MODE (type), dconst_third ());
+ arg = fold_build2_loc (loc, MULT_EXPR, type,
CALL_EXPR_ARG (arg, 0),
build_real (type, third_trunc));
- return build_call_expr (expfn, 1, arg);
+ return build_call_expr_loc (loc, expfn, 1, arg);
}
/* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
{
tree arg0 = CALL_EXPR_ARG (arg, 0);
tree tree_root;
- REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
+ REAL_VALUE_TYPE dconstroot = dconst_third ();
SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
tree_root = build_real (type, dconstroot);
- return build_call_expr (powfn, 2, arg0, tree_root);
+ return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
}
}
REAL_VALUE_TYPE dconstroot;
real_arithmetic (&dconstroot, MULT_EXPR,
- get_real_const (rv_third),
- get_real_const (rv_third));
+ dconst_third_ptr (), dconst_third_ptr ());
dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
tree_root = build_real (type, dconstroot);
- return build_call_expr (powfn, 2, arg0, tree_root);
+ return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
}
}
}
{
tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
const REAL_VALUE_TYPE dconstroot
- = real_value_truncate (TYPE_MODE (type),
- *get_real_const (rv_third));
- tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
+ = real_value_truncate (TYPE_MODE (type), dconst_third ());
+ tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
build_real (type, dconstroot));
- return build_call_expr (powfn, 2, arg00, narg01);
+ return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
}
}
}
simplification can be made. */
static tree
-fold_builtin_cos (tree arg, tree type, tree fndecl)
+fold_builtin_cos (location_t loc,
+ tree arg, tree type, tree fndecl)
{
tree res, narg;
/* Optimize cos(-x) into cos (x). */
if ((narg = fold_strip_sign_ops (arg)))
- return build_call_expr (fndecl, 1, narg);
+ return build_call_expr_loc (loc, fndecl, 1, narg);
return NULL_TREE;
}
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_cosh (tree arg, tree type, tree fndecl)
+fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
{
if (validate_arg (arg, REAL_TYPE))
{
/* Optimize cosh(-x) into cosh (x). */
if ((narg = fold_strip_sign_ops (arg)))
- return build_call_expr (fndecl, 1, narg);
+ return build_call_expr_loc (loc, fndecl, 1, narg);
}
return NULL_TREE;
}
+/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
+   argument ARG.  TYPE is the type of the return value.  Return
+   NULL_TREE if no simplification can be made.
+
+   TYPE and HYPER are referenced only inside the HAVE_mpc block below,
+   hence the ATTRIBUTE_UNUSED markers to silence warnings when GCC is
+   built without MPC support.  */
+
+static tree
+fold_builtin_ccos (location_t loc,
+ tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
+ bool hyper ATTRIBUTE_UNUSED)
+{
+ if (validate_arg (arg, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
+ {
+ tree tmp;
+
+#ifdef HAVE_mpc
+ /* Calculate the result when the argument is a constant.  */
+ if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
+ return tmp;
+#endif
+
+ /* Optimize fn(-x) into fn(x): cos and cosh are both even functions,
+ so stripping sign operations from ARG preserves the value.  */
+ if ((tmp = fold_strip_sign_ops (arg)))
+ return build_call_expr_loc (loc, fndecl, 1, tmp);
+ }
+
+ return NULL_TREE;
+}
+
/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
Return NULL_TREE if no simplification can be made. */
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
+fold_builtin_sincos (location_t loc,
+ tree arg0, tree arg1, tree arg2)
{
tree type;
tree res, fn, call;
if (!fn)
return NULL_TREE;
- call = build_call_expr (fn, 1, arg0);
+ call = build_call_expr_loc (loc, fn, 1, arg0);
call = builtin_save_expr (call);
- return build2 (COMPOUND_EXPR, type,
+ return build2 (COMPOUND_EXPR, void_type_node,
build2 (MODIFY_EXPR, void_type_node,
- build_fold_indirect_ref (arg1),
+ build_fold_indirect_ref_loc (loc, arg1),
build1 (IMAGPART_EXPR, type, call)),
build2 (MODIFY_EXPR, void_type_node,
- build_fold_indirect_ref (arg2),
+ build_fold_indirect_ref_loc (loc, arg2),
build1 (REALPART_EXPR, type, call)));
}
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_cexp (tree arg0, tree type)
+fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
tree rtype;
tree realp, imagp, ifn;
+#ifdef HAVE_mpc
+ tree res;
+#endif
- if (!validate_arg (arg0, COMPLEX_TYPE))
+ if (!validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
return NULL_TREE;
+#ifdef HAVE_mpc
+ /* Calculate the result when the argument is a constant. */
+ if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
+ return res;
+#endif
+
rtype = TREE_TYPE (TREE_TYPE (arg0));
/* In case we can figure out the real part of arg0 and it is constant zero
if (!ifn)
return NULL_TREE;
- if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
+ if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
&& real_zerop (realp))
{
- tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
- return build_call_expr (ifn, 1, narg);
+ tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
+ return build_call_expr_loc (loc, ifn, 1, narg);
}
/* In case we can easily decompose real and imaginary parts split cexp
if (!rfn)
return NULL_TREE;
- imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
+ imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
if (!imagp)
return NULL_TREE;
- icall = build_call_expr (ifn, 1, imagp);
+ icall = build_call_expr_loc (loc, ifn, 1, imagp);
icall = builtin_save_expr (icall);
- rcall = build_call_expr (rfn, 1, realp);
+ rcall = build_call_expr_loc (loc, rfn, 1, realp);
rcall = builtin_save_expr (rcall);
- return fold_build2 (COMPLEX_EXPR, type,
- fold_build2 (MULT_EXPR, rtype,
+ return fold_build2_loc (loc, COMPLEX_EXPR, type,
+ fold_build2_loc (loc, MULT_EXPR, rtype,
rcall,
- fold_build1 (REALPART_EXPR, rtype, icall)),
- fold_build2 (MULT_EXPR, rtype,
+ fold_build1_loc (loc, REALPART_EXPR,
+ rtype, icall)),
+ fold_build2_loc (loc, MULT_EXPR, rtype,
rcall,
- fold_build1 (IMAGPART_EXPR, rtype, icall)));
+ fold_build1_loc (loc, IMAGPART_EXPR,
+ rtype, icall)));
}
return NULL_TREE;
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_trunc (tree fndecl, tree arg)
+fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
return build_real (type, r);
}
- return fold_trunc_transparent_mathfn (fndecl, arg);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin floor, floorf or floorl with argument ARG.
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_floor (tree fndecl, tree arg)
+fold_builtin_floor (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
{
tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
if (truncfn)
- return build_call_expr (truncfn, 1, arg);
+ return build_call_expr_loc (loc, truncfn, 1, arg);
}
- return fold_trunc_transparent_mathfn (fndecl, arg);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_ceil (tree fndecl, tree arg)
+fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
}
}
- return fold_trunc_transparent_mathfn (fndecl, arg);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin round, roundf or roundl with argument ARG.
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_round (tree fndecl, tree arg)
+fold_builtin_round (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
}
}
- return fold_trunc_transparent_mathfn (fndecl, arg);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin lround, lroundf or lroundl (or the
can be made. */
static tree
-fold_builtin_int_roundingfn (tree fndecl, tree arg)
+fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
CASE_FLT_FN (BUILT_IN_LLFLOOR):
/* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
if (tree_expr_nonnegative_p (arg))
- return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
- arg);
+ return fold_build1_loc (loc, FIX_TRUNC_EXPR,
+ TREE_TYPE (TREE_TYPE (fndecl)), arg);
break;
default:;
}
- return fold_fixed_mathfn (fndecl, arg);
+ return fold_fixed_mathfn (loc, fndecl, arg);
}
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
return NULL_TREE;
}
-/* Return true if EXPR is the real constant contained in VALUE. */
-
-static bool
-real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
-{
- STRIP_NOPS (expr);
-
- return ((TREE_CODE (expr) == REAL_CST
- && !TREE_OVERFLOW (expr)
- && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
- || (TREE_CODE (expr) == COMPLEX_CST
- && real_dconstp (TREE_REALPART (expr), value)
- && real_zerop (TREE_IMAGPART (expr))));
-}
-
/* A subroutine of fold_builtin to fold the various logarithmic
functions. Return NULL_TREE if no simplification can me made.
FUNC is the corresponding MPFR logarithm function. */
static tree
-fold_builtin_logarithm (tree fndecl, tree arg,
+fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
if (validate_arg (arg, REAL_TYPE))
tree res;
const enum built_in_function fcode = builtin_mathfn_code (arg);
- /* Optimize log(e) = 1.0. We're never passed an exact 'e',
- instead we'll look for 'e' truncated to MODE. So only do
- this if flag_unsafe_math_optimizations is set. */
- if (flag_unsafe_math_optimizations && func == mpfr_log)
- {
- const REAL_VALUE_TYPE e_truncated =
- real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
- if (real_dconstp (arg, &e_truncated))
- return build_real (type, dconst1);
- }
-
/* Calculate the result when the argument is a constant. */
if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
return res;
|| fcode == BUILT_IN_EXP2F
|| fcode == BUILT_IN_EXP2L))
|| (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
- return fold_convert (type, CALL_EXPR_ARG (arg, 0));
+ return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
/* Optimize logN(func()) for various exponential functions. We
want to determine the value "x" and the power "exponent" in
{
CASE_FLT_FN (BUILT_IN_EXP):
/* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
- x = build_real (type,
- real_value_truncate (TYPE_MODE (type),
- *get_real_const (rv_e)));
+ x = build_real (type, real_value_truncate (TYPE_MODE (type),
+ dconst_e ()));
exponent = CALL_EXPR_ARG (arg, 0);
break;
CASE_FLT_FN (BUILT_IN_EXP2):
/* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
x = CALL_EXPR_ARG (arg, 0);
exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
- *get_real_const (rv_third)));
+ dconst_third ()));
break;
CASE_FLT_FN (BUILT_IN_POW):
/* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
/* Now perform the optimization. */
if (x && exponent)
{
- tree logfn = build_call_expr (fndecl, 1, x);
- return fold_build2 (MULT_EXPR, type, exponent, logfn);
+ tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
+ return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
}
}
}
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
+fold_builtin_hypot (location_t loc, tree fndecl,
+ tree arg0, tree arg1, tree type)
{
tree res, narg0, narg1;
narg1 = fold_strip_sign_ops (arg1);
if (narg0 || narg1)
{
- return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
+ return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
narg1 ? narg1 : arg1);
}
/* If either argument is zero, hypot is fabs of the other. */
if (real_zerop (arg0))
- return fold_build1 (ABS_EXPR, type, arg1);
+ return fold_build1_loc (loc, ABS_EXPR, type, arg1);
else if (real_zerop (arg1))
- return fold_build1 (ABS_EXPR, type, arg0);
+ return fold_build1_loc (loc, ABS_EXPR, type, arg0);
/* hypot(x,x) -> fabs(x)*sqrt(2). */
if (flag_unsafe_math_optimizations
&& operand_equal_p (arg0, arg1, OEP_PURE_SAME))
{
const REAL_VALUE_TYPE sqrt2_trunc
- = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
- return fold_build2 (MULT_EXPR, type,
- fold_build1 (ABS_EXPR, type, arg0),
+ = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_build1_loc (loc, ABS_EXPR, type, arg0),
build_real (type, sqrt2_trunc));
}
/* Fold a builtin function call to pow, powf, or powl. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
+fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
tree res;
/* Optimize pow(1.0,y) = 1.0. */
if (real_onep (arg0))
- return omit_one_operand (type, build_real (type, dconst1), arg1);
+ return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
if (TREE_CODE (arg1) == REAL_CST
&& !TREE_OVERFLOW (arg1))
/* Optimize pow(x,0.0) = 1.0. */
if (REAL_VALUES_EQUAL (c, dconst0))
- return omit_one_operand (type, build_real (type, dconst1),
+ return omit_one_operand_loc (loc, type, build_real (type, dconst1),
arg0);
/* Optimize pow(x,1.0) = x. */
/* Optimize pow(x,-1.0) = 1.0/x. */
if (REAL_VALUES_EQUAL (c, dconstm1))
- return fold_build2 (RDIV_EXPR, type,
+ return fold_build2_loc (loc, RDIV_EXPR, type,
build_real (type, dconst1), arg0);
/* Optimize pow(x,0.5) = sqrt(x). */
tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
if (sqrtfn != NULL_TREE)
- return build_call_expr (sqrtfn, 1, arg0);
+ return build_call_expr_loc (loc, sqrtfn, 1, arg0);
}
/* Optimize pow(x,1.0/3.0) = cbrt(x). */
if (flag_unsafe_math_optimizations)
{
const REAL_VALUE_TYPE dconstroot
- = real_value_truncate (TYPE_MODE (type),
- *get_real_const (rv_third));
+ = real_value_truncate (TYPE_MODE (type), dconst_third ());
if (REAL_VALUES_EQUAL (c, dconstroot))
{
tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
if (cbrtfn != NULL_TREE)
- return build_call_expr (cbrtfn, 1, arg0);
+ return build_call_expr_loc (loc, cbrtfn, 1, arg0);
}
}
real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
if (real_identical (&c, &cint))
{
- /* Attempt to evaluate pow at compile-time. */
+ /* Attempt to evaluate pow at compile-time, unless this should
+ raise an exception. */
if (TREE_CODE (arg0) == REAL_CST
- && !TREE_OVERFLOW (arg0))
+ && !TREE_OVERFLOW (arg0)
+ && (n > 0
+ || (!flag_trapping_math && !flag_errno_math)
+ || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
{
REAL_VALUE_TYPE x;
bool inexact;
{
tree narg0 = fold_strip_sign_ops (arg0);
if (narg0)
- return build_call_expr (fndecl, 2, narg0, arg1);
+ return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
}
}
}
{
tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
tree arg = CALL_EXPR_ARG (arg0, 0);
- arg = fold_build2 (MULT_EXPR, type, arg, arg1);
- return build_call_expr (expfn, 1, arg);
+ arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
+ return build_call_expr_loc (loc, expfn, 1, arg);
}
/* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
if (BUILTIN_SQRT_P (fcode))
{
tree narg0 = CALL_EXPR_ARG (arg0, 0);
- tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
+ tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
build_real (type, dconsthalf));
- return build_call_expr (fndecl, 2, narg0, narg1);
+ return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
}
/* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
if (tree_expr_nonnegative_p (arg))
{
const REAL_VALUE_TYPE dconstroot
- = real_value_truncate (TYPE_MODE (type),
- *get_real_const (rv_third));
- tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
+ = real_value_truncate (TYPE_MODE (type), dconst_third ());
+ tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
build_real (type, dconstroot));
- return build_call_expr (fndecl, 2, arg, narg1);
+ return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
}
}
{
tree arg00 = CALL_EXPR_ARG (arg0, 0);
tree arg01 = CALL_EXPR_ARG (arg0, 1);
- tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
- return build_call_expr (fndecl, 2, arg00, narg1);
+ tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
+ return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
}
}
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
+fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
tree arg0, tree arg1, tree type)
{
if (!validate_arg (arg0, REAL_TYPE)
/* Optimize pow(1.0,y) = 1.0. */
if (real_onep (arg0))
- return omit_one_operand (type, build_real (type, dconst1), arg1);
+ return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
if (host_integerp (arg1, 0))
{
/* Optimize pow(x,0) = 1.0. */
if (c == 0)
- return omit_one_operand (type, build_real (type, dconst1),
+ return omit_one_operand_loc (loc, type, build_real (type, dconst1),
arg0);
/* Optimize pow(x,1) = x. */
/* Optimize pow(x,-1) = 1.0/x. */
if (c == -1)
- return fold_build2 (RDIV_EXPR, type,
+ return fold_build2_loc (loc, RDIV_EXPR, type,
build_real (type, dconst1), arg0);
}
FUNC is the corresponding MPFR exponent function. */
static tree
-fold_builtin_exponent (tree fndecl, tree arg,
+fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
if (validate_arg (arg, REAL_TYPE))
&& (fcode == BUILT_IN_LOG10
|| fcode == BUILT_IN_LOG10F
|| fcode == BUILT_IN_LOG10L)))
- return fold_convert (type, CALL_EXPR_ARG (arg, 0));
+ return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
}
}
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
+fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
+ tree type, bool ignore)
{
- tree var, ret;
+ tree var, ret, etype;
unsigned HOST_WIDE_INT length, cval;
if (! validate_arg (dest, POINTER_TYPE)
/* If the LEN parameter is zero, return DEST. */
if (integer_zerop (len))
- return omit_one_operand (type, dest, c);
+ return omit_one_operand_loc (loc, type, dest, c);
if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
return NULL_TREE;
if (TREE_THIS_VOLATILE (var))
return NULL_TREE;
- if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
- && !POINTER_TYPE_P (TREE_TYPE (var)))
+ etype = TREE_TYPE (var);
+ if (TREE_CODE (etype) == ARRAY_TYPE)
+ etype = TREE_TYPE (etype);
+
+ if (!INTEGRAL_TYPE_P (etype)
+ && !POINTER_TYPE_P (etype))
return NULL_TREE;
if (! var_decl_component_p (var))
return NULL_TREE;
length = tree_low_cst (len, 1);
- if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
+ if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
|| get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
< (int) length)
return NULL_TREE;
cval |= (cval << 31) << 1;
}
- ret = build_int_cst_type (TREE_TYPE (var), cval);
- ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
+ ret = build_int_cst_type (etype, cval);
+ var = build_fold_indirect_ref_loc (loc,
+ fold_convert_loc (loc,
+ build_pointer_type (etype),
+ dest));
+ ret = build2 (MODIFY_EXPR, etype, var, ret);
if (ignore)
return ret;
- return omit_one_operand (type, dest, ret);
+ return omit_one_operand_loc (loc, type, dest, ret);
}
/* Fold function call to builtin memset. Return
NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_bzero (tree dest, tree size, bool ignore)
+fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
{
if (! validate_arg (dest, POINTER_TYPE)
|| ! validate_arg (size, INTEGER_TYPE))
so that if it isn't expanded inline, we fallback to
calling bzero instead of memset. */
- return fold_builtin_memset (dest, integer_zero_node,
- fold_convert (sizetype, size),
+ return fold_builtin_memset (loc, dest, integer_zero_node,
+ fold_convert_loc (loc, sizetype, size),
void_type_node, ignore);
}
(memmove). */
static tree
-fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
+fold_builtin_memory_op (location_t loc, tree dest, tree src,
+ tree len, tree type, bool ignore, int endp)
{
tree destvar, srcvar, expr;
/* If the LEN parameter is zero, return DEST. */
if (integer_zerop (len))
- return omit_one_operand (type, dest, src);
+ return omit_one_operand_loc (loc, type, dest, src);
/* If SRC and DEST are the same (and not volatile), return
DEST{,+LEN,+LEN-1}. */
else
{
tree srctype, desttype;
+ int src_align, dest_align;
+
if (endp == 3)
{
- int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
- int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
+ dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
/* Both DEST and SRC must be pointer types.
??? This is what old code did. Is the testing for pointer types
really mandatory?
If either SRC is readonly or length is 1, we can use memcpy. */
- if (dest_align && src_align
- && (readonly_data_expr (src)
- || (host_integerp (len, 1)
- && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
- tree_low_cst (len, 1)))))
+ if (!dest_align || !src_align)
+ return NULL_TREE;
+ if (readonly_data_expr (src)
+ || (host_integerp (len, 1)
+ && (MIN (src_align, dest_align) / BITS_PER_UNIT
+ >= tree_low_cst (len, 1))))
{
tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, len);
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
- return NULL_TREE;
- }
- if (!host_integerp (len, 0))
- return NULL_TREE;
- /* FIXME:
- This logic lose for arguments like (type *)malloc (sizeof (type)),
- since we strip the casts of up to VOID return value from malloc.
- Perhaps we ought to inherit type from non-VOID argument here? */
- STRIP_NOPS (src);
- STRIP_NOPS (dest);
- srctype = TREE_TYPE (TREE_TYPE (src));
- desttype = TREE_TYPE (TREE_TYPE (dest));
- if (!srctype || !desttype
- || !TYPE_SIZE_UNIT (srctype)
- || !TYPE_SIZE_UNIT (desttype)
- || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
+ /* If *src and *dest can't overlap, optimize into memcpy as well. */
+ srcvar = build_fold_indirect_ref_loc (loc, src);
+ destvar = build_fold_indirect_ref_loc (loc, dest);
+ if (srcvar
+ && !TREE_THIS_VOLATILE (srcvar)
+ && destvar
+ && !TREE_THIS_VOLATILE (destvar))
+ {
+ tree src_base, dest_base, fn;
+ HOST_WIDE_INT src_offset = 0, dest_offset = 0;
+ HOST_WIDE_INT size = -1;
+ HOST_WIDE_INT maxsize = -1;
+
+ src_base = srcvar;
+ if (handled_component_p (src_base))
+ src_base = get_ref_base_and_extent (src_base, &src_offset,
+ &size, &maxsize);
+ dest_base = destvar;
+ if (handled_component_p (dest_base))
+ dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
+ &size, &maxsize);
+ if (host_integerp (len, 1))
+ {
+ maxsize = tree_low_cst (len, 1);
+ if (maxsize
+ > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
+ maxsize = -1;
+ else
+ maxsize *= BITS_PER_UNIT;
+ }
+ else
+ maxsize = -1;
+ if (SSA_VAR_P (src_base)
+ && SSA_VAR_P (dest_base))
+ {
+ if (operand_equal_p (src_base, dest_base, 0)
+ && ranges_overlap_p (src_offset, maxsize,
+ dest_offset, maxsize))
+ return NULL_TREE;
+ }
+ else if (TREE_CODE (src_base) == INDIRECT_REF
+ && TREE_CODE (dest_base) == INDIRECT_REF)
+ {
+ if (! operand_equal_p (TREE_OPERAND (src_base, 0),
+ TREE_OPERAND (dest_base, 0), 0)
+ || ranges_overlap_p (src_offset, maxsize,
+ dest_offset, maxsize))
+ return NULL_TREE;
+ }
+ else
+ return NULL_TREE;
+
+ fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
+ if (!fn)
+ return NULL_TREE;
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
+ }
+ return NULL_TREE;
+ }
+
+ if (!host_integerp (len, 0))
+ return NULL_TREE;
+ /* FIXME:
+ This logic loses for arguments like (type *)malloc (sizeof (type)),
+ since we strip the casts of up to VOID return value from malloc.
+ Perhaps we ought to inherit type from non-VOID argument here? */
+ STRIP_NOPS (src);
+ STRIP_NOPS (dest);
+ /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
+ if (TREE_CODE (src) == POINTER_PLUS_EXPR)
+ {
+ tree tem = TREE_OPERAND (src, 0);
+ STRIP_NOPS (tem);
+ if (tem != TREE_OPERAND (src, 0))
+ src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
+ }
+ if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
+ {
+ tree tem = TREE_OPERAND (dest, 0);
+ STRIP_NOPS (tem);
+ if (tem != TREE_OPERAND (dest, 0))
+ dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
+ }
+ srctype = TREE_TYPE (TREE_TYPE (src));
+ if (srctype
+ && TREE_CODE (srctype) == ARRAY_TYPE
+ && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
+ {
+ srctype = TREE_TYPE (srctype);
+ STRIP_NOPS (src);
+ src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
+ }
+ desttype = TREE_TYPE (TREE_TYPE (dest));
+ if (desttype
+ && TREE_CODE (desttype) == ARRAY_TYPE
+ && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
+ {
+ desttype = TREE_TYPE (desttype);
+ STRIP_NOPS (dest);
+ dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
+ }
+ if (!srctype || !desttype
+ || !TYPE_SIZE_UNIT (srctype)
+ || !TYPE_SIZE_UNIT (desttype)
+ || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
|| TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
- || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
- || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
+ || TYPE_VOLATILE (srctype)
+ || TYPE_VOLATILE (desttype))
return NULL_TREE;
- if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
- < (int) TYPE_ALIGN (desttype)
- || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
- < (int) TYPE_ALIGN (srctype)))
+ src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
+ dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ if (dest_align < (int) TYPE_ALIGN (desttype)
+ || src_align < (int) TYPE_ALIGN (srctype))
return NULL_TREE;
if (!ignore)
dest = builtin_save_expr (dest);
- srcvar = build_fold_indirect_ref (src);
- if (TREE_THIS_VOLATILE (srcvar))
- return NULL_TREE;
- if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
- return NULL_TREE;
- /* With memcpy, it is possible to bypass aliasing rules, so without
- this check i. e. execute/20060930-2.c would be misoptimized, because
- it use conflicting alias set to hold argument for the memcpy call.
- This check is probably unnecesary with -fno-strict-aliasing.
- Similarly for destvar. See also PR29286. */
- if (!var_decl_component_p (srcvar)
- /* Accept: memcpy (*char_var, "test", 1); that simplify
- to char_var='t'; */
- || is_gimple_min_invariant (srcvar)
- || readonly_data_expr (src))
- return NULL_TREE;
+ srcvar = NULL_TREE;
+ if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
+ {
+ srcvar = build_fold_indirect_ref_loc (loc, src);
+ if (TREE_THIS_VOLATILE (srcvar))
+ return NULL_TREE;
+ else if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
+ srcvar = NULL_TREE;
+ /* With memcpy, it is possible to bypass aliasing rules, so without
+ this check i.e. execute/20060930-2.c would be misoptimized,
+ because it uses a conflicting alias set to hold the argument for the
+ memcpy call. This check is probably unnecessary with
+ -fno-strict-aliasing. Similarly for destvar. See also
+ PR29286. */
+ else if (!var_decl_component_p (srcvar))
+ srcvar = NULL_TREE;
+ }
+
+ destvar = NULL_TREE;
+ if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
+ {
+ destvar = build_fold_indirect_ref_loc (loc, dest);
+ if (TREE_THIS_VOLATILE (destvar))
+ return NULL_TREE;
+ else if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
+ destvar = NULL_TREE;
+ else if (!var_decl_component_p (destvar))
+ destvar = NULL_TREE;
+ }
- destvar = build_fold_indirect_ref (dest);
- if (TREE_THIS_VOLATILE (destvar))
- return NULL_TREE;
- if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
- return NULL_TREE;
- if (!var_decl_component_p (destvar))
+ if (srcvar == NULL_TREE && destvar == NULL_TREE)
return NULL_TREE;
+ if (srcvar == NULL_TREE)
+ {
+ tree srcptype;
+ if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
+ return NULL_TREE;
+
+ srctype = build_qualified_type (desttype, 0);
+ if (src_align < (int) TYPE_ALIGN (srctype))
+ {
+ if (AGGREGATE_TYPE_P (srctype)
+ || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
+ return NULL_TREE;
+
+ srctype = build_variant_type_copy (srctype);
+ TYPE_ALIGN (srctype) = src_align;
+ TYPE_USER_ALIGN (srctype) = 1;
+ TYPE_PACKED (srctype) = 1;
+ }
+ srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
+ src = fold_convert_loc (loc, srcptype, src);
+ srcvar = build_fold_indirect_ref_loc (loc, src);
+ }
+ else if (destvar == NULL_TREE)
+ {
+ tree destptype;
+ if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
+ return NULL_TREE;
+
+ desttype = build_qualified_type (srctype, 0);
+ if (dest_align < (int) TYPE_ALIGN (desttype))
+ {
+ if (AGGREGATE_TYPE_P (desttype)
+ || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
+ return NULL_TREE;
+
+ desttype = build_variant_type_copy (desttype);
+ TYPE_ALIGN (desttype) = dest_align;
+ TYPE_USER_ALIGN (desttype) = 1;
+ TYPE_PACKED (desttype) = 1;
+ }
+ destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
+ dest = fold_convert_loc (loc, destptype, dest);
+ destvar = build_fold_indirect_ref_loc (loc, dest);
+ }
+
if (srctype == desttype
|| (gimple_in_ssa_p (cfun)
&& useless_type_conversion_p (desttype, srctype)))
|| POINTER_TYPE_P (TREE_TYPE (srcvar)))
&& (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
|| POINTER_TYPE_P (TREE_TYPE (destvar))))
- expr = fold_convert (TREE_TYPE (destvar), srcvar);
+ expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
else
- expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
+ expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
+ TREE_TYPE (destvar), srcvar);
expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
}
return expr;
if (endp == 0 || endp == 3)
- return omit_one_operand (type, dest, expr);
+ return omit_one_operand_loc (loc, type, dest, expr);
if (expr == len)
expr = NULL_TREE;
if (endp == 2)
- len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
+ len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
ssize_int (1));
- dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
- dest = fold_convert (type, dest);
+ len = fold_convert_loc (loc, sizetype, len);
+ dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ dest = fold_convert_loc (loc, type, dest);
if (expr)
- dest = omit_one_operand (type, dest, expr);
+ dest = omit_one_operand_loc (loc, type, dest, expr);
return dest;
}
copied. Return NULL_TREE if no simplification can be made. */
tree
-fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
+fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
{
tree fn;
/* If SRC and DEST are the same (and not volatile), return DEST. */
if (operand_equal_p (src, dest, 0))
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
- if (optimize_size)
+ if (optimize_function_for_size_p (cfun))
return NULL_TREE;
fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
return NULL_TREE;
}
- len = size_binop (PLUS_EXPR, len, ssize_int (1));
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
- build_call_expr (fn, 3, dest, src, len));
+ len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ build_call_expr_loc (loc, fn, 3, dest, src, len));
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
Return NULL_TREE if no simplification can be made. */
tree
-fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
+fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
+ tree src, tree len, tree slen)
{
tree fn;
/* If the LEN parameter is zero, return DEST. */
if (integer_zerop (len))
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
/* We can't compare slen with len as constants below if len is not a
constant. */
if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
return NULL_TREE;
- slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
+ slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
/* We do not support simplification of this case, though we do
support it when expanding trees into RTL. */
fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
if (!fn)
return NULL_TREE;
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
- build_call_expr (fn, 3, dest, src, len));
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ build_call_expr_loc (loc, fn, 3, dest, src, len));
}
/* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
+fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
{
if (!validate_arg (arg1, POINTER_TYPE)
|| !validate_arg (arg2, INTEGER_TYPE)
if (target_char_cast (arg2, &c))
return NULL_TREE;
- r = memchr (p1, c, tree_low_cst (len, 1));
+ r = (char *) memchr (p1, c, tree_low_cst (len, 1));
if (r == NULL)
return build_int_cst (TREE_TYPE (arg1), 0);
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
}
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_memcmp (tree arg1, tree arg2, tree len)
+fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
const char *p1, *p2;
/* If the LEN parameter is zero, return zero. */
if (integer_zerop (len))
- return omit_two_operands (integer_type_node, integer_zero_node,
+ return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
arg1, arg2);
/* If ARG1 and ARG2 are the same (and not volatile), return zero. */
if (operand_equal_p (arg1, arg2, 0))
- return omit_one_operand (integer_type_node, integer_zero_node, len);
+ return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
p1 = c_getstr (arg1);
p2 = c_getstr (arg2);
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- tree ind1 = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
+ tree ind1
+ = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
arg1)));
- tree ind2 = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
+ tree ind2
+ = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
arg2)));
- return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
+ return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
}
return NULL_TREE;
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_strcmp (tree arg1, tree arg2)
+fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
{
const char *p1, *p2;
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- return fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg1)));
+ return fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg1)));
}
/* If the first arg is "", return -*(const unsigned char*)arg2. */
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- tree temp = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
+ tree temp
+ = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
arg2)));
- return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
+ return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
}
return NULL_TREE;
Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_strncmp (tree arg1, tree arg2, tree len)
+fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
{
const char *p1, *p2;
/* If the LEN parameter is zero, return zero. */
if (integer_zerop (len))
- return omit_two_operands (integer_type_node, integer_zero_node,
+ return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
arg1, arg2);
/* If ARG1 and ARG2 are the same (and not volatile), return zero. */
if (operand_equal_p (arg1, arg2, 0))
- return omit_one_operand (integer_type_node, integer_zero_node, len);
+ return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
p1 = c_getstr (arg1);
p2 = c_getstr (arg2);
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- return fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg1)));
+ return fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg1)));
}
/* If the first arg is "", and the length is greater than zero,
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- tree temp = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg2)));
- return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
+ tree temp = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg2)));
+ return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
}
/* If len parameter is one, return an expression corresponding to
tree cst_uchar_ptr_node
= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
- tree ind1 = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg1)));
- tree ind2 = fold_convert (integer_type_node,
- build1 (INDIRECT_REF, cst_uchar_node,
- fold_convert (cst_uchar_ptr_node,
- arg2)));
- return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
+ tree ind1 = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg1)));
+ tree ind2 = fold_convert_loc (loc, integer_type_node,
+ build1 (INDIRECT_REF, cst_uchar_node,
+ fold_convert_loc (loc,
+ cst_uchar_ptr_node,
+ arg2)));
+ return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
}
return NULL_TREE;
ARG. Return NULL_TREE if no simplification can be made. */
static tree
-fold_builtin_signbit (tree arg, tree type)
+fold_builtin_signbit (location_t loc, tree arg, tree type)
{
tree temp;
c = TREE_REAL_CST (arg);
temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
- return fold_convert (type, temp);
+ return fold_convert_loc (loc, type, temp);
}
/* If ARG is non-negative, the result is always zero. */
if (tree_expr_nonnegative_p (arg))
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg);
/* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
- return fold_build2 (LT_EXPR, type, arg,
+ return fold_build2_loc (loc, LT_EXPR, type, arg,
build_real (TREE_TYPE (arg), dconst0));
return NULL_TREE;
be made. */
static tree
-fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
+fold_builtin_copysign (location_t loc, tree fndecl,
+ tree arg1, tree arg2, tree type)
{
tree tem;
/* copysign(X,X) is X. */
if (operand_equal_p (arg1, arg2, 0))
- return fold_convert (type, arg1);
+ return fold_convert_loc (loc, type, arg1);
/* If ARG1 and ARG2 are compile-time constants, determine the result. */
if (TREE_CODE (arg1) == REAL_CST
/* copysign(X, Y) is fabs(X) when Y is always non-negative.
Remember to evaluate Y for side-effects. */
if (tree_expr_nonnegative_p (arg2))
- return omit_one_operand (type,
- fold_build1 (ABS_EXPR, type, arg1),
+ return omit_one_operand_loc (loc, type,
+ fold_build1_loc (loc, ABS_EXPR, type, arg1),
arg2);
/* Strip sign changing operations for the first argument. */
tem = fold_strip_sign_ops (arg1);
if (tem)
- return build_call_expr (fndecl, 2, tem, arg2);
+ return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG. */
static tree
-fold_builtin_isascii (tree arg)
+fold_builtin_isascii (location_t loc, tree arg)
{
if (!validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
build_int_cst (NULL_TREE,
~ (unsigned HOST_WIDE_INT) 0x7f));
- return fold_build2 (EQ_EXPR, integer_type_node,
+ return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
arg, integer_zero_node);
}
}
/* Fold a call to builtin toascii with argument ARG. */
static tree
-fold_builtin_toascii (tree arg)
+fold_builtin_toascii (location_t loc, tree arg)
{
if (!validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
/* Transform toascii(c) -> (c & 0x7f). */
- return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
+ return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
build_int_cst (NULL_TREE, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG. */
static tree
-fold_builtin_isdigit (tree arg)
+fold_builtin_isdigit (location_t loc, tree arg)
{
if (!validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
if (target_digit0 == 0)
return NULL_TREE;
- arg = fold_convert (unsigned_type_node, arg);
+ arg = fold_convert_loc (loc, unsigned_type_node, arg);
arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
build_int_cst (unsigned_type_node, target_digit0));
- return fold_build2 (LE_EXPR, integer_type_node, arg,
+ return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
build_int_cst (unsigned_type_node, 9));
}
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
static tree
-fold_builtin_fabs (tree arg, tree type)
+fold_builtin_fabs (location_t loc, tree arg, tree type)
{
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
- arg = fold_convert (type, arg);
+ arg = fold_convert_loc (loc, type, arg);
if (TREE_CODE (arg) == REAL_CST)
return fold_abs_const (arg, type);
- return fold_build1 (ABS_EXPR, type, arg);
+ return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
static tree
-fold_builtin_abs (tree arg, tree type)
+fold_builtin_abs (location_t loc, tree arg, tree type)
{
if (!validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
- arg = fold_convert (type, arg);
+ arg = fold_convert_loc (loc, type, arg);
if (TREE_CODE (arg) == INTEGER_CST)
return fold_abs_const (arg, type);
- return fold_build1 (ABS_EXPR, type, arg);
+ return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to builtin fmin or fmax. */
static tree
-fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
+fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
+ tree type, bool max)
{
if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
{
&& real_isnan (&TREE_REAL_CST (arg0))
&& (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
|| ! TREE_REAL_CST (arg0).signalling))
- return omit_one_operand (type, arg1, arg0);
+ return omit_one_operand_loc (loc, type, arg1, arg0);
if (TREE_CODE (arg1) == REAL_CST
&& real_isnan (&TREE_REAL_CST (arg1))
&& (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
|| ! TREE_REAL_CST (arg1).signalling))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* Transform fmin/fmax(x,x) -> x. */
if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
functions to return the numeric arg if the other one is NaN.
-ffinite-math-only is set. C99 doesn't require -0.0 to be
handled, so we don't have to worry about it either. */
if (flag_finite_math_only)
- return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
- fold_convert (type, arg0),
- fold_convert (type, arg1));
+ return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
+ fold_convert_loc (loc, type, arg0),
+ fold_convert_loc (loc, type, arg1));
}
return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
static tree
-fold_builtin_carg (tree arg, tree type)
+fold_builtin_carg (location_t loc, tree arg, tree type)
{
- if (validate_arg (arg, COMPLEX_TYPE))
+ if (validate_arg (arg, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
{
tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
if (atan2_fn)
{
tree new_arg = builtin_save_expr (arg);
- tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
- tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
- return build_call_expr (atan2_fn, 2, i_arg, r_arg);
+ tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
+ tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
+ return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
}
}
/* Fold a call to builtin logb/ilogb. */
static tree
-fold_builtin_logb (tree arg, tree rettype)
+fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
if (! validate_arg (arg, REAL_TYPE))
return NULL_TREE;
case rvc_inf:
/* If arg is Inf or NaN and we're logb, return it. */
if (TREE_CODE (rettype) == REAL_TYPE)
- return fold_convert (rettype, arg);
+ return fold_convert_loc (loc, rettype, arg);
/* Fall through... */
case rvc_zero:
/* Zero may set errno and/or raise an exception for logb, also
want the exponent as if they were [1.0, 2.0) so get the
exponent and subtract 1. */
if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
- return fold_convert (rettype, build_int_cst (NULL_TREE,
- REAL_EXP (value)-1));
+ return fold_convert_loc (loc, rettype,
+ build_int_cst (NULL_TREE,
+ REAL_EXP (value)-1));
break;
}
}
/* Fold a call to builtin significand, if radix == 2. */
static tree
-fold_builtin_significand (tree arg, tree rettype)
+fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
if (! validate_arg (arg, REAL_TYPE))
return NULL_TREE;
case rvc_nan:
case rvc_inf:
/* If arg is +-0, +-Inf or +-NaN, then return it. */
- return fold_convert (rettype, arg);
+ return fold_convert_loc (loc, rettype, arg);
case rvc_normal:
/* For normal numbers, proceed iff radix == 2. */
if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
/* Fold a call to builtin frexp, we can assume the base is 2. */
static tree
-fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
+fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
return NULL_TREE;
if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
return NULL_TREE;
- arg1 = build_fold_indirect_ref (arg1);
+ arg1 = build_fold_indirect_ref_loc (loc, arg1);
/* Proceed if a valid pointer type was passed in. */
if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
case rvc_nan:
case rvc_inf:
/* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
- return omit_one_operand (rettype, arg0, arg1);
+ return omit_one_operand_loc (loc, rettype, arg0, arg1);
case rvc_normal:
{
/* Since the frexp function always expects base 2, and in
}
/* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
- arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
+ arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
TREE_SIDE_EFFECTS (arg1) = 1;
- return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
+ return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
}
return NULL_TREE;
check the mode of the TYPE parameter in certain cases. */
static tree
-fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
+fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
+ tree type, bool ldexp)
{
if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
{
if (real_zerop (arg0) || integer_zerop (arg1)
|| (TREE_CODE (arg0) == REAL_CST
&& !real_isfinite (&TREE_REAL_CST (arg0))))
- return omit_one_operand (type, arg0, arg1);
+ return omit_one_operand_loc (loc, type, arg0, arg1);
/* If both arguments are constant, then try to evaluate it. */
if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
/* Fold a call to builtin modf. */
static tree
-fold_builtin_modf (tree arg0, tree arg1, tree rettype)
+fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
return NULL_TREE;
if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
return NULL_TREE;
- arg1 = build_fold_indirect_ref (arg1);
+ arg1 = build_fold_indirect_ref_loc (loc, arg1);
/* Proceed if a valid pointer type was passed in. */
if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
}
/* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
- arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
+ arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
build_real (rettype, trunc));
TREE_SIDE_EFFECTS (arg1) = 1;
- return fold_build2 (COMPOUND_EXPR, rettype, arg1,
+ return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
build_real (rettype, frac));
}
ARG is the argument for the call. */
static tree
-fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
+fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
REAL_VALUE_TYPE r;
if (!validate_arg (arg, REAL_TYPE))
- {
- error ("non-floating-point argument to function %qs",
- IDENTIFIER_POINTER (DECL_NAME (fndecl)));
- return error_mark_node;
- }
+ return NULL_TREE;
switch (builtin_index)
{
case BUILT_IN_ISINF:
if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
return NULL_TREE;
+ case BUILT_IN_ISINF_SIGN:
+ {
+ /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
+ /* In a boolean context, GCC will fold the inner COND_EXPR to
+ 1. So e.g. "if (isinf_sign(x))" would be folded to just
+ "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
+ tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
+ tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
+ tree tmp = NULL_TREE;
+
+ arg = builtin_save_expr (arg);
+
+ if (signbit_fn && isinf_fn)
+ {
+ tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
+ tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
+
+ signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
+ signbit_call, integer_zero_node);
+ isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
+ isinf_call, integer_zero_node);
+
+ tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
+ integer_minus_one_node, integer_one_node);
+ tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
+ isinf_call, tmp,
+ integer_zero_node);
+ }
+
+ return tmp;
+ }
+
case BUILT_IN_ISFINITE:
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
&& !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_one_node, arg);
+ return omit_one_operand_loc (loc, type, integer_one_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
case BUILT_IN_ISNAN:
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
- return omit_one_operand (type, integer_zero_node, arg);
+ return omit_one_operand_loc (loc, type, integer_zero_node, arg);
if (TREE_CODE (arg) == REAL_CST)
{
}
arg = builtin_save_expr (arg);
- return fold_build2 (UNORDERED_EXPR, type, arg, arg);
+ return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
default:
gcc_unreachable ();
}
}
+/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
+ This builtin will generate code to return the appropriate floating
+ point classification depending on the value of the floating point
+ number passed in. The possible return values must be supplied as
+ int arguments to the call in the following order: FP_NAN, FP_INFINITE,
+ FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
+ one floating point argument which is "type generic". */
+
+static tree
+fold_builtin_fpclassify (location_t loc, tree exp)
+{
+ tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
+ arg, type, res, tmp;
+ enum machine_mode mode;
+ REAL_VALUE_TYPE r;
+ char buf[128];
+
+ /* Verify the required arguments in the original call. */
+ if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
+ INTEGER_TYPE, INTEGER_TYPE,
+ INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
+ return NULL_TREE;
+
+ fp_nan = CALL_EXPR_ARG (exp, 0);
+ fp_infinite = CALL_EXPR_ARG (exp, 1);
+ fp_normal = CALL_EXPR_ARG (exp, 2);
+ fp_subnormal = CALL_EXPR_ARG (exp, 3);
+ fp_zero = CALL_EXPR_ARG (exp, 4);
+ arg = CALL_EXPR_ARG (exp, 5);
+ type = TREE_TYPE (arg);
+ mode = TYPE_MODE (type);
+ arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
+
+ /* fpclassify(x) ->
+ isnan(x) ? FP_NAN :
+ (fabs(x) == Inf ? FP_INFINITE :
+ (fabs(x) >= DBL_MIN ? FP_NORMAL :
+ (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
+
+ tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
+ build_real (type, dconst0));
+ res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
+ tmp, fp_zero, fp_subnormal);
+
+ sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
+ real_from_string (&r, buf);
+ tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
+ arg, build_real (type, r));
+ res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
+
+ if (HONOR_INFINITIES (mode))
+ {
+ real_inf (&r);
+ tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
+ build_real (type, r));
+ res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
+ fp_infinite, res);
+ }
+
+ if (HONOR_NANS (mode))
+ {
+ tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
+ res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
+ }
+
+ return res;
+}
+
/* Fold a call to an unordered comparison function such as
__builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
being called and ARG0 and ARG1 are the arguments for the call.
the rest. */
static tree
-fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
+fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
enum tree_code unordered_code,
enum tree_code ordered_code)
{
cmp_type = type0;
else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
cmp_type = type1;
- else
- {
- error ("non-floating-point argument to function %qs",
- IDENTIFIER_POINTER (DECL_NAME (fndecl)));
- return error_mark_node;
- }
- arg0 = fold_convert (cmp_type, arg0);
- arg1 = fold_convert (cmp_type, arg1);
+ arg0 = fold_convert_loc (loc, cmp_type, arg0);
+ arg1 = fold_convert_loc (loc, cmp_type, arg1);
if (unordered_code == UNORDERED_EXPR)
{
if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
- return omit_two_operands (type, integer_zero_node, arg0, arg1);
- return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
+ return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
+ return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
}
code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
: ordered_code;
- return fold_build1 (TRUTH_NOT_EXPR, type,
- fold_build2 (code, type, arg0, arg1));
+ return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
+ fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold a call to built-in function FNDECL with 0 arguments.
function returns NULL_TREE if no simplification was possible. */
static tree
-fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
+fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
case BUILT_IN_INFD32:
case BUILT_IN_INFD64:
case BUILT_IN_INFD128:
- return fold_builtin_inf (type, true);
+ return fold_builtin_inf (loc, type, true);
CASE_FLT_FN (BUILT_IN_HUGE_VAL):
- return fold_builtin_inf (type, false);
+ return fold_builtin_inf (loc, type, false);
case BUILT_IN_CLASSIFY_TYPE:
return fold_builtin_classify_type (NULL_TREE);
function returns NULL_TREE if no simplification was possible. */
static tree
-fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
+fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
return fold_builtin_classify_type (arg0);
case BUILT_IN_STRLEN:
- return fold_builtin_strlen (arg0);
+ return fold_builtin_strlen (loc, arg0);
CASE_FLT_FN (BUILT_IN_FABS):
- return fold_builtin_fabs (arg0, type);
+ return fold_builtin_fabs (loc, arg0, type);
case BUILT_IN_ABS:
case BUILT_IN_LABS:
case BUILT_IN_LLABS:
case BUILT_IN_IMAXABS:
- return fold_builtin_abs (arg0, type);
+ return fold_builtin_abs (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_CONJ):
- if (validate_arg (arg0, COMPLEX_TYPE))
- return fold_build1 (CONJ_EXPR, type, arg0);
+ if (validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
break;
CASE_FLT_FN (BUILT_IN_CREAL):
- if (validate_arg (arg0, COMPLEX_TYPE))
- return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));;
+ if (validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
break;
CASE_FLT_FN (BUILT_IN_CIMAG):
if (validate_arg (arg0, COMPLEX_TYPE))
- return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
+ return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
break;
CASE_FLT_FN (BUILT_IN_CCOS):
+ return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
+
CASE_FLT_FN (BUILT_IN_CCOSH):
- /* These functions are "even", i.e. f(x) == f(-x). */
- if (validate_arg (arg0, COMPLEX_TYPE))
- {
- tree narg = fold_strip_sign_ops (arg0);
- if (narg)
- return build_call_expr (fndecl, 1, narg);
- }
+ return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
+
+#ifdef HAVE_mpc
+ CASE_FLT_FN (BUILT_IN_CSIN):
+ if (validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ return do_mpc_arg1 (arg0, type, mpc_sin);
break;
-
+
+ CASE_FLT_FN (BUILT_IN_CSINH):
+ if (validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ return do_mpc_arg1 (arg0, type, mpc_sinh);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_CTAN):
+ if (validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ return do_mpc_arg1 (arg0, type, mpc_tan);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_CTANH):
+ if (validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ return do_mpc_arg1 (arg0, type, mpc_tanh);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_CLOG):
+ if (validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ return do_mpc_arg1 (arg0, type, mpc_log);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_CSQRT):
+ if (validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
+ return do_mpc_arg1 (arg0, type, mpc_sqrt);
+ break;
+#endif
+
CASE_FLT_FN (BUILT_IN_CABS):
- return fold_builtin_cabs (arg0, type, fndecl);
+ return fold_builtin_cabs (loc, arg0, type, fndecl);
CASE_FLT_FN (BUILT_IN_CARG):
- return fold_builtin_carg (arg0, type);
+ return fold_builtin_carg (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_SQRT):
- return fold_builtin_sqrt (arg0, type);
+ return fold_builtin_sqrt (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_CBRT):
- return fold_builtin_cbrt (arg0, type);
+ return fold_builtin_cbrt (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_ASIN):
if (validate_arg (arg0, REAL_TYPE))
break;
CASE_FLT_FN (BUILT_IN_COS):
- return fold_builtin_cos (arg0, type, fndecl);
- break;
+ return fold_builtin_cos (loc, arg0, type, fndecl);
CASE_FLT_FN (BUILT_IN_TAN):
return fold_builtin_tan (arg0, type);
CASE_FLT_FN (BUILT_IN_CEXP):
- return fold_builtin_cexp (arg0, type);
+ return fold_builtin_cexp (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_CEXPI):
if (validate_arg (arg0, REAL_TYPE))
break;
CASE_FLT_FN (BUILT_IN_COSH):
- return fold_builtin_cosh (arg0, type, fndecl);
+ return fold_builtin_cosh (loc, arg0, type, fndecl);
CASE_FLT_FN (BUILT_IN_TANH):
if (validate_arg (arg0, REAL_TYPE))
break;
CASE_FLT_FN (BUILT_IN_EXP):
- return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
+ return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
CASE_FLT_FN (BUILT_IN_EXP2):
- return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
+ return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
CASE_FLT_FN (BUILT_IN_EXP10):
CASE_FLT_FN (BUILT_IN_POW10):
- return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
+ return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
CASE_FLT_FN (BUILT_IN_EXPM1):
if (validate_arg (arg0, REAL_TYPE))
break;
CASE_FLT_FN (BUILT_IN_LOG):
- return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
+ return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
CASE_FLT_FN (BUILT_IN_LOG2):
- return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
+ return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
CASE_FLT_FN (BUILT_IN_LOG10):
- return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
+ return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
CASE_FLT_FN (BUILT_IN_LOG1P):
if (validate_arg (arg0, REAL_TYPE))
&dconstm1, NULL, false);
break;
-#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
CASE_FLT_FN (BUILT_IN_J0):
if (validate_arg (arg0, REAL_TYPE))
return do_mpfr_arg1 (arg0, type, mpfr_j0,
return do_mpfr_arg1 (arg0, type, mpfr_y1,
&dconst0, NULL, false);
break;
-#endif
CASE_FLT_FN (BUILT_IN_NAN):
case BUILT_IN_NAND32:
return fold_builtin_nan (arg0, type, false);
CASE_FLT_FN (BUILT_IN_FLOOR):
- return fold_builtin_floor (fndecl, arg0);
+ return fold_builtin_floor (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_CEIL):
- return fold_builtin_ceil (fndecl, arg0);
+ return fold_builtin_ceil (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_TRUNC):
- return fold_builtin_trunc (fndecl, arg0);
+ return fold_builtin_trunc (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_ROUND):
- return fold_builtin_round (fndecl, arg0);
+ return fold_builtin_round (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_NEARBYINT):
CASE_FLT_FN (BUILT_IN_RINT):
- return fold_trunc_transparent_mathfn (fndecl, arg0);
+ return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_LCEIL):
CASE_FLT_FN (BUILT_IN_LLCEIL):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LLROUND):
- return fold_builtin_int_roundingfn (fndecl, arg0);
+ return fold_builtin_int_roundingfn (loc, fndecl, arg0);
CASE_FLT_FN (BUILT_IN_LRINT):
CASE_FLT_FN (BUILT_IN_LLRINT):
- return fold_fixed_mathfn (fndecl, arg0);
+ return fold_fixed_mathfn (loc, fndecl, arg0);
case BUILT_IN_BSWAP32:
case BUILT_IN_BSWAP64:
return fold_builtin_bitop (fndecl, arg0);
CASE_FLT_FN (BUILT_IN_SIGNBIT):
- return fold_builtin_signbit (arg0, type);
+ return fold_builtin_signbit (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
- return fold_builtin_significand (arg0, type);
+ return fold_builtin_significand (loc, arg0, type);
CASE_FLT_FN (BUILT_IN_ILOGB):
CASE_FLT_FN (BUILT_IN_LOGB):
- return fold_builtin_logb (arg0, type);
+ return fold_builtin_logb (loc, arg0, type);
case BUILT_IN_ISASCII:
- return fold_builtin_isascii (arg0);
+ return fold_builtin_isascii (loc, arg0);
case BUILT_IN_TOASCII:
- return fold_builtin_toascii (arg0);
+ return fold_builtin_toascii (loc, arg0);
case BUILT_IN_ISDIGIT:
- return fold_builtin_isdigit (arg0);
+ return fold_builtin_isdigit (loc, arg0);
CASE_FLT_FN (BUILT_IN_FINITE):
case BUILT_IN_FINITED32:
case BUILT_IN_FINITED64:
case BUILT_IN_FINITED128:
case BUILT_IN_ISFINITE:
- return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
+ return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
CASE_FLT_FN (BUILT_IN_ISINF):
case BUILT_IN_ISINFD32:
case BUILT_IN_ISINFD64:
case BUILT_IN_ISINFD128:
- return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
+ return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
+
+ case BUILT_IN_ISINF_SIGN:
+ return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
CASE_FLT_FN (BUILT_IN_ISNAN):
case BUILT_IN_ISNAND32:
case BUILT_IN_ISNAND64:
case BUILT_IN_ISNAND128:
- return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
-
- case BUILT_IN_ISNORMAL:
- if (!validate_arg (arg0, REAL_TYPE))
- {
- error ("non-floating-point argument to function %qs",
- IDENTIFIER_POINTER (DECL_NAME (fndecl)));
- return error_mark_node;
- }
- break;
+ return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
case BUILT_IN_PRINTF:
case BUILT_IN_PRINTF_UNLOCKED:
case BUILT_IN_VPRINTF:
- return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
+ return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
default:
break;
function returns NULL_TREE if no simplification was possible. */
static tree
-fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
+fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
switch (fcode)
{
-#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
CASE_FLT_FN (BUILT_IN_JN):
if (validate_arg (arg0, INTEGER_TYPE)
&& validate_arg (arg1, REAL_TYPE))
&& validate_arg(arg1, POINTER_TYPE))
return do_mpfr_lgamma_r (arg0, arg1, type);
break;
-#endif
CASE_FLT_FN (BUILT_IN_ATAN2):
if (validate_arg (arg0, REAL_TYPE)
break;
CASE_FLT_FN (BUILT_IN_HYPOT):
- return fold_builtin_hypot (fndecl, arg0, arg1, type);
+ return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
+
+#ifdef HAVE_mpc_pow
+ CASE_FLT_FN (BUILT_IN_CPOW):
+ if (validate_arg (arg0, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
+ && validate_arg (arg1, COMPLEX_TYPE)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
+ return do_mpc_arg2 (arg0, arg1, type, mpc_pow);
+ break;
+#endif
CASE_FLT_FN (BUILT_IN_LDEXP):
- return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
+ return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
CASE_FLT_FN (BUILT_IN_SCALBN):
CASE_FLT_FN (BUILT_IN_SCALBLN):
- return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
+ return fold_builtin_load_exponent (loc, arg0, arg1,
+ type, /*ldexp=*/false);
CASE_FLT_FN (BUILT_IN_FREXP):
- return fold_builtin_frexp (arg0, arg1, type);
+ return fold_builtin_frexp (loc, arg0, arg1, type);
CASE_FLT_FN (BUILT_IN_MODF):
- return fold_builtin_modf (arg0, arg1, type);
+ return fold_builtin_modf (loc, arg0, arg1, type);
case BUILT_IN_BZERO:
- return fold_builtin_bzero (arg0, arg1, ignore);
+ return fold_builtin_bzero (loc, arg0, arg1, ignore);
case BUILT_IN_FPUTS:
- return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
+ return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
case BUILT_IN_FPUTS_UNLOCKED:
- return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
+ return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
case BUILT_IN_STRSTR:
- return fold_builtin_strstr (arg0, arg1, type);
+ return fold_builtin_strstr (loc, arg0, arg1, type);
case BUILT_IN_STRCAT:
- return fold_builtin_strcat (arg0, arg1);
+ return fold_builtin_strcat (loc, arg0, arg1);
case BUILT_IN_STRSPN:
- return fold_builtin_strspn (arg0, arg1);
+ return fold_builtin_strspn (loc, arg0, arg1);
case BUILT_IN_STRCSPN:
- return fold_builtin_strcspn (arg0, arg1);
+ return fold_builtin_strcspn (loc, arg0, arg1);
case BUILT_IN_STRCHR:
case BUILT_IN_INDEX:
- return fold_builtin_strchr (arg0, arg1, type);
+ return fold_builtin_strchr (loc, arg0, arg1, type);
case BUILT_IN_STRRCHR:
case BUILT_IN_RINDEX:
- return fold_builtin_strrchr (arg0, arg1, type);
+ return fold_builtin_strrchr (loc, arg0, arg1, type);
case BUILT_IN_STRCPY:
- return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
+ return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
+
+ case BUILT_IN_STPCPY:
+ if (ignore)
+ {
+ tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
+ if (!fn)
+ break;
+
+ return build_call_expr_loc (loc, fn, 2, arg0, arg1);
+ }
+ break;
case BUILT_IN_STRCMP:
- return fold_builtin_strcmp (arg0, arg1);
+ return fold_builtin_strcmp (loc, arg0, arg1);
case BUILT_IN_STRPBRK:
- return fold_builtin_strpbrk (arg0, arg1, type);
+ return fold_builtin_strpbrk (loc, arg0, arg1, type);
case BUILT_IN_EXPECT:
- return fold_builtin_expect (arg0, arg1);
+ return fold_builtin_expect (loc, arg0, arg1);
CASE_FLT_FN (BUILT_IN_POW):
- return fold_builtin_pow (fndecl, arg0, arg1, type);
+ return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
CASE_FLT_FN (BUILT_IN_POWI):
- return fold_builtin_powi (fndecl, arg0, arg1, type);
+ return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
CASE_FLT_FN (BUILT_IN_COPYSIGN):
- return fold_builtin_copysign (fndecl, arg0, arg1, type);
+ return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
CASE_FLT_FN (BUILT_IN_FMIN):
- return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
+ return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
CASE_FLT_FN (BUILT_IN_FMAX):
- return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
+ return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
case BUILT_IN_ISGREATER:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNLE_EXPR, LE_EXPR);
case BUILT_IN_ISGREATEREQUAL:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNLT_EXPR, LT_EXPR);
case BUILT_IN_ISLESS:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNGE_EXPR, GE_EXPR);
case BUILT_IN_ISLESSEQUAL:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNGT_EXPR, GT_EXPR);
case BUILT_IN_ISLESSGREATER:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNEQ_EXPR, EQ_EXPR);
case BUILT_IN_ISUNORDERED:
- return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
+ return fold_builtin_unordered_cmp (loc, fndecl,
+ arg0, arg1, UNORDERED_EXPR,
NOP_EXPR);
/* We do the folding for va_start in the expander. */
break;
case BUILT_IN_SPRINTF:
- return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
+ return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
case BUILT_IN_OBJECT_SIZE:
return fold_builtin_object_size (arg0, arg1);
case BUILT_IN_PRINTF:
case BUILT_IN_PRINTF_UNLOCKED:
case BUILT_IN_VPRINTF:
- return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
+ return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
case BUILT_IN_PRINTF_CHK:
case BUILT_IN_VPRINTF_CHK:
|| TREE_SIDE_EFFECTS (arg0))
return NULL_TREE;
else
- return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
+ return fold_builtin_printf (loc, fndecl,
+ arg1, NULL_TREE, ignore, fcode);
break;
case BUILT_IN_FPRINTF:
case BUILT_IN_FPRINTF_UNLOCKED:
case BUILT_IN_VFPRINTF:
- return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
+ return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
ignore, fcode);
default:
This function returns NULL_TREE if no simplification was possible. */
static tree
-fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
+fold_builtin_3 (location_t loc, tree fndecl,
+ tree arg0, tree arg1, tree arg2, bool ignore)
{
tree type = TREE_TYPE (TREE_TYPE (fndecl));
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
{
CASE_FLT_FN (BUILT_IN_SINCOS):
- return fold_builtin_sincos (arg0, arg1, arg2);
+ return fold_builtin_sincos (loc, arg0, arg1, arg2);
CASE_FLT_FN (BUILT_IN_FMA):
if (validate_arg (arg0, REAL_TYPE)
return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
break;
-#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
CASE_FLT_FN (BUILT_IN_REMQUO):
if (validate_arg (arg0, REAL_TYPE)
&& validate_arg(arg1, REAL_TYPE)
&& validate_arg(arg2, POINTER_TYPE))
return do_mpfr_remquo (arg0, arg1, arg2);
break;
-#endif
case BUILT_IN_MEMSET:
- return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
+ return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
case BUILT_IN_BCOPY:
- return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
+ return fold_builtin_memory_op (loc, arg1, arg0, arg2,
+ void_type_node, true, /*endp=*/3);
case BUILT_IN_MEMCPY:
- return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
+ return fold_builtin_memory_op (loc, arg0, arg1, arg2,
+ type, ignore, /*endp=*/0);
case BUILT_IN_MEMPCPY:
- return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
+ return fold_builtin_memory_op (loc, arg0, arg1, arg2,
+ type, ignore, /*endp=*/1);
case BUILT_IN_MEMMOVE:
- return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
+ return fold_builtin_memory_op (loc, arg0, arg1, arg2,
+ type, ignore, /*endp=*/3);
case BUILT_IN_STRNCAT:
- return fold_builtin_strncat (arg0, arg1, arg2);
+ return fold_builtin_strncat (loc, arg0, arg1, arg2);
case BUILT_IN_STRNCPY:
- return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
+ return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
case BUILT_IN_STRNCMP:
- return fold_builtin_strncmp (arg0, arg1, arg2);
+ return fold_builtin_strncmp (loc, arg0, arg1, arg2);
case BUILT_IN_MEMCHR:
- return fold_builtin_memchr (arg0, arg1, arg2, type);
+ return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
case BUILT_IN_BCMP:
case BUILT_IN_MEMCMP:
- return fold_builtin_memcmp (arg0, arg1, arg2);;
+ return fold_builtin_memcmp (loc, arg0, arg1, arg2);
case BUILT_IN_SPRINTF:
- return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
+ return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
case BUILT_IN_STRCPY_CHK:
case BUILT_IN_STPCPY_CHK:
- return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
+ return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
ignore, fcode);
case BUILT_IN_STRCAT_CHK:
- return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
+ return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
case BUILT_IN_PRINTF_CHK:
case BUILT_IN_VPRINTF_CHK:
|| TREE_SIDE_EFFECTS (arg0))
return NULL_TREE;
else
- return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
+ return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
break;
case BUILT_IN_FPRINTF:
case BUILT_IN_FPRINTF_UNLOCKED:
case BUILT_IN_VFPRINTF:
- return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
+ return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
+ ignore, fcode);
case BUILT_IN_FPRINTF_CHK:
case BUILT_IN_VFPRINTF_CHK:
|| TREE_SIDE_EFFECTS (arg1))
return NULL_TREE;
else
- return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
+ return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
ignore, fcode);
default:
possible. */
static tree
-fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
- bool ignore)
+fold_builtin_4 (location_t loc, tree fndecl,
+ tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
case BUILT_IN_MEMPCPY_CHK:
case BUILT_IN_MEMMOVE_CHK:
case BUILT_IN_MEMSET_CHK:
- return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
+ return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
NULL_TREE, ignore,
DECL_FUNCTION_CODE (fndecl));
case BUILT_IN_STRNCPY_CHK:
- return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
+ return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
case BUILT_IN_STRNCAT_CHK:
- return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
+ return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
case BUILT_IN_FPRINTF_CHK:
case BUILT_IN_VFPRINTF_CHK:
|| TREE_SIDE_EFFECTS (arg1))
return NULL_TREE;
else
- return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
+ return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
ignore, fcode);
break;
#define MAX_ARGS_TO_FOLD_BUILTIN 4
static tree
-fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
+fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
- enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
tree ret = NULL_TREE;
- /* Verify the number of arguments for type-generic and thus variadic
- builtins. */
- switch (fcode)
- {
- case BUILT_IN_ISFINITE:
- case BUILT_IN_ISINF:
- case BUILT_IN_ISNAN:
- case BUILT_IN_ISNORMAL:
- if (nargs < 1)
- {
- error ("too few arguments to function %qs",
- IDENTIFIER_POINTER (DECL_NAME (fndecl)));
- return error_mark_node;
- }
- else if (nargs > 1)
- {
- error ("too many arguments to function %qs",
- IDENTIFIER_POINTER (DECL_NAME (fndecl)));
- return error_mark_node;
- }
- break;
-
- case BUILT_IN_ISGREATER:
- case BUILT_IN_ISGREATEREQUAL:
- case BUILT_IN_ISLESS:
- case BUILT_IN_ISLESSEQUAL:
- case BUILT_IN_ISLESSGREATER:
- case BUILT_IN_ISUNORDERED:
- if (nargs < 2)
- {
- error ("too few arguments to function %qs",
- IDENTIFIER_POINTER (DECL_NAME (fndecl)));
- return error_mark_node;
- }
- else if (nargs > 2)
- {
- error ("too many arguments to function %qs",
- IDENTIFIER_POINTER (DECL_NAME (fndecl)));
- return error_mark_node;
- }
- break;
-
- default:
- break;
- }
-
switch (nargs)
{
case 0:
- ret = fold_builtin_0 (fndecl, ignore);
+ ret = fold_builtin_0 (loc, fndecl, ignore);
break;
case 1:
- ret = fold_builtin_1 (fndecl, args[0], ignore);
+ ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
break;
case 2:
- ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
+ ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
break;
case 3:
- ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
+ ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
break;
case 4:
- ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
+ ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
ignore);
break;
default:
}
if (ret)
{
- ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
+ ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
+ SET_EXPR_LOCATION (ret, loc);
TREE_NO_WARNING (ret) = 1;
return ret;
}
result of the function call is ignored. */
static tree
-fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
+fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
+ bool ignore ATTRIBUTE_UNUSED)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
tree ret = NULL_TREE;
{
case BUILT_IN_SPRINTF_CHK:
case BUILT_IN_VSPRINTF_CHK:
- ret = fold_builtin_sprintf_chk (exp, fcode);
+ ret = fold_builtin_sprintf_chk (loc, exp, fcode);
break;
case BUILT_IN_SNPRINTF_CHK:
case BUILT_IN_VSNPRINTF_CHK:
- ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
+ ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
+ break;
+
+ case BUILT_IN_FPCLASSIFY:
+ ret = fold_builtin_fpclassify (loc, exp);
+ break;
default:
break;
if (ret)
{
ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
+ SET_EXPR_LOCATION (ret, loc);
TREE_NO_WARNING (ret) = 1;
return ret;
}
return NULL_TREE;
}
+/* Return true if FNDECL shouldn't be folded right now.
+ If a built-in function has an inline attribute always_inline
+ wrapper, defer folding it until after always_inline functions have
+ been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
+ might not be performed. */
+
+static bool
+avoid_folding_inline_builtin (tree fndecl)
+{
+ return (DECL_DECLARED_INLINE_P (fndecl)
+ && DECL_DISREGARD_INLINE_LIMITS (fndecl)
+ && cfun
+ && !cfun->always_inline_functions_inlined
+ && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
+}
+
/* A wrapper function for builtin folding that prevents warnings for
"statement without effect" and the like, caused by removing the
call node earlier than the warning is generated. */
tree
-fold_call_expr (tree exp, bool ignore)
+fold_call_expr (location_t loc, tree exp, bool ignore)
{
tree ret = NULL_TREE;
tree fndecl = get_callee_fndecl (exp);
return NULL_TREE;
}
+ if (avoid_folding_inline_builtin (fndecl))
+ return NULL_TREE;
+
/* FIXME: Don't use a list in this interface. */
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
{
tree *args = CALL_EXPR_ARGP (exp);
- ret = fold_builtin_n (fndecl, args, nargs, ignore);
+ ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
}
if (!ret)
- ret = fold_builtin_varargs (fndecl, exp, ignore);
+ ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
if (ret)
- {
- /* Propagate location information from original call to
- expansion of builtin. Otherwise things like
- maybe_emit_chk_warning, that operate on the expansion
- of a builtin, will use the wrong location information. */
- if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
- {
- tree realret = ret;
- if (TREE_CODE (ret) == NOP_EXPR)
- realret = TREE_OPERAND (ret, 0);
- if (CAN_HAVE_LOCATION_P (realret)
- && !EXPR_HAS_LOCATION (realret))
- SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
- }
- return ret;
- }
+ return ret;
}
}
return NULL_TREE;
function to be called and ARGLIST is a TREE_LIST of arguments. */
tree
-build_function_call_expr (tree fndecl, tree arglist)
+build_function_call_expr (location_t loc, tree fndecl, tree arglist)
{
tree fntype = TREE_TYPE (fndecl);
tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
int n = list_length (arglist);
tree *argarray = (tree *) alloca (n * sizeof (tree));
int i;
-
+
for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
argarray[i] = TREE_VALUE (arglist);
- return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
+ return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}
/* Conveniently construct a function call expression. FNDECL names the
parameters are the argument expressions. */
tree
-build_call_expr (tree fndecl, int n, ...)
+build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
va_list ap;
tree fntype = TREE_TYPE (fndecl);
for (i = 0; i < n; i++)
argarray[i] = va_arg (ap, tree);
va_end (ap);
- return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
+ return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
N arguments are passed in the array ARGARRAY. */
tree
-fold_builtin_call_array (tree type,
+fold_builtin_call_array (location_t loc, tree type,
tree fn,
int n,
tree *argarray)
&& TREE_CODE (fndecl2) == FUNCTION_DECL
&& DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
- return build_call_array (type, fn, n, argarray);
+ return build_call_array_loc (loc, type, fn, n, argarray);
}
+ if (avoid_folding_inline_builtin (fndecl))
+ return build_call_array_loc (loc, type, fn, n, argarray);
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
{
tree arglist = NULL_TREE;
ret = targetm.fold_builtin (fndecl, arglist, false);
if (ret)
return ret;
+ return build_call_array_loc (loc, type, fn, n, argarray);
}
else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
{
/* First try the transformations that don't require consing up
an exp. */
- ret = fold_builtin_n (fndecl, argarray, n, false);
+ ret = fold_builtin_n (loc, fndecl, argarray, n, false);
if (ret)
return ret;
}
/* If we got this far, we need to build an exp. */
- exp = build_call_array (type, fn, n, argarray);
- ret = fold_builtin_varargs (fndecl, exp, false);
+ exp = build_call_array_loc (loc, type, fn, n, argarray);
+ ret = fold_builtin_varargs (loc, fndecl, exp, false);
return ret ? ret : exp;
}
}
- return build_call_array (type, fn, n, argarray);
+ return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
to do varargs-to-varargs transformations. */
static tree
-rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
+rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
int oldnargs = call_expr_nargs (exp);
int nargs = oldnargs - skip + n;
int i, j;
va_list ap;
- buffer = alloca (nargs * sizeof (tree));
+ buffer = XALLOCAVEC (tree, nargs);
va_start (ap, n);
for (i = 0; i < n; i++)
buffer[i] = va_arg (ap, tree);
else
buffer = CALL_EXPR_ARGP (exp) + skip;
- return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
+ return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
/* Validate a single argument ARG against a tree code CODE representing
/* This function validates the types of a function call argument list
against a specified list of tree_codes. If the last specifier is a 0,
that represents an ellipses, otherwise the last specifier must be a
+ VOID_TYPE.
+
+ This is the GIMPLE version of validate_arglist. Eventually we want to
+ completely convert builtins.c to work from GIMPLEs and the tree based
+ validate_arglist will then be removed. */
+
+bool
+validate_gimple_arglist (const_gimple call, ...)
+{
+ enum tree_code code;
+ bool res = 0;
+ va_list ap;
+ const_tree arg;
+ size_t i;
+
+ va_start (ap, call);
+ i = 0;
+
+ do
+ {
+ /* Fetch as int: enum tree_code arguments are promoted to int
+ when passed through an ellipsis. */
+ code = (enum tree_code) va_arg (ap, int);
+ switch (code)
+ {
+ case 0:
+ /* This signifies an ellipsis; any further arguments are all ok. */
+ res = true;
+ goto end;
+ case VOID_TYPE:
+ /* This signifies an endlink; if no arguments remain, return
+ true, otherwise return false. */
+ res = (i == gimple_call_num_args (call));
+ goto end;
+ default:
+ /* If no parameters remain or the parameter's code does not
+ match the specified code, return false. Otherwise continue
+ checking any remaining arguments. */
+ arg = gimple_call_arg (call, i++);
+ if (!validate_arg (arg, code))
+ goto end;
+ break;
+ }
+ }
+ while (1);
+
+ /* All exits funnel through this label so the single va_start above
+ is matched by exactly one va_end. */
+ end: ;
+ va_end (ap);
+
+ return res;
+}
+
+/* This function validates the types of a function call argument list
+ against a specified list of tree_codes. If the last specifier is a 0,
+ that represents an ellipses, otherwise the last specifier must be a
VOID_TYPE. */
bool
do
{
- code = va_arg (ap, enum tree_code);
+ code = (enum tree_code) va_arg (ap, int);
switch (code)
{
case 0:
form of the builtin function call. */
static tree
-fold_builtin_strstr (tree s1, tree s2, tree type)
+fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, POINTER_TYPE))
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
s1, size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
/* The argument is const char *, and the result is char *, so we need
a type conversion here to avoid a warning. */
if (p2[0] == '\0')
- return fold_convert (type, s1);
+ return fold_convert_loc (loc, type, s1);
if (p2[1] != '\0')
return NULL_TREE;
/* New argument list transforming strstr(s1, s2) to
strchr(s1, s2[0]). */
- return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
+ return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
}
}
form of the builtin function call. */
static tree
-fold_builtin_strchr (tree s1, tree s2, tree type)
+fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, INTEGER_TYPE))
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
s1, size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
}
form of the builtin function call. */
static tree
-fold_builtin_strrchr (tree s1, tree s2, tree type)
+fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, INTEGER_TYPE))
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
s1, size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
if (! integer_zerop (s2))
return NULL_TREE;
/* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
- return build_call_expr (fn, 2, s1, s2);
+ return build_call_expr_loc (loc, fn, 2, s1, s2);
}
}
form of the builtin function call. */
static tree
-fold_builtin_strpbrk (tree s1, tree s2, tree type)
+fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, POINTER_TYPE))
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
s1, size_int (r - p1));
- return fold_convert (type, tem);
+ return fold_convert_loc (loc, type, tem);
}
if (p2[0] == '\0')
/* strpbrk(x, "") == NULL.
Evaluate and ignore s1 in case it had side-effects. */
- return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
+ return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
if (p2[1] != '\0')
return NULL_TREE; /* Really call strpbrk. */
/* New argument list transforming strpbrk(s1, s2) to
strchr(s1, s2[0]). */
- return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
+ return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
}
}
form of the builtin function call. */
static tree
-fold_builtin_strcat (tree dst, tree src)
+fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
if (!validate_arg (dst, POINTER_TYPE)
|| !validate_arg (src, POINTER_TYPE))
form of the builtin function call. */
static tree
-fold_builtin_strncat (tree dst, tree src, tree len)
+fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
if (!validate_arg (dst, POINTER_TYPE)
|| !validate_arg (src, POINTER_TYPE)
/* If the requested length is zero, or the src parameter string
length is zero, return the dst parameter. */
if (integer_zerop (len) || (p && *p == '\0'))
- return omit_two_operands (TREE_TYPE (dst), dst, src, len);
+ return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
/* If the requested len is greater than or equal to the string
length, call strcat. */
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 2, dst, src);
+ return build_call_expr_loc (loc, fn, 2, dst, src);
}
return NULL_TREE;
}
form of the builtin function call. */
static tree
-fold_builtin_strspn (tree s1, tree s2)
+fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, POINTER_TYPE))
if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
/* Evaluate and ignore both arguments in case either one has
side-effects. */
- return omit_two_operands (integer_type_node, integer_zero_node,
+ return omit_two_operands_loc (loc, size_type_node, size_zero_node,
s1, s2);
return NULL_TREE;
}
form of the builtin function call. */
static tree
-fold_builtin_strcspn (tree s1, tree s2)
+fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
if (!validate_arg (s1, POINTER_TYPE)
|| !validate_arg (s2, POINTER_TYPE))
{
/* Evaluate and ignore argument s2 in case it has
side-effects. */
- return omit_one_operand (integer_type_node,
- integer_zero_node, s2);
+ return omit_one_operand_loc (loc, size_type_node,
+ size_zero_node, s2);
}
/* If the second argument is "", return __builtin_strlen(s1). */
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 1, s1);
+ return build_call_expr_loc (loc, fn, 1, s1);
}
return NULL_TREE;
}
was possible. */
tree
-fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
+fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
+ bool ignore, bool unlocked, tree len)
{
/* If we're using an unlocked function, assume the other unlocked
functions exist explicitly. */
switch (compare_tree_int (len, 1))
{
case -1: /* length is 0, delete the call entirely . */
- return omit_one_operand (integer_type_node, integer_zero_node, arg1);;
+ return omit_one_operand_loc (loc, integer_type_node,
+ integer_zero_node, arg1);;
case 0: /* length is 1, call fputc. */
{
if (p != NULL)
{
if (fn_fputc)
- return build_call_expr (fn_fputc, 2,
+ return build_call_expr_loc (loc, fn_fputc, 2,
build_int_cst (NULL_TREE, p[0]), arg1);
else
return NULL_TREE;
case 1: /* length is greater than 1, call fwrite. */
{
/* If optimizing for size keep fputs. */
- if (optimize_size)
+ if (optimize_function_for_size_p (cfun))
return NULL_TREE;
/* New argument list transforming fputs(string, stream) to
fwrite(string, 1, len, stream). */
if (fn_fwrite)
- return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
+ return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
+ size_one_node, len, arg1);
else
return NULL_TREE;
}
/* Fold the next_arg or va_start call EXP. Returns true if there was an error
produced. False otherwise. This is done so that we don't output the error
or warning twice or three times. */
+
bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
arg = CALL_EXPR_ARG (exp, 0);
}
+ if (TREE_CODE (arg) == SSA_NAME)
+ arg = SSA_NAME_VAR (arg);
+
/* We destructively modify the call to be __builtin_va_start (ap, 0)
or __builtin_next_arg (0) the first time we see it, after checking
the arguments and if needed issuing a warning. */
is not quite the same as STRIP_NOPS. It does more.
We must also strip off INDIRECT_EXPR for C++ reference
parameters. */
- while (TREE_CODE (arg) == NOP_EXPR
- || TREE_CODE (arg) == CONVERT_EXPR
- || TREE_CODE (arg) == NON_LVALUE_EXPR
+ while (CONVERT_EXPR_P (arg)
|| TREE_CODE (arg) == INDIRECT_REF)
arg = TREE_OPERAND (arg, 0);
if (arg != last_parm)
it. */
warning (0, "second parameter of %<va_start%> not last named argument");
}
+
+ /* Undefined by C99 7.15.1.4p4 (va_start):
+ "If the parameter parmN is declared with the register storage
+ class, with a function or array type, or with a type that is
+ not compatible with the type that results after application of
+ the default argument promotions, the behavior is undefined."
+ */
+ else if (DECL_REGISTER (arg))
+ warning (0, "undefined behaviour when second parameter of "
+ "%<va_start%> is declared with %<register%> storage");
+
/* We want to verify the second parameter just once before the tree
optimizers are run and then avoid keeping it in the tree,
as otherwise we could warn even for correct code like:
the caller does not use the returned value of the function. */
static tree
-fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
+fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
+ tree orig, int ignored)
{
tree call, retval;
const char *fmt_str = NULL;
/* Convert sprintf (str, fmt) into strcpy (str, fmt) when
'format' is known to contain no % formats. */
- call = build_call_expr (fn, 2, dest, fmt);
+ call = build_call_expr_loc (loc, fn, 2, dest, fmt);
if (!ignored)
retval = build_int_cst (NULL_TREE, strlen (fmt_str));
}
if (!retval || TREE_CODE (retval) != INTEGER_CST)
return NULL_TREE;
}
- call = build_call_expr (fn, 2, dest, orig);
+ call = build_call_expr_loc (loc, fn, 2, dest, orig);
}
if (call && retval)
{
- retval = fold_convert
- (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
+ retval = fold_convert_loc
+ (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
retval);
return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
}
if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
{
- warning (0, "%Kcall to %D will always overflow destination buffer",
- exp, get_callee_fndecl (exp));
+ warning_at (tree_nonartificial_location (exp),
+ 0, "%Kcall to %D will always overflow destination buffer",
+ exp, get_callee_fndecl (exp));
return NULL_RTX;
}
{
int is_strlen = 0;
tree len, size;
+ location_t loc = tree_nonartificial_location (exp);
switch (fcode)
{
src = c_strlen (src, 1);
if (! src || ! host_integerp (src, 1))
{
- warning (0, "%Kcall to %D might overflow destination buffer",
- exp, get_callee_fndecl (exp));
+ warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
+ exp, get_callee_fndecl (exp));
return;
}
else if (tree_int_cst_lt (src, size))
else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
return;
- warning (0, "%Kcall to %D will always overflow destination buffer",
- exp, get_callee_fndecl (exp));
+ warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
+ exp, get_callee_fndecl (exp));
}
/* Emit warning if a buffer overflow is detected at compile time
return;
if (! tree_int_cst_lt (len, size))
- {
- warning (0, "%Kcall to %D will always overflow destination buffer",
- exp, get_callee_fndecl (exp));
- }
+ warning_at (tree_nonartificial_location (exp),
+ 0, "%Kcall to %D will always overflow destination buffer",
+ exp, get_callee_fndecl (exp));
}
-/* Fold a call to __builtin_object_size with arguments PTR and OST,
- if possible. */
+/* Emit warning if a free is called with address of a variable. */
-tree
-fold_builtin_object_size (tree ptr, tree ost)
+static void
+maybe_emit_free_warning (tree exp)
{
- tree ret = NULL_TREE;
- int object_size_type;
+ tree arg = CALL_EXPR_ARG (exp, 0);
- if (!validate_arg (ptr, POINTER_TYPE)
- || !validate_arg (ost, INTEGER_TYPE))
- return NULL_TREE;
+ /* Only an explicit address-of expression can name a non-heap object. */
+ STRIP_NOPS (arg);
+ if (TREE_CODE (arg) != ADDR_EXPR)
+ return;
+
+ arg = get_base_address (TREE_OPERAND (arg, 0));
+ /* A dereference as the base means we cannot tell what object the
+ address refers to; stay silent in that case. */
+ if (arg == NULL || INDIRECT_REF_P (arg))
+ return;
+
+ /* Name the offending object in the diagnostic when it is a
+ variable (or SSA name for one); otherwise warn generically. */
+ if (SSA_VAR_P (arg))
+ warning_at (tree_nonartificial_location (exp),
+ 0, "%Kattempt to free a non-heap object %qD", exp, arg);
+ else
+ warning_at (tree_nonartificial_location (exp),
+ 0, "%Kattempt to free a non-heap object", exp);
+}
+
+/* Fold a call to __builtin_object_size with arguments PTR and OST,
+ if possible. */
+
+tree
+fold_builtin_object_size (tree ptr, tree ost)
+{
+ tree ret = NULL_TREE;
+ int object_size_type;
+
+ if (!validate_arg (ptr, POINTER_TYPE)
+ || !validate_arg (ost, INTEGER_TYPE))
+ return NULL_TREE;
STRIP_NOPS (ost);
passed as third argument. */
tree
-fold_builtin_memory_chk (tree fndecl,
+fold_builtin_memory_chk (location_t loc, tree fndecl,
tree dest, tree src, tree len, tree size,
tree maxlen, bool ignore,
enum built_in_function fcode)
if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
{
if (fcode != BUILT_IN_MEMPCPY_CHK)
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ dest, len);
else
{
- tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
+ tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
+ dest, len);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
}
}
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 4, dest, src, len, size);
+ return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
}
return NULL_TREE;
}
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, len);
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
/* Fold a call to the __st[rp]cpy_chk builtin.
strings passed as second argument. */
tree
-fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
+fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
+ tree src, tree size,
tree maxlen, bool ignore,
enum built_in_function fcode)
{
/* If SRC and DEST are the same (and not volatile), return DEST. */
if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
if (! host_integerp (size, 1))
return NULL_TREE;
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, size);
+ return build_call_expr_loc (loc, fn, 3, dest, src, size);
}
if (! len || TREE_SIDE_EFFECTS (len))
if (!fn)
return NULL_TREE;
- len = size_binop (PLUS_EXPR, len, ssize_int (1));
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
- build_call_expr (fn, 4,
- dest, src, len, size));
+ len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
+ build_call_expr_loc (loc, fn, 4,
+ dest, src, len, size));
}
}
else
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 2, dest, src);
+ return build_call_expr_loc (loc, fn, 2, dest, src);
}
/* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
length passed as third argument. */
tree
-fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
- tree maxlen)
+fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
+ tree len, tree size, tree maxlen)
{
tree fn;
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, len);
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
are the arguments to the call. */
static tree
-fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
+fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
+ tree src, tree size)
{
tree fn;
const char *p;
p = c_getstr (src);
/* If the SRC parameter is "", return DEST. */
if (p && *p == '\0')
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
if (! host_integerp (size, 1) || ! integer_all_onesp (size))
return NULL_TREE;
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 2, dest, src);
+ return build_call_expr_loc (loc, fn, 2, dest, src);
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
LEN, and SIZE. */
static tree
-fold_builtin_strncat_chk (tree fndecl,
+fold_builtin_strncat_chk (location_t loc, tree fndecl,
tree dest, tree src, tree len, tree size)
{
tree fn;
p = c_getstr (src);
/* If the SRC parameter is "" or if LEN is 0, return DEST. */
if (p && *p == '\0')
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
else if (integer_zerop (len))
- return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
+ return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
if (! host_integerp (size, 1))
return NULL_TREE;
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, size);
+ return build_call_expr_loc (loc, fn, 3, dest, src, size);
}
return NULL_TREE;
}
if (!fn)
return NULL_TREE;
- return build_call_expr (fn, 3, dest, src, len);
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
/* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
static tree
-fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
+fold_builtin_sprintf_chk (location_t loc, tree exp,
+ enum built_in_function fcode)
{
tree dest, size, len, fn, fmt, flag;
const char *fmt_str;
if (!fn)
return NULL_TREE;
- return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
+ return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
}
/* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
passed as second argument. */
tree
-fold_builtin_snprintf_chk (tree exp, tree maxlen,
+fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
enum built_in_function fcode)
{
tree dest, size, len, fn, fmt, flag;
if (!fn)
return NULL_TREE;
- return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
+ return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
code of the function to be simplified. */
static tree
-fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
+fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
+ tree arg, bool ignore,
enum built_in_function fcode)
{
tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
function. */
newarg = build_int_cst (NULL_TREE, str[0]);
if (fn_putchar)
- call = build_call_expr (fn_putchar, 1, newarg);
+ call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
}
else
{
{
/* Create a NUL-terminated string that's one char shorter
than the original, stripping off the trailing '\n'. */
- char *newstr = alloca (len);
+ char *newstr = XALLOCAVEC (char, len);
memcpy (newstr, str, len - 1);
newstr[len - 1] = 0;
newarg = build_string_literal (len, newstr);
if (fn_puts)
- call = build_call_expr (fn_puts, 1, newarg);
+ call = build_call_expr_loc (loc, fn_puts, 1, newarg);
}
else
/* We'd like to arrange to call fputs(string,stdout) here,
if (!arg || !validate_arg (arg, POINTER_TYPE))
return NULL_TREE;
if (fn_puts)
- call = build_call_expr (fn_puts, 1, arg);
+ call = build_call_expr_loc (loc, fn_puts, 1, arg);
}
/* If the format specifier was "%c", call __builtin_putchar(arg). */
if (!arg || !validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
if (fn_putchar)
- call = build_call_expr (fn_putchar, 1, arg);
+ call = build_call_expr_loc (loc, fn_putchar, 1, arg);
}
if (!call)
return NULL_TREE;
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
code of the function to be simplified. */
static tree
-fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
+fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
+ tree fmt, tree arg, bool ignore,
enum built_in_function fcode)
{
tree fn_fputc, fn_fputs, call = NULL_TREE;
fprintf (fp, string) with fputs (string, fp). The fputs
builtin will take care of special cases like length == 1. */
if (fn_fputs)
- call = build_call_expr (fn_fputs, 2, fmt, fp);
+ call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
}
/* The other optimizations can be done only on the non-va_list variants. */
if (!arg || !validate_arg (arg, POINTER_TYPE))
return NULL_TREE;
if (fn_fputs)
- call = build_call_expr (fn_fputs, 2, arg, fp);
+ call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
}
/* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
if (!arg || !validate_arg (arg, INTEGER_TYPE))
return NULL_TREE;
if (fn_fputc)
- call = build_call_expr (fn_fputc, 2, arg, fp);
+ call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
}
if (!call)
return NULL_TREE;
- return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
+ return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
/* Initialize format string characters in the target charset. */
return NULL_TREE;
}
+#ifdef HAVE_mpc
+/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
+ number and no overflow/underflow occurred. INEXACT is true if M
+ was not exactly calculated. TYPE is the tree type for the result.
+ This function assumes that you cleared the MPFR flags and then
+ calculated M to see if anything subsequently set a flag prior to
+ entering this function. Return NULL_TREE if any checks fail. */
+
+static tree
+do_mpc_ckconv (mpc_srcptr m, tree type, int inexact)
+{
+ /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
+ overflow/underflow occurred. If -frounding-math, proceed iff the
+ result of calling FUNC was exact. */
+ if (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
+ && !mpfr_overflow_p () && !mpfr_underflow_p ()
+ && (!flag_rounding_math || !inexact))
+ {
+ REAL_VALUE_TYPE re, im;
+
+ real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
+ real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
+ /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
+ check for overflow/underflow. If the REAL_VALUE_TYPE is zero
+ but the mpfr_t is not, then we underflowed in the
+ conversion. */
+ if (real_isfinite (&re) && real_isfinite (&im)
+ && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
+ && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0))
+ {
+ REAL_VALUE_TYPE re_mode, im_mode;
+
+ real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
+ real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
+ /* Proceed iff the specified mode can hold the value. */
+ if (real_identical (&re_mode, &re) && real_identical (&im_mode, &im))
+ return build_complex (type, build_real (TREE_TYPE (type), re_mode),
+ build_real (TREE_TYPE (type), im_mode));
+ }
+ }
+ return NULL_TREE;
+}
+#endif /* HAVE_mpc */
+
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
FUNC on it and return the resulting value as a tree with type TYPE.
If MIN and/or MAX are not NULL, then the supplied ARG must be
&& (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
&& (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
{
- const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
+ const int prec = fmt->p;
+ const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
int inexact;
mpfr_t m;
mpfr_init2 (m, prec);
mpfr_from_real (m, ra, GMP_RNDN);
mpfr_clear_flags ();
- inexact = func (m, m, GMP_RNDN);
+ inexact = func (m, m, rnd);
result = do_mpfr_ckconv (m, type, inexact);
mpfr_clear (m);
}
if (real_isfinite (ra1) && real_isfinite (ra2))
{
- const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
+ const int prec = fmt->p;
+ const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
int inexact;
mpfr_t m1, m2;
mpfr_from_real (m1, ra1, GMP_RNDN);
mpfr_from_real (m2, ra2, GMP_RNDN);
mpfr_clear_flags ();
- inexact = func (m1, m1, m2, GMP_RNDN);
+ inexact = func (m1, m1, m2, rnd);
result = do_mpfr_ckconv (m1, type, inexact);
mpfr_clears (m1, m2, NULL);
}
if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
{
- const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
+ const int prec = fmt->p;
+ const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
int inexact;
mpfr_t m1, m2, m3;
mpfr_from_real (m2, ra2, GMP_RNDN);
mpfr_from_real (m3, ra3, GMP_RNDN);
mpfr_clear_flags ();
- inexact = func (m1, m1, m2, m3, GMP_RNDN);
+ inexact = func (m1, m1, m2, m3, rnd);
result = do_mpfr_ckconv (m1, type, inexact);
mpfr_clears (m1, m2, m3, NULL);
}
if (real_isfinite (ra))
{
- const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
+ const int prec = fmt->p;
+ const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
tree result_s, result_c;
int inexact;
mpfr_t m, ms, mc;
mpfr_inits2 (prec, m, ms, mc, NULL);
mpfr_from_real (m, ra, GMP_RNDN);
mpfr_clear_flags ();
- inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
+ inexact = mpfr_sin_cos (ms, mc, m, rnd);
result_s = do_mpfr_ckconv (ms, type, inexact);
result_c = do_mpfr_ckconv (mc, type, inexact);
mpfr_clears (m, ms, mc, NULL);
return result;
}
-#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
two-argument mpfr order N Bessel function FUNC on them and return
the resulting value as a tree with type TYPE. The mpfr precision
&& real_isfinite (ra)
&& (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
{
- const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
+ const int prec = fmt->p;
+ const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
int inexact;
mpfr_t m;
mpfr_init2 (m, prec);
mpfr_from_real (m, ra, GMP_RNDN);
mpfr_clear_flags ();
- inexact = func (m, n, m, GMP_RNDN);
+ inexact = func (m, n, m, rnd);
result = do_mpfr_ckconv (m, type, inexact);
mpfr_clear (m);
}
if (real_isfinite (ra0) && real_isfinite (ra1))
{
- const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
+ const int prec = fmt->p;
+ const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
tree result_rem;
long integer_quo;
mpfr_t m0, m1;
mpfr_from_real (m0, ra0, GMP_RNDN);
mpfr_from_real (m1, ra1, GMP_RNDN);
mpfr_clear_flags ();
- mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
+ mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
/* Remquo is independent of the rounding mode, so pass
inexact=0 to do_mpfr_ckconv(). */
result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
&& ra->cl != rvc_zero
&& !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
{
- const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
+ const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
+ const int prec = fmt->p;
+ const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
int inexact, sg;
mpfr_t m;
tree result_lg;
mpfr_init2 (m, prec);
mpfr_from_real (m, ra, GMP_RNDN);
mpfr_clear_flags ();
- inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
+ inexact = mpfr_lgamma (m, &sg, m, rnd);
result_lg = do_mpfr_ckconv (m, type, inexact);
mpfr_clear (m);
if (result_lg)
return result;
}
-#endif
+
+#ifdef HAVE_mpc
+/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
+ function FUNC on it and return the resulting value as a tree with
+ type TYPE. The mpfr precision is set to the precision of TYPE. We
+ assume that function FUNC returns zero if the result could be
+ calculated exactly within the requested precision. */
+
+static tree
+do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
+{
+ tree result = NULL_TREE;
+
+ STRIP_NOPS (arg);
+
+ /* To proceed, MPFR must exactly represent the target floating point
+ format, which only happens when the target base equals two. */
+ if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
+ && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
+ {
+ const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
+ const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
+
+ /* Only fold when both the real and imaginary parts are finite. */
+ if (real_isfinite (re) && real_isfinite (im))
+ {
+ /* Derive the working precision and rounding modes from the
+ real format of TYPE's element type. */
+ const struct real_format *const fmt =
+ REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
+ const int prec = fmt->p;
+ const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
+ const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
+ int inexact;
+ mpc_t m;
+
+ mpc_init2 (m, prec);
+ mpfr_from_real (mpc_realref(m), re, rnd);
+ mpfr_from_real (mpc_imagref(m), im, rnd);
+ mpfr_clear_flags ();
+ /* FUNC computes in place: M is both source and destination. */
+ inexact = func (m, m, crnd);
+ /* NOTE(review): do_mpc_ckconv (defined elsewhere in this file)
+ presumably rejects inexact/overflowed results — confirm. */
+ result = do_mpc_ckconv (m, type, inexact);
+ mpc_clear (m);
+ }
+ }
+
+ return result;
+}
+
+/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
+ mpc function FUNC on it and return the resulting value as a tree
+ with type TYPE. The mpfr precision is set to the precision of
+ TYPE. We assume that function FUNC returns zero if the result
+ could be calculated exactly within the requested precision. */
+
+#ifdef HAVE_mpc_pow
+static tree
+do_mpc_arg2 (tree arg0, tree arg1, tree type,
+ int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
+{
+ tree result = NULL_TREE;
+
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+
+ /* To proceed, MPFR must exactly represent the target floating point
+ format, which only happens when the target base equals two. */
+ if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
+ && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
+ && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
+ && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
+ {
+ const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
+ const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
+ const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
+ const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
+
+ /* Only fold when all four component values are finite. */
+ if (real_isfinite (re0) && real_isfinite (im0)
+ && real_isfinite (re1) && real_isfinite (im1))
+ {
+ /* Derive the working precision and rounding modes from the
+ real format of TYPE's element type. */
+ const struct real_format *const fmt =
+ REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
+ const int prec = fmt->p;
+ const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
+ const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
+ int inexact;
+ mpc_t m0, m1;
+
+ mpc_init2 (m0, prec);
+ mpc_init2 (m1, prec);
+ mpfr_from_real (mpc_realref(m0), re0, rnd);
+ mpfr_from_real (mpc_imagref(m0), im0, rnd);
+ mpfr_from_real (mpc_realref(m1), re1, rnd);
+ mpfr_from_real (mpc_imagref(m1), im1, rnd);
+ mpfr_clear_flags ();
+ /* FUNC computes in place: M0 is both first operand and result. */
+ inexact = func (m0, m0, m1, crnd);
+ result = do_mpc_ckconv (m0, type, inexact);
+ mpc_clear (m0);
+ mpc_clear (m1);
+ }
+ }
+
+ return result;
+}
+# endif
+#endif /* HAVE_mpc */
+
+/* FIXME tuples.
+ The functions below provide an alternate interface for folding
+ builtin function calls presented as GIMPLE_CALL statements rather
+ than as CALL_EXPRs. The folded result is still expressed as a
+ tree. There is too much code duplication in the handling of
+ varargs functions, and a more intrusive re-factoring would permit
+ better sharing of code between the tree and statement-based
+ versions of these functions. */
+
+/* Construct a new CALL_EXPR using the tail of the argument list of STMT
+ along with N new arguments specified as the "..." parameters. SKIP
+ is the number of arguments in STMT to be omitted. This function is used
+ to do varargs-to-varargs transformations. */
+
+static tree
+gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
+{
+ int oldnargs = gimple_call_num_args (stmt);
+ /* The new call has the N "..." arguments followed by STMT's
+ arguments from position SKIP onward. */
+ int nargs = oldnargs - skip + n;
+ tree fntype = TREE_TYPE (fndecl);
+ tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
+ tree *buffer;
+ int i, j;
+ va_list ap;
+ location_t loc = gimple_location (stmt);
+
+ buffer = XALLOCAVEC (tree, nargs);
+ /* First the explicitly supplied replacement arguments... */
+ va_start (ap, n);
+ for (i = 0; i < n; i++)
+ buffer[i] = va_arg (ap, tree);
+ va_end (ap);
+ /* ...then the retained tail of STMT's argument list. */
+ for (j = skip; j < oldnargs; j++, i++)
+ buffer[i] = gimple_call_arg (stmt, j);
+
+ /* Fold the freshly built call so further simplification can occur. */
+ return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
+}
+
+/* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
+ a normal call should be emitted rather than expanding the function
+ inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
+
+static tree
+gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
+{
+ tree dest, size, len, fn, fmt, flag;
+ const char *fmt_str;
+ int nargs = gimple_call_num_args (stmt);
+
+ /* Verify the required arguments in the original call:
+ (dest, flag, size, fmt, ...). */
+ if (nargs < 4)
+ return NULL_TREE;
+ dest = gimple_call_arg (stmt, 0);
+ if (!validate_arg (dest, POINTER_TYPE))
+ return NULL_TREE;
+ flag = gimple_call_arg (stmt, 1);
+ if (!validate_arg (flag, INTEGER_TYPE))
+ return NULL_TREE;
+ size = gimple_call_arg (stmt, 2);
+ if (!validate_arg (size, INTEGER_TYPE))
+ return NULL_TREE;
+ fmt = gimple_call_arg (stmt, 3);
+ if (!validate_arg (fmt, POINTER_TYPE))
+ return NULL_TREE;
+
+ /* SIZE must be a known constant to prove the write fits. */
+ if (! host_integerp (size, 1))
+ return NULL_TREE;
+
+ len = NULL_TREE;
+
+ if (!init_target_chars ())
+ return NULL_TREE;
+
+ /* Check whether the format is a literal string constant. */
+ fmt_str = c_getstr (fmt);
+ if (fmt_str != NULL)
+ {
+ /* If the format doesn't contain % args or %%, we know the size. */
+ if (strchr (fmt_str, target_percent) == 0)
+ {
+ if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
+ len = build_int_cstu (size_type_node, strlen (fmt_str));
+ }
+ /* If the format is "%s" and first ... argument is a string literal,
+ we know the size too. */
+ else if (fcode == BUILT_IN_SPRINTF_CHK
+ && strcmp (fmt_str, target_percent_s) == 0)
+ {
+ tree arg;
+
+ if (nargs == 5)
+ {
+ arg = gimple_call_arg (stmt, 4);
+ if (validate_arg (arg, POINTER_TYPE))
+ {
+ len = c_strlen (arg, 1);
+ if (! len || ! host_integerp (len, 1))
+ len = NULL_TREE;
+ }
+ }
+ }
+ }
+
+ /* An all-ones SIZE means the object size is unknown — presumably the
+ (size_t)-1 convention of __builtin_object_size; otherwise require a
+ proven LEN < SIZE before folding away the check. */
+ if (! integer_all_onesp (size))
+ {
+ if (! len || ! tree_int_cst_lt (len, size))
+ return NULL_TREE;
+ }
+
+ /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
+ or if format doesn't contain % chars or is "%s". */
+ if (! integer_zerop (flag))
+ {
+ if (fmt_str == NULL)
+ return NULL_TREE;
+ if (strchr (fmt_str, target_percent) != NULL
+ && strcmp (fmt_str, target_percent_s))
+ return NULL_TREE;
+ }
+
+ /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
+ fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
+ ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
+ if (!fn)
+ return NULL_TREE;
+
+ /* Drop the flag/size bookkeeping arguments and call the plain
+ function: sprintf (dest, fmt, ...). */
+ return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
+}
+
+/* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
+ a normal call should be emitted rather than expanding the function
+ inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
+ BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
+ passed as second argument. */
+
+tree
+gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
+ enum built_in_function fcode)
+{
+ tree dest, size, len, fn, fmt, flag;
+ const char *fmt_str;
+
+ /* Verify the required arguments in the original call:
+ (dest, len, flag, size, fmt, ...). */
+ if (gimple_call_num_args (stmt) < 5)
+ return NULL_TREE;
+ dest = gimple_call_arg (stmt, 0);
+ if (!validate_arg (dest, POINTER_TYPE))
+ return NULL_TREE;
+ len = gimple_call_arg (stmt, 1);
+ if (!validate_arg (len, INTEGER_TYPE))
+ return NULL_TREE;
+ flag = gimple_call_arg (stmt, 2);
+ if (!validate_arg (flag, INTEGER_TYPE))
+ return NULL_TREE;
+ size = gimple_call_arg (stmt, 3);
+ if (!validate_arg (size, INTEGER_TYPE))
+ return NULL_TREE;
+ fmt = gimple_call_arg (stmt, 4);
+ if (!validate_arg (fmt, POINTER_TYPE))
+ return NULL_TREE;
+
+ /* SIZE must be a known constant to prove the write fits. */
+ if (! host_integerp (size, 1))
+ return NULL_TREE;
+
+ /* An all-ones SIZE means the object size is unknown; otherwise
+ require a constant bound LEN (or MAXLEN) that fits in SIZE. */
+ if (! integer_all_onesp (size))
+ {
+ if (! host_integerp (len, 1))
+ {
+ /* If LEN is not constant, try MAXLEN too.
+ For MAXLEN only allow optimizing into non-_ocs function
+ if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
+ if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
+ return NULL_TREE;
+ }
+ else
+ maxlen = len;
+
+ if (tree_int_cst_lt (size, maxlen))
+ return NULL_TREE;
+ }
+
+ if (!init_target_chars ())
+ return NULL_TREE;
+
+ /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
+ or if format doesn't contain % chars or is "%s". */
+ if (! integer_zerop (flag))
+ {
+ fmt_str = c_getstr (fmt);
+ if (fmt_str == NULL)
+ return NULL_TREE;
+ if (strchr (fmt_str, target_percent) != NULL
+ && strcmp (fmt_str, target_percent_s))
+ return NULL_TREE;
+ }
+
+ /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
+ available. */
+ fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
+ ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
+ if (!fn)
+ return NULL_TREE;
+
+ /* Drop the flag/size bookkeeping arguments and call the plain
+ function: snprintf (dest, len, fmt, ...). */
+ return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
+}
+
+/* Builtins with folding operations that operate on "..." arguments
+ need special handling; we need to store the arguments in a convenient
+ data structure before attempting any folding. Fortunately there are
+ only a few builtins that fall into this category. FNDECL is the
+ function, STMT is the GIMPLE_CALL statement, and IGNORE is true if the
+ result of the function call is ignored. Returns the folded
+ replacement expression, or NULL_TREE if no folding was done. */
+
+static tree
+gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
+ bool ignore ATTRIBUTE_UNUSED)
+{
+ enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
+ tree ret = NULL_TREE;
+
+ switch (fcode)
+ {
+ case BUILT_IN_SPRINTF_CHK:
+ case BUILT_IN_VSPRINTF_CHK:
+ ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
+ break;
+
+ case BUILT_IN_SNPRINTF_CHK:
+ case BUILT_IN_VSNPRINTF_CHK:
+ ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
+ break;
+
+ default:
+ break;
+ }
+ if (ret)
+ {
+ /* Wrap the result in a NOP_EXPR with TREE_NO_WARNING set so that
+ removing the original call does not trigger "statement with no
+ effect" style diagnostics. */
+ ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
+ TREE_NO_WARNING (ret) = 1;
+ return ret;
+ }
+ return NULL_TREE;
+}
+
+/* A wrapper function for builtin folding that prevents warnings for
+ "statement without effect" and the like, caused by removing the
+ call node earlier than the warning is generated. */
+
+tree
+fold_call_stmt (gimple stmt, bool ignore)
+{
+ tree ret = NULL_TREE;
+ tree fndecl = gimple_call_fndecl (stmt);
+ location_t loc = gimple_location (stmt);
+ /* Only fold direct calls to builtins, and never calls that forward
+ a __builtin_va_arg_pack. */
+ if (fndecl
+ && TREE_CODE (fndecl) == FUNCTION_DECL
+ && DECL_BUILT_IN (fndecl)
+ && !gimple_call_va_arg_pack_p (stmt))
+ {
+ int nargs = gimple_call_num_args (stmt);
+
+ if (avoid_folding_inline_builtin (fndecl))
+ return NULL_TREE;
+ /* FIXME: Don't use a list in this interface. */
+ if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
+ {
+ /* Machine-dependent builtins are delegated to the target hook,
+ which takes its arguments as a TREE_LIST (built in reverse so
+ the list ends up in call order). */
+ tree arglist = NULL_TREE;
+ int i;
+ for (i = nargs - 1; i >= 0; i--)
+ arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
+ return targetm.fold_builtin (fndecl, arglist, ignore);
+ }
+ else
+ {
+ /* Try the fixed-arity folder first, then the varargs folder. */
+ if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
+ {
+ tree args[MAX_ARGS_TO_FOLD_BUILTIN];
+ int i;
+ for (i = 0; i < nargs; i++)
+ args[i] = gimple_call_arg (stmt, i);
+ ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
+ }
+ if (!ret)
+ ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
+ if (ret)
+ {
+ /* Propagate location information from original call to
+ expansion of builtin. Otherwise things like
+ maybe_emit_chk_warning, that operate on the expansion
+ of a builtin, will use the wrong location information. */
+ if (gimple_has_location (stmt))
+ {
+ tree realret = ret;
+ /* Look through the no-warning NOP_EXPR wrapper added by
+ the folders before attaching the location. */
+ if (TREE_CODE (ret) == NOP_EXPR)
+ realret = TREE_OPERAND (ret, 0);
+ if (CAN_HAVE_LOCATION_P (realret)
+ && !EXPR_HAS_LOCATION (realret))
+ SET_EXPR_LOCATION (realret, loc);
+ return realret;
+ }
+ return ret;
+ }
+ }
+ }
+ return NULL_TREE;
+}