static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
-static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
+static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
+static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
-static rtx expand_builtin_cexpi (tree, rtx, rtx);
+static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
-static rtx expand_builtin_alloca (tree, rtx);
+static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
}
/* Return the alignment in bits of EXP, an object.
- Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
- guessed alignment e.g. from type alignment. */
+ Don't return more than MAX_ALIGN no matter what. */
-int
-get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
+unsigned int
+get_object_alignment (tree exp, unsigned int max_align)
{
- unsigned int inner;
-
- inner = max_align;
- if (handled_component_p (exp))
- {
- HOST_WIDE_INT bitsize, bitpos;
- tree offset;
- enum machine_mode mode;
- int unsignedp, volatilep;
-
- exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, true);
- if (bitpos)
- inner = MIN (inner, (unsigned) (bitpos & -bitpos));
- while (offset)
- {
- tree next_offset;
+ HOST_WIDE_INT bitsize, bitpos;
+ tree offset;
+ enum machine_mode mode;
+ int unsignedp, volatilep;
+ unsigned int align, inner;
- if (TREE_CODE (offset) == PLUS_EXPR)
- {
- next_offset = TREE_OPERAND (offset, 0);
- offset = TREE_OPERAND (offset, 1);
- }
- else
- next_offset = NULL;
- if (host_integerp (offset, 1))
- {
- /* Any overflow in calculating offset_bits won't change
- the alignment. */
- unsigned offset_bits
- = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
+ /* Get the innermost object and the constant (bitpos) and possibly
+ variable (offset) offset of the access. */
+ exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
+ &mode, &unsignedp, &volatilep, true);
- if (offset_bits)
- inner = MIN (inner, (offset_bits & -offset_bits));
- }
- else if (TREE_CODE (offset) == MULT_EXPR
- && host_integerp (TREE_OPERAND (offset, 1), 1))
- {
- /* Any overflow in calculating offset_factor won't change
- the alignment. */
- unsigned offset_factor
- = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
- * BITS_PER_UNIT);
-
- if (offset_factor)
- inner = MIN (inner, (offset_factor & -offset_factor));
- }
- else
- {
- inner = MIN (inner, BITS_PER_UNIT);
- break;
- }
- offset = next_offset;
- }
- }
+ /* Extract alignment information from the innermost object and
+ possibly adjust bitpos and offset. */
if (TREE_CODE (exp) == CONST_DECL)
exp = DECL_INITIAL (exp);
if (DECL_P (exp)
&& TREE_CODE (exp) != LABEL_DECL)
- align = MIN (inner, DECL_ALIGN (exp));
-#ifdef CONSTANT_ALIGNMENT
+ align = DECL_ALIGN (exp);
else if (CONSTANT_CLASS_P (exp))
- align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
+ {
+ align = TYPE_ALIGN (TREE_TYPE (exp));
+#ifdef CONSTANT_ALIGNMENT
+ align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
#endif
- else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
- || TREE_CODE (exp) == INDIRECT_REF)
- align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
+ }
+ else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
+ align = TYPE_ALIGN (TREE_TYPE (exp));
+ else if (TREE_CODE (exp) == INDIRECT_REF)
+ align = TYPE_ALIGN (TREE_TYPE (exp));
+ else if (TREE_CODE (exp) == MEM_REF)
+ {
+ tree addr = TREE_OPERAND (exp, 0);
+ struct ptr_info_def *pi;
+ if (TREE_CODE (addr) == BIT_AND_EXPR
+ && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
+ {
+ align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
+ & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
+ align *= BITS_PER_UNIT;
+ addr = TREE_OPERAND (addr, 0);
+ }
+ else
+ align = BITS_PER_UNIT;
+ if (TREE_CODE (addr) == SSA_NAME
+ && (pi = SSA_NAME_PTR_INFO (addr)))
+ {
+ bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
+ align = MAX (pi->align * BITS_PER_UNIT, align);
+ }
+ else if (TREE_CODE (addr) == ADDR_EXPR)
+ align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
+ max_align));
+ bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
+ }
+ else if (TREE_CODE (exp) == TARGET_MEM_REF)
+ {
+ struct ptr_info_def *pi;
+ tree addr = TMR_BASE (exp);
+ if (TREE_CODE (addr) == BIT_AND_EXPR
+ && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
+ {
+ align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
+ & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
+ align *= BITS_PER_UNIT;
+ addr = TREE_OPERAND (addr, 0);
+ }
+ else
+ align = BITS_PER_UNIT;
+ if (TREE_CODE (addr) == SSA_NAME
+ && (pi = SSA_NAME_PTR_INFO (addr)))
+ {
+ bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
+ align = MAX (pi->align * BITS_PER_UNIT, align);
+ }
+ else if (TREE_CODE (addr) == ADDR_EXPR)
+ align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
+ max_align));
+ if (TMR_OFFSET (exp))
+ bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
+ if (TMR_INDEX (exp) && TMR_STEP (exp))
+ {
+ unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
+ align = MIN (align, (step & -step) * BITS_PER_UNIT);
+ }
+ else if (TMR_INDEX (exp))
+ align = BITS_PER_UNIT;
+ if (TMR_INDEX2 (exp))
+ align = BITS_PER_UNIT;
+ }
else
- align = MIN (align, inner);
+ align = BITS_PER_UNIT;
+
+ /* If there is a non-constant offset part extract the maximum
+ alignment that can prevail. */
+ inner = max_align;
+ while (offset)
+ {
+ tree next_offset;
+
+ if (TREE_CODE (offset) == PLUS_EXPR)
+ {
+ next_offset = TREE_OPERAND (offset, 0);
+ offset = TREE_OPERAND (offset, 1);
+ }
+ else
+ next_offset = NULL;
+ if (host_integerp (offset, 1))
+ {
+ /* Any overflow in calculating offset_bits won't change
+ the alignment. */
+ unsigned offset_bits
+ = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
+
+ if (offset_bits)
+ inner = MIN (inner, (offset_bits & -offset_bits));
+ }
+ else if (TREE_CODE (offset) == MULT_EXPR
+ && host_integerp (TREE_OPERAND (offset, 1), 1))
+ {
+ /* Any overflow in calculating offset_factor won't change
+ the alignment. */
+ unsigned offset_factor
+ = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
+ * BITS_PER_UNIT);
+
+ if (offset_factor)
+ inner = MIN (inner, (offset_factor & -offset_factor));
+ }
+ else
+ {
+ inner = MIN (inner, BITS_PER_UNIT);
+ break;
+ }
+ offset = next_offset;
+ }
+
+ /* Alignment is innermost object alignment adjusted by the constant
+ and non-constant offset parts. */
+ align = MIN (align, inner);
+ bitpos = bitpos & (align - 1);
+
+ /* align and bitpos now specify known low bits of the pointer.
+ ptr & (align - 1) == bitpos. */
+
+ if (bitpos != 0)
+ align = (bitpos & -bitpos);
+
return MIN (align, max_align);
}
Otherwise, look at the expression to see if we can do better, i.e., if the
expression is actually pointing at an object whose alignment is tighter. */
-int
+unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
- unsigned int align, inner;
-
- if (!can_trust_pointer_alignment ())
- return 0;
-
- if (!POINTER_TYPE_P (TREE_TYPE (exp)))
- return 0;
-
- align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
- align = MIN (align, max_align);
-
- while (1)
- {
- switch (TREE_CODE (exp))
- {
- CASE_CONVERT:
- exp = TREE_OPERAND (exp, 0);
- if (! POINTER_TYPE_P (TREE_TYPE (exp)))
- return align;
-
- inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
- align = MIN (inner, max_align);
- break;
-
- case POINTER_PLUS_EXPR:
- /* If sum of pointer + int, restrict our maximum alignment to that
- imposed by the integer. If not, we can't do any better than
- ALIGN. */
- if (! host_integerp (TREE_OPERAND (exp, 1), 1))
- return align;
-
- while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
- & (max_align / BITS_PER_UNIT - 1))
- != 0)
- max_align >>= 1;
-
- exp = TREE_OPERAND (exp, 0);
- break;
-
- case ADDR_EXPR:
- /* See what we are pointing at and look at its alignment. */
- return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
+ STRIP_NOPS (exp);
- default:
- return align;
- }
+ if (TREE_CODE (exp) == ADDR_EXPR)
+ return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
+ else if (TREE_CODE (exp) == SSA_NAME
+ && POINTER_TYPE_P (TREE_TYPE (exp)))
+ {
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
+ unsigned align;
+ if (!pi)
+ return BITS_PER_UNIT;
+ if (pi->misalign != 0)
+ align = (pi->misalign & -pi->misalign);
+ else
+ align = pi->align;
+ return MIN (max_align, align * BITS_PER_UNIT);
}
+
+ return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
}
/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
&& (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
return c_strlen (TREE_OPERAND (src, 1), only_value);
- if (EXPR_HAS_LOCATION (src))
- loc = EXPR_LOCATION (src);
- else
- loc = input_location;
+ loc = EXPR_LOC_OR_HERE (src);
src = string_constant (src, &offset_node);
if (src == 0)
emit_clobber (hard_frame_pointer_rtx);
}
-#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+#if !HARD_FRAME_POINTER_IS_ARG_POINTER
if (fixed_regs[ARG_POINTER_REGNUM])
{
#ifdef ELIMINABLE_REGS
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (FUNCTION_ARG_REGNO_P (regno))
{
- mode = reg_raw_mode[regno];
+ mode = targetm.calls.get_raw_arg_mode (regno);
gcc_assert (mode != VOIDmode);
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (targetm.calls.function_value_regno_p (regno))
{
- mode = reg_raw_mode[regno];
+ mode = targetm.calls.get_raw_result_mode (regno);
gcc_assert (mode != VOIDmode);
emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
/* Allocate a block of memory onto the stack and copy the memory
- arguments to the outgoing arguments address. */
- allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
+ arguments to the outgoing arguments address. We can pass TRUE
+ as the 4th argument because we just saved the stack pointer
+ and will restore it right after the call. */
+ allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
/* Set DRAP flag to true, even though allocate_dynamic_stack_space
may have already set current_function_calls_alloca to true.
errno_set = false;
/* Before working hard, check whether the instruction is available. */
- if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
+ if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
+ && (!errno_set || !optimize_insn_for_size_p ()))
{
target = gen_reg_rtx (mode);
if (! flag_errno_math || ! HONOR_NANS (mode))
errno_set = false;
+ if (errno_set && optimize_insn_for_size_p ())
+ return 0;
+
/* Always stabilize the argument list. */
CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
return target;
}
+/* Expand a call to the builtin ternary math functions (fma).
+ Return NULL_RTX if a normal call should be emitted rather than expanding the
+ function in-line. EXP is the expression that is a call to the builtin
+ function; if convenient, the result should be placed in TARGET.
+ SUBTARGET may be used as the target for computing one of EXP's
+ operands. */
+
+static rtx
+expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
+{
+ optab builtin_optab;
+ rtx op0, op1, op2, insns;
+ tree fndecl = get_callee_fndecl (exp);
+ tree arg0, arg1, arg2;
+ enum machine_mode mode;
+
+ if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
+ return NULL_RTX;
+
+ arg0 = CALL_EXPR_ARG (exp, 0);
+ arg1 = CALL_EXPR_ARG (exp, 1);
+ arg2 = CALL_EXPR_ARG (exp, 2);
+
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ CASE_FLT_FN (BUILT_IN_FMA):
+ builtin_optab = fma_optab; break;
+ default:
+ gcc_unreachable ();
+ }
+
+ /* Make a suitable register to place result in. */
+ mode = TYPE_MODE (TREE_TYPE (exp));
+
+ /* Before working hard, check whether the instruction is available. */
+ if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
+ return NULL_RTX;
+
+ target = gen_reg_rtx (mode);
+
+ /* Always stabilize the argument list. */
+ CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
+ CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
+ CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
+
+ op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
+ op1 = expand_normal (arg1);
+ op2 = expand_normal (arg2);
+
+ start_sequence ();
+
+ /* Compute into TARGET.
+ Set TARGET to wherever the result comes back. */
+ target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
+ target, 0);
+
+ /* If we were unable to expand via the builtin, stop the sequence
+ (without outputting the insns) and call to the library function
+ with the stabilized argument list. */
+ if (target == 0)
+ {
+ end_sequence ();
+ return expand_call (exp, target, target == const0_rtx);
+ }
+
+ /* Output the entire sequence. */
+ insns = get_insns ();
+ end_sequence ();
+ emit_insn (insns);
+
+ return target;
+}
+
/* Expand a call to the builtin sin and cos math functions.
Return NULL_RTX if a normal call should be emitted rather than expanding the
function in-line. EXP is the expression that is a call to the builtin
isnan, etc).
Return 0 if a normal call should be emitted rather than expanding the
function in-line. EXP is the expression that is a call to the builtin
- function; if convenient, the result should be placed in TARGET.
- SUBTARGET may be used as the target for computing one of EXP's operands. */
+ function; if convenient, the result should be placed in TARGET. */
static rtx
-expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
+expand_builtin_interclass_mathfn (tree exp, rtx target)
{
enum insn_code icode = CODE_FOR_nothing;
rtx op0;
side-effects more the once. */
CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
- op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
+ op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
if (mode != GET_MODE (op0))
op0 = convert_to_mode (mode, op0, 0);
tree arg, sinp, cosp;
int result;
location_t loc = EXPR_LOCATION (exp);
+ tree alias_type, alias_off;
if (!validate_arglist (exp, REAL_TYPE,
POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
target2 = gen_reg_rtx (mode);
op0 = expand_normal (arg);
- op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
- op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
+ alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
+ alias_off = build_int_cst (alias_type, 0);
+ op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
+ sinp, alias_off));
+ op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
+ cosp, alias_off));
/* Compute into target1 and target2.
Set TARGET to wherever the result comes back. */
/* Expand a call to the internal cexpi builtin to the sincos math function.
EXP is the expression that is a call to the builtin function; if convenient,
- the result should be placed in TARGET. SUBTARGET may be used as the target
- for computing one of EXP's operands. */
+ the result should be placed in TARGET. */
static rtx
-expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
+expand_builtin_cexpi (tree exp, rtx target)
{
tree fndecl = get_callee_fndecl (exp);
tree arg, type;
op1 = gen_reg_rtx (mode);
op2 = gen_reg_rtx (mode);
- op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
+ op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
/* Compute into op1 and op2. */
expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
function; if convenient, the result should be placed in TARGET. */
static rtx
-expand_builtin_powi (tree exp, rtx target, rtx subtarget)
+expand_builtin_powi (tree exp, rtx target)
{
tree arg0, arg1;
rtx op0, op1;
|| (optimize_insn_for_speed_p ()
&& powi_cost (n) <= POWI_MAX_MULTS)))
{
- op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
+ op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
op0 = force_reg (mode, op0);
return expand_powi (op0, mode, n);
}
if (target == NULL_RTX)
target = gen_reg_rtx (mode);
- op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
+ op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
if (GET_MODE (op0) != mode)
op0 = convert_to_mode (mode, op0, 0);
op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
rtx result, src_reg, char_rtx, before_strlen;
enum machine_mode insn_mode = target_mode, char_mode;
enum insn_code icode = CODE_FOR_nothing;
- int align;
+ unsigned int align;
/* If the length can be computed at compile-time, return it. */
len = c_strlen (src, 0);
tree arg2 = CALL_EXPR_ARG (exp, 1);
tree len = CALL_EXPR_ARG (exp, 2);
- int arg1_align
+ unsigned int arg1_align
= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- int arg2_align
+ unsigned int arg2_align
= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
enum machine_mode insn_mode;
tree arg1 = CALL_EXPR_ARG (exp, 0);
tree arg2 = CALL_EXPR_ARG (exp, 1);
- int arg1_align
+ unsigned int arg1_align
= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- int arg2_align
+ unsigned int arg2_align
= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
/* If we don't have POINTER_TYPE, call the function. */
tree arg2 = CALL_EXPR_ARG (exp, 1);
tree arg3 = CALL_EXPR_ARG (exp, 2);
- int arg1_align
+ unsigned int arg1_align
= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- int arg2_align
+ unsigned int arg2_align
= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
enum machine_mode insn_mode
= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
type = build_pointer_type (type);
align = PARM_BOUNDARY / BITS_PER_UNIT;
- boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
+ boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
/* When we align parameter on stack for caller, if the parameter
alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
dummy_object (tree type)
{
tree t = build_int_cst (build_pointer_type (type), 0);
- return build1 (INDIRECT_REF, type, t);
+ return build2 (MEM_REF, type, t, t);
}
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
}
}
-/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
- we failed and the caller should emit a normal call, otherwise try to get
- the result in TARGET, if convenient. */
+/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
+ failed and the caller should emit a normal call. CANNOT_ACCUMULATE
+ is the same as for allocate_dynamic_stack_space. */
static rtx
-expand_builtin_alloca (tree exp, rtx target)
+expand_builtin_alloca (tree exp, bool cannot_accumulate)
{
rtx op0;
rtx result;
op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
/* Allocate the desired space. */
- result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
+ result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
+ cannot_accumulate);
result = convert_memory_address (ptr_mode, result);
return result;
return NULL_RTX;
/* Compute the argument. */
- op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
+ op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
+ (subtarget
+ && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
+ == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
VOIDmode, EXPAND_NORMAL);
/* Compute op, into TARGET if possible.
Set TARGET to wherever the result comes back. */
satisfy the full barrier semantics of the intrinsic. */
mem = validize_mem (gen_rtx_MEM (mode, addr));
- set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
+ /* The alignment needs to be at least according to that of the mode. */
+ set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
+ get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
MEM_VOLATILE_P (mem) = 1;
tree fndecl = get_callee_fndecl (exp);
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
+ int flags;
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
none of its arguments are volatile, we can avoid expanding the
built-in call and just evaluate the arguments for side-effects. */
if (target == const0_rtx
- && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
+ && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
+ && !(flags & ECF_LOOPING_CONST_OR_PURE))
{
bool volatilep = false;
tree arg;
return target;
break;
+ CASE_FLT_FN (BUILT_IN_FMA):
+ target = expand_builtin_mathfn_ternary (exp, target, subtarget);
+ if (target)
+ return target;
+ break;
+
CASE_FLT_FN (BUILT_IN_ILOGB):
if (! flag_unsafe_math_optimizations)
break;
CASE_FLT_FN (BUILT_IN_FINITE):
case BUILT_IN_ISFINITE:
case BUILT_IN_ISNORMAL:
- target = expand_builtin_interclass_mathfn (exp, target, subtarget);
+ target = expand_builtin_interclass_mathfn (exp, target);
if (target)
return target;
break;
break;
CASE_FLT_FN (BUILT_IN_POWI):
- target = expand_builtin_powi (exp, target, subtarget);
+ target = expand_builtin_powi (exp, target);
if (target)
return target;
break;
break;
CASE_FLT_FN (BUILT_IN_CEXPI):
- target = expand_builtin_cexpi (exp, target, subtarget);
+ target = expand_builtin_cexpi (exp, target);
gcc_assert (target);
return target;
return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
case BUILT_IN_ALLOCA:
- target = expand_builtin_alloca (exp, target);
+ /* If the allocation stems from the declaration of a variable-sized
+ object, it cannot accumulate. */
+ target = expand_builtin_alloca (exp, ALLOCA_FOR_VAR_P (exp));
if (target)
return target;
break;
{
CASE_INT_FN (BUILT_IN_FFS):
if (lo != 0)
- result = exact_log2 (lo & -lo) + 1;
+ result = ffs_hwi (lo);
else if (hi != 0)
- result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
+ result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
else
result = 0;
break;
CASE_INT_FN (BUILT_IN_CTZ):
if (lo != 0)
- result = exact_log2 (lo & -lo);
+ result = ctz_hwi (lo);
else if (hi != 0)
- result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
+ result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
result = width;
break;
while (lo)
result++, lo &= lo - 1;
while (hi)
- result++, hi &= hi - 1;
+ result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
break;
CASE_INT_FN (BUILT_IN_PARITY):
while (lo)
result++, lo &= lo - 1;
while (hi)
- result++, hi &= hi - 1;
+ result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
result &= 1;
break;
length = tree_low_cst (len, 1);
if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
|| get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
- < (int) length)
+ < length)
return NULL_TREE;
if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
else
{
tree srctype, desttype;
- int src_align, dest_align;
+ unsigned int src_align, dest_align;
tree off0;
if (endp == 3)
if (readonly_data_expr (src)
|| (host_integerp (len, 1)
&& (MIN (src_align, dest_align) / BITS_PER_UNIT
- >= tree_low_cst (len, 1))))
+ >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
{
tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
if (!fn)
return NULL_TREE;
return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
+
+ /* If the destination and source do not alias optimize into
+ memcpy as well. */
+ if ((is_gimple_min_invariant (dest)
+ || TREE_CODE (dest) == SSA_NAME)
+ && (is_gimple_min_invariant (src)
+ || TREE_CODE (src) == SSA_NAME))
+ {
+ ao_ref destr, srcr;
+ ao_ref_init_from_ptr_and_size (&destr, dest, len);
+ ao_ref_init_from_ptr_and_size (&srcr, src, len);
+ if (!refs_may_alias_p_1 (&destr, &srcr, false))
+ {
+ tree fn;
+ fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
+ if (!fn)
+ return NULL_TREE;
+ return build_call_expr_loc (loc, fn, 3, dest, src, len);
+ }
+ }
+
return NULL_TREE;
}
src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
- if (dest_align < (int) TYPE_ALIGN (desttype)
- || src_align < (int) TYPE_ALIGN (srctype))
+ if (dest_align < TYPE_ALIGN (desttype)
+ || src_align < TYPE_ALIGN (srctype))
return NULL_TREE;
if (!ignore)
STRIP_NOPS (srcvar);
if (TREE_CODE (srcvar) == ADDR_EXPR
&& var_decl_component_p (TREE_OPERAND (srcvar, 0))
- && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
- && (!STRICT_ALIGNMENT
- || !destvar
- || src_align >= (int) TYPE_ALIGN (desttype)))
- srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
- srcvar, off0);
+ && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
+ {
+ if (!destvar
+ || src_align >= TYPE_ALIGN (desttype))
+ srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
+ srcvar, off0);
+ else if (!STRICT_ALIGNMENT)
+ {
+ srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
+ src_align);
+ srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
+ }
+ else
+ srcvar = NULL_TREE;
+ }
else
srcvar = NULL_TREE;
if (srcvar == NULL_TREE)
{
- if (STRICT_ALIGNMENT
- && src_align < (int) TYPE_ALIGN (desttype))
- return NULL_TREE;
STRIP_NOPS (src);
- srcvar = fold_build2 (MEM_REF, desttype, src, off0);
+ if (src_align >= TYPE_ALIGN (desttype))
+ srcvar = fold_build2 (MEM_REF, desttype, src, off0);
+ else
+ {
+ if (STRICT_ALIGNMENT)
+ return NULL_TREE;
+ srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
+ src_align);
+ srcvar = fold_build2 (MEM_REF, srctype, src, off0);
+ }
}
else if (destvar == NULL_TREE)
{
- if (STRICT_ALIGNMENT
- && dest_align < (int) TYPE_ALIGN (srctype))
- return NULL_TREE;
STRIP_NOPS (dest);
- destvar = fold_build2 (MEM_REF, srctype, dest, off0);
+ if (dest_align >= TYPE_ALIGN (srctype))
+ destvar = fold_build2 (MEM_REF, srctype, dest, off0);
+ else
+ {
+ if (STRICT_ALIGNMENT)
+ return NULL_TREE;
+ desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
+ dest_align);
+ destvar = fold_build2 (MEM_REF, desttype, dest, off0);
+ }
}
expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
static tree
fold_builtin_signbit (location_t loc, tree arg, tree type)
{
- tree temp;
-
if (!validate_arg (arg, REAL_TYPE))
return NULL_TREE;
REAL_VALUE_TYPE c;
c = TREE_REAL_CST (arg);
- temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
- return fold_convert_loc (loc, type, temp);
+ return (REAL_VALUE_NEGATIVE (c)
+ ? build_one_cst (type)
+ : build_zero_cst (type));
}
/* If ARG is non-negative, the result is always zero. */
return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
+/* Fold a fma operation with arguments ARG[012]. */
+
+tree
+fold_fma (location_t loc ATTRIBUTE_UNUSED,
+ tree type, tree arg0, tree arg1, tree arg2)
+{
+ if (TREE_CODE (arg0) == REAL_CST
+ && TREE_CODE (arg1) == REAL_CST
+ && TREE_CODE (arg2) == REAL_CST)
+ return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
+
+ return NULL_TREE;
+}
+
+/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
+
+static tree
+fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
+{
+ if (validate_arg (arg0, REAL_TYPE)
+ && validate_arg (arg1, REAL_TYPE)
+ && validate_arg (arg2, REAL_TYPE))
+ {
+ tree tem = fold_fma (loc, type, arg0, arg1, arg2);
+ if (tem)
+ return tem;
+
+ /* ??? Only expand to FMA_EXPR if it's directly supported. */
+ if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
+ return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
+ }
+ return NULL_TREE;
+}
+
/* Fold a call to builtin fmin or fmax. */
static tree
return fold_builtin_sincos (loc, arg0, arg1, arg2);
CASE_FLT_FN (BUILT_IN_FMA):
- if (validate_arg (arg0, REAL_TYPE)
- && validate_arg(arg1, REAL_TYPE)
- && validate_arg(arg2, REAL_TYPE))
- return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
+ return fold_builtin_fma (loc, arg0, arg1, arg2, type);
break;
CASE_FLT_FN (BUILT_IN_REMQUO):
return build_call_array_loc (loc, type, fn, n, argarray);
}
-/* Construct a new CALL_EXPR using the tail of the argument list of EXP
- along with N new arguments specified as the "..." parameters. SKIP
- is the number of arguments in EXP to be omitted. This function is used
- to do varargs-to-varargs transformations. */
+/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
+ list ARGS along with N new arguments in NEWARGS. SKIP is the number
+ of arguments in ARGS to be omitted. OLDNARGS is the number of
+ elements in ARGS. */
static tree
-rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
+rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
+ int skip, tree fndecl, int n, va_list newargs)
{
- int oldnargs = call_expr_nargs (exp);
int nargs = oldnargs - skip + n;
- tree fntype = TREE_TYPE (fndecl);
- tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
tree *buffer;
if (n > 0)
{
int i, j;
- va_list ap;
buffer = XALLOCAVEC (tree, nargs);
- va_start (ap, n);
for (i = 0; i < n; i++)
- buffer[i] = va_arg (ap, tree);
- va_end (ap);
+ buffer[i] = va_arg (newargs, tree);
for (j = skip; j < oldnargs; j++, i++)
- buffer[i] = CALL_EXPR_ARG (exp, j);
+ buffer[i] = args[j];
}
else
- buffer = CALL_EXPR_ARGP (exp) + skip;
+ buffer = args + skip;
- return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
+ return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
+}
+
+/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
+ list ARGS along with N new arguments specified as the "..."
+ parameters. SKIP is the number of arguments in ARGS to be omitted.
+ OLDNARGS is the number of elements in ARGS. */
+
+static tree
+rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
+ int skip, tree fndecl, int n, ...)
+{
+ va_list ap;
+ tree t;
+
+ va_start (ap, n);
+ t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
+ va_end (ap);
+
+ return t;
+}
+
+/* Construct a new CALL_EXPR using the tail of the argument list of EXP
+ along with N new arguments specified as the "..." parameters. SKIP
+ is the number of arguments in EXP to be omitted. This function is used
+ to do varargs-to-varargs transformations. */
+
+static tree
+rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
+{
+ va_list ap;
+ tree t;
+
+ va_start (ap, n);
+ t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
+ CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
+ va_end (ap);
+
+ return t;
}
/* Validate a single argument ARG against a tree code CODE representing
int nargs = call_expr_nargs (exp);
tree arg;
- if (TYPE_ARG_TYPES (fntype) == 0
- || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
- == void_type_node))
+ if (!stdarg_p (fntype))
{
error ("%<va_start%> used in function with fixed args");
return true;
return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
-/* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
- a normal call should be emitted rather than expanding the function
- inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
+/* Fold a call to __{,v}sprintf_chk having NARGS passed as ARGS.
+ Return NULL_TREE if a normal call should be emitted rather than
+ expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
+ or BUILT_IN_VSPRINTF_CHK. */
static tree
-fold_builtin_sprintf_chk (location_t loc, tree exp,
- enum built_in_function fcode)
+fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
+ enum built_in_function fcode)
{
tree dest, size, len, fn, fmt, flag;
const char *fmt_str;
- int nargs = call_expr_nargs (exp);
/* Verify the required arguments in the original call. */
if (nargs < 4)
return NULL_TREE;
- dest = CALL_EXPR_ARG (exp, 0);
+ dest = args[0];
if (!validate_arg (dest, POINTER_TYPE))
return NULL_TREE;
- flag = CALL_EXPR_ARG (exp, 1);
+ flag = args[1];
if (!validate_arg (flag, INTEGER_TYPE))
return NULL_TREE;
- size = CALL_EXPR_ARG (exp, 2);
+ size = args[2];
if (!validate_arg (size, INTEGER_TYPE))
return NULL_TREE;
- fmt = CALL_EXPR_ARG (exp, 3);
+ fmt = args[3];
if (!validate_arg (fmt, POINTER_TYPE))
return NULL_TREE;
if (nargs == 5)
{
- arg = CALL_EXPR_ARG (exp, 4);
+ arg = args[4];
if (validate_arg (arg, POINTER_TYPE))
{
len = c_strlen (arg, 1);
if (!fn)
return NULL_TREE;
- return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
+ return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
}
-/* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
+/* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
a normal call should be emitted rather than expanding the function
- inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
+ inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
+
+static tree
+fold_builtin_sprintf_chk (location_t loc, tree exp,
+ enum built_in_function fcode)
+{
+ /* Thin wrapper: hand the CALL_EXPR's flat argument array to the
+ array-based worker.  */
+ return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
+ CALL_EXPR_ARGP (exp), fcode);
+}
+
+/* Fold a call to __{,v}snprintf_chk having NARGS passed as ARGS.  Return
+ NULL_TREE if a normal call should be emitted rather than expanding
+ the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
passed as second argument. */
-tree
-fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
- enum built_in_function fcode)
+static tree
+fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
+ tree maxlen, enum built_in_function fcode)
{
tree dest, size, len, fn, fmt, flag;
const char *fmt_str;
/* Verify the required arguments in the original call. */
- if (call_expr_nargs (exp) < 5)
+ if (nargs < 5)
return NULL_TREE;
- dest = CALL_EXPR_ARG (exp, 0);
+ dest = args[0];
if (!validate_arg (dest, POINTER_TYPE))
return NULL_TREE;
- len = CALL_EXPR_ARG (exp, 1);
+ len = args[1];
if (!validate_arg (len, INTEGER_TYPE))
return NULL_TREE;
- flag = CALL_EXPR_ARG (exp, 2);
+ flag = args[2];
if (!validate_arg (flag, INTEGER_TYPE))
return NULL_TREE;
- size = CALL_EXPR_ARG (exp, 3);
+ size = args[3];
if (!validate_arg (size, INTEGER_TYPE))
return NULL_TREE;
- fmt = CALL_EXPR_ARG (exp, 4);
+ fmt = args[4];
if (!validate_arg (fmt, POINTER_TYPE))
return NULL_TREE;
if (!fn)
return NULL_TREE;
- return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
+ return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
+}
+
+/* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
+ a normal call should be emitted rather than expanding the function
+ inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
+ BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
+ passed as second argument. */
+
+tree
+fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
+ enum built_in_function fcode)
+{
+ /* Thin wrapper: hand the CALL_EXPR's flat argument array to the
+ array-based worker.  */
+ return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
+ CALL_EXPR_ARGP (exp), maxlen, fcode);
+}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
{
/* If the string was "string\n", call puts("string"). */
size_t len = strlen (str);
- if ((unsigned char)str[len - 1] == target_newline)
+ if ((unsigned char)str[len - 1] == target_newline
+ && (size_t) (int) len == len
+ && (int) len > 0)
{
+ char *newstr;
+ tree offset_node, string_cst;
+
/* Create a NUL-terminated string that's one char shorter
than the original, stripping off the trailing '\n'. */
- char *newstr = XALLOCAVEC (char, len);
- memcpy (newstr, str, len - 1);
- newstr[len - 1] = 0;
-
- newarg = build_string_literal (len, newstr);
+ newarg = build_string_literal (len, str);
+ string_cst = string_constant (newarg, &offset_node);
+ gcc_checking_assert (string_cst
+ && (TREE_STRING_LENGTH (string_cst)
+ == (int) len)
+ && integer_zerop (offset_node)
+ && (unsigned char)
+ TREE_STRING_POINTER (string_cst)[len - 1]
+ == target_newline);
+ /* build_string_literal creates a new STRING_CST,
+ modify it in place to avoid double copying. */
+ newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
+ newstr[len - 1] = '\0';
if (fn_puts)
call = build_call_expr_loc (loc, fn_puts, 1, newarg);
}
return result;
}
-/* FIXME tuples.
- The functions below provide an alternate interface for folding
- builtin function calls presented as GIMPLE_CALL statements rather
- than as CALL_EXPRs. The folded result is still expressed as a
- tree. There is too much code duplication in the handling of
- varargs functions, and a more intrusive re-factoring would permit
- better sharing of code between the tree and statement-based
- versions of these functions. */
-
-/* Construct a new CALL_EXPR using the tail of the argument list of STMT
- along with N new arguments specified as the "..." parameters. SKIP
- is the number of arguments in STMT to be omitted. This function is used
- to do varargs-to-varargs transformations. */
-
-static tree
-gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
-{
- int oldnargs = gimple_call_num_args (stmt);
- int nargs = oldnargs - skip + n;
- tree fntype = TREE_TYPE (fndecl);
- tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
- tree *buffer;
- int i, j;
- va_list ap;
- location_t loc = gimple_location (stmt);
-
- buffer = XALLOCAVEC (tree, nargs);
- va_start (ap, n);
- for (i = 0; i < n; i++)
- buffer[i] = va_arg (ap, tree);
- va_end (ap);
- for (j = skip; j < oldnargs; j++, i++)
- buffer[i] = gimple_call_arg (stmt, j);
-
- return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
-}
-
/* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
a normal call should be emitted rather than expanding the function
inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
- tree dest, size, len, fn, fmt, flag;
- const char *fmt_str;
int nargs = gimple_call_num_args (stmt);
- /* Verify the required arguments in the original call. */
- if (nargs < 4)
- return NULL_TREE;
- dest = gimple_call_arg (stmt, 0);
- if (!validate_arg (dest, POINTER_TYPE))
- return NULL_TREE;
- flag = gimple_call_arg (stmt, 1);
- if (!validate_arg (flag, INTEGER_TYPE))
- return NULL_TREE;
- size = gimple_call_arg (stmt, 2);
- if (!validate_arg (size, INTEGER_TYPE))
- return NULL_TREE;
- fmt = gimple_call_arg (stmt, 3);
- if (!validate_arg (fmt, POINTER_TYPE))
- return NULL_TREE;
-
- if (! host_integerp (size, 1))
- return NULL_TREE;
-
- len = NULL_TREE;
-
- if (!init_target_chars ())
- return NULL_TREE;
-
- /* Check whether the format is a literal string constant. */
- fmt_str = c_getstr (fmt);
- if (fmt_str != NULL)
- {
- /* If the format doesn't contain % args or %%, we know the size. */
- if (strchr (fmt_str, target_percent) == 0)
- {
- if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
- len = build_int_cstu (size_type_node, strlen (fmt_str));
- }
- /* If the format is "%s" and first ... argument is a string literal,
- we know the size too. */
- else if (fcode == BUILT_IN_SPRINTF_CHK
- && strcmp (fmt_str, target_percent_s) == 0)
- {
- tree arg;
-
- if (nargs == 5)
- {
- arg = gimple_call_arg (stmt, 4);
- if (validate_arg (arg, POINTER_TYPE))
- {
- len = c_strlen (arg, 1);
- if (! len || ! host_integerp (len, 1))
- len = NULL_TREE;
- }
- }
- }
- }
-
- if (! integer_all_onesp (size))
- {
- if (! len || ! tree_int_cst_lt (len, size))
- return NULL_TREE;
- }
-
- /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
- or if format doesn't contain % chars or is "%s". */
- if (! integer_zerop (flag))
- {
- if (fmt_str == NULL)
- return NULL_TREE;
- if (strchr (fmt_str, target_percent) != NULL
- && strcmp (fmt_str, target_percent_s))
- return NULL_TREE;
- }
-
- /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
- fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
- ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
- if (!fn)
- return NULL_TREE;
-
- return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
+ /* &error_mark_node is only a non-NULL placeholder for the
+ zero-argument case; the worker bails out when NARGS < 4.  */
+ return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
+ (nargs > 0
+ ? gimple_call_arg_ptr (stmt, 0)
+ : &error_mark_node), fcode);
}
/* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
enum built_in_function fcode)
{
- tree dest, size, len, fn, fmt, flag;
- const char *fmt_str;
-
- /* Verify the required arguments in the original call. */
- if (gimple_call_num_args (stmt) < 5)
- return NULL_TREE;
- dest = gimple_call_arg (stmt, 0);
- if (!validate_arg (dest, POINTER_TYPE))
- return NULL_TREE;
- len = gimple_call_arg (stmt, 1);
- if (!validate_arg (len, INTEGER_TYPE))
- return NULL_TREE;
- flag = gimple_call_arg (stmt, 2);
- if (!validate_arg (flag, INTEGER_TYPE))
- return NULL_TREE;
- size = gimple_call_arg (stmt, 3);
- if (!validate_arg (size, INTEGER_TYPE))
- return NULL_TREE;
- fmt = gimple_call_arg (stmt, 4);
- if (!validate_arg (fmt, POINTER_TYPE))
- return NULL_TREE;
-
- if (! host_integerp (size, 1))
- return NULL_TREE;
-
- if (! integer_all_onesp (size))
- {
- if (! host_integerp (len, 1))
- {
- /* If LEN is not constant, try MAXLEN too.
- For MAXLEN only allow optimizing into non-_ocs function
- if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
- if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
- return NULL_TREE;
- }
- else
- maxlen = len;
-
- if (tree_int_cst_lt (size, maxlen))
- return NULL_TREE;
- }
-
- if (!init_target_chars ())
- return NULL_TREE;
-
- /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
- or if format doesn't contain % chars or is "%s". */
- if (! integer_zerop (flag))
- {
- fmt_str = c_getstr (fmt);
- if (fmt_str == NULL)
- return NULL_TREE;
- if (strchr (fmt_str, target_percent) != NULL
- && strcmp (fmt_str, target_percent_s))
- return NULL_TREE;
- }
-
- /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
- available. */
- fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
- ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
- if (!fn)
- return NULL_TREE;
+ int nargs = gimple_call_num_args (stmt);
- return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
+ /* &error_mark_node is only a non-NULL placeholder for the
+ zero-argument case; the worker bails out when NARGS < 5.  */
+ return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
+ (nargs > 0
+ ? gimple_call_arg_ptr (stmt, 0)
+ : &error_mark_node), maxlen, fcode);
}
/* Builtins with folding operations that operate on "..." arguments
&& !gimple_call_va_arg_pack_p (stmt))
{
int nargs = gimple_call_num_args (stmt);
+ /* Flat view of the call's arguments; &error_mark_node is a dummy
+ non-NULL pointer for zero-argument calls.  */
+ tree *args = (nargs > 0
+ ? gimple_call_arg_ptr (stmt, 0)
+ : &error_mark_node);
if (avoid_folding_inline_builtin (fndecl))
return NULL_TREE;
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
{
- return targetm.fold_builtin (fndecl, nargs,
- (nargs > 0
- ? gimple_call_arg_ptr (stmt, 0)
- : &error_mark_node), ignore);
+ return targetm.fold_builtin (fndecl, nargs, args, ignore);
}
else
{
if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
- {
- tree args[MAX_ARGS_TO_FOLD_BUILTIN];
- int i;
- for (i = 0; i < nargs; i++)
- args[i] = gimple_call_arg (stmt, i);
- ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
- }
+ ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
if (!ret)
ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
if (ret)
return false;
}
-