return is_builtin_name (name);
}
-/* Return the alignment in bits of EXP, an object.
- Don't return more than MAX_ALIGN no matter what. */
+/* Compute values M and N such that M divides (address of EXP - N) and
+ such that N < M. Store N in *BITPOSP and return M.
+
+ Note that the address (and thus the alignment) computed here is based
+ on the address to which a symbol resolves, whereas DECL_ALIGN is based
+ on the address at which an object is actually located. These two
+ addresses are not always the same. For example, on ARM targets,
+ the address &foo of a Thumb function foo() has the lowest bit set,
+ whereas foo() itself starts on an even address. */
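+
+/* For illustration: if EXP is known to live three bytes past a four-byte
+   boundary, get_object_alignment_1 returns 4 * BITS_PER_UNIT and stores
+   3 * BITS_PER_UNIT in *BITPOSP; both quantities are expressed in bits.  */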
unsigned int
get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
exp = DECL_INITIAL (exp);
if (DECL_P (exp)
&& TREE_CODE (exp) != LABEL_DECL)
- align = DECL_ALIGN (exp);
+ {
+ if (TREE_CODE (exp) == FUNCTION_DECL)
+ {
+ /* Function addresses can encode extra information besides their
+ alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
+ allows the low bit to be used as a virtual bit, we know
+ that the address itself must be 2-byte aligned. */
+ if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
+ align = 2 * BITS_PER_UNIT;
+ else
+ align = BITS_PER_UNIT;
+ }
+ else
+ align = DECL_ALIGN (exp);
+ }
else if (CONSTANT_CLASS_P (exp))
{
align = TYPE_ALIGN (TREE_TYPE (exp));
align = MAX (pi->align * BITS_PER_UNIT, align);
}
else if (TREE_CODE (addr) == ADDR_EXPR)
- align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
+ align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
}
else if (TREE_CODE (exp) == TARGET_MEM_REF)
align = MAX (pi->align * BITS_PER_UNIT, align);
}
else if (TREE_CODE (addr) == ADDR_EXPR)
- align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
+ align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
if (TMR_OFFSET (exp))
bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
if (TMR_INDEX (exp) && TMR_STEP (exp))
return align;
}
-/* Return the alignment in bits of EXP, an object.
- Don't return more than MAX_ALIGN no matter what. */
+/* Return the alignment in bits of EXP, an object. */
unsigned int
-get_object_alignment (tree exp, unsigned int max_align)
+get_object_alignment (tree exp)
{
unsigned HOST_WIDE_INT bitpos = 0;
unsigned int align;
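+  /* BITPOS & -BITPOS isolates the lowest set bit of BITPOS, i.e. the
+     largest power of two dividing it, which caps the usable alignment.  */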
if (bitpos != 0)
align = (bitpos & -bitpos);
- return MIN (align, max_align);
-}
-
-/* Returns true iff we can trust that alignment information has been
- calculated properly. */
-
-bool
-can_trust_pointer_alignment (void)
-{
- /* We rely on TER to compute accurate alignment information. */
- return (optimize && flag_tree_ter);
+ return align;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
- But don't return more than MAX_ALIGN no matter what.
The alignment returned is, by default, the alignment of the thing that
EXP points to. If it is not a POINTER_TYPE, 0 is returned.
   But it can be higher than the default if the expression is actually
   pointing at an object whose alignment is tighter. */
unsigned int
-get_pointer_alignment (tree exp, unsigned int max_align)
+get_pointer_alignment (tree exp)
{
STRIP_NOPS (exp);
if (TREE_CODE (exp) == ADDR_EXPR)
- return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
+ return get_object_alignment (TREE_OPERAND (exp, 0));
else if (TREE_CODE (exp) == SSA_NAME
&& POINTER_TYPE_P (TREE_TYPE (exp)))
{
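+      /* A sketch of the logic: a nonzero misalignment only guarantees
+         alignment up to its lowest set bit, hence misalign & -misalign.  */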
align = (pi->misalign & -pi->misalign);
else
align = pi->align;
- return MIN (max_align, align * BITS_PER_UNIT);
+ return align * BITS_PER_UNIT;
}
return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
- if (MEM_OFFSET (mem)
- && CONST_INT_P (MEM_OFFSET (mem)))
- offset = INTVAL (MEM_OFFSET (mem));
+ if (MEM_OFFSET_KNOWN_P (mem))
+ offset = MEM_OFFSET (mem);
if (offset >= 0 && len && host_integerp (len, 0))
length = tree_low_cst (len, 0);
if (mem_expr != MEM_EXPR (mem))
{
set_mem_expr (mem, mem_expr);
- set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
+ if (offset >= 0)
+ set_mem_offset (mem, offset);
+ else
+ clear_mem_offset (mem);
}
}
set_mem_alias_set (mem, 0);
- set_mem_size (mem, NULL_RTX);
+ clear_mem_size (mem);
}
return mem;
else
#endif
emit_stack_restore (SAVE_BLOCK, old_stack_level);
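+  /* The restore changes the stack pointer outside the normal push/pop
+     bookkeeping, so re-add REG_ARGS_SIZE notes to the insns emitted
+     after the call.  */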
+  fixup_args_size_notes (call_insn, get_last_insn (), 0);
OK_DEFER_POP;
CASE_MATHFN (BUILT_IN_HUGE_VAL)
CASE_MATHFN (BUILT_IN_HYPOT)
CASE_MATHFN (BUILT_IN_ILOGB)
+ CASE_MATHFN (BUILT_IN_ICEIL)
+ CASE_MATHFN (BUILT_IN_IFLOOR)
CASE_MATHFN (BUILT_IN_INF)
+ CASE_MATHFN (BUILT_IN_IRINT)
+ CASE_MATHFN (BUILT_IN_IROUND)
CASE_MATHFN (BUILT_IN_ISINF)
CASE_MATHFN (BUILT_IN_J0)
CASE_MATHFN (BUILT_IN_J1)
switch (DECL_FUNCTION_CODE (fndecl))
{
+ CASE_FLT_FN (BUILT_IN_ICEIL):
CASE_FLT_FN (BUILT_IN_LCEIL):
CASE_FLT_FN (BUILT_IN_LLCEIL):
builtin_optab = lceil_optab;
fallback_fn = BUILT_IN_CEIL;
break;
+ CASE_FLT_FN (BUILT_IN_IFLOOR):
CASE_FLT_FN (BUILT_IN_LFLOOR):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
builtin_optab = lfloor_optab;
switch (DECL_FUNCTION_CODE (fndecl))
{
+ case BUILT_IN_ICEIL:
case BUILT_IN_LCEIL:
case BUILT_IN_LLCEIL:
name = "ceil";
break;
+ case BUILT_IN_ICEILF:
case BUILT_IN_LCEILF:
case BUILT_IN_LLCEILF:
name = "ceilf";
break;
+ case BUILT_IN_ICEILL:
case BUILT_IN_LCEILL:
case BUILT_IN_LLCEILL:
name = "ceill";
break;
+ case BUILT_IN_IFLOOR:
case BUILT_IN_LFLOOR:
case BUILT_IN_LLFLOOR:
name = "floor";
break;
+ case BUILT_IN_IFLOORF:
case BUILT_IN_LFLOORF:
case BUILT_IN_LLFLOORF:
name = "floorf";
break;
+ case BUILT_IN_IFLOORL:
case BUILT_IN_LFLOORL:
case BUILT_IN_LLFLOORL:
name = "floorl";
switch (DECL_FUNCTION_CODE (fndecl))
{
+ CASE_FLT_FN (BUILT_IN_IRINT):
CASE_FLT_FN (BUILT_IN_LRINT):
CASE_FLT_FN (BUILT_IN_LLRINT):
builtin_optab = lrint_optab; break;
+
+ CASE_FLT_FN (BUILT_IN_IROUND):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LLROUND):
builtin_optab = lround_optab; break;
+
default:
gcc_unreachable ();
}
return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
- align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ align = get_pointer_alignment (src) / BITS_PER_UNIT;
/* If SRC is not a pointer type, don't do this operation inline. */
if (align == 0)
/* Now that we are assured of success, expand the source. */
start_sequence ();
- pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
+ pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
if (pat != src_reg)
- emit_move_insn (src_reg, pat);
+ {
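+      /* Where POINTERS_EXTEND_UNSIGNED is defined, the expanded address
+         may still be in ptr_mode; widen it to Pmode before the move.  */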
+#ifdef POINTERS_EXTEND_UNSIGNED
+ if (GET_MODE (pat) != Pmode)
+ pat = convert_to_mode (Pmode, pat,
+ POINTERS_EXTEND_UNSIGNED);
+#endif
+ emit_move_insn (src_reg, pat);
+ }
pat = get_insns ();
end_sequence ();
tree src = CALL_EXPR_ARG (exp, 1);
tree len = CALL_EXPR_ARG (exp, 2);
const char *src_str;
- unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
- unsigned int dest_align
- = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ unsigned int src_align = get_pointer_alignment (src);
+ unsigned int dest_align = get_pointer_alignment (dest);
rtx dest_mem, src_mem, dest_addr, len_rtx;
HOST_WIDE_INT expected_size = -1;
unsigned int expected_align = 0;
else
{
const char *src_str;
- unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
- unsigned int dest_align
- = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ unsigned int src_align = get_pointer_alignment (src);
+ unsigned int dest_align = get_pointer_alignment (dest);
rtx dest_mem, src_mem, len_rtx;
/* If either SRC or DEST is not a pointer type, don't do this
use store_by_pieces, if it fails, punt. */
if (tree_int_cst_lt (slen, len))
{
- unsigned int dest_align
- = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ unsigned int dest_align = get_pointer_alignment (dest);
const char *p = c_getstr (src);
rtx dest_mem;
HOST_WIDE_INT expected_size = -1;
unsigned int expected_align = 0;
- dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ dest_align = get_pointer_alignment (dest);
/* If DEST is not a pointer type, don't do this operation in-line. */
if (dest_align == 0)
calling bzero instead of memset. */
return expand_builtin_memset_args (dest, integer_zero_node,
- fold_convert_loc (loc, sizetype, size),
+ fold_convert_loc (loc,
+ size_type_node, size),
const0_rtx, VOIDmode, exp);
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
- Return NULL_RTX if we failed and the
- caller should emit a normal call, otherwise try to get the result in
- TARGET, if convenient (and in mode MODE, if that's convenient). */
+ Return NULL_RTX if we failed and the caller should emit a normal call,
+ otherwise try to get the result in TARGET, if convenient (and in mode
+ MODE, if that's convenient). */
static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
return NULL_RTX;
-#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
+ /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
+ implementing memcmp because it will stop if it encounters two
+ zero bytes. */
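+  /* For example, memcmp ("a\0b", "a\0c", 3) must inspect all three bytes,
+     but a string-style compare would stop at the embedded NULs and
+     wrongly report the buffers equal.  */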
+#if defined HAVE_cmpmemsi
{
rtx arg1_rtx, arg2_rtx, arg3_rtx;
rtx result;
tree arg2 = CALL_EXPR_ARG (exp, 1);
tree len = CALL_EXPR_ARG (exp, 2);
- unsigned int arg1_align
- = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- unsigned int arg2_align
- = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
+ unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
enum machine_mode insn_mode;
-#ifdef HAVE_cmpmemsi
if (HAVE_cmpmemsi)
insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
else
-#endif
-#ifdef HAVE_cmpstrnsi
- if (HAVE_cmpstrnsi)
- insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
- else
-#endif
return NULL_RTX;
/* If we don't have POINTER_TYPE, call the function. */
/* Set MEM_SIZE as appropriate. */
if (CONST_INT_P (arg3_rtx))
{
- set_mem_size (arg1_rtx, arg3_rtx);
- set_mem_size (arg2_rtx, arg3_rtx);
+ set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
+ set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
}
-#ifdef HAVE_cmpmemsi
if (HAVE_cmpmemsi)
insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
GEN_INT (MIN (arg1_align, arg2_align)));
else
-#endif
-#ifdef HAVE_cmpstrnsi
- if (HAVE_cmpstrnsi)
- insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
- GEN_INT (MIN (arg1_align, arg2_align)));
- else
-#endif
gcc_unreachable ();
if (insn)
else
return convert_to_mode (mode, result, 0);
}
-#endif
+#endif /* HAVE_cmpmemsi. */
return NULL_RTX;
}
tree arg1 = CALL_EXPR_ARG (exp, 0);
tree arg2 = CALL_EXPR_ARG (exp, 1);
- unsigned int arg1_align
- = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- unsigned int arg2_align
- = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
+ unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
/* If we don't have POINTER_TYPE, call the function. */
if (arg1_align == 0 || arg2_align == 0)
tree arg2 = CALL_EXPR_ARG (exp, 1);
tree arg3 = CALL_EXPR_ARG (exp, 2);
- unsigned int arg1_align
- = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
- unsigned int arg2_align
- = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
+ unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
+ unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
enum machine_mode insn_mode
= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
&& !integer_zerop (TYPE_SIZE (type)))
{
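+      /* Round VALIST_TMP up to BOUNDARY using the standard idiom:
+         add BOUNDARY - 1, then mask with -BOUNDARY.  */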
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- fold_build2 (POINTER_PLUS_EXPR,
- TREE_TYPE (valist),
- valist_tmp, size_int (boundary - 1)));
+ fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
gimplify_and_add (t, pre_p);
- t = fold_convert (sizetype, valist_tmp);
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- fold_convert (TREE_TYPE (valist),
- fold_build2 (BIT_AND_EXPR, sizetype, t,
- size_int (-boundary))));
+ fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
+ valist_tmp,
+ build_int_cst (TREE_TYPE (valist), -boundary)));
gimplify_and_add (t, pre_p);
}
else
rounded_size, size_int (align));
t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
size_binop (MINUS_EXPR, rounded_size, type_size));
- addr = fold_build2 (POINTER_PLUS_EXPR,
- TREE_TYPE (addr), addr, t);
+ addr = fold_build_pointer_plus (addr, t);
}
/* Compute new value for AP. */
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
+ t = fold_build_pointer_plus (valist_tmp, rounded_size);
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
gimplify_and_add (t, pre_p);
/* Compute op, into TARGET if possible.
Set TARGET to wherever the result comes back. */
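+  /* CLRSB is the only unop expanded here whose operand is treated as
+     signed; every other optab passed to this helper gets UNSIGNEDP == 1.  */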
target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
- op_optab, op0, target, 1);
+ op_optab, op0, target, op_optab != clrsb_optab);
gcc_assert (target);
return convert_to_mode (target_mode, target, 0);
return target;
}
+/* Expand a call to __builtin_assume_aligned.  We just return our first
+   argument, as the __builtin_assume_aligned semantics should already
+   have been applied by CCP.  */
+
+static rtx
+expand_builtin_assume_aligned (tree exp, rtx target)
+{
+ if (call_expr_nargs (exp) < 2)
+ return const0_rtx;
+ target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
+ EXPAND_NORMAL);
+ gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
+ && (call_expr_nargs (exp) < 3
+ || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
+ return target;
+}
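+
+/* A source-level sketch of the intent (illustrative, not from this file):
+
+     double *p = __builtin_assume_aligned (q, 16);
+
+   by the time we expand the call, CCP should already have made use of
+   the 16-byte guarantee, so only the first argument remains to emit.  */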
+
void
expand_builtin_trap (void)
{
{
m_tramp = change_address (m_tramp, BLKmode, tmp);
set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
- set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
+ set_mem_size (m_tramp, TRAMPOLINE_SIZE);
}
/* The FUNC argument should be the address of the nested function.
/* The alignment needs to be at least according to that of the mode. */
set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
- get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
+ get_pointer_alignment (loc)));
set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
MEM_VOLATILE_P (mem) = 1;
switch (fcode)
{
- case BUILT_IN_FETCH_AND_NAND_1:
- case BUILT_IN_FETCH_AND_NAND_2:
- case BUILT_IN_FETCH_AND_NAND_4:
- case BUILT_IN_FETCH_AND_NAND_8:
- case BUILT_IN_FETCH_AND_NAND_16:
+ case BUILT_IN_SYNC_FETCH_AND_NAND_1:
+ case BUILT_IN_SYNC_FETCH_AND_NAND_2:
+ case BUILT_IN_SYNC_FETCH_AND_NAND_4:
+ case BUILT_IN_SYNC_FETCH_AND_NAND_8:
+ case BUILT_IN_SYNC_FETCH_AND_NAND_16:
if (warned_f_a_n)
break;
- fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
+ fndecl = implicit_built_in_decls[BUILT_IN_SYNC_FETCH_AND_NAND_N];
inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
warned_f_a_n = true;
break;
- case BUILT_IN_NAND_AND_FETCH_1:
- case BUILT_IN_NAND_AND_FETCH_2:
- case BUILT_IN_NAND_AND_FETCH_4:
- case BUILT_IN_NAND_AND_FETCH_8:
- case BUILT_IN_NAND_AND_FETCH_16:
+ case BUILT_IN_SYNC_NAND_AND_FETCH_1:
+ case BUILT_IN_SYNC_NAND_AND_FETCH_2:
+ case BUILT_IN_SYNC_NAND_AND_FETCH_4:
+ case BUILT_IN_SYNC_NAND_AND_FETCH_8:
+ case BUILT_IN_SYNC_NAND_AND_FETCH_16:
if (warned_n_a_f)
break;
- fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
+ fndecl = implicit_built_in_decls[BUILT_IN_SYNC_NAND_AND_FETCH_N];
inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
warned_n_a_f = true;
break;
the results. */
static rtx
-expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
+expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
rtx target)
{
rtx val, mem;
/* Expand the __sync_synchronize intrinsic. */
static void
-expand_builtin_synchronize (void)
+expand_builtin_sync_synchronize (void)
{
gimple x;
VEC (tree, gc) *v_clobbers;
/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
static void
-expand_builtin_lock_release (enum machine_mode mode, tree exp)
+expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
{
struct expand_operand ops[2];
enum insn_code icode;
/* Otherwise we can implement this operation by emitting a barrier
followed by a store of zero. */
- expand_builtin_synchronize ();
+ expand_builtin_sync_synchronize ();
emit_move_insn (mem, const0_rtx);
}
\f
return target;
break;
+ CASE_FLT_FN (BUILT_IN_ICEIL):
CASE_FLT_FN (BUILT_IN_LCEIL):
CASE_FLT_FN (BUILT_IN_LLCEIL):
CASE_FLT_FN (BUILT_IN_LFLOOR):
+ CASE_FLT_FN (BUILT_IN_IFLOOR):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
target = expand_builtin_int_roundingfn (exp, target);
if (target)
return target;
break;
+ CASE_FLT_FN (BUILT_IN_IRINT):
CASE_FLT_FN (BUILT_IN_LRINT):
CASE_FLT_FN (BUILT_IN_LLRINT):
+ CASE_FLT_FN (BUILT_IN_IROUND):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LLROUND):
target = expand_builtin_int_roundingfn_2 (exp, target);
return target;
break;
+ CASE_INT_FN (BUILT_IN_CLRSB):
+ case BUILT_IN_CLRSBIMAX:
+ target = expand_builtin_unop (target_mode, exp, target,
+ subtarget, clrsb_optab);
+ if (target)
+ return target;
+ break;
+
CASE_INT_FN (BUILT_IN_POPCOUNT):
case BUILT_IN_POPCOUNTIMAX:
target = expand_builtin_unop (target_mode, exp, target,
return expand_builtin_va_copy (exp);
case BUILT_IN_EXPECT:
return expand_builtin_expect (exp, target);
+ case BUILT_IN_ASSUME_ALIGNED:
+ return expand_builtin_assume_aligned (exp, target);
case BUILT_IN_PREFETCH:
expand_builtin_prefetch (exp);
return const0_rtx;
return target;
break;
- case BUILT_IN_FETCH_AND_ADD_1:
- case BUILT_IN_FETCH_AND_ADD_2:
- case BUILT_IN_FETCH_AND_ADD_4:
- case BUILT_IN_FETCH_AND_ADD_8:
- case BUILT_IN_FETCH_AND_ADD_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
+ case BUILT_IN_SYNC_FETCH_AND_ADD_1:
+ case BUILT_IN_SYNC_FETCH_AND_ADD_2:
+ case BUILT_IN_SYNC_FETCH_AND_ADD_4:
+ case BUILT_IN_SYNC_FETCH_AND_ADD_8:
+ case BUILT_IN_SYNC_FETCH_AND_ADD_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
target = expand_builtin_sync_operation (mode, exp, PLUS,
false, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_FETCH_AND_SUB_1:
- case BUILT_IN_FETCH_AND_SUB_2:
- case BUILT_IN_FETCH_AND_SUB_4:
- case BUILT_IN_FETCH_AND_SUB_8:
- case BUILT_IN_FETCH_AND_SUB_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
+ case BUILT_IN_SYNC_FETCH_AND_SUB_1:
+ case BUILT_IN_SYNC_FETCH_AND_SUB_2:
+ case BUILT_IN_SYNC_FETCH_AND_SUB_4:
+ case BUILT_IN_SYNC_FETCH_AND_SUB_8:
+ case BUILT_IN_SYNC_FETCH_AND_SUB_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
target = expand_builtin_sync_operation (mode, exp, MINUS,
false, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_FETCH_AND_OR_1:
- case BUILT_IN_FETCH_AND_OR_2:
- case BUILT_IN_FETCH_AND_OR_4:
- case BUILT_IN_FETCH_AND_OR_8:
- case BUILT_IN_FETCH_AND_OR_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
+ case BUILT_IN_SYNC_FETCH_AND_OR_1:
+ case BUILT_IN_SYNC_FETCH_AND_OR_2:
+ case BUILT_IN_SYNC_FETCH_AND_OR_4:
+ case BUILT_IN_SYNC_FETCH_AND_OR_8:
+ case BUILT_IN_SYNC_FETCH_AND_OR_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
target = expand_builtin_sync_operation (mode, exp, IOR,
false, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_FETCH_AND_AND_1:
- case BUILT_IN_FETCH_AND_AND_2:
- case BUILT_IN_FETCH_AND_AND_4:
- case BUILT_IN_FETCH_AND_AND_8:
- case BUILT_IN_FETCH_AND_AND_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
+ case BUILT_IN_SYNC_FETCH_AND_AND_1:
+ case BUILT_IN_SYNC_FETCH_AND_AND_2:
+ case BUILT_IN_SYNC_FETCH_AND_AND_4:
+ case BUILT_IN_SYNC_FETCH_AND_AND_8:
+ case BUILT_IN_SYNC_FETCH_AND_AND_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
target = expand_builtin_sync_operation (mode, exp, AND,
false, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_FETCH_AND_XOR_1:
- case BUILT_IN_FETCH_AND_XOR_2:
- case BUILT_IN_FETCH_AND_XOR_4:
- case BUILT_IN_FETCH_AND_XOR_8:
- case BUILT_IN_FETCH_AND_XOR_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
+ case BUILT_IN_SYNC_FETCH_AND_XOR_1:
+ case BUILT_IN_SYNC_FETCH_AND_XOR_2:
+ case BUILT_IN_SYNC_FETCH_AND_XOR_4:
+ case BUILT_IN_SYNC_FETCH_AND_XOR_8:
+ case BUILT_IN_SYNC_FETCH_AND_XOR_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
target = expand_builtin_sync_operation (mode, exp, XOR,
false, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_FETCH_AND_NAND_1:
- case BUILT_IN_FETCH_AND_NAND_2:
- case BUILT_IN_FETCH_AND_NAND_4:
- case BUILT_IN_FETCH_AND_NAND_8:
- case BUILT_IN_FETCH_AND_NAND_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
+ case BUILT_IN_SYNC_FETCH_AND_NAND_1:
+ case BUILT_IN_SYNC_FETCH_AND_NAND_2:
+ case BUILT_IN_SYNC_FETCH_AND_NAND_4:
+ case BUILT_IN_SYNC_FETCH_AND_NAND_8:
+ case BUILT_IN_SYNC_FETCH_AND_NAND_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
target = expand_builtin_sync_operation (mode, exp, NOT,
false, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_ADD_AND_FETCH_1:
- case BUILT_IN_ADD_AND_FETCH_2:
- case BUILT_IN_ADD_AND_FETCH_4:
- case BUILT_IN_ADD_AND_FETCH_8:
- case BUILT_IN_ADD_AND_FETCH_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
+ case BUILT_IN_SYNC_ADD_AND_FETCH_1:
+ case BUILT_IN_SYNC_ADD_AND_FETCH_2:
+ case BUILT_IN_SYNC_ADD_AND_FETCH_4:
+ case BUILT_IN_SYNC_ADD_AND_FETCH_8:
+ case BUILT_IN_SYNC_ADD_AND_FETCH_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
target = expand_builtin_sync_operation (mode, exp, PLUS,
true, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_SUB_AND_FETCH_1:
- case BUILT_IN_SUB_AND_FETCH_2:
- case BUILT_IN_SUB_AND_FETCH_4:
- case BUILT_IN_SUB_AND_FETCH_8:
- case BUILT_IN_SUB_AND_FETCH_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
+ case BUILT_IN_SYNC_SUB_AND_FETCH_1:
+ case BUILT_IN_SYNC_SUB_AND_FETCH_2:
+ case BUILT_IN_SYNC_SUB_AND_FETCH_4:
+ case BUILT_IN_SYNC_SUB_AND_FETCH_8:
+ case BUILT_IN_SYNC_SUB_AND_FETCH_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
target = expand_builtin_sync_operation (mode, exp, MINUS,
true, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_OR_AND_FETCH_1:
- case BUILT_IN_OR_AND_FETCH_2:
- case BUILT_IN_OR_AND_FETCH_4:
- case BUILT_IN_OR_AND_FETCH_8:
- case BUILT_IN_OR_AND_FETCH_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
+ case BUILT_IN_SYNC_OR_AND_FETCH_1:
+ case BUILT_IN_SYNC_OR_AND_FETCH_2:
+ case BUILT_IN_SYNC_OR_AND_FETCH_4:
+ case BUILT_IN_SYNC_OR_AND_FETCH_8:
+ case BUILT_IN_SYNC_OR_AND_FETCH_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
target = expand_builtin_sync_operation (mode, exp, IOR,
true, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_AND_AND_FETCH_1:
- case BUILT_IN_AND_AND_FETCH_2:
- case BUILT_IN_AND_AND_FETCH_4:
- case BUILT_IN_AND_AND_FETCH_8:
- case BUILT_IN_AND_AND_FETCH_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
+ case BUILT_IN_SYNC_AND_AND_FETCH_1:
+ case BUILT_IN_SYNC_AND_AND_FETCH_2:
+ case BUILT_IN_SYNC_AND_AND_FETCH_4:
+ case BUILT_IN_SYNC_AND_AND_FETCH_8:
+ case BUILT_IN_SYNC_AND_AND_FETCH_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
target = expand_builtin_sync_operation (mode, exp, AND,
true, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_XOR_AND_FETCH_1:
- case BUILT_IN_XOR_AND_FETCH_2:
- case BUILT_IN_XOR_AND_FETCH_4:
- case BUILT_IN_XOR_AND_FETCH_8:
- case BUILT_IN_XOR_AND_FETCH_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
+ case BUILT_IN_SYNC_XOR_AND_FETCH_1:
+ case BUILT_IN_SYNC_XOR_AND_FETCH_2:
+ case BUILT_IN_SYNC_XOR_AND_FETCH_4:
+ case BUILT_IN_SYNC_XOR_AND_FETCH_8:
+ case BUILT_IN_SYNC_XOR_AND_FETCH_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
target = expand_builtin_sync_operation (mode, exp, XOR,
true, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_NAND_AND_FETCH_1:
- case BUILT_IN_NAND_AND_FETCH_2:
- case BUILT_IN_NAND_AND_FETCH_4:
- case BUILT_IN_NAND_AND_FETCH_8:
- case BUILT_IN_NAND_AND_FETCH_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
+ case BUILT_IN_SYNC_NAND_AND_FETCH_1:
+ case BUILT_IN_SYNC_NAND_AND_FETCH_2:
+ case BUILT_IN_SYNC_NAND_AND_FETCH_4:
+ case BUILT_IN_SYNC_NAND_AND_FETCH_8:
+ case BUILT_IN_SYNC_NAND_AND_FETCH_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
target = expand_builtin_sync_operation (mode, exp, NOT,
true, target, ignore);
if (target)
return target;
break;
- case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
- case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
- case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
- case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
- case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
+ case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
+ case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
+ case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
+ case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
+ case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
if (mode == VOIDmode)
mode = TYPE_MODE (boolean_type_node);
if (!target || !register_operand (target, mode))
target = gen_reg_rtx (mode);
- mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
+ mode = get_builtin_sync_mode
+ (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
target = expand_builtin_compare_and_swap (mode, exp, true, target);
if (target)
return target;
break;
- case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
- case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
- case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
- case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
- case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
+ case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
+ case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
+ case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
+ case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
+ case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
+ mode = get_builtin_sync_mode
+ (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
target = expand_builtin_compare_and_swap (mode, exp, false, target);
if (target)
return target;
break;
- case BUILT_IN_LOCK_TEST_AND_SET_1:
- case BUILT_IN_LOCK_TEST_AND_SET_2:
- case BUILT_IN_LOCK_TEST_AND_SET_4:
- case BUILT_IN_LOCK_TEST_AND_SET_8:
- case BUILT_IN_LOCK_TEST_AND_SET_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
- target = expand_builtin_lock_test_and_set (mode, exp, target);
+ case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
+ case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
+ case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
+ case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
+ case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
+ target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
if (target)
return target;
break;
- case BUILT_IN_LOCK_RELEASE_1:
- case BUILT_IN_LOCK_RELEASE_2:
- case BUILT_IN_LOCK_RELEASE_4:
- case BUILT_IN_LOCK_RELEASE_8:
- case BUILT_IN_LOCK_RELEASE_16:
- mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
- expand_builtin_lock_release (mode, exp);
+ case BUILT_IN_SYNC_LOCK_RELEASE_1:
+ case BUILT_IN_SYNC_LOCK_RELEASE_2:
+ case BUILT_IN_SYNC_LOCK_RELEASE_4:
+ case BUILT_IN_SYNC_LOCK_RELEASE_8:
+ case BUILT_IN_SYNC_LOCK_RELEASE_16:
+ mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
+ expand_builtin_sync_lock_release (mode, exp);
return const0_rtx;
- case BUILT_IN_SYNCHRONIZE:
- expand_builtin_synchronize ();
+ case BUILT_IN_SYNC_SYNCHRONIZE:
+ expand_builtin_sync_synchronize ();
return const0_rtx;
case BUILT_IN_OBJECT_SIZE:
break;
case BUILT_IN_FREE:
- maybe_emit_free_warning (exp);
+ if (warn_free_nonheap_object)
+ maybe_emit_free_warning (exp);
break;
default: /* just do library call, if unknown builtin */
static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
- tree inner, fndecl;
+ tree inner, fndecl, inner_arg0;
enum tree_code code;
+ /* Distribute the expected value over short-circuiting operators.
+ See through the cast from truthvalue_type_node to long. */
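+  /* For example, __builtin_expect (a && b, 1) typically arrives here as
+     __builtin_expect ((long) (a && b), 1); stripping the NOP_EXPRs lets
+     the expectation reach both operands of the TRUTH_ANDIF_EXPR.  */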
+ inner_arg0 = arg0;
+ while (TREE_CODE (inner_arg0) == NOP_EXPR
+ && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
+ && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
+ inner_arg0 = TREE_OPERAND (inner_arg0, 0);
+
 /* If this is a builtin_expect within a builtin_expect, keep the
    inner one.  See through a comparison against a constant.  It
    might have been added to create a truthvalue.  */
- inner = arg0;
+ inner = inner_arg0;
+
if (COMPARISON_CLASS_P (inner)
&& TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
inner = TREE_OPERAND (inner, 0);
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
return arg0;
- /* Distribute the expected value over short-circuiting operators.
- See through the cast from truthvalue_type_node to long. */
- inner = arg0;
- while (TREE_CODE (inner) == NOP_EXPR
- && INTEGRAL_TYPE_P (TREE_TYPE (inner))
- && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
- inner = TREE_OPERAND (inner, 0);
-
+ inner = inner_arg0;
code = TREE_CODE (inner);
if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
{
}
/* If the argument isn't invariant then there's nothing else we can do. */
- if (!TREE_CONSTANT (arg0))
+ if (!TREE_CONSTANT (inner_arg0))
return NULL_TREE;
/* If we expect that a comparison against the argument will fold to
a constant return the constant. In practice, this means a true
constant or the address of a non-weak symbol. */
- inner = arg0;
+ inner = inner_arg0;
STRIP_NOPS (inner);
if (TREE_CODE (inner) == ADDR_EXPR)
{
fold_convert_loc (loc, newtype, arg0));
}
+ /* Canonicalize iround (x) to lround (x) on ILP32 targets where
+ sizeof (int) == sizeof (long). */
+ if (TYPE_PRECISION (integer_type_node)
+ == TYPE_PRECISION (long_integer_type_node))
+ {
+ tree newfn = NULL_TREE;
+ switch (fcode)
+ {
+ CASE_FLT_FN (BUILT_IN_ICEIL):
+ newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_IFLOOR):
+ newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_IROUND):
+ newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
+ break;
+
+ CASE_FLT_FN (BUILT_IN_IRINT):
+ newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
+ break;
+
+ default:
+ break;
+ }
+
+ if (newfn)
+ {
+ tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
+ return fold_convert_loc (loc,
+ TREE_TYPE (TREE_TYPE (fndecl)), newcall);
+ }
+ }
+
/* Canonicalize llround (x) to lround (x) on LP64 targets where
sizeof (long long) == sizeof (long). */
if (TYPE_PRECISION (long_long_integer_type_node)
switch (DECL_FUNCTION_CODE (fndecl))
{
+ CASE_FLT_FN (BUILT_IN_IFLOOR):
CASE_FLT_FN (BUILT_IN_LFLOOR):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
real_floor (&r, TYPE_MODE (ftype), &x);
break;
+ CASE_FLT_FN (BUILT_IN_ICEIL):
CASE_FLT_FN (BUILT_IN_LCEIL):
CASE_FLT_FN (BUILT_IN_LLCEIL):
real_ceil (&r, TYPE_MODE (ftype), &x);
break;
+ CASE_FLT_FN (BUILT_IN_IROUND):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LLROUND):
real_round (&r, TYPE_MODE (ftype), &x);
{
hi = TREE_INT_CST_HIGH (arg);
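+      /* The operand occupies only WIDTH bits; clear the bits of HI that
+         lie above WIDTH so the counting code below is not confused.  */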
if (width < 2 * HOST_BITS_PER_WIDE_INT)
- hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
+ hi &= ~((unsigned HOST_WIDE_INT) (-1)
+ << (width - HOST_BITS_PER_WIDE_INT));
}
else
{
result = width;
break;
+ CASE_INT_FN (BUILT_IN_CLRSB):
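+      /* __builtin_clrsb returns the number of leading redundant sign
+         bits: the number of bits following the most significant bit that
+         are identical to it.  Negative values are complemented first so
+         that only leading zeros need to be counted.  */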
+ if (width > HOST_BITS_PER_WIDE_INT
+ && (hi & ((unsigned HOST_WIDE_INT) 1
+ << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
+ {
+ hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
+ << (width - HOST_BITS_PER_WIDE_INT - 1));
+ lo = ~lo;
+ }
+ else if (width <= HOST_BITS_PER_WIDE_INT
+ && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
+ lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
+ if (hi != 0)
+ result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
+ else if (lo != 0)
+ result = width - floor_log2 (lo) - 2;
+ else
+ result = width - 1;
+ break;
+
CASE_INT_FN (BUILT_IN_POPCOUNT):
result = 0;
while (lo)
length = tree_low_cst (len, 1);
if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
- || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
- < length)
+ || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
return NULL_TREE;
if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
calling bzero instead of memset. */
return fold_builtin_memset (loc, dest, integer_zero_node,
- fold_convert_loc (loc, sizetype, size),
+ fold_convert_loc (loc, size_type_node, size),
void_type_node, ignore);
}
if (endp == 3)
{
- src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
- dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ src_align = get_pointer_alignment (src);
+ dest_align = get_pointer_alignment (dest);
/* Both DEST and SRC must be pointer types.
??? This is what old code did. Is the testing for pointer types
|| TREE_ADDRESSABLE (desttype))
return NULL_TREE;
- src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
- dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ src_align = get_pointer_alignment (src);
+ dest_align = get_pointer_alignment (dest);
if (dest_align < TYPE_ALIGN (desttype)
|| src_align < TYPE_ALIGN (srctype))
return NULL_TREE;
len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
ssize_int (1));
- len = fold_convert_loc (loc, sizetype, len);
- dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ dest = fold_build_pointer_plus_loc (loc, dest, len);
dest = fold_convert_loc (loc, type, dest);
if (expr)
dest = omit_one_operand_loc (loc, type, dest, expr);
call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
type = TREE_TYPE (TREE_TYPE (fndecl));
- len = fold_convert_loc (loc, sizetype, len);
- dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ dest = fold_build_pointer_plus_loc (loc, dest, len);
dest = fold_convert_loc (loc, type, dest);
dest = omit_one_operand_loc (loc, type, dest, call);
return dest;
if (r == NULL)
return build_int_cst (TREE_TYPE (arg1), 0);
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
- size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
/* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
- return fold_build2_loc (loc, LT_EXPR, type, arg,
- build_real (TREE_TYPE (arg), dconst0));
+ return fold_convert (type,
+ fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
+ build_real (TREE_TYPE (arg), dconst0)));
return NULL_TREE;
}
CASE_FLT_FN (BUILT_IN_RINT):
return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
+ CASE_FLT_FN (BUILT_IN_ICEIL):
CASE_FLT_FN (BUILT_IN_LCEIL):
CASE_FLT_FN (BUILT_IN_LLCEIL):
CASE_FLT_FN (BUILT_IN_LFLOOR):
+ CASE_FLT_FN (BUILT_IN_IFLOOR):
CASE_FLT_FN (BUILT_IN_LLFLOOR):
+ CASE_FLT_FN (BUILT_IN_IROUND):
CASE_FLT_FN (BUILT_IN_LROUND):
CASE_FLT_FN (BUILT_IN_LLROUND):
return fold_builtin_int_roundingfn (loc, fndecl, arg0);
+ CASE_FLT_FN (BUILT_IN_IRINT):
CASE_FLT_FN (BUILT_IN_LRINT):
CASE_FLT_FN (BUILT_IN_LLRINT):
return fold_fixed_mathfn (loc, fndecl, arg0);
CASE_INT_FN (BUILT_IN_FFS):
CASE_INT_FN (BUILT_IN_CLZ):
CASE_INT_FN (BUILT_IN_CTZ):
+ CASE_INT_FN (BUILT_IN_CLRSB):
CASE_INT_FN (BUILT_IN_POPCOUNT):
CASE_INT_FN (BUILT_IN_PARITY):
return fold_builtin_bitop (fndecl, arg0);
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
- s1, size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
return fold_convert_loc (loc, type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
- s1, size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
- s1, size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
return fold_convert_loc (loc, type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
- s1, size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
return fold_convert_loc (loc, type, tem);
}
newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
/* Create (dst p+ strlen (dst)). */
- newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
- TREE_TYPE (dst), dst, newdst);
+ newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
newdst = builtin_save_expr (newdst);
call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
return NULL_RTX;
else
{
- unsigned int dest_align
- = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
+ unsigned int dest_align = get_pointer_alignment (dest);
/* If DEST is not a pointer type, call the normal function. */
if (dest_align == 0)
return expand_expr (dest, target, mode, EXPAND_NORMAL);
}
- expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ expr = fold_build_pointer_plus (dest, len);
return expand_expr (expr, target, mode, EXPAND_NORMAL);
}
/* __memmove_chk special case. */
if (fcode == BUILT_IN_MEMMOVE_CHK)
{
- unsigned int src_align
- = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
+ unsigned int src_align = get_pointer_alignment (src);
if (src_align == 0)
return NULL_RTX;
return;
if (SSA_VAR_P (arg))
- warning_at (tree_nonartificial_location (exp),
- 0, "%Kattempt to free a non-heap object %qD", exp, arg);
+ warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
+ "%Kattempt to free a non-heap object %qD", exp, arg);
else
- warning_at (tree_nonartificial_location (exp),
- 0, "%Kattempt to free a non-heap object", exp);
+ warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
+ "%Kattempt to free a non-heap object", exp);
}
/* Fold a call to __builtin_object_size with arguments PTR and OST,
dest, len);
else
{
- tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
- dest, len);
+ tree temp = fold_build_pointer_plus_loc (loc, dest, len);
return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
}
}
case BUILT_IN_OBJECT_SIZE:
case BUILT_IN_UNREACHABLE:
/* Simple register moves or loads from stack. */
+ case BUILT_IN_ASSUME_ALIGNED:
case BUILT_IN_RETURN_ADDRESS:
case BUILT_IN_EXTRACT_RETURN_ADDR:
case BUILT_IN_FROB_RETURN_ADDR: