&& flag_strict_volatile_bitfields > 0)
nmode = lmode;
else
- nmode = get_best_mode (lbitsize, lbitpos,
+ nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
const_p ? TYPE_ALIGN (TREE_TYPE (linner))
: MIN (TYPE_ALIGN (TREE_TYPE (linner)),
TYPE_ALIGN (TREE_TYPE (rinner))),
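
Note on the two new zero arguments: as far as I can tell they are the bit-region
bounds added to get_best_mode alongside the bit-field memory-model work, with
0, 0 meaning no restriction on the region the generated access may touch. A
rough sketch of the widened interface (parameter names are my assumption, not
copied from the tree):

    /* Hypothetical sketch of the new signature; the two added
       parameters bound the bit region the access must stay inside,
       with 0/0 meaning unconstrained.  */
    enum machine_mode
    get_best_mode (int bitsize, int bitpos,
                   unsigned HOST_WIDE_INT bitregion_start,  /* new */
                   unsigned HOST_WIDE_INT bitregion_end,    /* new */
                   unsigned int align,
                   enum machine_mode largest_mode, int volatilep);
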
{
if (value != 0 && !TREE_OVERFLOW (value))
{
- low = fold_convert_loc (loc, sizetype, low);
- low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
+ low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
return build_range_check (loc, type,
- fold_build2_loc (loc, POINTER_PLUS_EXPR,
- etype, exp, low),
+ fold_build_pointer_plus_loc (loc, exp, low),
1, build_int_cst (etype, 0), value);
}
return 0;
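
The explicit conversion of low to sizetype can go because the new helper does
it internally: my understanding is that fold_build_pointer_plus_loc is a thin
wrapper that converts the offset to sizetype itself and builds the
POINTER_PLUS_EXPR in the pointer's own type, roughly:

    /* Sketch of what the wrapper presumably expands to; not copied
       from tree.h.  */
    #define fold_build_pointer_plus_loc(loc, ptr, off) \
      fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, \
                       fold_convert_loc (loc, sizetype, off))

That also explains the NEGATE_EXPR change: the negation now happens in
TREE_TYPE (low), and the single sizetype conversion is left to the wrapper.
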
to be relative to a field of that size. */
first_bit = MIN (ll_bitpos, rl_bitpos);
end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
- lnmode = get_best_mode (end_bit - first_bit, first_bit,
+ lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
volatilep);
if (lnmode == VOIDmode)
first_bit = MIN (lr_bitpos, rr_bitpos);
end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
- rnmode = get_best_mode (end_bit - first_bit, first_bit,
+ rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
volatilep);
if (rnmode == VOIDmode)
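
Both mode queries here follow the same pattern as the first hunk: ask for the
widest usable mode covering [first_bit, end_bit) with no bit-region
restriction, and give up when none exists. Schematically (a sketch of the
caller pattern, not literal code from the patch):

    /* Ask for the best mode over the combined bit range; bail out
       when the two bit-field accesses cannot be covered by one.  */
    nmode = get_best_mode (end_bit - first_bit, first_bit,
                           0, 0,               /* no region limits */
                           align, word_mode, volatilep);
    if (nmode == VOIDmode)
      return 0;
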
int11 = TREE_INT_CST_LOW (arg11);
/* Move min of absolute values to int11. */
- if ((int01 >= 0 ? int01 : -int01)
- < (int11 >= 0 ? int11 : -int11))
+ if (absu_hwi (int01) < absu_hwi (int11))
{
tmp = int01, int01 = int11, int11 = tmp;
alt0 = arg00, arg00 = arg10, arg10 = alt0;
else
maybe_same = arg11;
- if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
+ if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
/* The remainder should not be a constant, otherwise we
end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
increased the number of multiplications necessary. */
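
absu_hwi is the right tool here for two reasons: the removed open-coded
negation (-int01) invokes undefined behaviour when int01 is the most negative
HOST_WIDE_INT, and plain abs () takes an int, silently truncating a 64-bit
HOST_WIDE_INT. Assuming the usual definition, the helper amounts to:

    /* Minimal stand-in for absu_hwi, assuming the usual definition:
       the absolute value returned as *unsigned* HOST_WIDE_INT, so
       negating the minimum value is well defined (it is done in
       unsigned arithmetic).  */
    static inline unsigned HOST_WIDE_INT
    absu_hwi (HOST_WIDE_INT x)
    {
      return x >= 0 ? (unsigned HOST_WIDE_INT) x
                    : -(unsigned HOST_WIDE_INT) x;
    }
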
non-integral type.
Do not fold the result as that would not simplify further, also
folding again results in recursions. */
- if (INTEGRAL_TYPE_P (type))
+ if (TREE_CODE (type) == BOOLEAN_TYPE)
return build2_loc (loc, TREE_CODE (op0), type,
TREE_OPERAND (op0, 0),
TREE_OPERAND (op0, 1));
- else
+ else if (!INTEGRAL_TYPE_P (type))
return build3_loc (loc, COND_EXPR, type, op0,
fold_convert (type, boolean_true_node),
fold_convert (type, boolean_false_node));
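
The tightened condition splits three ways instead of two: a BOOLEAN_TYPE
result can reuse the comparison code directly; any other integral type now
falls through (rebuilding the raw comparison in, say, a plain int or enum type
would produce a mis-typed tree); only genuinely non-integral types get the
explicit COND_EXPR. At the source level, roughly:

    /* Hypothetical source-level view of the two rebuilt forms:
       _Bool b = (x < y);    -> comparison rebuilt directly in _Bool
       double d = (x < y);   -> (x < y) ? (double) 1 : (double) 0   */
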
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
- return fold_build2_loc (loc,
- TREE_CODE (arg0), type,
- fold_convert_loc (loc, type, arg00),
- fold_convert_loc (loc, sizetype, arg01));
+ return fold_build_pointer_plus_loc
+ (loc, fold_convert_loc (loc, type, arg00), arg01);
}
/* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
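
Same simplification as in build_range_check above: since the helper folds the
offset conversion into itself, the open-coded POINTER_PLUS_EXPR plus the
explicit sizetype conversion of arg01 collapse into one call. The fold being
rebuilt, sketched at the source level (a hypothetical example):

    /* A conversion of a pointer addition is pushed inside it:
         (char *) (p p+ n)   becomes   ((char *) p) p+ n           */
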
cst0, build_int_cst (TREE_TYPE (cst0), 1));
if (code0 != INTEGER_CST)
t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
+ t = fold_convert (TREE_TYPE (arg1), t);
/* If swapping might yield to a more canonical form, do so. */
if (swap)
return fold_build2_loc (loc, cmp_code, type, variable1, const2);
}
- tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
+ tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
if (tem)
return tem;
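
The added fold_convert reads like a type-correctness fix: after sign-nops are
stripped, arg0 and arg1 may have compatible but distinct types (e.g. int vs.
unsigned int), so the rebuilt bound t has to be brought into TREE_TYPE (arg1)
before the two are glued back into one comparison; matching that, the caller
now hands the canonicalization the stripped arg0/arg1 rather than the
unstripped op0/op1. This reading is my inference from the hunks, sketched:

    /* Hypothetical types: with arg0 of type int and arg1 of type
       unsigned int after STRIP_SIGN_NOPS, the rebuilt bound must be
       converted before recombining:
         t = fold_convert (TREE_TYPE (arg1), t);
         return fold_build2_loc (loc, cmp_code, type, t, arg1);     */
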
inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
arg01, fold_convert_loc (loc, sizetype, arg1));
return fold_convert_loc (loc, type,
- fold_build2_loc (loc, POINTER_PLUS_EXPR,
- TREE_TYPE (arg00),
- arg00, inner));
+ fold_build_pointer_plus_loc (loc,
+ arg00, inner));
}
/* PTR_CST +p CST -> CST1 */
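
For reference, the accumulated-offset fold this hunk touches, seen from the
source level (a hypothetical example, not taken from the patch):

    /* Both offsets are first summed in sizetype, leaving a single
       pointer addition: (p p+ o1) + o2  ->  p p+ (o1 + o2).  */
    char *
    add_offsets (char *p, unsigned long o1, unsigned long o2)
    {
      return (p + o1) + o2;   /* folds to one POINTER_PLUS_EXPR */
    }
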
/* Reassociate (plus (plus (mult) (foo)) (mult)) as
(plus (plus (mult) (mult)) (foo)) so that we can
take advantage of the factoring cases below. */
- if (((TREE_CODE (arg0) == PLUS_EXPR
- || TREE_CODE (arg0) == MINUS_EXPR)
- && TREE_CODE (arg1) == MULT_EXPR)
- || ((TREE_CODE (arg1) == PLUS_EXPR
- || TREE_CODE (arg1) == MINUS_EXPR)
- && TREE_CODE (arg0) == MULT_EXPR))
+ if (TYPE_OVERFLOW_WRAPS (type)
+ && (((TREE_CODE (arg0) == PLUS_EXPR
+ || TREE_CODE (arg0) == MINUS_EXPR)
+ && TREE_CODE (arg1) == MULT_EXPR)
+ || ((TREE_CODE (arg1) == PLUS_EXPR
+ || TREE_CODE (arg1) == MINUS_EXPR)
+ && TREE_CODE (arg0) == MULT_EXPR)))
{
tree parg0, parg1, parg, marg;
enum tree_code pcode;
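
The new TYPE_OVERFLOW_WRAPS guard is the interesting part: reassociating
signed addition can manufacture an intermediate overflow that the original
evaluation order never had, which is undefined behaviour unless the type
wraps. A hypothetical demonstration:

    /* With a == 1 << 28 and b == -(1 << 30): as written, a * 4 + b
       is 0 and the final sum is 1 << 30, all in range.  Reassociated
       to (a * 4 + a * 4) + b, the intermediate a * 4 + a * 4 is
       1 << 31, a signed overflow in 'int'.  Hence the fold is now
       restricted to wrapping types.  */
    int
    reassoc_hazard (int a, int b)
    {
      return (a * 4 + b) + a * 4;
    }
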
CASE_FLT_FN (BUILT_IN_FLOOR):
CASE_FLT_FN (BUILT_IN_FMOD):
CASE_FLT_FN (BUILT_IN_FREXP):
+ CASE_FLT_FN (BUILT_IN_ICEIL):
+ CASE_FLT_FN (BUILT_IN_IFLOOR):
+ CASE_FLT_FN (BUILT_IN_IRINT):
+ CASE_FLT_FN (BUILT_IN_IROUND):
CASE_FLT_FN (BUILT_IN_LCEIL):
CASE_FLT_FN (BUILT_IN_LDEXP):
CASE_FLT_FN (BUILT_IN_LFLOOR):
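
The four new entries appear to be the int-returning counterparts of
lceil/lfloor/lrint/lround (GCC's __builtin_iceil and friends), slotted into
the list in its existing alphabetical order. Assuming they mirror the long
variants:

    /* Hypothetical usage; __builtin_iceil is a GCC extension that
       behaves like (int) ceil (x) but can be expanded inline.  */
    int
    round_up (double x)
    {
      return __builtin_iceil (x);
    }
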