/* Fold a constant sub-tree into a single node for C-compiler
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
This file is part of GCC.
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
+#include "gimple.h"
/* Nonzero if we are folding constants inside an initializer; zero
otherwise. */
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
enum tree_code, tree, tree, tree);
-static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
if (hden < 0)
neg_double (lden, hden, &labs_den, &habs_den);
- /* If (2 * abs (lrem) >= abs (lden)) */
+ /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
labs_rem, habs_rem, <wice, &htwice);
< (unsigned HOST_WIDE_INT) htwice)
|| (((unsigned HOST_WIDE_INT) habs_den
== (unsigned HOST_WIDE_INT) htwice)
- && (labs_den < ltwice)))
+ && (labs_den <= ltwice)))
{
if (*hquo < 0)
/* quo = quo - 1; */
deferred code. */
void
-fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
+fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
const char *warnmsg;
location_t locus;
if (!issue || warnmsg == NULL)
return;
- if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
+ /* gimple_no_warning_p handles a NULL stmt itself, so the explicit
+ NULL check of the old tree-based code is no longer needed here. */
+ if (gimple_no_warning_p (stmt))
return;
/* Use the smallest code level when deciding to issue the
if (!issue_strict_overflow_warning (code))
return;
+ /* Prefer the statement's own location; fall back to the global
+ input_location when no statement was supplied. */
- if (stmt == NULL_TREE || !expr_has_location (stmt))
+ if (stmt == NULL)
locus = input_location;
else
- locus = expr_location (stmt);
+ locus = gimple_location (stmt);
warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}
void
fold_undefer_and_ignore_overflow_warnings (void)
{
+ /* Passing issue == false discards any deferred warning message
+ instead of emitting it. */
- fold_undefer_overflow_warnings (false, NULL_TREE, 0);
+ fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings. */
static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
- gcc_assert (!flag_wrapv && !flag_trapv);
if (fold_deferring_overflow_warnings > 0)
{
if (fold_deferred_overflow_warning == NULL
flag_rounding_math is set, or if GCC's software emulation
is unable to accurately represent the result. */
if ((flag_rounding_math
- || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
- && !flag_unsafe_math_optimizations))
+ || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
&& (inexact || !real_identical (&result, &value)))
return NULL_TREE;
if (TREE_TYPE (arg1) == type)
return arg1;
- if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
+ || TREE_CODE (type) == OFFSET_TYPE)
{
if (TREE_CODE (arg1) == INTEGER_CST)
return fold_convert_const_int_from_int (type, arg1);
case VOID_TYPE:
tem = fold_ignored_result (arg);
- if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
+ if (TREE_CODE (tem) == MODIFY_EXPR)
return tem;
return fold_build1 (NOP_EXPR, type, tem);
case WITH_CLEANUP_EXPR:
case COMPOUND_EXPR:
case MODIFY_EXPR:
- case GIMPLE_MODIFY_STMT:
case TARGET_EXPR:
case COND_EXPR:
case BIND_EXPR:
return fold_build2 (compcode_to_comparison (compcode),
truth_type, ll_arg, lr_arg);
}
-
-/* Return nonzero if CODE is a tree code that represents a truth value. */
-
-static int
-truth_value_p (enum tree_code code)
-{
- return (TREE_CODE_CLASS (code) == tcc_comparison
- || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
- || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
- || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
-}
\f
/* Return nonzero if two operands (typically of the same tree node)
are necessarily equal. If either argument has side-effects this
if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
return 0;
+ /* Check equality of integer constants before bailing out due to
+ precision differences. */
+ if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+ return tree_int_cst_equal (arg0, arg1);
+
/* If both types don't have the same signedness, then we can't consider
them equal. We must check this before the STRIP_NOPS calls
- because they may change the signedness of the arguments. */
- if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
+ because they may change the signedness of the arguments. As pointers
+ strictly don't have a signedness, require either two pointers or
+ two non-pointers as well. */
+ if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
+ || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
return 0;
/* If both types don't have the same precision, then it is not safe
/* Two conversions are equal only if signedness and modes match. */
switch (TREE_CODE (arg0))
{
- case NOP_EXPR:
- case CONVERT_EXPR:
+ CASE_CONVERT:
case FIX_TRUNC_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg0))
!= TYPE_UNSIGNED (TREE_TYPE (arg1)))
&& operand_equal_p (TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg1, 0), flags));
+ case COND_EXPR:
+ return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
+
default:
return 0;
}
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
enum tree_code code = TREE_CODE (arg);
- enum tree_code_class class = TREE_CODE_CLASS (code);
+ enum tree_code_class tclass = TREE_CODE_CLASS (code);
/* We can handle some of the tcc_expression cases here. */
- if (class == tcc_expression && code == TRUTH_NOT_EXPR)
- class = tcc_unary;
- else if (class == tcc_expression
+ if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
+ tclass = tcc_unary;
+ else if (tclass == tcc_expression
&& (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
|| code == COMPOUND_EXPR))
- class = tcc_binary;
+ tclass = tcc_binary;
- else if (class == tcc_expression && code == SAVE_EXPR
+ else if (tclass == tcc_expression && code == SAVE_EXPR
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
{
/* If we've already found a CVAL1 or CVAL2, this expression is
if (*cval1 || *cval2)
return 0;
- class = tcc_unary;
+ tclass = tcc_unary;
*save_p = 1;
}
- switch (class)
+ switch (tclass)
{
case tcc_unary:
return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
{
tree type = TREE_TYPE (arg);
enum tree_code code = TREE_CODE (arg);
- enum tree_code_class class = TREE_CODE_CLASS (code);
+ enum tree_code_class tclass = TREE_CODE_CLASS (code);
/* We can handle some of the tcc_expression cases here. */
- if (class == tcc_expression && code == TRUTH_NOT_EXPR)
- class = tcc_unary;
- else if (class == tcc_expression
+ if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
+ tclass = tcc_unary;
+ else if (tclass == tcc_expression
&& (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
- class = tcc_binary;
+ tclass = tcc_binary;
- switch (class)
+ switch (tclass)
{
case tcc_unary:
return fold_build1 (code, type,
else
return 0;
+ common = fold_convert (type, common);
+ left = fold_convert (type, left);
+ right = fold_convert (type, right);
return fold_build2 (TREE_CODE (arg0), type, common,
fold_build2 (code, type, left, right));
}
/* We are interested in the bare arrangement of bits, so strip everything
that doesn't affect the machine mode. However, record the type of the
outermost expression if it may matter below. */
- if (TREE_CODE (exp) == NOP_EXPR
- || TREE_CODE (exp) == CONVERT_EXPR
+ if (CONVERT_EXPR_P (exp)
|| TREE_CODE (exp) == NON_LVALUE_EXPR)
outer_type = TREE_TYPE (exp);
STRIP_NOPS (exp);
exp = arg0;
continue;
- case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
+ CASE_CONVERT: case NON_LVALUE_EXPR:
if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
break;
{
if (TYPE_UNSIGNED (etype))
{
- etype = signed_type_for (etype);
+ tree signed_etype = signed_type_for (etype);
+ if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
+ etype
+ = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
+ else
+ etype = signed_etype;
exp = fold_convert (etype, exp);
}
return fold_build2 (GT_EXPR, type, exp,
\f
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
-#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
+#define LOGICAL_OP_NON_SHORT_CIRCUIT \
+ (BRANCH_COST (optimize_function_for_speed_p (cfun), \
+ false) >= 2)
#endif
/* EXP is some logical combination of boolean tests. See if we can
that can be merged. Avoid doing this if the RHS is a floating-point
comparison since those can trap. */
- if (BRANCH_COST >= 2
+ if (BRANCH_COST (optimize_function_for_speed_p (cfun),
+ false) >= 2
&& ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
&& simple_operand_p (rl_arg)
&& simple_operand_p (rr_arg))
fold_convert (ctype, c), 0);
break;
- case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
+ CASE_CONVERT: case NON_LVALUE_EXPR:
/* If op0 is an expression ... */
if ((COMPARISON_CLASS_P (op0)
|| UNARY_CLASS_P (op0)
|| BINARY_CLASS_P (op0)
|| VL_EXP_CLASS_P (op0)
|| EXPRESSION_CLASS_P (op0))
- /* ... and is unsigned, and its type is smaller than ctype,
- then we cannot pass through as widening. */
- && ((TYPE_UNSIGNED (TREE_TYPE (op0))
+ /* ... and has wrapping overflow, and its type is smaller
+ than ctype, then we cannot pass through as widening. */
+ && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
&& ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
&& TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
- && (GET_MODE_SIZE (TYPE_MODE (ctype))
- > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
+ && (TYPE_PRECISION (ctype)
+ > TYPE_PRECISION (TREE_TYPE (op0))))
/* ... or this is a truncation (t is narrower than op0),
then we cannot pass through this narrowing. */
- || (GET_MODE_SIZE (TYPE_MODE (type))
- < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
+ || (TYPE_PRECISION (type)
+ < TYPE_PRECISION (TREE_TYPE (op0)))
/* ... or signedness changes for division or modulus,
then we cannot pass through this conversion. */
|| (code != MULT_EXPR
(C * 8) % 4 since we know that's zero. */
if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
|| code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
+ /* If the multiplication can overflow we cannot optimize this.
+ ??? Until we can properly mark individual operations as
+ not overflowing we need to treat sizetype special here as
+ stor-layout relies on this optimization to make
+ DECL_FIELD_BIT_OFFSET always a constant. */
+ && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
+ || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
&& TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
&& integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
- return omit_one_operand (type, integer_zero_node, op0);
+ {
+ *strict_overflow_p = true;
+ return omit_one_operand (type, integer_zero_node, op0);
+ }
/* ... fall through ... */
/* If these are the same operation types, we can associate them
assuming no overflow. */
if (tcode == code
- && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
- fold_convert (ctype, c), 0))
+ && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
+ fold_convert (ctype, c), 1))
+ && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
+ TREE_INT_CST_HIGH (t1),
+ (TYPE_UNSIGNED (ctype)
+ && tcode != MULT_EXPR) ? -1 : 1,
+ TREE_OVERFLOW (t1)))
&& !TREE_OVERFLOW (t1))
return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
if (TREE_CONSTANT (arg0))
return 1;
- if (optimize_size)
+ if (optimize_function_for_size_p (cfun))
return 0;
if (reorder && flag_evaluation_order
if ((code == EQ_EXPR || code == NE_EXPR
|| TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
&& (TREE_TYPE (arg1_unw) == shorter_type
- || (TYPE_PRECISION (shorter_type)
- >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
+ || ((TYPE_PRECISION (shorter_type)
+ >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
+ && (TYPE_UNSIGNED (shorter_type)
+ == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
|| (TREE_CODE (arg1_unw) == INTEGER_CST
&& (TREE_CODE (shorter_type) == INTEGER_TYPE
|| TREE_CODE (shorter_type) == BOOLEAN_TYPE)
tree arg0_inner;
tree inner_type, outer_type;
- if (TREE_CODE (arg0) != NOP_EXPR
- && TREE_CODE (arg0) != CONVERT_EXPR)
+ if (!CONVERT_EXPR_P (arg0))
return NULL_TREE;
outer_type = TREE_TYPE (arg0);
return NULL_TREE;
if (TREE_CODE (arg1) != INTEGER_CST
- && !((TREE_CODE (arg1) == NOP_EXPR
- || TREE_CODE (arg1) == CONVERT_EXPR)
+ && !(CONVERT_EXPR_P (arg1)
&& TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
return NULL_TREE;
- if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
+ if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
+ || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
&& code != NE_EXPR
&& code != EQ_EXPR)
return NULL_TREE;
else
{
/* Try if delta is a multiple of step. */
- tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
+ tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
if (! tmp)
continue;
delta = tmp;
if (total_bytes > len)
return 0;
- words = 32 / UNITS_PER_WORD;
+ words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
}
+/* Subroutine of native_encode_expr. Encode the STRING_CST
+ specified by EXPR into the buffer PTR of length LEN bytes.
+ Return the number of bytes placed in the buffer, or zero
+ upon failure. */
+
+static int
+native_encode_string (const_tree expr, unsigned char *ptr, int len)
+{
+ tree type = TREE_TYPE (expr);
+ HOST_WIDE_INT total_bytes;
+
+ /* Only handle arrays of single-byte integer elements whose total
+ size is a host-representable constant; anything else cannot be
+ encoded byte-for-byte here. */
+ if (TREE_CODE (type) != ARRAY_TYPE
+ || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
+ || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
+ || !host_integerp (TYPE_SIZE_UNIT (type), 0))
+ return 0;
+ total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
+ if (total_bytes > len)
+ return 0;
+ if (TREE_STRING_LENGTH (expr) < total_bytes)
+ {
+ /* The array is larger than the stored string; zero-fill the
+ tail, matching the zero padding of a static initializer. */
+ memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
+ memset (ptr + TREE_STRING_LENGTH (expr), 0,
+ total_bytes - TREE_STRING_LENGTH (expr));
+ }
+ else
+ memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
+ return total_bytes;
+}
+
+
+
+
/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
buffer PTR of length LEN bytes. Return the number of bytes
case VECTOR_CST:
return native_encode_vector (expr, ptr, len);
+ case STRING_CST:
+ return native_encode_string (expr, ptr, len);
+
default:
return 0;
}
total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
if (total_bytes > len || total_bytes > 24)
return NULL_TREE;
- words = 32 / UNITS_PER_WORD;
+ words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
memset (tmp, 0, sizeof (tmp));
for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
arg0 = op0;
if (arg0)
{
- if (code == NOP_EXPR || code == CONVERT_EXPR
+ if (CONVERT_EXPR_CODE_P (code)
|| code == FLOAT_EXPR || code == ABS_EXPR)
{
/* Don't use STRIP_NOPS, because signedness of argument type
so we don't get into an infinite recursion loop taking the
conversion out and then back in. */
- if ((code == NOP_EXPR || code == CONVERT_EXPR
+ if ((CONVERT_EXPR_CODE_P (code)
|| code == NON_LVALUE_EXPR)
&& TREE_CODE (tem) == COND_EXPR
&& TREE_CODE (TREE_OPERAND (tem, 1)) == code
return fold_convert (type, op0);
return NULL_TREE;
- case NOP_EXPR:
+ CASE_CONVERT:
case FLOAT_EXPR:
- case CONVERT_EXPR:
case FIX_TRUNC_EXPR:
if (TREE_TYPE (op0) == type)
return op0;
TREE_OPERAND (op0, 1));
/* Handle cases of two conversions in a row. */
- if (TREE_CODE (op0) == NOP_EXPR
- || TREE_CODE (op0) == CONVERT_EXPR)
+ if (CONVERT_EXPR_P (op0))
{
tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
tree inter_type = TREE_TYPE (op0);
return fold_convert (type, fold_addr_expr (base));
}
- if ((TREE_CODE (op0) == MODIFY_EXPR
- || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
- && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
+ if (TREE_CODE (op0) == MODIFY_EXPR
+ && TREE_CONSTANT (TREE_OPERAND (op0, 1))
/* Detect assigning a bitfield. */
- && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
+ && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
&& DECL_BIT_FIELD
- (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
+ (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
{
/* Don't leave an assignment inside a conversion
unless assigning a bitfield. */
- tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
+ tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
/* First do the assignment, then return converted constant. */
tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
TREE_NO_WARNING (tem) = 1;
/* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
constants (if x has signed type, the sign bit cannot be set
- in c). This folds extension into the BIT_AND_EXPR. */
- if (INTEGRAL_TYPE_P (type)
- && TREE_CODE (type) != BOOLEAN_TYPE
+ in c). This folds extension into the BIT_AND_EXPR.
+ ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
+ very likely don't have maximal range for their precision and this
+ transformation effectively doesn't preserve non-maximal ranges. */
+ if (TREE_CODE (type) == INTEGER_TYPE
&& TREE_CODE (op0) == BIT_AND_EXPR
- && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
+ && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
+ /* Not if the conversion is to the sub-type. */
+ && TREE_TYPE (type) != TREE_TYPE (op0))
{
tree and = op0;
tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
if (INTEGRAL_TYPE_P (type)
&& TREE_CODE (op0) == BIT_NOT_EXPR
&& INTEGRAL_TYPE_P (TREE_TYPE (op0))
- && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
- || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
+ && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
&& TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
{
tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
return fold_convert (type, op0);
/* Strip inner integral conversions that do not change the precision. */
- if ((TREE_CODE (op0) == NOP_EXPR
- || TREE_CODE (op0) == CONVERT_EXPR)
+ if (CONVERT_EXPR_P (op0)
&& (INTEGRAL_TYPE_P (TREE_TYPE (op0))
|| POINTER_TYPE_P (TREE_TYPE (op0)))
&& (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
int sgn0;
bool swap = false;
- /* Match A +- CST code arg1 and CST code arg1. */
- if (!(((code0 == MINUS_EXPR
- || code0 == PLUS_EXPR)
+ /* Match A +- CST code arg1 and CST code arg1. We can change the
+ first form only if overflow is undefined. */
+ if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
+ /* In principle pointers also have undefined overflow behavior,
+ but that causes problems elsewhere. */
+ && !POINTER_TYPE_P (TREE_TYPE (arg0))
+ && (code0 == MINUS_EXPR
+ || code0 == PLUS_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
|| code0 == INTEGER_CST))
return NULL_TREE;
*strict_overflow_p = true;
}
- /* Now build the constant reduced in magnitude. */
+ /* Now build the constant reduced in magnitude. But not if that
+ would produce one outside of its type's range. */
+ if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
+ && ((sgn0 == 1
+ && TYPE_MIN_VALUE (TREE_TYPE (cst0))
+ && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
+ || (sgn0 == -1
+ && TYPE_MAX_VALUE (TREE_TYPE (cst0))
+ && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
+ /* We cannot swap the comparison here as that would cause us to
+ endlessly recurse. */
+ return NULL_TREE;
+
t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
- cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
+ cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
if (code0 != INTEGER_CST)
t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
const char * const warnmsg = G_("assuming signed overflow does not occur "
"when reducing constant in comparison");
- /* In principle pointers also have undefined overflow behavior,
- but that causes problems elsewhere. */
- if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
- || POINTER_TYPE_P (TREE_TYPE (arg0)))
- return NULL_TREE;
-
/* Try canonicalization by simplifying arg0. */
strict_overflow_p = false;
t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
return t;
}
+/* Return whether BASE + OFFSET + BITPOS may wrap around the address
+ space. This is used to avoid issuing overflow warnings for
+ expressions like &p->x which can not wrap. Returns true (may
+ wrap) whenever it cannot prove otherwise. */
+
+static bool
+pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
+{
+ unsigned HOST_WIDE_INT offset_low, total_low;
+ HOST_WIDE_INT size, offset_high, total_high;
+
+ /* Only pointer bases carry the no-wrap guarantee we rely on. */
+ if (!POINTER_TYPE_P (TREE_TYPE (base)))
+ return true;
+
+ /* A negative bit offset points before the object; be conservative. */
+ if (bitpos < 0)
+ return true;
+
+ if (offset == NULL_TREE)
+ {
+ offset_low = 0;
+ offset_high = 0;
+ }
+ else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
+ return true;
+ else
+ {
+ offset_low = TREE_INT_CST_LOW (offset);
+ offset_high = TREE_INT_CST_HIGH (offset);
+ }
+
+ /* Add the byte offset derived from BITPOS; if the double-word
+ addition itself overflows, the total certainly may wrap. */
+ if (add_double_with_sign (offset_low, offset_high,
+ bitpos / BITS_PER_UNIT, 0,
+ &total_low, &total_high,
+ true))
+ return true;
+
+ if (total_high != 0)
+ return true;
+
+ size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
+ if (size <= 0)
+ return true;
+
+ /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
+ array. */
+ if (TREE_CODE (base) == ADDR_EXPR)
+ {
+ HOST_WIDE_INT base_size;
+
+ base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
+ if (base_size > 0 && size < base_size)
+ size = base_size;
+ }
+
+ /* Offsets up to and including SIZE are in bounds (a pointer one
+ past the end of the object is still valid). */
+ return total_low > (unsigned HOST_WIDE_INT) size;
+}
+
+
/* Subroutine of fold_binary. This routine performs all of the
transformations that are common to the equality/inequality
operators (EQ_EXPR and NE_EXPR) and the ordering operators
{
/* We can fold this expression to a constant if the non-constant
offset parts are equal. */
- if (offset0 == offset1
- || (offset0 && offset1
- && operand_equal_p (offset0, offset1, 0)))
+ if ((offset0 == offset1
+ || (offset0 && offset1
+ && operand_equal_p (offset0, offset1, 0)))
+ && (code == EQ_EXPR
+ || code == NE_EXPR
+ || POINTER_TYPE_OVERFLOW_UNDEFINED))
+
{
+ if (code != EQ_EXPR
+ && code != NE_EXPR
+ && bitpos0 != bitpos1
+ && (pointer_may_wrap_p (base0, offset0, bitpos0)
+ || pointer_may_wrap_p (base1, offset1, bitpos1)))
+ fold_overflow_warning (("assuming pointer wraparound does not "
+ "occur when comparing P +- C1 with "
+ "P +- C2"),
+ WARN_STRICT_OVERFLOW_CONDITIONAL);
+
switch (code)
{
case EQ_EXPR:
because pointer arithmetic is restricted to retain within an
object and overflow on pointer differences is undefined as of
6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
- else if (bitpos0 == bitpos1)
+ else if (bitpos0 == bitpos1
+ && ((code == EQ_EXPR || code == NE_EXPR)
+ || POINTER_TYPE_OVERFLOW_UNDEFINED))
{
tree signed_size_type_node;
signed_size_type_node = signed_type_for (size_type_node);
else
offset1 = fold_convert (signed_size_type_node, offset1);
+ if (code != EQ_EXPR
+ && code != NE_EXPR
+ && (pointer_may_wrap_p (base0, offset0, bitpos0)
+ || pointer_may_wrap_p (base1, offset1, bitpos1)))
+ fold_overflow_warning (("assuming pointer wraparound does not "
+ "occur when comparing P +- C1 with "
+ "P +- C2"),
+ WARN_STRICT_OVERFLOW_COMPARISON);
+
return fold_build2 (code, type, offset0, offset1);
}
}
}
if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
- && (TREE_CODE (arg0) == NOP_EXPR
- || TREE_CODE (arg0) == CONVERT_EXPR))
+ && CONVERT_EXPR_P (arg0))
{
/* If we are widening one operand of an integer comparison,
see if the other operand is similarly being widened. Perhaps we
tree t1 = NULL_TREE;
bool strict_overflow_p;
- gcc_assert ((IS_EXPR_CODE_CLASS (kind)
- || IS_GIMPLE_STMT_CODE_CLASS (kind))
+ gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 2
&& op0 != NULL_TREE
&& op1 != NULL_TREE);
safe for every expression, except for a comparison expression
because its signedness is derived from its operands. So, in
the latter case, only strip conversions that don't change the
- signedness.
+ signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
+ preserved.
Note that this is done as an internal manipulation within the
constant folder, in order to find the simplest representation
cases, the appropriate type conversions should be put back in
the tree that will get out of the constant folder. */
- if (kind == tcc_comparison)
+ if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
{
STRIP_SIGN_NOPS (arg0);
STRIP_SIGN_NOPS (arg1);
/* Convert ~A + 1 to -A. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& integer_onep (arg1))
- return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
+ return fold_build1 (NEGATE_EXPR, type,
+ fold_convert (type, TREE_OPERAND (arg0, 0)));
/* ~X + X is -1. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
/* With undefined overflow we can only associate constants
with one variable. */
- if ((POINTER_TYPE_P (type)
+ if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
|| (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
&& var0 && var1)
{
return fold_build2 (LSHIFT_EXPR, type, op1,
TREE_OPERAND (arg0, 1));
+ /* (A + A) * C -> A * 2 * C */
+ if (TREE_CODE (arg0) == PLUS_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg0, 1), 0))
+ return fold_build2 (MULT_EXPR, type,
+ omit_one_operand (type, TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg0, 1)),
+ fold_build2 (MULT_EXPR, type,
+ build_int_cst (type, 2) , arg1));
+
strict_overflow_p = false;
if (TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
}
/* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
- if (! optimize_size
+ if (optimize_function_for_speed_p (cfun)
&& operand_equal_p (arg0, arg1, 0))
{
tree powfn = mathfn_built_in (type, BUILT_IN_POW);
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
- tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 0), tmp1);
- tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 1), tmp1);
+ tree tmp1 = fold_convert (type, arg1);
+ tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
+ tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
+ tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
+ tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
return fold_convert (type,
- fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
- tmp2, tmp3));
+ fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
}
/* (X | Y) & Y is (X, Y). */
{
if (strict_overflow_p)
fold_overflow_warning (("assuming signed overflow does not occur "
- "when simplifying modulos"),
+ "when simplifying modulus"),
WARN_STRICT_OVERFLOW_MISC);
return fold_convert (type, tem);
}
}
}
- /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
- This transformation affects the cases which are handled in later
- optimizations involving comparisons with non-negative constants. */
- if (TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (arg0) != INTEGER_CST
- && tree_int_cst_sgn (arg1) > 0)
- {
- if (code == GE_EXPR)
- {
- arg1 = const_binop (MINUS_EXPR, arg1,
- build_int_cst (TREE_TYPE (arg1), 1), 0);
- return fold_build2 (GT_EXPR, type, arg0,
- fold_convert (TREE_TYPE (arg0), arg1));
- }
- if (code == LT_EXPR)
- {
- arg1 = const_binop (MINUS_EXPR, arg1,
- build_int_cst (TREE_TYPE (arg1), 1), 0);
- return fold_build2 (LE_EXPR, type, arg0,
- fold_convert (TREE_TYPE (arg0), arg1));
- }
- }
-
/* Comparisons with the highest or lowest possible integer of
the specified precision will have known values. */
{
unsigned int width = TYPE_PRECISION (arg1_type);
if (TREE_CODE (arg1) == INTEGER_CST
- && !TREE_OVERFLOW (arg1)
&& width <= 2 * HOST_BITS_PER_WIDE_INT
&& (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
{
if ((code == LT_EXPR || code == GE_EXPR)
&& TYPE_UNSIGNED (TREE_TYPE (arg0))
- && (TREE_CODE (arg1) == NOP_EXPR
- || TREE_CODE (arg1) == CONVERT_EXPR)
+ && CONVERT_EXPR_P (arg1)
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
&& integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
return
return fold_convert (type, integer_zero_node);
}
}
+
+ /* A bit-field-ref that referenced the full argument can be stripped. */
+ if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+ && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
+ && integer_zerop (op2))
+ return fold_convert (type, arg0);
+
return NULL_TREE;
default:
return expr;
}
- if (IS_EXPR_CODE_CLASS (kind)
- || IS_GIMPLE_STMT_CODE_CLASS (kind))
+ if (IS_EXPR_CODE_CLASS (kind))
{
tree type = TREE_TYPE (t);
tree op0, op1, op2;
/* zero_extend(x) * zero_extend(y) is non-negative if x and y are
both unsigned and their total bits is shorter than the result. */
if (TREE_CODE (type) == INTEGER_TYPE
- && TREE_CODE (op0) == NOP_EXPR
- && TREE_CODE (op1) == NOP_EXPR)
+ && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
+ && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
{
- tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
- tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
- if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
- && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
- return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
- < TYPE_PRECISION (type);
+ tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
+ ? TREE_TYPE (TREE_OPERAND (op0, 0))
+ : TREE_TYPE (op0);
+ tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
+ ? TREE_TYPE (TREE_OPERAND (op1, 0))
+ : TREE_TYPE (op1);
+
+ bool unsigned0 = TYPE_UNSIGNED (inner0);
+ bool unsigned1 = TYPE_UNSIGNED (inner1);
+
+ if (TREE_CODE (op0) == INTEGER_CST)
+ unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
+
+ if (TREE_CODE (op1) == INTEGER_CST)
+ unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
+
+ if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
+ && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
+ {
+ unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
+ ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
+ : TYPE_PRECISION (inner0);
+
+ unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
+ ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
+ : TYPE_PRECISION (inner1);
+
+ return precision0 + precision1 < TYPE_PRECISION (type);
+ }
}
return false;
switch (TREE_CODE (t))
{
- case SSA_NAME:
- /* Query VRP to see if it has recorded any information about
- the range of this object. */
- return ssa_name_nonnegative_p (t);
-
case INTEGER_CST:
return tree_int_cst_sgn (t) >= 0;
*STRICT_OVERFLOW_P. */
bool
-tree_call_nonnegative_warnv_p (enum tree_code code, tree type, tree fndecl,
+tree_call_nonnegative_warnv_p (tree type, tree fndecl,
tree arg0, tree arg1, bool *strict_overflow_p)
{
if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
CASE_FLT_FN (BUILT_IN_POWI):
/* True if the 1st argument is nonnegative or the second
argument is an even integer. */
- if (TREE_CODE (arg1) == INTEGER_CST)
- {
- tree arg1 = arg1;
- if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
- return true;
- }
+ if (TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_INT_CST_LOW (arg1) & 1) == 0)
+ return true;
return tree_expr_nonnegative_warnv_p (arg0,
strict_overflow_p);
default:
break;
}
- return tree_simple_nonnegative_warnv_p (code,
+ return tree_simple_nonnegative_warnv_p (CALL_EXPR,
type);
}
else
break;
}
- if ((TREE_CODE (t) == MODIFY_EXPR
- || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
- && GENERIC_TREE_OPERAND (t, 0) == temp)
- return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+ if (TREE_CODE (t) == MODIFY_EXPR
+ && TREE_OPERAND (t, 0) == temp)
+ return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
strict_overflow_p);
return false;
tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
- return tree_call_nonnegative_warnv_p (TREE_CODE (t),
- TREE_TYPE (t),
+ return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
get_callee_fndecl (t),
arg0,
arg1,
}
case COMPOUND_EXPR:
case MODIFY_EXPR:
- case GIMPLE_MODIFY_STMT:
- return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+ return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
strict_overflow_p);
case BIND_EXPR:
return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
bool sub_strict_overflow_p;
switch (TREE_CODE (t))
{
- case SSA_NAME:
- /* Query VRP to see if it has recorded any information about
- the range of this object. */
- return ssa_name_nonzero_p (t);
-
case INTEGER_CST:
return !integer_zerop (t);
case COMPOUND_EXPR:
case MODIFY_EXPR:
- case GIMPLE_MODIFY_STMT:
case BIND_EXPR:
- return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+ return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
strict_overflow_p);
case SAVE_EXPR:
with constant folding. (E.g. suppose the lower bound is 1,
and its mode is QI. Without the conversion,l (ARRAY
+(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
- +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
+ +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
if (! integer_zerop (low_bound))
index = size_diffop (index, fold_convert (sizetype, low_bound));