+/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
+ starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
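+
+/* For instance (purely illustrative, not taken from a real caller),
+ make_bit_field_ref (x, type, 8, 16, 1) builds a tree equivalent to
+ BIT_FIELD_REF <x, 8, 16>: an 8-bit unsigned reference at bit offset 16
+ within X, converted to TYPE if the reference type had to differ. When
+ BITPOS is 0 and BITSIZE covers the whole of an integral or pointer
+ INNER, the function folds to a plain conversion instead. */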
+
+static tree
+make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
+ HOST_WIDE_INT bitpos, int unsignedp)
+{
+ tree result, bftype;
+
+ if (bitpos == 0)
+ {
+ tree size = TYPE_SIZE (TREE_TYPE (inner));
+ if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
+ || POINTER_TYPE_P (TREE_TYPE (inner)))
+ && host_integerp (size, 0)
+ && tree_low_cst (size, 0) == bitsize)
+ return fold_convert (type, inner);
+ }
+
+ bftype = type;
+ if (TYPE_PRECISION (bftype) != bitsize
+ || TYPE_UNSIGNED (bftype) == !unsignedp)
+ bftype = build_nonstandard_integer_type (bitsize, 0);
+
+ result = build3 (BIT_FIELD_REF, bftype, inner,
+ size_int (bitsize), bitsize_int (bitpos));
+
+ if (bftype != type)
+ result = fold_convert (type, result);
+
+ return result;
+}
+
+/* Optimize a bit-field compare.
+
+ There are two cases: the first is a compare against a constant, and the
+ second is a comparison of two items where the fields are at the same
+ bit position relative to the start of a chunk (byte, halfword, word)
+ large enough to contain it. In these cases we can avoid the shift
+ implicit in bitfield extractions.
+
+ For constants, we emit a compare of the shifted constant with the
+ BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
+ compared. For two fields at the same position, we AND each field's chunk
+ with a similar mask and compare the results of the ANDs.
+
+ CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
+ COMPARE_TYPE is the type of the comparison, and LHS and RHS
+ are the left and right operands of the comparison, respectively.
+
+ If the optimization described above can be done, we return the resulting
+ tree. Otherwise we return zero. */
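+
+/* As an illustrative sketch (the struct and values here are hypothetical),
+ given
+
+     struct s { unsigned int f : 3; } x;
+
+ the test "x.f == 2" would normally load a chunk, then shift and mask to
+ extract F before comparing. This function instead emits the equivalent of
+ (chunk & mask) == (2 << shift), avoiding the extraction shift. */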
+
+static tree
+optimize_bit_field_compare (enum tree_code code, tree compare_type,
+ tree lhs, tree rhs)
+{
+ HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
+ tree type = TREE_TYPE (lhs);
+ tree signed_type, unsigned_type;
+ int const_p = TREE_CODE (rhs) == INTEGER_CST;
+ enum machine_mode lmode, rmode, nmode;
+ int lunsignedp, runsignedp;
+ int lvolatilep = 0, rvolatilep = 0;
+ tree linner, rinner = NULL_TREE;
+ tree mask;
+ tree offset;
+
+ /* Get all the information about the extractions being done. If the bit size
+ is the same as the size of the underlying object, we aren't doing an
+ extraction at all and so can do nothing. We also don't want to
+ do anything if the inner expression is a PLACEHOLDER_EXPR since we
+ then will no longer be able to replace it. */
+ linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
+ &lunsignedp, &lvolatilep, false);
+ if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
+ || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
+ return 0;
+
+ if (!const_p)
+ {
+ /* If this is not a constant, we can only do something if bit positions,
+ sizes, and signedness are the same. */
+ rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
+ &runsignedp, &rvolatilep, false);
+
+ if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
+ || lunsignedp != runsignedp || offset != 0
+ || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
+ return 0;
+ }
+
+ /* See if we can find a mode to refer to this field. We should be able to,
+ but fail if we can't. */
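+ /* For example, on a typical 8-bit-byte target a 3-bit field at bit
+ offset 5 can be referred to in QImode; the numbers are illustrative
+ and the chosen mode is target dependent. */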
+ nmode = get_best_mode (lbitsize, lbitpos,
+ const_p ? TYPE_ALIGN (TREE_TYPE (linner))
+ : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
+ TYPE_ALIGN (TREE_TYPE (rinner))),
+ word_mode, lvolatilep || rvolatilep);
+ if (nmode == VOIDmode)
+ return 0;
+
+ /* Get signed and unsigned types with the precision of this mode for the
+ shifts below. */
+ signed_type = lang_hooks.types.type_for_mode (nmode, 0);
+ unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
+
+ /* Compute the bit position and size for the new reference and our offset
+ within it. If the new reference is the same size as the original, we
+ won't optimize anything, so return zero. */
+ nbitsize = GET_MODE_BITSIZE (nmode);
+ nbitpos = lbitpos & ~ (nbitsize - 1);
+ lbitpos -= nbitpos;
+ if (nbitsize == lbitsize)
+ return 0;
+
+ if (BYTES_BIG_ENDIAN)
+ lbitpos = nbitsize - lbitsize - lbitpos;
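+
+ /* For example (hypothetical numbers), if the chosen mode is 8 bits wide
+ and the field occupied bits 13..15, then nbitpos becomes 8 and lbitpos
+ becomes 5: the field sits in bits 5..7 of the new reference. On a
+ big-endian target the adjustment above instead yields lbitpos 0. */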
+
+ /* Make the mask to be used against the extracted field. */
+ mask = build_int_cst_type (unsigned_type, -1);
+ mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
+ mask = const_binop (RSHIFT_EXPR, mask,
+ size_int (nbitsize - lbitsize - lbitpos), 0);
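+
+ /* Continuing the example above: with nbitsize 8, lbitsize 3 and
+ lbitpos 5, the mask is (~0 << 5) >> 0 in an 8-bit type, i.e. 0xe0,
+ selecting exactly bits 5..7. */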
+
+ if (! const_p)
+ /* If not comparing with constant, just rework the comparison
+ and return. */
+ return fold_build2 (code, compare_type,
+ fold_build2 (BIT_AND_EXPR, unsigned_type,
+ make_bit_field_ref (linner,
+ unsigned_type,
+ nbitsize, nbitpos,
+ 1),
+ mask),
+ fold_build2 (BIT_AND_EXPR, unsigned_type,
+ make_bit_field_ref (rinner,
+ unsigned_type,
+ nbitsize, nbitpos,
+ 1),
+ mask));
+
+ /* Otherwise, we are handling the constant case. See if the constant is too
+ big for the field. Warn and return a constant tree for the known result
+ (false for EQ_EXPR, true for NE_EXPR) if so. We do this not only for its
+ own sake, but to avoid having to test for this error case below. If we
+ didn't, we might generate wrong code.
+
+ For unsigned fields, the constant shifted right by the field length should
+ be all zero. For signed fields, the high-order bits should agree with
+ the sign bit. */
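+ /* For instance (hypothetical), a 3-bit unsigned field can never equal 9:
+ 9 >> 3 is 1, not 0, so the checks below warn and fold the comparison to
+ its known result. */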
+
+ if (lunsignedp)
+ {
+ if (! integer_zerop (const_binop (RSHIFT_EXPR,
+ fold_convert (unsigned_type, rhs),
+ size_int (lbitsize), 0)))
+ {
+ warning (0, "comparison is always %d due to width of bit-field",
+ code == NE_EXPR);
+ return constant_boolean_node (code == NE_EXPR, compare_type);
+ }
+ }
+ else
+ {
+ tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
+ size_int (lbitsize - 1), 0);
+ if (! integer_zerop (tem) && ! integer_all_onesp (tem))
+ {
+ warning (0, "comparison is always %d due to width of bit-field",
+ code == NE_EXPR);
+ return constant_boolean_node (code == NE_EXPR, compare_type);
+ }
+ }
+
+ /* Single-bit compares should always be against zero. */
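+ /* E.g. (illustrative) for a one-bit unsigned field F, "F == 1" is
+ rewritten here as "F != 0". */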
+ if (lbitsize == 1 && ! integer_zerop (rhs))
+ {
+ code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
+ rhs = build_int_cst (type, 0);
+ }
+
+ /* Make a new bitfield reference, shift the constant over the
+ appropriate number of bits and mask it with the computed mask
+ (in case this was a signed field), preserving any volatility of
+ the original reference. */
+ lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
+ if (lvolatilep)
+ {
+ TREE_SIDE_EFFECTS (lhs) = 1;
+ TREE_THIS_VOLATILE (lhs) = 1;
+ }
+
+ rhs = const_binop (BIT_AND_EXPR,
+ const_binop (LSHIFT_EXPR,
+ fold_convert (unsigned_type, rhs),
+ size_int (lbitpos), 0),
+ mask, 0);
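+
+ /* Continuing the example: with mask 0xe0, lbitpos 5 and constant 2, rhs
+ becomes (2 << 5) & 0xe0, i.e. 0x40, and the result compares
+ (chunk & 0xe0) against 0x40. */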
+
+ return build2 (code, compare_type,
+ build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
+ rhs);
+}
+\f