+ return sub;
+}
+
+/* Return the canonical type to use for field F of ELEMENT,
+ unwidening bit-field types the same way
+ canonicalize_component_ref() would. */
+
+static tree
+canon_type_for_field (tree f, tree element)
+{
+ tree type = TREE_TYPE (f);
+
+ /* canonicalize_component_ref() unwidens some bit-field types (not
+ marked as DECL_BIT_FIELD in C++); do the same here, or we might
+ introduce type mismatches later on. */
+ if (INTEGRAL_TYPE_P (type) && DECL_MODE (f) != TYPE_MODE (type))
+ {
+ /* Build a throwaway COMPONENT_REF just so get_unwidened can
+ tell us the narrower type it would canonicalize to. */
+ tree ref = build3 (COMPONENT_REF, type, element, f, NULL_TREE);
+ type = TREE_TYPE (get_unwidened (ref, NULL_TREE));
+ }
+
+ return type;
+}
+
+/* Look for adjacent fields of ELT starting at F that we'd like to
+ scalarize as a single variable. Return the last field of the
+ group, or F itself if no group of two or more fields could be
+ formed, so the caller resumes scanning right after it. */
+
+static tree
+try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
+{
+ unsigned HOST_WIDE_INT align, oalign, word, bit, size, alchk;
+ enum machine_mode mode;
+ tree first = f, prev;
+ tree type, var;
+ struct sra_elt *block;
+
+ /* Only start a group at a scalar field whose position and size are
+ compile-time constants and that has not already been
+ instantiated on its own. */
+ if (!is_sra_scalar_type (TREE_TYPE (f))
+ || !host_integerp (DECL_FIELD_OFFSET (f), 1)
+ || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
+ || !host_integerp (DECL_SIZE (f), 1)
+ || lookup_element (elt, f, NULL, NO_INSERT))
+ return f;
+
+ /* Taking the alignment of elt->element is not enough, since it
+ might be just an array index or some such. We shouldn't need to
+ initialize align here, but our optimizers don't always realize
+ that, if we leave the loop without initializing align, we'll fail
+ the assertion right after the loop. */
+ align = (unsigned HOST_WIDE_INT)-1;
+ for (block = elt; block; block = block->parent)
+ if (DECL_P (block->element))
+ {
+ align = DECL_ALIGN (block->element);
+ break;
+ }
+ gcc_assert (block);
+
+ oalign = DECL_OFFSET_ALIGN (f);
+ word = tree_low_cst (DECL_FIELD_OFFSET (f), 1);
+ bit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+ size = tree_low_cst (DECL_SIZE (f), 1);
+
+ /* Don't assume more alignment than the field's offset alignment
+ guarantees. */
+ if (align > oalign)
+ align = oalign;
+
+ /* ALCHK keeps only the bits at or above the alignment boundary;
+ two bit positions lie within the same aligned word iff they
+ compare equal under this mask. */
+ alchk = align - 1;
+ alchk = ~alchk;
+
+ /* The starting field must not itself straddle an alignment
+ boundary. */
+ if ((bit & alchk) != ((bit + size - 1) & alchk))
+ return f;
+
+ /* Find adjacent fields in the same alignment word. */
+
+ for (prev = f, f = TREE_CHAIN (f);
+ f && TREE_CODE (f) == FIELD_DECL
+ && is_sra_scalar_type (TREE_TYPE (f))
+ && host_integerp (DECL_FIELD_OFFSET (f), 1)
+ && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
+ && host_integerp (DECL_SIZE (f), 1)
+ && (HOST_WIDE_INT)word == tree_low_cst (DECL_FIELD_OFFSET (f), 1)
+ && !lookup_element (elt, f, NULL, NO_INSERT);
+ prev = f, f = TREE_CHAIN (f))
+ {
+ unsigned HOST_WIDE_INT nbit, nsize;
+
+ nbit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+ nsize = tree_low_cst (DECL_SIZE (f), 1);
+
+ if (bit + size == nbit)
+ {
+ /* F immediately follows the current range; merge it unless
+ that would cross the alignment boundary. */
+ if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
+ break;
+ size += nsize;
+ }
+ else if (nbit + nsize == bit)
+ {
+ /* F immediately precedes the current range. */
+ if ((nbit & alchk) != ((bit + size - 1) & alchk))
+ break;
+ bit = nbit;
+ size += nsize;
+ }
+ else
+ break;
+ }
+
+ /* PREV is the last field actually merged into the group. */
+ f = prev;
+
+ if (f == first)
+ return f;
+
+ gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));
+
+ /* Try to widen the bit range so as to cover padding bits as well. */
+
+ if ((bit & ~alchk) || size != align)
+ {
+ /* Start from the whole alignment word [MBIT, MBIT + MSIZE) and
+ shrink it until it overlaps no field outside the group. */
+ unsigned HOST_WIDE_INT mbit = bit & alchk;
+ unsigned HOST_WIDE_INT msize = align;
+
+ for (f = TYPE_FIELDS (elt->type);
+ f; f = TREE_CHAIN (f))
+ {
+ unsigned HOST_WIDE_INT fword, fbit, fsize;
+
+ /* Skip the fields from first to prev. */
+ if (f == first)
+ {
+ f = prev;
+ continue;
+ }
+
+ if (!(TREE_CODE (f) == FIELD_DECL
+ && host_integerp (DECL_FIELD_OFFSET (f), 1)
+ && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
+ continue;
+
+ fword = tree_low_cst (DECL_FIELD_OFFSET (f), 1);
+ /* If we're past the selected word, we're fine. */
+ if (word < fword)
+ continue;
+
+ fbit = tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
+
+ if (host_integerp (DECL_SIZE (f), 1))
+ fsize = tree_low_cst (DECL_SIZE (f), 1);
+ else
+ /* Assume a variable-sized field takes up all space till
+ the end of the word. ??? Endianness issues? */
+ fsize = align - fbit;
+
+ if (fword < word)
+ {
+ /* A large field might start at a previous word and
+ extend into the selected word. Exclude those
+ bits. ??? Endianness issues? */
+ HOST_WIDE_INT diff = fbit + fsize
+ - (HOST_WIDE_INT)((word - fword) * BITS_PER_UNIT + mbit);
+
+ if (diff <= 0)
+ continue;
+
+ mbit += diff;
+ msize -= diff;
+ }
+ else
+ {
+ gcc_assert (fword == word);
+
+ /* Non-overlapping, great. */
+ if (fbit + fsize <= mbit
+ || mbit + msize <= fbit)
+ continue;
+
+ if (fbit <= mbit)
+ {
+ unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
+ mbit += diff;
+ msize -= diff;
+ }
+ else if (fbit > mbit)
+ msize -= (mbit + msize - fbit);
+ else
+ gcc_unreachable ();
+ }
+ }
+
+ bit = mbit;
+ size = msize;
+ }
+
+ /* Now we know the bit range we're interested in. Find the smallest
+ machine mode we can use to access it. */
+
+ for (mode = smallest_mode_for_size (size, MODE_INT);
+ ;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ gcc_assert (mode != VOIDmode);
+
+ /* Recompute the mask for the mode's word; from here on ALCHK
+ refers to the mode-sized word, not the alignment word. */
+ alchk = GET_MODE_PRECISION (mode) - 1;
+ alchk = ~alchk;
+
+ if ((bit & alchk) == ((bit + size - 1) & alchk))
+ break;
+ }
+
+ /* The chosen mode must not be wider than the alignment word. */
+ gcc_assert (~alchk < align);
+
+ /* Create the field group as a single variable. */
+
+ type = lang_hooks.types.type_for_mode (mode, 1);
+ gcc_assert (type);
+ /* The BIT_FIELD_REF with a null base only serves as the key and
+ type carrier for the new group element. */
+ var = build3 (BIT_FIELD_REF, type, NULL_TREE,
+ bitsize_int (size),
+ bitsize_int (word * BITS_PER_UNIT + bit));
+ BIT_FIELD_REF_UNSIGNED (var) = 1;
+
+ block = instantiate_missing_elements_1 (elt, var, type);
+ gcc_assert (block && block->is_scalar);
+
+ var = block->replacement;
+
+ if (((word * BITS_PER_UNIT + bit) & ~alchk)
+ || (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
+ {
+ /* The replacement variable is wider than the group's bit
+ range, so access the group through a BIT_FIELD_REF of it. */
+ block->replacement = build3 (BIT_FIELD_REF,
+ TREE_TYPE (block->element), var,
+ bitsize_int (size),
+ bitsize_int ((word * BITS_PER_UNIT
+ + bit) & ~alchk));
+ BIT_FIELD_REF_UNSIGNED (block->replacement) = 1;
+ TREE_NO_WARNING (block->replacement) = 1;
+ }
+
+ block->in_bitfld_block = 2;
+
+ /* Add the member fields to the group, such that they access
+ portions of the group variable. */
+
+ for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
+ {
+ tree field_type = canon_type_for_field (f, elt->element);
+ struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);
+
+ gcc_assert (fld && fld->is_scalar && !fld->replacement);
+
+ fld->replacement = build3 (BIT_FIELD_REF, field_type, var,
+ DECL_SIZE (f),
+ bitsize_int
+ ((word * BITS_PER_UNIT
+ + (TREE_INT_CST_LOW
+ (DECL_FIELD_BIT_OFFSET (f))))
+ & ~alchk));
+ BIT_FIELD_REF_UNSIGNED (fld->replacement) = TYPE_UNSIGNED (field_type);
+ /* Suppress spurious warnings on the member access just built.
+ This previously re-marked block->replacement (redundant after
+ the marking above), leaving FLD's own BIT_FIELD_REF unmarked. */
+ TREE_NO_WARNING (fld->replacement) = 1;
+ fld->in_bitfld_block = 1;
+ }
+
+ return prev;