+/* Data structure that bitfield_overlaps_p fills in with information
+ about the element passed in and how much of it overlaps with the
+ bit-range passed to it. */
+
+struct bitfield_overlap_info
+{
+ /* The bit-length of an element. */
+ tree field_len;
+
+ /* The bit-position of the element in its parent. */
+ tree field_pos;
+
+ /* The number of bits of the element that overlap with the incoming
+ bit range. NULL when the overlap extends all the way to the end
+ of the field (callers fall back to field_len in that case). */
+ tree overlap_len;
+
+ /* The first bit of the element that overlaps with the incoming bit
+ range. NULL when the overlap starts at the beginning of the
+ field (callers fall back to bit position zero). */
+ tree overlap_pos;
+};
+
+/* Return true if a BIT_FIELD_REF<(FLD->parent), BLEN, BPOS>
+ expression (referenced as BF below) accesses any of the bits in FLD,
+ false if it doesn't. If DATA is non-null, its field_len and
+ field_pos are filled in such that BIT_FIELD_REF<(FLD->parent),
+ field_len, field_pos> (referenced as BFLD below) represents the
+ entire field FLD->element, and BIT_FIELD_REF<BFLD, overlap_len,
+ overlap_pos> represents the portion of the entire field that
+ overlaps with BF. DATA->overlap_len is left NULL when the overlap
+ runs to the end of the field, and DATA->overlap_pos is left NULL
+ when the overlap begins at the start of the field. */
+
+static bool
+bitfield_overlaps_p (tree blen, tree bpos, struct sra_elt *fld,
+ struct bitfield_overlap_info *data)
+{
+ tree flen, fpos;
+ bool ret;
+
+ /* Compute the bit length (FLEN) and the bit position within the
+ parent (FPOS) of FLD->element, according to how the element is
+ represented. */
+ if (TREE_CODE (fld->element) == FIELD_DECL)
+ {
+ /* A record field: byte offset scaled to bits, plus the residual
+ bit offset of the field within that position. */
+ flen = fold_convert (bitsizetype, DECL_SIZE (fld->element));
+ fpos = fold_convert (bitsizetype, DECL_FIELD_OFFSET (fld->element));
+ fpos = size_binop (MULT_EXPR, fpos, bitsize_int (BITS_PER_UNIT));
+ fpos = size_binop (PLUS_EXPR, fpos, DECL_FIELD_BIT_OFFSET (fld->element));
+ }
+ else if (TREE_CODE (fld->element) == BIT_FIELD_REF)
+ {
+ /* A bit-field reference carries its bit size and bit position
+ as operands 1 and 2, respectively. */
+ flen = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 1));
+ fpos = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 2));
+ }
+ else if (TREE_CODE (fld->element) == INTEGER_CST)
+ {
+ /* An array element: the index times the element type size gives
+ the bit position. */
+ flen = fold_convert (bitsizetype, TYPE_SIZE (fld->type));
+ fpos = fold_convert (bitsizetype, fld->element);
+ fpos = size_binop (MULT_EXPR, flen, fpos);
+ }
+ else
+ gcc_unreachable ();
+
+ /* All lengths and positions must be compile-time constants that
+ fit in an unsigned HOST_WIDE_INT; the arithmetic below relies on
+ that. */
+ gcc_assert (host_integerp (blen, 1)
+ && host_integerp (bpos, 1)
+ && host_integerp (flen, 1)
+ && host_integerp (fpos, 1));
+
+ /* The ranges overlap iff the field starts inside BF's range
+ (fpos >= bpos and fpos - bpos < blen) or BF starts inside the
+ field (bpos >= fpos and bpos - fpos < flen). Each subtraction
+ is guarded so it is only done on the non-negative side. */
+ ret = ((!tree_int_cst_lt (fpos, bpos)
+ && tree_int_cst_lt (size_binop (MINUS_EXPR, fpos, bpos),
+ blen))
+ || (!tree_int_cst_lt (bpos, fpos)
+ && tree_int_cst_lt (size_binop (MINUS_EXPR, bpos, fpos),
+ flen)));
+
+ if (!ret)
+ return ret;
+
+ if (data)
+ {
+ tree bend, fend;
+
+ data->field_len = flen;
+ data->field_pos = fpos;
+
+ /* One-past-the-end bit positions of BF and of the field. */
+ fend = size_binop (PLUS_EXPR, fpos, flen);
+ bend = size_binop (PLUS_EXPR, bpos, blen);
+
+ /* If BF ends before the field does, the overlap is cut short at
+ bend; otherwise NULL means "to the end of the field". */
+ if (tree_int_cst_lt (bend, fend))
+ data->overlap_len = size_binop (MINUS_EXPR, bend, fpos);
+ else
+ data->overlap_len = NULL;
+
+ /* If the field starts before BF, the overlap begins partway
+ into the field, and the length computed above (relative to
+ fpos) must be reduced accordingly. Otherwise NULL means the
+ overlap starts at the beginning of the field. */
+ if (tree_int_cst_lt (fpos, bpos))
+ {
+ data->overlap_pos = size_binop (MINUS_EXPR, bpos, fpos);
+ data->overlap_len = size_binop (MINUS_EXPR,
+ data->overlap_len
+ ? data->overlap_len
+ : data->field_len,
+ data->overlap_pos);
+ }
+ else
+ data->overlap_pos = NULL;
+ }
+
+ return ret;
+}
+
+/* Add to LISTP a sequence of statements that copies BLEN bits between
+ VAR and the scalarized elements of ELT, starting at bit VPOS of VAR
+ and at bit BPOS of ELT. The direction of the copy is given by
+ TO_VAR. */
+
+static void
+sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
+ tree *listp, tree blen, tree bpos,
+ struct sra_elt *elt)
+{
+ struct sra_elt *fld;
+ struct bitfield_overlap_info flp;
+
+ /* Visit each child of ELT that overlaps the bit range, emitting a
+ bit-field copy for scalarized children and recursing into the
+ others. */
+ FOR_EACH_ACTUAL_CHILD (fld, elt)
+ {
+ tree flen, fpos;
+
+ if (!bitfield_overlaps_p (blen, bpos, fld, &flp))
+ continue;
+
+ /* NULL overlap_len/overlap_pos mean "whole field" / "from the
+ start of the field" (see bitfield_overlaps_p). */
+ flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
+ fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
+
+ if (fld->replacement)
+ {
+ tree infld, invar, st, type;
+
+ infld = fld->replacement;
+
+ /* Choose an unsigned type exactly FLEN bits wide for both
+ sides of the copy; reuse the replacement's own type when
+ its precision already matches. */
+ type = TREE_TYPE (infld);
+ if (TYPE_PRECISION (type) != TREE_INT_CST_LOW (flen))
+ type = lang_hooks.types.type_for_size (TREE_INT_CST_LOW (flen), 1);
+ else
+ type = unsigned_type_for (type);
+
+ if (TREE_CODE (infld) == BIT_FIELD_REF)
+ {
+ /* The replacement is itself a bit-field reference; fold
+ its position into FPOS and address its base object. */
+ fpos = size_binop (PLUS_EXPR, fpos, TREE_OPERAND (infld, 2));
+ infld = TREE_OPERAND (infld, 0);
+ }
+ else if (BYTES_BIG_ENDIAN && DECL_P (fld->element)
+ && !tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (infld)),
+ DECL_SIZE (fld->element)))
+ {
+ /* The replacement decl is wider than the original field.
+ NOTE(review): presumably on big-endian targets the
+ field's bits sit at the high end of the wider object,
+ so shift the position by the size difference — confirm
+ against the target's bit-field layout. */
+ fpos = size_binop (PLUS_EXPR, fpos,
+ TYPE_SIZE (TREE_TYPE (infld)));
+ fpos = size_binop (MINUS_EXPR, fpos,
+ DECL_SIZE (fld->element));
+ }
+
+ /* The overlapped bits within the scalarized field... */
+ infld = fold_build3 (BIT_FIELD_REF, type, infld, flen, fpos);
+
+ /* ...and the corresponding bits within VAR: the field's
+ position relative to BPOS, shifted by VPOS, plus the
+ overlap offset when the overlap is partial. */
+ invar = size_binop (MINUS_EXPR, flp.field_pos, bpos);
+ if (flp.overlap_pos)
+ invar = size_binop (PLUS_EXPR, invar, flp.overlap_pos);
+ invar = size_binop (PLUS_EXPR, invar, vpos);
+
+ invar = fold_build3 (BIT_FIELD_REF, type, var, flen, invar);
+
+ /* Emit the copy in the direction requested by TO_VAR. */
+ if (to_var)
+ st = sra_build_bf_assignment (invar, infld);
+ else
+ st = sra_build_bf_assignment (infld, invar);
+
+ append_to_statement_list (st, listp);
+ }
+ else
+ {
+ /* No scalar replacement for this child: recurse into its own
+ children, with VAR's position advanced to where this
+ child's overlap begins. */
+ tree sub = size_binop (MINUS_EXPR, flp.field_pos, bpos);
+ sub = size_binop (PLUS_EXPR, vpos, sub);
+ if (flp.overlap_pos)
+ sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);
+
+ sra_explode_bitfield_assignment (var, sub, to_var, listp,
+ flen, fpos, fld);
+ }
+ }
+}
+
+/* Add to LISTBEFOREP statements that copy scalarized members of ELT
+ that overlap with BIT_FIELD_REF<(ELT->element), BLEN, BPOS> back
+ into the full variable, and to LISTAFTERP, if non-NULL, statements
+ that copy the (presumably modified) overlapping portions of the
+ full variable back to the scalarized variables. */
+
+static void
+sra_sync_for_bitfield_assignment (tree *listbeforep, tree *listafterp,
+ tree blen, tree bpos,
+ struct sra_elt *elt)
+{
+ struct sra_elt *fld;
+ struct bitfield_overlap_info flp;
+
+ FOR_EACH_ACTUAL_CHILD (fld, elt)
+ if (bitfield_overlaps_p (blen, bpos, fld, &flp))
+ {
+ /* If the child is scalarized, or the bit range covers the
+ entire child (both overlap fields NULL), sync the whole
+ child: copy it out before the assignment and, if requested,
+ back in afterwards. */
+ if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
+ {
+ generate_copy_inout (fld, false, generate_element_ref (fld),
+ listbeforep);
+ mark_no_warning (fld);
+ if (listafterp)
+ generate_copy_inout (fld, true, generate_element_ref (fld),
+ listafterp);
+ }
+ else
+ {
+ /* Partial overlap of an unscalarized child: narrow the bit
+ range to the overlapped portion and recurse into the
+ child's own members. */
+ tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
+ tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
+
+ sra_sync_for_bitfield_assignment (listbeforep, listafterp,
+ flen, fpos, fld);
+ }
+ }
+}
+