&& !(MEM_P (op0) && MEM_VOLATILE_P (op0)
&& flag_strict_volatile_bitfields > 0)
&& ! ((REG_P (op0) || GET_CODE (op0) == SUBREG)
- && (bitsize + bitpos > GET_MODE_BITSIZE (op_mode))))
+ && (bitsize + bitpos > GET_MODE_BITSIZE (op_mode)))
+ /* Do not use insv if the bit region is restricted and
+ an op_mode integer at this offset would not fit within
+ the restricted region. */
+ && !(MEM_P (op0) && bitregion_end
+ && bitnum - bitpos + GET_MODE_BITSIZE (op_mode)
+ > bitregion_end + 1))
{
struct expand_operand ops[4];
int xbitpos = bitpos;
|| GET_MODE_BITSIZE (GET_MODE (op0)) > maxbits
|| (op_mode != MAX_MACHINE_MODE
&& GET_MODE_SIZE (GET_MODE (op0)) > GET_MODE_SIZE (op_mode)))
- bestmode = get_best_mode (bitsize, bitnum,
+ bestmode = get_best_mode (bitsize, bitnum,
bitregion_start, bitregion_end,
MEM_ALIGN (op0),
(op_mode == MAX_MACHINE_MODE
offset = (bitpos + bitsdone) / unit;
thispos = (bitpos + bitsdone) % unit;
+ /* When the region of bytes we may touch is restricted, decrease
+ UNIT as needed when close to the end of that region. */
+ if (bitregion_end
+ && unit > BITS_PER_UNIT
+ && bitpos + bitsdone - thispos + unit > bitregion_end + 1)
+ {
+ unit = unit / 2;
+ continue;
+ }
+
/* THISSIZE must not overrun a word boundary. Otherwise,
store_fixed_bit_field will call us again, and we will mutually
recurse forever. */
% GET_MODE_BITSIZE (mode));
else if (GET_CODE (op1) == SUBREG
&& subreg_lowpart_p (op1)
- && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (op1))))
+ && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (op1)))
+ && SCALAR_INT_MODE_P (GET_MODE (op1)))
op1 = SUBREG_REG (op1);
}