PUT_MODE (mem, srcmode);
- if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
+ if (insn_operand_matches (ic, 1, mem))
float_extend_from_mem[mode][srcmode] = true;
}
}
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
unsigned int expected_align, HOST_WIDE_INT expected_size)
{
- rtx opalign = GEN_INT (align / BITS_PER_UNIT);
int save_volatile_ok = volatile_ok;
enum machine_mode mode;
mode = GET_MODE_WIDER_MODE (mode))
{
enum insn_code code = direct_optab_handler (movmem_optab, mode);
- insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing
/* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
&& ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1)))
- || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
- && ((pred = insn_data[(int) code].operand[0].predicate) == 0
- || (*pred) (x, BLKmode))
- && ((pred = insn_data[(int) code].operand[1].predicate) == 0
- || (*pred) (y, BLKmode))
- && ((pred = insn_data[(int) code].operand[3].predicate) == 0
- || (*pred) (opalign, VOIDmode)))
+ || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
{
- rtx op2;
- rtx last = get_last_insn ();
- rtx pat;
-
- op2 = convert_to_mode (mode, size, 1);
- pred = insn_data[(int) code].operand[2].predicate;
- if (pred != 0 && ! (*pred) (op2, mode))
- op2 = copy_to_mode_reg (mode, op2);
+ struct expand_operand ops[6];
+ unsigned int nops;
/* ??? When called via emit_block_move_for_call, it'd be
nice if there were some way to inform the backend, so
that it doesn't fail the expansion because it thinks
emitting the libcall would be more efficient. */
-
- if (insn_data[(int) code].n_operands == 4)
- pat = GEN_FCN ((int) code) (x, y, op2, opalign);
- else
- pat = GEN_FCN ((int) code) (x, y, op2, opalign,
- GEN_INT (expected_align
- / BITS_PER_UNIT),
- GEN_INT (expected_size));
- if (pat)
+ nops = insn_data[(int) code].n_generator_args;
+ gcc_assert (nops == 4 || nops == 6);
+
+ create_fixed_operand (&ops[0], x);
+ create_fixed_operand (&ops[1], y);
+ /* The check above guarantees that this size conversion is valid. */
+ create_convert_operand_to (&ops[2], size, mode, true);
+ create_integer_operand (&ops[3], align / BITS_PER_UNIT);
+ if (nops == 6)
+ {
+ create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
+ create_integer_operand (&ops[5], expected_size);
+ }
+ if (maybe_expand_insn (code, nops, ops))
{
- emit_insn (pat);
volatile_ok = save_volatile_ok;
return true;
}
- else
- delete_insns_since (last);
}
}
including more than one in the machine description unless
the more limited one has some advantage. */
- rtx opalign = GEN_INT (align / BITS_PER_UNIT);
enum machine_mode mode;
if (expected_align < align)
mode = GET_MODE_WIDER_MODE (mode))
{
enum insn_code code = direct_optab_handler (setmem_optab, mode);
- insn_operand_predicate_fn pred;
if (code != CODE_FOR_nothing
/* We don't need MODE to be narrower than
&& ((CONST_INT_P (size)
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
<= (GET_MODE_MASK (mode) >> 1)))
- || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
- && ((pred = insn_data[(int) code].operand[0].predicate) == 0
- || (*pred) (object, BLKmode))
- && ((pred = insn_data[(int) code].operand[3].predicate) == 0
- || (*pred) (opalign, VOIDmode)))
+ || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
{
- rtx opsize, opchar;
- enum machine_mode char_mode;
- rtx last = get_last_insn ();
- rtx pat;
-
- opsize = convert_to_mode (mode, size, 1);
- pred = insn_data[(int) code].operand[1].predicate;
- if (pred != 0 && ! (*pred) (opsize, mode))
- opsize = copy_to_mode_reg (mode, opsize);
-
- opchar = val;
- char_mode = insn_data[(int) code].operand[2].mode;
- if (char_mode != VOIDmode)
- {
- opchar = convert_to_mode (char_mode, opchar, 1);
- pred = insn_data[(int) code].operand[2].predicate;
- if (pred != 0 && ! (*pred) (opchar, char_mode))
- opchar = copy_to_mode_reg (char_mode, opchar);
- }
+ struct expand_operand ops[6];
+ unsigned int nops;
- if (insn_data[(int) code].n_operands == 4)
- pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
- else
- pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
- GEN_INT (expected_align
- / BITS_PER_UNIT),
- GEN_INT (expected_size));
- if (pat)
+ nops = insn_data[(int) code].n_generator_args;
+ gcc_assert (nops == 4 || nops == 6);
+
+ create_fixed_operand (&ops[0], object);
+ /* The check above guarantees that this size conversion is valid. */
+ create_convert_operand_to (&ops[1], size, mode, true);
+ create_convert_operand_from (&ops[2], val, byte_mode, true);
+ create_integer_operand (&ops[3], align / BITS_PER_UNIT);
+ if (nops == 6)
{
- emit_insn (pat);
- return true;
+ create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
+ create_integer_operand (&ops[5], expected_size);
}
- else
- delete_insns_since (last);
+ if (maybe_expand_insn (code, nops, ops))
+ return true;
}
}
{
/* Skip if the target needs extra instructions to perform
the extension. */
- if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
+ if (!insn_operand_matches (ic, 1, trunc_y))
continue;
/* This is valid, but may not be cheaper than the original. */
newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
rtx dest;
enum insn_code icode;
- insn_operand_predicate_fn pred;
stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If there is push pattern, use it. Otherwise try old way of throwing
icode = optab_handler (push_optab, mode);
if (icode != CODE_FOR_nothing)
{
- if (((pred = insn_data[(int) icode].operand[0].predicate)
- && !((*pred) (x, mode))))
- x = force_reg (mode, x);
- emit_insn (GEN_FCN (icode) (x));
- return;
+ struct expand_operand ops[1];
+
+ create_input_operand (&ops[0], x, mode);
+ if (maybe_expand_insn (icode, 1, ops))
+ return;
}
if (GET_MODE_SIZE (mode) == rounded_size)
dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
tree op0, op1;
rtx value, result;
optab binop;
+ gimple srcstmt;
+ enum tree_code code;
if (mode1 != VOIDmode
|| bitsize >= BITS_PER_WORD
return false;
STRIP_NOPS (src);
- if (!BINARY_CLASS_P (src)
- || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
+ if (TREE_CODE (src) != SSA_NAME)
+ return false;
+ if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
+ return false;
+
+ srcstmt = get_gimple_for_ssa_name (src);
+ if (!srcstmt
+ || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
return false;
- op0 = TREE_OPERAND (src, 0);
- op1 = TREE_OPERAND (src, 1);
- STRIP_NOPS (op0);
+ code = gimple_assign_rhs_code (srcstmt);
+
+ op0 = gimple_assign_rhs1 (srcstmt);
+
+ /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
+ to find its initialization. Hopefully the initialization will
+ be from a bitfield load. */
+ if (TREE_CODE (op0) == SSA_NAME)
+ {
+ gimple op0stmt = get_gimple_for_ssa_name (op0);
+
+ /* We want to eventually have OP0 be the same as TO, which
+ should be a bitfield. */
+ if (!op0stmt
+ || !is_gimple_assign (op0stmt)
+ || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
+ return false;
+ op0 = gimple_assign_rhs1 (op0stmt);
+ }
+
+ op1 = gimple_assign_rhs2 (srcstmt);
if (!operand_equal_p (to, op0, 0))
return false;
if (BYTES_BIG_ENDIAN)
bitpos = str_bitsize - bitpos - bitsize;
- switch (TREE_CODE (src))
+ switch (code)
{
case PLUS_EXPR:
case MINUS_EXPR:
set_mem_expr (str_rtx, 0);
}
- binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
+ binop = code == PLUS_EXPR ? add_optab : sub_optab;
if (bitsize == 1 && bitpos + bitsize != str_bitsize)
{
value = expand_and (str_mode, value, const1_rtx, NULL);
set_mem_expr (str_rtx, 0);
}
- binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
+ binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
{
rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
rtx to_rtx = 0;
rtx result;
enum machine_mode mode;
- int align, icode;
+ int align;
+ enum insn_code icode;
/* Don't crash if the lhs of the assignment was erroneous. */
if (TREE_CODE (to) == ERROR_MARK)
{
- result = expand_normal (from);
+ expand_normal (from);
return;
}
&& ((icode = optab_handler (movmisalign_optab, mode))
!= CODE_FOR_nothing))
{
- enum machine_mode address_mode, op_mode1;
- rtx insn, reg, op0, mem;
+ struct expand_operand ops[2];
+ enum machine_mode address_mode;
+ rtx reg, op0, mem;
reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
reg = force_not_mem (reg);
if (TREE_THIS_VOLATILE (to))
MEM_VOLATILE_P (mem) = 1;
- op_mode1 = insn_data[icode].operand[1].mode;
- if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
- && op_mode1 != VOIDmode)
- reg = copy_to_mode_reg (op_mode1, reg);
-
- insn = GEN_FCN (icode) (mem, reg);
+ create_fixed_operand (&ops[0], mem);
+ create_input_operand (&ops[1], reg, mode);
/* The movmisalign<mode> pattern cannot fail, else the assignment would
silently be omitted. */
- gcc_assert (insn != NULL_RTX);
- emit_insn (insn);
+ expand_insn (icode, 2, ops);
return;
}
/* Handle expand_expr of a complex value returning a CONCAT. */
else if (GET_CODE (to_rtx) == CONCAT)
{
- if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
+ unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
+ if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
+ && bitpos == 0
+ && bitsize == mode_bitsize)
+ result = store_expr (from, to_rtx, false, nontemporal);
+ else if (bitsize == mode_bitsize / 2
+ && (bitpos == 0 || bitpos == mode_bitsize / 2))
+ result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
+ nontemporal);
+ else if (bitpos + bitsize <= mode_bitsize / 2)
+ result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
+ mode1, from, TREE_TYPE (tem),
+ get_alias_set (to), nontemporal);
+ else if (bitpos >= mode_bitsize / 2)
+ result = store_field (XEXP (to_rtx, 1), bitsize,
+ bitpos - mode_bitsize / 2, mode1, from,
+ TREE_TYPE (tem), get_alias_set (to),
+ nontemporal);
+ else if (bitpos == 0 && bitsize == mode_bitsize)
{
- gcc_assert (bitpos == 0);
- result = store_expr (from, to_rtx, false, nontemporal);
+ rtx from_rtx;
+ result = expand_normal (from);
+ from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
+ TYPE_MODE (TREE_TYPE (from)), 0);
+ emit_move_insn (XEXP (to_rtx, 0),
+ read_complex_part (from_rtx, false));
+ emit_move_insn (XEXP (to_rtx, 1),
+ read_complex_part (from_rtx, true));
}
else
{
- gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
- result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
- nontemporal);
+ rtx temp = assign_stack_temp (GET_MODE (to_rtx),
+ GET_MODE_SIZE (GET_MODE (to_rtx)),
+ 0);
+ write_complex_part (temp, XEXP (to_rtx, 0), false);
+ write_complex_part (temp, XEXP (to_rtx, 1), true);
+ result = store_field (temp, bitsize, bitpos, mode1, from,
+ TREE_TYPE (tem), get_alias_set (to),
+ nontemporal);
+ emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
+ emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
}
}
else
/* Emit a nontemporal (streaming) store of FROM into TO, using the
   target's storent<mode> pattern for TO's machine mode if one exists.

   TO is the destination memory rtx and FROM the value to store; both
   must share TO's mode.  Operand 0 is passed as a fixed operand (the
   pattern's predicate decides whether the MEM form is acceptable),
   while FROM goes through create_input_operand so maybe_expand_insn
   can legitimize it (e.g. copy it into a register) as needed.

   Returns true if a nontemporal store insn was emitted, false if the
   target has no usable pattern — callers then fall back to an
   ordinary store.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  enum machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
tmp = convert_memory_address_addr_space (tmode, tmp, as);
if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
- result = gen_rtx_PLUS (tmode, result, tmp);
+ result = simplify_gen_binary (PLUS, tmode, result, tmp);
else
{
subtarget = bitpos ? NULL_RTX : target;
case VEC_UNPACK_LO_EXPR:
{
op0 = expand_normal (treeop0);
- this_optab = optab_for_tree_code (code, type, optab_default);
temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
target, unsignedp);
gcc_assert (temp);
{
op0 = expand_normal (treeop0);
/* The signedness is determined from input operand. */
- this_optab = optab_for_tree_code (code,
- TREE_TYPE (treeop0),
- optab_default);
temp = expand_widen_pattern_expr
(ops, op0, NULL_RTX, NULL_RTX,
target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
mode = TYPE_MODE (TREE_TYPE (treeop0));
goto binop;
+ case DOT_PROD_EXPR:
+ {
+ tree oprnd0 = treeop0;
+ tree oprnd1 = treeop1;
+ tree oprnd2 = treeop2;
+ rtx op2;
+
+ expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+ op2 = expand_normal (oprnd2);
+ target = expand_widen_pattern_expr (ops, op0, op1, op2,
+ target, unsignedp);
+ return target;
+ }
+
+ case REALIGN_LOAD_EXPR:
+ {
+ tree oprnd0 = treeop0;
+ tree oprnd1 = treeop1;
+ tree oprnd2 = treeop2;
+ rtx op2;
+
+ this_optab = optab_for_tree_code (code, type, optab_default);
+ expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+ op2 = expand_normal (oprnd2);
+ temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+ target, unsignedp);
+ gcc_assert (temp);
+ return temp;
+ }
+
default:
gcc_unreachable ();
}
int unsignedp;
enum machine_mode mode;
enum tree_code code = TREE_CODE (exp);
- optab this_optab;
rtx subtarget, original_target;
int ignore;
tree context;
{
temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
if (MEM_P (temp))
- temp = copy_to_reg (temp);
+ copy_to_reg (temp);
return const0_rtx;
}
gcc_assert (decl_rtl);
decl_rtl = copy_rtx (decl_rtl);
/* Record writes to register variables. */
- if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
- && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
- {
- int i = REGNO (decl_rtl);
- int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
- while (nregs)
- {
- SET_HARD_REG_BIT (crtl->asm_clobbers, i);
- i++;
- nregs--;
- }
- }
+ if (modifier == EXPAND_WRITE
+ && REG_P (decl_rtl)
+ && HARD_REGISTER_P (decl_rtl))
+ add_to_hard_reg_set (&crtl->asm_clobbers,
+ GET_MODE (decl_rtl), REGNO (decl_rtl));
/* Ensure variable marked as used even if it doesn't go through
a parser. If it hasn't be used yet, write out an external
&& (g = SSA_NAME_DEF_STMT (ssa_name))
&& gimple_code (g) == GIMPLE_CALL)
pmode = promote_function_mode (type, mode, &unsignedp,
- TREE_TYPE
- (TREE_TYPE (gimple_call_fn (g))),
+ gimple_call_fntype (g),
2);
else
pmode = promote_decl_mode (exp, &unsignedp);
&& modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_INITIALIZER)
/* If the field is volatile, we always want an aligned
- access. */
- || (volatilep && flag_strict_volatile_bitfields > 0)
+ access. Only do this if the access is not already naturally
+ aligned, otherwise "normal" (non-bitfield) volatile fields
+ become non-addressable. */
+ || (volatilep && flag_strict_volatile_bitfields > 0
+ && (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
|| (mode1 != BLKmode
return expand_expr_real (treeop0, original_target, tmode,
modifier, alt_rtl);
- case REALIGN_LOAD_EXPR:
- {
- tree oprnd0 = treeop0;
- tree oprnd1 = treeop1;
- tree oprnd2 = treeop2;
- rtx op2;
-
- this_optab = optab_for_tree_code (code, type, optab_default);
- expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
- op2 = expand_normal (oprnd2);
- temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
- target, unsignedp);
- gcc_assert (temp);
- return temp;
- }
-
- case DOT_PROD_EXPR:
- {
- tree oprnd0 = treeop0;
- tree oprnd1 = treeop1;
- tree oprnd2 = treeop2;
- rtx op2;
-
- expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
- op2 = expand_normal (oprnd2);
- target = expand_widen_pattern_expr (&ops, op0, op1, op2,
- target, unsignedp);
- return target;
- }
-
case COMPOUND_LITERAL_EXPR:
{
/* Initialize the anonymous variable declared in the compound
rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
rtx fallback_label ATTRIBUTE_UNUSED)
{
+ struct expand_operand ops[5];
enum machine_mode index_mode = SImode;
int index_bits = GET_MODE_BITSIZE (index_mode);
rtx op1, op2, index;
- enum machine_mode op_mode;
if (! HAVE_casesi)
return 0;
do_pending_stack_adjust ();
- op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
- if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
- (index, op_mode))
- index = copy_to_mode_reg (op_mode, index);
-
op1 = expand_normal (minval);
-
- op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
- op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
- op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
- if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
- (op1, op_mode))
- op1 = copy_to_mode_reg (op_mode, op1);
-
op2 = expand_normal (range);
- op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
- op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
- op2, TYPE_UNSIGNED (TREE_TYPE (range)));
- if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
- (op2, op_mode))
- op2 = copy_to_mode_reg (op_mode, op2);
-
- emit_jump_insn (gen_casesi (index, op1, op2,
- table_label, !default_label
- ? fallback_label : default_label));
+ create_input_operand (&ops[0], index, index_mode);
+ create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
+ create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
+ create_fixed_operand (&ops[3], table_label);
+ create_fixed_operand (&ops[4], (default_label
+ ? default_label
+ : fallback_label));
+ expand_jump_insn (CODE_FOR_casesi, 5, ops);
return 1;
}