#define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
+/* Determine whether the LEN bytes can be moved by using several move
+ instructions.  LEN is the number of bytes to copy; ALIGN is the
+ maximum alignment that may be assumed for the blocks (same units as
+ the ALIGN argument of move_by_pieces — presumably bits; confirm
+ against the target's MOVE_BY_PIECES_P definition).  Return nonzero
+ if a call to move_by_pieces should succeed, i.e. if the target's
+ MOVE_BY_PIECES_P heuristic accepts this size/alignment pair.  */
+
+int
+can_move_by_pieces (len, align)
+ unsigned HOST_WIDE_INT len;
+ unsigned int align;
+{
+ return MOVE_BY_PIECES_P (len, align);
+}
+
/* Generate several move instructions to copy LEN bytes from block FROM to
block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
and TO through protect_from_queue before calling.
If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
used to push FROM to the stack.
- ALIGN is maximum stack alignment we can assume. */
+ ALIGN is maximum stack alignment we can assume.
-void
-move_by_pieces (to, from, len, align)
+ If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
+ mempcpy, and if ENDP is 2 return memory at the end minus one byte
+ ala stpcpy. */
+
+rtx
+move_by_pieces (to, from, len, align, endp)
rtx to, from;
unsigned HOST_WIDE_INT len;
unsigned int align;
+ int endp;
{
struct move_by_pieces data;
rtx to_addr, from_addr = XEXP (from, 0);
/* The code above should have handled everything. */
if (data.len > 0)
abort ();
+
+ if (endp)
+ {
+ rtx to1;
+
+ if (data.reverse)
+ abort ();
+ if (data.autinc_to)
+ {
+ if (endp == 2)
+ {
+ if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
+ emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
+ else
+ data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
+ -1));
+ }
+ to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
+ data.offset);
+ }
+ else
+ {
+ if (endp == 2)
+ --data.offset;
+ to1 = adjust_address (data.to, QImode, data.offset);
+ }
+ return to1;
+ }
+ else
+ return data.to;
}
/* Return number of insns required to move L bytes by pieces.
}
if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
- move_by_pieces (x, y, INTVAL (size), align);
+ move_by_pieces (x, y, INTVAL (size), align, 0);
else if (emit_block_move_via_movstr (x, y, size, align))
;
else if (may_use_call)
{
if (!block_move_fn)
{
- tree fn, args;
+ tree args, fn;
if (TARGET_MEM_FUNCTIONS)
{
/* Generate several move instructions to store LEN bytes generated by
CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
pointer which will be passed as argument in every CONSTFUN call.
- ALIGN is maximum alignment we can assume. */
+ ALIGN is maximum alignment we can assume.
+ If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
+ mempcpy, and if ENDP is 2 return memory at the end minus one byte
+ ala stpcpy. */
-void
-store_by_pieces (to, len, constfun, constfundata, align)
+rtx
+store_by_pieces (to, len, constfun, constfundata, align, endp)
rtx to;
unsigned HOST_WIDE_INT len;
rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
PTR constfundata;
unsigned int align;
+ int endp;
{
struct store_by_pieces data;
data.len = len;
data.to = to;
store_by_pieces_1 (&data, align);
+ if (endp)
+ {
+ rtx to1;
+
+ if (data.reverse)
+ abort ();
+ if (data.autinc_to)
+ {
+ if (endp == 2)
+ {
+ if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
+ emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
+ else
+ data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
+ -1));
+ }
+ to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
+ data.offset);
+ }
+ else
+ {
+ if (endp == 2)
+ --data.offset;
+ to1 = adjust_address (data.to, QImode, data.offset);
+ }
+ return to1;
+ }
+ else
+ return data.to;
}
/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
{
enum machine_mode mode = GET_MODE (x);
rtx y_cst = NULL_RTX;
- rtx last_insn;
+ rtx last_insn, set;
x = protect_from_queue (x, 1);
y = protect_from_queue (y, 0);
&& (last_insn = compress_float_constant (x, y)))
return last_insn;
+ y_cst = y;
+
if (!LEGITIMATE_CONSTANT_P (y))
{
- y_cst = y;
y = force_const_mem (mode, y);
/* If the target's cannot_force_const_mem prevented the spill,
last_insn = emit_move_insn_1 (x, y);
- if (y_cst && GET_CODE (x) == REG)
+ if (y_cst && GET_CODE (x) == REG
+ && (set = single_set (last_insn)) != NULL_RTX
+ && SET_DEST (set) == x
+ && ! rtx_equal_p (y_cst, SET_SRC (set)))
set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
return last_insn;
/* Note that the real part always precedes the imag part in memory
regardless of machine's endianness. */
#ifdef STACK_GROWS_DOWNWARD
- emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, XEXP (x, 0)),
- gen_imagpart (submode, y)));
- emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, XEXP (x, 0)),
- gen_realpart (submode, y)));
+ emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
+ gen_imagpart (submode, y));
+ emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
+ gen_realpart (submode, y));
#else
- emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, XEXP (x, 0)),
- gen_realpart (submode, y)));
- emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, XEXP (x, 0)),
- gen_imagpart (submode, y)));
+ emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
+ gen_realpart (submode, y));
+ emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
+ gen_imagpart (submode, y));
#endif
}
else
|| GET_CODE (imagpart_x) == SUBREG))
emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
- emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (realpart_x, realpart_y));
- emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (imagpart_x, imagpart_y));
+ emit_move_insn (realpart_x, realpart_y);
+ emit_move_insn (imagpart_x, imagpart_y);
}
return get_last_insn ();
last_insn = get_last_insn ();
if (GET_CODE (x) == REG)
- REG_NOTES (last_insn)
- = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
+ set_unique_reg_note (last_insn, REG_EQUAL, y);
return last_insn;
}
&& where_pad != none && where_pad != stack_direction)
anti_adjust_stack (GEN_INT (extra));
- move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
+ move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
}
else
#endif /* PUSH_ROUNDING */
{
rtx offset_rtx;
- if (contains_placeholder_p (offset))
+ if (CONTAINS_PLACEHOLDER_P (offset))
offset = build (WITH_RECORD_EXPR, sizetype,
offset, make_tree (TREE_TYPE (exp), target));
made during type construction. */
if (this_offset == 0)
break;
- else if (! TREE_CONSTANT (this_offset)
- && contains_placeholder_p (this_offset))
+ else if (CONTAINS_PLACEHOLDER_P (this_offset))
this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
offset = size_binop (PLUS_EXPR, offset, this_offset);
/* If the index has a self-referential type, pass it to a
WITH_RECORD_EXPR; if the component size is, pass our
component to one. */
- if (! TREE_CONSTANT (index)
- && contains_placeholder_p (index))
+ if (CONTAINS_PLACEHOLDER_P (index))
index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
- if (! TREE_CONSTANT (unit_size)
- && contains_placeholder_p (unit_size))
+ if (CONTAINS_PLACEHOLDER_P (unit_size))
unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
offset = size_binop (PLUS_EXPR, offset,
&& ((TREE_CODE (type) == VECTOR_TYPE
&& !is_zeros_p (exp))
|| ! mostly_zeros_p (exp)))))
- || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
+ || ((modifier == EXPAND_INITIALIZER
+ || modifier == EXPAND_CONST_ADDRESS)
+ && TREE_CONSTANT (exp)))
{
rtx constructor = output_constant_def (exp, 1);