tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
- tree, enum machine_mode, int, tree, int);
+ tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
-/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
- where DST is non-consecutive registers represented by a PARALLEL.
- SSIZE represents the total size of block ORIG_SRC in bytes, or -1
- if not known. */
+/* A subroutine of emit_group_load. Arguments as for emit_group_load,
+ except that values are placed in TMPS[i], and must later be moved
+ into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
-void
-emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
+static void
+emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
- rtx *tmps, src;
+ rtx src;
int start, i;
+ enum machine_mode m = GET_MODE (orig_src);
gcc_assert (GET_CODE (dst) == PARALLEL);
+ if (!SCALAR_INT_MODE_P (m)
+ && !MEM_P (orig_src) && GET_CODE (orig_src) != CONCAT)
+ {
+ enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
+ if (imode == BLKmode)
+ src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
+ else
+ src = gen_reg_rtx (imode);
+ if (imode != BLKmode)
+ src = gen_lowpart (GET_MODE (orig_src), src);
+ emit_move_insn (src, orig_src);
+ /* ...and back again. */
+ if (imode != BLKmode)
+ src = gen_lowpart (imode, src);
+ emit_group_load_1 (tmps, dst, src, type, ssize);
+ return;
+ }
+
/* Check for a NULL entry, used to indicate that the parameter goes
both on the stack and in registers. */
if (XEXP (XVECEXP (dst, 0, 0), 0))
else
start = 1;
- tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
-
/* Process the pieces. */
for (i = start; i < XVECLEN (dst, 0); i++)
{
else
{
rtx mem;
-
+
gcc_assert (!bytepos);
mem = assign_stack_temp (GET_MODE (src), slen, 0);
emit_move_insn (mem, src);
- tmps[i] = adjust_address (mem, mode, 0);
+ tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
+ 0, 1, NULL_RTX, mode, mode);
}
}
/* FIXME: A SIMD parallel will eventually lead to a subreg of a
tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
build_int_cst (NULL_TREE, shift), tmps[i], 0);
}
+}
+
+/* Emit code to move a block SRC of type TYPE to a block DST,
+ where DST is non-consecutive registers represented by a PARALLEL.
+ SSIZE represents the total size of block SRC in bytes, or -1
+ if not known. */
+
+void
+emit_group_load (rtx dst, rtx src, tree type, int ssize)
+{
+ rtx *tmps;
+ int i;
+
+ tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
+ emit_group_load_1 (tmps, dst, src, type, ssize);
/* Copy the extracted pieces into the proper (probable) hard regs. */
- for (i = start; i < XVECLEN (dst, 0); i++)
- emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
+ for (i = 0; i < XVECLEN (dst, 0); i++)
+ {
+ rtx d = XEXP (XVECEXP (dst, 0, i), 0);
+ /* A NULL destination indicates a piece with no register home
+ (it goes on the stack); there is nothing to copy for it. */
+ if (d == NULL)
+ continue;
+ emit_move_insn (d, tmps[i]);
+ }
+}
+
+/* Similar, but load SRC into new pseudos in a format that looks like
+ PARALLEL. This can later be fed to emit_group_move to get things
+ in the right place. */
+
+rtx
+emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
+{
+ rtvec vec;
+ int i;
+
+ vec = rtvec_alloc (XVECLEN (parallel, 0));
+ emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
+
+ /* Convert the vector to look just like the original PARALLEL, except
+ with the computed values. */
+ for (i = 0; i < XVECLEN (parallel, 0); i++)
+ {
+ rtx e = XVECEXP (parallel, 0, i);
+ rtx d = XEXP (e, 0);
+
+ if (d)
+ {
+ /* Force the loaded piece into a fresh pseudo of the
+ destination's mode before rebuilding the entry. */
+ d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
+ e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
+ }
+ /* For a NULL destination (stack-passed piece), the original
+ entry E is carried over unchanged. */
+ RTVEC_ELT (vec, i) = e;
+ }
+
+ return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
XEXP (XVECEXP (src, 0, i), 0));
}
+/* Move a group of registers represented by a PARALLEL into pseudos.
+ Returns a new PARALLEL in the same mode as SRC in which each register
+ destination has been replaced by a fresh pseudo holding a copy of its
+ value; NULL destinations (stack-passed pieces) are kept as-is. */
+
+rtx
+emit_group_move_into_temps (rtx src)
+{
+ rtvec vec = rtvec_alloc (XVECLEN (src, 0));
+ int i;
+
+ for (i = 0; i < XVECLEN (src, 0); i++)
+ {
+ rtx e = XVECEXP (src, 0, i);
+ rtx d = XEXP (e, 0);
+
+ if (d)
+ e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
+ RTVEC_ELT (vec, i) = e;
+ }
+
+ return gen_rtx_PARALLEL (GET_MODE (src), vec);
+}
+
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
where SRC is non-consecutive registers represented by a PARALLEL.
SSIZE represents the total size of block ORIG_DST, or -1 if not
{
rtx *tmps, dst;
int start, i;
+ enum machine_mode m = GET_MODE (orig_dst);
gcc_assert (GET_CODE (src) == PARALLEL);
+ if (!SCALAR_INT_MODE_P (m)
+ && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
+ {
+ enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
+ if (imode == BLKmode)
+ dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
+ else
+ dst = gen_reg_rtx (imode);
+ emit_group_store (dst, src, type, ssize);
+ if (imode != BLKmode)
+ dst = gen_lowpart (GET_MODE (orig_dst), dst);
+ emit_move_insn (orig_dst, dst);
+ return;
+ }
+
/* Check for a NULL entry, used to indicate that the parameter goes
both on the stack and in registers. */
if (XEXP (XVECEXP (src, 0, 0), 0))
? 0 : x);
}
-/* Expand an assignment that stores the value of FROM into TO.
- If WANT_VALUE is nonzero, return an rtx for the value of TO.
- (If the value is constant, this rtx is a constant.)
- Otherwise, the returned value is NULL_RTX. */
+/* Expand an assignment that stores the value of FROM into TO. */
-rtx
-expand_assignment (tree to, tree from, int want_value)
+void
+expand_assignment (tree to, tree from)
{
rtx to_rtx = 0;
rtx result;
if (TREE_CODE (to) == ERROR_MARK)
{
result = expand_expr (from, NULL_RTX, VOIDmode, 0);
- return want_value ? result : NULL_RTX;
+ return;
}
/* Assignment of a structure component needs special treatment
/* If we are going to use store_bit_field and extract_bit_field,
make sure to_rtx will be safe for multiple use. */
- if (mode1 == VOIDmode && want_value)
- tem = stabilize_reference (tem);
-
orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
if (offset != 0)
}
/* Optimize bitfld op= val in certain cases. */
- while (mode1 == VOIDmode && !want_value
+ while (mode1 == VOIDmode
&& bitsize > 0 && bitsize < BITS_PER_WORD
&& GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
&& !TREE_SIDE_EFFECTS (to)
src = from;
STRIP_NOPS (src);
if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
- || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
+ || !BINARY_CLASS_P (src))
break;
op0 = TREE_OPERAND (src, 0);
emit_move_insn (str_rtx, result);
free_temp_slots ();
pop_temp_slots ();
- return NULL_RTX;
+ return;
default:
break;
}
result = store_field (to_rtx, bitsize, bitpos, mode1, from,
- (want_value
- /* Spurious cast for HPUX compiler. */
- ? ((enum machine_mode)
- TYPE_MODE (TREE_TYPE (to)))
- : VOIDmode),
- unsignedp, TREE_TYPE (tem), get_alias_set (to));
+ TREE_TYPE (tem), get_alias_set (to));
preserve_temp_slots (result);
free_temp_slots ();
/* If the value is meaningful, convert RESULT to the proper mode.
Otherwise, return nothing. */
- return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
- TYPE_MODE (TREE_TYPE (from)),
- result,
- TYPE_UNSIGNED (TREE_TYPE (to)))
- : NULL_RTX);
+ return;
}
/* If the rhs is a function call and its value is not an aggregate,
preserve_temp_slots (to_rtx);
free_temp_slots ();
pop_temp_slots ();
- return want_value ? to_rtx : NULL_RTX;
+ return;
}
/* Ordinary treatment. Expand TO to get a REG or MEM rtx.
preserve_temp_slots (to_rtx);
free_temp_slots ();
pop_temp_slots ();
- return want_value ? to_rtx : NULL_RTX;
+ return;
}
/* In case we are returning the contents of an object which overlaps
preserve_temp_slots (to_rtx);
free_temp_slots ();
pop_temp_slots ();
- return want_value ? to_rtx : NULL_RTX;
+ return;
}
/* Compute FROM and store the value in the rtx we got. */
push_temp_slots ();
- result = store_expr (from, to_rtx, want_value);
+ result = store_expr (from, to_rtx, 0);
preserve_temp_slots (result);
free_temp_slots ();
pop_temp_slots ();
- return want_value ? result : NULL_RTX;
+ return;
}
/* Generate code for computing expression EXP,
and storing the value into TARGET.
- If WANT_VALUE & 1 is nonzero, return a copy of the value
- not in TARGET, so that we can be sure to use the proper
- value in a containing expression even if TARGET has something
- else stored in it. If possible, we copy the value through a pseudo
- and return that pseudo. Or, if the value is constant, we try to
- return the constant. In some cases, we return a pseudo
- copied *from* TARGET.
-
If the mode is BLKmode then we may return TARGET itself.
It turns out that in BLKmode it doesn't cause a problem.
because C has no operators that could combine two different
with no sequence point. Will other languages need this to
be more thorough?
- If WANT_VALUE & 1 is 0, we return NULL, to make sure
- to catch quickly any cases where the caller uses the value
- and fails to set WANT_VALUE.
-
- If WANT_VALUE & 2 is set, this is a store into a call param on the
+ If CALL_PARAM_P is nonzero, this is a store into a call param on the
stack, and block moves may need to be treated specially. */
rtx
-store_expr (tree exp, rtx target, int want_value)
+store_expr (tree exp, rtx target, int call_param_p)
{
rtx temp;
rtx alt_rtl = NULL_RTX;
int dont_return_target = 0;
- int dont_store_target = 0;
if (VOID_TYPE_P (TREE_TYPE (exp)))
{
/* C++ can generate ?: expressions with a throw expression in one
branch and an rvalue in the other. Here, we resolve attempts to
store the throw expression's nonexistent result. */
- gcc_assert (!want_value);
+ gcc_assert (!call_param_p);
expand_expr (exp, const0_rtx, VOIDmode, 0);
return NULL_RTX;
}
/* Perform first part of compound expression, then assign from second
part. */
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
- want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
- return store_expr (TREE_OPERAND (exp, 1), target, want_value);
+ call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
+ return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
}
else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
{
do_pending_stack_adjust ();
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
- store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
+ store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
- store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
+ store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
emit_label (lab2);
OK_DEFER_POP;
- return want_value & 1 ? target : NULL_RTX;
- }
- else if ((want_value & 1) != 0
- && MEM_P (target)
- && ! MEM_VOLATILE_P (target)
- && GET_MODE (target) != BLKmode)
- /* If target is in memory and caller wants value in a register instead,
- arrange that. Pass TARGET as target for expand_expr so that,
- if EXP is another assignment, WANT_VALUE will be nonzero for it.
- We know expand_expr will not use the target in that case.
- Don't do this if TARGET is volatile because we are supposed
- to write it and then read it. */
- {
- temp = expand_expr (exp, target, GET_MODE (target),
- want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
- if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
- {
- /* If TEMP is already in the desired TARGET, only copy it from
- memory and don't store it there again. */
- if (temp == target
- || (rtx_equal_p (temp, target)
- && ! side_effects_p (temp) && ! side_effects_p (target)))
- dont_store_target = 1;
- temp = copy_to_reg (temp);
- }
- dont_return_target = 1;
+ return NULL_RTX;
}
else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
/* If this is a scalar in a register that is stored in a wider mode
{
rtx inner_target = 0;
- /* If we don't want a value, we can do the conversion inside EXP,
- which will often result in some optimizations. Do the conversion
- in two steps: first change the signedness, if needed, then
- the extend. But don't do this if the type of EXP is a subtype
- of something else since then the conversion might involve
- more than just converting modes. */
- if ((want_value & 1) == 0
- && INTEGRAL_TYPE_P (TREE_TYPE (exp))
+ /* We can do the conversion inside EXP, which will often result
+ in some optimizations. Do the conversion in two steps: first
+ change the signedness, if needed, then the extend. But don't
+ do this if the type of EXP is a subtype of something else
+ since then the conversion might involve more than just
+ converting modes. */
+ if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
&& TREE_TYPE (TREE_TYPE (exp)) == 0
&& (!lang_hooks.reduce_bit_field_operations
|| (GET_MODE_PRECISION (GET_MODE (target))
}
temp = expand_expr (exp, inner_target, VOIDmode,
- want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
-
- /* If TEMP is a MEM and we want a result value, make the access
- now so it gets done only once. Strictly speaking, this is
- only necessary if the MEM is volatile, or if the address
- overlaps TARGET. But not performing the load twice also
- reduces the amount of rtl we generate and then have to CSE. */
- if (MEM_P (temp) && (want_value & 1) != 0)
- temp = copy_to_reg (temp);
+ call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
/* If TEMP is a VOIDmode constant, use convert_modes to make
sure that we properly convert it. */
convert_move (SUBREG_REG (target), temp,
SUBREG_PROMOTED_UNSIGNED_P (target));
- /* If we promoted a constant, change the mode back down to match
- target. Otherwise, the caller might get confused by a result whose
- mode is larger than expected. */
-
- if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
- {
- if (GET_MODE (temp) != VOIDmode)
- {
- temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
- SUBREG_PROMOTED_VAR_P (temp) = 1;
- SUBREG_PROMOTED_UNSIGNED_SET (temp,
- SUBREG_PROMOTED_UNSIGNED_P (target));
- }
- else
- temp = convert_modes (GET_MODE (target),
- GET_MODE (SUBREG_REG (target)),
- temp, SUBREG_PROMOTED_UNSIGNED_P (target));
- }
-
- return want_value & 1 ? temp : NULL_RTX;
+ return NULL_RTX;
}
else
{
temp = expand_expr_real (exp, target, GET_MODE (target),
- (want_value & 2
+ (call_param_p
? EXPAND_STACK_PARM : EXPAND_NORMAL),
&alt_rtl);
/* Return TARGET if it's a specified hardware register.
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
&& !(MEM_P (target) && MEM_VOLATILE_P (target))
&& ! rtx_equal_p (temp, target)
- && (CONSTANT_P (temp) || (want_value & 1) != 0))
+ && CONSTANT_P (temp))
dont_return_target = 1;
}
|| (temp != target && (side_effects_p (temp)
|| side_effects_p (target))))
&& TREE_CODE (exp) != ERROR_MARK
- && ! dont_store_target
/* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
but TARGET is not valid memory reference, TEMP will differ
from TARGET although it is really the same location. */
if (GET_CODE (size) == CONST_INT
&& INTVAL (size) < TREE_STRING_LENGTH (exp))
emit_block_move (target, temp, size,
- (want_value & 2
+ (call_param_p
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
{
size_int (TREE_STRING_LENGTH (exp)));
rtx copy_size_rtx
= expand_expr (copy_size, NULL_RTX, VOIDmode,
- (want_value & 2
+ (call_param_p
? EXPAND_STACK_PARM : EXPAND_NORMAL));
rtx label = 0;
copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
TYPE_UNSIGNED (sizetype));
emit_block_move (target, temp, copy_size_rtx,
- (want_value & 2
+ (call_param_p
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
/* Figure out how much is left in TARGET that we have to clear.
int_size_in_bytes (TREE_TYPE (exp)));
else if (GET_MODE (temp) == BLKmode)
emit_block_move (target, temp, expr_size (exp),
- (want_value & 2
+ (call_param_p
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
{
}
}
- /* If we don't want a value, return NULL_RTX. */
- if ((want_value & 1) == 0)
- return NULL_RTX;
-
- /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
- ??? The latter test doesn't seem to make sense. */
- else if (dont_return_target && !MEM_P (temp))
- return temp;
-
- /* Return TARGET itself if it is a hard register. */
- else if ((want_value & 1) != 0
- && GET_MODE (target) != BLKmode
- && ! (REG_P (target)
- && REGNO (target) < FIRST_PSEUDO_REGISTER))
- return copy_to_reg (target);
-
- else
- return target;
+ return NULL_RTX;
}
\f
/* Examine CTOR. Discover how many scalar fields are set to nonzero
/* Return 1 if EXP contains mostly (3/4) zeros. */
-int
+static int
mostly_zeros_p (tree exp)
{
if (TREE_CODE (exp) == CONSTRUCTOR)
store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
}
else
- store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
- alias_set);
+ store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
the loop. */
expand_assignment (index,
build2 (PLUS_EXPR, TREE_TYPE (index),
- index, integer_one_node), 0);
+ index, integer_one_node));
emit_jump (loop_start);
BITSIZE bits, starting BITPOS bits from the start of TARGET.
If MODE is VOIDmode, it means that we are storing into a bit-field.
- If VALUE_MODE is VOIDmode, return nothing in particular.
- UNSIGNEDP is not used in this case.
-
- Otherwise, return an rtx for the value stored. This rtx
- has mode VALUE_MODE if that is convenient to do.
- In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
+ Always return const0_rtx unless we have something particular to
+ return.
TYPE is the type of the underlying object,
static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
- enum machine_mode mode, tree exp, enum machine_mode value_mode,
- int unsignedp, tree type, int alias_set)
+ enum machine_mode mode, tree exp, tree type, int alias_set)
{
HOST_WIDE_INT width_mask = 0;
if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
emit_move_insn (object, target);
- store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
- alias_set);
+ store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
emit_move_insn (target, object);
/* We're storing into a struct containing a single __complex. */
gcc_assert (!bitpos);
- return store_expr (exp, target, value_mode != VOIDmode);
+ return store_expr (exp, target, 0);
}
/* If the structure is in a register or if the component
/ BITS_PER_UNIT),
BLOCK_OP_NORMAL);
- return value_mode == VOIDmode ? const0_rtx : target;
+ return const0_rtx;
}
/* Store the value in the bitfield. */
store_bit_field (target, bitsize, bitpos, mode, temp);
- if (value_mode != VOIDmode)
- {
- /* The caller wants an rtx for the value.
- If possible, avoid refetching from the bitfield itself. */
- if (width_mask != 0
- && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
- {
- tree count;
- enum machine_mode tmode;
-
- tmode = GET_MODE (temp);
- if (tmode == VOIDmode)
- tmode = value_mode;
-
- if (unsignedp)
- return expand_and (tmode, temp,
- gen_int_mode (width_mask, tmode),
- NULL_RTX);
-
- count = build_int_cst (NULL_TREE,
- GET_MODE_BITSIZE (tmode) - bitsize);
- temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
- return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
- }
-
- return extract_bit_field (target, bitsize, bitpos, unsignedp,
- NULL_RTX, value_mode, VOIDmode);
- }
return const0_rtx;
}
else
{
- rtx addr = XEXP (target, 0);
- rtx to_rtx = target;
-
- /* If a value is wanted, it must be the lhs;
- so make the address stable for multiple use. */
-
- if (value_mode != VOIDmode && !REG_P (addr)
- && ! CONSTANT_ADDRESS_P (addr)
- /* A frame-pointer reference is already stable. */
- && ! (GET_CODE (addr) == PLUS
- && GET_CODE (XEXP (addr, 1)) == CONST_INT
- && (XEXP (addr, 0) == virtual_incoming_args_rtx
- || XEXP (addr, 0) == virtual_stack_vars_rtx)))
- to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
-
/* Now build a reference to just the desired component. */
-
- to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
+ rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
if (to_rtx == target)
to_rtx = copy_rtx (to_rtx);
if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
set_mem_alias_set (to_rtx, alias_set);
- return store_expr (exp, to_rtx, value_mode != VOIDmode);
+ return store_expr (exp, to_rtx, 0);
}
}
\f
/* Now look at our tree code and possibly recurse. */
switch (TREE_CODE_CLASS (TREE_CODE (exp)))
{
- case 'd':
+ case tcc_declaration:
exp_rtl = DECL_RTL_IF_SET (exp);
break;
- case 'c':
+ case tcc_constant:
return 1;
- case 'x':
+ case tcc_exceptional:
if (TREE_CODE (exp) == TREE_LIST)
{
while (1)
else
return 0;
- case 's':
+ case tcc_statement:
/* The only case we look at here is the DECL_INITIAL inside a
DECL_EXPR. */
return (TREE_CODE (exp) != DECL_EXPR
|| !DECL_INITIAL (DECL_EXPR_DECL (exp))
|| safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
- case '2':
- case '<':
+ case tcc_binary:
+ case tcc_comparison:
if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
return 0;
/* Fall through. */
- case '1':
+ case tcc_unary:
return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
- case 'e':
- case 'r':
+ case tcc_expression:
+ case tcc_reference:
/* Now do code-specific tests. EXP_RTL is set to any rtx we find in
the expression. If it is set, we conflict iff we are that rtx or
both are in memory. Otherwise, we check all operands of the
}
break;
+ case MISALIGNED_INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
if (MEM_P (x)
&& alias_sets_conflict_p (MEM_ALIAS_SET (x),
>= (unsigned int) LAST_AND_UNUSED_TREE_CODE
&& !lang_hooks.safe_from_p (x, exp))
return 0;
+ break;
+
+ case tcc_type:
+ /* Should never get a type here. */
+ gcc_unreachable ();
}
/* If we have an rtl, find any enclosed object. Then see if we conflict
}
\f
-/* A subroutine of expand_expr. Evaluate the address of EXP.
+/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
static rtx
-expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
- enum expand_modifier modifier)
+expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
+ enum expand_modifier modifier)
{
rtx result, subtarget;
tree inner, offset;
generating ADDR_EXPR of something that isn't an LVALUE. The only
exception here is STRING_CST. */
if (TREE_CODE (exp) == CONSTRUCTOR
- || TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
+ || CONSTANT_CLASS_P (exp))
return XEXP (output_constant_def (exp, 0), 0);
/* Everything must be something allowed by is_gimple_addressable. */
case CONST_DECL:
/* Recurse and make the output_constant_def clause above handle this. */
- return expand_expr_addr_expr (DECL_INITIAL (exp), target,
- tmode, modifier);
+ return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
+ tmode, modifier);
case REALPART_EXPR:
/* The real part of the complex number is always first, therefore
result = XEXP (result, 0);
/* ??? Is this needed anymore? */
- if (!TREE_USED (exp) == 0)
+ if (DECL_P (exp) && !TREE_USED (exp) == 0)
{
assemble_external (exp);
TREE_USED (exp) = 1;
gcc_assert (inner != exp);
subtarget = offset || bitpos ? NULL_RTX : target;
- result = expand_expr_addr_expr (inner, subtarget, tmode, modifier);
-
- if (tmode == VOIDmode)
- {
- tmode = GET_MODE (result);
- if (tmode == VOIDmode)
- tmode = Pmode;
- }
+ result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
if (offset)
{
result = force_operand (result, NULL);
tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
+ result = convert_memory_address (tmode, result);
+ tmp = convert_memory_address (tmode, tmp);
+
if (modifier == EXPAND_SUM)
result = gen_rtx_PLUS (tmode, result, tmp);
else
{
/* Someone beforehand should have rejected taking the address
of such an object. */
- gcc_assert (!(bitpos % BITS_PER_UNIT));
+ gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
result = plus_constant (result, bitpos / BITS_PER_UNIT);
if (modifier < EXPAND_SUM)
return result;
}
+/* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
+ The TARGET, TMODE and MODIFIER arguments are as for expand_expr.
+ Returns an rtx for the address; TMODE is forced to a pointer mode
+ (Pmode or ptr_mode) below, and the result is converted to it when
+ the subexpression produced a different mode. */
+
+static rtx
+expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
+ enum expand_modifier modifier)
+{
+ enum machine_mode rmode;
+ rtx result;
+
+ /* Target mode of VOIDmode says "whatever's natural". */
+ if (tmode == VOIDmode)
+ tmode = TYPE_MODE (TREE_TYPE (exp));
+
+ /* We can get called with some Weird Things if the user does silliness
+ like "(short) &a". In that case, convert_memory_address won't do
+ the right thing, so ignore the given target mode. */
+ if (tmode != Pmode && tmode != ptr_mode)
+ tmode = Pmode;
+
+ result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
+ tmode, modifier);
+
+ /* Despite expand_expr claims concerning ignoring TMODE when not
+ strictly convenient, stuff breaks if we don't honor it. Note
+ that combined with the above, we only do this for pointer modes. */
+ rmode = GET_MODE (result);
+ if (rmode == VOIDmode)
+ rmode = tmode;
+ if (rmode != tmode)
+ result = convert_memory_address (tmode, result);
+
+ return result;
+}
+
+
/* expand_expr: generate code for computing expression EXP.
An rtx for the computed value is returned. The value is never null.
In the case of a void EXP, const0_rtx is returned.
return const0_rtx;
}
- if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
- || code == INDIRECT_REF)
+ if (TREE_CODE_CLASS (code) == tcc_unary
+ || code == COMPONENT_REF || code == INDIRECT_REF)
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
modifier);
- else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
+ else if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison
|| code == ARRAY_REF || code == ARRAY_RANGE_REF)
{
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
return target;
}
+ case MISALIGNED_INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
{
tree exp1 = TREE_OPERAND (exp, 0);
tree orig;
+ if (code == MISALIGNED_INDIRECT_REF
+ && !targetm.vectorize.misaligned_mem_ok (mode))
+ abort ();
+
if (modifier != EXPAND_WRITE)
{
tree t;
op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
op0 = memory_address (mode, op0);
+
+ if (code == ALIGN_INDIRECT_REF)
+ {
+ int align = TYPE_ALIGN_UNIT (type);
+ op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
+ op0 = memory_address (mode, op0);
+ }
+
temp = gen_rtx_MEM (mode, op0);
orig = REF_ORIGINAL (exp);
/* Store data into beginning of memory target. */
store_expr (TREE_OPERAND (exp, 0),
adjust_address (target, TYPE_MODE (valtype), 0),
- modifier == EXPAND_STACK_PARM ? 2 : 0);
+ modifier == EXPAND_STACK_PARM);
else
{
* BITS_PER_UNIT),
(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
- VOIDmode, 0, type, 0);
+ type, 0);
}
/* Return the entire union. */
op1 = gen_label_rtx ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
store_expr (TREE_OPERAND (exp, 1), temp,
- modifier == EXPAND_STACK_PARM ? 2 : 0);
+ modifier == EXPAND_STACK_PARM);
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
store_expr (TREE_OPERAND (exp, 2), temp,
- modifier == EXPAND_STACK_PARM ? 2 : 0);
+ modifier == EXPAND_STACK_PARM);
emit_label (op1);
OK_DEFER_POP;
return temp;
+ case VEC_COND_EXPR:
+ target = expand_vec_cond_expr (exp, target);
+ return target;
+
case MODIFY_EXPR:
{
- /* If lhs is complex, expand calls in rhs before computing it.
- That's so we don't compute a pointer and save it over a
- call. If lhs is simple, compute it first so we can give it
- as a target if the rhs is just a call. This avoids an
- extra temp and copy and that prevents a partial-subsumption
- which makes bad code. Actually we could treat
- component_ref's of vars like vars. */
-
tree lhs = TREE_OPERAND (exp, 0);
tree rhs = TREE_OPERAND (exp, 1);
- temp = 0;
+ gcc_assert (ignore);
/* Check for |= or &= of a bitfield of size one into another bitfield
of size 1. In this case, (unless we need the result of the
??? At this point, we can't get a BIT_FIELD_REF here. But if
things change so we do, this code should be enhanced to
support it. */
- if (ignore
- && TREE_CODE (lhs) == COMPONENT_REF
+ if (TREE_CODE (lhs) == COMPONENT_REF
&& (TREE_CODE (rhs) == BIT_IOR_EXPR
|| TREE_CODE (rhs) == BIT_AND_EXPR)
&& TREE_OPERAND (rhs, 0) == lhs
expand_assignment (lhs, convert (TREE_TYPE (rhs),
(TREE_CODE (rhs) == BIT_IOR_EXPR
? integer_one_node
- : integer_zero_node)),
- 0);
+ : integer_zero_node)));
do_pending_stack_adjust ();
emit_label (label);
return const0_rtx;
}
- temp = expand_assignment (lhs, rhs, ! ignore);
+ expand_assignment (lhs, rhs);
- return temp;
+ return const0_rtx;
}
case RETURN_EXPR:
return const0_rtx;
case ADDR_EXPR:
- return expand_expr_addr_expr (TREE_OPERAND (exp, 0), target,
- tmode, modifier);
+ return expand_expr_addr_expr (exp, target, tmode, modifier);
/* COMPLEX type for Extended Pascal & Fortran */
case COMPLEX_EXPR:
return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
modifier, alt_rtl);
+ case REALIGN_LOAD_EXPR:
+ {
+ tree oprnd0 = TREE_OPERAND (exp, 0);
+ tree oprnd1 = TREE_OPERAND (exp, 1);
+ tree oprnd2 = TREE_OPERAND (exp, 2);
+ rtx op2;
+
+ this_optab = optab_for_tree_code (code, type);
+ expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
+ op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
+ temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+ target, unsignedp);
+ if (temp == 0)
+ abort ();
+ return temp;
+ }
+
+
default:
return lang_hooks.expand_expr (exp, original_target, tmode,
modifier, alt_rtl);
tree
string_constant (tree arg, tree *ptr_offset)
{
+ tree array, offset;
STRIP_NOPS (arg);
- if (TREE_CODE (arg) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
+ if (TREE_CODE (arg) == ADDR_EXPR)
{
- *ptr_offset = size_zero_node;
- return TREE_OPERAND (arg, 0);
- }
- if (TREE_CODE (arg) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
- && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
- {
- *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
- return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
+ {
+ *ptr_offset = size_zero_node;
+ return TREE_OPERAND (arg, 0);
+ }
+ else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
+ {
+ array = TREE_OPERAND (arg, 0);
+ offset = size_zero_node;
+ }
+ else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
+ {
+ array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
+ if (TREE_CODE (array) != STRING_CST
+ && TREE_CODE (array) != VAR_DECL)
+ return 0;
+ }
+ else
+ return 0;
}
else if (TREE_CODE (arg) == PLUS_EXPR)
{
STRIP_NOPS (arg1);
if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
+ && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
+ || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
{
- *ptr_offset = convert (sizetype, arg1);
- return TREE_OPERAND (arg0, 0);
+ array = TREE_OPERAND (arg0, 0);
+ offset = arg1;
}
else if (TREE_CODE (arg1) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
+ && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
+ || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
{
- *ptr_offset = convert (sizetype, arg0);
- return TREE_OPERAND (arg1, 0);
+ array = TREE_OPERAND (arg1, 0);
+ offset = arg0;
}
+ else
+ return 0;
+ }
+ else
+ return 0;
+
+ if (TREE_CODE (array) == STRING_CST)
+ {
+ *ptr_offset = convert (sizetype, offset);
+ return array;
+ }
+ else if (TREE_CODE (array) == VAR_DECL)
+ {
+ int length;
+
+ /* Variables initialized to string literals can be handled too. */
+ if (DECL_INITIAL (array) == NULL_TREE
+ || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
+ return 0;
+
+ /* If they are read-only, non-volatile and bind locally. */
+ if (! TREE_READONLY (array)
+ || TREE_SIDE_EFFECTS (array)
+ || ! targetm.binds_local_p (array))
+ return 0;
+
+ /* Avoid const char foo[4] = "abcde"; */
+ if (DECL_SIZE_UNIT (array) == NULL_TREE
+ || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
+ || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
+ || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
+ return 0;
+
+ /* If variable is bigger than the string literal, OFFSET must be constant
+ and inside of the bounds of the string literal. */
+ offset = convert (sizetype, offset);
+ if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
+ && (! host_integerp (offset, 1)
+ || compare_tree_int (offset, length) >= 0))
+ return 0;
+
+ *ptr_offset = offset;
+ return DECL_INITIAL (array);
}
return 0;