tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
- tree, enum machine_mode, int, tree, int);
+ tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
: (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
- if (to_real != from_real)
- abort ();
+ gcc_assert (to_real == from_real);
/* If the source and destination are already the same, then there's
nothing to do. */
&& SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
from = gen_lowpart (to_mode, from), from_mode = to_mode;
- if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
- abort ();
+ gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
if (to_mode == from_mode
|| (from_mode == VOIDmode && CONSTANT_P (from)))
if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
{
- if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
- abort ();
+ gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
if (VECTOR_MODE_P (to_mode))
from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
rtx value, insns;
convert_optab tab;
+ gcc_assert (GET_MODE_PRECISION (from_mode)
+ != GET_MODE_PRECISION (to_mode));
+
if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
tab = sext_optab;
- else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
- tab = trunc_optab;
else
- abort ();
+ tab = trunc_optab;
/* Try converting directly if the insn is supported. */
/* Otherwise use a libcall. */
libcall = tab->handlers[to_mode][from_mode].libfunc;
- if (!libcall)
- /* This conversion is not implemented yet. */
- abort ();
+ /* Is this conversion implemented yet? */
+ gcc_assert (libcall);
start_sequence ();
value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
enum machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
- if (trunc_optab->handlers[to_mode][full_mode].insn_code
- == CODE_FOR_nothing)
- abort ();
+ gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
+ != CODE_FOR_nothing);
if (full_mode != from_mode)
from = convert_to_mode (full_mode, from, unsignedp);
enum machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
- if (sext_optab->handlers[full_mode][from_mode].insn_code
- == CODE_FOR_nothing)
- abort ();
+ gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
+ != CODE_FOR_nothing);
emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
to, from, UNKNOWN);
int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
rtx subword = operand_subword (to, index, 1, to_mode);
- if (subword == 0)
- abort ();
+ gcc_assert (subword);
if (fill_value != subword)
emit_move_insn (subword, fill_value);
}
/* Mode combination is not recognized. */
- abort ();
+ gcc_unreachable ();
}
/* Return an rtx for a value that would result
subreg operation. */
if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
{
- if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
- abort ();
+ gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
return simplify_gen_subreg (mode, x, oldmode, 0);
}
}
/* The code above should have handled everything. */
- if (data.len > 0)
- abort ();
+ gcc_assert (!data.len);
if (endp)
{
rtx to1;
- if (data.reverse)
- abort ();
+ gcc_assert (!data.reverse);
if (data.autinc_to)
{
if (endp == 2)
max_size = GET_MODE_SIZE (mode);
}
- if (l)
- abort ();
+ gcc_assert (!l);
return n_insns;
}
#ifdef PUSH_ROUNDING
emit_single_push_insn (mode, from1, NULL);
#else
- abort ();
+ gcc_unreachable ();
#endif
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
- if (!MEM_P (x))
- abort ();
- if (!MEM_P (y))
- abort ();
- if (size == 0)
- abort ();
+ gcc_assert (MEM_P (x));
+ gcc_assert (MEM_P (y));
+ gcc_assert (size);
/* Make sure we've got BLKmode addresses; store_one_arg can decide that
block copy is more efficient for other large modes, e.g. DCmode. */
{
rtx tem = operand_subword (x, i, 1, BLKmode);
- if (tem == 0)
- abort ();
+ gcc_assert (tem);
emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
}
int i, length;
rtx *tmps;
- if (GET_CODE (orig) != PARALLEL)
- abort ();
+ gcc_assert (GET_CODE (orig) == PARALLEL);
length = XVECLEN (orig, 0);
tmps = alloca (sizeof (rtx) * length);
return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
-/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
- where DST is non-consecutive registers represented by a PARALLEL.
- SSIZE represents the total size of block ORIG_SRC in bytes, or -1
- if not known. */
+/* A subroutine of emit_group_load. Arguments as for emit_group_load,
+ except that values are placed in TMPS[i], and must later be moved
+ into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
-void
-emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
+static void
+emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
- rtx *tmps, src;
+ rtx src;
int start, i;
+ enum machine_mode m = GET_MODE (orig_src);
- if (GET_CODE (dst) != PARALLEL)
- abort ();
+ gcc_assert (GET_CODE (dst) == PARALLEL);
+
+ if (!SCALAR_INT_MODE_P (m)
+ && !MEM_P (orig_src) && GET_CODE (orig_src) != CONCAT)
+ {
+ enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
+ if (imode == BLKmode)
+ src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
+ else
+ src = gen_reg_rtx (imode);
+ if (imode != BLKmode)
+ src = gen_lowpart (GET_MODE (orig_src), src);
+ emit_move_insn (src, orig_src);
+ /* ...and back again. */
+ if (imode != BLKmode)
+ src = gen_lowpart (imode, src);
+ emit_group_load_1 (tmps, dst, src, type, ssize);
+ return;
+ }
/* Check for a NULL entry, used to indicate that the parameter goes
both on the stack and in registers. */
else
start = 1;
- tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
-
/* Process the pieces. */
for (i = start; i < XVECLEN (dst, 0); i++)
{
)
shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
bytelen = ssize - bytepos;
- if (bytelen <= 0)
- abort ();
+ gcc_assert (bytelen > 0);
}
/* If we won't be loading directly from memory, protect the real source
(bytepos % slen0) * BITS_PER_UNIT,
1, NULL_RTX, mode, mode);
}
- else if (bytepos == 0)
+ else
{
- rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
+ rtx mem;
+
+ gcc_assert (!bytepos);
+ mem = assign_stack_temp (GET_MODE (src), slen, 0);
emit_move_insn (mem, src);
- tmps[i] = adjust_address (mem, mode, 0);
+ tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
+ 0, 1, NULL_RTX, mode, mode);
}
- else
- abort ();
}
/* FIXME: A SIMD parallel will eventually lead to a subreg of a
SIMD register, which is currently broken. While we get GCC
tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
build_int_cst (NULL_TREE, shift), tmps[i], 0);
}
+}
+
+/* Emit code to move a block SRC of type TYPE to a block DST,
+ where DST is non-consecutive registers represented by a PARALLEL.
+ SSIZE represents the total size of block SRC in bytes, or -1
+ if not known. */
+
+void
+emit_group_load (rtx dst, rtx src, tree type, int ssize)
+{
+ rtx *tmps;
+ int i;
+
+ tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
+ emit_group_load_1 (tmps, dst, src, type, ssize);
/* Copy the extracted pieces into the proper (probable) hard regs. */
- for (i = start; i < XVECLEN (dst, 0); i++)
- emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
+ for (i = 0; i < XVECLEN (dst, 0); i++)
+ {
+ rtx d = XEXP (XVECEXP (dst, 0, i), 0);
+ if (d == NULL)
+ continue;
+ emit_move_insn (d, tmps[i]);
+ }
+}
+
+/* Similar, but load SRC into new pseudos in a format that looks like
+ PARALLEL. This can later be fed to emit_group_move to get things
+ in the right place. */
+
+rtx
+emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
+{
+ rtvec vec;
+ int i;
+
+ vec = rtvec_alloc (XVECLEN (parallel, 0));
+ emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
+
+ /* Convert the vector to look just like the original PARALLEL, except
+ with the computed values. */
+ for (i = 0; i < XVECLEN (parallel, 0); i++)
+ {
+ rtx e = XVECEXP (parallel, 0, i);
+ rtx d = XEXP (e, 0);
+
+ if (d)
+ {
+ d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
+ e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
+ }
+ RTVEC_ELT (vec, i) = e;
+ }
+
+ return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
{
int i;
- if (GET_CODE (src) != PARALLEL
- || GET_CODE (dst) != PARALLEL
- || XVECLEN (src, 0) != XVECLEN (dst, 0))
- abort ();
+ gcc_assert (GET_CODE (src) == PARALLEL
+ && GET_CODE (dst) == PARALLEL
+ && XVECLEN (src, 0) == XVECLEN (dst, 0));
/* Skip first entry if NULL. */
for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
XEXP (XVECEXP (src, 0, i), 0));
}
+/* Move a group of registers represented by a PARALLEL into pseudos. */
+
+rtx
+emit_group_move_into_temps (rtx src)
+{
+ rtvec vec = rtvec_alloc (XVECLEN (src, 0));
+ int i;
+
+ for (i = 0; i < XVECLEN (src, 0); i++)
+ {
+ rtx e = XVECEXP (src, 0, i);
+ rtx d = XEXP (e, 0);
+
+ if (d)
+ e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
+ RTVEC_ELT (vec, i) = e;
+ }
+
+ return gen_rtx_PARALLEL (GET_MODE (src), vec);
+}
+
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
where SRC is non-consecutive registers represented by a PARALLEL.
SSIZE represents the total size of block ORIG_DST, or -1 if not
{
rtx *tmps, dst;
int start, i;
+ enum machine_mode m = GET_MODE (orig_dst);
+
+ gcc_assert (GET_CODE (src) == PARALLEL);
- if (GET_CODE (src) != PARALLEL)
- abort ();
+ if (!SCALAR_INT_MODE_P (m)
+ && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
+ {
+ enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
+ if (imode == BLKmode)
+ dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
+ else
+ dst = gen_reg_rtx (imode);
+ emit_group_store (dst, src, type, ssize);
+ if (imode != BLKmode)
+ dst = gen_lowpart (GET_MODE (orig_dst), dst);
+ emit_move_insn (orig_dst, dst);
+ return;
+ }
/* Check for a NULL entry, used to indicate that the parameter goes
both on the stack and in registers. */
bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
dest = XEXP (dst, 1);
}
- else if (bytepos == 0 && XVECLEN (src, 0))
+ else
{
+ gcc_assert (bytepos == 0 && XVECLEN (src, 0));
dest = assign_stack_temp (GET_MODE (dest),
GET_MODE_SIZE (GET_MODE (dest)), 0);
emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
dst = dest;
break;
}
- else
- abort ();
}
/* Optimize the access just a bit. */
void
use_reg (rtx *call_fusage, rtx reg)
{
- if (!REG_P (reg)
- || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
- abort ();
-
+ gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
+
*call_fusage
= gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_USE (VOIDmode, reg), *call_fusage);
{
int i;
- if (regno + nregs > FIRST_PSEUDO_REGISTER)
- abort ();
+ gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
for (i = 0; i < nregs; i++)
use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* The code above should have handled everything. */
- if (l != 0)
- abort ();
+ gcc_assert (!l);
}
return 1;
if (len == 0)
{
- if (endp == 2)
- abort ();
+ gcc_assert (endp != 2);
return to;
}
- if (! STORE_BY_PIECES_P (len, align))
- abort ();
+ gcc_assert (STORE_BY_PIECES_P (len, align));
data.constfun = constfun;
data.constfundata = constfundata;
data.len = len;
{
rtx to1;
- if (data.reverse)
- abort ();
+ gcc_assert (!data.reverse);
if (data.autinc_to)
{
if (endp == 2)
}
/* The code above should have handled everything. */
- if (data->len != 0)
- abort ();
+ gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
rtx y_cst = NULL_RTX;
rtx last_insn, set;
- if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
- abort ();
+ gcc_assert (mode != BLKmode
+ && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
if (CONSTANT_P (y))
{
&& CONSTANT_ADDRESS_P (XEXP (y, 0)))))
y = validize_mem (y);
- if (mode == BLKmode)
- abort ();
+ gcc_assert (mode != BLKmode);
last_insn = emit_move_insn_1 (x, y);
{
enum machine_mode mode = GET_MODE (x);
enum machine_mode submode;
- enum mode_class class = GET_MODE_CLASS (mode);
- if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
- abort ();
+ gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
return
emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
/* Expand complex moves by moving real part and imag part, if possible. */
- else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
+ else if (COMPLEX_MODE_P (mode)
&& BLKmode != (submode = GET_MODE_INNER (mode))
&& (mov_optab->handlers[(int) submode].insn_code
!= CODE_FOR_nothing))
{
+ unsigned int modesize = GET_MODE_SIZE (mode);
+ unsigned int submodesize = GET_MODE_SIZE (submode);
+
/* Don't split destination if it is a stack push. */
- int stack = push_operand (x, GET_MODE (x));
+ int stack = push_operand (x, mode);
#ifdef PUSH_ROUNDING
/* In case we output to the stack, but the size is smaller than the
machine can push exactly, we need to use move instructions. */
- if (stack
- && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
- != GET_MODE_SIZE (submode)))
+ if (stack && PUSH_ROUNDING (submodesize) != submodesize)
{
rtx temp;
HOST_WIDE_INT offset1, offset2;
add_optab,
#endif
stack_pointer_rtx,
- GEN_INT
- (PUSH_ROUNDING
- (GET_MODE_SIZE (GET_MODE (x)))),
+ GEN_INT (PUSH_ROUNDING (modesize)),
stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
if (temp != stack_pointer_rtx)
#ifdef STACK_GROWS_DOWNWARD
offset1 = 0;
- offset2 = GET_MODE_SIZE (submode);
+ offset2 = submodesize;
#else
- offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
- offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
- + GET_MODE_SIZE (submode));
+ offset1 = -PUSH_ROUNDING (modesize);
+ offset2 = -PUSH_ROUNDING (modesize) + submodesize;
#endif
emit_move_insn (change_address (x, submode,
memory and reload. FIXME, we should see about using extract and
insert on integer registers, but complex short and complex char
variables should be rarely used. */
- if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
- && (reload_in_progress | reload_completed) == 0)
+ if ((reload_in_progress | reload_completed) == 0
+ && (!validate_subreg (submode, mode, NULL, submodesize)
+ || !validate_subreg (submode, mode, NULL, 0)))
{
- int packed_dest_p
- = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
- int packed_src_p
- = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
-
- if (packed_dest_p || packed_src_p)
+ if (REG_P (x) || REG_P (y))
{
- enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
- ? MODE_FLOAT : MODE_INT);
-
+ rtx mem, cmem;
enum machine_mode reg_mode
- = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
+ = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 1);
- if (reg_mode != BLKmode)
+ gcc_assert (reg_mode != BLKmode);
+
+ mem = assign_stack_temp (reg_mode, modesize, 0);
+ cmem = adjust_address (mem, mode, 0);
+
+ if (REG_P (x))
+ {
+ rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
+ emit_move_insn_1 (cmem, y);
+ return emit_move_insn_1 (sreg, mem);
+ }
+ else
{
- rtx mem = assign_stack_temp (reg_mode,
- GET_MODE_SIZE (mode), 0);
- rtx cmem = adjust_address (mem, mode, 0);
-
- if (packed_dest_p)
- {
- rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
-
- emit_move_insn_1 (cmem, y);
- return emit_move_insn_1 (sreg, mem);
- }
- else
- {
- rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
-
- emit_move_insn_1 (mem, sreg);
- return emit_move_insn_1 (x, cmem);
- }
+ rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
+ emit_move_insn_1 (mem, sreg);
+ return emit_move_insn_1 (x, cmem);
}
}
}
if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
break;
- if (tmode == VOIDmode)
- abort ();
+ gcc_assert (tmode != VOIDmode);
/* Get X and Y in TMODE. We can't use gen_lowpart here because it
may call change_address which is not appropriate if we were
/* This will handle any multi-word or full-word mode that lacks a move_insn
pattern. However, you will get better code if you define such patterns,
even if they must turn into multiple assembler instructions. */
- else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
+ else
{
rtx last_insn = 0;
rtx seq, inner;
int need_clobber;
int i;
-
+
+ gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
+
#ifdef PUSH_ROUNDING
/* If X is a push on the stack, do the push now and replace
else if (ypart == 0)
ypart = operand_subword_force (y, i, mode);
- if (xpart == 0 || ypart == 0)
- abort ();
+ gcc_assert (xpart && ypart);
need_clobber |= (GET_CODE (xpart) == SUBREG);
return last_insn;
}
- else
- abort ();
}
/* If Y is representable exactly in a narrower mode, and the target can
else
offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
- if (size == 0)
- abort ();
+ gcc_assert (size);
used -= offset;
? 0 : x);
}
-/* Expand an assignment that stores the value of FROM into TO.
- If WANT_VALUE is nonzero, return an rtx for the value of TO.
- (If the value is constant, this rtx is a constant.)
- Otherwise, the returned value is NULL_RTX. */
+/* Expand an assignment that stores the value of FROM into TO. */
-rtx
-expand_assignment (tree to, tree from, int want_value)
+void
+expand_assignment (tree to, tree from)
{
rtx to_rtx = 0;
rtx result;
if (TREE_CODE (to) == ERROR_MARK)
{
result = expand_expr (from, NULL_RTX, VOIDmode, 0);
- return want_value ? result : NULL_RTX;
+ return;
}
/* Assignment of a structure component needs special treatment
/* If we are going to use store_bit_field and extract_bit_field,
make sure to_rtx will be safe for multiple use. */
- if (mode1 == VOIDmode && want_value)
- tem = stabilize_reference (tem);
-
orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
if (offset != 0)
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
- if (!MEM_P (to_rtx))
- abort ();
+ gcc_assert (MEM_P (to_rtx));
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (offset_rtx) != Pmode)
}
/* Optimize bitfld op= val in certain cases. */
- while (mode1 == VOIDmode && !want_value
+ while (mode1 == VOIDmode
&& bitsize > 0 && bitsize < BITS_PER_WORD
&& GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
&& !TREE_SIDE_EFFECTS (to)
src = from;
STRIP_NOPS (src);
if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
- || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
+ || !BINARY_CLASS_P (src))
break;
op0 = TREE_OPERAND (src, 0);
if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
break;
- /* We can't handle fields split accross multiple entities. */
+ /* We can't handle fields split across multiple entities. */
if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
break;
emit_move_insn (str_rtx, result);
free_temp_slots ();
pop_temp_slots ();
- return NULL_RTX;
+ return;
default:
break;
}
result = store_field (to_rtx, bitsize, bitpos, mode1, from,
- (want_value
- /* Spurious cast for HPUX compiler. */
- ? ((enum machine_mode)
- TYPE_MODE (TREE_TYPE (to)))
- : VOIDmode),
- unsignedp, TREE_TYPE (tem), get_alias_set (to));
+ TREE_TYPE (tem), get_alias_set (to));
preserve_temp_slots (result);
free_temp_slots ();
/* If the value is meaningful, convert RESULT to the proper mode.
Otherwise, return nothing. */
- return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
- TYPE_MODE (TREE_TYPE (from)),
- result,
- TYPE_UNSIGNED (TREE_TYPE (to)))
- : NULL_RTX);
+ return;
}
/* If the rhs is a function call and its value is not an aggregate,
preserve_temp_slots (to_rtx);
free_temp_slots ();
pop_temp_slots ();
- return want_value ? to_rtx : NULL_RTX;
+ return;
}
/* Ordinary treatment. Expand TO to get a REG or MEM rtx.
preserve_temp_slots (to_rtx);
free_temp_slots ();
pop_temp_slots ();
- return want_value ? to_rtx : NULL_RTX;
+ return;
}
/* In case we are returning the contents of an object which overlaps
preserve_temp_slots (to_rtx);
free_temp_slots ();
pop_temp_slots ();
- return want_value ? to_rtx : NULL_RTX;
+ return;
}
/* Compute FROM and store the value in the rtx we got. */
push_temp_slots ();
- result = store_expr (from, to_rtx, want_value);
+ result = store_expr (from, to_rtx, 0);
preserve_temp_slots (result);
free_temp_slots ();
pop_temp_slots ();
- return want_value ? result : NULL_RTX;
+ return;
}
/* Generate code for computing expression EXP,
and storing the value into TARGET.
- If WANT_VALUE & 1 is nonzero, return a copy of the value
- not in TARGET, so that we can be sure to use the proper
- value in a containing expression even if TARGET has something
- else stored in it. If possible, we copy the value through a pseudo
- and return that pseudo. Or, if the value is constant, we try to
- return the constant. In some cases, we return a pseudo
- copied *from* TARGET.
-
If the mode is BLKmode then we may return TARGET itself.
It turns out that in BLKmode it doesn't cause a problem.
because C has no operators that could combine two different
with no sequence point. Will other languages need this to
be more thorough?
- If WANT_VALUE & 1 is 0, we return NULL, to make sure
- to catch quickly any cases where the caller uses the value
- and fails to set WANT_VALUE.
-
- If WANT_VALUE & 2 is set, this is a store into a call param on the
+ If CALL_PARAM_P is nonzero, this is a store into a call param on the
stack, and block moves may need to be treated specially. */
rtx
-store_expr (tree exp, rtx target, int want_value)
+store_expr (tree exp, rtx target, int call_param_p)
{
rtx temp;
rtx alt_rtl = NULL_RTX;
int dont_return_target = 0;
- int dont_store_target = 0;
if (VOID_TYPE_P (TREE_TYPE (exp)))
{
/* C++ can generate ?: expressions with a throw expression in one
branch and an rvalue in the other. Here, we resolve attempts to
store the throw expression's nonexistent result. */
- if (want_value)
- abort ();
+ gcc_assert (!call_param_p);
expand_expr (exp, const0_rtx, VOIDmode, 0);
return NULL_RTX;
}
/* Perform first part of compound expression, then assign from second
part. */
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
- want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
- return store_expr (TREE_OPERAND (exp, 1), target, want_value);
+ call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
+ return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
}
else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
{
do_pending_stack_adjust ();
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
- store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
+ store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
- store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
+ store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
emit_label (lab2);
OK_DEFER_POP;
- return want_value & 1 ? target : NULL_RTX;
- }
- else if ((want_value & 1) != 0
- && MEM_P (target)
- && ! MEM_VOLATILE_P (target)
- && GET_MODE (target) != BLKmode)
- /* If target is in memory and caller wants value in a register instead,
- arrange that. Pass TARGET as target for expand_expr so that,
- if EXP is another assignment, WANT_VALUE will be nonzero for it.
- We know expand_expr will not use the target in that case.
- Don't do this if TARGET is volatile because we are supposed
- to write it and then read it. */
- {
- temp = expand_expr (exp, target, GET_MODE (target),
- want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
- if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
- {
- /* If TEMP is already in the desired TARGET, only copy it from
- memory and don't store it there again. */
- if (temp == target
- || (rtx_equal_p (temp, target)
- && ! side_effects_p (temp) && ! side_effects_p (target)))
- dont_store_target = 1;
- temp = copy_to_reg (temp);
- }
- dont_return_target = 1;
+ return NULL_RTX;
}
else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
/* If this is a scalar in a register that is stored in a wider mode
{
rtx inner_target = 0;
- /* If we don't want a value, we can do the conversion inside EXP,
- which will often result in some optimizations. Do the conversion
- in two steps: first change the signedness, if needed, then
- the extend. But don't do this if the type of EXP is a subtype
- of something else since then the conversion might involve
- more than just converting modes. */
- if ((want_value & 1) == 0
- && INTEGRAL_TYPE_P (TREE_TYPE (exp))
+ /* We can do the conversion inside EXP, which will often result
+ in some optimizations. Do the conversion in two steps: first
+ change the signedness, if needed, then the extend. But don't
+ do this if the type of EXP is a subtype of something else
+ since then the conversion might involve more than just
+ converting modes. */
+ if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
&& TREE_TYPE (TREE_TYPE (exp)) == 0
&& (!lang_hooks.reduce_bit_field_operations
|| (GET_MODE_PRECISION (GET_MODE (target))
}
temp = expand_expr (exp, inner_target, VOIDmode,
- want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
-
- /* If TEMP is a MEM and we want a result value, make the access
- now so it gets done only once. Strictly speaking, this is
- only necessary if the MEM is volatile, or if the address
- overlaps TARGET. But not performing the load twice also
- reduces the amount of rtl we generate and then have to CSE. */
- if (MEM_P (temp) && (want_value & 1) != 0)
- temp = copy_to_reg (temp);
+ call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
/* If TEMP is a VOIDmode constant, use convert_modes to make
sure that we properly convert it. */
convert_move (SUBREG_REG (target), temp,
SUBREG_PROMOTED_UNSIGNED_P (target));
- /* If we promoted a constant, change the mode back down to match
- target. Otherwise, the caller might get confused by a result whose
- mode is larger than expected. */
-
- if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
- {
- if (GET_MODE (temp) != VOIDmode)
- {
- temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
- SUBREG_PROMOTED_VAR_P (temp) = 1;
- SUBREG_PROMOTED_UNSIGNED_SET (temp,
- SUBREG_PROMOTED_UNSIGNED_P (target));
- }
- else
- temp = convert_modes (GET_MODE (target),
- GET_MODE (SUBREG_REG (target)),
- temp, SUBREG_PROMOTED_UNSIGNED_P (target));
- }
-
- return want_value & 1 ? temp : NULL_RTX;
+ return NULL_RTX;
}
else
{
temp = expand_expr_real (exp, target, GET_MODE (target),
- (want_value & 2
+ (call_param_p
? EXPAND_STACK_PARM : EXPAND_NORMAL),
&alt_rtl);
/* Return TARGET if it's a specified hardware register.
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
&& !(MEM_P (target) && MEM_VOLATILE_P (target))
&& ! rtx_equal_p (temp, target)
- && (CONSTANT_P (temp) || (want_value & 1) != 0))
+ && CONSTANT_P (temp))
dont_return_target = 1;
}
|| (temp != target && (side_effects_p (temp)
|| side_effects_p (target))))
&& TREE_CODE (exp) != ERROR_MARK
- && ! dont_store_target
/* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
but TARGET is not valid memory reference, TEMP will differ
from TARGET although it is really the same location. */
if (GET_CODE (size) == CONST_INT
&& INTVAL (size) < TREE_STRING_LENGTH (exp))
emit_block_move (target, temp, size,
- (want_value & 2
+ (call_param_p
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
{
size_int (TREE_STRING_LENGTH (exp)));
rtx copy_size_rtx
= expand_expr (copy_size, NULL_RTX, VOIDmode,
- (want_value & 2
+ (call_param_p
? EXPAND_STACK_PARM : EXPAND_NORMAL));
rtx label = 0;
copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
TYPE_UNSIGNED (sizetype));
emit_block_move (target, temp, copy_size_rtx,
- (want_value & 2
+ (call_param_p
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
/* Figure out how much is left in TARGET that we have to clear.
int_size_in_bytes (TREE_TYPE (exp)));
else if (GET_MODE (temp) == BLKmode)
emit_block_move (target, temp, expr_size (exp),
- (want_value & 2
+ (call_param_p
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
{
}
}
- /* If we don't want a value, return NULL_RTX. */
- if ((want_value & 1) == 0)
- return NULL_RTX;
-
- /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
- ??? The latter test doesn't seem to make sense. */
- else if (dont_return_target && !MEM_P (temp))
- return temp;
-
- /* Return TARGET itself if it is a hard register. */
- else if ((want_value & 1) != 0
- && GET_MODE (target) != BLKmode
- && ! (REG_P (target)
- && REGNO (target) < FIRST_PSEUDO_REGISTER))
- return copy_to_reg (target);
-
- else
- return target;
+ return NULL_RTX;
}
\f
/* Examine CTOR. Discover how many scalar fields are set to nonzero
if (!initializer_zerop (value))
nz_elts += mult;
break;
+
+ case STRING_CST:
+ nz_elts += mult * TREE_STRING_LENGTH (value);
+ break;
+
case COMPLEX_CST:
if (!initializer_zerop (TREE_REALPART (value)))
nz_elts += mult;
if (!initializer_zerop (TREE_IMAGPART (value)))
nz_elts += mult;
break;
+
case VECTOR_CST:
{
tree v;
case FUNCTION_TYPE:
case LANG_TYPE:
default:
- abort ();
+ gcc_unreachable ();
}
}
/* Return 1 if EXP contains mostly (3/4) zeros. */
-int
+static int
mostly_zeros_p (tree exp)
{
if (TREE_CODE (exp) == CONSTRUCTOR)
store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
}
else
- store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
- alias_set);
+ store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
- if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
- || TREE_CODE (type) == QUAL_UNION_TYPE)
+ switch (TREE_CODE (type))
{
- tree elt;
-
- /* If size is zero or the target is already cleared, do nothing. */
- if (size == 0 || cleared)
- cleared = 1;
- /* We either clear the aggregate or indicate the value is dead. */
- else if ((TREE_CODE (type) == UNION_TYPE
- || TREE_CODE (type) == QUAL_UNION_TYPE)
- && ! CONSTRUCTOR_ELTS (exp))
- /* If the constructor is empty, clear the union. */
- {
- clear_storage (target, expr_size (exp));
- cleared = 1;
- }
-
- /* If we are building a static constructor into a register,
- set the initial value as zero so we can fold the value into
- a constant. But if more than one register is involved,
- this probably loses. */
- else if (REG_P (target) && TREE_STATIC (exp)
- && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
- {
- emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
- cleared = 1;
- }
+ case RECORD_TYPE:
+ case UNION_TYPE:
+ case QUAL_UNION_TYPE:
+ {
+ tree elt;
- /* If the constructor has fewer fields than the structure
- or if we are initializing the structure to mostly zeros,
- clear the whole structure first. Don't do this if TARGET is a
- register whose mode size isn't equal to SIZE since clear_storage
- can't handle this case. */
- else if (size > 0
- && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
- || mostly_zeros_p (exp))
- && (!REG_P (target)
- || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
- == size)))
- {
- clear_storage (target, GEN_INT (size));
+ /* If size is zero or the target is already cleared, do nothing. */
+ if (size == 0 || cleared)
cleared = 1;
- }
-
- if (! cleared)
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
- /* Store each element of the constructor into
- the corresponding field of TARGET. */
-
- for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
- {
- tree field = TREE_PURPOSE (elt);
- tree value = TREE_VALUE (elt);
- enum machine_mode mode;
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos = 0;
- tree offset;
- rtx to_rtx = target;
-
- /* Just ignore missing fields.
- We cleared the whole structure, above,
- if any fields are missing. */
- if (field == 0)
- continue;
-
- if (cleared && initializer_zerop (value))
- continue;
-
- if (host_integerp (DECL_SIZE (field), 1))
- bitsize = tree_low_cst (DECL_SIZE (field), 1);
- else
- bitsize = -1;
-
- mode = DECL_MODE (field);
- if (DECL_BIT_FIELD (field))
- mode = VOIDmode;
+ /* We either clear the aggregate or indicate the value is dead. */
+ else if ((TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
+ && ! CONSTRUCTOR_ELTS (exp))
+ /* If the constructor is empty, clear the union. */
+ {
+ clear_storage (target, expr_size (exp));
+ cleared = 1;
+ }
- offset = DECL_FIELD_OFFSET (field);
- if (host_integerp (offset, 0)
- && host_integerp (bit_position (field), 0))
- {
- bitpos = int_bit_position (field);
- offset = 0;
- }
- else
- bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
+ /* If we are building a static constructor into a register,
+ set the initial value as zero so we can fold the value into
+ a constant. But if more than one register is involved,
+ this probably loses. */
+ else if (REG_P (target) && TREE_STATIC (exp)
+ && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
+ {
+ emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
+ cleared = 1;
+ }
- if (offset)
- {
- rtx offset_rtx;
+ /* If the constructor has fewer fields than the structure or
+ if we are initializing the structure to mostly zeros, clear
+ the whole structure first. Don't do this if TARGET is a
+ register whose mode size isn't equal to SIZE since
+ clear_storage can't handle this case. */
+ else if (size > 0
+ && ((list_length (CONSTRUCTOR_ELTS (exp))
+ != fields_length (type))
+ || mostly_zeros_p (exp))
+ && (!REG_P (target)
+ || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
+ == size)))
+ {
+ clear_storage (target, GEN_INT (size));
+ cleared = 1;
+ }
- offset
- = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
- make_tree (TREE_TYPE (exp),
- target));
+ if (! cleared)
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
- offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
- if (!MEM_P (to_rtx))
- abort ();
+ /* Store each element of the constructor into the
+ corresponding field of TARGET. */
+ for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
+ {
+ tree field = TREE_PURPOSE (elt);
+ tree value = TREE_VALUE (elt);
+ enum machine_mode mode;
+ HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitpos = 0;
+ tree offset;
+ rtx to_rtx = target;
+
+ /* Just ignore missing fields. We cleared the whole
+ structure, above, if any fields are missing. */
+ if (field == 0)
+ continue;
+
+ if (cleared && initializer_zerop (value))
+ continue;
+
+ if (host_integerp (DECL_SIZE (field), 1))
+ bitsize = tree_low_cst (DECL_SIZE (field), 1);
+ else
+ bitsize = -1;
+
+ mode = DECL_MODE (field);
+ if (DECL_BIT_FIELD (field))
+ mode = VOIDmode;
+
+ offset = DECL_FIELD_OFFSET (field);
+ if (host_integerp (offset, 0)
+ && host_integerp (bit_position (field), 0))
+ {
+ bitpos = int_bit_position (field);
+ offset = 0;
+ }
+ else
+ bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
+
+ if (offset)
+ {
+ rtx offset_rtx;
+
+ offset
+ = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
+ make_tree (TREE_TYPE (exp),
+ target));
+
+ offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
+ gcc_assert (MEM_P (to_rtx));
+
#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (offset_rtx) != Pmode)
- offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
+ if (GET_MODE (offset_rtx) != Pmode)
+ offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
- if (GET_MODE (offset_rtx) != ptr_mode)
- offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
+ if (GET_MODE (offset_rtx) != ptr_mode)
+ offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
- to_rtx = offset_address (to_rtx, offset_rtx,
- highest_pow2_factor (offset));
- }
+ to_rtx = offset_address (to_rtx, offset_rtx,
+ highest_pow2_factor (offset));
+ }
#ifdef WORD_REGISTER_OPERATIONS
- /* If this initializes a field that is smaller than a word, at the
- start of a word, try to widen it to a full word.
- This special case allows us to output C++ member function
- initializations in a form that the optimizers can understand. */
- if (REG_P (target)
- && bitsize < BITS_PER_WORD
- && bitpos % BITS_PER_WORD == 0
- && GET_MODE_CLASS (mode) == MODE_INT
- && TREE_CODE (value) == INTEGER_CST
- && exp_size >= 0
- && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
- {
- tree type = TREE_TYPE (value);
-
- if (TYPE_PRECISION (type) < BITS_PER_WORD)
- {
- type = lang_hooks.types.type_for_size
- (BITS_PER_WORD, TYPE_UNSIGNED (type));
- value = convert (type, value);
- }
-
- if (BYTES_BIG_ENDIAN)
- value
- = fold (build2 (LSHIFT_EXPR, type, value,
- build_int_cst (NULL_TREE,
- BITS_PER_WORD - bitsize)));
- bitsize = BITS_PER_WORD;
- mode = word_mode;
- }
+ /* If this initializes a field that is smaller than a
+ word, at the start of a word, try to widen it to a full
+ word. This special case allows us to output C++ member
+ function initializations in a form that the optimizers
+ can understand. */
+ if (REG_P (target)
+ && bitsize < BITS_PER_WORD
+ && bitpos % BITS_PER_WORD == 0
+ && GET_MODE_CLASS (mode) == MODE_INT
+ && TREE_CODE (value) == INTEGER_CST
+ && exp_size >= 0
+ && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
+ {
+ tree type = TREE_TYPE (value);
+
+ if (TYPE_PRECISION (type) < BITS_PER_WORD)
+ {
+ type = lang_hooks.types.type_for_size
+ (BITS_PER_WORD, TYPE_UNSIGNED (type));
+ value = convert (type, value);
+ }
+
+ if (BYTES_BIG_ENDIAN)
+ value
+ = fold (build2 (LSHIFT_EXPR, type, value,
+ build_int_cst (NULL_TREE,
+ BITS_PER_WORD - bitsize)));
+ bitsize = BITS_PER_WORD;
+ mode = word_mode;
+ }
#endif
- if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
- && DECL_NONADDRESSABLE_P (field))
- {
- to_rtx = copy_rtx (to_rtx);
- MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
- }
-
- store_constructor_field (to_rtx, bitsize, bitpos, mode,
- value, type, cleared,
- get_alias_set (TREE_TYPE (field)));
- }
- }
-
- else if (TREE_CODE (type) == ARRAY_TYPE)
- {
- tree elt;
- int i;
- int need_to_clear;
- tree domain;
- tree elttype = TREE_TYPE (type);
- int const_bounds_p;
- HOST_WIDE_INT minelt = 0;
- HOST_WIDE_INT maxelt = 0;
-
- domain = TYPE_DOMAIN (type);
- const_bounds_p = (TYPE_MIN_VALUE (domain)
- && TYPE_MAX_VALUE (domain)
- && host_integerp (TYPE_MIN_VALUE (domain), 0)
- && host_integerp (TYPE_MAX_VALUE (domain), 0));
-
- /* If we have constant bounds for the range of the type, get them. */
- if (const_bounds_p)
- {
- minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
- maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
- }
-
- /* If the constructor has fewer elements than the array,
- clear the whole array first. Similarly if this is
- static constructor of a non-BLKmode object. */
- if (cleared)
- need_to_clear = 0;
- else if (REG_P (target) && TREE_STATIC (exp))
- need_to_clear = 1;
- else
- {
- HOST_WIDE_INT count = 0, zero_count = 0;
- need_to_clear = ! const_bounds_p;
-
- /* This loop is a more accurate version of the loop in
- mostly_zeros_p (it handles RANGE_EXPR in an index).
- It is also needed to check for missing elements. */
- for (elt = CONSTRUCTOR_ELTS (exp);
- elt != NULL_TREE && ! need_to_clear;
- elt = TREE_CHAIN (elt))
- {
- tree index = TREE_PURPOSE (elt);
- HOST_WIDE_INT this_node_count;
-
- if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
- {
- tree lo_index = TREE_OPERAND (index, 0);
- tree hi_index = TREE_OPERAND (index, 1);
+ if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
+ && DECL_NONADDRESSABLE_P (field))
+ {
+ to_rtx = copy_rtx (to_rtx);
+ MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
+ }
+
+ store_constructor_field (to_rtx, bitsize, bitpos, mode,
+ value, type, cleared,
+ get_alias_set (TREE_TYPE (field)));
+ }
+ break;
+ }
+ case ARRAY_TYPE:
+ {
+ tree elt;
+ int i;
+ int need_to_clear;
+ tree domain;
+ tree elttype = TREE_TYPE (type);
+ int const_bounds_p;
+ HOST_WIDE_INT minelt = 0;
+ HOST_WIDE_INT maxelt = 0;
+
+ domain = TYPE_DOMAIN (type);
+ const_bounds_p = (TYPE_MIN_VALUE (domain)
+ && TYPE_MAX_VALUE (domain)
+ && host_integerp (TYPE_MIN_VALUE (domain), 0)
+ && host_integerp (TYPE_MAX_VALUE (domain), 0));
+
+ /* If we have constant bounds for the range of the type, get them. */
+ if (const_bounds_p)
+ {
+ minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
+ maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
+ }
- if (! host_integerp (lo_index, 1)
- || ! host_integerp (hi_index, 1))
- {
- need_to_clear = 1;
- break;
- }
+ /* If the constructor has fewer elements than the array, clear
+ the whole array first. Similarly if this is static
+ constructor of a non-BLKmode object. */
+ if (cleared)
+ need_to_clear = 0;
+ else if (REG_P (target) && TREE_STATIC (exp))
+ need_to_clear = 1;
+ else
+ {
+ HOST_WIDE_INT count = 0, zero_count = 0;
+ need_to_clear = ! const_bounds_p;
+
+ /* This loop is a more accurate version of the loop in
+ mostly_zeros_p (it handles RANGE_EXPR in an index). It
+ is also needed to check for missing elements. */
+ for (elt = CONSTRUCTOR_ELTS (exp);
+ elt != NULL_TREE && ! need_to_clear;
+ elt = TREE_CHAIN (elt))
+ {
+ tree index = TREE_PURPOSE (elt);
+ HOST_WIDE_INT this_node_count;
+
+ if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
+ {
+ tree lo_index = TREE_OPERAND (index, 0);
+ tree hi_index = TREE_OPERAND (index, 1);
+
+ if (! host_integerp (lo_index, 1)
+ || ! host_integerp (hi_index, 1))
+ {
+ need_to_clear = 1;
+ break;
+ }
+
+ this_node_count = (tree_low_cst (hi_index, 1)
+ - tree_low_cst (lo_index, 1) + 1);
+ }
+ else
+ this_node_count = 1;
+
+ count += this_node_count;
+ if (mostly_zeros_p (TREE_VALUE (elt)))
+ zero_count += this_node_count;
+ }
+
+ /* Clear the entire array first if there are any missing
+ elements, or if the incidence of zero elements is >=
+ 75%. */
+ if (! need_to_clear
+ && (count < maxelt - minelt + 1
+ || 4 * zero_count >= 3 * count))
+ need_to_clear = 1;
+ }
+
+ if (need_to_clear && size > 0)
+ {
+ if (REG_P (target))
+ emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
+ else
+ clear_storage (target, GEN_INT (size));
+ cleared = 1;
+ }
- this_node_count = (tree_low_cst (hi_index, 1)
- - tree_low_cst (lo_index, 1) + 1);
- }
- else
- this_node_count = 1;
+ if (!cleared && REG_P (target))
+ /* Inform later passes that the old value is dead. */
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
- count += this_node_count;
- if (mostly_zeros_p (TREE_VALUE (elt)))
- zero_count += this_node_count;
- }
+ /* Store each element of the constructor into the
+ corresponding element of TARGET, determined by counting the
+ elements. */
+ for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
+ elt;
+ elt = TREE_CHAIN (elt), i++)
+ {
+ enum machine_mode mode;
+ HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitpos;
+ int unsignedp;
+ tree value = TREE_VALUE (elt);
+ tree index = TREE_PURPOSE (elt);
+ rtx xtarget = target;
+
+ if (cleared && initializer_zerop (value))
+ continue;
+
+ unsignedp = TYPE_UNSIGNED (elttype);
+ mode = TYPE_MODE (elttype);
+ if (mode == BLKmode)
+ bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
+ ? tree_low_cst (TYPE_SIZE (elttype), 1)
+ : -1);
+ else
+ bitsize = GET_MODE_BITSIZE (mode);
+
+ if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
+ {
+ tree lo_index = TREE_OPERAND (index, 0);
+ tree hi_index = TREE_OPERAND (index, 1);
+ rtx index_r, pos_rtx;
+ HOST_WIDE_INT lo, hi, count;
+ tree position;
+
+ /* If the range is constant and "small", unroll the loop. */
+ if (const_bounds_p
+ && host_integerp (lo_index, 0)
+ && host_integerp (hi_index, 0)
+ && (lo = tree_low_cst (lo_index, 0),
+ hi = tree_low_cst (hi_index, 0),
+ count = hi - lo + 1,
+ (!MEM_P (target)
+ || count <= 2
+ || (host_integerp (TYPE_SIZE (elttype), 1)
+ && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
+ <= 40 * 8)))))
+ {
+ lo -= minelt; hi -= minelt;
+ for (; lo <= hi; lo++)
+ {
+ bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
+
+ if (MEM_P (target)
+ && !MEM_KEEP_ALIAS_SET_P (target)
+ && TREE_CODE (type) == ARRAY_TYPE
+ && TYPE_NONALIASED_COMPONENT (type))
+ {
+ target = copy_rtx (target);
+ MEM_KEEP_ALIAS_SET_P (target) = 1;
+ }
+
+ store_constructor_field
+ (target, bitsize, bitpos, mode, value, type, cleared,
+ get_alias_set (elttype));
+ }
+ }
+ else
+ {
+ rtx loop_start = gen_label_rtx ();
+ rtx loop_end = gen_label_rtx ();
+ tree exit_cond;
+
+ expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
+ unsignedp = TYPE_UNSIGNED (domain);
+
+ index = build_decl (VAR_DECL, NULL_TREE, domain);
+
+ index_r
+ = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
+ &unsignedp, 0));
+ SET_DECL_RTL (index, index_r);
+ store_expr (lo_index, index_r, 0);
+
+ /* Build the head of the loop. */
+ do_pending_stack_adjust ();
+ emit_label (loop_start);
+
+ /* Assign value to element index. */
+ position
+ = convert (ssizetype,
+ fold (build2 (MINUS_EXPR, TREE_TYPE (index),
+ index, TYPE_MIN_VALUE (domain))));
+ position = size_binop (MULT_EXPR, position,
+ convert (ssizetype,
+ TYPE_SIZE_UNIT (elttype)));
+
+ pos_rtx = expand_expr (position, 0, VOIDmode, 0);
+ xtarget = offset_address (target, pos_rtx,
+ highest_pow2_factor (position));
+ xtarget = adjust_address (xtarget, mode, 0);
+ if (TREE_CODE (value) == CONSTRUCTOR)
+ store_constructor (value, xtarget, cleared,
+ bitsize / BITS_PER_UNIT);
+ else
+ store_expr (value, xtarget, 0);
+
+ /* Generate a conditional jump to exit the loop. */
+ exit_cond = build2 (LT_EXPR, integer_type_node,
+ index, hi_index);
+ jumpif (exit_cond, loop_end);
+
+ /* Update the loop counter, and jump to the head of
+ the loop. */
+ expand_assignment (index,
+ build2 (PLUS_EXPR, TREE_TYPE (index),
+ index, integer_one_node));
+
+ emit_jump (loop_start);
+
+ /* Build the end of the loop. */
+ emit_label (loop_end);
+ }
+ }
+ else if ((index != 0 && ! host_integerp (index, 0))
+ || ! host_integerp (TYPE_SIZE (elttype), 1))
+ {
+ tree position;
+
+ if (index == 0)
+ index = ssize_int (1);
+
+ if (minelt)
+ index = fold_convert (ssizetype,
+ fold (build2 (MINUS_EXPR,
+ TREE_TYPE (index),
+ index,
+ TYPE_MIN_VALUE (domain))));
+
+ position = size_binop (MULT_EXPR, index,
+ convert (ssizetype,
+ TYPE_SIZE_UNIT (elttype)));
+ xtarget = offset_address (target,
+ expand_expr (position, 0, VOIDmode, 0),
+ highest_pow2_factor (position));
+ xtarget = adjust_address (xtarget, mode, 0);
+ store_expr (value, xtarget, 0);
+ }
+ else
+ {
+ if (index != 0)
+ bitpos = ((tree_low_cst (index, 0) - minelt)
+ * tree_low_cst (TYPE_SIZE (elttype), 1));
+ else
+ bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
+
+ if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
+ && TREE_CODE (type) == ARRAY_TYPE
+ && TYPE_NONALIASED_COMPONENT (type))
+ {
+ target = copy_rtx (target);
+ MEM_KEEP_ALIAS_SET_P (target) = 1;
+ }
+ store_constructor_field (target, bitsize, bitpos, mode, value,
+ type, cleared, get_alias_set (elttype));
+ }
+ }
+ break;
+ }
- /* Clear the entire array first if there are any missing elements,
- or if the incidence of zero elements is >= 75%. */
- if (! need_to_clear
- && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
- need_to_clear = 1;
- }
+ case VECTOR_TYPE:
+ {
+ tree elt;
+ int i;
+ int need_to_clear;
+ int icode = 0;
+ tree elttype = TREE_TYPE (type);
+ int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
+ enum machine_mode eltmode = TYPE_MODE (elttype);
+ HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitpos;
+ rtx *vector = NULL;
+ unsigned n_elts;
+
+ gcc_assert (eltmode != BLKmode);
+
+ n_elts = TYPE_VECTOR_SUBPARTS (type);
+ if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
+ {
+ enum machine_mode mode = GET_MODE (target);
+
+ icode = (int) vec_init_optab->handlers[mode].insn_code;
+ if (icode != CODE_FOR_nothing)
+ {
+ unsigned int i;
+
+ vector = alloca (n_elts);
+ for (i = 0; i < n_elts; i++)
+ vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
+ }
+ }
+
+ /* If the constructor has fewer elements than the vector,
+ clear the whole array first. Similarly if this is static
+ constructor of a non-BLKmode object. */
+ if (cleared)
+ need_to_clear = 0;
+ else if (REG_P (target) && TREE_STATIC (exp))
+ need_to_clear = 1;
+ else
+ {
+ unsigned HOST_WIDE_INT count = 0, zero_count = 0;
+
+ for (elt = CONSTRUCTOR_ELTS (exp);
+ elt != NULL_TREE;
+ elt = TREE_CHAIN (elt))
+ {
+ int n_elts_here = tree_low_cst
+ (int_const_binop (TRUNC_DIV_EXPR,
+ TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
+ TYPE_SIZE (elttype), 0), 1);
+
+ count += n_elts_here;
+ if (mostly_zeros_p (TREE_VALUE (elt)))
+ zero_count += n_elts_here;
+ }
- if (need_to_clear && size > 0)
- {
- if (REG_P (target))
- emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
- else
- clear_storage (target, GEN_INT (size));
- cleared = 1;
- }
-
- if (!cleared && REG_P (target))
- /* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
- /* Store each element of the constructor into
- the corresponding element of TARGET, determined
- by counting the elements. */
- for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
- elt;
- elt = TREE_CHAIN (elt), i++)
- {
- enum machine_mode mode;
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos;
- int unsignedp;
- tree value = TREE_VALUE (elt);
- tree index = TREE_PURPOSE (elt);
- rtx xtarget = target;
-
- if (cleared && initializer_zerop (value))
- continue;
-
- unsignedp = TYPE_UNSIGNED (elttype);
- mode = TYPE_MODE (elttype);
- if (mode == BLKmode)
- bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
- ? tree_low_cst (TYPE_SIZE (elttype), 1)
- : -1);
- else
- bitsize = GET_MODE_BITSIZE (mode);
-
- if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
- {
- tree lo_index = TREE_OPERAND (index, 0);
- tree hi_index = TREE_OPERAND (index, 1);
- rtx index_r, pos_rtx;
- HOST_WIDE_INT lo, hi, count;
- tree position;
-
- /* If the range is constant and "small", unroll the loop. */
- if (const_bounds_p
- && host_integerp (lo_index, 0)
- && host_integerp (hi_index, 0)
- && (lo = tree_low_cst (lo_index, 0),
- hi = tree_low_cst (hi_index, 0),
- count = hi - lo + 1,
- (!MEM_P (target)
- || count <= 2
- || (host_integerp (TYPE_SIZE (elttype), 1)
- && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
- <= 40 * 8)))))
- {
- lo -= minelt; hi -= minelt;
- for (; lo <= hi; lo++)
- {
- bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
-
- if (MEM_P (target)
- && !MEM_KEEP_ALIAS_SET_P (target)
- && TREE_CODE (type) == ARRAY_TYPE
- && TYPE_NONALIASED_COMPONENT (type))
- {
- target = copy_rtx (target);
- MEM_KEEP_ALIAS_SET_P (target) = 1;
- }
-
- store_constructor_field
- (target, bitsize, bitpos, mode, value, type, cleared,
- get_alias_set (elttype));
- }
- }
- else
- {
- rtx loop_start = gen_label_rtx ();
- rtx loop_end = gen_label_rtx ();
- tree exit_cond;
-
- expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
- unsignedp = TYPE_UNSIGNED (domain);
-
- index = build_decl (VAR_DECL, NULL_TREE, domain);
-
- index_r
- = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
- &unsignedp, 0));
- SET_DECL_RTL (index, index_r);
- store_expr (lo_index, index_r, 0);
-
- /* Build the head of the loop. */
- do_pending_stack_adjust ();
- emit_label (loop_start);
-
- /* Assign value to element index. */
- position
- = convert (ssizetype,
- fold (build2 (MINUS_EXPR, TREE_TYPE (index),
- index, TYPE_MIN_VALUE (domain))));
- position = size_binop (MULT_EXPR, position,
- convert (ssizetype,
- TYPE_SIZE_UNIT (elttype)));
-
- pos_rtx = expand_expr (position, 0, VOIDmode, 0);
- xtarget = offset_address (target, pos_rtx,
- highest_pow2_factor (position));
- xtarget = adjust_address (xtarget, mode, 0);
- if (TREE_CODE (value) == CONSTRUCTOR)
- store_constructor (value, xtarget, cleared,
- bitsize / BITS_PER_UNIT);
- else
- store_expr (value, xtarget, 0);
-
- /* Generate a conditional jump to exit the loop. */
- exit_cond = build2 (LT_EXPR, integer_type_node,
- index, hi_index);
- jumpif (exit_cond, loop_end);
-
- /* Update the loop counter, and jump to the head of
- the loop. */
- expand_assignment (index,
- build2 (PLUS_EXPR, TREE_TYPE (index),
- index, integer_one_node), 0);
-
- emit_jump (loop_start);
-
- /* Build the end of the loop. */
- emit_label (loop_end);
- }
- }
- else if ((index != 0 && ! host_integerp (index, 0))
- || ! host_integerp (TYPE_SIZE (elttype), 1))
- {
- tree position;
-
- if (index == 0)
- index = ssize_int (1);
-
- if (minelt)
- index = fold_convert (ssizetype,
- fold (build2 (MINUS_EXPR,
- TREE_TYPE (index),
- index,
- TYPE_MIN_VALUE (domain))));
-
- position = size_binop (MULT_EXPR, index,
- convert (ssizetype,
- TYPE_SIZE_UNIT (elttype)));
- xtarget = offset_address (target,
- expand_expr (position, 0, VOIDmode, 0),
- highest_pow2_factor (position));
- xtarget = adjust_address (xtarget, mode, 0);
- store_expr (value, xtarget, 0);
- }
- else
- {
- if (index != 0)
- bitpos = ((tree_low_cst (index, 0) - minelt)
- * tree_low_cst (TYPE_SIZE (elttype), 1));
- else
- bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
-
- if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
- && TREE_CODE (type) == ARRAY_TYPE
- && TYPE_NONALIASED_COMPONENT (type))
- {
- target = copy_rtx (target);
- MEM_KEEP_ALIAS_SET_P (target) = 1;
- }
- store_constructor_field (target, bitsize, bitpos, mode, value,
- type, cleared, get_alias_set (elttype));
- }
- }
- }
-
- else if (TREE_CODE (type) == VECTOR_TYPE)
- {
- tree elt;
- int i;
- int need_to_clear;
- int icode = 0;
- tree elttype = TREE_TYPE (type);
- int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
- enum machine_mode eltmode = TYPE_MODE (elttype);
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos;
- rtx *vector = NULL;
- unsigned n_elts;
-
- if (eltmode == BLKmode)
- abort ();
-
- n_elts = TYPE_VECTOR_SUBPARTS (type);
- if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
- {
- enum machine_mode mode = GET_MODE (target);
-
- icode = (int) vec_init_optab->handlers[mode].insn_code;
- if (icode != CODE_FOR_nothing)
- {
- unsigned int i;
-
- vector = alloca (n_elts);
- for (i = 0; i < n_elts; i++)
- vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
- }
- }
-
- /* If the constructor has fewer elements than the vector,
- clear the whole array first. Similarly if this is
- static constructor of a non-BLKmode object. */
- if (cleared)
- need_to_clear = 0;
- else if (REG_P (target) && TREE_STATIC (exp))
- need_to_clear = 1;
- else
- {
- unsigned HOST_WIDE_INT count = 0, zero_count = 0;
-
- for (elt = CONSTRUCTOR_ELTS (exp);
- elt != NULL_TREE;
- elt = TREE_CHAIN (elt))
- {
- int n_elts_here =
- tree_low_cst (
- int_const_binop (TRUNC_DIV_EXPR,
- TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
- TYPE_SIZE (elttype), 0), 1);
-
- count += n_elts_here;
- if (mostly_zeros_p (TREE_VALUE (elt)))
- zero_count += n_elts_here;
- }
-
- /* Clear the entire vector first if there are any missing elements,
- or if the incidence of zero elements is >= 75%. */
- need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
- }
-
- if (need_to_clear && size > 0 && !vector)
- {
- if (REG_P (target))
- emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
- else
- clear_storage (target, GEN_INT (size));
- cleared = 1;
- }
-
- if (!cleared && REG_P (target))
- /* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
- /* Store each element of the constructor into the corresponding
- element of TARGET, determined by counting the elements. */
- for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
- elt;
- elt = TREE_CHAIN (elt), i += bitsize / elt_size)
- {
- tree value = TREE_VALUE (elt);
- tree index = TREE_PURPOSE (elt);
- HOST_WIDE_INT eltpos;
-
- bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
- if (cleared && initializer_zerop (value))
- continue;
-
- if (index != 0)
- eltpos = tree_low_cst (index, 1);
- else
- eltpos = i;
-
- if (vector)
- {
- /* Vector CONSTRUCTORs should only be built from smaller
- vectors in the case of BLKmode vectors. */
- if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
- abort ();
- vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
- }
- else
- {
- enum machine_mode value_mode =
- TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
+ /* Clear the entire vector first if there are any missing elements,
+ or if the incidence of zero elements is >= 75%. */
+ need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
+ }
+
+ if (need_to_clear && size > 0 && !vector)
+ {
+ if (REG_P (target))
+ emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
+ else
+ clear_storage (target, GEN_INT (size));
+ cleared = 1;
+ }
+
+ if (!cleared && REG_P (target))
+ /* Inform later passes that the old value is dead. */
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
+
+ /* Store each element of the constructor into the corresponding
+ element of TARGET, determined by counting the elements. */
+ for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
+ elt;
+ elt = TREE_CHAIN (elt), i += bitsize / elt_size)
+ {
+ tree value = TREE_VALUE (elt);
+ tree index = TREE_PURPOSE (elt);
+ HOST_WIDE_INT eltpos;
+
+ bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
+ if (cleared && initializer_zerop (value))
+ continue;
+
+ if (index != 0)
+ eltpos = tree_low_cst (index, 1);
+ else
+ eltpos = i;
+
+ if (vector)
+ {
+ /* Vector CONSTRUCTORs should only be built from smaller
+ vectors in the case of BLKmode vectors. */
+ gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
+ vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
+ }
+ else
+ {
+ enum machine_mode value_mode =
+ TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
? TYPE_MODE (TREE_TYPE (value))
: eltmode;
- bitpos = eltpos * elt_size;
- store_constructor_field (target, bitsize, bitpos, value_mode, value,
- type, cleared, get_alias_set (elttype));
- }
- }
-
- if (vector)
- emit_insn (GEN_FCN (icode) (target,
- gen_rtx_PARALLEL (GET_MODE (target),
- gen_rtvec_v (n_elts, vector))));
- }
-
- /* Set constructor assignments. */
- else if (TREE_CODE (type) == SET_TYPE)
- {
- tree elt = CONSTRUCTOR_ELTS (exp);
- unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
- tree domain = TYPE_DOMAIN (type);
- tree domain_min, domain_max, bitlength;
-
- /* The default implementation strategy is to extract the constant
- parts of the constructor, use that to initialize the target,
- and then "or" in whatever non-constant ranges we need in addition.
-
- If a large set is all zero or all ones, it is
- probably better to set it using memset.
- Also, if a large set has just a single range, it may also be
- better to first clear all the first clear the set (using
- memset), and set the bits we want. */
-
- /* Check for all zeros. */
- if (elt == NULL_TREE && size > 0)
- {
- if (!cleared)
- clear_storage (target, GEN_INT (size));
- return;
- }
-
- domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
- domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
- bitlength = size_binop (PLUS_EXPR,
- size_diffop (domain_max, domain_min),
- ssize_int (1));
-
- nbits = tree_low_cst (bitlength, 1);
-
- /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
- are "complicated" (more than one range), initialize (the
- constant parts) by copying from a constant. */
- if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
- || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
- {
- unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
- enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
- char *bit_buffer = alloca (nbits);
- HOST_WIDE_INT word = 0;
- unsigned int bit_pos = 0;
- unsigned int ibit = 0;
- unsigned int offset = 0; /* In bytes from beginning of set. */
-
- elt = get_set_constructor_bits (exp, bit_buffer, nbits);
- for (;;)
- {
- if (bit_buffer[ibit])
- {
- if (BYTES_BIG_ENDIAN)
- word |= (1 << (set_word_size - 1 - bit_pos));
- else
- word |= 1 << bit_pos;
- }
-
- bit_pos++; ibit++;
- if (bit_pos >= set_word_size || ibit == nbits)
- {
- if (word != 0 || ! cleared)
- {
- rtx datum = gen_int_mode (word, mode);
- rtx to_rtx;
-
- /* The assumption here is that it is safe to use
- XEXP if the set is multi-word, but not if
- it's single-word. */
- if (MEM_P (target))
- to_rtx = adjust_address (target, mode, offset);
- else if (offset == 0)
- to_rtx = target;
- else
- abort ();
- emit_move_insn (to_rtx, datum);
- }
-
- if (ibit == nbits)
- break;
- word = 0;
- bit_pos = 0;
- offset += set_word_size / BITS_PER_UNIT;
- }
- }
- }
- else if (!cleared)
- /* Don't bother clearing storage if the set is all ones. */
- if (TREE_CHAIN (elt) != NULL_TREE
- || (TREE_PURPOSE (elt) == NULL_TREE
- ? nbits != 1
- : ( ! host_integerp (TREE_VALUE (elt), 0)
- || ! host_integerp (TREE_PURPOSE (elt), 0)
- || (tree_low_cst (TREE_VALUE (elt), 0)
- - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
- != (HOST_WIDE_INT) nbits))))
- clear_storage (target, expr_size (exp));
-
- for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
- {
- /* Start of range of element or NULL. */
- tree startbit = TREE_PURPOSE (elt);
- /* End of range of element, or element value. */
- tree endbit = TREE_VALUE (elt);
- HOST_WIDE_INT startb, endb;
- rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
-
- bitlength_rtx = expand_expr (bitlength,
- NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
-
- /* Handle non-range tuple element like [ expr ]. */
- if (startbit == NULL_TREE)
- {
- startbit = save_expr (endbit);
- endbit = startbit;
- }
+ bitpos = eltpos * elt_size;
+ store_constructor_field (target, bitsize, bitpos,
+ value_mode, value, type,
+ cleared, get_alias_set (elttype));
+ }
+ }
+
+ if (vector)
+ emit_insn (GEN_FCN (icode)
+ (target,
+ gen_rtx_PARALLEL (GET_MODE (target),
+ gen_rtvec_v (n_elts, vector))));
+ break;
+ }
- startbit = convert (sizetype, startbit);
- endbit = convert (sizetype, endbit);
- if (! integer_zerop (domain_min))
- {
- startbit = size_binop (MINUS_EXPR, startbit, domain_min);
- endbit = size_binop (MINUS_EXPR, endbit, domain_min);
- }
- startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
+ /* Set constructor assignments. */
+ case SET_TYPE:
+ {
+ tree elt = CONSTRUCTOR_ELTS (exp);
+ unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
+ tree domain = TYPE_DOMAIN (type);
+ tree domain_min, domain_max, bitlength;
+
+ /* The default implementation strategy is to extract the
+ constant parts of the constructor, use that to initialize
+ the target, and then "or" in whatever non-constant ranges
+ we need in addition.
+
+ If a large set is all zero or all ones, it is probably
+ better to set it using memset. Also, if a large set has
+ just a single range, it may be better to first clear the
+ whole set (using memset), and then set the bits we
+ want. */
+
+ /* Check for all zeros. */
+ if (elt == NULL_TREE && size > 0)
+ {
+ if (!cleared)
+ clear_storage (target, GEN_INT (size));
+ return;
+ }
+
+ domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
+ domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
+ bitlength = size_binop (PLUS_EXPR,
+ size_diffop (domain_max, domain_min),
+ ssize_int (1));
+
+ nbits = tree_low_cst (bitlength, 1);
+
+ /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
+ that are "complicated" (more than one range), initialize
+ (the constant parts) by copying from a constant. */
+ if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
+ || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
+ {
+ unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
+ enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
+ char *bit_buffer = alloca (nbits);
+ HOST_WIDE_INT word = 0;
+ unsigned int bit_pos = 0;
+ unsigned int ibit = 0;
+ unsigned int offset = 0; /* In bytes from beginning of set. */
+
+ elt = get_set_constructor_bits (exp, bit_buffer, nbits);
+ for (;;)
+ {
+ if (bit_buffer[ibit])
+ {
+ if (BYTES_BIG_ENDIAN)
+ word |= (1 << (set_word_size - 1 - bit_pos));
+ else
+ word |= 1 << bit_pos;
+ }
+
+ bit_pos++; ibit++;
+ if (bit_pos >= set_word_size || ibit == nbits)
+ {
+ if (word != 0 || ! cleared)
+ {
+ rtx datum = gen_int_mode (word, mode);
+ rtx to_rtx;
+
+ /* The assumption here is that it is safe to
+ use XEXP if the set is multi-word, but not
+ if it's single-word. */
+ if (MEM_P (target))
+ to_rtx = adjust_address (target, mode, offset);
+ else
+ {
+ gcc_assert (!offset);
+ to_rtx = target;
+ }
+ emit_move_insn (to_rtx, datum);
+ }
+
+ if (ibit == nbits)
+ break;
+ word = 0;
+ bit_pos = 0;
+ offset += set_word_size / BITS_PER_UNIT;
+ }
+ }
+ }
+ else if (!cleared)
+ /* Don't bother clearing storage if the set is all ones. */
+ if (TREE_CHAIN (elt) != NULL_TREE
+ || (TREE_PURPOSE (elt) == NULL_TREE
+ ? nbits != 1
+ : ( ! host_integerp (TREE_VALUE (elt), 0)
+ || ! host_integerp (TREE_PURPOSE (elt), 0)
+ || (tree_low_cst (TREE_VALUE (elt), 0)
+ - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
+ != (HOST_WIDE_INT) nbits))))
+ clear_storage (target, expr_size (exp));
+
+ for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
+ {
+ /* Start of range of element or NULL. */
+ tree startbit = TREE_PURPOSE (elt);
+ /* End of range of element, or element value. */
+ tree endbit = TREE_VALUE (elt);
+ HOST_WIDE_INT startb, endb;
+ rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
+
+ bitlength_rtx = expand_expr (bitlength,
+ NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
+
+ /* Handle non-range tuple element like [ expr ]. */
+ if (startbit == NULL_TREE)
+ {
+ startbit = save_expr (endbit);
+ endbit = startbit;
+ }
+
+ startbit = convert (sizetype, startbit);
+ endbit = convert (sizetype, endbit);
+ if (! integer_zerop (domain_min))
+ {
+ startbit = size_binop (MINUS_EXPR, startbit, domain_min);
+ endbit = size_binop (MINUS_EXPR, endbit, domain_min);
+ }
+ startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
+ EXPAND_CONST_ADDRESS);
+ endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
EXPAND_CONST_ADDRESS);
- endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
- EXPAND_CONST_ADDRESS);
-
- if (REG_P (target))
- {
- targetx
- = assign_temp
+
+ if (REG_P (target))
+ {
+ targetx
+ = assign_temp
((build_qualified_type (lang_hooks.types.type_for_mode
(GET_MODE (target), 0),
TYPE_QUAL_CONST)),
0, 1, 1);
- emit_move_insn (targetx, target);
- }
+ emit_move_insn (targetx, target);
+ }
+
+ else
+ {
+ gcc_assert (MEM_P (target));
+ targetx = target;
+ }
- else if (MEM_P (target))
- targetx = target;
- else
- abort ();
-
- /* Optimization: If startbit and endbit are constants divisible
- by BITS_PER_UNIT, call memset instead. */
- if (TREE_CODE (startbit) == INTEGER_CST
- && TREE_CODE (endbit) == INTEGER_CST
- && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
- && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
- {
- emit_library_call (memset_libfunc, LCT_NORMAL,
- VOIDmode, 3,
- plus_constant (XEXP (targetx, 0),
- startb / BITS_PER_UNIT),
- Pmode,
- constm1_rtx, TYPE_MODE (integer_type_node),
- GEN_INT ((endb - startb) / BITS_PER_UNIT),
- TYPE_MODE (sizetype));
- }
- else
- emit_library_call (setbits_libfunc, LCT_NORMAL,
- VOIDmode, 4, XEXP (targetx, 0),
- Pmode, bitlength_rtx, TYPE_MODE (sizetype),
- startbit_rtx, TYPE_MODE (sizetype),
- endbit_rtx, TYPE_MODE (sizetype));
-
- if (REG_P (target))
- emit_move_insn (target, targetx);
- }
+ /* Optimization: If startbit and endbit are constants divisible
+ by BITS_PER_UNIT, call memset instead. */
+ if (TREE_CODE (startbit) == INTEGER_CST
+ && TREE_CODE (endbit) == INTEGER_CST
+ && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
+ && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
+ {
+ emit_library_call (memset_libfunc, LCT_NORMAL,
+ VOIDmode, 3,
+ plus_constant (XEXP (targetx, 0),
+ startb / BITS_PER_UNIT),
+ Pmode,
+ constm1_rtx, TYPE_MODE (integer_type_node),
+ GEN_INT ((endb - startb) / BITS_PER_UNIT),
+ TYPE_MODE (sizetype));
+ }
+ else
+ emit_library_call (setbits_libfunc, LCT_NORMAL,
+ VOIDmode, 4, XEXP (targetx, 0),
+ Pmode, bitlength_rtx, TYPE_MODE (sizetype),
+ startbit_rtx, TYPE_MODE (sizetype),
+ endbit_rtx, TYPE_MODE (sizetype));
+
+ if (REG_P (target))
+ emit_move_insn (target, targetx);
+ }
+ break;
+ }
+ default:
+ gcc_unreachable ();
}
-
- else
- abort ();
}
/* Store the value of EXP (an expression tree)
BITSIZE bits, starting BITPOS bits from the start of TARGET.
If MODE is VOIDmode, it means that we are storing into a bit-field.
- If VALUE_MODE is VOIDmode, return nothing in particular.
- UNSIGNEDP is not used in this case.
-
- Otherwise, return an rtx for the value stored. This rtx
- has mode VALUE_MODE if that is convenient to do.
- In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
+ Always return const0_rtx unless we have something particular to
+ return.
TYPE is the type of the underlying object,
static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
- enum machine_mode mode, tree exp, enum machine_mode value_mode,
- int unsignedp, tree type, int alias_set)
+ enum machine_mode mode, tree exp, tree type, int alias_set)
{
HOST_WIDE_INT width_mask = 0;
if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
emit_move_insn (object, target);
- store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
- alias_set);
+ store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
emit_move_insn (target, object);
{
/* We're storing into a struct containing a single __complex. */
- if (bitpos != 0)
- abort ();
- return store_expr (exp, target, value_mode != VOIDmode);
+ gcc_assert (!bitpos);
+ return store_expr (exp, target, 0);
}
/* If the structure is in a register or if the component
boundary. If so, we simply do a block copy. */
if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
{
- if (!MEM_P (target) || !MEM_P (temp)
- || bitpos % BITS_PER_UNIT != 0)
- abort ();
+ gcc_assert (MEM_P (target) && MEM_P (temp)
+ && !(bitpos % BITS_PER_UNIT));
target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
emit_block_move (target, temp,
/ BITS_PER_UNIT),
BLOCK_OP_NORMAL);
- return value_mode == VOIDmode ? const0_rtx : target;
+ return const0_rtx;
}
/* Store the value in the bitfield. */
store_bit_field (target, bitsize, bitpos, mode, temp);
- if (value_mode != VOIDmode)
- {
- /* The caller wants an rtx for the value.
- If possible, avoid refetching from the bitfield itself. */
- if (width_mask != 0
- && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
- {
- tree count;
- enum machine_mode tmode;
-
- tmode = GET_MODE (temp);
- if (tmode == VOIDmode)
- tmode = value_mode;
-
- if (unsignedp)
- return expand_and (tmode, temp,
- gen_int_mode (width_mask, tmode),
- NULL_RTX);
-
- count = build_int_cst (NULL_TREE,
- GET_MODE_BITSIZE (tmode) - bitsize);
- temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
- return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
- }
-
- return extract_bit_field (target, bitsize, bitpos, unsignedp,
- NULL_RTX, value_mode, VOIDmode);
- }
return const0_rtx;
}
else
{
- rtx addr = XEXP (target, 0);
- rtx to_rtx = target;
-
- /* If a value is wanted, it must be the lhs;
- so make the address stable for multiple use. */
-
- if (value_mode != VOIDmode && !REG_P (addr)
- && ! CONSTANT_ADDRESS_P (addr)
- /* A frame-pointer reference is already stable. */
- && ! (GET_CODE (addr) == PLUS
- && GET_CODE (XEXP (addr, 1)) == CONST_INT
- && (XEXP (addr, 0) == virtual_incoming_args_rtx
- || XEXP (addr, 0) == virtual_stack_vars_rtx)))
- to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
-
/* Now build a reference to just the desired component. */
-
- to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
+ rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
if (to_rtx == target)
to_rtx = copy_rtx (to_rtx);
if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
set_mem_alias_set (to_rtx, alias_set);
- return store_expr (exp, to_rtx, value_mode != VOIDmode);
+ return store_expr (exp, to_rtx, 0);
}
}
\f
/* If a size was specified in the ARRAY_REF, it's the size measured
in alignment units of the element type. So multiply by that value. */
if (aligned_size)
- return size_binop (MULT_EXPR, aligned_size,
- size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
+ {
+ /* ??? tree_ssa_useless_type_conversion will eliminate casts to
+ sizetype from another type of the same width and signedness. */
+ if (TREE_TYPE (aligned_size) != sizetype)
+ aligned_size = fold_convert (sizetype, aligned_size);
+ return size_binop (MULT_EXPR, aligned_size,
+ size_int (TYPE_ALIGN_UNIT (elmt_type)));
+ }
/* Otherwise, take the size from that of the element type. Substitute
any PLACEHOLDER_EXPR that we have. */
return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
/* Otherwise, return a zero of the appropriate type. */
- return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
+ return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
value. */
if (aligned_offset)
- return size_binop (MULT_EXPR, aligned_offset,
- size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
+ {
+ /* ??? tree_ssa_useless_type_conversion will eliminate casts to
+ sizetype from another type of the same width and signedness. */
+ if (TREE_TYPE (aligned_offset) != sizetype)
+ aligned_offset = fold_convert (sizetype, aligned_offset);
+ return size_binop (MULT_EXPR, aligned_offset,
+ size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
+ }
/* Otherwise, take the offset from that of the field. Substitute
any PLACEHOLDER_EXPR that we have. */
/* Now look at our tree code and possibly recurse. */
switch (TREE_CODE_CLASS (TREE_CODE (exp)))
{
- case 'd':
+ case tcc_declaration:
exp_rtl = DECL_RTL_IF_SET (exp);
break;
- case 'c':
+ case tcc_constant:
return 1;
- case 'x':
+ case tcc_exceptional:
if (TREE_CODE (exp) == TREE_LIST)
{
while (1)
else
return 0;
- case 's':
+ case tcc_statement:
/* The only case we look at here is the DECL_INITIAL inside a
DECL_EXPR. */
return (TREE_CODE (exp) != DECL_EXPR
|| !DECL_INITIAL (DECL_EXPR_DECL (exp))
|| safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
- case '2':
- case '<':
+ case tcc_binary:
+ case tcc_comparison:
if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
return 0;
/* Fall through. */
- case '1':
+ case tcc_unary:
return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
- case 'e':
- case 'r':
+ case tcc_expression:
+ case tcc_reference:
/* Now do code-specific tests. EXP_RTL is set to any rtx we find in
the expression. If it is set, we conflict iff we are that rtx or
both are in memory. Otherwise, we check all operands of the
}
break;
+ case MISALIGNED_INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
if (MEM_P (x)
&& alias_sets_conflict_p (MEM_ALIAS_SET (x),
case WITH_CLEANUP_EXPR:
case CLEANUP_POINT_EXPR:
/* Lowered by gimplify.c. */
- abort ();
+ gcc_unreachable ();
case SAVE_EXPR:
return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
>= (unsigned int) LAST_AND_UNUSED_TREE_CODE
&& !lang_hooks.safe_from_p (x, exp))
return 0;
+ break;
+
+ case tcc_type:
+ /* Should never get a type here. */
+ gcc_unreachable ();
}
/* If we have an rtl, find any enclosed object. Then see if we conflict
factor = highest_pow2_factor (exp);
if (TREE_CODE (target) == COMPONENT_REF)
- target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
+ target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
else
- target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
+ target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
return MAX (factor, target_align);
}
\f
expand_decl (var);
else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
rest_of_decl_compilation (var, 0, 0);
- else if (TREE_CODE (var) == TYPE_DECL
- || TREE_CODE (var) == CONST_DECL
- || TREE_CODE (var) == FUNCTION_DECL
- || TREE_CODE (var) == LABEL_DECL)
- /* No expansion needed. */;
else
- abort ();
+ /* No expansion needed. */
+ gcc_assert (TREE_CODE (var) == TYPE_DECL
+ || TREE_CODE (var) == CONST_DECL
+ || TREE_CODE (var) == FUNCTION_DECL
+ || TREE_CODE (var) == LABEL_DECL);
}
}
}
\f
-/* A subroutine of expand_expr. Evaluate the address of EXP.
+/* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
static rtx
-expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
- enum expand_modifier modifier)
+expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
+ enum expand_modifier modifier)
{
rtx result, subtarget;
tree inner, offset;
generating ADDR_EXPR of something that isn't an LVALUE. The only
exception here is STRING_CST. */
if (TREE_CODE (exp) == CONSTRUCTOR
- || TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
+ || CONSTANT_CLASS_P (exp))
return XEXP (output_constant_def (exp, 0), 0);
/* Everything must be something allowed by is_gimple_addressable. */
case CONST_DECL:
/* Recurse and make the output_constant_def clause above handle this. */
- return expand_expr_addr_expr (DECL_INITIAL (exp), target,
- tmode, modifier);
+ return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
+ tmode, modifier);
case REALPART_EXPR:
/* The real part of the complex number is always first, therefore
case IMAGPART_EXPR:
/* The imaginary part of the complex number is always second.
- The expresion is therefore always offset by the size of the
+ The expression is therefore always offset by the size of the
scalar type. */
offset = 0;
bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
/* If the DECL isn't in memory, then the DECL wasn't properly
marked TREE_ADDRESSABLE, which will be either a front-end
or a tree optimizer bug. */
- if (GET_CODE (result) != MEM)
- abort ();
+ gcc_assert (GET_CODE (result) == MEM);
result = XEXP (result, 0);
/* ??? Is this needed anymore? */
- if (!TREE_USED (exp) == 0)
+ if (DECL_P (exp) && !TREE_USED (exp) == 0)
{
assemble_external (exp);
TREE_USED (exp) = 1;
}
/* We must have made progress. */
- if (inner == exp)
- abort ();
+ gcc_assert (inner != exp);
subtarget = offset || bitpos ? NULL_RTX : target;
- result = expand_expr_addr_expr (inner, subtarget, tmode, modifier);
-
- if (tmode == VOIDmode)
- {
- tmode = GET_MODE (result);
- if (tmode == VOIDmode)
- tmode = Pmode;
- }
+ result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
if (offset)
{
result = force_operand (result, NULL);
tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
+ result = convert_memory_address (tmode, result);
+ tmp = convert_memory_address (tmode, tmp);
+
if (modifier == EXPAND_SUM)
result = gen_rtx_PLUS (tmode, result, tmp);
else
{
/* Someone beforehand should have rejected taking the address
of such an object. */
- if (bitpos % BITS_PER_UNIT != 0)
- abort ();
+ gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
result = plus_constant (result, bitpos / BITS_PER_UNIT);
if (modifier < EXPAND_SUM)
return result;
}
+/* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
+ The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
+
+static rtx
+expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
+ enum expand_modifier modifier)
+{
+ enum machine_mode rmode;
+ rtx result;
+
+ /* Target mode of VOIDmode says "whatever's natural". */
+ if (tmode == VOIDmode)
+ tmode = TYPE_MODE (TREE_TYPE (exp));
+
+ /* We can get called with some Weird Things if the user does silliness
+ like "(short) &a". In that case, convert_memory_address won't do
+ the right thing, so ignore the given target mode. */
+ if (tmode != Pmode && tmode != ptr_mode)
+ tmode = Pmode;
+
+ result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
+ tmode, modifier);
+
+ /* Despite expand_expr claims concerning ignoring TMODE when not
+ strictly convenient, stuff breaks if we don't honor it. Note
+ that combined with the above, we only do this for pointer modes. */
+ rmode = GET_MODE (result);
+ if (rmode == VOIDmode)
+ rmode = tmode;
+ if (rmode != tmode)
+ result = convert_memory_address (tmode, result);
+
+ return result;
+}
+
+
/* expand_expr: generate code for computing expression EXP.
An rtx for the computed value is returned. The value is never null.
In the case of a void EXP, const0_rtx is returned.
return const0_rtx;
}
- if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
- || code == INDIRECT_REF)
+ if (TREE_CODE_CLASS (code) == tcc_unary
+ || code == COMPONENT_REF || code == INDIRECT_REF)
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
modifier);
- else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
+ else if (TREE_CODE_CLASS (code) == tcc_binary
+ || TREE_CODE_CLASS (code) == tcc_comparison
|| code == ARRAY_REF || code == ARRAY_RANGE_REF)
{
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
return temp;
}
+ case SSA_NAME:
+ return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
+ NULL);
+
case PARM_DECL:
case VAR_DECL:
/* If a static var's type was incomplete when the decl was written,
case FUNCTION_DECL:
case RESULT_DECL:
- if (DECL_RTL (exp) == 0)
- abort ();
+ gcc_assert (DECL_RTL (exp));
/* Ensure variable marked as used even if it doesn't go through
a parser. If it hasn't be used yet, write out an external
/* Variables inherited from containing functions should have
been lowered by this point. */
context = decl_function_context (exp);
- if (context != 0
- && context != current_function_decl
- && !TREE_STATIC (exp)
- /* ??? C++ creates functions that are not TREE_STATIC. */
- && TREE_CODE (exp) != FUNCTION_DECL)
- abort ();
+ gcc_assert (!context
+ || context == current_function_decl
+ || TREE_STATIC (exp)
+ /* ??? C++ creates functions that are not TREE_STATIC. */
+ || TREE_CODE (exp) == FUNCTION_DECL);
/* This is the case of an array whose size is to be determined
from its initializer, while the initializer is still being parsed.
See expand_decl. */
- else if (MEM_P (DECL_RTL (exp))
+ if (MEM_P (DECL_RTL (exp))
&& REG_P (XEXP (DECL_RTL (exp), 0)))
temp = validize_mem (DECL_RTL (exp));
if (REG_P (DECL_RTL (exp))
&& GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
{
+ enum machine_mode pmode;
+
/* Get the signedness used for this variable. Ensure we get the
same mode we got when the variable was declared. */
- if (GET_MODE (DECL_RTL (exp))
- != promote_mode (type, DECL_MODE (exp), &unsignedp,
- (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
- abort ();
+ pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
+ (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
+ gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
SUBREG_PROMOTED_VAR_P (temp) = 1;
expanders calling save_expr immediately before expanding
something. Assume this means that we only have to deal
with non-BLKmode values. */
- if (GET_MODE (ret) == BLKmode)
- abort ();
+ gcc_assert (GET_MODE (ret) != BLKmode);
val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
DECL_ARTIFICIAL (val) = 1;
return target;
}
+ case MISALIGNED_INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
{
tree exp1 = TREE_OPERAND (exp, 0);
+ tree orig;
+
+ if (code == MISALIGNED_INDIRECT_REF
+ && !targetm.vectorize.misaligned_mem_ok (mode))
+ abort ();
if (modifier != EXPAND_WRITE)
{
op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
op0 = memory_address (mode, op0);
+
+ if (code == ALIGN_INDIRECT_REF)
+ {
+ int align = TYPE_ALIGN_UNIT (type);
+ op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
+ op0 = memory_address (mode, op0);
+ }
+
temp = gen_rtx_MEM (mode, op0);
- set_mem_attributes (temp, exp, 0);
+
+ orig = REF_ORIGINAL (exp);
+ if (!orig)
+ orig = exp;
+ set_mem_attributes (temp, orig, 0);
return temp;
}
case ARRAY_REF:
-#ifdef ENABLE_CHECKING
- if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
- abort ();
-#endif
-
{
tree array = TREE_OPERAND (exp, 0);
- tree low_bound = array_ref_low_bound (exp);
- tree index = convert (sizetype, TREE_OPERAND (exp, 1));
- HOST_WIDE_INT i;
-
- /* Optimize the special-case of a zero lower bound.
-
- We convert the low_bound to sizetype to avoid some problems
- with constant folding. (E.g. suppose the lower bound is 1,
- and its mode is QI. Without the conversion, (ARRAY
- +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
- +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
-
- if (! integer_zerop (low_bound))
- index = size_diffop (index, convert (sizetype, low_bound));
+ tree index = TREE_OPERAND (exp, 1);
/* Fold an expression like: "foo"[2].
This is not done in fold so it won't happen inside &.
&& modifier != EXPAND_MEMORY
&& TREE_CODE (array) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array)
- && TREE_CODE (index) == INTEGER_CST
- && 0 > compare_tree_int (index,
- list_length (CONSTRUCTOR_ELTS
- (TREE_OPERAND (exp, 0)))))
+ && TREE_CODE (index) == INTEGER_CST)
{
tree elem;
- for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
- i = TREE_INT_CST_LOW (index);
- elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
+ for (elem = CONSTRUCTOR_ELTS (array);
+ (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
+ elem = TREE_CHAIN (elem))
;
- if (elem)
+ if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
modifier);
}
/* If we got back the original object, something is wrong. Perhaps
we are evaluating an expression too early. In any event, don't
infinitely recurse. */
- if (tem == exp)
- abort ();
+ gcc_assert (tem != exp);
/* If TEM's type is a union of variable size, pass TARGET to the inner
computation, since it will need a temporary and TARGET is known
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
EXPAND_SUM);
- if (!MEM_P (op0))
- abort ();
+ gcc_assert (MEM_P (op0));
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (offset_rtx) != Pmode)
one element arrays having the same mode as its element. */
if (GET_CODE (op0) == CONCAT)
{
- if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
- abort ();
+ gcc_assert (bitpos == 0
+ && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
return op0;
}
/* In this case, BITPOS must start at a byte boundary and
TARGET, if specified, must be a MEM. */
- if (!MEM_P (op0)
- || (target != 0 && !MEM_P (target))
- || bitpos % BITS_PER_UNIT != 0)
- abort ();
+ gcc_assert (MEM_P (op0)
+ && (!target || MEM_P (target))
+ && !(bitpos % BITS_PER_UNIT));
emit_block_move (target,
adjust_address (op0, VOIDmode,
/* Store data into beginning of memory target. */
store_expr (TREE_OPERAND (exp, 0),
adjust_address (target, TYPE_MODE (valtype), 0),
- modifier == EXPAND_STACK_PARM ? 2 : 0);
-
- else if (REG_P (target))
- /* Store this field into a union of the proper type. */
- store_field (target,
- MIN ((int_size_in_bytes (TREE_TYPE
- (TREE_OPERAND (exp, 0)))
- * BITS_PER_UNIT),
- (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
- 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
- VOIDmode, 0, type, 0);
+ modifier == EXPAND_STACK_PARM);
+
else
- abort ();
+ {
+ gcc_assert (REG_P (target));
+
+ /* Store this field into a union of the proper type. */
+ store_field (target,
+ MIN ((int_size_in_bytes (TREE_TYPE
+ (TREE_OPERAND (exp, 0)))
+ * BITS_PER_UNIT),
+ (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
+ 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
+ type, 0);
+ }
/* Return the entire union. */
return target;
}
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
- op0 = REDUCE_BIT_FIELD (op0);
if (GET_MODE (op0) == mode)
- return op0;
+ ;
/* If OP0 is a constant, just convert it into the proper mode. */
- if (CONSTANT_P (op0))
+ else if (CONSTANT_P (op0))
{
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
enum machine_mode inner_mode = TYPE_MODE (inner_type);
if (modifier == EXPAND_INITIALIZER)
- return simplify_gen_subreg (mode, op0, inner_mode,
- subreg_lowpart_offset (mode,
- inner_mode));
+ op0 = simplify_gen_subreg (mode, op0, inner_mode,
+ subreg_lowpart_offset (mode,
+ inner_mode));
else
- return convert_modes (mode, inner_mode, op0,
- TYPE_UNSIGNED (inner_type));
+ op0= convert_modes (mode, inner_mode, op0,
+ TYPE_UNSIGNED (inner_type));
}
- if (modifier == EXPAND_INITIALIZER)
- return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
+ else if (modifier == EXPAND_INITIALIZER)
+ op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
- if (target == 0)
- return
- convert_to_mode (mode, op0,
- TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
+ else if (target == 0)
+ op0 = convert_to_mode (mode, op0,
+ TYPE_UNSIGNED (TREE_TYPE
+ (TREE_OPERAND (exp, 0))));
else
- convert_move (target, op0,
- TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
- return target;
+ {
+ convert_move (target, op0,
+ TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
+ op0 = target;
+ }
+
+ return REDUCE_BIT_FIELD (op0);
case VIEW_CONVERT_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
constants to change mode. */
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- if (TREE_ADDRESSABLE (exp))
- abort ();
+ gcc_assert (!TREE_ADDRESSABLE (exp));
if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
target
temp_size, 0, type);
rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
- if (TREE_ADDRESSABLE (exp))
- abort ();
+ gcc_assert (!TREE_ADDRESSABLE (exp));
if (GET_MODE (op0) == BLKmode)
emit_block_move (new_with_op0_mode, op0,
case FIX_ROUND_EXPR:
case FIX_FLOOR_EXPR:
case FIX_CEIL_EXPR:
- abort (); /* Not used for C. */
+ gcc_unreachable (); /* Not used for C. */
case FIX_TRUNC_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
temp = expand_unop (mode,
optab_for_tree_code (NEGATE_EXPR, type),
op0, target, 0);
- if (temp == 0)
- abort ();
+ gcc_assert (temp);
return REDUCE_BIT_FIELD (temp);
case ABS_EXPR:
target = 0;
/* ABS_EXPR is not valid for complex arguments. */
- if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
- || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
- abort ();
+ gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
+ && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
/* Unsigned abs is simply the operand. Testing here means we don't
risk generating incorrect code below. */
if (modifier == EXPAND_STACK_PARM)
target = 0;
temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
- if (temp == 0)
- abort ();
+ gcc_assert (temp);
return temp;
/* ??? Can optimize bitwise operations with one arg constant.
only with operands that are always zero or one. */
temp = expand_binop (mode, xor_optab, op0, const1_rtx,
target, 1, OPTAB_LIB_WIDEN);
- if (temp == 0)
- abort ();
+ gcc_assert (temp);
return temp;
case STATEMENT_LIST:
{
tree_stmt_iterator iter;
- if (!ignore)
- abort ();
+ gcc_assert (ignore);
for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
tree then_ = TREE_OPERAND (exp, 1);
tree else_ = TREE_OPERAND (exp, 2);
- if (TREE_CODE (then_) != GOTO_EXPR
- || TREE_CODE (GOTO_DESTINATION (then_)) != LABEL_DECL
- || TREE_CODE (else_) != GOTO_EXPR
- || TREE_CODE (GOTO_DESTINATION (else_)) != LABEL_DECL)
- abort ();
+ gcc_assert (TREE_CODE (then_) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
+ && TREE_CODE (else_) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
return expand_expr (else_, const0_rtx, VOIDmode, 0);
a temporary variable, so that we can evaluate them here
for side effect only. If type is void, we must do likewise. */
- if (TREE_ADDRESSABLE (type)
- || ignore
- || TREE_TYPE (TREE_OPERAND (exp, 1)) == void_type_node
- || TREE_TYPE (TREE_OPERAND (exp, 2)) == void_type_node)
- abort ();
+ gcc_assert (!TREE_ADDRESSABLE (type)
+ && !ignore
+ && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
+ && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
/* If we are not to produce a result, we have no target. Otherwise,
if a target was specified use it; it will not be used as an
op1 = gen_label_rtx ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
store_expr (TREE_OPERAND (exp, 1), temp,
- modifier == EXPAND_STACK_PARM ? 2 : 0);
+ modifier == EXPAND_STACK_PARM);
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
store_expr (TREE_OPERAND (exp, 2), temp,
- modifier == EXPAND_STACK_PARM ? 2 : 0);
+ modifier == EXPAND_STACK_PARM);
emit_label (op1);
OK_DEFER_POP;
return temp;
+ case VEC_COND_EXPR:
+ target = expand_vec_cond_expr (exp, target);
+ return target;
+
case MODIFY_EXPR:
{
- /* If lhs is complex, expand calls in rhs before computing it.
- That's so we don't compute a pointer and save it over a
- call. If lhs is simple, compute it first so we can give it
- as a target if the rhs is just a call. This avoids an
- extra temp and copy and that prevents a partial-subsumption
- which makes bad code. Actually we could treat
- component_ref's of vars like vars. */
-
tree lhs = TREE_OPERAND (exp, 0);
tree rhs = TREE_OPERAND (exp, 1);
- temp = 0;
+ gcc_assert (ignore);
/* Check for |= or &= of a bitfield of size one into another bitfield
of size 1. In this case, (unless we need the result of the
??? At this point, we can't get a BIT_FIELD_REF here. But if
things change so we do, this code should be enhanced to
support it. */
- if (ignore
- && TREE_CODE (lhs) == COMPONENT_REF
+ if (TREE_CODE (lhs) == COMPONENT_REF
&& (TREE_CODE (rhs) == BIT_IOR_EXPR
|| TREE_CODE (rhs) == BIT_AND_EXPR)
&& TREE_OPERAND (rhs, 0) == lhs
expand_assignment (lhs, convert (TREE_TYPE (rhs),
(TREE_CODE (rhs) == BIT_IOR_EXPR
? integer_one_node
- : integer_zero_node)),
- 0);
+ : integer_zero_node)));
do_pending_stack_adjust ();
emit_label (label);
return const0_rtx;
}
- temp = expand_assignment (lhs, rhs, ! ignore);
+ expand_assignment (lhs, rhs);
- return temp;
+ return const0_rtx;
}
case RETURN_EXPR:
return const0_rtx;
case ADDR_EXPR:
- return expand_expr_addr_expr (TREE_OPERAND (exp, 0), target,
- tmode, modifier);
+ return expand_expr_addr_expr (exp, target, tmode, modifier);
/* COMPLEX type for Extended Pascal & Fortran */
case COMPLEX_EXPR:
case EH_FILTER_EXPR:
case TRY_FINALLY_EXPR:
/* Lowered by tree-eh.c. */
- abort ();
+ gcc_unreachable ();
case WITH_CLEANUP_EXPR:
case CLEANUP_POINT_EXPR:
case POSTDECREMENT_EXPR:
case LOOP_EXPR:
case EXIT_EXPR:
- case LABELED_BLOCK_EXPR:
- case EXIT_BLOCK_EXPR:
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
/* Lowered by gimplify.c. */
- abort ();
+ gcc_unreachable ();
case EXC_PTR_EXPR:
return get_exception_pointer (cfun);
case FDESC_EXPR:
/* Function descriptors are not valid except for as
initialization constants, and should not be expanded. */
- abort ();
+ gcc_unreachable ();
case SWITCH_EXPR:
expand_case (exp);
return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
modifier, alt_rtl);
+ case REALIGN_LOAD_EXPR:
+ {
+ tree oprnd0 = TREE_OPERAND (exp, 0);
+ tree oprnd1 = TREE_OPERAND (exp, 1);
+ tree oprnd2 = TREE_OPERAND (exp, 2);
+ rtx op2;
+
+ this_optab = optab_for_tree_code (code, type);
+ expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
+ op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
+ temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+ target, unsignedp);
+ if (temp == 0)
+ abort ();
+ return temp;
+ }
+
+
default:
return lang_hooks.expand_expr (exp, original_target, tmode,
modifier, alt_rtl);
target = 0;
temp = expand_binop (mode, this_optab, op0, op1, target,
unsignedp, OPTAB_LIB_WIDEN);
- if (temp == 0)
- abort ();
+ gcc_assert (temp);
return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
tree
string_constant (tree arg, tree *ptr_offset)
{
+ tree array, offset;
STRIP_NOPS (arg);
- if (TREE_CODE (arg) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
+ if (TREE_CODE (arg) == ADDR_EXPR)
{
- *ptr_offset = size_zero_node;
- return TREE_OPERAND (arg, 0);
- }
- if (TREE_CODE (arg) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
- && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
- {
- *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
- return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
+ {
+ *ptr_offset = size_zero_node;
+ return TREE_OPERAND (arg, 0);
+ }
+ else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
+ {
+ array = TREE_OPERAND (arg, 0);
+ offset = size_zero_node;
+ }
+ else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
+ {
+ array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
+ if (TREE_CODE (array) != STRING_CST
+ && TREE_CODE (array) != VAR_DECL)
+ return 0;
+ }
+ else
+ return 0;
}
else if (TREE_CODE (arg) == PLUS_EXPR)
{
STRIP_NOPS (arg1);
if (TREE_CODE (arg0) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
+ && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
+ || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
{
- *ptr_offset = convert (sizetype, arg1);
- return TREE_OPERAND (arg0, 0);
+ array = TREE_OPERAND (arg0, 0);
+ offset = arg1;
}
else if (TREE_CODE (arg1) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
+ && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
+ || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
{
- *ptr_offset = convert (sizetype, arg0);
- return TREE_OPERAND (arg1, 0);
+ array = TREE_OPERAND (arg1, 0);
+ offset = arg0;
}
+ else
+ return 0;
+ }
+ else
+ return 0;
+
+ if (TREE_CODE (array) == STRING_CST)
+ {
+ *ptr_offset = convert (sizetype, offset);
+ return array;
+ }
+ else if (TREE_CODE (array) == VAR_DECL)
+ {
+ int length;
+
+ /* Variables initialized to string literals can be handled too. */
+ if (DECL_INITIAL (array) == NULL_TREE
+ || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
+ return 0;
+
+ /* If they are read-only, non-volatile and bind locally. */
+ if (! TREE_READONLY (array)
+ || TREE_SIDE_EFFECTS (array)
+ || ! targetm.binds_local_p (array))
+ return 0;
+
+ /* Avoid const char foo[4] = "abcde"; */
+ if (DECL_SIZE_UNIT (array) == NULL_TREE
+ || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
+ || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
+ || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
+ return 0;
+
+ /* If variable is bigger than the string literal, OFFSET must be constant
+ and inside of the bounds of the string literal. */
+ offset = convert (sizetype, offset);
+ if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
+ && (! host_integerp (offset, 1)
+ || compare_tree_int (offset, length) >= 0))
+ return 0;
+
+ *ptr_offset = offset;
+ return DECL_INITIAL (array);
}
return 0;
break;
default:
- abort ();
+ gcc_unreachable ();
}
/* Put a constant second. */
code = GET_CODE (result);
label = gen_label_rtx ();
- if (bcc_gen_fctn[(int) code] == 0)
- abort ();
+ gcc_assert (bcc_gen_fctn[(int) code]);
emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
emit_move_insn (target, invert ? const1_rtx : const0_rtx);
for (; i < units; ++i)
RTVEC_ELT (v, i) = CONST0_RTX (inner);
- return gen_rtx_raw_CONST_VECTOR (mode, v);
+ return gen_rtx_CONST_VECTOR (mode, v);
}
#include "gt-expr.h"