};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
+ unsigned int,
unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
struct move_by_pieces *);
to perform a structure copy. */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
- (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
+ (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
+ < (unsigned int) MOVE_RATIO)
#endif
/* This macro is used to determine whether clear_by_pieces should be
called to clear storage. */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
- (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
+ (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
+ < (unsigned int) CLEAR_RATIO)
#endif
/* This macro is used to determine whether store_by_pieces should be
called to "memset" storage with byte values other than zero, or
to "memcpy" storage when the source is a constant string. */
#ifndef STORE_BY_PIECES_P
-#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
+#define STORE_BY_PIECES_P(SIZE, ALIGN) \
+ (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
+ < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves. */
: (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
- if (to_real != from_real)
- abort ();
+ gcc_assert (to_real == from_real);
/* If the source and destination are already the same, then there's
nothing to do. */
&& SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
from = gen_lowpart (to_mode, from), from_mode = to_mode;
- if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
- abort ();
+ gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
if (to_mode == from_mode
|| (from_mode == VOIDmode && CONSTANT_P (from)))
if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
{
- if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
- abort ();
+ gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
if (VECTOR_MODE_P (to_mode))
from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
rtx value, insns;
convert_optab tab;
+ gcc_assert (GET_MODE_PRECISION (from_mode)
+ != GET_MODE_PRECISION (to_mode));
+
if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
tab = sext_optab;
- else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
- tab = trunc_optab;
else
- abort ();
+ tab = trunc_optab;
/* Try converting directly if the insn is supported. */
/* Otherwise use a libcall. */
libcall = tab->handlers[to_mode][from_mode].libfunc;
- if (!libcall)
- /* This conversion is not implemented yet. */
- abort ();
+ /* Is this conversion implemented yet? */
+ gcc_assert (libcall);
start_sequence ();
value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
enum machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
- if (trunc_optab->handlers[to_mode][full_mode].insn_code
- == CODE_FOR_nothing)
- abort ();
+ gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
+ != CODE_FOR_nothing);
if (full_mode != from_mode)
from = convert_to_mode (full_mode, from, unsignedp);
enum machine_mode full_mode
= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
- if (sext_optab->handlers[full_mode][from_mode].insn_code
- == CODE_FOR_nothing)
- abort ();
+ gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
+ != CODE_FOR_nothing);
emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
to, from, UNKNOWN);
int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
rtx subword = operand_subword (to, index, 1, to_mode);
- if (subword == 0)
- abort ();
+ gcc_assert (subword);
if (fill_value != subword)
emit_move_insn (subword, fill_value);
/* No suitable intermediate mode.
Generate what we need with shifts. */
- shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
- - GET_MODE_BITSIZE (from_mode), 0);
+ shift_amount = build_int_cst (NULL_TREE,
+ GET_MODE_BITSIZE (to_mode)
+ - GET_MODE_BITSIZE (from_mode));
from = gen_lowpart (to_mode, force_reg (from_mode, from));
tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
to, unsignedp);
}
/* Mode combination is not recognized. */
- abort ();
+ gcc_unreachable ();
}
/* Return an rtx for a value that would result
subreg operation. */
if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
{
- if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
- abort ();
+ gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
return simplify_gen_subreg (mode, x, oldmode, 0);
}
copy addresses to registers (to make displacements shorter)
and use post-increment if available. */
if (!(data.autinc_from && data.autinc_to)
- && move_by_pieces_ninsns (len, align) > 2)
+ && move_by_pieces_ninsns (len, align, max_size) > 2)
{
/* Find the mode of the largest move... */
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
data.to_addr = copy_addr_to_reg (to_addr);
}
- if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
- || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
- align = MOVE_MAX * BITS_PER_UNIT;
+ tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
+ if (align >= GET_MODE_ALIGNMENT (tmode))
+ align = GET_MODE_ALIGNMENT (tmode);
+ else
+ {
+ enum machine_mode xmode;
+
+ for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
+ tmode != VOIDmode;
+ xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
+ if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
+ || SLOW_UNALIGNED_ACCESS (tmode, align))
+ break;
+
+ align = MAX (align, GET_MODE_ALIGNMENT (xmode));
+ }
/* First move what we can in the largest integer mode, then go to
successively smaller modes. */
}
/* The code above should have handled everything. */
- if (data.len > 0)
- abort ();
+ gcc_assert (!data.len);
if (endp)
{
rtx to1;
- if (data.reverse)
- abort ();
+ gcc_assert (!data.reverse);
if (data.autinc_to)
{
if (endp == 2)
ALIGN (in bits) is maximum alignment we can assume. */
static unsigned HOST_WIDE_INT
-move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
+move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
+ unsigned int max_size)
{
unsigned HOST_WIDE_INT n_insns = 0;
- unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
+ enum machine_mode tmode;
- if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
- || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
- align = MOVE_MAX * BITS_PER_UNIT;
+ tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
+ if (align >= GET_MODE_ALIGNMENT (tmode))
+ align = GET_MODE_ALIGNMENT (tmode);
+ else
+ {
+ enum machine_mode tmode, xmode;
+
+ for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
+ tmode != VOIDmode;
+ xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
+ if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
+ || SLOW_UNALIGNED_ACCESS (tmode, align))
+ break;
+
+ align = MAX (align, GET_MODE_ALIGNMENT (xmode));
+ }
while (max_size > 1)
{
- enum machine_mode mode = VOIDmode, tmode;
+ enum machine_mode mode = VOIDmode;
enum insn_code icode;
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
max_size = GET_MODE_SIZE (mode);
}
- if (l)
- abort ();
+ gcc_assert (!l);
return n_insns;
}
#ifdef PUSH_ROUNDING
emit_single_push_insn (mode, from1, NULL);
#else
- abort ();
+ gcc_unreachable ();
#endif
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
- if (!MEM_P (x))
- abort ();
- if (!MEM_P (y))
- abort ();
- if (size == 0)
- abort ();
+ gcc_assert (MEM_P (x));
+ gcc_assert (MEM_P (y));
+ gcc_assert (size);
/* Make sure we've got BLKmode addresses; store_one_arg can decide that
block copy is more efficient for other large modes, e.g. DCmode. */
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
- /* If we are initializing a readonly value, show the above call clobbered
- it. Otherwise, a load from it may erroneously be hoisted from a loop, or
- the delay slot scheduler might overlook conflicts and take nasty
- decisions. */
- if (RTX_UNCHANGING_P (dst))
- add_function_usage_to
- (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
- gen_rtx_CLOBBER (VOIDmode, dst),
- NULL_RTX));
-
return retval;
}
{
rtx tem = operand_subword (x, i, 1, BLKmode);
- if (tem == 0)
- abort ();
+ gcc_assert (tem);
emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
}
int i, length;
rtx *tmps;
- if (GET_CODE (orig) != PARALLEL)
- abort ();
+ gcc_assert (GET_CODE (orig) == PARALLEL);
length = XVECLEN (orig, 0);
tmps = alloca (sizeof (rtx) * length);
rtx *tmps, src;
int start, i;
- if (GET_CODE (dst) != PARALLEL)
- abort ();
+ gcc_assert (GET_CODE (dst) == PARALLEL);
/* Check for a NULL entry, used to indicate that the parameter goes
both on the stack and in registers. */
)
shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
bytelen = ssize - bytepos;
- if (bytelen <= 0)
- abort ();
+ gcc_assert (bytelen > 0);
}
/* If we won't be loading directly from memory, protect the real source
(bytepos % slen0) * BITS_PER_UNIT,
1, NULL_RTX, mode, mode);
}
- else if (bytepos == 0)
+ else
{
- rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
+ rtx mem;
+
+ gcc_assert (!bytepos);
+ mem = assign_stack_temp (GET_MODE (src), slen, 0);
emit_move_insn (mem, src);
tmps[i] = adjust_address (mem, mode, 0);
}
- else
- abort ();
}
/* FIXME: A SIMD parallel will eventually lead to a subreg of a
SIMD register, which is currently broken. While we get GCC
if (shift)
tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
- build_int_2 (shift, 0), tmps[i], 0);
+ build_int_cst (NULL_TREE, shift), tmps[i], 0);
}
/* Copy the extracted pieces into the proper (probable) hard regs. */
{
int i;
- if (GET_CODE (src) != PARALLEL
- || GET_CODE (dst) != PARALLEL
- || XVECLEN (src, 0) != XVECLEN (dst, 0))
- abort ();
+ gcc_assert (GET_CODE (src) == PARALLEL
+ && GET_CODE (dst) == PARALLEL
+ && XVECLEN (src, 0) == XVECLEN (dst, 0));
/* Skip first entry if NULL. */
for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
rtx *tmps, dst;
int start, i;
- if (GET_CODE (src) != PARALLEL)
- abort ();
+ gcc_assert (GET_CODE (src) == PARALLEL);
/* Check for a NULL entry, used to indicate that the parameter goes
both on the stack and in registers. */
{
int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
- build_int_2 (shift, 0), tmps[i], 0);
+ build_int_cst (NULL_TREE, shift),
+ tmps[i], 0);
}
bytelen = ssize - bytepos;
}
bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
dest = XEXP (dst, 1);
}
- else if (bytepos == 0 && XVECLEN (src, 0))
+ else
{
+ gcc_assert (bytepos == 0 && XVECLEN (src, 0));
dest = assign_stack_temp (GET_MODE (dest),
GET_MODE_SIZE (GET_MODE (dest)), 0);
emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
dst = dest;
break;
}
- else
- abort ();
}
/* Optimize the access just a bit. */
void
use_reg (rtx *call_fusage, rtx reg)
{
- if (!REG_P (reg)
- || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
- abort ();
-
+  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
+
*call_fusage
= gen_rtx_EXPR_LIST (VOIDmode,
gen_rtx_USE (VOIDmode, reg), *call_fusage);
{
int i;
- if (regno + nregs > FIRST_PSEUDO_REGISTER)
- abort ();
+ gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
for (i = 0; i < nregs; i++)
use_reg (call_fusage, regno_reg_rtx[regno + i]);
rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
void *constfundata, unsigned int align)
{
- unsigned HOST_WIDE_INT max_size, l;
+ unsigned HOST_WIDE_INT l;
+ unsigned int max_size;
HOST_WIDE_INT offset = 0;
enum machine_mode mode, tmode;
enum insn_code icode;
if (! STORE_BY_PIECES_P (len, align))
return 0;
- if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
- || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
- align = MOVE_MAX * BITS_PER_UNIT;
+ tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
+ if (align >= GET_MODE_ALIGNMENT (tmode))
+ align = GET_MODE_ALIGNMENT (tmode);
+ else
+ {
+ enum machine_mode xmode;
+
+ for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
+ tmode != VOIDmode;
+ xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
+ if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
+ || SLOW_UNALIGNED_ACCESS (tmode, align))
+ break;
+
+ align = MAX (align, GET_MODE_ALIGNMENT (xmode));
+ }
/* We would first store what we can in the largest integer mode, then go to
successively smaller modes. */
}
/* The code above should have handled everything. */
- if (l != 0)
- abort ();
+ gcc_assert (!l);
}
return 1;
if (len == 0)
{
- if (endp == 2)
- abort ();
+ gcc_assert (endp != 2);
return to;
}
- if (! STORE_BY_PIECES_P (len, align))
- abort ();
+ gcc_assert (STORE_BY_PIECES_P (len, align));
data.constfun = constfun;
data.constfundata = constfundata;
data.len = len;
{
rtx to1;
- if (data.reverse)
- abort ();
+ gcc_assert (!data.reverse);
if (data.autinc_to)
{
if (endp == 2)
unsigned int align ATTRIBUTE_UNUSED)
{
rtx to_addr = XEXP (data->to, 0);
- unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
+ unsigned int max_size = STORE_MAX_PIECES + 1;
enum machine_mode mode = VOIDmode, tmode;
enum insn_code icode;
copy addresses to registers (to make displacements shorter)
and use post-increment if available. */
if (!data->autinc_to
- && move_by_pieces_ninsns (data->len, align) > 2)
+ && move_by_pieces_ninsns (data->len, align, max_size) > 2)
{
/* Determine the main mode we'll be using. */
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
data->to_addr = copy_addr_to_reg (to_addr);
}
- if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
- || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
- align = MOVE_MAX * BITS_PER_UNIT;
+ tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
+ if (align >= GET_MODE_ALIGNMENT (tmode))
+ align = GET_MODE_ALIGNMENT (tmode);
+ else
+ {
+ enum machine_mode xmode;
+
+ for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
+ tmode != VOIDmode;
+ xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
+ if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
+ || SLOW_UNALIGNED_ACCESS (tmode, align))
+ break;
+
+ align = MAX (align, GET_MODE_ALIGNMENT (xmode));
+ }
/* First store what we can in the largest integer mode, then go to
successively smaller modes. */
}
/* The code above should have handled everything. */
- if (data->len != 0)
- abort ();
+ gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
- /* If we are initializing a readonly value, show the above call
- clobbered it. Otherwise, a load from it may erroneously be
- hoisted from a loop. */
- if (RTX_UNCHANGING_P (object))
- emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
-
return retval;
}
rtx y_cst = NULL_RTX;
rtx last_insn, set;
- if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
- abort ();
+ gcc_assert (mode != BLKmode
+ && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
if (CONSTANT_P (y))
{
&& CONSTANT_ADDRESS_P (XEXP (y, 0)))))
y = validize_mem (y);
- if (mode == BLKmode)
- abort ();
+ gcc_assert (mode != BLKmode);
last_insn = emit_move_insn_1 (x, y);
enum machine_mode submode;
enum mode_class class = GET_MODE_CLASS (mode);
- if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
- abort ();
+ gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
return
if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
break;
- if (tmode == VOIDmode)
- abort ();
+ gcc_assert (tmode != VOIDmode);
/* Get X and Y in TMODE. We can't use gen_lowpart here because it
may call change_address which is not appropriate if we were
/* This will handle any multi-word or full-word mode that lacks a move_insn
pattern. However, you will get better code if you define such patterns,
even if they must turn into multiple assembler instructions. */
- else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
+ else
{
rtx last_insn = 0;
rtx seq, inner;
int need_clobber;
int i;
-
+
+ gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
+
#ifdef PUSH_ROUNDING
/* If X is a push on the stack, do the push now and replace
else if (ypart == 0)
ypart = operand_subword_force (y, i, mode);
- if (xpart == 0 || ypart == 0)
- abort ();
+ gcc_assert (xpart && ypart);
need_clobber |= (GET_CODE (xpart) == SUBREG);
return last_insn;
}
- else
- abort ();
}
/* If Y is representable exactly in a narrower mode, and the target can
else
offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
- if (size == 0)
- abort ();
+ gcc_assert (size);
used -= offset;
static rtx
get_subtarget (rtx x)
{
- return ((x == 0
+ return (optimize
+ || x == 0
/* Only registers can be subtargets. */
|| !REG_P (x)
- /* If the register is readonly, it can't be set more than once. */
- || RTX_UNCHANGING_P (x)
/* Don't use hard regs to avoid extending their life. */
|| REGNO (x) < FIRST_PSEUDO_REGISTER
- /* Avoid subtargets inside loops,
- since they hide some invariant expressions. */
- || preserve_subexpressions_p ())
? 0 : x);
}
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
- if (!MEM_P (to_rtx))
- abort ();
+ gcc_assert (MEM_P (to_rtx));
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (offset_rtx) != Pmode)
MEM_VOLATILE_P (to_rtx) = 1;
}
- if (TREE_CODE (to) == COMPONENT_REF
- && TREE_READONLY (TREE_OPERAND (to, 1))
- /* We can't assert that a MEM won't be set more than once
- if the component is not addressable because another
- non-addressable component may be referenced by the same MEM. */
- && ! (MEM_P (to_rtx) && ! can_address_p (to)))
- {
- if (to_rtx == orig_to_rtx)
- to_rtx = copy_rtx (to_rtx);
- RTX_UNCHANGING_P (to_rtx) = 1;
- }
-
if (MEM_P (to_rtx) && ! can_address_p (to))
{
if (to_rtx == orig_to_rtx)
if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
break;
- /* We can't handle fields split accross multiple entities. */
+ /* We can't handle fields split across multiple entities. */
if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
break;
NULL_RTX);
binop = xor_optab;
}
- value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx),
- value, build_int_2 (bitpos1, 0),
+ value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
+ build_int_cst (NULL_TREE, bitpos1),
NULL_RTX, 1);
result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
value, str_rtx, 1, OPTAB_WIDEN);
/* C++ can generate ?: expressions with a throw expression in one
branch and an rvalue in the other. Here, we resolve attempts to
store the throw expression's nonexistent result. */
- if (want_value)
- abort ();
+ gcc_assert (!want_value);
expand_expr (exp, const0_rtx, VOIDmode, 0);
return NULL_RTX;
}
case FUNCTION_TYPE:
case LANG_TYPE:
default:
- abort ();
+ gcc_unreachable ();
}
}
HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
- if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
- || TREE_CODE (type) == QUAL_UNION_TYPE)
+ switch (TREE_CODE (type))
{
- tree elt;
-
- /* If size is zero or the target is already cleared, do nothing. */
- if (size == 0 || cleared)
- cleared = 1;
- /* We either clear the aggregate or indicate the value is dead. */
- else if ((TREE_CODE (type) == UNION_TYPE
- || TREE_CODE (type) == QUAL_UNION_TYPE)
- && ! CONSTRUCTOR_ELTS (exp))
- /* If the constructor is empty, clear the union. */
- {
- clear_storage (target, expr_size (exp));
- cleared = 1;
- }
-
- /* If we are building a static constructor into a register,
- set the initial value as zero so we can fold the value into
- a constant. But if more than one register is involved,
- this probably loses. */
- else if (REG_P (target) && TREE_STATIC (exp)
- && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
- {
- emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
- cleared = 1;
- }
-
- /* If the constructor has fewer fields than the structure
- or if we are initializing the structure to mostly zeros,
- clear the whole structure first. Don't do this if TARGET is a
- register whose mode size isn't equal to SIZE since clear_storage
- can't handle this case. */
- else if (size > 0
- && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
- || mostly_zeros_p (exp))
- && (!REG_P (target)
- || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
- == size)))
- {
- rtx xtarget = target;
-
- if (readonly_fields_p (type))
- {
- xtarget = copy_rtx (xtarget);
- RTX_UNCHANGING_P (xtarget) = 1;
- }
+ case RECORD_TYPE:
+ case UNION_TYPE:
+ case QUAL_UNION_TYPE:
+ {
+ tree elt;
- clear_storage (xtarget, GEN_INT (size));
+ /* If size is zero or the target is already cleared, do nothing. */
+ if (size == 0 || cleared)
cleared = 1;
- }
-
- if (! cleared)
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
- /* Store each element of the constructor into
- the corresponding field of TARGET. */
-
- for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
- {
- tree field = TREE_PURPOSE (elt);
- tree value = TREE_VALUE (elt);
- enum machine_mode mode;
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos = 0;
- tree offset;
- rtx to_rtx = target;
-
- /* Just ignore missing fields.
- We cleared the whole structure, above,
- if any fields are missing. */
- if (field == 0)
- continue;
-
- if (cleared && initializer_zerop (value))
- continue;
-
- if (host_integerp (DECL_SIZE (field), 1))
- bitsize = tree_low_cst (DECL_SIZE (field), 1);
- else
- bitsize = -1;
-
- mode = DECL_MODE (field);
- if (DECL_BIT_FIELD (field))
- mode = VOIDmode;
+ /* We either clear the aggregate or indicate the value is dead. */
+ else if ((TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
+ && ! CONSTRUCTOR_ELTS (exp))
+ /* If the constructor is empty, clear the union. */
+ {
+ clear_storage (target, expr_size (exp));
+ cleared = 1;
+ }
- offset = DECL_FIELD_OFFSET (field);
- if (host_integerp (offset, 0)
- && host_integerp (bit_position (field), 0))
- {
- bitpos = int_bit_position (field);
- offset = 0;
- }
- else
- bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
+ /* If we are building a static constructor into a register,
+ set the initial value as zero so we can fold the value into
+ a constant. But if more than one register is involved,
+ this probably loses. */
+ else if (REG_P (target) && TREE_STATIC (exp)
+ && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
+ {
+ emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
+ cleared = 1;
+ }
- if (offset)
- {
- rtx offset_rtx;
+ /* If the constructor has fewer fields than the structure or
+ if we are initializing the structure to mostly zeros, clear
+ the whole structure first. Don't do this if TARGET is a
+ register whose mode size isn't equal to SIZE since
+ clear_storage can't handle this case. */
+ else if (size > 0
+ && ((list_length (CONSTRUCTOR_ELTS (exp))
+ != fields_length (type))
+ || mostly_zeros_p (exp))
+ && (!REG_P (target)
+ || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
+ == size)))
+ {
+ clear_storage (target, GEN_INT (size));
+ cleared = 1;
+ }
- offset
- = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
- make_tree (TREE_TYPE (exp),
- target));
+ if (! cleared)
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
- offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
- if (!MEM_P (to_rtx))
- abort ();
+ /* Store each element of the constructor into the
+ corresponding field of TARGET. */
+ for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
+ {
+ tree field = TREE_PURPOSE (elt);
+ tree value = TREE_VALUE (elt);
+ enum machine_mode mode;
+ HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitpos = 0;
+ tree offset;
+ rtx to_rtx = target;
+
+ /* Just ignore missing fields. We cleared the whole
+ structure, above, if any fields are missing. */
+ if (field == 0)
+ continue;
+
+ if (cleared && initializer_zerop (value))
+ continue;
+
+ if (host_integerp (DECL_SIZE (field), 1))
+ bitsize = tree_low_cst (DECL_SIZE (field), 1);
+ else
+ bitsize = -1;
+
+ mode = DECL_MODE (field);
+ if (DECL_BIT_FIELD (field))
+ mode = VOIDmode;
+
+ offset = DECL_FIELD_OFFSET (field);
+ if (host_integerp (offset, 0)
+ && host_integerp (bit_position (field), 0))
+ {
+ bitpos = int_bit_position (field);
+ offset = 0;
+ }
+ else
+ bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
+
+ if (offset)
+ {
+ rtx offset_rtx;
+
+ offset
+ = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
+ make_tree (TREE_TYPE (exp),
+ target));
+
+ offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
+ gcc_assert (MEM_P (to_rtx));
+
#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (offset_rtx) != Pmode)
- offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
+ if (GET_MODE (offset_rtx) != Pmode)
+ offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
- if (GET_MODE (offset_rtx) != ptr_mode)
- offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
+ if (GET_MODE (offset_rtx) != ptr_mode)
+ offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
- to_rtx = offset_address (to_rtx, offset_rtx,
- highest_pow2_factor (offset));
- }
-
- if (TREE_READONLY (field))
- {
- if (MEM_P (to_rtx))
- to_rtx = copy_rtx (to_rtx);
-
- RTX_UNCHANGING_P (to_rtx) = 1;
- }
+ to_rtx = offset_address (to_rtx, offset_rtx,
+ highest_pow2_factor (offset));
+ }
#ifdef WORD_REGISTER_OPERATIONS
- /* If this initializes a field that is smaller than a word, at the
- start of a word, try to widen it to a full word.
- This special case allows us to output C++ member function
- initializations in a form that the optimizers can understand. */
- if (REG_P (target)
- && bitsize < BITS_PER_WORD
- && bitpos % BITS_PER_WORD == 0
- && GET_MODE_CLASS (mode) == MODE_INT
- && TREE_CODE (value) == INTEGER_CST
- && exp_size >= 0
- && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
- {
- tree type = TREE_TYPE (value);
-
- if (TYPE_PRECISION (type) < BITS_PER_WORD)
- {
- type = lang_hooks.types.type_for_size
- (BITS_PER_WORD, TYPE_UNSIGNED (type));
- value = convert (type, value);
- }
-
- if (BYTES_BIG_ENDIAN)
- value
- = fold (build2 (LSHIFT_EXPR, type, value,
- build_int_2 (BITS_PER_WORD - bitsize, 0)));
- bitsize = BITS_PER_WORD;
- mode = word_mode;
- }
+ /* If this initializes a field that is smaller than a
+ word, at the start of a word, try to widen it to a full
+ word. This special case allows us to output C++ member
+ function initializations in a form that the optimizers
+ can understand. */
+ if (REG_P (target)
+ && bitsize < BITS_PER_WORD
+ && bitpos % BITS_PER_WORD == 0
+ && GET_MODE_CLASS (mode) == MODE_INT
+ && TREE_CODE (value) == INTEGER_CST
+ && exp_size >= 0
+ && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
+ {
+ tree type = TREE_TYPE (value);
+
+ if (TYPE_PRECISION (type) < BITS_PER_WORD)
+ {
+ type = lang_hooks.types.type_for_size
+ (BITS_PER_WORD, TYPE_UNSIGNED (type));
+ value = convert (type, value);
+ }
+
+ if (BYTES_BIG_ENDIAN)
+ value
+ = fold (build2 (LSHIFT_EXPR, type, value,
+ build_int_cst (NULL_TREE,
+ BITS_PER_WORD - bitsize)));
+ bitsize = BITS_PER_WORD;
+ mode = word_mode;
+ }
#endif
- if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
- && DECL_NONADDRESSABLE_P (field))
- {
- to_rtx = copy_rtx (to_rtx);
- MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
- }
-
- store_constructor_field (to_rtx, bitsize, bitpos, mode,
- value, type, cleared,
- get_alias_set (TREE_TYPE (field)));
- }
- }
-
- else if (TREE_CODE (type) == ARRAY_TYPE)
- {
- tree elt;
- int i;
- int need_to_clear;
- tree domain;
- tree elttype = TREE_TYPE (type);
- int const_bounds_p;
- HOST_WIDE_INT minelt = 0;
- HOST_WIDE_INT maxelt = 0;
-
- domain = TYPE_DOMAIN (type);
- const_bounds_p = (TYPE_MIN_VALUE (domain)
- && TYPE_MAX_VALUE (domain)
- && host_integerp (TYPE_MIN_VALUE (domain), 0)
- && host_integerp (TYPE_MAX_VALUE (domain), 0));
-
- /* If we have constant bounds for the range of the type, get them. */
- if (const_bounds_p)
- {
- minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
- maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
- }
-
- /* If the constructor has fewer elements than the array,
- clear the whole array first. Similarly if this is
- static constructor of a non-BLKmode object. */
- if (cleared)
- need_to_clear = 0;
- else if (REG_P (target) && TREE_STATIC (exp))
- need_to_clear = 1;
- else
- {
- HOST_WIDE_INT count = 0, zero_count = 0;
- need_to_clear = ! const_bounds_p;
-
- /* This loop is a more accurate version of the loop in
- mostly_zeros_p (it handles RANGE_EXPR in an index).
- It is also needed to check for missing elements. */
- for (elt = CONSTRUCTOR_ELTS (exp);
- elt != NULL_TREE && ! need_to_clear;
- elt = TREE_CHAIN (elt))
- {
- tree index = TREE_PURPOSE (elt);
- HOST_WIDE_INT this_node_count;
-
- if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
- {
- tree lo_index = TREE_OPERAND (index, 0);
- tree hi_index = TREE_OPERAND (index, 1);
-
- if (! host_integerp (lo_index, 1)
- || ! host_integerp (hi_index, 1))
- {
- need_to_clear = 1;
- break;
- }
-
- this_node_count = (tree_low_cst (hi_index, 1)
- - tree_low_cst (lo_index, 1) + 1);
- }
- else
- this_node_count = 1;
-
- count += this_node_count;
- if (mostly_zeros_p (TREE_VALUE (elt)))
- zero_count += this_node_count;
- }
-
- /* Clear the entire array first if there are any missing elements,
- or if the incidence of zero elements is >= 75%. */
- if (! need_to_clear
- && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
- need_to_clear = 1;
- }
-
- if (need_to_clear && size > 0)
- {
- if (REG_P (target))
- emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
- else
- clear_storage (target, GEN_INT (size));
- cleared = 1;
- }
-
- if (!cleared && REG_P (target))
- /* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
- /* Store each element of the constructor into
- the corresponding element of TARGET, determined
- by counting the elements. */
- for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
- elt;
- elt = TREE_CHAIN (elt), i++)
- {
- enum machine_mode mode;
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos;
- int unsignedp;
- tree value = TREE_VALUE (elt);
- tree index = TREE_PURPOSE (elt);
- rtx xtarget = target;
-
- if (cleared && initializer_zerop (value))
- continue;
-
- unsignedp = TYPE_UNSIGNED (elttype);
- mode = TYPE_MODE (elttype);
- if (mode == BLKmode)
- bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
- ? tree_low_cst (TYPE_SIZE (elttype), 1)
- : -1);
- else
- bitsize = GET_MODE_BITSIZE (mode);
-
- if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
- {
- tree lo_index = TREE_OPERAND (index, 0);
- tree hi_index = TREE_OPERAND (index, 1);
- rtx index_r, pos_rtx;
- HOST_WIDE_INT lo, hi, count;
- tree position;
-
- /* If the range is constant and "small", unroll the loop. */
- if (const_bounds_p
- && host_integerp (lo_index, 0)
- && host_integerp (hi_index, 0)
- && (lo = tree_low_cst (lo_index, 0),
- hi = tree_low_cst (hi_index, 0),
- count = hi - lo + 1,
- (!MEM_P (target)
- || count <= 2
- || (host_integerp (TYPE_SIZE (elttype), 1)
- && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
- <= 40 * 8)))))
- {
- lo -= minelt; hi -= minelt;
- for (; lo <= hi; lo++)
- {
- bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
-
- if (MEM_P (target)
- && !MEM_KEEP_ALIAS_SET_P (target)
- && TREE_CODE (type) == ARRAY_TYPE
- && TYPE_NONALIASED_COMPONENT (type))
- {
- target = copy_rtx (target);
- MEM_KEEP_ALIAS_SET_P (target) = 1;
- }
-
- store_constructor_field
- (target, bitsize, bitpos, mode, value, type, cleared,
- get_alias_set (elttype));
- }
- }
- else
- {
- rtx loop_start = gen_label_rtx ();
- rtx loop_end = gen_label_rtx ();
- tree exit_cond;
-
- expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
- unsignedp = TYPE_UNSIGNED (domain);
-
- index = build_decl (VAR_DECL, NULL_TREE, domain);
-
- index_r
- = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
- &unsignedp, 0));
- SET_DECL_RTL (index, index_r);
- store_expr (lo_index, index_r, 0);
-
- /* Build the head of the loop. */
- do_pending_stack_adjust ();
- emit_label (loop_start);
-
- /* Assign value to element index. */
- position
- = convert (ssizetype,
- fold (build2 (MINUS_EXPR, TREE_TYPE (index),
- index, TYPE_MIN_VALUE (domain))));
- position = size_binop (MULT_EXPR, position,
- convert (ssizetype,
- TYPE_SIZE_UNIT (elttype)));
-
- pos_rtx = expand_expr (position, 0, VOIDmode, 0);
- xtarget = offset_address (target, pos_rtx,
- highest_pow2_factor (position));
- xtarget = adjust_address (xtarget, mode, 0);
- if (TREE_CODE (value) == CONSTRUCTOR)
- store_constructor (value, xtarget, cleared,
- bitsize / BITS_PER_UNIT);
- else
- store_expr (value, xtarget, 0);
-
- /* Generate a conditional jump to exit the loop. */
- exit_cond = build2 (LT_EXPR, integer_type_node,
- index, hi_index);
- jumpif (exit_cond, loop_end);
-
- /* Update the loop counter, and jump to the head of
- the loop. */
- expand_assignment (index,
- build2 (PLUS_EXPR, TREE_TYPE (index),
- index, integer_one_node), 0);
-
- emit_jump (loop_start);
-
- /* Build the end of the loop. */
- emit_label (loop_end);
- }
- }
- else if ((index != 0 && ! host_integerp (index, 0))
- || ! host_integerp (TYPE_SIZE (elttype), 1))
- {
- tree position;
-
- if (index == 0)
- index = ssize_int (1);
-
- if (minelt)
- index = fold_convert (ssizetype,
- fold (build2 (MINUS_EXPR,
- TREE_TYPE (index),
- index,
- TYPE_MIN_VALUE (domain))));
-
- position = size_binop (MULT_EXPR, index,
- convert (ssizetype,
- TYPE_SIZE_UNIT (elttype)));
- xtarget = offset_address (target,
- expand_expr (position, 0, VOIDmode, 0),
- highest_pow2_factor (position));
- xtarget = adjust_address (xtarget, mode, 0);
- store_expr (value, xtarget, 0);
- }
- else
- {
- if (index != 0)
- bitpos = ((tree_low_cst (index, 0) - minelt)
- * tree_low_cst (TYPE_SIZE (elttype), 1));
- else
- bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
-
- if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
- && TREE_CODE (type) == ARRAY_TYPE
- && TYPE_NONALIASED_COMPONENT (type))
- {
- target = copy_rtx (target);
- MEM_KEEP_ALIAS_SET_P (target) = 1;
- }
- store_constructor_field (target, bitsize, bitpos, mode, value,
- type, cleared, get_alias_set (elttype));
- }
- }
- }
-
- else if (TREE_CODE (type) == VECTOR_TYPE)
- {
- tree elt;
- int i;
- int need_to_clear;
- int icode = 0;
- tree elttype = TREE_TYPE (type);
- int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
- enum machine_mode eltmode = TYPE_MODE (elttype);
- HOST_WIDE_INT bitsize;
- HOST_WIDE_INT bitpos;
- rtx *vector = NULL;
- unsigned n_elts;
-
- if (eltmode == BLKmode)
- abort ();
-
- n_elts = TYPE_VECTOR_SUBPARTS (type);
- if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
- {
- enum machine_mode mode = GET_MODE (target);
-
- icode = (int) vec_init_optab->handlers[mode].insn_code;
- if (icode != CODE_FOR_nothing)
- {
- unsigned int i;
-
- vector = alloca (n_elts);
- for (i = 0; i < n_elts; i++)
- vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
- }
- }
-
- /* If the constructor has fewer elements than the vector,
- clear the whole array first. Similarly if this is
- static constructor of a non-BLKmode object. */
- if (cleared)
- need_to_clear = 0;
- else if (REG_P (target) && TREE_STATIC (exp))
- need_to_clear = 1;
- else
- {
- unsigned HOST_WIDE_INT count = 0, zero_count = 0;
-
- for (elt = CONSTRUCTOR_ELTS (exp);
- elt != NULL_TREE;
- elt = TREE_CHAIN (elt))
- {
- int n_elts_here =
- tree_low_cst (
- int_const_binop (TRUNC_DIV_EXPR,
- TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
- TYPE_SIZE (elttype), 0), 1);
-
- count += n_elts_here;
- if (mostly_zeros_p (TREE_VALUE (elt)))
- zero_count += n_elts_here;
- }
-
- /* Clear the entire vector first if there are any missing elements,
- or if the incidence of zero elements is >= 75%. */
- need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
- }
-
- if (need_to_clear && size > 0 && !vector)
- {
- if (REG_P (target))
- emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
- else
- clear_storage (target, GEN_INT (size));
- cleared = 1;
- }
+ if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
+ && DECL_NONADDRESSABLE_P (field))
+ {
+ to_rtx = copy_rtx (to_rtx);
+ MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
+ }
+
+ store_constructor_field (to_rtx, bitsize, bitpos, mode,
+ value, type, cleared,
+ get_alias_set (TREE_TYPE (field)));
+ }
+ break;
+ }
+ case ARRAY_TYPE:
+ {
+ tree elt;
+ int i;
+ int need_to_clear;
+ tree domain;
+ tree elttype = TREE_TYPE (type);
+ int const_bounds_p;
+ HOST_WIDE_INT minelt = 0;
+ HOST_WIDE_INT maxelt = 0;
+
+ domain = TYPE_DOMAIN (type);
+ const_bounds_p = (TYPE_MIN_VALUE (domain)
+ && TYPE_MAX_VALUE (domain)
+ && host_integerp (TYPE_MIN_VALUE (domain), 0)
+ && host_integerp (TYPE_MAX_VALUE (domain), 0));
+
+ /* If we have constant bounds for the range of the type, get them. */
+ if (const_bounds_p)
+ {
+ minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
+ maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
+ }
- if (!cleared && REG_P (target))
- /* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
+	/* If the constructor has fewer elements than the array, clear
+	   the whole array first.  Similarly if this is a static
+	   constructor of a non-BLKmode object.  */
+ if (cleared)
+ need_to_clear = 0;
+ else if (REG_P (target) && TREE_STATIC (exp))
+ need_to_clear = 1;
+ else
+ {
+ HOST_WIDE_INT count = 0, zero_count = 0;
+ need_to_clear = ! const_bounds_p;
+
+ /* This loop is a more accurate version of the loop in
+ mostly_zeros_p (it handles RANGE_EXPR in an index). It
+ is also needed to check for missing elements. */
+ for (elt = CONSTRUCTOR_ELTS (exp);
+ elt != NULL_TREE && ! need_to_clear;
+ elt = TREE_CHAIN (elt))
+ {
+ tree index = TREE_PURPOSE (elt);
+ HOST_WIDE_INT this_node_count;
+
+ if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
+ {
+ tree lo_index = TREE_OPERAND (index, 0);
+ tree hi_index = TREE_OPERAND (index, 1);
+
+ if (! host_integerp (lo_index, 1)
+ || ! host_integerp (hi_index, 1))
+ {
+ need_to_clear = 1;
+ break;
+ }
+
+ this_node_count = (tree_low_cst (hi_index, 1)
+ - tree_low_cst (lo_index, 1) + 1);
+ }
+ else
+ this_node_count = 1;
+
+ count += this_node_count;
+ if (mostly_zeros_p (TREE_VALUE (elt)))
+ zero_count += this_node_count;
+ }
+
+ /* Clear the entire array first if there are any missing
+ elements, or if the incidence of zero elements is >=
+ 75%. */
+ if (! need_to_clear
+ && (count < maxelt - minelt + 1
+ || 4 * zero_count >= 3 * count))
+ need_to_clear = 1;
+ }
+
+ if (need_to_clear && size > 0)
+ {
+ if (REG_P (target))
+ emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
+ else
+ clear_storage (target, GEN_INT (size));
+ cleared = 1;
+ }
- /* Store each element of the constructor into the corresponding
- element of TARGET, determined by counting the elements. */
- for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
- elt;
- elt = TREE_CHAIN (elt), i += bitsize / elt_size)
- {
- tree value = TREE_VALUE (elt);
- tree index = TREE_PURPOSE (elt);
- HOST_WIDE_INT eltpos;
+ if (!cleared && REG_P (target))
+ /* Inform later passes that the old value is dead. */
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
- bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
- if (cleared && initializer_zerop (value))
- continue;
+ /* Store each element of the constructor into the
+ corresponding element of TARGET, determined by counting the
+ elements. */
+ for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
+ elt;
+ elt = TREE_CHAIN (elt), i++)
+ {
+ enum machine_mode mode;
+ HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitpos;
+ int unsignedp;
+ tree value = TREE_VALUE (elt);
+ tree index = TREE_PURPOSE (elt);
+ rtx xtarget = target;
+
+ if (cleared && initializer_zerop (value))
+ continue;
+
+ unsignedp = TYPE_UNSIGNED (elttype);
+ mode = TYPE_MODE (elttype);
+ if (mode == BLKmode)
+ bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
+ ? tree_low_cst (TYPE_SIZE (elttype), 1)
+ : -1);
+ else
+ bitsize = GET_MODE_BITSIZE (mode);
+
+ if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
+ {
+ tree lo_index = TREE_OPERAND (index, 0);
+ tree hi_index = TREE_OPERAND (index, 1);
+ rtx index_r, pos_rtx;
+ HOST_WIDE_INT lo, hi, count;
+ tree position;
+
+ /* If the range is constant and "small", unroll the loop. */
+ if (const_bounds_p
+ && host_integerp (lo_index, 0)
+ && host_integerp (hi_index, 0)
+ && (lo = tree_low_cst (lo_index, 0),
+ hi = tree_low_cst (hi_index, 0),
+ count = hi - lo + 1,
+ (!MEM_P (target)
+ || count <= 2
+ || (host_integerp (TYPE_SIZE (elttype), 1)
+ && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
+ <= 40 * 8)))))
+ {
+ lo -= minelt; hi -= minelt;
+ for (; lo <= hi; lo++)
+ {
+ bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
+
+ if (MEM_P (target)
+ && !MEM_KEEP_ALIAS_SET_P (target)
+ && TREE_CODE (type) == ARRAY_TYPE
+ && TYPE_NONALIASED_COMPONENT (type))
+ {
+ target = copy_rtx (target);
+ MEM_KEEP_ALIAS_SET_P (target) = 1;
+ }
+
+ store_constructor_field
+ (target, bitsize, bitpos, mode, value, type, cleared,
+ get_alias_set (elttype));
+ }
+ }
+ else
+ {
+ rtx loop_start = gen_label_rtx ();
+ rtx loop_end = gen_label_rtx ();
+ tree exit_cond;
+
+ expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
+ unsignedp = TYPE_UNSIGNED (domain);
+
+ index = build_decl (VAR_DECL, NULL_TREE, domain);
+
+ index_r
+ = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
+ &unsignedp, 0));
+ SET_DECL_RTL (index, index_r);
+ store_expr (lo_index, index_r, 0);
+
+ /* Build the head of the loop. */
+ do_pending_stack_adjust ();
+ emit_label (loop_start);
+
+ /* Assign value to element index. */
+ position
+ = convert (ssizetype,
+ fold (build2 (MINUS_EXPR, TREE_TYPE (index),
+ index, TYPE_MIN_VALUE (domain))));
+ position = size_binop (MULT_EXPR, position,
+ convert (ssizetype,
+ TYPE_SIZE_UNIT (elttype)));
+
+ pos_rtx = expand_expr (position, 0, VOIDmode, 0);
+ xtarget = offset_address (target, pos_rtx,
+ highest_pow2_factor (position));
+ xtarget = adjust_address (xtarget, mode, 0);
+ if (TREE_CODE (value) == CONSTRUCTOR)
+ store_constructor (value, xtarget, cleared,
+ bitsize / BITS_PER_UNIT);
+ else
+ store_expr (value, xtarget, 0);
+
+ /* Generate a conditional jump to exit the loop. */
+ exit_cond = build2 (LT_EXPR, integer_type_node,
+ index, hi_index);
+ jumpif (exit_cond, loop_end);
+
+ /* Update the loop counter, and jump to the head of
+ the loop. */
+ expand_assignment (index,
+ build2 (PLUS_EXPR, TREE_TYPE (index),
+ index, integer_one_node), 0);
+
+ emit_jump (loop_start);
+
+ /* Build the end of the loop. */
+ emit_label (loop_end);
+ }
+ }
+ else if ((index != 0 && ! host_integerp (index, 0))
+ || ! host_integerp (TYPE_SIZE (elttype), 1))
+ {
+ tree position;
+
+ if (index == 0)
+ index = ssize_int (1);
+
+ if (minelt)
+ index = fold_convert (ssizetype,
+ fold (build2 (MINUS_EXPR,
+ TREE_TYPE (index),
+ index,
+ TYPE_MIN_VALUE (domain))));
+
+ position = size_binop (MULT_EXPR, index,
+ convert (ssizetype,
+ TYPE_SIZE_UNIT (elttype)));
+ xtarget = offset_address (target,
+ expand_expr (position, 0, VOIDmode, 0),
+ highest_pow2_factor (position));
+ xtarget = adjust_address (xtarget, mode, 0);
+ store_expr (value, xtarget, 0);
+ }
+ else
+ {
+ if (index != 0)
+ bitpos = ((tree_low_cst (index, 0) - minelt)
+ * tree_low_cst (TYPE_SIZE (elttype), 1));
+ else
+ bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
+
+ if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
+ && TREE_CODE (type) == ARRAY_TYPE
+ && TYPE_NONALIASED_COMPONENT (type))
+ {
+ target = copy_rtx (target);
+ MEM_KEEP_ALIAS_SET_P (target) = 1;
+ }
+ store_constructor_field (target, bitsize, bitpos, mode, value,
+ type, cleared, get_alias_set (elttype));
+ }
+ }
+ break;
+ }
- if (index != 0)
- eltpos = tree_low_cst (index, 1);
- else
- eltpos = i;
+ case VECTOR_TYPE:
+ {
+ tree elt;
+ int i;
+ int need_to_clear;
+ int icode = 0;
+ tree elttype = TREE_TYPE (type);
+ int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
+ enum machine_mode eltmode = TYPE_MODE (elttype);
+ HOST_WIDE_INT bitsize;
+ HOST_WIDE_INT bitpos;
+ rtx *vector = NULL;
+ unsigned n_elts;
+
+ gcc_assert (eltmode != BLKmode);
+
+ n_elts = TYPE_VECTOR_SUBPARTS (type);
+ if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
+ {
+ enum machine_mode mode = GET_MODE (target);
+
+ icode = (int) vec_init_optab->handlers[mode].insn_code;
+ if (icode != CODE_FOR_nothing)
+ {
+ unsigned int i;
+
+ vector = alloca (n_elts);
+ for (i = 0; i < n_elts; i++)
+ vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
+ }
+ }
+
+	/* If the constructor has fewer elements than the vector,
+	   clear the whole vector first.  Similarly if this is a static
+	   constructor of a non-BLKmode object.  */
+ if (cleared)
+ need_to_clear = 0;
+ else if (REG_P (target) && TREE_STATIC (exp))
+ need_to_clear = 1;
+ else
+ {
+ unsigned HOST_WIDE_INT count = 0, zero_count = 0;
+
+ for (elt = CONSTRUCTOR_ELTS (exp);
+ elt != NULL_TREE;
+ elt = TREE_CHAIN (elt))
+ {
+ int n_elts_here = tree_low_cst
+ (int_const_binop (TRUNC_DIV_EXPR,
+ TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
+ TYPE_SIZE (elttype), 0), 1);
+
+ count += n_elts_here;
+ if (mostly_zeros_p (TREE_VALUE (elt)))
+ zero_count += n_elts_here;
+ }
- if (vector)
- {
- /* Vector CONSTRUCTORs should only be built from smaller
- vectors in the case of BLKmode vectors. */
- if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
- abort ();
- vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
- }
- else
- {
- enum machine_mode value_mode =
- TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
+ /* Clear the entire vector first if there are any missing elements,
+ or if the incidence of zero elements is >= 75%. */
+ need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
+ }
+
+ if (need_to_clear && size > 0 && !vector)
+ {
+ if (REG_P (target))
+ emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
+ else
+ clear_storage (target, GEN_INT (size));
+ cleared = 1;
+ }
+
+ if (!cleared && REG_P (target))
+ /* Inform later passes that the old value is dead. */
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
+
+ /* Store each element of the constructor into the corresponding
+ element of TARGET, determined by counting the elements. */
+ for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
+ elt;
+ elt = TREE_CHAIN (elt), i += bitsize / elt_size)
+ {
+ tree value = TREE_VALUE (elt);
+ tree index = TREE_PURPOSE (elt);
+ HOST_WIDE_INT eltpos;
+
+ bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
+ if (cleared && initializer_zerop (value))
+ continue;
+
+ if (index != 0)
+ eltpos = tree_low_cst (index, 1);
+ else
+ eltpos = i;
+
+ if (vector)
+ {
+ /* Vector CONSTRUCTORs should only be built from smaller
+ vectors in the case of BLKmode vectors. */
+ gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
+ vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
+ }
+ else
+ {
+ enum machine_mode value_mode =
+ TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
? TYPE_MODE (TREE_TYPE (value))
: eltmode;
- bitpos = eltpos * elt_size;
- store_constructor_field (target, bitsize, bitpos, value_mode, value,
- type, cleared, get_alias_set (elttype));
- }
- }
-
- if (vector)
- emit_insn (GEN_FCN (icode) (target,
- gen_rtx_PARALLEL (GET_MODE (target),
- gen_rtvec_v (n_elts, vector))));
- }
-
- /* Set constructor assignments. */
- else if (TREE_CODE (type) == SET_TYPE)
- {
- tree elt = CONSTRUCTOR_ELTS (exp);
- unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
- tree domain = TYPE_DOMAIN (type);
- tree domain_min, domain_max, bitlength;
-
- /* The default implementation strategy is to extract the constant
- parts of the constructor, use that to initialize the target,
- and then "or" in whatever non-constant ranges we need in addition.
-
- If a large set is all zero or all ones, it is
- probably better to set it using memset.
- Also, if a large set has just a single range, it may also be
- better to first clear all the first clear the set (using
- memset), and set the bits we want. */
-
- /* Check for all zeros. */
- if (elt == NULL_TREE && size > 0)
- {
- if (!cleared)
- clear_storage (target, GEN_INT (size));
- return;
- }
-
- domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
- domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
- bitlength = size_binop (PLUS_EXPR,
- size_diffop (domain_max, domain_min),
- ssize_int (1));
-
- nbits = tree_low_cst (bitlength, 1);
-
- /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
- are "complicated" (more than one range), initialize (the
- constant parts) by copying from a constant. */
- if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
- || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
- {
- unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
- enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
- char *bit_buffer = alloca (nbits);
- HOST_WIDE_INT word = 0;
- unsigned int bit_pos = 0;
- unsigned int ibit = 0;
- unsigned int offset = 0; /* In bytes from beginning of set. */
-
- elt = get_set_constructor_bits (exp, bit_buffer, nbits);
- for (;;)
- {
- if (bit_buffer[ibit])
- {
- if (BYTES_BIG_ENDIAN)
- word |= (1 << (set_word_size - 1 - bit_pos));
- else
- word |= 1 << bit_pos;
- }
-
- bit_pos++; ibit++;
- if (bit_pos >= set_word_size || ibit == nbits)
- {
- if (word != 0 || ! cleared)
- {
- rtx datum = gen_int_mode (word, mode);
- rtx to_rtx;
-
- /* The assumption here is that it is safe to use
- XEXP if the set is multi-word, but not if
- it's single-word. */
- if (MEM_P (target))
- to_rtx = adjust_address (target, mode, offset);
- else if (offset == 0)
- to_rtx = target;
- else
- abort ();
- emit_move_insn (to_rtx, datum);
- }
-
- if (ibit == nbits)
- break;
- word = 0;
- bit_pos = 0;
- offset += set_word_size / BITS_PER_UNIT;
- }
- }
- }
- else if (!cleared)
- /* Don't bother clearing storage if the set is all ones. */
- if (TREE_CHAIN (elt) != NULL_TREE
- || (TREE_PURPOSE (elt) == NULL_TREE
- ? nbits != 1
- : ( ! host_integerp (TREE_VALUE (elt), 0)
- || ! host_integerp (TREE_PURPOSE (elt), 0)
- || (tree_low_cst (TREE_VALUE (elt), 0)
- - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
- != (HOST_WIDE_INT) nbits))))
- clear_storage (target, expr_size (exp));
-
- for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
- {
- /* Start of range of element or NULL. */
- tree startbit = TREE_PURPOSE (elt);
- /* End of range of element, or element value. */
- tree endbit = TREE_VALUE (elt);
- HOST_WIDE_INT startb, endb;
- rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
-
- bitlength_rtx = expand_expr (bitlength,
- NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
-
- /* Handle non-range tuple element like [ expr ]. */
- if (startbit == NULL_TREE)
- {
- startbit = save_expr (endbit);
- endbit = startbit;
- }
+ bitpos = eltpos * elt_size;
+ store_constructor_field (target, bitsize, bitpos,
+ value_mode, value, type,
+ cleared, get_alias_set (elttype));
+ }
+ }
+
+ if (vector)
+ emit_insn (GEN_FCN (icode)
+ (target,
+ gen_rtx_PARALLEL (GET_MODE (target),
+ gen_rtvec_v (n_elts, vector))));
+ break;
+ }
- startbit = convert (sizetype, startbit);
- endbit = convert (sizetype, endbit);
- if (! integer_zerop (domain_min))
- {
- startbit = size_binop (MINUS_EXPR, startbit, domain_min);
- endbit = size_binop (MINUS_EXPR, endbit, domain_min);
- }
- startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
+ /* Set constructor assignments. */
+ case SET_TYPE:
+ {
+ tree elt = CONSTRUCTOR_ELTS (exp);
+ unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
+ tree domain = TYPE_DOMAIN (type);
+ tree domain_min, domain_max, bitlength;
+
+ /* The default implementation strategy is to extract the
+ constant parts of the constructor, use that to initialize
+ the target, and then "or" in whatever non-constant ranges
+ we need in addition.
+
+ If a large set is all zero or all ones, it is probably
+ better to set it using memset. Also, if a large set has
+	   just a single range, it may also be better to first clear
+	   the whole set (using memset), and then set the bits we
+	   want.  */
+
+ /* Check for all zeros. */
+ if (elt == NULL_TREE && size > 0)
+ {
+ if (!cleared)
+ clear_storage (target, GEN_INT (size));
+ return;
+ }
+
+ domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
+ domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
+ bitlength = size_binop (PLUS_EXPR,
+ size_diffop (domain_max, domain_min),
+ ssize_int (1));
+
+ nbits = tree_low_cst (bitlength, 1);
+
+ /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
+ that are "complicated" (more than one range), initialize
+ (the constant parts) by copying from a constant. */
+ if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
+ || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
+ {
+ unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
+ enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
+ char *bit_buffer = alloca (nbits);
+ HOST_WIDE_INT word = 0;
+ unsigned int bit_pos = 0;
+ unsigned int ibit = 0;
+ unsigned int offset = 0; /* In bytes from beginning of set. */
+
+ elt = get_set_constructor_bits (exp, bit_buffer, nbits);
+ for (;;)
+ {
+ if (bit_buffer[ibit])
+ {
+ if (BYTES_BIG_ENDIAN)
+ word |= (1 << (set_word_size - 1 - bit_pos));
+ else
+ word |= 1 << bit_pos;
+ }
+
+ bit_pos++; ibit++;
+ if (bit_pos >= set_word_size || ibit == nbits)
+ {
+ if (word != 0 || ! cleared)
+ {
+ rtx datum = gen_int_mode (word, mode);
+ rtx to_rtx;
+
+ /* The assumption here is that it is safe to
+ use XEXP if the set is multi-word, but not
+ if it's single-word. */
+ if (MEM_P (target))
+ to_rtx = adjust_address (target, mode, offset);
+ else
+ {
+ gcc_assert (!offset);
+ to_rtx = target;
+ }
+ emit_move_insn (to_rtx, datum);
+ }
+
+ if (ibit == nbits)
+ break;
+ word = 0;
+ bit_pos = 0;
+ offset += set_word_size / BITS_PER_UNIT;
+ }
+ }
+ }
+ else if (!cleared)
+ /* Don't bother clearing storage if the set is all ones. */
+ if (TREE_CHAIN (elt) != NULL_TREE
+ || (TREE_PURPOSE (elt) == NULL_TREE
+ ? nbits != 1
+ : ( ! host_integerp (TREE_VALUE (elt), 0)
+ || ! host_integerp (TREE_PURPOSE (elt), 0)
+ || (tree_low_cst (TREE_VALUE (elt), 0)
+ - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
+ != (HOST_WIDE_INT) nbits))))
+ clear_storage (target, expr_size (exp));
+
+ for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
+ {
+ /* Start of range of element or NULL. */
+ tree startbit = TREE_PURPOSE (elt);
+ /* End of range of element, or element value. */
+ tree endbit = TREE_VALUE (elt);
+ HOST_WIDE_INT startb, endb;
+ rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
+
+ bitlength_rtx = expand_expr (bitlength,
+ NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
+
+ /* Handle non-range tuple element like [ expr ]. */
+ if (startbit == NULL_TREE)
+ {
+ startbit = save_expr (endbit);
+ endbit = startbit;
+ }
+
+ startbit = convert (sizetype, startbit);
+ endbit = convert (sizetype, endbit);
+ if (! integer_zerop (domain_min))
+ {
+ startbit = size_binop (MINUS_EXPR, startbit, domain_min);
+ endbit = size_binop (MINUS_EXPR, endbit, domain_min);
+ }
+ startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
+ EXPAND_CONST_ADDRESS);
+ endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
EXPAND_CONST_ADDRESS);
- endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
- EXPAND_CONST_ADDRESS);
-
- if (REG_P (target))
- {
- targetx
- = assign_temp
+
+ if (REG_P (target))
+ {
+ targetx
+ = assign_temp
((build_qualified_type (lang_hooks.types.type_for_mode
(GET_MODE (target), 0),
TYPE_QUAL_CONST)),
0, 1, 1);
- emit_move_insn (targetx, target);
- }
+ emit_move_insn (targetx, target);
+ }
+
+ else
+ {
+ gcc_assert (MEM_P (target));
+ targetx = target;
+ }
- else if (MEM_P (target))
- targetx = target;
- else
- abort ();
-
- /* Optimization: If startbit and endbit are constants divisible
- by BITS_PER_UNIT, call memset instead. */
- if (TREE_CODE (startbit) == INTEGER_CST
- && TREE_CODE (endbit) == INTEGER_CST
- && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
- && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
- {
- emit_library_call (memset_libfunc, LCT_NORMAL,
- VOIDmode, 3,
- plus_constant (XEXP (targetx, 0),
- startb / BITS_PER_UNIT),
- Pmode,
- constm1_rtx, TYPE_MODE (integer_type_node),
- GEN_INT ((endb - startb) / BITS_PER_UNIT),
- TYPE_MODE (sizetype));
- }
- else
- emit_library_call (setbits_libfunc, LCT_NORMAL,
- VOIDmode, 4, XEXP (targetx, 0),
- Pmode, bitlength_rtx, TYPE_MODE (sizetype),
- startbit_rtx, TYPE_MODE (sizetype),
- endbit_rtx, TYPE_MODE (sizetype));
-
- if (REG_P (target))
- emit_move_insn (target, targetx);
- }
+ /* Optimization: If startbit and endbit are constants divisible
+ by BITS_PER_UNIT, call memset instead. */
+ if (TREE_CODE (startbit) == INTEGER_CST
+ && TREE_CODE (endbit) == INTEGER_CST
+ && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
+ && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
+ {
+ emit_library_call (memset_libfunc, LCT_NORMAL,
+ VOIDmode, 3,
+ plus_constant (XEXP (targetx, 0),
+ startb / BITS_PER_UNIT),
+ Pmode,
+ constm1_rtx, TYPE_MODE (integer_type_node),
+ GEN_INT ((endb - startb) / BITS_PER_UNIT),
+ TYPE_MODE (sizetype));
+ }
+ else
+ emit_library_call (setbits_libfunc, LCT_NORMAL,
+ VOIDmode, 4, XEXP (targetx, 0),
+ Pmode, bitlength_rtx, TYPE_MODE (sizetype),
+ startbit_rtx, TYPE_MODE (sizetype),
+ endbit_rtx, TYPE_MODE (sizetype));
+
+ if (REG_P (target))
+ emit_move_insn (target, targetx);
+ }
+ break;
+ }
+ default:
+ gcc_unreachable ();
}
-
- else
- abort ();
}
/* Store the value of EXP (an expression tree)
{
/* We're storing into a struct containing a single __complex. */
- if (bitpos != 0)
- abort ();
+ gcc_assert (!bitpos);
return store_expr (exp, target, value_mode != VOIDmode);
}
boundary. If so, we simply do a block copy. */
if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
{
- if (!MEM_P (target) || !MEM_P (temp)
- || bitpos % BITS_PER_UNIT != 0)
- abort ();
+ gcc_assert (MEM_P (target) && MEM_P (temp)
+ && !(bitpos % BITS_PER_UNIT));
target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
emit_block_move (target, temp,
gen_int_mode (width_mask, tmode),
NULL_RTX);
- count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
+ count = build_int_cst (NULL_TREE,
+ GET_MODE_BITSIZE (tmode) - bitsize);
temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
}
/* If a size was specified in the ARRAY_REF, it's the size measured
in alignment units of the element type. So multiply by that value. */
if (aligned_size)
- return size_binop (MULT_EXPR, aligned_size,
- size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
+ {
+ /* ??? tree_ssa_useless_type_conversion will eliminate casts to
+ sizetype from another type of the same width and signedness. */
+ if (TREE_TYPE (aligned_size) != sizetype)
+ aligned_size = fold_convert (sizetype, aligned_size);
+ return size_binop (MULT_EXPR, aligned_size,
+ size_int (TYPE_ALIGN_UNIT (elmt_type)));
+ }
/* Otherwise, take the size from that of the element type. Substitute
any PLACEHOLDER_EXPR that we have. */
return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
/* Otherwise, return a zero of the appropriate type. */
- return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
+ return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Return a tree representing the upper bound of the array mentioned in
in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
value. */
if (aligned_offset)
- return size_binop (MULT_EXPR, aligned_offset,
- size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
+ {
+ /* ??? tree_ssa_useless_type_conversion will eliminate casts to
+ sizetype from another type of the same width and signedness. */
+ if (TREE_TYPE (aligned_offset) != sizetype)
+ aligned_offset = fold_convert (sizetype, aligned_offset);
+ return size_binop (MULT_EXPR, aligned_offset,
+ size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
+ }
/* Otherwise, take the offset from that of the field. Substitute
any PLACEHOLDER_EXPR that we have. */
case WITH_CLEANUP_EXPR:
case CLEANUP_POINT_EXPR:
/* Lowered by gimplify.c. */
- abort ();
+ gcc_unreachable ();
case SAVE_EXPR:
return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
factor = highest_pow2_factor (exp);
if (TREE_CODE (target) == COMPONENT_REF)
- target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
+ target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
else
- target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
+ target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
return MAX (factor, target_align);
}
\f
? !TREE_ASM_WRITTEN (var)
: !DECL_RTL_SET_P (var))
{
- if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
- {
- /* Prepare a mem & address for the decl. */
- rtx x;
-
- if (TREE_STATIC (var))
- abort ();
-
- x = gen_rtx_MEM (DECL_MODE (var),
- gen_reg_rtx (Pmode));
-
- set_mem_attributes (x, var, 1);
- SET_DECL_RTL (var, x);
- }
+ if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
+ /* Should be ignored. */;
else if (lang_hooks.expand_decl (var))
/* OK. */;
else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
expand_decl (var);
else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
rest_of_decl_compilation (var, 0, 0);
- else if (TREE_CODE (var) == TYPE_DECL
- || TREE_CODE (var) == CONST_DECL
- || TREE_CODE (var) == FUNCTION_DECL
- || TREE_CODE (var) == LABEL_DECL)
- /* No expansion needed. */;
else
- abort ();
+ /* No expansion needed. */
+ gcc_assert (TREE_CODE (var) == TYPE_DECL
+ || TREE_CODE (var) == CONST_DECL
+ || TREE_CODE (var) == FUNCTION_DECL
+ || TREE_CODE (var) == LABEL_DECL);
}
}
}
\f
+/* A subroutine of expand_expr.  Evaluate the address of EXP.
+   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.
+
+   Returns an rtx holding the address.  EXP must be something allowed
+   by is_gimple_addressable (a decl, an INDIRECT_REF, a component
+   reference, a constant, ...); the switch below handles each shape.  */
+
+static rtx
+expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
+		       enum expand_modifier modifier)
+{
+  rtx result, subtarget;
+  tree inner, offset;
+  HOST_WIDE_INT bitsize, bitpos;
+  int volatilep, unsignedp;
+  enum machine_mode mode1;
+
+  /* If we are taking the address of a constant and are at the top level,
+     we have to use output_constant_def since we can't call force_const_mem
+     at top level.  */
+  /* ??? This should be considered a front-end bug.  We should not be
+     generating ADDR_EXPR of something that isn't an LVALUE.  The only
+     exception here is STRING_CST.  */
+  if (TREE_CODE (exp) == CONSTRUCTOR
+      || TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
+    return XEXP (output_constant_def (exp, 0), 0);
+
+  /* Everything must be something allowed by is_gimple_addressable.  */
+  switch (TREE_CODE (exp))
+    {
+    case INDIRECT_REF:
+      /* This case will happen via recursion for &a->b.  */
+      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
+
+    case CONST_DECL:
+      /* Recurse and make the output_constant_def clause above handle this.  */
+      return expand_expr_addr_expr (DECL_INITIAL (exp), target,
+				    tmode, modifier);
+
+    case REALPART_EXPR:
+      /* The real part of the complex number is always first, therefore
+	 the address is the same as the address of the parent object.  */
+      offset = 0;
+      bitpos = 0;
+      inner = TREE_OPERAND (exp, 0);
+      break;
+
+    case IMAGPART_EXPR:
+      /* The imaginary part of the complex number is always second.
+	 The expression is therefore always offset by the size of the
+	 scalar type.  */
+      offset = 0;
+      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
+      inner = TREE_OPERAND (exp, 0);
+      break;
+
+    default:
+      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
+	 expand_expr, as that can have various side effects; LABEL_DECLs for
+	 example, may not have their DECL_RTL set yet.  Assume language
+	 specific tree nodes can be expanded in some interesting way.  */
+      if (DECL_P (exp)
+	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
+	{
+	  result = expand_expr (exp, target, tmode,
+				modifier == EXPAND_INITIALIZER
+				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
+
+	  /* If the DECL isn't in memory, then the DECL wasn't properly
+	     marked TREE_ADDRESSABLE, which will be either a front-end
+	     or a tree optimizer bug.  */
+	  gcc_assert (GET_CODE (result) == MEM);
+	  result = XEXP (result, 0);
+
+	  /* ??? Is this needed anymore?  */
+	  /* NOTE(review): `!TREE_USED (exp) == 0` parses as
+	     `(!TREE_USED (exp)) == 0`, i.e. it is true only when
+	     TREE_USED is ALREADY set, so assemble_external runs for
+	     already-used decls and the `TREE_USED (exp) = 1` store is
+	     a no-op.  That looks inverted relative to the apparent
+	     intent (mark a not-yet-used decl) -- confirm before
+	     changing, since the condition may be deliberately dead.  */
+	  if (!TREE_USED (exp) == 0)
+	    {
+	      assemble_external (exp);
+	      TREE_USED (exp) = 1;
+	    }
+
+	  if (modifier != EXPAND_INITIALIZER
+	      && modifier != EXPAND_CONST_ADDRESS)
+	    result = force_operand (result, target);
+	  return result;
+	}
+
+      /* Otherwise decompose the reference into its base object plus
+	 bit offset (and possibly a variable byte OFFSET), handled
+	 below after recursing on the base.  */
+      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
+				   &mode1, &unsignedp, &volatilep);
+      break;
+    }
+
+  /* We must have made progress.  */
+  gcc_assert (inner != exp);
+
+  /* TARGET can only receive the final value; if an offset or bit
+     position must still be added, compute the base address into a
+     fresh pseudo instead.  */
+  subtarget = offset || bitpos ? NULL_RTX : target;
+  result = expand_expr_addr_expr (inner, subtarget, tmode, modifier);
+
+  /* If the caller didn't constrain the mode, take it from the result,
+     falling back to the target's pointer mode.  */
+  if (tmode == VOIDmode)
+    {
+      tmode = GET_MODE (result);
+      if (tmode == VOIDmode)
+	tmode = Pmode;
+    }
+
+  if (offset)
+    {
+      rtx tmp;
+
+      if (modifier != EXPAND_NORMAL)
+	result = force_operand (result, NULL);
+      tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
+
+      /* Under EXPAND_SUM a bare PLUS may be returned; otherwise emit
+	 the addition now.  */
+      if (modifier == EXPAND_SUM)
+	result = gen_rtx_PLUS (tmode, result, tmp);
+      else
+	{
+	  subtarget = bitpos ? NULL_RTX : target;
+	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
+					1, OPTAB_LIB_WIDEN);
+	}
+    }
+
+  if (bitpos)
+    {
+      /* Someone beforehand should have rejected taking the address
+	 of such an object.  */
+      gcc_assert (!(bitpos % BITS_PER_UNIT));
+
+      result = plus_constant (result, bitpos / BITS_PER_UNIT);
+      if (modifier < EXPAND_SUM)
+	result = force_operand (result, target);
+    }
+
+  return result;
+}
+
/* expand_expr: generate code for computing expression EXP.
An rtx for the computed value is returned. The value is never null.
In the case of a void EXP, const0_rtx is returned.
return temp;
}
+ case SSA_NAME:
+ return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
+ NULL);
+
case PARM_DECL:
case VAR_DECL:
/* If a static var's type was incomplete when the decl was written,
case FUNCTION_DECL:
case RESULT_DECL:
- if (DECL_RTL (exp) == 0)
- abort ();
+ gcc_assert (DECL_RTL (exp));
/* Ensure variable marked as used even if it doesn't go through
a parser. If it hasn't be used yet, write out an external
/* Variables inherited from containing functions should have
been lowered by this point. */
context = decl_function_context (exp);
- if (context != 0
- && context != current_function_decl
- && !TREE_STATIC (exp)
- /* ??? C++ creates functions that are not TREE_STATIC. */
- && TREE_CODE (exp) != FUNCTION_DECL)
- abort ();
+ gcc_assert (!context
+ || context == current_function_decl
+ || TREE_STATIC (exp)
+ /* ??? C++ creates functions that are not TREE_STATIC. */
+ || TREE_CODE (exp) == FUNCTION_DECL);
/* This is the case of an array whose size is to be determined
from its initializer, while the initializer is still being parsed.
See expand_decl. */
- else if (MEM_P (DECL_RTL (exp))
+ if (MEM_P (DECL_RTL (exp))
&& REG_P (XEXP (DECL_RTL (exp), 0)))
temp = validize_mem (DECL_RTL (exp));
if (REG_P (DECL_RTL (exp))
&& GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
{
+ enum machine_mode pmode;
+
/* Get the signedness used for this variable. Ensure we get the
same mode we got when the variable was declared. */
- if (GET_MODE (DECL_RTL (exp))
- != promote_mode (type, DECL_MODE (exp), &unsignedp,
- (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
- abort ();
+ pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
+ (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
+ gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
SUBREG_PROMOTED_VAR_P (temp) = 1;
tree val = TREE_OPERAND (exp, 0);
rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
- if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
+ if (!SAVE_EXPR_RESOLVED_P (exp))
{
/* We can indeed still hit this case, typically via builtin
expanders calling save_expr immediately before expanding
something. Assume this means that we only have to deal
with non-BLKmode values. */
- if (GET_MODE (ret) == BLKmode)
- abort ();
+ gcc_assert (GET_MODE (ret) != BLKmode);
val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
DECL_ARTIFICIAL (val) = 1;
+ DECL_IGNORED_P (val) = 1;
TREE_OPERAND (exp, 0) = val;
+ SAVE_EXPR_RESOLVED_P (exp) = 1;
if (!CONSTANT_P (ret))
ret = copy_to_reg (ret);
case INDIRECT_REF:
{
tree exp1 = TREE_OPERAND (exp, 0);
+ tree orig;
if (modifier != EXPAND_WRITE)
{
op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
op0 = memory_address (mode, op0);
temp = gen_rtx_MEM (mode, op0);
- set_mem_attributes (temp, exp, 0);
- /* If we are writing to this object and its type is a record with
- readonly fields, we must mark it as readonly so it will
- conflict with readonly references to those fields. */
- if (modifier == EXPAND_WRITE && readonly_fields_p (type))
- RTX_UNCHANGING_P (temp) = 1;
+ orig = REF_ORIGINAL (exp);
+ if (!orig)
+ orig = exp;
+ set_mem_attributes (temp, orig, 0);
return temp;
}
case ARRAY_REF:
-#ifdef ENABLE_CHECKING
- if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
- abort ();
-#endif
-
{
tree array = TREE_OPERAND (exp, 0);
tree low_bound = array_ref_low_bound (exp);
tree index = convert (sizetype, TREE_OPERAND (exp, 1));
HOST_WIDE_INT i;
+ gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
+
/* Optimize the special-case of a zero lower bound.
We convert the low_bound to sizetype to avoid some problems
else
{
tree count
- = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
- 0);
+ = build_int_cst (NULL_TREE,
+ GET_MODE_BITSIZE (imode) - bitsize);
op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
target, 0);
/* If we got back the original object, something is wrong. Perhaps
we are evaluating an expression too early. In any event, don't
infinitely recurse. */
- if (tem == exp)
- abort ();
+ gcc_assert (tem != exp);
/* If TEM's type is a union of variable size, pass TARGET to the inner
computation, since it will need a temporary and TARGET is known
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
EXPAND_SUM);
- if (!MEM_P (op0))
- abort ();
+ gcc_assert (MEM_P (op0));
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (offset_rtx) != Pmode)
one element arrays having the same mode as its element. */
if (GET_CODE (op0) == CONCAT)
{
- if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
- abort ();
+ gcc_assert (bitpos == 0
+ && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
return op0;
}
/* In this case, BITPOS must start at a byte boundary and
TARGET, if specified, must be a MEM. */
- if (!MEM_P (op0)
- || (target != 0 && !MEM_P (target))
- || bitpos % BITS_PER_UNIT != 0)
- abort ();
+ gcc_assert (MEM_P (op0)
+ && (!target || MEM_P (target))
+ && !(bitpos % BITS_PER_UNIT));
emit_block_move (target,
adjust_address (op0, VOIDmode,
adjust_address (target, TYPE_MODE (valtype), 0),
modifier == EXPAND_STACK_PARM ? 2 : 0);
- else if (REG_P (target))
- /* Store this field into a union of the proper type. */
- store_field (target,
- MIN ((int_size_in_bytes (TREE_TYPE
- (TREE_OPERAND (exp, 0)))
- * BITS_PER_UNIT),
- (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
- 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
- VOIDmode, 0, type, 0);
else
- abort ();
+ {
+ gcc_assert (REG_P (target));
+
+ /* Store this field into a union of the proper type. */
+ store_field (target,
+ MIN ((int_size_in_bytes (TREE_TYPE
+ (TREE_OPERAND (exp, 0)))
+ * BITS_PER_UNIT),
+ (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
+ 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
+ VOIDmode, 0, type, 0);
+ }
/* Return the entire union. */
return target;
constants to change mode. */
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- if (TREE_ADDRESSABLE (exp))
- abort ();
+ gcc_assert (!TREE_ADDRESSABLE (exp));
if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
target
temp_size, 0, type);
rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
- if (TREE_ADDRESSABLE (exp))
- abort ();
+ gcc_assert (!TREE_ADDRESSABLE (exp));
if (GET_MODE (op0) == BLKmode)
emit_block_move (new_with_op0_mode, op0,
case FIX_ROUND_EXPR:
case FIX_FLOOR_EXPR:
case FIX_CEIL_EXPR:
- abort (); /* Not used for C. */
+ gcc_unreachable (); /* Not used for C. */
case FIX_TRUNC_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
temp = expand_unop (mode,
optab_for_tree_code (NEGATE_EXPR, type),
op0, target, 0);
- if (temp == 0)
- abort ();
+ gcc_assert (temp);
return REDUCE_BIT_FIELD (temp);
case ABS_EXPR:
target = 0;
/* ABS_EXPR is not valid for complex arguments. */
- if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
- || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
- abort ();
+ gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
+ && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
/* Unsigned abs is simply the operand. Testing here means we don't
risk generating incorrect code below. */
if (modifier == EXPAND_STACK_PARM)
target = 0;
temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
- if (temp == 0)
- abort ();
+ gcc_assert (temp);
return temp;
/* ??? Can optimize bitwise operations with one arg constant.
only with operands that are always zero or one. */
temp = expand_binop (mode, xor_optab, op0, const1_rtx,
target, 1, OPTAB_LIB_WIDEN);
- if (temp == 0)
- abort ();
+ gcc_assert (temp);
return temp;
case STATEMENT_LIST:
{
tree_stmt_iterator iter;
- if (!ignore)
- abort ();
+ gcc_assert (ignore);
for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
tree pred = TREE_OPERAND (exp, 0);
tree then_ = TREE_OPERAND (exp, 1);
tree else_ = TREE_OPERAND (exp, 2);
-
- if (TREE_CODE (then_) != GOTO_EXPR
- || TREE_CODE (GOTO_DESTINATION (then_)) != LABEL_DECL
- || TREE_CODE (else_) != GOTO_EXPR
- || TREE_CODE (GOTO_DESTINATION (else_)) != LABEL_DECL)
- abort ();
-
+
+ gcc_assert (TREE_CODE (then_) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
+ && TREE_CODE (else_) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
+
jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
return expand_expr (else_, const0_rtx, VOIDmode, 0);
}
-
+
/* Note that COND_EXPRs whose type is a structure or union
are required to be constructed to contain assignments of
a temporary variable, so that we can evaluate them here
for side effect only. If type is void, we must do likewise. */
- if (TREE_ADDRESSABLE (type)
- || ignore
- || TREE_TYPE (TREE_OPERAND (exp, 1)) == void_type_node
- || TREE_TYPE (TREE_OPERAND (exp, 2)) == void_type_node)
- abort ();
-
+ gcc_assert (!TREE_ADDRESSABLE (type)
+ && !ignore
+ && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
+ && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
+
/* If we are not to produce a result, we have no target. Otherwise,
if a target was specified use it; it will not be used as an
intermediate target unless it is safe. If no target, use a
temporary. */
-
+
if (modifier != EXPAND_STACK_PARM
&& original_target
&& safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
temp = original_target;
else
temp = assign_temp (type, 0, 0, 1);
-
+
do_pending_stack_adjust ();
NO_DEFER_POP;
op0 = gen_label_rtx ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
store_expr (TREE_OPERAND (exp, 1), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
-
+
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
store_expr (TREE_OPERAND (exp, 2), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
-
+
emit_label (op1);
OK_DEFER_POP;
return temp;
-
+
case MODIFY_EXPR:
{
/* If lhs is complex, expand calls in rhs before computing it.
return const0_rtx;
case ADDR_EXPR:
- if (modifier == EXPAND_STACK_PARM)
- target = 0;
- /* If we are taking the address of something erroneous, just
- return a zero. */
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
- return const0_rtx;
- /* If we are taking the address of a constant and are at the
- top level, we have to use output_constant_def since we can't
- call force_const_mem at top level. */
- else if (cfun == 0
- && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
- || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
- == 'c')))
- op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
- else
- {
- /* We make sure to pass const0_rtx down if we came in with
- ignore set, to avoid doing the cleanups twice for something. */
- op0 = expand_expr (TREE_OPERAND (exp, 0),
- ignore ? const0_rtx : NULL_RTX, VOIDmode,
- (modifier == EXPAND_INITIALIZER
- ? modifier : EXPAND_CONST_ADDRESS));
-
- /* If we are going to ignore the result, OP0 will have been set
- to const0_rtx, so just return it. Don't get confused and
- think we are taking the address of the constant. */
- if (ignore)
- return op0;
-
- /* We would like the object in memory. If it is a constant, we can
- have it be statically allocated into memory. For a non-constant,
- we need to allocate some memory and store the value into it. */
-
- if (CONSTANT_P (op0))
- op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
- op0);
- else if (REG_P (op0) || GET_CODE (op0) == SUBREG
- || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
- || GET_CODE (op0) == LO_SUM)
- {
- /* If this object is in a register, it can't be BLKmode. */
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- rtx memloc = assign_temp (inner_type, 1, 1, 1);
-
- if (GET_CODE (op0) == PARALLEL)
- /* Handle calls that pass values in multiple
- non-contiguous locations. The Irix 6 ABI has examples
- of this. */
- emit_group_store (memloc, op0, inner_type,
- int_size_in_bytes (inner_type));
- else
- emit_move_insn (memloc, op0);
-
- op0 = memloc;
- }
-
- if (!MEM_P (op0))
- abort ();
-
- mark_temp_addr_taken (op0);
- if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
- {
- op0 = XEXP (op0, 0);
- if (GET_MODE (op0) == Pmode && mode == ptr_mode)
- op0 = convert_memory_address (ptr_mode, op0);
- return op0;
- }
-
- /* If OP0 is not aligned as least as much as the type requires, we
- need to make a temporary, copy OP0 to it, and take the address of
- the temporary. We want to use the alignment of the type, not of
- the operand. Note that this is incorrect for FUNCTION_TYPE, but
- the test for BLKmode means that can't happen. The test for
- BLKmode is because we never make mis-aligned MEMs with
- non-BLKmode.
-
- We don't need to do this at all if the machine doesn't have
- strict alignment. */
- if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
- && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
- > MEM_ALIGN (op0))
- && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
- {
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- rtx new;
-
- if (TYPE_ALIGN_OK (inner_type))
- abort ();
-
- if (TREE_ADDRESSABLE (inner_type))
- {
- /* We can't make a bitwise copy of this object, so fail. */
- error ("cannot take the address of an unaligned member");
- return const0_rtx;
- }
-
- new = assign_stack_temp_for_type
- (TYPE_MODE (inner_type),
- MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
- : int_size_in_bytes (inner_type),
- 1, build_qualified_type (inner_type,
- (TYPE_QUALS (inner_type)
- | TYPE_QUAL_CONST)));
-
- emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
- (modifier == EXPAND_STACK_PARM
- ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
-
- op0 = new;
- }
-
- op0 = force_operand (XEXP (op0, 0), target);
- }
-
- if (flag_force_addr
- && !REG_P (op0)
- && modifier != EXPAND_CONST_ADDRESS
- && modifier != EXPAND_INITIALIZER
- && modifier != EXPAND_SUM)
- op0 = force_reg (Pmode, op0);
-
- if (REG_P (op0)
- && ! REG_USERVAR_P (op0))
- mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
-
- if (GET_MODE (op0) == Pmode && mode == ptr_mode)
- op0 = convert_memory_address (ptr_mode, op0);
-
- return op0;
+ return expand_expr_addr_expr (TREE_OPERAND (exp, 0), target,
+ tmode, modifier);
/* COMPLEX type for Extended Pascal & Fortran */
case COMPLEX_EXPR:
case EH_FILTER_EXPR:
case TRY_FINALLY_EXPR:
/* Lowered by tree-eh.c. */
- abort ();
+ gcc_unreachable ();
case WITH_CLEANUP_EXPR:
case CLEANUP_POINT_EXPR:
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
/* Lowered by gimplify.c. */
- abort ();
+ gcc_unreachable ();
case EXC_PTR_EXPR:
return get_exception_pointer (cfun);
case FDESC_EXPR:
/* Function descriptors are not valid except for as
initialization constants, and should not be expanded. */
- abort ();
+ gcc_unreachable ();
case SWITCH_EXPR:
expand_case (exp);
target = 0;
temp = expand_binop (mode, this_optab, op0, op1, target,
unsignedp, OPTAB_LIB_WIDEN);
- if (temp == 0)
- abort ();
+ gcc_assert (temp);
return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
}
else
{
- tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
+ tree count = build_int_cst (NULL_TREE,
+ GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
/* Put a constant second. */
code = GET_CODE (result);
label = gen_label_rtx ();
- if (bcc_gen_fctn[(int) code] == 0)
- abort ();
+ gcc_assert (bcc_gen_fctn[(int) code]);
emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
emit_move_insn (target, invert ? const1_rtx : const0_rtx);
#endif
index = memory_address_noforce (CASE_VECTOR_MODE, index);
temp = gen_reg_rtx (CASE_VECTOR_MODE);
- vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
- RTX_UNCHANGING_P (vector) = 1;
- MEM_NOTRAP_P (vector) = 1;
+ vector = gen_const_mem (CASE_VECTOR_MODE, index);
convert_move (temp, vector, 0);
emit_jump_insn (gen_tablejump (temp, table_label));
return 0;
/* Hardware support. Woo hoo! */
- if (VECTOR_MODE_SUPPORTED_P (mode))
+ if (targetm.vector_mode_supported_p (mode))
return 1;
innermode = GET_MODE_INNER (mode);
/* If we have support for the inner mode, we can safely emulate it.
We may not have V2DI, but me can emulate with a pair of DIs. */
- return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
+ return targetm.scalar_mode_supported_p (innermode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
for (; i < units; ++i)
RTVEC_ELT (v, i) = CONST0_RTX (inner);
- return gen_rtx_raw_CONST_VECTOR (mode, v);
+ return gen_rtx_CONST_VECTOR (mode, v);
}
#include "gt-expr.h"