/* Convert tree expression to rtl instructions, for GNU compiler.
Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
+#include "target.h"
/* Decide whether a function's arguments should be processed
from first to last or from last to first.
#endif
#endif
-/* Assume that case vectors are not pc-relative. */
-#ifndef CASE_VECTOR_PC_RELATIVE
-#define CASE_VECTOR_PC_RELATIVE 0
-#endif
-
/* Convert defined/undefined to boolean. */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
the same indirect address eventually. */
int cse_not_expected;
-/* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
-tree placeholder_list = 0;
-
/* This structure is used by move_by_pieces to describe the move to
be performed. */
struct move_by_pieces
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
-static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
+static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
-/* If a memory-to-memory move would take MOVE_RATIO or more simple
- move-instruction sequences, we will do a movstr or libcall instead. */
-
-#ifndef MOVE_RATIO
-#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
-#define MOVE_RATIO 2
-#else
-/* If we are optimizing for space (-Os), cut down the default move ratio. */
-#define MOVE_RATIO (optimize_size ? 3 : 15)
-#endif
-#endif
-
/* This macro is used to determine whether move_by_pieces should be called
to perform a structure copy. */
#ifndef MOVE_BY_PIECES_P
(move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
-/* If a clear memory operation would take CLEAR_RATIO or more simple
- move-instruction sequences, we will do a clrstr or libcall instead. */
-
-#ifndef CLEAR_RATIO
-#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
-#define CLEAR_RATIO 2
-#else
-/* If we are optimizing for space, cut down the default clear ratio. */
-#define CLEAR_RATIO (optimize_size ? 3 : 15)
-#endif
-#endif
-
/* This macro is used to determine whether clear_by_pieces should be
called to clear storage. */
#ifndef CLEAR_BY_PIECES_P
}
}
-/* Perform all the pending incrementations. */
+/* Retrieve a mark on the queue. */
+
+static rtx
+mark_queue (void)
+{
+ return pending_chain;
+}
-void
-emit_queue (void)
+/* Perform all the pending incrementations that have been enqueued
+ after MARK was retrieved. If MARK is null, perform all the
+ pending incrementations. */
+
+static void
+emit_insns_enqueued_after_mark (rtx mark)
{
rtx p;
- while ((p = pending_chain))
+
+ /* The marked incrementation may have been emitted in the meantime
+ through a call to emit_queue. In this case, the mark is not valid
+ anymore so do nothing. */
+ if (mark && ! QUEUED_BODY (mark))
+ return;
+
+ while ((p = pending_chain) != mark)
{
rtx body = QUEUED_BODY (p);
break;
}
+ QUEUED_BODY (p) = 0;
pending_chain = QUEUED_NEXT (p);
}
}
+
+/* Perform all the pending incrementations. */
+
+void
+emit_queue (void)
+{
+ emit_insns_enqueued_after_mark (NULL_RTX);
+}
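+
+/* A typical use of the mark mechanism (a simplified sketch of what
+   store_expr does below): take a mark before expanding an expression,
+   then emit only the increments that were queued while expanding it:
+
+     rtx mark = mark_queue ();
+     temp = expand_expr_real (exp, target, GET_MODE (target),
+			      EXPAND_NORMAL, &alt_rtl);
+     ...
+     emit_insns_enqueued_after_mark (mark);  */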
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
Both modes may be integer, or both may be floating.
rtx value, insns;
convert_optab tab;
- if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
+ if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
tab = sext_optab;
- else if (GET_MODE_BITSIZE (from_mode) > GET_MODE_BITSIZE (to_mode))
+ else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
tab = trunc_optab;
else
abort ();
1, from, from_mode);
insns = get_insns ();
end_sequence ();
- emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
- from));
+ emit_libcall_block (insns, to, value,
+ tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
+ from)
+ : gen_rtx_FLOAT_EXTEND (to_mode, from));
return;
}
if (to_mode == full_mode)
return;
- /* else proceed to integer conversions below */
+ /* else proceed to integer conversions below. */
from_mode = full_mode;
}
!= CODE_FOR_nothing))
{
if (GET_CODE (to) == REG)
- emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
+ {
+ if (reg_overlap_mentioned_p (to, from))
+ from = force_reg (from_mode, from);
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
+ }
convert_move (gen_lowpart (word_mode, to), from, unsignedp);
emit_unop_insn (code, to,
gen_lowpart (word_mode, to), equiv_code);
static bool
block_move_libcall_safe_for_call_parm (void)
{
+ /* If arguments are pushed on the stack, then they're safe. */
if (PUSH_ARGS)
return true;
- else
- {
- /* Check to see whether memcpy takes all register arguments. */
- static enum {
- takes_regs_uninit, takes_regs_no, takes_regs_yes
- } takes_regs = takes_regs_uninit;
- switch (takes_regs)
- {
- case takes_regs_uninit:
- {
- CUMULATIVE_ARGS args_so_far;
- tree fn, arg;
-
- fn = emit_block_move_libcall_fn (false);
- INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
-
- arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
- for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
- {
- enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
- rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
- if (!tmp || !REG_P (tmp))
- goto fail_takes_regs;
-#ifdef FUNCTION_ARG_PARTIAL_NREGS
- if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
- NULL_TREE, 1))
- goto fail_takes_regs;
+ /* If registers go on the stack anyway, any argument is sure to clobber
+ an outgoing argument. */
+#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
+ {
+ tree fn = emit_block_move_libcall_fn (false);
+ (void) fn;
+ if (REG_PARM_STACK_SPACE (fn) != 0)
+ return false;
+ }
#endif
- FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
- }
- }
- takes_regs = takes_regs_yes;
- /* FALLTHRU */
- case takes_regs_yes:
- return true;
+ /* If any argument goes in memory, then it might clobber an outgoing
+ argument. */
+ {
+ CUMULATIVE_ARGS args_so_far;
+ tree fn, arg;
- fail_takes_regs:
- takes_regs = takes_regs_no;
- /* FALLTHRU */
- case takes_regs_no:
- return false;
+ fn = emit_block_move_libcall_fn (false);
+ INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
- default:
- abort ();
- }
- }
+ arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
+ for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
+ {
+ enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
+ rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
+ if (!tmp || !REG_P (tmp))
+ return false;
+#ifdef FUNCTION_ARG_PARTIAL_NREGS
+ if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
+ NULL_TREE, 1))
+ return false;
+#endif
+ FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
+ }
+ }
+ return true;
}
/* A subroutine of emit_block_move. Expand a movstr pattern;
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
+ int save_volatile_ok = volatile_ok;
enum machine_mode mode;
/* Since this is a move insn, we don't care about volatility. */
if (pat)
{
emit_insn (pat);
- volatile_ok = 0;
+ volatile_ok = save_volatile_ok;
return true;
}
else
}
}
- volatile_ok = 0;
+ volatile_ok = save_volatile_ok;
return false;
}
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
where DST is non-consecutive registers represented by a PARALLEL.
SSIZE represents the total size of block ORIG_SRC in bytes, or -1
- if not known. */
+ if not known. */
void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
emit_move_insn (mem, src);
tmps[i] = adjust_address (mem, mode, (int) bytepos);
}
+ else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
+ && XVECLEN (dst, 0) > 1)
+	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
else if (CONSTANT_P (src)
|| (GET_CODE (src) == REG && GET_MODE (src) == mode))
tmps[i] = src;
set of registers starting with SRCREG into TGTBLK. If TGTBLK
is null, a stack temporary is created. TGTBLK is returned.
- The primary purpose of this routine is to handle functions
- that return BLKmode structures in registers. Some machines
- (the PA for example) want to return all small structures
- in registers regardless of the structure's alignment. */
+ The purpose of this routine is to handle functions that return
+ BLKmode structures in registers. Some machines (the PA for example)
+ want to return all small structures in registers regardless of the
+ structure's alignment. */
rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
rtx src = NULL, dst = NULL;
unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
- unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
+ unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
if (tgtblk == 0)
{
if (GET_MODE (srcreg) != BLKmode
&& GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
- srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
-
- /* Structures whose size is not a multiple of a word are aligned
- to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
- machine, this means we must skip the empty high order bytes when
- calculating the bit offset. */
- if (BYTES_BIG_ENDIAN
- && bytes % UNITS_PER_WORD)
- big_endian_correction
+ srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
+
+ /* If the structure doesn't take up a whole number of words, see whether
+ SRCREG is padded on the left or on the right. If it's on the left,
+ set PADDING_CORRECTION to the number of bits to skip.
+
+     In most ABIs, the structure will be returned at the least significant
+     end of the register, which translates to right padding on little-endian
+ targets and left padding on big-endian targets. The opposite
+ holds if the structure is returned at the most significant
+ end of the register. */
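+  /* For example, with 32-bit words and a 3-byte structure padded on the
+     left, the correction computed below is 32 - 3 * 8 = 8 bits, so the
+     copy skips the unused high-order byte of the first word.  */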
+ if (bytes % UNITS_PER_WORD != 0
+ && (targetm.calls.return_in_msb (type)
+ ? !BYTES_BIG_ENDIAN
+ : BYTES_BIG_ENDIAN))
+ padding_correction
= (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
/* Copy the structure BITSIZE bits at a time.
We could probably emit more efficient code for machines which do not use
strict alignment, but it doesn't seem worth the effort at the current
time. */
- for (bitpos = 0, xbitpos = big_endian_correction;
+ for (bitpos = 0, xbitpos = padding_correction;
bitpos < bytes * BITS_PER_UNIT;
bitpos += bitsize, xbitpos += bitsize)
{
/* We need a new source operand each time xbitpos is on a
- word boundary and when xbitpos == big_endian_correction
+ word boundary and when xbitpos == padding_correction
(the first time through). */
if (xbitpos % BITS_PER_WORD == 0
- || xbitpos == big_endian_correction)
+ || xbitpos == padding_correction)
src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
GET_MODE (srcreg));
rtx temp;
int used = partial * UNITS_PER_WORD;
- int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
+ int offset;
int skip;
+ if (reg && GET_CODE (reg) == PARALLEL)
+ {
+ /* Use the size of the elt to compute offset. */
+ rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
+ used = partial * GET_MODE_SIZE (GET_MODE (elt));
+ offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
+ }
+ else
+ offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
+
if (size == 0)
abort ();
}
to_rtx = offset_address (to_rtx, offset_rtx,
- highest_pow2_factor_for_type (TREE_TYPE (to),
- offset));
+ highest_pow2_factor_for_target (to,
+ offset));
}
if (GET_CODE (to_rtx) == MEM)
}
if (TREE_CODE (to) == COMPONENT_REF
- && TREE_READONLY (TREE_OPERAND (to, 1)))
+ && TREE_READONLY (TREE_OPERAND (to, 1))
+ /* We can't assert that a MEM won't be set more than once
+ if the component is not addressable because another
+ non-addressable component may be referenced by the same MEM. */
+ && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
{
if (to_rtx == orig_to_rtx)
to_rtx = copy_rtx (to_rtx);
return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
TYPE_MODE (TREE_TYPE (from)),
result,
- TREE_UNSIGNED (TREE_TYPE (to)))
+ TYPE_UNSIGNED (TREE_TYPE (to)))
: NULL_RTX);
}
VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
XEXP (from_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
- size, TREE_UNSIGNED (sizetype)),
+ size, TYPE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype));
else
emit_library_call (bcopy_libfunc, LCT_NORMAL,
XEXP (to_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (integer_type_node),
size,
- TREE_UNSIGNED (integer_type_node)),
+ TYPE_UNSIGNED (integer_type_node)),
TYPE_MODE (integer_type_node));
preserve_temp_slots (to_rtx);
store_expr (tree exp, rtx target, int want_value)
{
rtx temp;
+ rtx alt_rtl = NULL_RTX;
+ rtx mark = mark_queue ();
int dont_return_target = 0;
int dont_store_target = 0;
&& INTEGRAL_TYPE_P (TREE_TYPE (exp))
&& TREE_TYPE (TREE_TYPE (exp)) == 0)
{
- if (TREE_UNSIGNED (TREE_TYPE (exp))
+ if (TYPE_UNSIGNED (TREE_TYPE (exp))
!= SUBREG_PROMOTED_UNSIGNED_P (target))
exp = convert
- ((*lang_hooks.types.signed_or_unsigned_type)
+ (lang_hooks.types.signed_or_unsigned_type
(SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
- exp = convert ((*lang_hooks.types.type_for_mode)
+ exp = convert (lang_hooks.types.type_for_mode
(GET_MODE (SUBREG_REG (target)),
SUBREG_PROMOTED_UNSIGNED_P (target)),
exp);
}
else
{
- temp = expand_expr (exp, target, GET_MODE (target),
- want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
+ temp = expand_expr_real (exp, target, GET_MODE (target),
+ (want_value & 2
+ ? EXPAND_STACK_PARM : EXPAND_NORMAL),
+ &alt_rtl);
/* Return TARGET if it's a specified hardware register.
If TARGET is a volatile mem ref, either return TARGET
or return a reg copied *from* TARGET; ANSI requires this.
&& TREE_CODE (exp) != ERROR_MARK
&& GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
- temp, TREE_UNSIGNED (TREE_TYPE (exp)));
+ temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
/* If value was not generated in the target, store it there.
- Convert the value to TARGET's type first if necessary.
+ Convert the value to TARGET's type first if necessary and emit the
+ pending incrementations that have been queued when expanding EXP.
+ Note that we cannot emit the whole queue blindly because this will
+ effectively disable the POST_INC optimization later.
+
If TEMP and TARGET compare equal according to rtx_equal_p, but
one or both of them are volatile memory refs, we have to distinguish
two cases:
|| side_effects_p (target))))
&& TREE_CODE (exp) != ERROR_MARK
&& ! dont_store_target
- /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
- but TARGET is not valid memory reference, TEMP will differ
- from TARGET although it is really the same location. */
- && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
- || target != DECL_RTL_IF_SET (exp))
+ /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
+	 but TARGET is not a valid memory reference, TEMP will differ
+ from TARGET although it is really the same location. */
+ && !(alt_rtl && rtx_equal_p (alt_rtl, target))
/* If there's nothing to copy, don't bother. Don't call expr_size
unless necessary, because some front-ends (C++) expr_size-hook
aborts on objects that are not supposed to be bit-copied or
bit-initialized. */
&& expr_size (exp) != const0_rtx)
{
+ emit_insns_enqueued_after_mark (mark);
target = protect_from_queue (target, 1);
+ temp = protect_from_queue (temp, 0);
if (GET_MODE (temp) != GET_MODE (target)
&& GET_MODE (temp) != VOIDmode)
{
- int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
+ int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
if (dont_return_target)
{
/* In this case, we will return TEMP,
/* Copy that much. */
copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
- TREE_UNSIGNED (sizetype));
+ TYPE_UNSIGNED (sizetype));
emit_block_move (target, temp, copy_size_rtx,
(want_value & 2
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (copy_size_rtx) != Pmode)
copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
- TREE_UNSIGNED (sizetype));
+ TYPE_UNSIGNED (sizetype));
#endif
target = offset_address (target, copy_size_rtx,
(want_value & 2
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
- emit_move_insn (target, temp);
+ {
+ temp = force_operand (temp, target);
+ if (temp != target)
+ emit_move_insn (target, temp);
+ }
}
/* If we don't want a value, return NULL_RTX. */
tree exp, tree type, int cleared, int alias_set)
{
if (TREE_CODE (exp) == CONSTRUCTOR
+ /* We can only call store_constructor recursively if the size and
+ bit position are on a byte boundary. */
&& bitpos % BITS_PER_UNIT == 0
+ && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
/* If we have a nonzero bitpos for a register target, then we just
let store_field do the bitfield handling. This is unlikely to
generate unnecessary clear instructions anyways. */
{
rtx offset_rtx;
- if (CONTAINS_PLACEHOLDER_P (offset))
- offset = build (WITH_RECORD_EXPR, sizetype,
- offset, make_tree (TREE_TYPE (exp), target));
+ offset
+ = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
+ make_tree (TREE_TYPE (exp),
+ target));
offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
if (GET_CODE (to_rtx) != MEM)
if (TYPE_PRECISION (type) < BITS_PER_WORD)
{
- type = (*lang_hooks.types.type_for_size)
- (BITS_PER_WORD, TREE_UNSIGNED (type));
+ type = lang_hooks.types.type_for_size
+ (BITS_PER_WORD, TYPE_UNSIGNED (type));
value = convert (type, value);
}
tree elt;
int i;
int need_to_clear;
- tree domain = TYPE_DOMAIN (type);
+ tree domain;
tree elttype = TREE_TYPE (type);
int const_bounds_p;
HOST_WIDE_INT minelt = 0;
HOST_WIDE_INT maxelt = 0;
+ int icode = 0;
+ rtx *vector = NULL;
+ int elt_size = 0;
+ unsigned n_elts = 0;
- /* Vectors are like arrays, but the domain is stored via an array
- type indirectly. */
- if (TREE_CODE (type) == VECTOR_TYPE)
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ domain = TYPE_DOMAIN (type);
+ else
+ /* Vectors do not have domains; look up the domain of
+ the array embedded in the debug representation type.
+       FIXME: it would probably be more efficient to treat vectors
+ separately from arrays. */
{
- /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
- the same field as TYPE_DOMAIN, we are not guaranteed that
- it always will. */
domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
+ if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
+ {
+ enum machine_mode mode = GET_MODE (target);
+
+ icode = (int) vec_init_optab->handlers[mode].insn_code;
+ if (icode != CODE_FOR_nothing)
+ {
+ unsigned int i;
+
+ elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
+ n_elts = (GET_MODE_SIZE (mode) / elt_size);
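+		/* Collect the RTL for every element of the vector;
+		   entries without an explicit initializer below are
+		   left as CONST0_RTX.  */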
+		vector = alloca (n_elts * sizeof (rtx));
+ for (i = 0; i < n_elts; i++)
+ vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
+ }
+ }
}
const_bounds_p = (TYPE_MIN_VALUE (domain)
need_to_clear = 1;
}
- if (need_to_clear && size > 0)
+ if (need_to_clear && size > 0 && !vector)
{
if (! cleared)
{
if (cleared && is_zeros_p (value))
continue;
- unsignedp = TREE_UNSIGNED (elttype);
+ unsignedp = TYPE_UNSIGNED (elttype);
mode = TYPE_MODE (elttype);
if (mode == BLKmode)
bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
HOST_WIDE_INT lo, hi, count;
tree position;
+ if (vector)
+ abort ();
+
/* If the range is constant and "small", unroll the loop. */
if (const_bounds_p
&& host_integerp (lo_index, 0)
expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
loop_end = gen_label_rtx ();
- unsignedp = TREE_UNSIGNED (domain);
+ unsignedp = TYPE_UNSIGNED (domain);
index = build_decl (VAR_DECL, NULL_TREE, domain);
{
tree position;
+ if (vector)
+ abort ();
+
if (index == 0)
index = ssize_int (1);
xtarget = adjust_address (xtarget, mode, 0);
store_expr (value, xtarget, 0);
}
+ else if (vector)
+ {
+ int pos;
+
+ if (index != 0)
+ pos = tree_low_cst (index, 0) - minelt;
+ else
+ pos = i;
+ vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
+ }
else
{
if (index != 0)
target = copy_rtx (target);
MEM_KEEP_ALIAS_SET_P (target) = 1;
}
-
store_constructor_field (target, bitsize, bitpos, mode, value,
type, cleared, get_alias_set (elttype));
-
}
}
+ if (vector)
+ {
+ emit_insn (GEN_FCN (icode) (target,
+ gen_rtx_PARALLEL (GET_MODE (target),
+ gen_rtvec_v (n_elts, vector))));
+ }
}
/* Set constructor assignments. */
{
if (word != 0 || ! cleared)
{
- rtx datum = GEN_INT (word);
+ rtx datum = gen_int_mode (word, mode);
rtx to_rtx;
/* The assumption here is that it is safe to use
{
targetx
= assign_temp
- ((build_qualified_type ((*lang_hooks.types.type_for_mode)
+ ((build_qualified_type (lang_hooks.types.type_for_mode
(GET_MODE (target), 0),
TYPE_QUAL_CONST)),
0, 1, 1);
enum machine_mode mode = VOIDmode;
tree offset = size_zero_node;
tree bit_offset = bitsize_zero_node;
- tree placeholder_ptr = 0;
tree tem;
/* First get the mode, signedness, and size. We do this from just the
if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
mode = DECL_MODE (TREE_OPERAND (exp, 1));
- *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
+ *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
}
else if (TREE_CODE (exp) == BIT_FIELD_REF)
{
size_tree = TREE_OPERAND (exp, 1);
- *punsignedp = TREE_UNSIGNED (exp);
+ *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
}
else
{
mode = TYPE_MODE (TREE_TYPE (exp));
- *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
+ *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
if (mode == BLKmode)
size_tree = TYPE_SIZE (TREE_TYPE (exp));
made during type construction. */
if (this_offset == 0)
break;
- else if (CONTAINS_PLACEHOLDER_P (this_offset))
- this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
+ else
+ this_offset = SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset, exp);
offset = size_binop (PLUS_EXPR, offset, this_offset);
bit_offset = size_binop (PLUS_EXPR, bit_offset,
index = fold (build (MINUS_EXPR, TREE_TYPE (index),
index, low_bound));
- /* If the index has a self-referential type, pass it to a
- WITH_RECORD_EXPR; if the component size is, pass our
- component to one. */
- if (CONTAINS_PLACEHOLDER_P (index))
- index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
- if (CONTAINS_PLACEHOLDER_P (unit_size))
- unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
-
+ /* If the index has a self-referential type, instantiate it with
+ the object; likewise for the component size. */
+ index = SUBSTITUTE_PLACEHOLDER_IN_EXPR (index, exp);
+ unit_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size, array);
offset = size_binop (PLUS_EXPR, offset,
size_binop (MULT_EXPR,
convert (sizetype, index),
unit_size));
}
- else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
- {
- tree new = find_placeholder (exp, &placeholder_ptr);
-
- /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
- We might have been called from tree optimization where we
- haven't set up an object yet. */
- if (new == 0)
- break;
- else
- exp = new;
-
- continue;
- }
-
/* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
conversions that don't change the mode, and all view conversions
except those that need to "step up" the alignment. */
rtx subtarget = get_subtarget (target);
enum rtx_code code = GET_CODE (value);
+  /* Check for a subreg applied to an expression produced by the
+     loop optimizer.  */
+ if (code == SUBREG
+ && GET_CODE (SUBREG_REG (value)) != REG
+ && GET_CODE (SUBREG_REG (value)) != MEM)
+ {
+ value = simplify_gen_subreg (GET_MODE (value),
+ force_reg (GET_MODE (SUBREG_REG (value)),
+ force_operand (SUBREG_REG (value),
+ NULL_RTX)),
+ GET_MODE (SUBREG_REG (value)),
+ SUBREG_BYTE (value));
+ code = GET_CODE (value);
+ }
+
/* Check for a PIC address load. */
if ((code == PLUS || code == MINUS)
&& XEXP (value, 0) == pic_offset_table_rtx
return target;
}
- if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
+ if (ARITHMETIC_P (value))
{
op2 = XEXP (value, 1);
if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
target, 1, OPTAB_LIB_WIDEN);
}
}
- if (GET_RTX_CLASS (code) == '1')
+ if (UNARY_P (value))
{
op1 = force_operand (XEXP (value, 0), NULL_RTX);
return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
/* A SAVE_EXPR might appear many times in the expression passed to the
top-level safe_from_p call, and if it has a complex subexpression,
examining it multiple times could result in a combinatorial explosion.
- E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
+ E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
with optimization took about 28 minutes to compile -- even though it was
only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
and turn that off when we are done. We keep a list of the SAVE_EXPRs
case '<':
if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
return 0;
- /* FALLTHRU */
+ /* Fall through. */
case '1':
return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
special handling. */
if ((unsigned int) TREE_CODE (exp)
>= (unsigned int) LAST_AND_UNUSED_TREE_CODE
- && !(*lang_hooks.safe_from_p) (x, exp))
+ && !lang_hooks.safe_from_p (x, exp))
return 0;
}
return 0;
}
}
-
-#ifdef MAX_INTEGER_COMPUTATION_MODE
-
-void
-check_max_integer_computation_mode (tree exp)
-{
- enum tree_code code;
- enum machine_mode mode;
-
- /* Strip any NOPs that don't change the mode. */
- STRIP_NOPS (exp);
- code = TREE_CODE (exp);
-
- /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
- if (code == NOP_EXPR
- && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
- return;
-
- /* First check the type of the overall operation. We need only look at
- unary, binary and relational operations. */
- if (TREE_CODE_CLASS (code) == '1'
- || TREE_CODE_CLASS (code) == '2'
- || TREE_CODE_CLASS (code) == '<')
- {
- mode = TYPE_MODE (TREE_TYPE (exp));
- if (GET_MODE_CLASS (mode) == MODE_INT
- && mode > MAX_INTEGER_COMPUTATION_MODE)
- internal_error ("unsupported wide integer operation");
- }
-
- /* Check operand of a unary op. */
- if (TREE_CODE_CLASS (code) == '1')
- {
- mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
- if (GET_MODE_CLASS (mode) == MODE_INT
- && mode > MAX_INTEGER_COMPUTATION_MODE)
- internal_error ("unsupported wide integer operation");
- }
-
- /* Check operands of a binary/comparison op. */
- if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
- {
- mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
- if (GET_MODE_CLASS (mode) == MODE_INT
- && mode > MAX_INTEGER_COMPUTATION_MODE)
- internal_error ("unsupported wide integer operation");
-
- mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
- if (GET_MODE_CLASS (mode) == MODE_INT
- && mode > MAX_INTEGER_COMPUTATION_MODE)
- internal_error ("unsupported wide integer operation");
- }
-}
-#endif
\f
/* Return the highest power of two that EXP is known to be a multiple of.
This is used in updating alignment of MEMs in array references. */
break;
case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
- case SAVE_EXPR: case WITH_RECORD_EXPR:
+ case SAVE_EXPR:
return highest_pow2_factor (TREE_OPERAND (exp, 0));
case COMPOUND_EXPR:
return 1;
}
-/* Similar, except that it is known that the expression must be a multiple
- of the alignment of TYPE. */
+/* Similar, except that the alignment requirements of TARGET are
+ taken into account. Assume it is at least as aligned as its
+ type, unless it is a COMPONENT_REF in which case the layout of
+ the structure gives the alignment. */
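+
+/* For instance, if EXP is known to be a multiple of 4 bytes and TARGET is
+   a COMPONENT_REF of a field with 64-bit DECL_ALIGN, the result is
+   MAX (4, 8) = 8 bytes.  */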
static unsigned HOST_WIDE_INT
-highest_pow2_factor_for_type (tree type, tree exp)
+highest_pow2_factor_for_target (tree target, tree exp)
{
- unsigned HOST_WIDE_INT type_align, factor;
+ unsigned HOST_WIDE_INT target_align, factor;
factor = highest_pow2_factor (exp);
- type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
- return MAX (factor, type_align);
+ if (TREE_CODE (target) == COMPONENT_REF)
+ target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
+ else
+ target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
+ return MAX (factor, target_align);
}
\f
-/* Return an object on the placeholder list that matches EXP, a
- PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
- PLACEHOLDER_EXPR or a pointer type to it. For further information, see
- tree.def. If no such object is found, return 0. If PLIST is nonzero, it
- is a location which initially points to a starting location in the
- placeholder list (zero means start of the list) and where a pointer into
- the placeholder list at which the object is found is placed. */
-
-tree
-find_placeholder (tree exp, tree *plist)
-{
- tree type = TREE_TYPE (exp);
- tree placeholder_expr;
-
- for (placeholder_expr
- = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
- placeholder_expr != 0;
- placeholder_expr = TREE_CHAIN (placeholder_expr))
- {
- tree need_type = TYPE_MAIN_VARIANT (type);
- tree elt;
-
- /* Find the outermost reference that is of the type we want. If none,
- see if any object has a type that is a pointer to the type we
- want. */
- for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
- elt = ((TREE_CODE (elt) == COMPOUND_EXPR
- || TREE_CODE (elt) == COND_EXPR)
- ? TREE_OPERAND (elt, 1)
- : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
- || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
- || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
- || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
- ? TREE_OPERAND (elt, 0) : 0))
- if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
- {
- if (plist)
- *plist = placeholder_expr;
- return elt;
- }
-
- for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
- elt
- = ((TREE_CODE (elt) == COMPOUND_EXPR
- || TREE_CODE (elt) == COND_EXPR)
- ? TREE_OPERAND (elt, 1)
- : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
- || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
- || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
- || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
- ? TREE_OPERAND (elt, 0) : 0))
- if (POINTER_TYPE_P (TREE_TYPE (elt))
- && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
- == need_type))
- {
- if (plist)
- *plist = placeholder_expr;
- return build1 (INDIRECT_REF, need_type, elt);
- }
- }
-
- return 0;
-}
-
/* Subroutine of expand_expr. Expand the two operands of a binary
expression EXP0 and EXP1 placing the results in OP0 and OP1.
The value may be stored in TARGET if TARGET is nonzero. The
marked TARGET so that it's safe from being trashed by libcalls. We
don't want to use TARGET for anything but the final result;
Intermediate values must go elsewhere. Additionally, calls to
- emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
+ emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
+
+ If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
+ address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
+ DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
+ COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
+ recursively. */
rtx
-expand_expr (tree exp, rtx target, enum machine_mode tmode,
- enum expand_modifier modifier)
+expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
+ enum expand_modifier modifier, rtx *alt_rtl)
{
rtx op0, op1, temp;
tree type = TREE_TYPE (exp);
- int unsignedp = TREE_UNSIGNED (type);
+ int unsignedp;
enum machine_mode mode;
enum tree_code code = TREE_CODE (exp);
optab this_optab;
}
mode = TYPE_MODE (type);
+ unsignedp = TYPE_UNSIGNED (type);
+
/* Use subtarget as the target for operand 0 of a binary operation. */
subtarget = get_subtarget (target);
original_target = target;
target = 0;
}
-#ifdef MAX_INTEGER_COMPUTATION_MODE
- /* Only check stuff here if the mode we want is different from the mode
- of the expression; if it's the same, check_max_integer_computation_mode
- will handle it. Do we really need to check this stuff at all? */
-
- if (target
- && GET_MODE (target) != mode
- && TREE_CODE (exp) != INTEGER_CST
- && TREE_CODE (exp) != PARM_DECL
- && TREE_CODE (exp) != ARRAY_REF
- && TREE_CODE (exp) != ARRAY_RANGE_REF
- && TREE_CODE (exp) != COMPONENT_REF
- && TREE_CODE (exp) != BIT_FIELD_REF
- && TREE_CODE (exp) != INDIRECT_REF
- && TREE_CODE (exp) != CALL_EXPR
- && TREE_CODE (exp) != VAR_DECL
- && TREE_CODE (exp) != RTL_EXPR)
- {
- enum machine_mode mode = GET_MODE (target);
-
- if (GET_MODE_CLASS (mode) == MODE_INT
- && mode > MAX_INTEGER_COMPUTATION_MODE)
- internal_error ("unsupported wide integer operation");
- }
-
- if (tmode != mode
- && TREE_CODE (exp) != INTEGER_CST
- && TREE_CODE (exp) != PARM_DECL
- && TREE_CODE (exp) != ARRAY_REF
- && TREE_CODE (exp) != ARRAY_RANGE_REF
- && TREE_CODE (exp) != COMPONENT_REF
- && TREE_CODE (exp) != BIT_FIELD_REF
- && TREE_CODE (exp) != INDIRECT_REF
- && TREE_CODE (exp) != VAR_DECL
- && TREE_CODE (exp) != CALL_EXPR
- && TREE_CODE (exp) != RTL_EXPR
- && GET_MODE_CLASS (tmode) == MODE_INT
- && tmode > MAX_INTEGER_COMPUTATION_MODE)
- internal_error ("unsupported wide integer operation");
-
- check_max_integer_computation_mode (exp);
-#endif
-
/* If will do cse, generate all results into pseudo registers
since 1) that allows cse to find more things
and 2) otherwise cse could produce an insn the machine
DECL_NONLOCAL (exp) = 1;
if (DECL_NO_STATIC_CHAIN (current_function_decl))
abort ();
- (*lang_hooks.mark_addressable) (exp);
+ lang_hooks.mark_addressable (exp);
if (GET_CODE (DECL_RTL (exp)) != MEM)
abort ();
addr = XEXP (DECL_RTL (exp), 0);
XEXP (DECL_RTL (exp), 0))
|| (flag_force_addr
&& GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
- temp = replace_equiv_address (DECL_RTL (exp),
- copy_rtx (XEXP (DECL_RTL (exp), 0)));
+ {
+ if (alt_rtl)
+ *alt_rtl = DECL_RTL (exp);
+ temp = replace_equiv_address (DECL_RTL (exp),
+ copy_rtx (XEXP (DECL_RTL (exp), 0)));
+ }
/* If we got something, return it. But first, set the alignment
if the address is a register. */
rtx temp;
temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
TREE_OPERAND (exp, 0)
- = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
+ = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
return temp;
}
- case PLACEHOLDER_EXPR:
- {
- tree old_list = placeholder_list;
- tree placeholder_expr = 0;
-
- exp = find_placeholder (exp, &placeholder_expr);
- if (exp == 0)
- abort ();
-
- placeholder_list = TREE_CHAIN (placeholder_expr);
- temp = expand_expr (exp, original_target, tmode, modifier);
- placeholder_list = old_list;
- return temp;
- }
-
- case WITH_RECORD_EXPR:
- /* Put the object on the placeholder list, expand our first operand,
- and pop the list. */
- placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
- placeholder_list);
- target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
- modifier);
- placeholder_list = TREE_CHAIN (placeholder_list);
- return target;
-
case GOTO_EXPR:
if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
expand_goto (TREE_OPERAND (exp, 0));
/* Mark the corresponding BLOCK for output in its proper place. */
if (TREE_OPERAND (exp, 2) != 0
&& ! TREE_USED (TREE_OPERAND (exp, 2)))
- (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
+ lang_hooks.decls.insert_block (TREE_OPERAND (exp, 2));
/* If VARS have not yet been expanded, expand them now. */
while (vars)
}
preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
free_temps_for_rtl_expr (exp);
+ if (alt_rtl)
+ *alt_rtl = RTL_EXPR_ALT_RTL (exp);
return RTL_EXPR_RTL (exp);
case CONSTRUCTOR:
&& modifier != EXPAND_MEMORY
&& TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
&& TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
- && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
+ && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
+ && targetm.binds_local_p (array))
{
if (TREE_CODE (index) == INTEGER_CST)
{
enum machine_mode imode
= TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
- if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
{
op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
op0 = expand_and (imode, op0, op1, target);
(TYPE_QUALS (TREE_TYPE (tem))
| TYPE_QUAL_CONST));
rtx memloc = assign_temp (nt, 1, 1, 1);
-
+
emit_move_insn (memloc, op0);
op0 = memloc;
}
offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
- /* A constant address in OP0 can have VOIDmode, we must not try
- to call force_reg for that case. Avoid that case. */
- if (GET_CODE (op0) == MEM
- && GET_MODE (op0) == BLKmode
+ if (GET_MODE (op0) == BLKmode
+ /* A constant address in OP0 can have VOIDmode, we must
+ not try to call force_reg in that case. */
&& GET_MODE (XEXP (op0, 0)) != VOIDmode
&& bitsize != 0
&& (bitpos % bitsize) == 0
fetch it as a bit field. */
|| (mode1 != BLKmode
&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
- || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
+ || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
+ || (GET_CODE (op0) == MEM
+ && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
+ || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
&& ((modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_INITIALIZER)
? STRICT_ALIGNMENT
if (ext_mode == BLKmode)
{
+ if (target == 0)
+ target = assign_temp (type, 0, 1, 1);
+
+ if (bitsize == 0)
+ return target;
+
/* In this case, BITPOS must start at a byte boundary and
TARGET, if specified, must be a MEM. */
if (GET_CODE (op0) != MEM
|| bitpos % BITS_PER_UNIT != 0)
abort ();
- op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
- if (target == 0)
- target = assign_temp (type, 0, 1, 1);
-
- emit_block_move (target, op0,
+ emit_block_move (target,
+ adjust_address (op0, VOIDmode,
+ bitpos / BITS_PER_UNIT),
GEN_INT ((bitsize + BITS_PER_UNIT - 1)
/ BITS_PER_UNIT),
(modifier == EXPAND_STACK_PARM
- bitsize),
op0, 1);
+ /* If the result type is BLKmode, store the data into a temporary
+ of the appropriate type, but with the mode corresponding to the
+ mode for the data we have (op0's mode). It's tempting to make
+ this a constant type, since we know it's only being stored once,
+ but that can cause problems if we are taking the address of this
+ COMPONENT_REF because the MEM of any reference via that address
+ will have flags corresponding to the type, which will not
+ necessarily be constant. */
if (mode == BLKmode)
{
- rtx new = assign_temp (build_qualified_type
- ((*lang_hooks.types.type_for_mode)
- (ext_mode, 0),
- TYPE_QUAL_CONST), 0, 1, 1);
+ rtx new
+ = assign_stack_temp_for_type
+ (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
emit_move_insn (new, op0);
op0 = copy_rtx (new);
tree set = TREE_OPERAND (exp, 0);
tree index = TREE_OPERAND (exp, 1);
- int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
+ int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
tree set_type = TREE_TYPE (set);
tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
{
if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
== BUILT_IN_FRONTEND)
+ /* ??? Use (*fun) form because expand_expr is a macro. */
return (*lang_hooks.expand_expr) (exp, original_target,
- tmode, modifier);
+ tmode, modifier,
+ alt_rtl);
else
return expand_builtin (exp, target, subtarget, tmode, ignore);
}
}
if (target == 0)
- target = assign_temp (type, 0, 1, 1);
+ {
+ if (TYPE_MODE (type) != BLKmode)
+ target = gen_reg_rtx (TYPE_MODE (type));
+ else
+ target = assign_temp (type, 0, 1, 1);
+ }
if (GET_CODE (target) == MEM)
/* Store data into beginning of memory target. */
/* If the signedness of the conversion differs and OP0 is
a promoted SUBREG, clear that indication since we now
have to do the proper extension. */
- if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
&& GET_CODE (op0) == SUBREG)
SUBREG_PROMOTED_VAR_P (op0) = 0;
inner_mode));
else
return convert_modes (mode, inner_mode, op0,
- TREE_UNSIGNED (inner_type));
+ TYPE_UNSIGNED (inner_type));
}
if (modifier == EXPAND_INITIALIZER)
if (target == 0)
return
convert_to_mode (mode, op0,
- TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
+ TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
else
convert_move (target, op0,
- TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
+ TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
return target;
case VIEW_CONVERT_EXPR:
}
}
- if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
- subtarget = 0;
-
/* No sense saving up arithmetic to be done
if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
EXPAND_SUM);
- /* If we knew for certain that this is arithmetic for an array
- reference, and we knew the bounds of the array, then we could
- apply the distributive law across (PLUS X C) for constant C.
- Without such knowledge, we risk overflowing the computation
- when both X and C are large, but X+C isn't. */
- /* ??? Could perhaps special-case EXP being unsigned and C being
- positive. In that case we are certain that X+C is no smaller
- than X and so the transformed expression will overflow iff the
- original would have. */
-
if (GET_CODE (op0) != REG)
op0 = force_operand (op0, NULL_RTX);
if (GET_CODE (op0) != REG)
TYPE_MODE (TREE_TYPE (exp1))));
}
- if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
- subtarget = 0;
-
if (modifier == EXPAND_STACK_PARM)
target = 0;
|| exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
||
(TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
- && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
- ==
- TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
+ && (TYPE_PRECISION (TREE_TYPE
+ (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
+ == TYPE_PRECISION (TREE_TYPE
+ (TREE_OPERAND
+ (TREE_OPERAND (exp, 0), 0))))
/* If both operands are extended, they must either both
be zero-extended or both be sign-extended. */
- && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
- ==
- TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
- {
- enum machine_mode innermode
- = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
- optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
- ? smul_widen_optab : umul_widen_optab);
- this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
- ? umul_widen_optab : smul_widen_optab);
+ && (TYPE_UNSIGNED (TREE_TYPE
+ (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
+ == TYPE_UNSIGNED (TREE_TYPE
+ (TREE_OPERAND
+ (TREE_OPERAND (exp, 0), 0)))))))
+ {
+ tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
+ enum machine_mode innermode = TYPE_MODE (op0type);
+ bool zextend_p = TYPE_UNSIGNED (op0type);
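+	  /* When the inner operands are zero-extended, the unsigned
+	     widening multiply is the direct choice; the signed variant is
+	     kept as a fallback, with the high part of the result fixed up
+	     afterwards by expand_mult_highpart_adjust.  */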
+ optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
+ this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
+
if (mode == GET_MODE_WIDER_MODE (innermode))
{
if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
&& innermode == word_mode)
{
- rtx htem;
+ rtx htem, hipart;
op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
NULL_RTX, VOIDmode, 0);
if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
NULL_RTX, VOIDmode, 0);
temp = expand_binop (mode, other_optab, op0, op1, target,
unsignedp, OPTAB_LIB_WIDEN);
- htem = expand_mult_highpart_adjust (innermode,
- gen_highpart (innermode, temp),
- op0, op1,
- gen_highpart (innermode, temp),
- unsignedp);
- emit_move_insn (gen_highpart (innermode, temp), htem);
+ hipart = gen_highpart (innermode, temp);
+ htem = expand_mult_highpart_adjust (innermode, hipart,
+ op0, op1, hipart,
+ zextend_p);
+ if (htem != hipart)
+ emit_move_insn (hipart, htem);
return temp;
}
}
op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
op0);
expand_float (target, op0,
- TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
+ TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
return target;
case NEGATE_EXPR:
/* Unsigned abs is simply the operand. Testing here means we don't
risk generating incorrect code below. */
- if (TREE_UNSIGNED (type))
+ if (TYPE_UNSIGNED (type))
return op0;
return expand_abs (mode, op0, target, unsignedp,
target = original_target;
if (target == 0
|| modifier == EXPAND_STACK_PARM
- || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
|| (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
|| GET_MODE (target) != mode
|| (GET_CODE (target) == REG
/* First try to do it with a special MIN or MAX instruction.
If that does not win, use a conditional jump to select the proper
value. */
- this_optab = (TREE_UNSIGNED (type)
+ this_optab = (unsignedp
? (code == MIN_EXPR ? umin_optab : umax_optab)
: (code == MIN_EXPR ? smin_optab : smax_optab));
if (GET_CODE (target) == MEM)
target = gen_reg_rtx (mode);
+ /* If op1 was placed in target, swap op0 and op1. */
+ if (target != op0 && target == op1)
+ {
+ rtx tem = op0;
+ op0 = op1;
+ op1 = tem;
+ }
+
if (target != op0)
emit_move_insn (target, op0);
&& ! can_compare_p (GE, mode, ccp_jump))
{
if (code == MAX_EXPR)
- do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
- target, op1, NULL_RTX, op0);
+ do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
+ NULL_RTX, op0);
else
- do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
- op1, target, NULL_RTX, op0);
+ do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
+ NULL_RTX, op0);
}
else
{
- int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
- unsignedp, mode, NULL_RTX, NULL_RTX,
- op0);
+ unsignedp, mode, NULL_RTX, NULL_RTX, op0);
}
emit_move_insn (target, op1);
emit_label (op0);
case COMPOUND_EXPR:
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
emit_queue ();
- return expand_expr (TREE_OPERAND (exp, 1),
- (ignore ? const0_rtx : target),
- VOIDmode, modifier);
+ return expand_expr_real (TREE_OPERAND (exp, 1),
+ (ignore ? const0_rtx : target),
+ VOIDmode, modifier, alt_rtl);
case COND_EXPR:
/* If we would have a "singleton" (see below) were it not for a
else
{
target = assign_temp (type, 2, 0, 1);
- /* All temp slots at this level must not conflict. */
- preserve_temp_slots (target);
SET_DECL_RTL (slot, target);
if (TREE_ADDRESSABLE (slot))
put_var_into_stack (slot, /*rescan=*/false);
if (TREE_OPERAND (exp, 2) == 0)
TREE_OPERAND (exp, 2)
- = (*lang_hooks.maybe_build_cleanup) (slot);
+ = lang_hooks.maybe_build_cleanup (slot);
cleanups = TREE_OPERAND (exp, 2);
}
}
abort ();
default:
- return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
+ /* ??? Use (*fun) form because expand_expr is a macro. */
+ return (*lang_hooks.expand_expr) (exp, original_target, tmode,
+ modifier, alt_rtl);
}
/* Here to do an ordinary binary operator, generating an instruction
static int
is_aligning_offset (tree offset, tree exp)
{
- /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
+ /* Strip off any conversions. */
while (TREE_CODE (offset) == NON_LVALUE_EXPR
|| TREE_CODE (offset) == NOP_EXPR
- || TREE_CODE (offset) == CONVERT_EXPR
- || TREE_CODE (offset) == WITH_RECORD_EXPR)
+ || TREE_CODE (offset) == CONVERT_EXPR)
offset = TREE_OPERAND (offset, 0);
/* We must now have a BIT_AND_EXPR with a constant that is one less than
power of 2 and which is larger than BIGGEST_ALIGNMENT. */
if (TREE_CODE (offset) != BIT_AND_EXPR
|| !host_integerp (TREE_OPERAND (offset, 1), 1)
- || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
+ || compare_tree_int (TREE_OPERAND (offset, 1),
+ BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
return 0;
|| TREE_CODE (offset) == CONVERT_EXPR)
offset = TREE_OPERAND (offset, 0);
- /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
- whose type is the same as EXP. */
- return (TREE_CODE (offset) == ADDR_EXPR
- && (TREE_OPERAND (offset, 0) == exp
- || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
- && (TREE_TYPE (TREE_OPERAND (offset, 0))
- == TREE_TYPE (exp)))));
+ /* This must now be the address of EXP. */
+ return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
\f
/* Return the tree node if an ARG corresponds to a string constant or zero
{
/* We have a true reference to the value in OP0.
If there is an insn to add or subtract in this mode, queue it.
- Queueing the increment insn avoids the register shuffling
+ Queuing the increment insn avoids the register shuffling
that often results if we must increment now and first save
the old value for subsequent use. */
/* Increment however we can. */
op1 = expand_binop (mode, this_optab, value, op1, op0,
- TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
+ TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
/* Make sure the value is stored into OP0. */
if (op1 != op0)
type = TREE_TYPE (arg0);
operand_mode = TYPE_MODE (type);
- unsignedp = TREE_UNSIGNED (type);
+ unsignedp = TYPE_UNSIGNED (type);
/* We won't bother with BLKmode store-flag operations because it would mean
passing a lot of information to emit_store_flag. */
&& TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
- tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
+ tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
- arg0, arg1, type),
+ arg0, arg1, type),
target, VOIDmode, EXPAND_NORMAL);
}
}
if (! get_subtarget (target)
- || GET_MODE (subtarget) != operand_mode
- || ! safe_from_p (subtarget, arg1, 1))
+ || GET_MODE (subtarget) != operand_mode)
subtarget = 0;
expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
{
if (TYPE_MODE (index_type) != index_mode)
{
- index_expr = convert ((*lang_hooks.types.type_for_size)
+ index_expr = convert (lang_hooks.types.type_for_size
(index_bits, 0), index_expr);
index_type = TREE_TYPE (index_expr);
}
op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
- op1, TREE_UNSIGNED (TREE_TYPE (minval)));
+ op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
(op1, op_mode))
op1 = copy_to_mode_reg (op_mode, op1);
op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
- op2, TREE_UNSIGNED (TREE_TYPE (range)));
+ op2, TYPE_UNSIGNED (TREE_TYPE (range)));
if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
(op2, op_mode))
op2 = copy_to_mode_reg (op_mode, op2);
if (mode != Pmode)
index = convert_to_mode (Pmode, index, 1);
- /* Don't let a MEM slip thru, because then INDEX that comes
+ /* Don't let a MEM slip through, because then INDEX that comes
out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
and break_out_memory_refs will go to work on it and mess it up. */
#ifdef PIC_CASE_VECTOR_ADDRESS
TYPE_MODE (TREE_TYPE (range)),
expand_expr (range, NULL_RTX,
VOIDmode, 0),
- TREE_UNSIGNED (TREE_TYPE (range))),
+ TYPE_UNSIGNED (TREE_TYPE (range))),
table_label, default_label);
return 1;
}