/* Convert tree expression to rtl instructions, for GNU compiler.
Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001 Free Software Foundation, Inc.
+ 2000, 2001, 2002 Free Software Foundation, Inc.
This file is part of GCC.
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
+#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#define CASE_VECTOR_PC_RELATIVE 0
#endif
-/* Hook called by safe_from_p for language-specific tree codes. It is
- up to the language front-end to install a hook if it has any such
- codes that safe_from_p needs to know about. Since same_from_p will
- recursively explore the TREE_OPERANDs of an expression, this hook
- should not reexamine those pieces. This routine may recursively
- call safe_from_p; it should always pass `0' as the TOP_P
- parameter. */
-int (*lang_safe_from_p) PARAMS ((rtx, tree));
-
/* If this is nonzero, we do not bother generating VOLATILE
around volatile memory references, and we are willing to
output indirect addresses. If cse is to follow, we reject
the same indirect address eventually. */
int cse_not_expected;
-/* Don't check memory usage, since code is being emitted to check a memory
- usage. Used when current_function_check_memory_usage is true, to avoid
- infinite recursion. */
-static int in_check_memory_usage;
-
/* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
static tree placeholder_list = 0;
extern struct obstack permanent_obstack;
-static rtx get_push_address PARAMS ((int));
-
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
PARAMS ((unsigned HOST_WIDE_INT,
HOST_WIDE_INT, enum machine_mode,
tree, enum machine_mode, int, tree,
int));
-static enum memory_use_mode
- get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
+static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
if ((code = can_extend_p (to_mode, from_mode, unsignedp))
!= CODE_FOR_nothing)
{
+ if (flag_force_mem)
+ from = force_not_mem (from);
+
emit_unop_insn (code, to, from, equiv_code);
return;
}
&& (val & ((HOST_WIDE_INT) 1 << (width - 1))))
val |= (HOST_WIDE_INT) (-1) << width;
- return GEN_INT (trunc_int_for_mode (val, mode));
+ return gen_int_mode (val, mode);
}
return gen_lowpart (mode, x);
from1 = adjust_address (data->from, mode, data->offset);
if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
- emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
+ emit_insn (gen_add2_insn (data->to_addr,
+ GEN_INT (-(HOST_WIDE_INT)size)));
if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
- emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
+ emit_insn (gen_add2_insn (data->from_addr,
+ GEN_INT (-(HOST_WIDE_INT)size)));
if (data->to)
emit_insn ((*genfun) (to1, from1));
/* If SIZE is that of a mode no bigger than a word, just use that
mode's store operation. */
if (size <= UNITS_PER_WORD
- && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
+ && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
+ && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
{
emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
return;
/* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
to the left before storing to memory. Note that the previous test
doesn't handle all cases (e.g. SIZE == 3). */
- if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
+ if (size < UNITS_PER_WORD
+ && BYTES_BIG_ENDIAN
+ && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
{
rtx tem = operand_subword (x, 0, 1, BLKmode);
rtx shift;
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
registers represented by a PARALLEL. SSIZE represents the total size of
block SRC in bytes, or -1 if not known. */
-/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
+/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
the balance will be in what would be the low-order memory addresses, i.e.
left justified for big endian, right justified for little endian. This
happens to be true for the targets currently using this support. If this
}
else if (GET_CODE (src) == CONCAT)
{
- if (bytepos == 0
- && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
- tmps[i] = XEXP (src, 0);
- else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
- && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
- tmps[i] = XEXP (src, 1);
+ if ((bytepos == 0
+ && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
+ || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
+ && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
+ {
+ tmps[i] = XEXP (src, bytepos != 0);
+ if (! CONSTANT_P (tmps[i])
+ && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
+ tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
+ 0, 1, NULL_RTX, mode, mode, ssize);
+ }
else if (bytepos == 0)
{
rtx mem = assign_stack_temp (GET_MODE (src),
emit_group_load (dst, temp, ssize);
return;
}
- else if (GET_CODE (dst) != MEM)
+ else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
{
dst = gen_reg_rtx (GET_MODE (orig_dst));
/* Make life a bit easier for combine. */
HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
enum machine_mode mode = GET_MODE (tmps[i]);
unsigned int bytelen = GET_MODE_SIZE (mode);
+ rtx dest = dst;
/* Handle trailing fragments that run over the size of the struct. */
if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
bytelen = ssize - bytepos;
}
+ if (GET_CODE (dst) == CONCAT)
+ {
+ if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
+ dest = XEXP (dst, 0);
+ else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
+ {
+ bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
+ dest = XEXP (dst, 1);
+ }
+ else
+ abort ();
+ }
+
/* Optimize the access just a bit. */
- if (GET_CODE (dst) == MEM
- && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
+ if (GET_CODE (dest) == MEM
+ && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
&& bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
&& bytelen == GET_MODE_SIZE (mode))
- emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
+ emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
else
- store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
+ store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
mode, tmps[i], ssize);
}
preserve_temp_slots (tgtblk);
}
- /* This code assumes srcreg is at least a full word. If it isn't,
- copy it into a new pseudo which is a full word. */
+ /* This code assumes srcreg is at least a full word. If it isn't, copy it
+ into a new pseudo which is a full word.
+
+ If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
+ the wrong part of the register gets copied so we fake a type conversion
+ in place. */
if (GET_MODE (srcreg) != BLKmode
&& GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
- srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
+ {
+ if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
+ srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
+ else
+ srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
+ }
/* Structures whose size is not a multiple of a word are aligned
to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
machine, this means we must skip the empty high order bytes when
calculating the bit offset. */
- if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
+ if (BYTES_BIG_ENDIAN
+ && !FUNCTION_ARG_REG_LITTLE_ENDIAN
+ && bytes % UNITS_PER_WORD)
big_endian_correction
= (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
enum machine_mode mode = GET_MODE (x);
enum machine_mode submode;
enum mode_class class = GET_MODE_CLASS (mode);
- unsigned int i;
if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
abort ();
/* In case we output to the stack, but the size is smaller machine can
push exactly, we need to use move instructions. */
if (stack
- && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
+ && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
+ != GET_MODE_SIZE (submode)))
{
rtx temp;
- int offset1, offset2;
+ HOST_WIDE_INT offset1, offset2;
/* Do not use anti_adjust_stack, since we don't want to update
stack_pointer_delta. */
#endif
stack_pointer_rtx,
GEN_INT
- (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
- stack_pointer_rtx,
- 0,
- OPTAB_LIB_WIDEN);
+ (PUSH_ROUNDING
+ (GET_MODE_SIZE (GET_MODE (x)))),
+ stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
+
if (temp != stack_pointer_rtx)
emit_move_insn (stack_pointer_rtx, temp);
+
#ifdef STACK_GROWS_DOWNWARD
offset1 = 0;
offset2 = GET_MODE_SIZE (submode);
offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
+ GET_MODE_SIZE (submode));
#endif
+
emit_move_insn (change_address (x, submode,
gen_rtx_PLUS (Pmode,
stack_pointer_rtx,
if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
&& (reload_in_progress | reload_completed) == 0)
{
- int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
- int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
+ int packed_dest_p
+ = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
+ int packed_src_p
+ = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
if (packed_dest_p || packed_src_p)
{
if (packed_dest_p)
{
rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
+
emit_move_insn_1 (cmem, y);
return emit_move_insn_1 (sreg, mem);
}
else
{
rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
+
emit_move_insn_1 (mem, sreg);
return emit_move_insn_1 (x, cmem);
}
&& ! (reload_in_progress || reload_completed)
&& (GET_CODE (realpart_x) == SUBREG
|| GET_CODE (imagpart_x) == SUBREG))
- {
- emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
- }
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
(realpart_x, realpart_y));
rtx last_insn = 0;
rtx seq, inner;
int need_clobber;
+ int i;
#ifdef PUSH_ROUNDING
#endif
stack_pointer_rtx,
GEN_INT
- (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
- stack_pointer_rtx,
- 0,
- OPTAB_LIB_WIDEN);
+ (PUSH_ROUNDING
+ (GET_MODE_SIZE (GET_MODE (x)))),
+ stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
+
if (temp != stack_pointer_rtx)
emit_move_insn (stack_pointer_rtx, temp);
code = GET_CODE (XEXP (x, 0));
+
/* Just hope that small offsets off SP are OK. */
if (code == POST_INC)
temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
- GEN_INT (-(HOST_WIDE_INT)
- GET_MODE_SIZE (GET_MODE (x))));
+ GEN_INT (-((HOST_WIDE_INT)
+ GET_MODE_SIZE (GET_MODE (x)))));
else if (code == POST_DEC)
temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
if (x != y
&& ! (reload_in_progress || reload_completed)
&& need_clobber != 0)
- {
- emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
- }
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
emit_insn (seq);
return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
-
-/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
- block of SIZE bytes. */
-
-static rtx
-get_push_address (size)
- int size;
-{
- rtx temp;
-
- if (STACK_PUSH_CODE == POST_DEC)
- temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
- else if (STACK_PUSH_CODE == POST_INC)
- temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
- else
- temp = stack_pointer_rtx;
-
- return copy_to_reg (temp);
-}
-
#ifdef PUSH_ROUNDING
/* Emit single push insn. */
if (icode != CODE_FOR_nothing)
{
if (((pred = insn_data[(int) icode].operand[0].predicate)
- && !((*pred) (x, mode))))
+ && !((*pred) (x, mode))))
x = force_reg (mode, x);
emit_insn (GEN_FCN (icode) (x));
return;
{
#ifdef STACK_GROWS_DOWNWARD
dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
- GEN_INT (-(HOST_WIDE_INT)rounded_size));
+ GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
GEN_INT (rounded_size));
if (type != 0)
{
set_mem_attributes (dest, type, 1);
- /* Function incoming arguments may overlap with sibling call
- outgoing arguments and we cannot allow reordering of reads
- from function arguments with stores to outgoing arguments
- of sibling calls. */
- set_mem_alias_set (dest, 0);
+
+ if (flag_optimize_sibling_calls)
+ /* Function incoming arguments may overlap with sibling call
+ outgoing arguments and we cannot allow reordering of reads
+ from function arguments with stores to outgoing arguments
+ of sibling calls. */
+ set_mem_alias_set (dest, 0);
}
emit_move_insn (dest, x);
}
anti_adjust_stack (GEN_INT (extra));
move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
-
- if (current_function_check_memory_usage && ! in_check_memory_usage)
- {
- rtx temp;
-
- in_check_memory_usage = 1;
- temp = get_push_address (INTVAL (size) - used);
- if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
- emit_library_call (chkr_copy_bitmap_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
- Pmode, XEXP (xinner, 0), Pmode,
- GEN_INT (INTVAL (size) - used),
- TYPE_MODE (sizetype));
- else
- emit_library_call (chkr_set_right_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
- Pmode, GEN_INT (INTVAL (size) - used),
- TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_RW),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
- }
}
else
#endif /* PUSH_ROUNDING */
args_addr,
args_so_far),
skip));
- if (current_function_check_memory_usage && ! in_check_memory_usage)
- {
- in_check_memory_usage = 1;
- target = copy_to_reg (temp);
- if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
- emit_library_call (chkr_copy_bitmap_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
- target, Pmode,
- XEXP (xinner, 0), Pmode,
- size, TYPE_MODE (sizetype));
- else
- emit_library_call (chkr_set_right_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
- target, Pmode,
- size, TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_RW),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
- }
-
target = gen_rtx_MEM (BLKmode, temp);
if (type != 0)
}
emit_move_insn (dest, x);
-
}
- if (current_function_check_memory_usage && ! in_check_memory_usage)
- {
- in_check_memory_usage = 1;
- if (target == 0)
- target = get_push_address (GET_MODE_SIZE (mode));
-
- if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
- emit_library_call (chkr_copy_bitmap_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
- Pmode, XEXP (x, 0), Pmode,
- GEN_INT (GET_MODE_SIZE (mode)),
- TYPE_MODE (sizetype));
- else
- emit_library_call (chkr_set_right_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
- Pmode, GEN_INT (GET_MODE_SIZE (mode)),
- TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_RW),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
- }
}
ret:
if (mode1 == VOIDmode && want_value)
tem = stabilize_reference (tem);
- orig_to_rtx = to_rtx
- = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
+ orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
+
if (offset != 0)
{
- rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
+ rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
if (GET_CODE (to_rtx) != MEM)
abort ();
&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
&& MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
{
- rtx temp
- = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
-
- if (GET_CODE (XEXP (temp, 0)) == REG)
- to_rtx = temp;
- else
- to_rtx = (replace_equiv_address
- (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
- XEXP (temp, 0))));
+ to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
bitpos = 0;
}
highest_pow2_factor (offset));
}
+ if (GET_CODE (to_rtx) == MEM)
+ {
+ tree old_expr = MEM_EXPR (to_rtx);
+
+ /* If the field is at offset zero, we could have been given the
+ DECL_RTX of the parent struct. Don't munge it. */
+ to_rtx = shallow_copy_rtx (to_rtx);
+
+ set_mem_attributes (to_rtx, to, 0);
+
+ /* If we changed MEM_EXPR, that means we're now referencing
+ the COMPONENT_REF, which means that MEM_OFFSET must be
+ relative to that field. But we've not yet reflected BITPOS
+ in TO_RTX. This will be done in store_field. Adjust for
+ that by biasing MEM_OFFSET by -bitpos. */
+ if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
+ && (bitpos / BITS_PER_UNIT) != 0)
+ set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
+ - (bitpos / BITS_PER_UNIT)));
+ }
/* Deal with volatile and readonly fields. The former is only done
for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
RTX_UNCHANGING_P (to_rtx) = 1;
}
- if (! can_address_p (to))
+ if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
{
if (to_rtx == orig_to_rtx)
to_rtx = copy_rtx (to_rtx);
MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
}
- /* Check the access. */
- if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
- {
- rtx to_addr;
- int size;
- int best_mode_size;
- enum machine_mode best_mode;
-
- best_mode = get_best_mode (bitsize, bitpos,
- TYPE_ALIGN (TREE_TYPE (tem)),
- mode1, volatilep);
- if (best_mode == VOIDmode)
- best_mode = QImode;
-
- best_mode_size = GET_MODE_BITSIZE (best_mode);
- to_addr = plus_constant (XEXP (to_rtx, 0), bitpos / BITS_PER_UNIT);
- size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
- size *= GET_MODE_SIZE (best_mode);
-
- /* Check the access right of the pointer. */
- in_check_memory_usage = 1;
- if (size)
- emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
- VOIDmode, 3, to_addr, Pmode,
- GEN_INT (size), TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_WO),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
- }
-
result = store_field (to_rtx, bitsize, bitpos, mode1, from,
(want_value
/* Spurious cast for HPUX compiler. */
push_temp_slots ();
value = expand_expr (from, NULL_RTX, VOIDmode, 0);
if (to_rtx == 0)
- to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
+ to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
/* Handle calls that return values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
Don't re-expand if it was expanded already (in COMPONENT_REF case). */
if (to_rtx == 0)
- to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
+ to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
/* Don't move directly into a return register. */
if (TREE_CODE (to) == RESULT_DECL
push_temp_slots ();
size = expr_size (from);
- from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
- EXPAND_MEMORY_USE_DONT);
-
- /* Copy the rights of the bitmap. */
- if (current_function_check_memory_usage)
- emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
- VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
- XEXP (from_rtx, 0), Pmode,
- convert_to_mode (TYPE_MODE (sizetype),
- size, TREE_UNSIGNED (sizetype)),
- TYPE_MODE (sizetype));
+ from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
#ifdef TARGET_MEM_FUNCTIONS
emit_library_call (memmove_libfunc, LCT_NORMAL,
and then convert to the wider mode. Our value is the computed
expression. */
{
+ rtx inner_target = 0;
+
/* If we don't want a value, we can do the conversion inside EXP,
which will often result in some optimizations. Do the conversion
in two steps: first change the signedness, if needed, then
{
if (TREE_UNSIGNED (TREE_TYPE (exp))
!= SUBREG_PROMOTED_UNSIGNED_P (target))
- exp
- = convert
- (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
- TREE_TYPE (exp)),
- exp);
-
- exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
- SUBREG_PROMOTED_UNSIGNED_P (target)),
+ exp = convert
+ ((*lang_hooks.types.signed_or_unsigned_type)
+ (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
+
+ exp = convert ((*lang_hooks.types.type_for_mode)
+ (GET_MODE (SUBREG_REG (target)),
+ SUBREG_PROMOTED_UNSIGNED_P (target)),
exp);
+
+ inner_target = SUBREG_REG (target);
}
- temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
+ temp = expand_expr (exp, inner_target, VOIDmode, 0);
/* If TEMP is a volatile MEM and we want a result value, make
the access now so it gets done only once. Likewise if
target. Otherwise, the caller might get confused by a result whose
mode is larger than expected. */
- if (want_value && GET_MODE (temp) != GET_MODE (target)
- && GET_MODE (temp) != VOIDmode)
+ if (want_value && GET_MODE (temp) != GET_MODE (target))
{
- temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
- SUBREG_PROMOTED_VAR_P (temp) = 1;
- SUBREG_PROMOTED_UNSIGNED_P (temp)
- = SUBREG_PROMOTED_UNSIGNED_P (target);
+ if (GET_MODE (temp) != VOIDmode)
+ {
+ temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
+ SUBREG_PROMOTED_VAR_P (temp) = 1;
+ SUBREG_PROMOTED_UNSIGNED_SET (temp,
+ SUBREG_PROMOTED_UNSIGNED_P (target));
+ }
+ else
+ temp = convert_modes (GET_MODE (target),
+ GET_MODE (SUBREG_REG (target)),
+ temp, SUBREG_PROMOTED_UNSIGNED_P (target));
}
return want_value ? temp : NULL_RTX;
temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
temp, TREE_UNSIGNED (TREE_TYPE (exp)));
- if (current_function_check_memory_usage
- && GET_CODE (target) == MEM
- && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
- {
- in_check_memory_usage = 1;
- if (GET_CODE (temp) == MEM)
- emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
- VOIDmode, 3, XEXP (target, 0), Pmode,
- XEXP (temp, 0), Pmode,
- expr_size (exp), TYPE_MODE (sizetype));
- else
- emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
- VOIDmode, 3, XEXP (target, 0), Pmode,
- expr_size (exp), TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_WO),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
- }
-
/* If value was not generated in the target, store it there.
- Convert the value to TARGET's type first if nec. */
- /* If TEMP and TARGET compare equal according to rtx_equal_p, but
+ Convert the value to TARGET's type first if necessary.
+ If TEMP and TARGET compare equal according to rtx_equal_p, but
one or both of them are volatile memory refs, we have to distinguish
two cases:
- expand_expr has used TARGET. In this case, we must not generate
else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
{
- /* Handle copying a string constant into an array.
- The string constant may be shorter than the array.
- So copy just the string's actual length, and clear the rest. */
- rtx size;
- rtx addr;
+ /* Handle copying a string constant into an array. The string
+ constant may be shorter than the array. So copy just the string's
+ actual length, and clear the rest. First get the size of the data
+ type of the string, which is actually the size of the target. */
+ rtx size = expr_size (exp);
- /* Get the size of the data type of the string,
- which is actually the size of the target. */
- size = expr_size (exp);
if (GET_CODE (size) == CONST_INT
&& INTVAL (size) < TREE_STRING_LENGTH (exp))
emit_block_move (target, temp, size);
rtx label = 0;
/* Copy that much. */
+ copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
emit_block_move (target, temp, copy_size_rtx);
/* Figure out how much is left in TARGET that we have to clear.
Do all calculations in ptr_mode. */
-
- addr = XEXP (target, 0);
- addr = convert_modes (ptr_mode, Pmode, addr, 1);
-
if (GET_CODE (copy_size_rtx) == CONST_INT)
{
- addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
- size = plus_constant (size, -TREE_STRING_LENGTH (exp));
+ size = plus_constant (size, -INTVAL (copy_size_rtx));
+ target = adjust_address (target, BLKmode,
+ INTVAL (copy_size_rtx));
}
else
{
- addr = force_reg (ptr_mode, addr);
- addr = expand_binop (ptr_mode, add_optab, addr,
- copy_size_rtx, NULL_RTX, 0,
- OPTAB_LIB_WIDEN);
-
size = expand_binop (ptr_mode, sub_optab, size,
copy_size_rtx, NULL_RTX, 0,
OPTAB_LIB_WIDEN);
+#ifdef POINTERS_EXTEND_UNSIGNED
+ if (GET_MODE (copy_size_rtx) != Pmode)
+ copy_size_rtx = convert_memory_address (Pmode,
+ copy_size_rtx);
+#endif
+
+ target = offset_address (target, copy_size_rtx,
+ highest_pow2_factor (copy_size));
label = gen_label_rtx ();
emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
GET_MODE (size), 0, label);
}
if (size != const0_rtx)
- {
- rtx dest = gen_rtx_MEM (BLKmode, addr);
-
- MEM_COPY_ATTRIBUTES (dest, target);
-
- /* Be sure we can write on ADDR. */
- in_check_memory_usage = 1;
- if (current_function_check_memory_usage)
- emit_library_call (chkr_check_addr_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
- addr, Pmode,
- size, TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_WO),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
- clear_storage (dest, size);
- }
+ clear_storage (target, size);
if (label)
emit_label (label);
case CONVERT_EXPR:
case NOP_EXPR:
case NON_LVALUE_EXPR:
+ case VIEW_CONVERT_EXPR:
return is_zeros_p (TREE_OPERAND (exp, 0));
case INTEGER_CST:
case REAL_CST:
return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
+ case VECTOR_CST:
+ for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
+ elt = TREE_CHAIN (elt))
+ if (!is_zeros_p (TREE_VALUE (elt)))
+ return 0;
+
+ return 1;
+
case CONSTRUCTOR:
if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
{
tree field = TREE_PURPOSE (elt);
-#ifdef WORD_REGISTER_OPERATIONS
tree value = TREE_VALUE (elt);
-#endif
enum machine_mode mode;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos = 0;
if (field == 0)
continue;
- if (cleared && is_zeros_p (TREE_VALUE (elt)))
+ if (cleared && is_zeros_p (value))
continue;
if (host_integerp (DECL_SIZE (field), 1))
if (TYPE_PRECISION (type) < BITS_PER_WORD)
{
- type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
+ type = (*lang_hooks.types.type_for_size)
+ (BITS_PER_WORD, TREE_UNSIGNED (type));
value = convert (type, value);
}
}
store_constructor_field (to_rtx, bitsize, bitpos, mode,
- TREE_VALUE (elt), type, cleared,
+ value, type, cleared,
get_alias_set (TREE_TYPE (field)));
}
}
- else if (TREE_CODE (type) == ARRAY_TYPE)
+ else if (TREE_CODE (type) == ARRAY_TYPE
+ || TREE_CODE (type) == VECTOR_TYPE)
{
tree elt;
int i;
int need_to_clear;
tree domain = TYPE_DOMAIN (type);
tree elttype = TREE_TYPE (type);
- int const_bounds_p = (TYPE_MIN_VALUE (domain)
- && TYPE_MAX_VALUE (domain)
- && host_integerp (TYPE_MIN_VALUE (domain), 0)
- && host_integerp (TYPE_MAX_VALUE (domain), 0));
+ int const_bounds_p;
HOST_WIDE_INT minelt = 0;
HOST_WIDE_INT maxelt = 0;
+ /* Vectors are like arrays, but the domain is stored via an array
+ type indirectly. */
+ if (TREE_CODE (type) == VECTOR_TYPE)
+ {
+ /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
+ the same field as TYPE_DOMAIN, we are not guaranteed that
+ it always will. */
+ domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
+ domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
+ }
+
+ const_bounds_p = (TYPE_MIN_VALUE (domain)
+ && TYPE_MAX_VALUE (domain)
+ && host_integerp (TYPE_MIN_VALUE (domain), 0)
+ && host_integerp (TYPE_MAX_VALUE (domain), 0));
+
/* If we have constant bounds for the range of the type, get them. */
if (const_bounds_p)
{
if (need_to_clear && size > 0)
{
if (! cleared)
- clear_storage (target, GEN_INT (size));
+ {
+ if (REG_P (target))
+ emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
+ else
+ clear_storage (target, GEN_INT (size));
+ }
cleared = 1;
}
else if (REG_P (target))
if (GET_CODE (target) == MEM
&& !MEM_KEEP_ALIAS_SET_P (target)
+ && TREE_CODE (type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (type))
{
target = copy_rtx (target);
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
+ && TREE_CODE (type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (type))
{
target = copy_rtx (target);
{
targetx
= assign_temp
- ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
+ ((build_qualified_type ((*lang_hooks.types.type_for_mode)
+ (GET_MODE (target), 0),
TYPE_QUAL_CONST)),
0, 1, 1);
emit_move_insn (targetx, target);
= assign_temp
(build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
0, 1, 1);
- rtx blk_object = copy_rtx (object);
-
- PUT_MODE (blk_object, BLKmode);
- MEM_COPY_ATTRIBUTES (blk_object, object);
+ rtx blk_object = adjust_address (object, BLKmode, 0);
if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
emit_move_insn (object, target);
low-order bits. However, if EXP's type is a record and this is
big-endian machine, we want the upper BITSIZE bits. */
if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
- && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
+ && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
&& TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
size_int (GET_MODE_BITSIZE (GET_MODE (temp))
tree count;
enum machine_mode tmode;
- if (unsignedp)
- return expand_and (temp,
- GEN_INT
- (trunc_int_for_mode
- (width_mask,
- GET_MODE (temp) == VOIDmode
- ? value_mode
- : GET_MODE (temp))), NULL_RTX);
-
tmode = GET_MODE (temp);
if (tmode == VOIDmode)
tmode = value_mode;
+
+ if (unsignedp)
+ return expand_and (tmode, temp,
+ gen_int_mode (width_mask, tmode),
+ NULL_RTX);
+
count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
continue;
}
else if (TREE_CODE (exp) != NON_LVALUE_EXPR
+ && TREE_CODE (exp) != VIEW_CONVERT_EXPR
&& ! ((TREE_CODE (exp) == NOP_EXPR
|| TREE_CODE (exp) == CONVERT_EXPR)
&& (TYPE_MODE (TREE_TYPE (exp))
return exp;
}
-/* Subroutine of expand_exp: compute memory_usage from modifier. */
+/* Return 1 if T is an expression that get_inner_reference handles. */
-static enum memory_use_mode
-get_memory_usage_from_modifier (modifier)
- enum expand_modifier modifier;
+int
+handled_component_p (t)
+ tree t;
{
- switch (modifier)
+ switch (TREE_CODE (t))
{
- case EXPAND_NORMAL:
- case EXPAND_SUM:
- return MEMORY_USE_RO;
- break;
- case EXPAND_MEMORY_USE_WO:
- return MEMORY_USE_WO;
- break;
- case EXPAND_MEMORY_USE_RW:
- return MEMORY_USE_RW;
- break;
- case EXPAND_MEMORY_USE_DONT:
- /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
- MEMORY_USE_DONT, because they are modifiers to a call of
- expand_expr in the ADDR_EXPR case of expand_expr. */
- case EXPAND_CONST_ADDRESS:
- case EXPAND_INITIALIZER:
- return MEMORY_USE_DONT;
- case EXPAND_MEMORY_USE_BAD:
+ case BIT_FIELD_REF:
+ case COMPONENT_REF:
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ case NON_LVALUE_EXPR:
+ case VIEW_CONVERT_EXPR:
+ return 1;
+
+ case NOP_EXPR:
+ case CONVERT_EXPR:
+ return (TYPE_MODE (TREE_TYPE (t))
+ == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
+
default:
- abort ();
+ return 0;
}
}
\f
rtx subtarget = get_subtarget (target);
/* Check for a PIC address load. */
- if (flag_pic
- && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
+ if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
&& XEXP (value, 0) == pic_offset_table_rtx
&& (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
|| GET_CODE (XEXP (value, 1)) == LABEL_REF
special handling. */
if ((unsigned int) TREE_CODE (exp)
>= (unsigned int) LAST_AND_UNUSED_TREE_CODE
- && lang_safe_from_p
- && !(*lang_safe_from_p) (x, exp))
+ && !(*lang_hooks.safe_from_p) (x, exp))
return 0;
}
are memory and they conflict. */
return ! (rtx_equal_p (x, exp_rtl)
|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
- && true_dependence (exp_rtl, GET_MODE (x), x,
+ && true_dependence (exp_rtl, VOIDmode, x,
rtx_addr_varies_p)));
}
switch (TREE_CODE (exp))
{
case INTEGER_CST:
- /* If the integer is expressable in a HOST_WIDE_INT, we can find the
- lowest bit that's a one. If the result is zero, pessimize by
- returning 1. This is overly-conservative, but such things should not
- happen in the offset expressions that we are called with. */
- if (host_integerp (exp, 0))
+ /* We can find the lowest bit that's a one. If the low
+ HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
+ We need to handle this case since we can find it in a COND_EXPR,
+ a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
+ erroneous program, so return BIGGEST_ALIGNMENT to avoid any
+ later ICE. */
+ if (TREE_CONSTANT_OVERFLOW (exp))
+ return BIGGEST_ALIGNMENT;
+ else
{
- c0 = tree_low_cst (exp, 0);
- c0 = c0 < 0 ? - c0 : c0;
- return c0 != 0 ? c0 & -c0 : 1;
+ /* Note: tree_low_cst is intentionally not used here,
+ we don't care about the upper bits. */
+ c0 = TREE_INT_CST_LOW (exp);
+ c0 &= -c0;
+ return c0 ? c0 : BIGGEST_ALIGNMENT;
}
break;
- case PLUS_EXPR: case MINUS_EXPR:
+ case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
return MIN (c0, c1);
case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
case CEIL_DIV_EXPR:
- c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
- c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
- return MAX (1, c0 / c1);
+ if (integer_pow2p (TREE_OPERAND (exp, 1))
+ && host_integerp (TREE_OPERAND (exp, 1), 1))
+ {
+ c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
+ c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
+ return MAX (1, c0 / c1);
+ }
+ break;
case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
- case COMPOUND_EXPR: case SAVE_EXPR: case WITH_RECORD_EXPR:
+ case SAVE_EXPR: case WITH_RECORD_EXPR:
return highest_pow2_factor (TREE_OPERAND (exp, 0));
+ case COMPOUND_EXPR:
+ return highest_pow2_factor (TREE_OPERAND (exp, 1));
+
case COND_EXPR:
c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
rtx subtarget, original_target;
int ignore;
tree context;
- /* Used by check-memory-usage to make modifier read only. */
- enum expand_modifier ro_modifier;
/* Handle ERROR_MARK before anybody tries to access its type. */
if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
ignore = (target == const0_rtx
|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
|| code == CONVERT_EXPR || code == REFERENCE_EXPR
- || code == COND_EXPR)
+ || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
&& TREE_CODE (type) == VOID_TYPE));
- /* Make a read-only version of the modifier. */
- if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
- || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
- ro_modifier = modifier;
- else
- ro_modifier = EXPAND_NORMAL;
-
/* If we are going to ignore this result, we need only do something
if there is a side-effect somewhere in the expression. If there
is, short-circuit the most common cases here. Note that we must
&& mode != VOIDmode && mode != BLKmode
&& modifier != EXPAND_CONST_ADDRESS)
{
- temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
+ temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
if (GET_CODE (temp) == MEM)
temp = copy_to_reg (temp);
return const0_rtx;
if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
|| code == INDIRECT_REF || code == BUFFER_REF)
- return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
- VOIDmode, ro_modifier);
+ return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
+ modifier);
+
else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
|| code == ARRAY_REF || code == ARRAY_RANGE_REF)
{
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
- ro_modifier);
- expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
- ro_modifier);
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
+ expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
return const0_rtx;
}
else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
/* If the second operand has no side effects, just evaluate
the first. */
- return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
- VOIDmode, ro_modifier);
+ return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
+ modifier);
else if (code == BIT_FIELD_REF)
{
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
- ro_modifier);
- expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
- ro_modifier);
- expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
- ro_modifier);
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
+ expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
+ expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
return const0_rtx;
}
- ;
+
target = 0;
}
/* If will do cse, generate all results into pseudo registers
since 1) that allows cse to find more things
and 2) otherwise cse could produce an insn the machine
- cannot support. */
+     cannot support.  An exception is a CONSTRUCTOR into a multi-word
+ MEM: that's much more likely to be most efficient into the MEM. */
if (! cse_not_expected && mode != BLKmode && target
- && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
+ && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
+ && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
target = subtarget;
switch (code)
if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
&& (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
{
- layout_decl (exp, 0);
- PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
- }
+ rtx value = DECL_RTL_IF_SET (exp);
- /* Although static-storage variables start off initialized, according to
- ANSI C, a memcpy could overwrite them with uninitialized values. So
- we check them too. This also lets us check for read-only variables
- accessed via a non-const declaration, in case it won't be detected
- any other way (e.g., in an embedded system or OS kernel without
- memory protection).
-
- Aggregates are not checked here; they're handled elsewhere. */
- if (cfun && current_function_check_memory_usage
- && code == VAR_DECL
- && GET_CODE (DECL_RTL (exp)) == MEM
- && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
- {
- enum memory_use_mode memory_usage;
- memory_usage = get_memory_usage_from_modifier (modifier);
+ layout_decl (exp, 0);
- in_check_memory_usage = 1;
- if (memory_usage != MEMORY_USE_DONT)
- emit_library_call (chkr_check_addr_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
- XEXP (DECL_RTL (exp), 0), Pmode,
- GEN_INT (int_size_in_bytes (type)),
- TYPE_MODE (sizetype),
- GEN_INT (memory_usage),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
+ /* If the RTL was already set, update its mode and memory
+ attributes. */
+ if (value != 0)
+ {
+ PUT_MODE (value, DECL_MODE (exp));
+ SET_DECL_RTL (exp, 0);
+ set_mem_attributes (value, exp, 1);
+ SET_DECL_RTL (exp, value);
+ }
}
/* ... fall through ... */
DECL_NONLOCAL (exp) = 1;
if (DECL_NO_STATIC_CHAIN (current_function_decl))
abort ();
- mark_addressable (exp);
+ (*lang_hooks.mark_addressable) (exp);
if (GET_CODE (DECL_RTL (exp)) != MEM)
abort ();
addr = XEXP (DECL_RTL (exp), 0);
but mark it so that we know that it was already extended. */
if (GET_CODE (DECL_RTL (exp)) == REG
- && GET_MODE (DECL_RTL (exp)) != mode)
+ && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
{
/* Get the signedness used for this variable. Ensure we get the
same mode we got when the variable was declared. */
if (GET_MODE (DECL_RTL (exp))
- != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
+ != promote_mode (type, DECL_MODE (exp), &unsignedp,
+ (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
abort ();
temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
SUBREG_PROMOTED_VAR_P (temp) = 1;
- SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
+ SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
return temp;
}
return DECL_RTL (exp);
case INTEGER_CST:
- return immed_double_const (TREE_INT_CST_LOW (exp),
+ temp = immed_double_const (TREE_INT_CST_LOW (exp),
TREE_INT_CST_HIGH (exp), mode);
+ /* ??? If overflow is set, fold will have done an incomplete job,
+ which can result in (plus xx (const_int 0)), which can get
+ simplified by validate_replace_rtx during virtual register
+ instantiation, which can result in unrecognizable insns.
+ Avoid this by forcing all overflows into registers. */
+ if (TREE_CONSTANT_OVERFLOW (exp))
+ temp = force_reg (mode, temp);
+
+ return temp;
+
case CONST_DECL:
- return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
- EXPAND_MEMORY_USE_BAD);
+ return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
case REAL_CST:
/* If optimized, generate immediate CONST_DOUBLE
{
temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
SUBREG_PROMOTED_VAR_P (temp) = 1;
- SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
+ SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
}
if (temp == const0_rtx)
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
- EXPAND_MEMORY_USE_BAD);
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
else
store_expr (TREE_OPERAND (exp, 0), temp, 0);
promote_mode (type, mode, &unsignedp, 0);
temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
SUBREG_PROMOTED_VAR_P (temp) = 1;
- SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
+ SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
return temp;
}
{
rtx temp;
temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
- TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
+ TREE_OPERAND (exp, 0)
+ = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
return temp;
}
abort ();
placeholder_list = TREE_CHAIN (placeholder_expr);
- temp = expand_expr (exp, original_target, tmode, ro_modifier);
+ temp = expand_expr (exp, original_target, tmode, modifier);
placeholder_list = old_list;
return temp;
}
and pop the list. */
placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
placeholder_list);
- target = expand_expr (TREE_OPERAND (exp, 0), original_target,
- tmode, ro_modifier);
+ target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
+ modifier);
placeholder_list = TREE_CHAIN (placeholder_list);
return target;
case LABELED_BLOCK_EXPR:
if (LABELED_BLOCK_BODY (exp))
- expand_expr_stmt (LABELED_BLOCK_BODY (exp));
+ expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
/* Should perhaps use expand_label, but this is simpler and safer. */
do_pending_stack_adjust ();
emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
case LOOP_EXPR:
push_temp_slots ();
expand_start_loop (1);
- expand_expr_stmt (TREE_OPERAND (exp, 0));
+ expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
expand_end_loop ();
pop_temp_slots ();
/* Mark the corresponding BLOCK for output in its proper place. */
if (TREE_OPERAND (exp, 2) != 0
&& ! TREE_USED (TREE_OPERAND (exp, 2)))
- insert_block (TREE_OPERAND (exp, 2));
+ (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
/* If VARS have not yet been expanded, expand them now. */
while (vars)
vars = TREE_CHAIN (vars);
}
- temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
+ temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
if (ignore)
{
tree elt;
+
for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
- expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
- EXPAND_MEMORY_USE_BAD);
+ expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
+
return const0_rtx;
}
(TYPE_QUALS (type)
| (TREE_READONLY (exp)
* TYPE_QUAL_CONST))),
- TREE_ADDRESSABLE (exp), 1, 1);
+ 0, TREE_ADDRESSABLE (exp), 1);
store_constructor (exp, target, 0,
int_size_in_bytes (TREE_TYPE (exp)));
&& compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
&& GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_SIZE (mode) == 1
- && modifier != EXPAND_MEMORY_USE_WO)
- return
- GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
+ && modifier != EXPAND_WRITE)
+ return gen_int_mode (TREE_STRING_POINTER (string)
+ [TREE_INT_CST_LOW (index)], mode);
op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
op0 = memory_address (mode, op0);
-
- if (cfun && current_function_check_memory_usage
- && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
- {
- enum memory_use_mode memory_usage;
- memory_usage = get_memory_usage_from_modifier (modifier);
-
- if (memory_usage != MEMORY_USE_DONT)
- {
- in_check_memory_usage = 1;
- emit_library_call (chkr_check_addr_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
- Pmode, GEN_INT (int_size_in_bytes (type)),
- TYPE_MODE (sizetype),
- GEN_INT (memory_usage),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
- }
- }
-
temp = gen_rtx_MEM (mode, op0);
set_mem_attributes (temp, exp, 0);
/* If we are writing to this object and its type is a record with
readonly fields, we must mark it as readonly so it will
conflict with readonly references to those fields. */
- if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
+ if (modifier == EXPAND_WRITE && readonly_fields_p (type))
RTX_UNCHANGING_P (temp) = 1;
return temp;
&& compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
&& GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_SIZE (mode) == 1)
- return
- GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
+ return gen_int_mode (TREE_STRING_POINTER (array)
+ [TREE_INT_CST_LOW (index)], mode);
/* If this is a constant index into a constant array,
just get the value from the array. Handle both the cases when
;
if (elem)
- return expand_expr (fold (TREE_VALUE (elem)), target,
- tmode, ro_modifier);
+ return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
+ modifier);
}
else if (optimize >= 1
if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
return expand_expr (fold (TREE_VALUE (elem)), target,
- tmode, ro_modifier);
+ tmode, modifier);
}
else if (TREE_CODE (init) == STRING_CST
&& 0 > compare_tree_int (index,
if (GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_SIZE (mode) == 1)
- return (GEN_INT
- (TREE_STRING_POINTER
- (init)[TREE_INT_CST_LOW (index)]));
+ return gen_int_mode (TREE_STRING_POINTER (init)
+ [TREE_INT_CST_LOW (index)], mode);
}
}
}
{
HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
+ enum machine_mode imode
+ = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
{
op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
- op0 = expand_and (op0, op1, target);
+ op0 = expand_and (imode, op0, op1, target);
}
else
{
- enum machine_mode imode
- = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
tree count
= build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
0);
if (offset != 0)
{
- rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
+ rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
/* If this object is in a register, put it into memory.
This case can't occur in C, but can in Ada if we have
&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
{
- rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
-
- if (GET_CODE (XEXP (temp, 0)) == REG)
- op0 = temp;
- else
- op0 = (replace_equiv_address
- (op0,
- force_reg (GET_MODE (XEXP (temp, 0)),
- XEXP (temp, 0))));
+ op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
bitpos = 0;
}
highest_pow2_factor (offset));
}
+ /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
+ record its alignment as BIGGEST_ALIGNMENT. */
+ if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
+ && is_aligning_offset (offset, tem))
+ set_mem_align (op0, BIGGEST_ALIGNMENT);
+
/* Don't forget about volatility even if this is a bitfield. */
if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
{
MEM_VOLATILE_P (op0) = 1;
}
- /* Check the access. */
- if (cfun != 0 && current_function_check_memory_usage
- && GET_CODE (op0) == MEM)
- {
- enum memory_use_mode memory_usage;
- memory_usage = get_memory_usage_from_modifier (modifier);
-
- if (memory_usage != MEMORY_USE_DONT)
- {
- rtx to;
- int size;
-
- to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
- size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
-
- /* Check the access right of the pointer. */
- in_check_memory_usage = 1;
- if (size > BITS_PER_UNIT)
- emit_library_call (chkr_check_addr_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
- Pmode, GEN_INT (size / BITS_PER_UNIT),
- TYPE_MODE (sizetype),
- GEN_INT (memory_usage),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
- }
- }
-
/* In cases where an aligned union has an unaligned object
as a field, we might be extracting a BLKmode value from
an integer-mode (e.g., SImode) object. Handle this case
machine, we must put the field into the high-order bits. */
if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
- && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
+ && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
size_int (GET_MODE_BITSIZE (GET_MODE (op0))
- bitsize),
if (mode == BLKmode)
{
- tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
- TYPE_QUAL_CONST);
- rtx new = assign_temp (nt, 0, 1, 1);
+ rtx new = assign_temp (build_qualified_type
+ ((*lang_hooks.types.type_for_mode)
+ (ext_mode, 0),
+ TYPE_QUAL_CONST), 0, 1, 1);
emit_move_insn (new, op0);
op0 = copy_rtx (new);
PUT_MODE (op0, BLKmode);
+ set_mem_attributes (op0, exp, 1);
}
return op0;
if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
{
WITH_CLEANUP_EXPR_RTL (exp)
- = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
- expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
+ = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
+ expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
+ CLEANUP_EH_ONLY (exp));
/* That's it for this cleanup. */
TREE_OPERAND (exp, 1) = 0;
target_temp_slot_level = temp_slot_level;
- op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
+ op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
/* If we're going to use this value, load it up now. */
if (! ignore)
op0 = force_not_mem (op0);
{
if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
== BUILT_IN_FRONTEND)
- return (*lang_expand_expr) (exp, original_target, tmode, modifier);
+ return (*lang_hooks.expand_expr)
+ (exp, original_target, tmode, modifier);
else
return expand_builtin (exp, target, subtarget, tmode, ignore);
}
{
tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
- /* If both input and output are BLKmode, this conversion
- isn't actually doing anything unless we need to make the
- alignment stricter. */
- if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
- && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
- || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
- return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
- modifier);
+ /* If both input and output are BLKmode, this conversion isn't doing
+ anything except possibly changing memory attribute. */
+ if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
+ {
+ rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
+ modifier);
+
+ result = copy_rtx (result);
+ set_mem_attributes (result, exp, 0);
+ return result;
+ }
if (target == 0)
target = assign_temp (type, 0, 1, 1);
if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
{
op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
- ro_modifier);
+ modifier);
/* If the signedness of the conversion differs and OP0 is
a promoted SUBREG, clear that indication since we now
return op0;
}
- op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
+ op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
if (GET_MODE (op0) == mode)
return op0;
/* If OP0 is a constant, just convert it into the proper mode. */
if (CONSTANT_P (op0))
- return
- convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
- op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
+ {
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ enum machine_mode inner_mode = TYPE_MODE (inner_type);
+
+ if (modifier == EXPAND_INITIALIZER)
+ return simplify_gen_subreg (mode, op0, inner_mode,
+ subreg_lowpart_offset (mode,
+ inner_mode));
+ else
+ return convert_modes (mode, inner_mode, op0,
+ TREE_UNSIGNED (inner_type));
+ }
if (modifier == EXPAND_INITIALIZER)
return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
return target;
+ case VIEW_CONVERT_EXPR:
+ op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
+
+ /* If the input and output modes are both the same, we are done.
+ Otherwise, if neither mode is BLKmode and both are within a word, we
+ can use gen_lowpart. If neither is true, make sure the operand is
+ in memory and convert the MEM to the new mode. */
+ if (TYPE_MODE (type) == GET_MODE (op0))
+ ;
+ else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
+ && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
+ && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
+ op0 = gen_lowpart (TYPE_MODE (type), op0);
+ else if (GET_CODE (op0) != MEM)
+ {
+ /* If the operand is not a MEM, force it into memory. Since we
+	     are going to be changing the mode of the MEM, don't call
+ force_const_mem for constants because we don't allow pool
+ constants to change mode. */
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+
+ if (TREE_ADDRESSABLE (exp))
+ abort ();
+
+ if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
+ target
+ = assign_stack_temp_for_type
+ (TYPE_MODE (inner_type),
+ GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
+
+ emit_move_insn (target, op0);
+ op0 = target;
+ }
+
+ /* At this point, OP0 is in the correct mode. If the output type is such
+ that the operand is known to be aligned, indicate that it is.
+ Otherwise, we need only be concerned about alignment for non-BLKmode
+ results. */
+ if (GET_CODE (op0) == MEM)
+ {
+ op0 = copy_rtx (op0);
+
+ if (TYPE_ALIGN_OK (type))
+ set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
+ else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
+ && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
+ {
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ HOST_WIDE_INT temp_size
+ = MAX (int_size_in_bytes (inner_type),
+ (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
+ rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
+ temp_size, 0, type);
+ rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
+
+ if (TREE_ADDRESSABLE (exp))
+ abort ();
+
+ if (GET_MODE (op0) == BLKmode)
+ emit_block_move (new_with_op0_mode, op0,
+ GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
+ else
+ emit_move_insn (new_with_op0_mode, op0);
+
+ op0 = new;
+ }
+
+ op0 = adjust_address (op0, TYPE_MODE (type), 0);
+ }
+
+ return op0;
+
case PLUS_EXPR:
/* We come here from MINUS_EXPR when the second operand is a
constant. */
plus_expr:
this_optab = ! unsignedp && flag_trapv
- && (GET_MODE_CLASS(mode) == MODE_INT)
+ && (GET_MODE_CLASS (mode) == MODE_INT)
? addv_optab : add_optab;
/* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
rtx constant_part;
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
- EXPAND_SUM);
+ (modifier == EXPAND_INITIALIZER
+ ? EXPAND_INITIALIZER : EXPAND_SUM));
if (! CONSTANT_P (op0))
{
op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
+ op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
both_summands:
/* Make sure any term that's a sum with a constant comes last. */
&& really_constant_p (TREE_OPERAND (exp, 0))
&& really_constant_p (TREE_OPERAND (exp, 1)))
{
- rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
- VOIDmode, ro_modifier);
- rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
- VOIDmode, ro_modifier);
+ rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
+ modifier);
+ rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
+ modifier);
/* If the last operand is a CONST_INT, use plus_constant of
the negated constant. Else make the MINUS. */
indexed address, for machines that support that. */
if (modifier == EXPAND_SUM && mode == ptr_mode
- && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ && host_integerp (TREE_OPERAND (exp, 1), 0))
{
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
EXPAND_SUM);
- /* Apply distributive law if OP0 is x+c. */
- if (GET_CODE (op0) == PLUS
- && GET_CODE (XEXP (op0, 1)) == CONST_INT)
- return
- gen_rtx_PLUS
- (mode,
- gen_rtx_MULT
- (mode, XEXP (op0, 0),
- GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
- GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
- * INTVAL (XEXP (op0, 1))));
+ /* If we knew for certain that this is arithmetic for an array
+ reference, and we knew the bounds of the array, then we could
+ apply the distributive law across (PLUS X C) for constant C.
+ Without such knowledge, we risk overflowing the computation
+ when both X and C are large, but X+C isn't. */
+ /* ??? Could perhaps special-case EXP being unsigned and C being
+ positive. In that case we are certain that X+C is no smaller
+ than X and so the transformed expression will overflow iff the
+ original would have. */
if (GET_CODE (op0) != REG)
op0 = force_operand (op0, NULL_RTX);
return
gen_rtx_MULT (mode, op0,
- GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
+ GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
}
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
expensive divide. If not, combine will rebuild the original
computation. */
if (flag_unsafe_math_optimizations && optimize && !optimize_size
+ && TREE_CODE (type) == REAL_TYPE
&& !real_onep (TREE_OPERAND (exp, 0)))
return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
build (RDIV_EXPR, type,
temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
VOIDmode, 0);
+ /* If temp is constant, we can just compute the result. */
+ if (GET_CODE (temp) == CONST_INT)
+ {
+ if (INTVAL (temp) != 0)
+ emit_move_insn (target, const1_rtx);
+ else
+ emit_move_insn (target, const0_rtx);
+
+ return target;
+ }
+
if (temp != original_target)
- temp = copy_to_reg (temp);
+ {
+ enum machine_mode mode1 = GET_MODE (temp);
+ if (mode1 == VOIDmode)
+ mode1 = tmode != VOIDmode ? tmode : mode;
+
+ temp = copy_to_mode_reg (mode1, temp);
+ }
op1 = gen_label_rtx ();
emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
if (ignore)
{
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
- ro_modifier);
+ modifier);
return const0_rtx;
}
- op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
+ op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
if (GET_MODE (op0) == mode)
return op0;
built here. */
if (TREE_OPERAND (exp, 2) == 0)
- TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
+ TREE_OPERAND (exp, 2)
+ = (*lang_hooks.maybe_build_cleanup) (slot);
cleanups = TREE_OPERAND (exp, 2);
}
}
store_expr (exp1, target, 0);
- expand_decl_cleanup (NULL_TREE, cleanups);
+ expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
return target;
}
|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
|| GET_CODE (op0) == PARALLEL)
{
- /* If this object is in a register, it must can't be BLKmode. */
- tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- tree nt = build_qualified_type (inner_type,
- (TYPE_QUALS (inner_type)
- | TYPE_QUAL_CONST));
- rtx memloc = assign_temp (nt, 1, 1, 1);
-
- if (GET_CODE (op0) == PARALLEL)
- /* Handle calls that pass values in multiple non-contiguous
- locations. The Irix 6 ABI has examples of this. */
- emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
+ /* If the operand is a SAVE_EXPR, we can deal with this by
+ forcing the SAVE_EXPR into memory. */
+ if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
+ {
+ put_var_into_stack (TREE_OPERAND (exp, 0));
+ op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
+ }
else
- emit_move_insn (memloc, op0);
-
- op0 = memloc;
+ {
+ /* If this object is in a register, it can't be BLKmode. */
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ rtx memloc = assign_temp (inner_type, 1, 1, 1);
+
+ if (GET_CODE (op0) == PARALLEL)
+ /* Handle calls that pass values in multiple
+ non-contiguous locations. The Irix 6 ABI has examples
+ of this. */
+ emit_group_store (memloc, op0,
+ int_size_in_bytes (inner_type));
+ else
+ emit_move_insn (memloc, op0);
+
+ op0 = memloc;
+ }
}
if (GET_CODE (op0) != MEM)
return op0;
}
- /* If OP0 is not aligned as least as much as the type requires,
- we need to make a temporary, copy OP0 to it, and take the
- address of the temporary. */
- if (expr_align (TREE_OPERAND (exp, 0)) > MEM_ALIGN (op0))
+      /* If OP0 is not aligned at least as much as the type requires, we
+ need to make a temporary, copy OP0 to it, and take the address of
+ the temporary. We want to use the alignment of the type, not of
+ the operand. Note that this is incorrect for FUNCTION_TYPE, but
+ the test for BLKmode means that can't happen. The test for
+ BLKmode is because we never make mis-aligned MEMs with
+ non-BLKmode.
+
+ We don't need to do this at all if the machine doesn't have
+ strict alignment. */
+ if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
+ && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
+ > MEM_ALIGN (op0))
+ && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
{
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
rtx new
= assign_stack_temp_for_type
(TYPE_MODE (inner_type),
MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
- : int_size_in_bytes (TREE_TYPE (inner_type)),
+ : int_size_in_bytes (inner_type),
1, build_qualified_type (inner_type,
(TYPE_QUALS (inner_type)
| TYPE_QUAL_CONST)));
+ if (TYPE_ALIGN_OK (inner_type))
+ abort ();
+
emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
op0 = new;
}
op0 = force_operand (XEXP (op0, 0), target);
}
- if (flag_force_addr && GET_CODE (op0) != REG)
+ if (flag_force_addr
+ && GET_CODE (op0) != REG
+ && modifier != EXPAND_CONST_ADDRESS
+ && modifier != EXPAND_INITIALIZER
+ && modifier != EXPAND_SUM)
op0 = force_reg (Pmode, op0);
if (GET_CODE (op0) == REG
abort ();
default:
- return (*lang_expand_expr) (exp, original_target, tmode, modifier);
+ return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
}
/* Here to do an ordinary binary operator, generating an instruction
return temp;
}
\f
+/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
+   when applied to the address of EXP produces an address known to be
+   aligned more than BIGGEST_ALIGNMENT.  */
+
+static int
+is_aligning_offset (offset, exp)
+     tree offset;
+     tree exp;
+{
+  /* Strip off any conversions and WITH_RECORD_EXPR nodes.  */
+  while (TREE_CODE (offset) == NON_LVALUE_EXPR
+	 || TREE_CODE (offset) == NOP_EXPR
+	 || TREE_CODE (offset) == CONVERT_EXPR
+	 || TREE_CODE (offset) == WITH_RECORD_EXPR)
+    offset = TREE_OPERAND (offset, 0);
+
+  /* We must now have a BIT_AND_EXPR with a constant that is one less than
+     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  The mask C
+     must satisfy C + 1 == 2**k; exact_log2 returns -1 when its argument
+     is not a power of 2, so reject the mask when that happens.  (Note the
+     comparison must not be written "!exact_log2 (...) < 0": `!' binds
+     tighter than `<', which would make the test always false.)  */
+  if (TREE_CODE (offset) != BIT_AND_EXPR
+      || !host_integerp (TREE_OPERAND (offset, 1), 1)
+      || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
+      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
+    return 0;
+
+  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
+     It must be NEGATE_EXPR.  Then strip any more conversions.  */
+  offset = TREE_OPERAND (offset, 0);
+  while (TREE_CODE (offset) == NON_LVALUE_EXPR
+	 || TREE_CODE (offset) == NOP_EXPR
+	 || TREE_CODE (offset) == CONVERT_EXPR)
+    offset = TREE_OPERAND (offset, 0);
+
+  if (TREE_CODE (offset) != NEGATE_EXPR)
+    return 0;
+
+  offset = TREE_OPERAND (offset, 0);
+  while (TREE_CODE (offset) == NON_LVALUE_EXPR
+	 || TREE_CODE (offset) == NOP_EXPR
+	 || TREE_CODE (offset) == CONVERT_EXPR)
+    offset = TREE_OPERAND (offset, 0);
+
+  /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
+     whose type is the same as EXP.  */
+  return (TREE_CODE (offset) == ADDR_EXPR
+	  && (TREE_OPERAND (offset, 0) == exp
+	      || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
+		  && (TREE_TYPE (TREE_OPERAND (offset, 0))
+		      == TREE_TYPE (exp)))));
+}
+\f
/* Return the tree node if a ARG corresponds to a string constant or zero
if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
in bytes within the string that ARG is accessing. The type of the
and insns were generated in computing it. */
temp = get_last_insn ();
- op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
+ op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
/* If OP0 is a SUBREG made for a promoted variable, we cannot increment
in place but instead must do sign- or zero-extension during assignment,
op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
&& temp != get_last_insn ());
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
- EXPAND_MEMORY_USE_BAD);
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
/* Decide whether incrementing or decrementing. */
if (TREE_CODE (exp) == POSTDECREMENT_EXPR
}
if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
- this_optab = this_optab == add_optab ? addv_optab : subv_optab;
+ this_optab = this_optab == add_optab ? addv_optab : subv_optab;
/* For a preincrement, see if we can do this with a single instruction. */
if (!post)
temp = copy_rtx (value = op0);
/* Increment however we can. */
- op1 = expand_binop (mode, this_optab, value, op1,
- current_function_check_memory_usage ? NULL_RTX : op0,
+ op1 = expand_binop (mode, this_optab, value, op1, op0,
TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
+
/* Make sure the value is stored into OP0. */
if (op1 != op0)
emit_move_insn (op0, op1);
&& TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
&& (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
&& (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
- && (type = type_for_mode (mode, 1)) != 0
+ && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
&& TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
&& (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
!= CODE_FOR_nothing))
get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
&unsignedp, &volatilep);
- type = type_for_size (bitsize, unsignedp);
+ type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
if (! SLOW_BYTE_ACCESS
&& type != 0 && bitsize >= 0
&& TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
/* Put the AND last so it can combine with more things. */
if (bitnum != TYPE_PRECISION (type) - 1)
- op0 = expand_and (op0, const1_rtx, subtarget);
+ op0 = expand_and (mode, op0, const1_rtx, subtarget);
return op0;
}
|| (result != const0_rtx && invert))
? const0_rtx : const1_rtx);
+ /* The code of RESULT may not match CODE if compare_from_rtx
+ decided to swap its operands and reverse the original code.
+
+ We know that compare_from_rtx returns either a CONST_INT or
+ a new comparison code, so it is safe to just extract the
+ code from RESULT. */
+ code = GET_CODE (result);
+
label = gen_label_rtx ();
if (bcc_gen_fctn[(int) code] == 0)
abort ();
{
if (TYPE_MODE (index_type) != index_mode)
{
- index_expr = convert (type_for_size (index_bits, 0),
- index_expr);
+ index_expr = convert ((*lang_hooks.types.type_for_size)
+ (index_bits, 0), index_expr);
index_type = TREE_TYPE (index_expr);
}