#include "except.h"
#include "function.h"
#include "insn-config.h"
+#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
#include "optabs.h"
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
HOST_WIDE_INT, enum machine_mode,
- tree, tree, unsigned int, int,
- int));
-static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
- HOST_WIDE_INT));
+ tree, tree, int, int));
+static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
HOST_WIDE_INT, enum machine_mode,
- tree, enum machine_mode, int,
- unsigned int, HOST_WIDE_INT, int));
+ tree, enum machine_mode, int, tree,
+ int));
static enum memory_use_mode
get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
-static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
abort ();
if (VECTOR_MODE_P (to_mode))
- from = gen_rtx_SUBREG (to_mode, from, 0);
+ from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
else
- to = gen_rtx_SUBREG (from_mode, to, 0);
+ to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
emit_move_insn (to, from);
return;
&& STORE_FLAG_VALUE == -1)
{
emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
- lowpart_mode, 0, 0);
+ lowpart_mode, 0);
fill_value = gen_reg_rtx (word_mode);
emit_insn (gen_slt (fill_value));
}
if (data->to)
{
if (data->autinc_to)
- {
- to1 = replace_equiv_address (data->to, data->to_addr);
- to1 = adjust_address (to1, mode, 0);
- }
+ to1 = adjust_automodify_address (data->to, mode, data->to_addr,
+ data->offset);
else
to1 = adjust_address (data->to, mode, data->offset);
}
if (data->autinc_from)
- {
- from1 = replace_equiv_address (data->from, data->from_addr);
- from1 = adjust_address (from1, mode, 0);
- }
+ from1 = adjust_automodify_address (data->from, mode, data->from_addr,
+ data->offset);
else
from1 = adjust_address (data->from, mode, data->offset);
TREE_UNSIGNED (integer_type_node)),
TYPE_MODE (integer_type_node));
#endif
+
+ /* If we are initializing a readonly value, show the above call
+ clobbered it. Otherwise, a load from it may erroneously be hoisted
+ from a loop. */
+ if (RTX_UNCHANGING_P (x))
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}
return retval;
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
registers represented by a PARALLEL. SSIZE represents the total size of
- block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
- SRC in bits. */
+ block SRC in bytes, or -1 if not known. */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
the balance will be in what would be the low-order memory addresses, i.e.
left justified for big endian, right justified for little endian. This
would be needed. */
void
-emit_group_load (dst, orig_src, ssize, align)
+emit_group_load (dst, orig_src, ssize)
rtx dst, orig_src;
- unsigned int align;
int ssize;
{
rtx *tmps, src;
src = gen_reg_rtx (mode);
else
src = gen_reg_rtx (GET_MODE (orig_src));
+
emit_move_insn (src, orig_src);
}
/* Optimize the access just a bit. */
if (GET_CODE (src) == MEM
- && align >= GET_MODE_ALIGNMENT (mode)
+ && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
&& bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
&& bytelen == GET_MODE_SIZE (mode))
{
else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
&& bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
tmps[i] = XEXP (src, 1);
+ else if (bytepos == 0)
+ {
+ rtx mem = assign_stack_temp (GET_MODE (src),
+ GET_MODE_SIZE (GET_MODE (src)), 0);
+ emit_move_insn (mem, src);
+ tmps[i] = adjust_address (mem, mode, 0);
+ }
else
abort ();
}
else
tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
bytepos * BITS_PER_UNIT, 1, NULL_RTX,
- mode, mode, align, ssize);
+ mode, mode, ssize);
if (BYTES_BIG_ENDIAN && shift)
expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
registers represented by a PARALLEL. SSIZE represents the total size of
- block DST, or -1 if not known. ALIGN is the known alignment of DST. */
+ block DST, or -1 if not known. */
void
-emit_group_store (orig_dst, src, ssize, align)
+emit_group_store (orig_dst, src, ssize)
rtx orig_dst, src;
int ssize;
- unsigned int align;
{
rtx *tmps, dst;
int start, i;
the temporary. */
temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
- emit_group_store (temp, src, ssize, align);
- emit_group_load (dst, temp, ssize, align);
+ emit_group_store (temp, src, ssize);
+ emit_group_load (dst, temp, ssize);
return;
}
else if (GET_CODE (dst) != MEM)
/* Optimize the access just a bit. */
if (GET_CODE (dst) == MEM
- && align >= GET_MODE_ALIGNMENT (mode)
+ && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
&& bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
&& bytelen == GET_MODE_SIZE (mode))
emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
else
store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
- mode, tmps[i], align, ssize);
+ mode, tmps[i], ssize);
}
emit_queue ();
extract_bit_field (src, bitsize,
xbitpos % BITS_PER_WORD, 1,
NULL_RTX, word_mode, word_mode,
- bitsize, BITS_PER_WORD),
- bitsize, BITS_PER_WORD);
+ BITS_PER_WORD),
+ BITS_PER_WORD);
}
return tgtblk;
data->offset -= size;
if (data->autinc_to)
- {
- to1 = replace_equiv_address (data->to, data->to_addr);
- to1 = adjust_address (to1, mode, 0);
- }
+ to1 = adjust_automodify_address (data->to, mode, data->to_addr,
+ data->offset);
else
to1 = adjust_address (data->to, mode, data->offset);
VOIDmode, 2, object, Pmode, size,
TYPE_MODE (integer_type_node));
#endif
+
+ /* If we are initializing a readonly value, show the above call
+ clobbered it. Otherwise, a load from it may erroneously be
+ hoisted from a loop. */
+ if (RTX_UNCHANGING_P (object))
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
}
}
last_insn = emit_move_insn_1 (x, y);
if (y_cst && GET_CODE (x) == REG)
- REG_NOTES (last_insn)
- = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
+ set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
return last_insn;
}
/* Handle calls that pass values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
if (GET_CODE (reg) == PARALLEL)
- emit_group_load (reg, x, -1, align); /* ??? size? */
+ emit_group_load (reg, x, -1); /* ??? size? */
else
move_block_to_reg (REGNO (reg), x, partial, mode);
}
{
enum machine_mode mode1;
HOST_WIDE_INT bitsize, bitpos;
+ rtx orig_to_rtx;
tree offset;
int unsignedp;
int volatilep = 0;
tree tem;
- unsigned int alignment;
push_temp_slots ();
tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
- &unsignedp, &volatilep, &alignment);
+ &unsignedp, &volatilep);
/* If we are going to use store_bit_field and extract_bit_field,
make sure to_rtx will be safe for multiple use. */
if (mode1 == VOIDmode && want_value)
tem = stabilize_reference (tem);
- to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
+ orig_to_rtx = to_rtx
+ = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
if (offset != 0)
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
if (GET_CODE (to_rtx) == MEM
&& GET_MODE (to_rtx) == BLKmode
&& GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
- && bitsize
+ && bitsize > 0
&& (bitpos % bitsize) == 0
&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
- && alignment == GET_MODE_ALIGNMENT (mode1))
+ && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
{
rtx temp
= adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
highest_pow2_factor (offset));
}
- if (volatilep)
- {
- if (GET_CODE (to_rtx) == MEM)
- {
- /* When the offset is zero, to_rtx is the address of the
- structure we are storing into, and hence may be shared.
- We must make a new MEM before setting the volatile bit. */
- if (offset == 0)
- to_rtx = copy_rtx (to_rtx);
- MEM_VOLATILE_P (to_rtx) = 1;
- }
-#if 0 /* This was turned off because, when a field is volatile
- in an object which is not volatile, the object may be in a register,
- and then we would abort over here. */
- else
- abort ();
-#endif
+ /* Deal with volatile and readonly fields. The former is only done
+ for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
+ if (volatilep && GET_CODE (to_rtx) == MEM)
+ {
+ if (to_rtx == orig_to_rtx)
+ to_rtx = copy_rtx (to_rtx);
+ MEM_VOLATILE_P (to_rtx) = 1;
}
if (TREE_CODE (to) == COMPONENT_REF
&& TREE_READONLY (TREE_OPERAND (to, 1)))
{
- if (offset == 0)
+ if (to_rtx == orig_to_rtx)
to_rtx = copy_rtx (to_rtx);
-
RTX_UNCHANGING_P (to_rtx) = 1;
}
+ if (! can_address_p (to))
+ {
+ if (to_rtx == orig_to_rtx)
+ to_rtx = copy_rtx (to_rtx);
+ MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
+ }
+
/* Check the access. */
if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
{
best_mode = QImode;
best_mode_size = GET_MODE_BITSIZE (best_mode);
- to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
+ to_addr = plus_constant (XEXP (to_rtx, 0), bitpos / BITS_PER_UNIT);
size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
size *= GET_MODE_SIZE (best_mode);
in_check_memory_usage = 0;
}
- /* If this is a varying-length object, we must get the address of
- the source and do an explicit block move. */
- if (bitsize < 0)
- {
- unsigned int from_align;
- rtx from_rtx = expand_expr_unaligned (from, &from_align);
- rtx inner_to_rtx
- = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
+ result = store_field (to_rtx, bitsize, bitpos, mode1, from,
+ (want_value
+ /* Spurious cast for HPUX compiler. */
+ ? ((enum machine_mode)
+ TYPE_MODE (TREE_TYPE (to)))
+ : VOIDmode),
+ unsignedp, TREE_TYPE (tem), get_alias_set (to));
- emit_block_move (inner_to_rtx, from_rtx, expr_size (from));
-
- free_temp_slots ();
- pop_temp_slots ();
- return to_rtx;
- }
- else
- {
- if (! can_address_p (to))
- {
- to_rtx = copy_rtx (to_rtx);
- MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
- }
-
- result = store_field (to_rtx, bitsize, bitpos, mode1, from,
- (want_value
- /* Spurious cast for HPUX compiler. */
- ? ((enum machine_mode)
- TYPE_MODE (TREE_TYPE (to)))
- : VOIDmode),
- unsignedp,
- alignment,
- int_size_in_bytes (TREE_TYPE (tem)),
- get_alias_set (to));
-
- preserve_temp_slots (result);
- free_temp_slots ();
- pop_temp_slots ();
+ preserve_temp_slots (result);
+ free_temp_slots ();
+ pop_temp_slots ();
- /* If the value is meaningful, convert RESULT to the proper mode.
- Otherwise, return nothing. */
- return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
- TYPE_MODE (TREE_TYPE (from)),
- result,
- TREE_UNSIGNED (TREE_TYPE (to)))
- : NULL_RTX);
- }
+ /* If the value is meaningful, convert RESULT to the proper mode.
+ Otherwise, return nothing. */
+ return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
+ TYPE_MODE (TREE_TYPE (from)),
+ result,
+ TREE_UNSIGNED (TREE_TYPE (to)))
+ : NULL_RTX);
}
/* If the rhs is a function call and its value is not an aggregate,
/* Handle calls that return values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
if (GET_CODE (to_rtx) == PARALLEL)
- emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
- TYPE_ALIGN (TREE_TYPE (from)));
+ emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
else if (GET_MODE (to_rtx) == BLKmode)
emit_block_move (to_rtx, value, expr_size (from));
else
temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
if (GET_CODE (to_rtx) == PARALLEL)
- emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
- TYPE_ALIGN (TREE_TYPE (from)));
+ emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
else
emit_move_insn (to_rtx, temp);
label = gen_label_rtx ();
emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
- GET_MODE (size), 0, 0, label);
+ GET_MODE (size), 0, label);
}
if (size != const0_rtx)
/* Handle calls that return values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
else if (GET_CODE (target) == PARALLEL)
- emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
- TYPE_ALIGN (TREE_TYPE (exp)));
+ emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
else if (GET_MODE (temp) == BLKmode)
emit_block_move (target, temp, expr_size (exp));
else
/* Helper function for store_constructor.
TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
TYPE is the type of the CONSTRUCTOR, not the element type.
- ALIGN and CLEARED are as for store_constructor.
+ CLEARED is as for store_constructor.
ALIAS_SET is the alias set to use for any stores.
This provides a recursive shortcut back to store_constructor when it isn't
clear a substructure if the outer structure has already been cleared. */
static void
-store_constructor_field (target, bitsize, bitpos,
- mode, exp, type, align, cleared, alias_set)
+store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
+ alias_set)
rtx target;
unsigned HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
enum machine_mode mode;
tree exp, type;
- unsigned int align;
int cleared;
int alias_set;
{
? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
- /* Show the alignment may no longer be what it was and update the alias
- set, if required. */
- if (bitpos != 0)
- align = MIN (align, (unsigned int) bitpos & - bitpos);
-
+ /* Update the alias set, if required. */
if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
&& MEM_ALIAS_SET (target) != 0)
{
set_mem_alias_set (target, alias_set);
}
- store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
+ store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
}
else
- store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
- int_size_in_bytes (type), alias_set);
+ store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
+ alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
- TARGET is either a REG or a MEM.
- ALIGN is the maximum known alignment for TARGET.
+ TARGET is either a REG or a MEM; we know it cannot conflict, since
+ safe_from_p has been called.
CLEARED is true if TARGET is known to have been zero'd.
SIZE is the number of bytes of TARGET we are allowed to modify: this
may not be the same as the size of EXP if we are assigning to a field
which has been packed to exclude padding bits. */
static void
-store_constructor (exp, target, align, cleared, size)
+store_constructor (exp, target, cleared, size)
tree exp;
rtx target;
- unsigned int align;
int cleared;
HOST_WIDE_INT size;
{
HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
- /* We know our target cannot conflict, since safe_from_p has been called. */
-#if 0
- /* Don't try copying piece by piece into a hard register
- since that is vulnerable to being clobbered by EXP.
- Instead, construct in a pseudo register and then copy it all. */
- if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
- {
- rtx temp = gen_reg_rtx (GET_MODE (target));
- store_constructor (exp, temp, align, cleared, size);
- emit_move_insn (target, temp);
- return;
- }
-#endif
-
if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
|| TREE_CODE (type) == QUAL_UNION_TYPE)
{
tree elt;
- /* Inform later passes that the whole union value is dead. */
+ /* We either clear the aggregate or indicate the value is dead. */
if ((TREE_CODE (type) == UNION_TYPE
|| TREE_CODE (type) == QUAL_UNION_TYPE)
- && ! cleared)
+ && ! cleared
+ && ! CONSTRUCTOR_ELTS (exp))
+ /* If the constructor is empty, clear the union. */
{
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
- /* If the constructor is empty, clear the union. */
- if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
- clear_storage (target, expr_size (exp));
+ clear_storage (target, expr_size (exp));
+ cleared = 1;
}
/* If we are building a static constructor into a register,
set the initial value as zero so we can fold the value into
a constant. But if more than one register is involved,
this probably loses. */
- else if (GET_CODE (target) == REG && TREE_STATIC (exp)
+ else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
&& GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
{
- if (! cleared)
- emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
-
+ emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
cleared = 1;
}
clear the whole structure first. Don't do this if TARGET is a
register whose mode size isn't equal to SIZE since clear_storage
can't handle this case. */
- else if (size > 0
+ else if (! cleared && size > 0
&& ((list_length (CONSTRUCTOR_ELTS (exp))
!= fields_length (type))
|| mostly_zeros_p (exp))
&& (GET_CODE (target) != REG
- || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
+ || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
+ == size)))
{
- if (! cleared)
- clear_storage (target, GEN_INT (size));
-
+ clear_storage (target, GEN_INT (size));
cleared = 1;
}
- else if (! cleared)
- /* Inform later passes that the old value is dead. */
+
+ if (! cleared)
emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into
to_rtx = offset_address (to_rtx, offset_rtx,
highest_pow2_factor (offset));
-
- align = DECL_OFFSET_ALIGN (field);
}
if (TREE_READONLY (field))
&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
{
tree type = TREE_TYPE (value);
+
if (TYPE_PRECISION (type) < BITS_PER_WORD)
{
type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
value = convert (type, value);
}
+
if (BYTES_BIG_ENDIAN)
value
= fold (build (LSHIFT_EXPR, type, value,
}
store_constructor_field (to_rtx, bitsize, bitpos, mode,
- TREE_VALUE (elt), type, align, cleared,
+ TREE_VALUE (elt), type, cleared,
get_alias_set (TREE_TYPE (field)));
}
}
HOST_WIDE_INT bitpos;
int unsignedp;
tree value = TREE_VALUE (elt);
- unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
tree index = TREE_PURPOSE (elt);
rtx xtarget = target;
}
store_constructor_field
- (target, bitsize, bitpos, mode, value, type, align,
- cleared, get_alias_set (elttype));
+ (target, bitsize, bitpos, mode, value, type, cleared,
+ get_alias_set (elttype));
}
}
else
highest_pow2_factor (position));
xtarget = adjust_address (xtarget, mode, 0);
if (TREE_CODE (value) == CONSTRUCTOR)
- store_constructor (value, xtarget, align, cleared,
+ store_constructor (value, xtarget, cleared,
bitsize / BITS_PER_UNIT);
else
store_expr (value, xtarget, 0);
}
store_constructor_field (target, bitsize, bitpos, mode, value,
- type, align, cleared,
- get_alias_set (elttype));
+ type, cleared, get_alias_set (elttype));
}
}
has mode VALUE_MODE if that is convenient to do.
In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
- ALIGN is the alignment that TARGET is known to have.
- TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
+ TYPE is the type of the underlying object,
ALIAS_SET is the alias set for the destination. This value will
(in general) be different from that for TARGET, since TARGET is a
reference to the containing structure. */
static rtx
-store_field (target, bitsize, bitpos, mode, exp, value_mode,
- unsignedp, align, total_size, alias_set)
+store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
+ alias_set)
rtx target;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
tree exp;
enum machine_mode value_mode;
int unsignedp;
- unsigned int align;
- HOST_WIDE_INT total_size;
+ tree type;
int alias_set;
{
HOST_WIDE_INT width_mask = 0;
side-effects. */
if (bitsize == 0)
return expand_expr (exp, const0_rtx, VOIDmode, 0);
-
- if (bitsize < HOST_BITS_PER_WIDE_INT)
+ else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
/* If we are storing into an unaligned field of an aligned union that is
{
rtx object
= assign_temp
- (build_qualified_type (type_for_mode (GET_MODE (target), 0),
- TYPE_QUAL_CONST),
+ (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
0, 1, 1);
rtx blk_object = copy_rtx (object);
PUT_MODE (blk_object, BLKmode);
+ MEM_COPY_ATTRIBUTES (blk_object, object);
if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
emit_move_insn (object, target);
- store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
- align, total_size, alias_set);
+ store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
+ alias_set);
- /* Even though we aren't returning target, we need to
- give it the updated value. */
emit_move_insn (target, object);
+ /* We want to return the BLKmode version of the data. */
return blk_object;
}
|| GET_CODE (target) == SUBREG
/* If the field isn't aligned enough to store as an ordinary memref,
store it as a bit field. */
- || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
- && (align < GET_MODE_ALIGNMENT (mode)
+ || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
+ && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
|| bitpos % GET_MODE_ALIGNMENT (mode)))
- || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
- && (TYPE_ALIGN (TREE_TYPE (exp)) > align
- || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
/* If the RHS and field are a constant size and the size of the
RHS isn't the same size as the bitfield, we must use bitfield
operations. */
boundary. If so, we simply do a block copy. */
if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
{
- unsigned int exp_align = expr_align (exp);
-
if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
|| bitpos % BITS_PER_UNIT != 0)
abort ();
target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
-
- /* Make sure that ALIGN is no stricter than the alignment of EXP. */
- align = MIN (exp_align, align);
-
- /* Find an alignment that is consistent with the bit position. */
- while ((bitpos % align) != 0)
- align >>= 1;
-
emit_block_move (target, temp,
- bitsize == -1 ? expr_size (exp)
- : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
- / BITS_PER_UNIT));
+ GEN_INT ((bitsize + BITS_PER_UNIT - 1)
+ / BITS_PER_UNIT));
return value_mode == VOIDmode ? const0_rtx : target;
}
/* Store the value in the bitfield. */
- store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
+ store_bit_field (target, bitsize, bitpos, mode, temp,
+ int_size_in_bytes (type));
+
if (value_mode != VOIDmode)
{
- /* The caller wants an rtx for the value. */
- /* If possible, avoid refetching from the bitfield itself. */
+ /* The caller wants an rtx for the value.
+ If possible, avoid refetching from the bitfield itself. */
if (width_mask != 0
&& ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
{
GET_MODE (temp) == VOIDmode
? value_mode
: GET_MODE (temp))), NULL_RTX);
+
tmode = GET_MODE (temp);
if (tmode == VOIDmode)
tmode = value_mode;
temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
}
+
return extract_bit_field (target, bitsize, bitpos, unsignedp,
- NULL_RTX, value_mode, 0, align,
- total_size);
+ NULL_RTX, value_mode, VOIDmode,
+ int_size_in_bytes (type));
}
return const0_rtx;
}
else
{
rtx addr = XEXP (target, 0);
- rtx to_rtx;
+ rtx to_rtx = target;
/* If a value is wanted, it must be the lhs;
so make the address stable for multiple use. */
&& GET_CODE (XEXP (addr, 1)) == CONST_INT
&& (XEXP (addr, 0) == virtual_incoming_args_rtx
|| XEXP (addr, 0) == virtual_stack_vars_rtx)))
- target = replace_equiv_address (target, copy_to_reg (addr));
+ to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
/* Now build a reference to just the desired component. */
- to_rtx = copy_rtx (adjust_address (target, mode,
- bitpos / BITS_PER_UNIT));
+ to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
+
+ if (to_rtx == target)
+ to_rtx = copy_rtx (to_rtx);
MEM_SET_IN_STRUCT_P (to_rtx, 1);
if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
- {
- to_rtx = copy_rtx (to_rtx);
- set_mem_alias_set (to_rtx, alias_set);
- }
+ set_mem_alias_set (to_rtx, alias_set);
return store_expr (exp, to_rtx, value_mode != VOIDmode);
}
giving the variable offset (in units) in *POFFSET.
This offset is in addition to the bit position.
If the position is not variable, we store 0 in *POFFSET.
- We set *PALIGNMENT to the alignment of the address that will be
- computed. This is the alignment of the thing we return if *POFFSET
- is zero, but can be more less strictly aligned if *POFFSET is nonzero.
If any of the extraction expressions is volatile,
we store 1 in *PVOLATILEP. Otherwise we don't change that.
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
- punsignedp, pvolatilep, palignment)
+ punsignedp, pvolatilep)
tree exp;
HOST_WIDE_INT *pbitsize;
HOST_WIDE_INT *pbitpos;
enum machine_mode *pmode;
int *punsignedp;
int *pvolatilep;
- unsigned int *palignment;
{
tree size_tree = 0;
enum machine_mode mode = VOIDmode;
tree offset = size_zero_node;
tree bit_offset = bitsize_zero_node;
- unsigned int alignment = BIGGEST_ALIGNMENT;
tree placeholder_ptr = 0;
tree tem;
bit_offset = size_binop (PLUS_EXPR, bit_offset,
DECL_FIELD_BIT_OFFSET (field));
- if (! host_integerp (offset, 0))
- alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
+ /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
}
else if (TREE_CODE (exp) == ARRAY_REF
if (TREE_THIS_VOLATILE (exp))
*pvolatilep = 1;
- /* If the offset is non-constant already, then we can't assume any
- alignment more than the alignment here. */
- if (! TREE_CONSTANT (offset))
- alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
-
exp = TREE_OPERAND (exp, 0);
}
- if (DECL_P (exp))
- alignment = MIN (alignment, DECL_ALIGN (exp));
- else if (TREE_TYPE (exp) != 0)
- alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
-
/* If OFFSET is constant, see if we can return the whole thing as a
constant bit position. Otherwise, split it up. */
if (host_integerp (offset, 0)
*pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
*pmode = mode;
- *palignment = alignment;
return exp;
}
/* We give UNSIGNEDP = 0 to expand_binop
because the only operations we are expanding here are signed ones. */
}
+
+#ifdef INSN_SCHEDULING
+ /* On machines that have insn scheduling, we want all memory reference to be
+ explicit, so we need to deal with such paradoxical SUBREGs. */
+ if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
+ && (GET_MODE_SIZE (GET_MODE (value))
+ > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
+ value
+ = simplify_gen_subreg (GET_MODE (value),
+ force_reg (GET_MODE (SUBREG_REG (value)),
+ force_operand (SUBREG_REG (value),
+ NULL_RTX)),
+ GET_MODE (SUBREG_REG (value)),
+ SUBREG_BYTE (value));
+#endif
+
return value;
}
\f
switch (TREE_CODE (exp))
{
case INTEGER_CST:
- /* If the integer is expressable in a HOST_WIDE_INT, we can find
- the lowest bit that's a one. If the result is zero or negative,
- pessimize by returning 1. This is overly-conservative, but such
- things should not happen in the offset expressions that we are
- called with. */
+      /* If the integer is expressible in a HOST_WIDE_INT, we can find the
+ lowest bit that's a one. If the result is zero, pessimize by
+ returning 1. This is overly-conservative, but such things should not
+ happen in the offset expressions that we are called with. */
if (host_integerp (exp, 0))
{
c0 = tree_low_cst (exp, 0);
- return c0 >= 0 ? c0 & -c0 : 1;
+ c0 = c0 < 0 ? - c0 : c0;
+ return c0 != 0 ? c0 & -c0 : 1;
}
break;
return MAX (1, c0 / c1);
case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
- case COMPOUND_EXPR: case SAVE_EXPR:
+ case COMPOUND_EXPR: case SAVE_EXPR: case WITH_RECORD_EXPR:
return highest_pow2_factor (TREE_OPERAND (exp, 0));
case COND_EXPR:
* TYPE_QUAL_CONST))),
TREE_ADDRESSABLE (exp), 1, 1);
- store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
+ store_constructor (exp, target, 0,
int_size_in_bytes (TREE_TYPE (exp)));
return target;
}
HOST_WIDE_INT bitsize, bitpos;
tree offset;
int volatilep = 0;
- unsigned int alignment;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep,
- &alignment);
+ &mode1, &unsignedp, &volatilep);
rtx orig_op0;
/* If we got back the original object, something is wrong. Perhaps
| TYPE_QUAL_CONST));
rtx memloc = assign_temp (nt, 1, 1, 1);
- mark_temp_addr_taken (memloc);
emit_move_insn (memloc, op0);
op0 = memloc;
}
&& bitsize != 0
&& (bitpos % bitsize) == 0
&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
- && alignment == GET_MODE_ALIGNMENT (mode1))
+ && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
{
rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
|| (mode1 != BLKmode
- && SLOW_UNALIGNED_ACCESS (mode1, alignment)
+ && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
&& ((TYPE_ALIGN (TREE_TYPE (tem))
< GET_MODE_ALIGNMENT (mode))
|| (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
== INTEGER_CST)
&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
- bitsize))
- || (mode == BLKmode
- && SLOW_UNALIGNED_ACCESS (mode, alignment)
- && (TYPE_ALIGN (type) > alignment
- || bitpos % TYPE_ALIGN (type) != 0)))
+ bitsize)))
{
enum machine_mode ext_mode = mode;
target = assign_temp (type, 0, 1, 1);
emit_block_move (target, op0,
- bitsize == -1 ? expr_size (exp)
- : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
- / BITS_PER_UNIT));
+ GEN_INT ((bitsize + BITS_PER_UNIT - 1)
+ / BITS_PER_UNIT));
return target;
}
op0 = validize_mem (op0);
if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
- mark_reg_pointer (XEXP (op0, 0), alignment);
+ mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
op0 = extract_bit_field (op0, bitsize, bitpos,
unsignedp, target, ext_mode, ext_mode,
- alignment,
int_size_in_bytes (TREE_TYPE (tem)));
/* If the result is a record type and BITSIZE is narrower than
set_mem_attributes (op0, exp, 0);
if (GET_CODE (XEXP (op0, 0)) == REG)
- mark_reg_pointer (XEXP (op0, 0), alignment);
+ mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
MEM_VOLATILE_P (op0) |= volatilep;
if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
if (! (GET_CODE (index_val) == CONST_INT
&& GET_CODE (lo_r) == CONST_INT))
- {
- emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
- GET_MODE (index_val), iunsignedp, 0, op1);
- }
+ emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
+ GET_MODE (index_val), iunsignedp, op1);
if (! (GET_CODE (index_val) == CONST_INT
&& GET_CODE (hi_r) == CONST_INT))
- {
- emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
- GET_MODE (index_val), iunsignedp, 0, op1);
- }
+ emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
+ GET_MODE (index_val), iunsignedp, op1);
/* Calculate the element number of bit zero in the first word
of the set. */
* BITS_PER_UNIT),
(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
- VOIDmode, 0, BITS_PER_UNIT,
- int_size_in_bytes (type), 0);
+ VOIDmode, 0, type, 0);
else
abort ();
{
int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
- unsignedp, mode, NULL_RTX, 0, NULL_RTX,
+ unsignedp, mode, NULL_RTX, NULL_RTX,
op0);
}
emit_move_insn (target, op1);
op1 = gen_label_rtx ();
emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
- GET_MODE (temp), unsignedp, 0, op1);
+ GET_MODE (temp), unsignedp, op1);
emit_move_insn (temp, const1_rtx);
emit_label (op1);
return temp;
return expand_increment (exp, ! ignore, ignore);
case ADDR_EXPR:
- /* If nonzero, TEMP will be set to the address of something that might
- be a MEM corresponding to a stack slot. */
- temp = 0;
-
/* Are we taking the address of a nested function? */
if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
&& decl_function_context (TREE_OPERAND (exp, 0)) != 0
return a zero. */
else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
return const0_rtx;
+ /* If we are taking the address of a constant and are at the
+ top level, we have to use output_constant_def since we can't
+ call force_const_mem at top level. */
+ else if (cfun == 0
+ && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
+ || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
+ == 'c')))
+ op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
else
{
/* We make sure to pass const0_rtx down if we came in with
if (CONSTANT_P (op0))
op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
op0);
- else if (GET_CODE (op0) == MEM)
- {
- mark_temp_addr_taken (op0);
- temp = XEXP (op0, 0);
- }
-
else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
|| GET_CODE (op0) == PARALLEL)
{
- /* If this object is in a register, it must be not
- be BLKmode. */
+ /* If this object is in a register, it can't be BLKmode. */
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
tree nt = build_qualified_type (inner_type,
(TYPE_QUALS (inner_type)
| TYPE_QUAL_CONST));
rtx memloc = assign_temp (nt, 1, 1, 1);
- mark_temp_addr_taken (memloc);
if (GET_CODE (op0) == PARALLEL)
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
- emit_group_store (memloc, op0,
- int_size_in_bytes (inner_type),
- TYPE_ALIGN (inner_type));
+ emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
else
emit_move_insn (memloc, op0);
+
op0 = memloc;
}
if (GET_CODE (op0) != MEM)
abort ();
+ mark_temp_addr_taken (op0);
if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
{
- temp = XEXP (op0, 0);
+ op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
+ if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
&& mode == ptr_mode)
- temp = convert_memory_address (ptr_mode, temp);
+ op0 = convert_memory_address (ptr_mode, op0);
#endif
- return temp;
+ return op0;
+ }
+
+ /* If OP0 is not aligned at least as much as the type requires,
+ we need to make a temporary, copy OP0 to it, and take the
+ address of the temporary. */
+ if (GET_MODE (op0) == BLKmode
+ && expr_align (TREE_OPERAND (exp, 0)) > MEM_ALIGN (op0))
+ {
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ rtx new
+ = assign_stack_temp_for_type
+ (TYPE_MODE (inner_type),
+ MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
+ : int_size_in_bytes (TREE_TYPE (inner_type)),
+ 1, build_qualified_type (inner_type,
+ (TYPE_QUALS (inner_type)
+ | TYPE_QUAL_CONST)));
+
+ emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
+ op0 = new;
}
op0 = force_operand (XEXP (op0, 0), target);
&& ! REG_USERVAR_P (op0))
mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
- /* If we might have had a temp slot, add an equivalent address
- for it. */
- if (temp != 0)
- update_temp_slot_address (temp, op0);
-
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
&& mode == ptr_mode)
return temp;
}
\f
-/* Similar to expand_expr, except that we don't specify a target, target
- mode, or modifier and we return the alignment of the inner type. This is
- used in cases where it is not necessary to align the result to the
- alignment of its type as long as we know the alignment of the result, for
- example for comparisons of BLKmode values. */
-
-static rtx
-expand_expr_unaligned (exp, palign)
- tree exp;
- unsigned int *palign;
-{
- rtx op0;
- tree type = TREE_TYPE (exp);
- enum machine_mode mode = TYPE_MODE (type);
-
- /* Default the alignment we return to that of the type. */
- *palign = TYPE_ALIGN (type);
-
- /* The only cases in which we do anything special is if the resulting mode
- is BLKmode. */
- if (mode != BLKmode)
- return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
-
- switch (TREE_CODE (exp))
- {
- case CONVERT_EXPR:
- case NOP_EXPR:
- case NON_LVALUE_EXPR:
- /* Conversions between BLKmode values don't change the underlying
- alignment or value. */
- if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
- return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
- break;
-
- case ARRAY_REF:
- /* Much of the code for this case is copied directly from expand_expr.
- We need to duplicate it here because we will do something different
- in the fall-through case, so we need to handle the same exceptions
- it does. */
- {
- tree array = TREE_OPERAND (exp, 0);
- tree domain = TYPE_DOMAIN (TREE_TYPE (array));
- tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
- tree index = convert (sizetype, TREE_OPERAND (exp, 1));
- HOST_WIDE_INT i;
-
- if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
- abort ();
-
- /* Optimize the special-case of a zero lower bound.
-
- We convert the low_bound to sizetype to avoid some problems
- with constant folding. (E.g. suppose the lower bound is 1,
- and its mode is QI. Without the conversion, (ARRAY
- +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
- +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
-
- if (! integer_zerop (low_bound))
- index = size_diffop (index, convert (sizetype, low_bound));
-
- /* If this is a constant index into a constant array,
- just get the value from the array. Handle both the cases when
- we have an explicit constructor and when our operand is a variable
- that was declared const. */
-
- if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
- && host_integerp (index, 0)
- && 0 > compare_tree_int (index,
- list_length (CONSTRUCTOR_ELTS
- (TREE_OPERAND (exp, 0)))))
- {
- tree elem;
-
- for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
- i = tree_low_cst (index, 0);
- elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
- ;
-
- if (elem)
- return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
- }
-
- else if (optimize >= 1
- && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
- && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
- && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
- {
- if (TREE_CODE (index) == INTEGER_CST)
- {
- tree init = DECL_INITIAL (array);
-
- if (TREE_CODE (init) == CONSTRUCTOR)
- {
- tree elem;
-
- for (elem = CONSTRUCTOR_ELTS (init);
- ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
- elem = TREE_CHAIN (elem))
- ;
-
- if (elem)
- return expand_expr_unaligned (fold (TREE_VALUE (elem)),
- palign);
- }
- }
- }
- }
- /* Fall through. */
-
- case COMPONENT_REF:
- case BIT_FIELD_REF:
- case ARRAY_RANGE_REF:
- /* If the operand is a CONSTRUCTOR, we can just extract the
- appropriate field if it is present. Don't do this if we have
- already written the data since we want to refer to that copy
- and varasm.c assumes that's what we'll do. */
- if (TREE_CODE (exp) == COMPONENT_REF
- && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
- && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
- {
- tree elt;
-
- for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
- elt = TREE_CHAIN (elt))
- if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
- /* Note that unlike the case in expand_expr, we know this is
- BLKmode and hence not an integer. */
- return expand_expr_unaligned (TREE_VALUE (elt), palign);
- }
-
- {
- enum machine_mode mode1;
- HOST_WIDE_INT bitsize, bitpos;
- tree offset;
- int volatilep = 0;
- unsigned int alignment;
- int unsignedp;
- tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep,
- &alignment);
-
- /* If we got back the original object, something is wrong. Perhaps
- we are evaluating an expression too early. In any event, don't
- infinitely recurse. */
- if (tem == exp)
- abort ();
-
- op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
-
- /* If this is a constant, put it into a register if it is a
- legitimate constant and OFFSET is 0 and memory if it isn't. */
- if (CONSTANT_P (op0))
- {
- enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
-
- if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
- && offset == 0)
- op0 = force_reg (inner_mode, op0);
- else
- op0 = validize_mem (force_const_mem (inner_mode, op0));
- }
-
- if (offset != 0)
- {
- rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
-
- /* If this object is in a register, put it into memory.
- This case can't occur in C, but can in Ada if we have
- unchecked conversion of an expression from a scalar type to
- an array or record type. */
- if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
- || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
- {
- tree nt = build_qualified_type (TREE_TYPE (tem),
- (TYPE_QUALS (TREE_TYPE (tem))
- | TYPE_QUAL_CONST));
- rtx memloc = assign_temp (nt, 1, 1, 1);
-
- mark_temp_addr_taken (memloc);
- emit_move_insn (memloc, op0);
- op0 = memloc;
- }
-
- if (GET_CODE (op0) != MEM)
- abort ();
-
- if (GET_MODE (offset_rtx) != ptr_mode)
- offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
-
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (offset_rtx) != Pmode)
- offset_rtx = convert_memory_address (Pmode, offset_rtx);
-#endif
-
- op0 = offset_address (op0, offset_rtx,
- highest_pow2_factor (offset));
- }
-
- /* Don't forget about volatility even if this is a bitfield. */
- if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
- {
- op0 = copy_rtx (op0);
- MEM_VOLATILE_P (op0) = 1;
- }
-
- /* Check the access. */
- if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
- {
- rtx to;
- int size;
-
- to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
- size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
-
- /* Check the access right of the pointer. */
- in_check_memory_usage = 1;
- if (size > BITS_PER_UNIT)
- emit_library_call (chkr_check_addr_libfunc,
- LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
- to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
- TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_RO),
- TYPE_MODE (integer_type_node));
- in_check_memory_usage = 0;
- }
-
- /* In cases where an aligned union has an unaligned object
- as a field, we might be extracting a BLKmode value from
- an integer-mode (e.g., SImode) object. Handle this case
- by doing the extract into an object as wide as the field
- (which we know to be the width of a basic mode), then
- storing into memory, and changing the mode to BLKmode.
- If we ultimately want the address (EXPAND_CONST_ADDRESS or
- EXPAND_INITIALIZER), then we must not copy to a temporary. */
- if (mode1 == VOIDmode
- || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
- || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
- && (TYPE_ALIGN (type) > alignment
- || bitpos % TYPE_ALIGN (type) != 0)))
- {
- enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
-
- if (ext_mode == BLKmode)
- {
- /* In this case, BITPOS must start at a byte boundary. */
- if (GET_CODE (op0) != MEM
- || bitpos % BITS_PER_UNIT != 0)
- abort ();
-
- op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
- }
- else
- {
- tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
- TYPE_QUAL_CONST);
- rtx new = assign_temp (nt, 0, 1, 1);
-
- op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
- unsignedp, NULL_RTX, ext_mode,
- ext_mode, alignment,
- int_size_in_bytes (TREE_TYPE (tem)));
-
- /* If the result is a record type and BITSIZE is narrower than
- the mode of OP0, an integral mode, and this is a big endian
- machine, we must put the field into the high-order bits. */
- if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
- && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
- && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
- op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
- size_int (GET_MODE_BITSIZE
- (GET_MODE (op0))
- - bitsize),
- op0, 1);
-
- emit_move_insn (new, op0);
- op0 = copy_rtx (new);
- PUT_MODE (op0, BLKmode);
- }
- }
- else
- /* Get a reference to just this component. */
- op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
-
- set_mem_attributes (op0, exp, 0);
-
- /* Adjust the alignment in case the bit position is not
- a multiple of the alignment of the inner object. */
- while (bitpos % alignment != 0)
- alignment >>= 1;
-
- if (GET_CODE (XEXP (op0, 0)) == REG)
- mark_reg_pointer (XEXP (op0, 0), alignment);
-
- MEM_IN_STRUCT_P (op0) = 1;
- MEM_VOLATILE_P (op0) |= volatilep;
-
- *palign = alignment;
- return op0;
- }
-
- default:
- break;
-
- }
-
- return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
-}
-\f
/* Return the tree node if a ARG corresponds to a string constant or zero
if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
in bytes within the string that ARG is accessing. The type of the
tree type;
tree offset;
int volatilep = 0;
- unsigned int alignment;
/* Get description of this reference. We don't actually care
about the underlying object here. */
get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
- &unsignedp, &volatilep, &alignment);
+ &unsignedp, &volatilep);
type = type_for_size (bitsize, unsignedp);
if (! SLOW_BYTE_ACCESS
else if (GET_MODE (temp) != VOIDmode)
do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
NE, TREE_UNSIGNED (TREE_TYPE (exp)),
- GET_MODE (temp), NULL_RTX, 0,
+ GET_MODE (temp), NULL_RTX,
if_false_label, if_true_label);
else
abort ();
/* All but high-order word must be compared as unsigned. */
do_compare_rtx_and_jump (op0_word, op1_word, GT,
- (unsignedp || i > 0), word_mode, NULL_RTX, 0,
+ (unsignedp || i > 0), word_mode, NULL_RTX,
NULL_RTX, if_true_label);
/* Consider lower words only if these are equal. */
do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
- NULL_RTX, 0, NULL_RTX, if_false_label);
+ NULL_RTX, NULL_RTX, if_false_label);
}
if (if_false_label)
do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
operand_subword_force (op1, i, mode),
EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
- word_mode, NULL_RTX, 0, if_false_label,
- NULL_RTX);
+ word_mode, NULL_RTX, if_false_label, NULL_RTX);
if (if_true_label)
emit_jump (if_true_label);
if (part != 0)
{
do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
- NULL_RTX, 0, if_false_label, if_true_label);
+ NULL_RTX, if_false_label, if_true_label);
return;
}
for (i = 0; i < nwords; i++)
do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
- const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
+ const0_rtx, EQ, 1, word_mode, NULL_RTX,
if_false_label, NULL_RTX);
if (if_true_label)
things pushed on the stack that aren't yet used.
If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
- compared.
-
- If ALIGN is non-zero, it is the alignment of this type; if zero, the
- size of MODE should be used. */
+ compared. */
rtx
-compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
+compare_from_rtx (op0, op1, code, unsignedp, mode, size)
rtx op0, op1;
enum rtx_code code;
int unsignedp;
enum machine_mode mode;
rtx size;
- unsigned int align;
{
rtx tem;
}
#endif
- emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
+ emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
The decision as to signed or unsigned comparison must be made by the caller.
If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
- compared.
-
- If ALIGN is non-zero, it is the alignment of this type; if zero, the
- size of MODE should be used. */
+ compared. */
void
-do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
+do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
if_false_label, if_true_label)
rtx op0, op1;
enum rtx_code code;
int unsignedp;
enum machine_mode mode;
rtx size;
- unsigned int align;
rtx if_false_label, if_true_label;
{
rtx tem;
if_true_label = gen_label_rtx ();
}
- emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
+ emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
if_true_label);
if (if_false_label)
enum rtx_code signed_code, unsigned_code;
rtx if_false_label, if_true_label;
{
- unsigned int align0, align1;
rtx op0, op1;
tree type;
enum machine_mode mode;
enum rtx_code code;
/* Don't crash if the comparison was erroneous. */
- op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
+ op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
return;
- op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
return;
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
((mode == BLKmode)
? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
- MIN (align0, align1),
if_false_label, if_true_label);
}
\f
emit_move_insn (target, invert ? const0_rtx : const1_rtx);
result = compare_from_rtx (op0, op1, code, unsignedp,
- operand_mode, NULL_RTX, 0);
+ operand_mode, NULL_RTX);
if (GET_CODE (result) == CONST_INT)
return (((result == const0_rtx && ! invert)
|| (result != const0_rtx && invert))
minval = integer_zero_node;
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
- omode, 1, 0, default_label);
+ omode, 1, default_label);
/* Now we can safely truncate. */
index = convert_to_mode (index_mode, index, 0);
}
the maximum value of the range. */
emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
- 0, default_label);
+ default_label);
/* If index is in range, it must fit in Pmode.
Convert to Pmode so we can index with it. */