/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
#include "recog.h"
+#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
rtx to_addr;
int autinc_to;
int explicit_inc_to;
- int to_struct;
- int to_readonly;
rtx from;
rtx from_addr;
int autinc_from;
int explicit_inc_from;
- int from_struct;
- int from_readonly;
- int len;
- int offset;
+ unsigned HOST_WIDE_INT len;
+ HOST_WIDE_INT offset;
int reverse;
};
rtx to_addr;
int autinc_to;
int explicit_inc_to;
- int to_struct;
- int len;
- int offset;
+ unsigned HOST_WIDE_INT len;
+ HOST_WIDE_INT offset;
int reverse;
};
static rtx get_push_address PARAMS ((int));
static rtx enqueue_insn PARAMS ((rtx, rtx));
-static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
+static unsigned HOST_WIDE_INT move_by_pieces_ninsns
+ PARAMS ((unsigned HOST_WIDE_INT,
+ unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
struct move_by_pieces *));
-static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
+static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
+ unsigned int));
static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
enum machine_mode,
struct clear_by_pieces *));
+static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
}
/* Small sanity check that the queue is empty at the end of a function. */
+
void
finish_expr_for_function ()
{
register rtx y = XEXP (x, 0);
register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
- RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
MEM_COPY_ATTRIBUTES (new, x);
- MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
if (QUEUED_INSN (y))
{
return;
}
+ if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
+ {
+ if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
+ abort ();
+
+ if (VECTOR_MODE_P (to_mode))
+ from = gen_rtx_SUBREG (to_mode, from, 0);
+ else
+ to = gen_rtx_SUBREG (from_mode, to, 0);
+
+ emit_move_insn (to, from);
+ return;
+ }
+
+ if (to_real != from_real)
+ abort ();
+
if (to_real)
{
rtx value;
void
move_by_pieces (to, from, len, align)
rtx to, from;
- int len;
+ unsigned HOST_WIDE_INT len;
unsigned int align;
{
struct move_by_pieces data;
if (data.reverse) data.offset = len;
data.len = len;
- data.to_struct = MEM_IN_STRUCT_P (to);
- data.from_struct = MEM_IN_STRUCT_P (from);
- data.to_readonly = RTX_UNCHANGING_P (to);
- data.from_readonly = RTX_UNCHANGING_P (from);
-
/* If copying requires more than two move insns,
copy addresses to registers (to make displacements shorter)
and use post-increment if available. */
/* Return number of insns required to move L bytes by pieces.
ALIGN (in bits) is maximum alignment we can assume. */
-static int
+static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
- unsigned int l;
+ unsigned HOST_WIDE_INT l;
unsigned int align;
{
- register int n_insns = 0;
- unsigned int max_size = MOVE_MAX + 1;
+ unsigned HOST_WIDE_INT n_insns = 0;
+ unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
enum machine_mode mode;
struct move_by_pieces *data;
{
- register int size = GET_MODE_SIZE (mode);
- register rtx to1, from1;
+ unsigned int size = GET_MODE_SIZE (mode);
+ rtx to1, from1;
while (data->len >= size)
{
- if (data->reverse) data->offset -= size;
-
- to1 = (data->autinc_to
- ? gen_rtx_MEM (mode, data->to_addr)
- : copy_rtx (change_address (data->to, mode,
- plus_constant (data->to_addr,
- data->offset))));
- MEM_IN_STRUCT_P (to1) = data->to_struct;
- RTX_UNCHANGING_P (to1) = data->to_readonly;
-
- from1
- = (data->autinc_from
- ? gen_rtx_MEM (mode, data->from_addr)
- : copy_rtx (change_address (data->from, mode,
- plus_constant (data->from_addr,
- data->offset))));
- MEM_IN_STRUCT_P (from1) = data->from_struct;
- RTX_UNCHANGING_P (from1) = data->from_readonly;
+ if (data->reverse)
+ data->offset -= size;
+
+ if (data->autinc_to)
+ {
+ to1 = gen_rtx_MEM (mode, data->to_addr);
+ MEM_COPY_ATTRIBUTES (to1, data->to);
+ }
+ else
+ to1 = change_address (data->to, mode,
+ plus_constant (data->to_addr, data->offset));
+
+ if (data->autinc_from)
+ {
+ from1 = gen_rtx_MEM (mode, data->from_addr);
+ MEM_COPY_ATTRIBUTES (from1, data->from);
+ }
+ else
+ from1 = change_address (data->from, mode,
+ plus_constant (data->from_addr, data->offset));
if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
emit_insn ((*genfun) (to1, from1));
+
if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
- if (! data->reverse) data->offset += size;
+ if (! data->reverse)
+ data->offset += size;
data->len -= size;
}
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
enum machine_mode mode;
+ /* Since this is a move insn, we don't care about volatility. */
+ volatile_ok = 1;
+
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
if (pat)
{
emit_insn (pat);
+ volatile_ok = 0;
return 0;
}
else
}
}
+ volatile_ok = 0;
+
/* X, Y, or SIZE may have been passed through protect_from_queue.
It is unsafe to save the value generated by protect_from_queue
src = orig_src;
if (GET_CODE (src) != MEM)
{
- if (GET_CODE (src) == VOIDmode)
+ if (GET_MODE (src) == VOIDmode)
src = gen_reg_rtx (GET_MODE (dst));
else
src = gen_reg_rtx (GET_MODE (orig_src));
return tgtblk;
}
-
/* Add a USE expression for REG to the (possibly empty) list pointed
to by CALL_FUSAGE. REG must denote a hard register. */
static void
clear_by_pieces (to, len, align)
rtx to;
- int len;
+ unsigned HOST_WIDE_INT len;
unsigned int align;
{
struct clear_by_pieces data;
rtx to_addr = XEXP (to, 0);
- unsigned int max_size = MOVE_MAX_PIECES + 1;
+ unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
enum machine_mode mode = VOIDmode, tmode;
enum insn_code icode;
if (data.reverse) data.offset = len;
data.len = len;
- data.to_struct = MEM_IN_STRUCT_P (to);
-
/* If copying requires more than two move insns,
copy addresses to registers (to make displacements shorter)
and use post-increment if available. */
data.autinc_to = 1;
data.explicit_inc_to = -1;
}
- if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
+
+ if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse
+ && ! data.autinc_to)
{
data.to_addr = copy_addr_to_reg (to_addr);
data.autinc_to = 1;
data.explicit_inc_to = 1;
}
- if (!data.autinc_to && CONSTANT_P (to_addr))
+
+ if ( !data.autinc_to && CONSTANT_P (to_addr))
data.to_addr = copy_addr_to_reg (to_addr);
}
enum machine_mode mode;
struct clear_by_pieces *data;
{
- register int size = GET_MODE_SIZE (mode);
- register rtx to1;
+ unsigned int size = GET_MODE_SIZE (mode);
+ rtx to1;
while (data->len >= size)
{
- if (data->reverse) data->offset -= size;
+ if (data->reverse)
+ data->offset -= size;
- to1 = (data->autinc_to
- ? gen_rtx_MEM (mode, data->to_addr)
- : copy_rtx (change_address (data->to, mode,
- plus_constant (data->to_addr,
- data->offset))));
- MEM_IN_STRUCT_P (to1) = data->to_struct;
+ if (data->autinc_to)
+ {
+ to1 = gen_rtx_MEM (mode, data->to_addr);
+ MEM_COPY_ATTRIBUTES (to1, data->to);
+ }
+ else
+ to1 = change_address (data->to, mode,
+ plus_constant (data->to_addr, data->offset));
if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
emit_insn ((*genfun) (to1, const0_rtx));
+
if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
- if (! data->reverse) data->offset += size;
+ if (! data->reverse)
+ data->offset += size;
data->len -= size;
}
regardless of machine's endianness. */
#ifdef STACK_GROWS_DOWNWARD
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, XEXP (x, 0)),
gen_imagpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, XEXP (x, 0)),
gen_realpart (submode, y)));
#else
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, XEXP (x, 0)),
gen_realpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx_MEM (submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, XEXP (x, 0)),
gen_imagpart (submode, y)));
#endif
}
else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
{
rtx last_insn = 0;
- rtx seq;
+ rtx seq, inner;
int need_clobber;
#ifdef PUSH_ROUNDING
}
#endif
+ /* If we are in reload, see if either operand is a MEM whose address
+ is scheduled for replacement. */
+ if (reload_in_progress && GET_CODE (x) == MEM
+ && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
+ {
+ rtx new = gen_rtx_MEM (GET_MODE (x), inner);
+
+ MEM_COPY_ATTRIBUTES (new, x);
+ x = new;
+ }
+ if (reload_in_progress && GET_CODE (y) == MEM
+ && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
+ {
+ rtx new = gen_rtx_MEM (GET_MODE (y), inner);
+
+ MEM_COPY_ATTRIBUTES (new, y);
+ y = new;
+ }
+
start_sequence ();
need_clobber = 0;
anti_adjust_stack (size);
else
{
- rtx temp = copy_to_mode_reg (Pmode, size);
+ temp = copy_to_mode_reg (Pmode, size);
if (extra != 0)
temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
temp, 0, OPTAB_LIB_WIDEN);
if (1)
#endif
{
-
/* Return the lowest stack address when STACK or ARGS grow downward and
we are not accumulating outgoing arguments (the c4x port uses such
conventions). */
- INTVAL (size) - (below ? 0 : extra));
else if (extra != 0 && !below)
temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
- negate_rtx (Pmode, plus_constant (size, extra)));
+ negate_rtx (Pmode, plus_constant (size, extra)));
else
temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, size));
else
#endif /* PUSH_ROUNDING */
{
+ rtx target;
+
/* Otherwise make space on the stack and copy the data
to the address of that space. */
skip));
if (current_function_check_memory_usage && ! in_check_memory_usage)
{
- rtx target;
-
in_check_memory_usage = 1;
target = copy_to_reg (temp);
if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
in_check_memory_usage = 0;
}
+ target = gen_rtx_MEM (BLKmode, temp);
+
+ if (type != 0)
+ {
+ set_mem_attributes (target, type, 1);
+ /* Function incoming arguments may overlap with sibling call
+ outgoing arguments and we cannot allow reordering of reads
+ from function arguments with stores to outgoing arguments
+ of sibling calls. */
+ MEM_ALIAS_SET (target) = 0;
+ }
+
/* TEMP is the address of the block. Copy the data there. */
if (GET_CODE (size) == CONST_INT
&& MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
{
- move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
- INTVAL (size), align);
+ move_by_pieces (target, xinner, INTVAL (size), align);
goto ret;
}
else
{
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
enum machine_mode mode;
- rtx target = gen_rtx_MEM (BLKmode, temp);
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
mode != VOIDmode;
{
rtx addr;
rtx target = NULL_RTX;
+ rtx dest;
/* Push padding now if padding above and stack grows down,
or if padding below and stack grows up.
target = addr;
}
- emit_move_insn (gen_rtx_MEM (mode, addr), x);
+ dest = gen_rtx_MEM (mode, addr);
+ if (type != 0)
+ {
+ set_mem_attributes (dest, type, 1);
+ /* Function incoming arguments may overlap with sibling call
+ outgoing arguments and we cannot allow reordering of reads
+ from function arguments with stores to outgoing arguments
+ of sibling calls. */
+ MEM_ALIAS_SET (dest) = 0;
+ }
+
+ emit_move_insn (dest, x);
if (current_function_check_memory_usage && ! in_check_memory_usage)
{
if (extra && args_addr == 0 && where_pad == stack_direction)
anti_adjust_stack (GEN_INT (extra));
- if (alignment_pad)
+ if (alignment_pad && args_addr == 0)
anti_adjust_stack (alignment_pad);
}
\f
+/* Return X if X can be used as a subtarget in a sequence of arithmetic
+ operations. */
+
+static rtx
+get_subtarget (x)
+ rtx x;
+{
+ /* Return 0 when X cannot safely hold intermediate results; callers
+ treat a 0 result as "no subtarget" and fall back to a fresh target. */
+ return ((x == 0
+ /* Only registers can be subtargets. */
+ || GET_CODE (x) != REG
+ /* If the register is readonly, it can't be set more than once. */
+ || RTX_UNCHANGING_P (x)
+ /* Don't use hard regs to avoid extending their life. */
+ || REGNO (x) < FIRST_PSEUDO_REGISTER
+ /* Avoid subtargets inside loops,
+ since they hide some invariant expressions. */
+ || preserve_subexpressions_p ())
+ ? 0 : x);
+}
+
/* Expand an assignment that stores the value of FROM into TO.
If WANT_VALUE is nonzero, return an rtx for the value of TO.
(This may contain a QUEUED rtx;
size *= GET_MODE_SIZE (best_mode);
/* Check the access right of the pointer. */
+ in_check_memory_usage = 1;
if (size)
emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
to_addr, Pmode,
GEN_INT (size), TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_WO),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
/* If this is a varying-length object, we must get the address of
&& GET_CODE (target) == MEM
&& AGGREGATE_TYPE_P (TREE_TYPE (exp)))
{
+ in_check_memory_usage = 1;
if (GET_CODE (temp) == MEM)
emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
XEXP (target, 0), Pmode,
expr_size (exp), TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_WO),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
/* If value was not generated in the target, store it there.
= size_binop (MIN_EXPR,
make_tree (sizetype, size),
size_int (TREE_STRING_LENGTH (exp)));
+ unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
VOIDmode, 0);
rtx label = 0;
{
addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
size = plus_constant (size, - TREE_STRING_LENGTH (exp));
+ align = MIN (align, (BITS_PER_UNIT
+ * (INTVAL (copy_size_rtx)
+ & - INTVAL (copy_size_rtx))));
}
else
{
copy_size_rtx, NULL_RTX, 0,
OPTAB_LIB_WIDEN);
+ align = BITS_PER_UNIT;
label = gen_label_rtx ();
emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
GET_MODE (size), 0, 0, label);
}
+ align = MIN (align, expr_align (copy_size));
if (size != const0_rtx)
{
+ rtx dest = gen_rtx_MEM (BLKmode, addr);
+
+ MEM_COPY_ATTRIBUTES (dest, target);
+
/* Be sure we can write on ADDR. */
+ in_check_memory_usage = 1;
if (current_function_check_memory_usage)
emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
addr, Pmode,
size, TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_WO),
TYPE_MODE (integer_type_node));
-#ifdef TARGET_MEM_FUNCTIONS
- emit_library_call (memset_libfunc, 0, VOIDmode, 3,
- addr, ptr_mode,
- const0_rtx, TYPE_MODE (integer_type_node),
- convert_to_mode (TYPE_MODE (sizetype),
- size,
- TREE_UNSIGNED (sizetype)),
- TYPE_MODE (sizetype));
-#else
- emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
- addr, ptr_mode,
- convert_to_mode (TYPE_MODE (integer_type_node),
- size,
- TREE_UNSIGNED (integer_type_node)),
- TYPE_MODE (integer_type_node));
-#endif
+ in_check_memory_usage = 0;
+ clear_storage (dest, size, align);
}
if (label)
rtx tmp;
register rtx op2;
/* Use subtarget as the target for operand 0 of a binary operation. */
- register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
+ register rtx subtarget = get_subtarget (target);
/* Check for a PIC address load. */
if (flag_pic
return 0;
save_expr_rewritten[save_expr_count++] = exp;
- nops = tree_code_length[(int) SAVE_EXPR];
+ nops = TREE_CODE_LENGTH (SAVE_EXPR);
for (i = 0; i < nops; i++)
{
tree operand = TREE_OPERAND (exp, i);
if (exp_rtl)
break;
- nops = tree_code_length[(int) TREE_CODE (exp)];
+ nops = TREE_CODE_LENGTH (TREE_CODE (exp));
for (i = 0; i < nops; i++)
if (TREE_OPERAND (exp, i) != 0
&& ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
mode = TYPE_MODE (type);
/* Use subtarget as the target for operand 0 of a binary operation. */
- subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
+ subtarget = get_subtarget (target);
original_target = target;
ignore = (target == const0_rtx
|| ((code == NON_LVALUE_EXPR || code == NOP_EXPR
else
ro_modifier = EXPAND_NORMAL;
- /* Don't use hard regs as subtargets, because the combiner
- can only handle pseudo regs. */
- if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
- subtarget = 0;
- /* Avoid subtargets inside loops,
- since they hide some invariant expressions. */
- if (preserve_subexpressions_p ())
- subtarget = 0;
-
/* If we are going to ignore this result, we need only do something
if there is a side-effect somewhere in the expression. If there
is, short-circuit the most common cases here. Note that we must
enum memory_use_mode memory_usage;
memory_usage = get_memory_usage_from_modifier (modifier);
+ in_check_memory_usage = 1;
if (memory_usage != MEMORY_USE_DONT)
emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
XEXP (DECL_RTL (exp), 0), Pmode,
TYPE_MODE (sizetype),
GEN_INT (memory_usage),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
/* ... fall through ... */
abort ();
addr = XEXP (DECL_RTL (exp), 0);
if (GET_CODE (addr) == MEM)
- addr = gen_rtx_MEM (Pmode,
- fix_lexical_addr (XEXP (addr, 0), exp));
+ addr = change_address (addr, Pmode,
+ fix_lexical_addr (XEXP (addr, 0), exp));
else
addr = fix_lexical_addr (addr, exp);
+
temp = change_address (DECL_RTL (exp), mode, addr);
}
case EXPR_WITH_FILE_LOCATION:
{
rtx to_return;
- char *saved_input_filename = input_filename;
+ const char *saved_input_filename = input_filename;
int saved_lineno = lineno;
input_filename = EXPR_WFL_FILENAME (exp);
lineno = EXPR_WFL_LINENO (exp);
case INDIRECT_REF:
{
tree exp1 = TREE_OPERAND (exp, 0);
- tree exp2;
tree index;
tree string = string_constant (exp1, &index);
}
temp = gen_rtx_MEM (mode, op0);
- /* If address was computed by addition,
- mark this as an element of an aggregate. */
- if (TREE_CODE (exp1) == PLUS_EXPR
- || (TREE_CODE (exp1) == SAVE_EXPR
- && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
- || AGGREGATE_TYPE_P (TREE_TYPE (exp))
- || (TREE_CODE (exp1) == ADDR_EXPR
- && (exp2 = TREE_OPERAND (exp1, 0))
- && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
- MEM_SET_IN_STRUCT_P (temp, 1);
-
- MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
- MEM_ALIAS_SET (temp) = get_alias_set (exp);
+ set_mem_attributes (temp, exp, 0);
/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
here, because, in C and C++, the fact that a location is accessed
}
/* Check the access. */
- if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
+ if (cfun != 0 && current_function_check_memory_usage
+ && GET_CODE (op0) == MEM)
{
enum memory_use_mode memory_usage;
memory_usage = get_memory_usage_from_modifier (modifier);
size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
/* Check the access right of the pointer. */
+ in_check_memory_usage = 1;
if (size > BITS_PER_UNIT)
emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
to, Pmode,
TYPE_MODE (sizetype),
GEN_INT (memory_usage),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
}
plus_constant (XEXP (op0, 0),
(bitpos / BITS_PER_UNIT)));
- if (GET_CODE (op0) == MEM)
- MEM_ALIAS_SET (op0) = get_alias_set (exp);
-
+ set_mem_attributes (op0, exp, 0);
if (GET_CODE (XEXP (op0, 0)) == REG)
mark_reg_pointer (XEXP (op0, 0), alignment);
- MEM_SET_IN_STRUCT_P (op0, 1);
MEM_VOLATILE_P (op0) |= volatilep;
if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
|| modifier == EXPAND_CONST_ADDRESS
case NOP_EXPR:
case CONVERT_EXPR:
case REFERENCE_EXPR:
+ if (TREE_OPERAND (exp, 0) == error_mark_node)
+ return const0_rtx;
+
if (TREE_CODE (type) == UNION_TYPE)
{
tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
/* Check the access right of the pointer. */
+ in_check_memory_usage = 1;
if (size > BITS_PER_UNIT)
emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_RO),
TYPE_MODE (integer_type_node));
+ in_check_memory_usage = 0;
}
/* In cases where an aligned union has an unaligned object
tree exp;
{
register int nops, i;
- int type = TREE_CODE_CLASS (TREE_CODE (exp));
+ int class = TREE_CODE_CLASS (TREE_CODE (exp));
if (! do_preexpand_calls)
return;
/* Only expressions and references can contain calls. */
- if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
+ if (! IS_EXPR_CODE_CLASS (class) && class != 'r')
return;
switch (TREE_CODE (exp))
/* Do nothing if already expanded. */
if (CALL_EXPR_RTL (exp) != 0
/* Do nothing if the call returns a variable-sized object. */
- || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
+ || (TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE
+ && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
/* Do nothing to built-in functions. */
|| (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
&& (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
break;
}
- nops = tree_code_length[(int) TREE_CODE (exp)];
+ nops = TREE_CODE_LENGTH (TREE_CODE (exp));
for (i = 0; i < nops; i++)
if (TREE_OPERAND (exp, i) != 0)
{
;
else
{
- type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
- if (type == 'e' || type == '<' || type == '1' || type == '2'
- || type == 'r')
+ class = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
+ if (IS_EXPR_CODE_CLASS (class) || class == 'r')
preexpand_calls (TREE_OPERAND (exp, i));
}
}
#endif
);
- if (subtarget == 0 || GET_CODE (subtarget) != REG
+ if (! get_subtarget (subtarget)
|| GET_MODE (subtarget) != operand_mode
|| ! safe_from_p (subtarget, inner, 1))
subtarget = 0;
}
preexpand_calls (exp);
- if (subtarget == 0 || GET_CODE (subtarget) != REG
+ if (! get_subtarget (target)
|| GET_MODE (subtarget) != operand_mode
|| ! safe_from_p (subtarget, arg1, 1))
subtarget = 0;