/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.
+ Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
+ Free Software Foundation, Inc.
This file is part of GNU CC.
#include "ggc.h"
#include "tm_p.h"
-#define CEIL(x,y) (((x) + (y) - 1) / (y))
-
/* Decide whether a function's arguments should be processed
from first to last or from last to first.
extern struct obstack permanent_obstack;
-static rtx get_push_address PROTO ((int));
-
-static rtx enqueue_insn PROTO((rtx, rtx));
-static int move_by_pieces_ninsns PROTO((unsigned int, int));
-static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
- struct move_by_pieces *));
-static void clear_by_pieces PROTO((rtx, int, int));
-static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
- enum machine_mode,
- struct clear_by_pieces *));
-static int is_zeros_p PROTO((tree));
-static int mostly_zeros_p PROTO((tree));
-static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
- tree, tree, int, int));
-static void store_constructor PROTO((tree, rtx, int, int));
-static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
- enum machine_mode, int, int,
- int, int));
+static rtx get_push_address PARAMS ((int));
+
+static rtx enqueue_insn PARAMS ((rtx, rtx));
+static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
+static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
+ struct move_by_pieces *));
+static void clear_by_pieces PARAMS ((rtx, int, unsigned int));
+static void clear_by_pieces_1 PARAMS ((rtx (*) (rtx, ...),
+ enum machine_mode,
+ struct clear_by_pieces *));
+static int is_zeros_p PARAMS ((tree));
+static int mostly_zeros_p PARAMS ((tree));
+static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
+ tree, tree, unsigned int, int));
+static void store_constructor PARAMS ((tree, rtx, unsigned int, int, int));
+static rtx store_field PARAMS ((rtx, int, int, enum machine_mode,
+ tree, enum machine_mode, int,
+ unsigned int, int, int));
static enum memory_use_mode
- get_memory_usage_from_modifier PROTO((enum expand_modifier));
-static tree save_noncopied_parts PROTO((tree, tree));
-static tree init_noncopied_parts PROTO((tree, tree));
-static int safe_from_p PROTO((rtx, tree, int));
-static int fixed_type_p PROTO((tree));
-static rtx var_rtx PROTO((tree));
-static int readonly_fields_p PROTO((tree));
-static rtx expand_expr_unaligned PROTO((tree, int *));
-static rtx expand_increment PROTO((tree, int, int));
-static void preexpand_calls PROTO((tree));
-static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
-static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
-static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
-static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
+ get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
+static tree save_noncopied_parts PARAMS ((tree, tree));
+static tree init_noncopied_parts PARAMS ((tree, tree));
+static int safe_from_p PARAMS ((rtx, tree, int));
+static int fixed_type_p PARAMS ((tree));
+static rtx var_rtx PARAMS ((tree));
+static int readonly_fields_p PARAMS ((tree));
+static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
+static rtx expand_increment PARAMS ((tree, int, int));
+static void preexpand_calls PARAMS ((tree));
+static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
+static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
+static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
+					  rtx, rtx));
+static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
from an object of that mode in memory. If we can't, we won't try
-/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
+/* SLOW_UNALIGNED_ACCESS (MODE, ALIGN) is nonzero if unaligned accesses
+   to objects of mode MODE with alignment ALIGN are very slow.  */
#ifndef SLOW_UNALIGNED_ACCESS
-#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
+#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
void
init_expr ()
{
- current_function->expr
- = (struct expr_status *) xmalloc (sizeof (struct expr_status));
+ cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
pending_chain = 0;
pending_stack_adjust = 0;
+ arg_space_so_far = 0;
inhibit_defer_pop = 0;
saveregs_value = 0;
apply_args_value = 0;
void
move_by_pieces (to, from, len, align)
rtx to, from;
- int len, align;
+ int len;
+ unsigned int align;
{
struct move_by_pieces data;
rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
data.to_addr = copy_addr_to_reg (to_addr);
}
- if (! SLOW_UNALIGNED_ACCESS
+ if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
icode = mov_optab->handlers[(int) mode].insn_code;
if (icode != CODE_FOR_nothing
&& align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
- GET_MODE_SIZE (mode)))
+ (unsigned int) GET_MODE_SIZE (mode)))
move_by_pieces_1 (GEN_FCN (icode), mode, &data);
max_size = GET_MODE_SIZE (mode);
static int
move_by_pieces_ninsns (l, align)
unsigned int l;
- int align;
+ unsigned int align;
{
register int n_insns = 0;
int max_size = MOVE_MAX + 1;
- if (! SLOW_UNALIGNED_ACCESS
+ if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
icode = mov_optab->handlers[(int) mode].insn_code;
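+  /* MODE can be used only when ALIGN is at least the alignment MODE
+     itself requires, which may be less than its size.  */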
if (icode != CODE_FOR_nothing
- && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
- GET_MODE_SIZE (mode)))
+ && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
max_size = GET_MODE_SIZE (mode);
static void
move_by_pieces_1 (genfun, mode, data)
- rtx (*genfun) PROTO ((rtx, ...));
+ rtx (*genfun) PARAMS ((rtx, ...));
enum machine_mode mode;
struct move_by_pieces *data;
{
emit_block_move (x, y, size, align)
rtx x, y;
rtx size;
- int align;
+ unsigned int align;
{
rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
void
emit_group_load (dst, orig_src, ssize, align)
rtx dst, orig_src;
- int align, ssize;
+ unsigned int align;
+ int ssize;
{
rtx *tmps, src;
int start, i;
src = orig_src;
if (GET_CODE (src) != MEM)
{
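+      /* A source with VOIDmode (e.g. a CONST_INT) has no mode of its
+	 own, so make the temporary in the mode of the destination.  */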
- src = gen_reg_rtx (GET_MODE (orig_src));
+      if (GET_MODE (src) == VOIDmode)
+ src = gen_reg_rtx (GET_MODE (dst));
+ else
+ src = gen_reg_rtx (GET_MODE (orig_src));
emit_move_insn (src, orig_src);
}
shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
bytelen = ssize - bytepos;
if (bytelen <= 0)
- abort();
+ abort ();
}
/* Optimize the access just a bit. */
if (GET_CODE (src) == MEM
- && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
- && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
+ && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
+ && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
&& bytelen == GET_MODE_SIZE (mode))
{
tmps[i] = gen_reg_rtx (mode);
void
emit_group_store (orig_dst, src, ssize, align)
rtx orig_dst, src;
- int ssize, align;
+ int ssize;
+ unsigned int align;
{
rtx *tmps, dst;
int start, i;
/* Optimize the access just a bit. */
if (GET_CODE (dst) == MEM
- && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
- && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
+ && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
+ && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
&& bytelen == GET_MODE_SIZE (mode))
- {
- emit_move_insn (change_address (dst, mode,
- plus_constant (XEXP (dst, 0),
- bytepos)),
- tmps[i]);
- }
+ emit_move_insn (change_address (dst, mode,
+ plus_constant (XEXP (dst, 0),
+ bytepos)),
+ tmps[i]);
else
- {
- store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
+ store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
mode, tmps[i], align, ssize);
- }
}
+
-  emit_queue();
+  emit_queue ();
/* Copy from the pseudo into the (probable) hard reg. */
The primary purpose of this routine is to handle functions
that return BLKmode structures in registers. Some machines
(the PA for example) want to return all small structures
- in registers regardless of the structure's alignment.
- */
+ in registers regardless of the structure's alignment. */
rtx
-copy_blkmode_from_reg(tgtblk,srcreg,type)
+copy_blkmode_from_reg (tgtblk, srcreg, type)
rtx tgtblk;
rtx srcreg;
tree type;
{
int bytes = int_size_in_bytes (type);
rtx src = NULL, dst = NULL;
- int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
+ int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
int bitpos, xbitpos, big_endian_correction = 0;
if (tgtblk == 0)
static void
clear_by_pieces (to, len, align)
rtx to;
- int len, align;
+ int len;
+ unsigned int align;
{
struct clear_by_pieces data;
rtx to_addr = XEXP (to, 0);
data.to_addr = copy_addr_to_reg (to_addr);
}
- if (! SLOW_UNALIGNED_ACCESS
+ if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
icode = mov_optab->handlers[(int) mode].insn_code;
if (icode != CODE_FOR_nothing
- && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
- GET_MODE_SIZE (mode)))
+ && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
max_size = GET_MODE_SIZE (mode);
static void
clear_by_pieces_1 (genfun, mode, data)
- rtx (*genfun) PROTO ((rtx, ...));
+ rtx (*genfun) PARAMS ((rtx, ...));
enum machine_mode mode;
struct clear_by_pieces *data;
{
clear_storage (object, size, align)
rtx object;
rtx size;
- int align;
+ unsigned int align;
{
#ifdef TARGET_MEM_FUNCTIONS
static tree fn;
}
else
{
+ rtx realpart_x, realpart_y;
+ rtx imagpart_x, imagpart_y;
+
/* If this is a complex value with each part being smaller than a
word, the usual calling sequence will likely pack the pieces into
a single register. Unfortunately, SUBREG of hard registers only
rtx cmem = change_address (mem, mode, NULL_RTX);
- current_function->cannot_inline
- = "function uses short complex types";
+ cfun->cannot_inline = "function uses short complex types";
if (packed_dest_p)
{
}
}
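+      /* Extract all four parts up front: whether any part of X is a
+	 SUBREG determines below whether a CLOBBER of X is needed.  */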
- /* Show the output dies here. This is necessary for pseudos;
+ realpart_x = gen_realpart (submode, x);
+ realpart_y = gen_realpart (submode, y);
+ imagpart_x = gen_imagpart (submode, x);
+ imagpart_y = gen_imagpart (submode, y);
+
+ /* Show the output dies here. This is necessary for SUBREGs
+ of pseudos since we cannot track their lifetimes correctly;
hard regs shouldn't appear here except as return values.
We never want to emit such a clobber after reload. */
if (x != y
- && ! (reload_in_progress || reload_completed))
+ && ! (reload_in_progress || reload_completed)
+ && (GET_CODE (realpart_x) == SUBREG
+ || GET_CODE (imagpart_x) == SUBREG))
{
emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_realpart (submode, x), gen_realpart (submode, y)));
+ (realpart_x, realpart_y));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_imagpart (submode, x), gen_imagpart (submode, y)));
+ (imagpart_x, imagpart_y));
}
return get_last_insn ();
else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
{
rtx last_insn = 0;
+ rtx seq;
+ int need_clobber;
#ifdef PUSH_ROUNDING
}
#endif
- /* Show the output dies here. This is necessary for pseudos;
- hard regs shouldn't appear here except as return values.
- We never want to emit such a clobber after reload. */
- if (x != y
- && ! (reload_in_progress || reload_completed))
- {
- emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
- }
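+      /* Generate the word-by-word moves into a sequence first, noting
+	 whether any destination word is a SUBREG; only then do we know
+	 whether the CLOBBER of X below is needed.  */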
+ start_sequence ();
+ need_clobber = 0;
for (i = 0;
i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
i++)
if (xpart == 0 || ypart == 0)
abort ();
+ need_clobber |= (GET_CODE (xpart) == SUBREG);
+
last_insn = emit_move_insn (xpart, ypart);
}
+ seq = gen_sequence ();
+ end_sequence ();
+
+ /* Show the output dies here. This is necessary for SUBREGs
+ of pseudos since we cannot track their lifetimes correctly;
+ hard regs shouldn't appear here except as return values.
+ We never want to emit such a clobber after reload. */
+ if (x != y
+ && ! (reload_in_progress || reload_completed)
+ && need_clobber != 0)
+ {
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
+ }
+
+ emit_insn (seq);
+
return last_insn;
}
else
enum machine_mode mode;
tree type;
rtx size;
- int align;
+ unsigned int align;
int partial;
rtx reg;
int extra;
/* Here we avoid the case of a structure whose weak alignment
forces many pushes of a small amount of data,
and such small pushes do rounding that causes trouble. */
- && ((! SLOW_UNALIGNED_ACCESS)
+ && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
|| align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
|| PUSH_ROUNDING (align) == align)
&& PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
/* TEMP is the address of the block. Copy the data there. */
if (GET_CODE (size) == CONST_INT
- && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
+ && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
{
move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
INTVAL (size), align);
int unsignedp;
int volatilep = 0;
tree tem;
- int alignment;
+ unsigned int alignment;
push_temp_slots ();
tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
tree copy_size
= size_binop (MIN_EXPR,
make_tree (sizetype, size),
- convert (sizetype,
- build_int_2 (TREE_STRING_LENGTH (exp), 0)));
+ size_int (TREE_STRING_LENGTH (exp)));
rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
VOIDmode, 0);
rtx label = 0;
int bitsize, bitpos;
enum machine_mode mode;
tree exp, type;
- int align;
+ unsigned int align;
int cleared;
{
if (TREE_CODE (exp) == CONSTRUCTOR
&& (bitpos == 0 || GET_CODE (target) == MEM))
{
if (bitpos != 0)
- target = change_address (target, VOIDmode,
- plus_constant (XEXP (target, 0),
- bitpos / BITS_PER_UNIT));
- store_constructor (exp, target, align, cleared);
+ target
+ = change_address (target,
+ GET_MODE (target) == BLKmode
+ || 0 != (bitpos
+ % GET_MODE_ALIGNMENT (GET_MODE (target)))
+ ? BLKmode : VOIDmode,
+ plus_constant (XEXP (target, 0),
+ bitpos / BITS_PER_UNIT));
+	  store_constructor (exp, target, align, cleared,
+			     bitsize / BITS_PER_UNIT);
}
else
store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
(align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
- int_size_in_bytes (type), cleared);
+ int_size_in_bytes (type), 0);
}
/* Store the value of constructor EXP into the rtx TARGET.
TARGET is either a REG or a MEM.
ALIGN is the maximum known alignment for TARGET, in bits.
- CLEARED is true if TARGET is known to have been zero'd. */
+ CLEARED is true if TARGET is known to have been zero'd.
+ SIZE is the number of bytes of TARGET we are allowed to modify: this
+ may not be the same as the size of EXP if we are assigning to a field
+ which has been packed to exclude padding bits. */
static void
-store_constructor (exp, target, align, cleared)
+store_constructor (exp, target, align, cleared, size)
tree exp;
rtx target;
- int align;
+ unsigned int align;
int cleared;
+ int size;
{
tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
{
rtx temp = gen_reg_rtx (GET_MODE (target));
- store_constructor (exp, temp, 0);
+ store_constructor (exp, temp, align, cleared, size);
emit_move_insn (target, temp);
return;
}
/* If the constructor has fewer fields than the structure
or if we are initializing the structure to mostly zeros,
clear the whole structure first. */
- else if ((list_length (CONSTRUCTOR_ELTS (exp))
- != list_length (TYPE_FIELDS (type)))
- || mostly_zeros_p (exp))
+ else if (size > 0
+ && ((list_length (CONSTRUCTOR_ELTS (exp))
+ != list_length (TYPE_FIELDS (type)))
+ || mostly_zeros_p (exp)))
{
if (! cleared)
- clear_storage (target, expr_size (exp),
+ clear_storage (target, GEN_INT (size),
(align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
cleared = 1;
rtx offset_rtx;
if (contains_placeholder_p (offset))
- offset = build (WITH_RECORD_EXPR, sizetype,
+ offset = build (WITH_RECORD_EXPR, bitsizetype,
offset, make_tree (TREE_TYPE (exp), target));
offset = size_binop (EXACT_DIV_EXPR, offset,
- size_int (BITS_PER_UNIT));
+ bitsize_int (BITS_PER_UNIT));
+ offset = convert (sizetype, offset);
offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
if (GET_CODE (to_rtx) != MEM)
|| 4 * zero_count >= 3 * count)
need_to_clear = 1;
}
- if (need_to_clear)
+ if (need_to_clear && size > 0)
{
if (! cleared)
- clear_storage (target, expr_size (exp),
+ clear_storage (target, GEN_INT (size),
(align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
cleared = 1;
}
int bitpos;
int unsignedp;
tree value = TREE_VALUE (elt);
- int align = TYPE_ALIGN (TREE_TYPE (value));
+ unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
tree index = TREE_PURPOSE (elt);
rtx xtarget = target;
loop = expand_start_loop (0);
/* Assign value to element index. */
- position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
- size_int (BITS_PER_UNIT));
- position = size_binop (MULT_EXPR,
- size_binop (MINUS_EXPR, index,
- TYPE_MIN_VALUE (domain)),
- position);
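+		  /* The byte offset of this element from the start of the
+		     array is (INDEX - MIN) times the element size.  */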
+ position
+ = convert (ssizetype,
+ fold (build (MINUS_EXPR, TREE_TYPE (index),
+ index, TYPE_MIN_VALUE (domain))));
+ position = size_binop (MULT_EXPR, position,
+ convert (ssizetype,
+ TYPE_SIZE_UNIT (elttype)));
+
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
xtarget = change_address (target, mode, addr);
if (TREE_CODE (value) == CONSTRUCTOR)
- store_constructor (value, xtarget, align, cleared);
+ store_constructor (value, xtarget, align, cleared,
+ bitsize / BITS_PER_UNIT);
else
store_expr (value, xtarget, 0);
index, integer_one_node), 0, 0);
expand_end_loop ();
emit_label (loop_end);
-
- /* Needed by stupid register allocation. to extend the
- lifetime of pseudo-regs used by target past the end
- of the loop. */
- emit_insn (gen_rtx_USE (GET_MODE (target), target));
}
}
else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
tree position;
if (index == 0)
- index = size_int (i);
+	      index = ssize_int (i);
if (minelt)
- index = size_binop (MINUS_EXPR, index,
- TYPE_MIN_VALUE (domain));
- position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
- size_int (BITS_PER_UNIT));
- position = size_binop (MULT_EXPR, index, position);
+	      index = convert (ssizetype,
+			       fold (build (MINUS_EXPR, TREE_TYPE (index),
+					    index,
+					    TYPE_MIN_VALUE (domain))));
+ position = size_binop (MULT_EXPR, index,
+ convert (ssizetype,
+ TYPE_SIZE_UNIT (elttype)));
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
xtarget = change_address (target, mode, addr);
bzero/memset), and set the bits we want. */
/* Check for all zeros. */
- if (elt == NULL_TREE)
+ if (elt == NULL_TREE && size > 0)
{
if (!cleared)
- clear_storage (target, expr_size (exp),
+ clear_storage (target, GEN_INT (size),
TYPE_ALIGN (type) / BITS_PER_UNIT);
return;
}
domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
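+  /* Compute the length with size_diffop, which works in ssizetype and
+     so also handles a DOMAIN_MIN that exceeds DOMAIN_MAX.  */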
bitlength = size_binop (PLUS_EXPR,
- size_binop (MINUS_EXPR, domain_max, domain_min),
- size_one_node);
+ size_diffop (domain_max, domain_min),
+ ssize_int (1));
if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
abort ();
tree exp;
enum machine_mode value_mode;
int unsignedp;
- int align;
+ unsigned int align;
int total_size;
int alias_set;
{
|| GET_CODE (target) == SUBREG
/* If the field isn't aligned enough to store as an ordinary memref,
store it as a bit field. */
- || (mode != BLKmode && SLOW_UNALIGNED_ACCESS
+ || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
&& (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
|| bitpos % GET_MODE_ALIGNMENT (mode)))
- || (mode == BLKmode && SLOW_UNALIGNED_ACCESS
+ || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
&& (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
|| bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
/* If the RHS and field are a constant size and the size of the
boundary. If so, we simply do a block copy. */
if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
{
+ unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
+
if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
|| bitpos % BITS_PER_UNIT != 0)
abort ();
plus_constant (XEXP (target, 0),
bitpos / BITS_PER_UNIT));
+ /* Make sure that ALIGN is no stricter than the alignment of EXP. */
+ align = MIN (exp_align, align);
+
/* Find an alignment that is consistent with the bit position. */
while ((bitpos % (align * BITS_PER_UNIT)) != 0)
align >>= 1;
enum machine_mode *pmode;
int *punsignedp;
int *pvolatilep;
- int *palignment;
+ unsigned int *palignment;
{
tree orig_exp = exp;
tree size_tree = 0;
enum machine_mode mode = VOIDmode;
- tree offset = integer_zero_node;
+ tree offset = size_zero_node;
unsigned int alignment = BIGGEST_ALIGNMENT;
if (TREE_CODE (exp) == COMPONENT_REF)
tree pos = (TREE_CODE (exp) == COMPONENT_REF
? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
: TREE_OPERAND (exp, 2));
- tree constant = integer_zero_node, var = pos;
+ tree constant = bitsize_int (0), var = pos;
/* If this field hasn't been filled in yet, don't go
past it. This should only happen when folding expressions
&& TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
else if (TREE_CODE (pos) == INTEGER_CST)
- constant = pos, var = integer_zero_node;
+ constant = pos, var = bitsize_int (0);
*pbitpos += TREE_INT_CST_LOW (constant);
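+      /* VAR is a bit position, always a multiple of BITS_PER_UNIT;
+	 fold it into OFFSET as a byte count, in sizetype.  */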
- offset = size_binop (PLUS_EXPR, offset,
- size_binop (EXACT_DIV_EXPR, var,
- size_int (BITS_PER_UNIT)));
+ offset
+ = size_binop (PLUS_EXPR, offset,
+ convert (sizetype,
+ size_binop (EXACT_DIV_EXPR, var,
+ bitsize_int (BITS_PER_UNIT))));
}
else if (TREE_CODE (exp) == ARRAY_REF)
it overflowed. In either case, redo the multiplication
against the size in units. This is especially important
in the non-constant case to avoid a division at runtime. */
- xindex = fold (build (MULT_EXPR, ssizetype, index,
- convert (ssizetype,
- TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
+ xindex
+ = fold (build (MULT_EXPR, ssizetype, index,
+ convert (ssizetype,
+ TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
if (contains_placeholder_p (xindex))
- xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
+ xindex = build (WITH_RECORD_EXPR, ssizetype, xindex, exp);
- offset = size_binop (PLUS_EXPR, offset, xindex);
+ offset
+ = size_binop (PLUS_EXPR, offset, convert (sizetype, xindex));
}
}
else if (TREE_CODE (exp) != NON_LVALUE_EXPR
memory protection).
Aggregates are not checked here; they're handled elsewhere. */
- if (current_function && current_function_check_memory_usage
+ if (cfun && current_function_check_memory_usage
&& code == VAR_DECL
&& GET_CODE (DECL_RTL (exp)) == MEM
&& ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
RTX_UNCHANGING_P (target) = 1;
}
- store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
+ store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
+ int_size_in_bytes (TREE_TYPE (exp)));
return target;
}
op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
op0 = memory_address (mode, op0);
- if (current_function && current_function_check_memory_usage
+ if (cfun && current_function_check_memory_usage
&& ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
{
enum memory_use_mode memory_usage;
tree array = TREE_OPERAND (exp, 0);
tree domain = TYPE_DOMAIN (TREE_TYPE (array));
tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
- tree index = TREE_OPERAND (exp, 1);
- tree index_type = TREE_TYPE (index);
+ tree index = convert (sizetype, TREE_OPERAND (exp, 1));
HOST_WIDE_INT i;
/* Optimize the special-case of a zero lower bound.
with constant folding. (E.g. suppose the lower bound is 1,
and its mode is QI. Without the conversion, (ARRAY
+(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
- +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
-
- But sizetype isn't quite right either (especially if
- the lowbound is negative). FIXME */
+ +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
if (! integer_zerop (low_bound))
- index = fold (build (MINUS_EXPR, index_type, index,
- convert (sizetype, low_bound)));
+ index = size_diffop (index, convert (sizetype, low_bound));
/* Fold an expression like: "foo"[2].
This is not done in fold so it won't happen inside &.
op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
{
- int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
+ HOST_WIDE_INT bitsize
+ = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
{
int bitpos;
tree offset;
int volatilep = 0;
- int alignment;
+ unsigned int alignment;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep,
&alignment);
&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
- || (mode1 != BLKmode && SLOW_UNALIGNED_ACCESS
+ || (mode1 != BLKmode
+ && SLOW_UNALIGNED_ACCESS (mode1, alignment)
&& ((TYPE_ALIGN (TREE_TYPE (tem))
< (unsigned int) GET_MODE_ALIGNMENT (mode))
- || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
+ || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
+ /* If the type and the field are a constant size and the
+ size of the type isn't the same size as the bitfield,
+ we must use bitfield operations. */
+ || ((bitsize >= 0
+ && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
+ == INTEGER_CST)
+ && ((TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp)))
+ != 0)
+ || (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))
+ != bitsize))))))
|| (modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_INITIALIZER
&& mode == BLKmode
- && SLOW_UNALIGNED_ACCESS
+ && SLOW_UNALIGNED_ACCESS (mode, alignment)
&& (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
|| bitpos % TYPE_ALIGN (type) != 0)))
{
/* If this mode is an integer too wide to compare properly,
compare word by word. Rely on cse to optimize constant cases. */
- if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
+ if (GET_MODE_CLASS (mode) == MODE_INT
+ && ! can_compare_p (GE, mode, ccp_jump))
{
if (code == MAX_EXPR)
do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
case GE_EXPR:
case EQ_EXPR:
case NE_EXPR:
+ case UNORDERED_EXPR:
+ case ORDERED_EXPR:
+ case UNLT_EXPR:
+ case UNLE_EXPR:
+ case UNGT_EXPR:
+ case UNGE_EXPR:
+ case UNEQ_EXPR:
preexpand_calls (exp);
temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
if (temp != 0)
static rtx
expand_expr_unaligned (exp, palign)
register tree exp;
- int *palign;
+ unsigned int *palign;
{
register rtx op0;
tree type = TREE_TYPE (exp);
tree array = TREE_OPERAND (exp, 0);
tree domain = TYPE_DOMAIN (TREE_TYPE (array));
tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
- tree index = TREE_OPERAND (exp, 1);
- tree index_type = TREE_TYPE (index);
+ tree index = convert (sizetype, TREE_OPERAND (exp, 1));
HOST_WIDE_INT i;
if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
with constant folding. (E.g. suppose the lower bound is 1,
and its mode is QI. Without the conversion, (ARRAY
+(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
- +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
-
- But sizetype isn't quite right either (especially if
- the lowbound is negative). FIXME */
+ +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
if (! integer_zerop (low_bound))
- index = fold (build (MINUS_EXPR, index_type, index,
- convert (sizetype, low_bound)));
+ index = size_diffop (index, convert (sizetype, low_bound));
/* If this is a constant index into a constant array,
just get the value from the array. Handle both the cases when
int bitpos;
tree offset;
int volatilep = 0;
- int alignment;
+ unsigned int alignment;
int unsignedp;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep,
EXPAND_INITIALIZER), then we must not copy to a temporary. */
if (mode1 == VOIDmode
|| GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
- || (SLOW_UNALIGNED_ACCESS
+ || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
&& (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
|| bitpos % TYPE_ALIGN (type) != 0)))
{
return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}
\f
-/* Return the tree node and offset if a given argument corresponds to
- a string constant. */
+/* Return the tree node if ARG corresponds to a string constant or zero
+ if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
+ in bytes within the string that ARG is accessing. The type of the
+ offset will be `sizetype'. */
tree
string_constant (arg, ptr_offset)
if (TREE_CODE (arg) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
{
- *ptr_offset = integer_zero_node;
+ *ptr_offset = size_zero_node;
return TREE_OPERAND (arg, 0);
}
else if (TREE_CODE (arg) == PLUS_EXPR)
if (TREE_CODE (arg0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
{
- *ptr_offset = arg1;
+ *ptr_offset = convert (sizetype, arg1);
return TREE_OPERAND (arg0, 0);
}
else if (TREE_CODE (arg1) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
{
- *ptr_offset = arg0;
+ *ptr_offset = convert (sizetype, arg0);
return TREE_OPERAND (arg1, 0);
}
}
tree type;
tree offset;
int volatilep = 0;
- int alignment;
+ unsigned int alignment;
/* Get description of this reference. We don't actually care
about the underlying object here. */
do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
- && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
+ && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
do_jump_by_parts_equality (exp, if_false_label, if_true_label);
else
do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
- && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
+ && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
do_jump_by_parts_equality (exp, if_true_label, if_false_label);
else
do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
case LT_EXPR:
mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
if (GET_MODE_CLASS (mode) == MODE_INT
- && ! can_compare_p (mode, ccp_jump))
+ && ! can_compare_p (LT, mode, ccp_jump))
do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
else
do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
case LE_EXPR:
mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
if (GET_MODE_CLASS (mode) == MODE_INT
- && ! can_compare_p (mode, ccp_jump))
+ && ! can_compare_p (LE, mode, ccp_jump))
do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
else
do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
case GT_EXPR:
mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
if (GET_MODE_CLASS (mode) == MODE_INT
- && ! can_compare_p (mode, ccp_jump))
+ && ! can_compare_p (GT, mode, ccp_jump))
do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
else
do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
case GE_EXPR:
mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
if (GET_MODE_CLASS (mode) == MODE_INT
- && ! can_compare_p (mode, ccp_jump))
+ && ! can_compare_p (GE, mode, ccp_jump))
do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
else
do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
break;
+ case UNORDERED_EXPR:
+ case ORDERED_EXPR:
+ {
+ enum rtx_code cmp, rcmp;
+ int do_rev;
+
+ if (code == UNORDERED_EXPR)
+ cmp = UNORDERED, rcmp = ORDERED;
+ else
+ cmp = ORDERED, rcmp = UNORDERED;
+ mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+
+ do_rev = 0;
+ if (! can_compare_p (cmp, mode, ccp_jump)
+ && (can_compare_p (rcmp, mode, ccp_jump)
+ /* If the target doesn't provide either UNORDERED or ORDERED
+ comparisons, canonicalize on UNORDERED for the library. */
+ || rcmp == UNORDERED))
+ do_rev = 1;
+
+ if (! do_rev)
+ do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
+ else
+ do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
+ }
+ break;
+
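+      /* The five IEEE unordered comparisons share this body: RCODE1 is
+	 the RTL code to try directly, TCODE2 the ordered tree code used
+	 when the target lacks the combined comparison.  */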
+ {
+ enum rtx_code rcode1;
+ enum tree_code tcode2;
+
+ case UNLT_EXPR:
+ rcode1 = UNLT;
+ tcode2 = LT_EXPR;
+ goto unordered_bcc;
+ case UNLE_EXPR:
+ rcode1 = UNLE;
+ tcode2 = LE_EXPR;
+ goto unordered_bcc;
+ case UNGT_EXPR:
+ rcode1 = UNGT;
+ tcode2 = GT_EXPR;
+ goto unordered_bcc;
+ case UNGE_EXPR:
+ rcode1 = UNGE;
+ tcode2 = GE_EXPR;
+ goto unordered_bcc;
+ case UNEQ_EXPR:
+ rcode1 = UNEQ;
+ tcode2 = EQ_EXPR;
+ goto unordered_bcc;
+
+ unordered_bcc:
+ mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ if (can_compare_p (rcode1, mode, ccp_jump))
+ do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
+ if_true_label);
+ else
+ {
+ tree op0 = save_expr (TREE_OPERAND (exp, 0));
+ tree op1 = save_expr (TREE_OPERAND (exp, 1));
+ tree cmp0, cmp1;
+
+ /* If the target doesn't support combined unordered
+ compares, decompose into UNORDERED + comparison. */
+ cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
+ cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
+ exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
+ do_jump (exp, if_false_label, if_true_label);
+ }
+ }
+ break;
+
default:
normal:
temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
emit_jump (target);
}
else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
- && ! can_compare_p (GET_MODE (temp), ccp_jump))
+ && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
/* Note swapping the labels gives us not-equal. */
do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
else if (GET_MODE (temp) != VOIDmode)
int unsignedp;
enum machine_mode mode;
rtx size;
- int align;
+ unsigned int align;
{
rtx tem;
int unsignedp;
enum machine_mode mode;
rtx size;
- int align;
+ unsigned int align;
rtx if_false_label, if_true_label;
{
rtx tem;
enum rtx_code signed_code, unsigned_code;
rtx if_false_label, if_true_label;
{
- int align0, align1;
+ unsigned int align0, align1;
register rtx op0, op1;
register tree type;
register enum machine_mode mode;
else
code = unsignedp ? GEU : GE;
break;
+
+ case UNORDERED_EXPR:
+ code = UNORDERED;
+ break;
+ case ORDERED_EXPR:
+ code = ORDERED;
+ break;
+ case UNLT_EXPR:
+ code = UNLT;
+ break;
+ case UNLE_EXPR:
+ code = UNLE;
+ break;
+ case UNGT_EXPR:
+ code = UNGT;
+ break;
+ case UNGE_EXPR:
+ code = UNGE;
+ break;
+ case UNEQ_EXPR:
+ code = UNEQ;
+ break;
+
default:
abort ();
}
}
/* Now see if we are likely to be able to do this. Return if not. */
- if (! can_compare_p (operand_mode, ccp_store_flag))
+ if (! can_compare_p (code, operand_mode, ccp_store_flag))
return 0;
+
icode = setcc_gen_code[(int) code];
if (icode == CODE_FOR_nothing
|| (only_cheap && insn_data[(int) icode].operand[0].mode != mode))