/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
+ Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "obstack.h"
#include "flags.h"
#include "regs.h"
+#include "hard-reg-set.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
int reverse;
};
+/* This structure is used by clear_by_pieces to describe the clear to
+ be performed. */
+
+struct clear_by_pieces
+{
+ rtx to;
+ rtx to_addr;
+ int autinc_to;
+ int explicit_inc_to;
+ int to_struct;
+ int len;
+ int offset;
+ int reverse;
+};
+
/* Used to generate bytecodes: keep track of size of local variables,
as well as depth of arithmetic stack. (Notice that variables are
stored on the machine's stack, not the arithmetic stack.) */
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
-
+extern rtx arg_pointer_save_area;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
struct move_by_pieces *));
-static void store_constructor PROTO((tree, rtx));
+static void clear_by_pieces PROTO((rtx, int, int));
+static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
+ struct clear_by_pieces *));
+static int is_zeros_p PROTO((tree));
+static int mostly_zeros_p PROTO((tree));
+static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
+extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
from an object of that mode in memory. If we can't, we won't try
/* This array records the insn_code of insns to perform block moves. */
enum insn_code movstr_optab[NUM_MACHINE_MODES];
+/* This array records the insn_code of insns to perform block clears. */
+enum insn_code clrstr_optab[NUM_MACHINE_MODES];
+
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
#ifndef SLOW_UNALIGNED_ACCESS
emit_library_call (bcopy_libfunc, 0,
VOIDmode, 3, XEXP (y, 0), Pmode,
XEXP (x, 0), Pmode,
- convert_to_mode (TYPE_MODE (sizetype), size,
- TREE_UNSIGNED (sizetype)),
- TYPE_MODE (sizetype));
+ convert_to_mode (TYPE_MODE (integer_type_node), size,
+ TREE_UNSIGNED (integer_type_node)),
+ TYPE_MODE (integer_type_node));
#endif
}
}
use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
\f
+/* Generate several move instructions to clear LEN bytes of block TO.
+ (A MEM rtx with BLKmode). The caller must pass TO through
+   protect_from_queue before calling.  ALIGN (in bytes) is the maximum alignment
+ we can assume. */
+
+static void
+clear_by_pieces (to, len, align)
+ rtx to;
+ int len, align;
+{
+ struct clear_by_pieces data;
+ rtx to_addr = XEXP (to, 0);
+ int max_size = MOVE_MAX + 1;
+
+ data.offset = 0;
+ data.to_addr = to_addr;
+ data.to = to;
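+  /* TO_ADDR may already be an auto-increment or auto-decrement address;
+     note that, so we do not add explicit increments of our own.  */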
+ data.autinc_to
+ = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
+ || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
+
+ data.explicit_inc_to = 0;
+ data.reverse
+ = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
+ if (data.reverse) data.offset = len;
+ data.len = len;
+
+ data.to_struct = MEM_IN_STRUCT_P (to);
+
+  /* If clearing requires more than two move insns,
+     copy the address to a register (to make displacements shorter)
+     and use post-increment if available.  */
+ if (!data.autinc_to
+ && move_by_pieces_ninsns (len, align) > 2)
+ {
+#ifdef HAVE_PRE_DECREMENT
+ if (data.reverse && ! data.autinc_to)
+ {
+ data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
+ data.autinc_to = 1;
+ data.explicit_inc_to = -1;
+ }
+#endif
+#ifdef HAVE_POST_INCREMENT
+ if (! data.reverse && ! data.autinc_to)
+ {
+ data.to_addr = copy_addr_to_reg (to_addr);
+ data.autinc_to = 1;
+ data.explicit_inc_to = 1;
+ }
+#endif
+ if (!data.autinc_to && CONSTANT_P (to_addr))
+ data.to_addr = copy_addr_to_reg (to_addr);
+ }
+
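+  /* If unaligned accesses are cheap, or the block is already aligned to
+     the largest alignment the machine cares about, clear it in units of
+     up to MOVE_MAX bytes.  */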
+ if (! SLOW_UNALIGNED_ACCESS
+ || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
+ align = MOVE_MAX;
+
+ /* First move what we can in the largest integer mode, then go to
+ successively smaller modes. */
+
+ while (max_size > 1)
+ {
+ enum machine_mode mode = VOIDmode, tmode;
+ enum insn_code icode;
+
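+      /* Find the widest integer mode that is narrower than MAX_SIZE bytes.  */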
+ for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+ tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
+ if (GET_MODE_SIZE (tmode) < max_size)
+ mode = tmode;
+
+ if (mode == VOIDmode)
+ break;
+
+ icode = mov_optab->handlers[(int) mode].insn_code;
+ if (icode != CODE_FOR_nothing
+ && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
+ GET_MODE_SIZE (mode)))
+ clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
+
+ max_size = GET_MODE_SIZE (mode);
+ }
+
+ /* The code above should have handled everything. */
+ if (data.len != 0)
+ abort ();
+}
+
+/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
+ with move instructions for mode MODE. GENFUN is the gen_... function
+ to make a move insn for that mode. DATA has all the other info. */
+
+static void
+clear_by_pieces_1 (genfun, mode, data)
+ rtx (*genfun) ();
+ enum machine_mode mode;
+ struct clear_by_pieces *data;
+{
+ register int size = GET_MODE_SIZE (mode);
+ register rtx to1;
+
+ while (data->len >= size)
+ {
+ if (data->reverse) data->offset -= size;
+
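+      /* With an auto-increment address, reference memory directly through
+         the address register; otherwise add the constant byte offset.  */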
+ to1 = (data->autinc_to
+ ? gen_rtx (MEM, mode, data->to_addr)
+ : change_address (data->to, mode,
+ plus_constant (data->to_addr, data->offset)));
+ MEM_IN_STRUCT_P (to1) = data->to_struct;
+
+#ifdef HAVE_PRE_DECREMENT
+ if (data->explicit_inc_to < 0)
+ emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
+#endif
+
+ emit_insn ((*genfun) (to1, const0_rtx));
+#ifdef HAVE_POST_INCREMENT
+ if (data->explicit_inc_to > 0)
+ emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
+#endif
+
+ if (! data->reverse) data->offset += size;
+
+ data->len -= size;
+ }
+}
+\f
/* Write zeros through the storage of OBJECT.
- If OBJECT has BLKmode, SIZE is its length in bytes. */
+ If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
+   the maximum alignment we can assume it has, measured in bytes.  */
void
-clear_storage (object, size)
+clear_storage (object, size, align)
rtx object;
- int size;
+ rtx size;
+ int align;
{
if (GET_MODE (object) == BLKmode)
{
+ object = protect_from_queue (object, 1);
+ size = protect_from_queue (size, 0);
+
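+      /* If the length is a compile-time constant and clearing it would
+         take only a few stores, emit the stores inline.  */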
+ if (GET_CODE (size) == CONST_INT
+ && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
+ clear_by_pieces (object, INTVAL (size), align);
+
+ else
+ {
+ /* Try the most limited insn first, because there's no point
+ including more than one in the machine description unless
+ the more limited one has some advantage. */
+
+ rtx opalign = GEN_INT (align);
+ enum machine_mode mode;
+
+ for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
+ mode = GET_MODE_WIDER_MODE (mode))
+ {
+ enum insn_code code = clrstr_optab[(int) mode];
+
+ if (code != CODE_FOR_nothing
+ /* We don't need MODE to be narrower than
+ BITS_PER_HOST_WIDE_INT here because if SIZE is less than
+ the mode mask, as it is returned by the macro, it will
+ definitely be less than the actual mode mask. */
+ && ((GET_CODE (size) == CONST_INT
+ && ((unsigned HOST_WIDE_INT) INTVAL (size)
+ <= GET_MODE_MASK (mode)))
+ || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
+ && (insn_operand_predicate[(int) code][0] == 0
+ || (*insn_operand_predicate[(int) code][0]) (object,
+ BLKmode))
+ && (insn_operand_predicate[(int) code][2] == 0
+ || (*insn_operand_predicate[(int) code][2]) (opalign,
+ VOIDmode)))
+ {
+ rtx op1;
+ rtx last = get_last_insn ();
+ rtx pat;
+
+ op1 = convert_to_mode (mode, size, 1);
+ if (insn_operand_predicate[(int) code][1] != 0
+ && ! (*insn_operand_predicate[(int) code][1]) (op1,
+ mode))
+ op1 = copy_to_mode_reg (mode, op1);
+
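+	      /* The pattern may still FAIL at expansion time; if so, throw
+	         away any insns it emitted and try a wider mode.  */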
+ pat = GEN_FCN ((int) code) (object, op1, opalign);
+ if (pat)
+ {
+ emit_insn (pat);
+ return;
+ }
+ else
+ delete_insns_since (last);
+ }
+ }
+
+
#ifdef TARGET_MEM_FUNCTIONS
- emit_library_call (memset_libfunc, 0,
- VOIDmode, 3,
- XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
- GEN_INT (size), ptr_mode);
+ emit_library_call (memset_libfunc, 0,
+ VOIDmode, 3,
+ XEXP (object, 0), Pmode,
+ const0_rtx, TYPE_MODE (integer_type_node),
+ convert_to_mode (TYPE_MODE (sizetype),
+ size, TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#else
- emit_library_call (bzero_libfunc, 0,
- VOIDmode, 2,
- XEXP (object, 0), Pmode,
- GEN_INT (size), ptr_mode);
+ emit_library_call (bzero_libfunc, 0,
+ VOIDmode, 2,
+ XEXP (object, 0), Pmode,
+ convert_to_mode (TYPE_MODE (integer_type_node),
+ size,
+ TREE_UNSIGNED (integer_type_node)),
+ TYPE_MODE (integer_type_node));
#endif
+ }
}
else
emit_move_insn (object, const0_rtx);
#endif
/* Show the output dies here. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
+ if (x != y)
+ emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
for (i = 0;
i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
#else
emit_library_call (bcopy_libfunc, 0,
VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
- convert_to_mode (TYPE_MODE (sizetype),
- size, TREE_UNSIGNED (sizetype)),
- TYPE_MODE (sizetype));
+ convert_to_mode (TYPE_MODE (integer_type_node),
+ size,
+ TREE_UNSIGNED (integer_type_node)),
+ TYPE_MODE (integer_type_node));
#endif
OK_DEFER_POP;
}
to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
if (GET_MODE (to_rtx) == BLKmode)
- {
- int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
- emit_block_move (to_rtx, value, expr_size (from), align);
- }
+ emit_block_move (to_rtx, value, expr_size (from),
+ TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
else
emit_move_insn (to_rtx, value);
preserve_temp_slots (to_rtx);
emit_library_call (bcopy_libfunc, 0,
VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
XEXP (to_rtx, 0), Pmode,
- convert_to_mode (TYPE_MODE (sizetype),
- size, TREE_UNSIGNED (sizetype)),
- TYPE_MODE (sizetype));
+ convert_to_mode (TYPE_MODE (integer_type_node),
+ size, TREE_UNSIGNED (integer_type_node)),
+ TYPE_MODE (integer_type_node));
#endif
preserve_temp_slots (to_rtx);
if (size != const0_rtx)
{
#ifdef TARGET_MEM_FUNCTIONS
- emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
- Pmode, const0_rtx, Pmode, size, ptr_mode);
+ emit_library_call (memset_libfunc, 0, VOIDmode, 3,
+ addr, Pmode,
+ const0_rtx, TYPE_MODE (integer_type_node),
+ convert_to_mode (TYPE_MODE (sizetype),
+ size,
+ TREE_UNSIGNED (sizetype)),
+ TYPE_MODE (sizetype));
#else
emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
- addr, Pmode, size, ptr_mode);
+ addr, Pmode,
+ convert_to_mode (TYPE_MODE (integer_type_node),
+ size,
+ TREE_UNSIGNED (integer_type_node)),
+ TYPE_MODE (integer_type_node));
#endif
}
return target;
}
\f
+/* Return 1 if EXP just contains zeros. */
+
+static int
+is_zeros_p (exp)
+ tree exp;
+{
+ tree elt;
+
+ switch (TREE_CODE (exp))
+ {
+ case CONVERT_EXPR:
+ case NOP_EXPR:
+ case NON_LVALUE_EXPR:
+ return is_zeros_p (TREE_OPERAND (exp, 0));
+
+ case INTEGER_CST:
+ return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
+
+ case COMPLEX_CST:
+ return
+ is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
+
+ case REAL_CST:
+ return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
+
+ case CONSTRUCTOR:
+ if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
+ return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
+ for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
+ if (! is_zeros_p (TREE_VALUE (elt)))
+ return 0;
+
+ return 1;
+ }
+
+ return 0;
+}
+
+/* Return 1 if EXP contains mostly (3/4) zeros. */
+
+static int
+mostly_zeros_p (exp)
+ tree exp;
+{
+ if (TREE_CODE (exp) == CONSTRUCTOR)
+ {
+ int elts = 0, zeros = 0;
+ tree elt = CONSTRUCTOR_ELTS (exp);
+ if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
+ {
+ /* If there are no ranges of true bits, it is all zero. */
+ return elt == NULL_TREE;
+ }
+ for (; elt; elt = TREE_CHAIN (elt))
+ {
+ /* We do not handle the case where the index is a RANGE_EXPR,
+ so the statistic will be somewhat inaccurate.
+ We do make a more accurate count in store_constructor itself,
+ so since this function is only used for nested array elements,
+ this should be close enough. */
+ if (mostly_zeros_p (TREE_VALUE (elt)))
+ zeros++;
+ elts++;
+ }
+
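+      /* "Mostly zero" means at least three quarters of the elements are
+         themselves mostly zero.  */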
+ return 4 * zeros >= 3 * elts;
+ }
+
+ return is_zeros_p (exp);
+}
+\f
+/* Helper function for store_constructor.
+ TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
+ TYPE is the type of the CONSTRUCTOR, not the element type.
+ CLEARED is as for store_constructor. */
+
+static void
+store_constructor_field (target, bitsize, bitpos,
+ mode, exp, type, cleared)
+ rtx target;
+ int bitsize, bitpos;
+ enum machine_mode mode;
+ tree exp, type;
+ int cleared;
+{
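+  /* A nested CONSTRUCTOR that starts on a byte boundary is stored by
+     recursing on the sub-object directly; everything else goes through
+     store_field.  */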
+ if (TREE_CODE (exp) == CONSTRUCTOR
+ && (bitpos % BITS_PER_UNIT) == 0)
+ {
+ bitpos /= BITS_PER_UNIT;
+ store_constructor (exp,
+ change_address (target, VOIDmode,
+ plus_constant (XEXP (target, 0),
+ bitpos)),
+ cleared);
+ }
+ else
+ store_field (target, bitsize, bitpos, mode, exp,
+ VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
+ int_size_in_bytes (type));
+}
+
/* Store the value of constructor EXP into the rtx TARGET.
- TARGET is either a REG or a MEM. */
+ TARGET is either a REG or a MEM.
+ CLEARED is true if TARGET is known to have been zero'd. */
static void
-store_constructor (exp, target)
+store_constructor (exp, target, cleared)
tree exp;
rtx target;
+ int cleared;
{
tree type = TREE_TYPE (exp);
if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
{
rtx temp = gen_reg_rtx (GET_MODE (target));
- store_constructor (exp, temp);
+ store_constructor (exp, temp, 0);
emit_move_insn (target, temp);
return;
}
this probably loses. */
else if (GET_CODE (target) == REG && TREE_STATIC (exp)
&& GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
- emit_move_insn (target, const0_rtx);
+    {
+      if (! cleared)
+        emit_move_insn (target, const0_rtx);
+      cleared = 1;
+    }
+
-  /* If the constructor has fewer fields than the structure,
+  /* If the constructor has fewer fields than the structure
+     or if we are initializing the structure to mostly zeros,
clear the whole structure first. */
- else if (list_length (CONSTRUCTOR_ELTS (exp))
- != list_length (TYPE_FIELDS (type)))
- clear_storage (target, int_size_in_bytes (type));
+ else if ((list_length (CONSTRUCTOR_ELTS (exp))
+ != list_length (TYPE_FIELDS (type)))
+ || mostly_zeros_p (exp))
+ {
+ if (! cleared)
+ clear_storage (target, expr_size (exp),
+ TYPE_ALIGN (type) / BITS_PER_UNIT);
+
+ cleared = 1;
+ }
else
/* Inform later passes that the old value is dead. */
emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
if (field == 0)
continue;
+ if (cleared && is_zeros_p (TREE_VALUE (elt)))
+ continue;
+
bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
unsignedp = TREE_UNSIGNED (field);
mode = DECL_MODE (field);
gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
force_reg (ptr_mode, offset_rtx)));
}
+ if (TREE_READONLY (field))
+ {
+ if (GET_CODE (to_rtx) == MEM)
+ to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
+ XEXP (to_rtx, 0));
+ RTX_UNCHANGING_P (to_rtx) = 1;
+ }
- store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
- /* The alignment of TARGET is
- at least what its type requires. */
- VOIDmode, 0,
- TYPE_ALIGN (type) / BITS_PER_UNIT,
- int_size_in_bytes (type));
+ store_constructor_field (to_rtx, bitsize, bitpos,
+ mode, TREE_VALUE (elt), type, cleared);
}
}
else if (TREE_CODE (type) == ARRAY_TYPE)
{
register tree elt;
register int i;
+ int need_to_clear;
tree domain = TYPE_DOMAIN (type);
HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
tree elttype = TREE_TYPE (type);
- /* If the constructor has fewer fields than the structure,
- clear the whole structure first. Similarly if this this is
- static constructor of a non-BLKmode object. */
-
- if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
- || (GET_CODE (target) == REG && TREE_STATIC (exp)))
- clear_storage (target, int_size_in_bytes (type));
+ /* If the constructor has fewer elements than the array,
+         clear the whole array first.  Similarly if this is a
+         static constructor of a non-BLKmode object.  */
+ if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
+ need_to_clear = 1;
+ else
+ {
+ HOST_WIDE_INT count = 0, zero_count = 0;
+ need_to_clear = 0;
+ /* This loop is a more accurate version of the loop in
+ mostly_zeros_p (it handles RANGE_EXPR in an index).
+ It is also needed to check for missing elements. */
+ for (elt = CONSTRUCTOR_ELTS (exp);
+ elt != NULL_TREE;
+ elt = TREE_CHAIN (elt), i++)
+ {
+ tree index = TREE_PURPOSE (elt);
+ HOST_WIDE_INT this_node_count;
+ if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
+ {
+ tree lo_index = TREE_OPERAND (index, 0);
+ tree hi_index = TREE_OPERAND (index, 1);
+ if (TREE_CODE (lo_index) != INTEGER_CST
+ || TREE_CODE (hi_index) != INTEGER_CST)
+ {
+ need_to_clear = 1;
+ break;
+ }
+ this_node_count = TREE_INT_CST_LOW (hi_index)
+ - TREE_INT_CST_LOW (lo_index) + 1;
+ }
+ else
+ this_node_count = 1;
+ count += this_node_count;
+ if (mostly_zeros_p (TREE_VALUE (elt)))
+ zero_count += this_node_count;
+ }
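+	  /* Clear the whole array first if at least three quarters of the
+	     elements would be zero.  */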
+ if (4 * zero_count >= 3 * count)
+ need_to_clear = 1;
+ }
+ if (need_to_clear)
+ {
+ if (! cleared)
+ clear_storage (target, expr_size (exp),
+ TYPE_ALIGN (type) / BITS_PER_UNIT);
+ cleared = 1;
+ }
else
/* Inform later passes that the old value is dead. */
emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
int bitsize;
int bitpos;
int unsignedp;
+ tree value = TREE_VALUE (elt);
tree index = TREE_PURPOSE (elt);
rtx xtarget = target;
+ if (cleared && is_zeros_p (value))
+ continue;
+
mode = TYPE_MODE (elttype);
bitsize = GET_MODE_BITSIZE (mode);
unsignedp = TREE_UNSIGNED (elttype);
- if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
+ if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
+ {
+ tree lo_index = TREE_OPERAND (index, 0);
+ tree hi_index = TREE_OPERAND (index, 1);
+ rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
+ struct nesting *loop;
+ tree position;
+
+ if (TREE_CODE (lo_index) == INTEGER_CST
+ && TREE_CODE (hi_index) == INTEGER_CST)
+ {
+ HOST_WIDE_INT lo = TREE_INT_CST_LOW (lo_index);
+ HOST_WIDE_INT hi = TREE_INT_CST_LOW (hi_index);
+ HOST_WIDE_INT count = hi - lo + 1;
+
+ /* If the range is constant and "small", unroll the loop.
+ We must also use store_field if the target is not MEM. */
+ if (GET_CODE (target) != MEM
+ || count <= 2
+ || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
+ && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
+ <= 40 * 8))
+ {
+ lo -= minelt; hi -= minelt;
+ for (; lo <= hi; lo++)
+ {
+ bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
+ store_constructor_field (target, bitsize, bitpos,
+ mode, value, type, cleared);
+ }
+ }
+ }
+ else
+ {
+ hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
+ loop_top = gen_label_rtx ();
+ loop_end = gen_label_rtx ();
+
+ unsignedp = TREE_UNSIGNED (domain);
+
+ index = build_decl (VAR_DECL, NULL_TREE, domain);
+
+ DECL_RTL (index) = index_r
+ = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
+ &unsignedp, 0));
+
+ if (TREE_CODE (value) == SAVE_EXPR
+ && SAVE_EXPR_RTL (value) == 0)
+ {
+ /* Make sure value gets expanded once before the loop. */
+ expand_expr (value, const0_rtx, VOIDmode, 0);
+ emit_queue ();
+ }
+ store_expr (lo_index, index_r, 0);
+ loop = expand_start_loop (0);
+
+ /* Assign value to element index. */
+ position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
+ size_int (BITS_PER_UNIT));
+ position = size_binop (MULT_EXPR,
+ size_binop (MINUS_EXPR, index,
+ TYPE_MIN_VALUE (domain)),
+ position);
+ pos_rtx = expand_expr (position, 0, VOIDmode, 0);
+ addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
+ xtarget = change_address (target, mode, addr);
+ if (TREE_CODE (value) == CONSTRUCTOR)
+		store_constructor (value, xtarget, cleared);
+ else
+ store_expr (value, xtarget, 0);
+
+ expand_exit_loop_if_false (loop,
+ build (LT_EXPR, integer_type_node,
+ index, hi_index));
+
+ expand_increment (build (PREINCREMENT_EXPR,
+ TREE_TYPE (index),
+ index, integer_one_node), 0);
+ expand_end_loop ();
+ emit_label (loop_end);
+
+		  /* Needed by stupid register allocation, to extend the
+ lifetime of pseudo-regs used by target past the end
+ of the loop. */
+ emit_insn (gen_rtx (USE, GET_MODE (target), target));
+ }
+ }
+ else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
|| TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
{
- rtx pos_rtx, addr, xtarget;
+ rtx pos_rtx, addr;
tree position;
if (index == 0)
index = size_int (i);
+ if (minelt)
+ index = size_binop (MINUS_EXPR, index,
+ TYPE_MIN_VALUE (domain));
position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
size_int (BITS_PER_UNIT));
position = size_binop (MULT_EXPR, index, position);
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
xtarget = change_address (target, mode, addr);
- store_expr (TREE_VALUE (elt), xtarget, 0);
+ store_expr (value, xtarget, 0);
}
else
{
* TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
else
bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
-
- store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
- /* The alignment of TARGET is
- at least what its type requires. */
- VOIDmode, 0,
- TYPE_ALIGN (type) / BITS_PER_UNIT,
- int_size_in_bytes (type));
+ store_constructor_field (target, bitsize, bitpos,
+ mode, value, type, cleared);
}
}
}
/* set constructor assignments */
else if (TREE_CODE (type) == SET_TYPE)
{
- tree elt;
+ tree elt = CONSTRUCTOR_ELTS (exp);
rtx xtarget = XEXP (target, 0);
int set_word_size = TYPE_ALIGN (type);
- int nbytes = int_size_in_bytes (type);
- tree non_const_elements;
- int need_to_clear_first;
+ int nbytes = int_size_in_bytes (type), nbits;
tree domain = TYPE_DOMAIN (type);
tree domain_min, domain_max, bitlength;
bzero/memset), and set the bits we want. */
/* Check for all zeros. */
- if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
+ if (elt == NULL_TREE)
{
- clear_storage (target, nbytes);
+ if (!cleared)
+ clear_storage (target, expr_size (exp),
+ TYPE_ALIGN (type) / BITS_PER_UNIT);
return;
}
- if (nbytes < 0)
- abort();
-
domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
bitlength = size_binop (PLUS_EXPR,
size_binop (MINUS_EXPR, domain_max, domain_min),
size_one_node);
- /* Check for range all ones, or at most a single range.
- (This optimization is only a win for big sets.) */
- if (GET_MODE (target) == BLKmode && nbytes > 16
- && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
- {
- need_to_clear_first = 1;
- non_const_elements = CONSTRUCTOR_ELTS (exp);
- }
- else
+ if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
+ abort ();
+ nbits = TREE_INT_CST_LOW (bitlength);
+
+ /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
+ are "complicated" (more than one range), initialize (the
+ constant parts) by copying from a constant. */
+ if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
+ || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
{
- int nbits = nbytes * BITS_PER_UNIT;
int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
char *bit_buffer = (char*) alloca (nbits);
int bit_pos = 0;
int ibit = 0;
int offset = 0; /* In bytes from beginning of set. */
- non_const_elements = get_set_constructor_bits (exp,
- bit_buffer, nbits);
+ elt = get_set_constructor_bits (exp, bit_buffer, nbits);
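+	  /* Pack the constant bits into words of SET_WORD_SIZE bits,
+	     storing each word as it fills up or when the last bit has
+	     been handled.  */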
for (;;)
{
if (bit_buffer[ibit])
bit_pos++; ibit++;
if (bit_pos >= set_word_size || ibit == nbits)
{
- rtx datum = GEN_INT (word);
- rtx to_rtx;
- /* The assumption here is that it is safe to use XEXP if
- the set is multi-word, but not if it's single-word. */
- if (GET_CODE (target) == MEM)
- to_rtx = change_address (target, mode,
- plus_constant (XEXP (target, 0),
- offset));
- else if (offset == 0)
- to_rtx = target;
- else
- abort ();
- emit_move_insn (to_rtx, datum);
+ if (word != 0 || ! cleared)
+ {
+ rtx datum = GEN_INT (word);
+ rtx to_rtx;
+ /* The assumption here is that it is safe to use XEXP if
+ the set is multi-word, but not if it's single-word. */
+ if (GET_CODE (target) == MEM)
+ {
+ to_rtx = plus_constant (XEXP (target, 0), offset);
+ to_rtx = change_address (target, mode, to_rtx);
+ }
+ else if (offset == 0)
+ to_rtx = target;
+ else
+ abort ();
+ emit_move_insn (to_rtx, datum);
+ }
if (ibit == nbits)
break;
word = 0;
offset += set_word_size / BITS_PER_UNIT;
}
}
- need_to_clear_first = 0;
}
-
- for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
+ else if (!cleared)
+ {
+ /* Don't bother clearing storage if the set is all ones. */
+ if (TREE_CHAIN (elt) != NULL_TREE
+ || (TREE_PURPOSE (elt) == NULL_TREE
+ ? nbits != 1
+ : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
+ || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
+ || (TREE_INT_CST_LOW (TREE_VALUE (elt))
+ - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
+ != nbits))))
+ clear_storage (target, expr_size (exp),
+ TYPE_ALIGN (type) / BITS_PER_UNIT);
+ }
+
+ for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
{
/* start of range of element or NULL */
tree startbit = TREE_PURPOSE (elt);
if (TREE_CODE (startbit) == INTEGER_CST
&& TREE_CODE (endbit) == INTEGER_CST
&& (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
- && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
+ && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
{
-
- if (need_to_clear_first
- && endb - startb != nbytes * BITS_PER_UNIT)
- clear_storage (target, nbytes);
- need_to_clear_first = 0;
emit_library_call (memset_libfunc, 0,
VOIDmode, 3,
- plus_constant (XEXP (targetx, 0), startb),
+ plus_constant (XEXP (targetx, 0),
+ startb / BITS_PER_UNIT),
Pmode,
- constm1_rtx, Pmode,
+ constm1_rtx, TYPE_MODE (integer_type_node),
GEN_INT ((endb - startb) / BITS_PER_UNIT),
- Pmode);
+ TYPE_MODE (sizetype));
}
else
#endif
{
- if (need_to_clear_first)
- {
- clear_storage (target, nbytes);
- need_to_clear_first = 0;
- }
emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
bitlength_rtx, TYPE_MODE (sizetype),
if (! integer_zerop (low_bound))
index = fold (build (MINUS_EXPR, index_type, index, low_bound));
- if (TYPE_PRECISION (index_type) != POINTER_SIZE)
+ if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
{
- index = convert (type_for_size (POINTER_SIZE, 0), index);
+ index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
+ index);
index_type = TREE_TYPE (index);
}
tree part = TREE_VALUE (tail);
tree part_type = TREE_TYPE (part);
tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
- rtx target = assign_stack_temp (TYPE_MODE (part_type),
- int_size_in_bytes (part_type), 0);
- MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
+ rtx target = assign_temp (part_type, 0, 1, 1);
if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
parts = tree_cons (to_be_saved,
break;
case RTL_EXPR:
- exp_rtl = RTL_EXPR_RTL (exp);
- if (exp_rtl == 0)
- /* We don't know what this can modify. */
+ /* If a sequence exists, we would have to scan every instruction
+ in the sequence to see if it was safe. This is probably not
+ worthwhile. */
+ if (RTL_EXPR_SEQUENCE (exp))
return 0;
+ exp_rtl = RTL_EXPR_RTL (exp);
break;
case WITH_CLEANUP_EXPR:
TREE_USED (exp) = 1;
}
+ /* Show we haven't gotten RTL for this yet. */
+ temp = 0;
+
/* Handle variables inherited from containing functions. */
context = decl_function_context (exp);
fix_lexical_addr (XEXP (addr, 0), exp));
else
addr = fix_lexical_addr (addr, exp);
- return change_address (DECL_RTL (exp), mode, addr);
+ temp = change_address (DECL_RTL (exp), mode, addr);
}
/* This is the case of an array whose size is to be determined
from its initializer, while the initializer is still being parsed.
See expand_decl. */
- if (GET_CODE (DECL_RTL (exp)) == MEM
- && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
- return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
+ else if (GET_CODE (DECL_RTL (exp)) == MEM
+ && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
+ temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
XEXP (DECL_RTL (exp), 0));
/* If DECL_RTL is memory, we are in the normal case and either
the address is not valid or it is not a register and -fforce-addr
is specified, get the address into a register. */
- if (GET_CODE (DECL_RTL (exp)) == MEM
- && modifier != EXPAND_CONST_ADDRESS
- && modifier != EXPAND_SUM
- && modifier != EXPAND_INITIALIZER
- && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
- || (flag_force_addr
- && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
- return change_address (DECL_RTL (exp), VOIDmode,
+ else if (GET_CODE (DECL_RTL (exp)) == MEM
+ && modifier != EXPAND_CONST_ADDRESS
+ && modifier != EXPAND_SUM
+ && modifier != EXPAND_INITIALIZER
+ && (! memory_address_p (DECL_MODE (exp),
+ XEXP (DECL_RTL (exp), 0))
+ || (flag_force_addr
+ && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
+ temp = change_address (DECL_RTL (exp), VOIDmode,
copy_rtx (XEXP (DECL_RTL (exp), 0)));
+ /* If we got something, return it. But first, set the alignment
+       if the address is a register.  */
+ if (temp != 0)
+ {
+ if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
+ mark_reg_pointer (XEXP (temp, 0),
+ DECL_ALIGN (exp) / BITS_PER_UNIT);
+
+ return temp;
+ }
+
/* If the mode of DECL_RTL does not match that of the decl, it
must be a promoted value. We return a SUBREG of the wanted mode,
but mark it so that we know that it was already extended. */
}
if (SAVE_EXPR_RTL (exp) == 0)
{
- if (mode == BLKmode)
- {
- temp
- = assign_stack_temp (mode, int_size_in_bytes (type), 0);
- MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
- }
- else if (mode == VOIDmode)
+ if (mode == VOIDmode)
temp = const0_rtx;
else
- temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
+ temp = assign_temp (type, 0, 0, 0);
SAVE_EXPR_RTL (exp) = temp;
if (!optimize && GET_CODE (temp) == REG)
&& (move_by_pieces_ninsns
(TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
TYPE_ALIGN (type) / BITS_PER_UNIT)
- > MOVE_RATIO))))
+ > MOVE_RATIO)
+ && ! mostly_zeros_p (exp))))
|| (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
{
rtx constructor = output_constant_def (exp);
if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
else
- {
- target
- = assign_stack_temp (mode, int_size_in_bytes (type), 0);
- if (AGGREGATE_TYPE_P (type))
- MEM_IN_STRUCT_P (target) = 1;
- }
+ target = assign_temp (type, 0, 1, 1);
}
- store_constructor (exp, target);
+
+ if (TREE_READONLY (exp))
+ {
+ if (GET_CODE (target) == MEM)
+ target = change_address (target, GET_MODE (target),
+ XEXP (target, 0));
+ RTX_UNCHANGING_P (target) = 1;
+ }
+
+ store_constructor (exp, target, 0);
return target;
}
through a pointer to const does not mean that the value there can
never change. Languages where it can never change should
also set TREE_STATIC. */
- RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) | TREE_STATIC (exp);
+ RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
return temp;
}
tree elt;
tree size = size_in_bytes (type);
- /* Convert the integer argument to a type the same size as a
- pointer so the multiply won't overflow spuriously. */
- if (TYPE_PRECISION (index_type) != POINTER_SIZE)
- index = convert (type_for_size (POINTER_SIZE, 0), index);
+ /* Convert the integer argument to a type the same size as sizetype
+ so the multiply won't overflow spuriously. */
+ if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
+ index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
+ index);
if (TREE_CODE (size) != INTEGER_CST
&& contains_placeholder_p (size))
matter, since expand_expr should not care.) */
TREE_SIDE_EFFECTS (array_adr) = 0;
- elt = build1 (INDIRECT_REF, type,
- fold (build (PLUS_EXPR,
- TYPE_POINTER_TO (variant_type),
- array_adr,
- fold (build (MULT_EXPR,
- TYPE_POINTER_TO (variant_type),
- index, size)))));
+ elt
+ = build1
+ (INDIRECT_REF, type,
+ fold (build (PLUS_EXPR,
+ TYPE_POINTER_TO (variant_type),
+ array_adr,
+ fold
+ (build1
+ (NOP_EXPR,
+ TYPE_POINTER_TO (variant_type),
+ fold (build (MULT_EXPR, TREE_TYPE (index),
+ index,
+ convert (TREE_TYPE (index),
+						    size))))))));
/* Volatility, etc., of new expression is same as old
expression. */
if (tem == exp)
abort ();
- /* In some cases, we will be offsetting OP0's address by a constant.
- So get it as a sum, if possible. If we will be using it
- directly in an insn, we validate it.
-
- If TEM's type is a union of variable size, pass TARGET to the inner
+ /* If TEM's type is a union of variable size, pass TARGET to the inner
computation, since it will need a temporary and TARGET is known
to have to do. This occurs in unchecked conversion in Ada. */
&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
!= INTEGER_CST)
? target : NULL_RTX),
- VOIDmode, EXPAND_SUM);
+ VOIDmode,
+ modifier == EXPAND_INITIALIZER ? modifier : 0);
/* If this is a constant, put it into a register if it is a
legitimate constant and memory if it isn't. */
if (ext_mode == BLKmode)
abort ();
- op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
+ op0 = validize_mem (op0);
+
+ if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
+ mark_reg_pointer (XEXP (op0, 0), alignment);
+
+ op0 = extract_bit_field (op0, bitsize, bitpos,
unsignedp, target, ext_mode, ext_mode,
alignment,
int_size_in_bytes (TREE_TYPE (tem)));
return op0;
}
+ /* If the result is BLKmode, use that to access the object
+ now as well. */
+ if (mode == BLKmode)
+ mode1 = BLKmode;
+
/* Get a reference to just this component. */
if (modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
op0 = change_address (op0, mode1,
plus_constant (XEXP (op0, 0),
(bitpos / BITS_PER_UNIT)));
+ if (GET_CODE (XEXP (op0, 0)) == REG)
+ mark_reg_pointer (XEXP (op0, 0), alignment);
+
MEM_IN_STRUCT_P (op0) = 1;
MEM_VOLATILE_P (op0) |= volatilep;
if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
if (target == 0)
{
- if (mode == BLKmode)
- {
- if (TYPE_SIZE (type) == 0
- || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
- abort ();
- target = assign_stack_temp (BLKmode,
- (TREE_INT_CST_LOW (TYPE_SIZE (type))
- + BITS_PER_UNIT - 1)
- / BITS_PER_UNIT, 0);
- MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
- }
- else
+ if (mode != BLKmode)
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
+ else
+ target = assign_temp (type, 0, 1, 1);
}
if (GET_CODE (target) == MEM)
if (modifier == EXPAND_INITIALIZER)
return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
- if (flag_force_mem && GET_CODE (op0) == MEM)
- op0 = copy_to_reg (op0);
-
if (target == 0)
return
convert_to_mode (mode, op0,
{
enum machine_mode innermode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
+ optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
+ ? smul_widen_optab : umul_widen_optab);
this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
? umul_widen_optab : smul_widen_optab);
- if (mode == GET_MODE_WIDER_MODE (innermode)
- && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
+ if (mode == GET_MODE_WIDER_MODE (innermode))
{
- op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
- NULL_RTX, VOIDmode, 0);
- if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
- VOIDmode, 0);
- else
- op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
- NULL_RTX, VOIDmode, 0);
- goto binop2;
+ if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
+ {
+ op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
+ NULL_RTX, VOIDmode, 0);
+ if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
+ VOIDmode, 0);
+ else
+ op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
+ NULL_RTX, VOIDmode, 0);
+ goto binop2;
+ }
+ else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
+ && innermode == word_mode)
+ {
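+	      /* Only the widening multiply of the opposite signedness is
+	         available and the operands are word-sized; use it, then
+	         correct the high part of the product for the signedness
+	         difference.  */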
+ rtx htem;
+ op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
+ NULL_RTX, VOIDmode, 0);
+ if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
+ VOIDmode, 0);
+ else
+ op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
+ NULL_RTX, VOIDmode, 0);
+ temp = expand_binop (mode, other_optab, op0, op1, target,
+ unsignedp, OPTAB_LIB_WIDEN);
+ htem = expand_mult_highpart_adjust (innermode,
+ gen_highpart (innermode, temp),
+ op0, op1,
+ gen_highpart (innermode, temp),
+ unsignedp);
+ emit_move_insn (gen_highpart (innermode, temp), htem);
+ return temp;
+ }
}
}
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
&& ! (GET_CODE (original_target) == MEM
&& MEM_VOLATILE_P (original_target)))
temp = original_target;
- else if (mode == BLKmode)
- {
- if (TYPE_SIZE (type) == 0
- || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
- abort ();
-
- temp = assign_stack_temp (BLKmode,
- (TREE_INT_CST_LOW (TYPE_SIZE (type))
- + BITS_PER_UNIT - 1)
- / BITS_PER_UNIT, 0);
- MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
- }
else
- temp = gen_reg_rtx (mode);
+ temp = assign_temp (type, 0, 0, 1);
/* Check for X ? A + B : A. If we have this, we can copy
A to the output and conditionally add B. Similarly for unary
/* ??? deprecated, use sequences instead. */
reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
+ /* All cleanups must be on the function_obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
+
/* convert flag, which is an rtx, into a tree. */
cond = make_node (RTL_EXPR);
TREE_TYPE (cond) = integer_type_node;
left_cleanups, right_cleanups);
new_cleanups = fold (new_cleanups);
+ pop_obstacks ();
+
/* Now add in the conditionalized cleanups. */
cleanups_this_call
= tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
}
else
{
- target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
- MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
+ target = assign_temp (type, 2, 1, 1);
/* All temp slots at this level must not conflict. */
preserve_temp_slots (target);
DECL_RTL (slot) = target;
if (ignore)
return op0;
+ op0 = protect_from_queue (op0, 0);
+
/* We would like the object in memory. If it is a constant,
we can have it be statically allocated into memory. For
a non-constant (REG, SUBREG or CONCAT), we need to allocate some
/* If this object is in a register, it must be not
be BLKmode. */
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- enum machine_mode inner_mode = TYPE_MODE (inner_type);
- rtx memloc
- = assign_stack_temp (inner_mode,
- int_size_in_bytes (inner_type), 1);
- MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
+ rtx memloc = assign_temp (inner_type, 1, 1, 1);
mark_temp_addr_taken (memloc);
emit_move_insn (memloc, op0);
if (flag_force_addr && GET_CODE (op0) != REG)
op0 = force_reg (Pmode, op0);
- if (GET_CODE (op0) == REG)
- mark_reg_pointer (op0);
+ if (GET_CODE (op0) == REG
+ && ! REG_USERVAR_P (op0))
+ mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
/* If we might have had a temp slot, add an equivalent address
for it. */
plus_constant (tem, GET_MODE_SIZE (Pmode)));
tem = gen_rtx (MEM, Pmode, tem);
#endif
+ return tem;
}
\f
/* Expand an expression EXP that calls a built-in function,
tree arg = TREE_VALUE (arglist);
/* Strip off all nops for the sake of the comparison. This
- is not quite the same as STRIP_NOPS. It does more. */
+ is not quite the same as STRIP_NOPS. It does more.
+	   We must also strip off INDIRECT_REF for C++ reference
+ parameters. */
while (TREE_CODE (arg) == NOP_EXPR
|| TREE_CODE (arg) == CONVERT_EXPR
- || TREE_CODE (arg) == NON_LVALUE_EXPR)
+ || TREE_CODE (arg) == NON_LVALUE_EXPR
+ || TREE_CODE (arg) == INDIRECT_REF)
arg = TREE_OPERAND (arg, 0);
if (arg != last_parm)
warning ("second parameter of `va_start' not last named argument");
break;
#endif
+ /* __builtin_setjmp is passed a pointer to an array of five words
+ (not all will be used on all machines). It operates similarly to
+ the C library function of the same name, but is more efficient.
+ Much of the code below (and for longjmp) is copied from the handling
+ of non-local gotos.
+
+ NOTE: This is intended for use by GNAT and will only work in
+ the method used by it. This code will likely NOT survive to
+ the GCC 2.8.0 release. */
+ case BUILT_IN_SETJMP:
+ if (arglist == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
+ break;
+
+ {
+ rtx buf_addr
+ = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), subtarget,
+ VOIDmode, 0));
+ rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
+ enum machine_mode sa_mode = Pmode;
+ rtx stack_save;
+
+ if (target == 0 || GET_CODE (target) != REG
+ || REGNO (target) < FIRST_PSEUDO_REGISTER)
+ target = gen_reg_rtx (value_mode);
+
+ emit_queue ();
+
+ emit_note (NULL_PTR, NOTE_INSN_SETJMP);
+ current_function_calls_setjmp = 1;
+
+ /* We store the frame pointer and the address of lab1 in the buffer
+ and use the rest of it for the stack save area, which is
+ machine-dependent. */
+ emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
+ virtual_stack_vars_rtx);
+ emit_move_insn
+ (validize_mem (gen_rtx (MEM, Pmode,
+ plus_constant (buf_addr,
+ GET_MODE_SIZE (Pmode)))),
+ gen_rtx (LABEL_REF, Pmode, lab1));
+
+#ifdef HAVE_save_stack_nonlocal
+ if (HAVE_save_stack_nonlocal)
+ sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
+#endif
+
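+	/* The rest of the buffer, starting at offset 2 * GET_MODE_SIZE
+	   (Pmode), holds the machine-dependent stack save area.  */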
+ stack_save = gen_rtx (MEM, sa_mode,
+ plus_constant (buf_addr,
+ 2 * GET_MODE_SIZE (Pmode)));
+ emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
+
+ /* Set TARGET to zero and branch around the other case. */
+ emit_move_insn (target, const0_rtx);
+ emit_jump_insn (gen_jump (lab2));
+ emit_barrier ();
+ emit_label (lab1);
+
+ /* Now put in the code to restore the frame pointer, and argument
+ pointer, if needed. The code below is from expand_end_bindings
+ in stmt.c; see detailed documentation there. */
+#ifdef HAVE_nonlocal_goto
+ if (! HAVE_nonlocal_goto)
+#endif
+ emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
+
+#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ if (fixed_regs[ARG_POINTER_REGNUM])
+ {
+#ifdef ELIMINABLE_REGS
+ static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
+ int i;
+
+ for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
+ if (elim_regs[i].from == ARG_POINTER_REGNUM
+ && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
+ break;
+
+ if (i == sizeof elim_regs / sizeof elim_regs [0])
+#endif
+ {
+ /* Now restore our arg pointer from the address at which it
+ was saved in our stack frame.
+		 If there hasn't been space allocated for it yet, make
+ some now. */
+ if (arg_pointer_save_area == 0)
+ arg_pointer_save_area
+ = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
+ emit_move_insn (virtual_incoming_args_rtx,
+ copy_to_reg (arg_pointer_save_area));
+ }
+ }
+#endif
+
+ /* The result to return is in the static chain pointer. */
+ if (GET_MODE (static_chain_rtx) == GET_MODE (target))
+ emit_move_insn (target, static_chain_rtx);
+ else
+ convert_move (target, static_chain_rtx, 0);
+
+ emit_label (lab2);
+ return target;
+ }
+
+ /* __builtin_longjmp is passed a pointer to an array of five words
+ and a value to return. It's similar to the C library longjmp
+ function but works with __builtin_setjmp above. */
+ case BUILT_IN_LONGJMP:
+ if (arglist == 0 || TREE_CHAIN (arglist) == 0
+ || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
+ break;
+
+ {
+ rtx buf_addr
+ = force_reg (Pmode, expand_expr (TREE_VALUE (arglist), NULL_RTX,
+ VOIDmode, 0));
+ rtx fp = gen_rtx (MEM, Pmode, buf_addr);
+ rtx lab = gen_rtx (MEM, Pmode,
+ plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
+ enum machine_mode sa_mode
+#ifdef HAVE_save_stack_nonlocal
+ = (HAVE_save_stack_nonlocal
+ ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
+ : Pmode);
+#else
+ = Pmode;
+#endif
+ rtx stack = gen_rtx (MEM, sa_mode,
+ plus_constant (buf_addr,
+ 2 * GET_MODE_SIZE (Pmode)));
+ rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), NULL_RTX,
+ VOIDmode, 0);
+
+ /* Pick up FP, label, and SP from the block and jump. This code is
+ from expand_goto in stmt.c; see there for detailed comments. */
+#if HAVE_nonlocal_goto
+ if (HAVE_nonlocal_goto)
+ emit_insn (gen_nonlocal_goto (fp, lab, stack, value));
+ else
+#endif
+ {
+ emit_move_insn (hard_frame_pointer_rtx, fp);
+ emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
+
+ /* Put in the static chain register the return value. */
+ emit_move_insn (static_chain_rtx, value);
+ emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
+ emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
+ emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
+ emit_indirect_jump (copy_to_reg (lab));
+ }
+
+ return const0_rtx;
+ }
+
default: /* just do library call, if unknown builtin */
error ("built-in function `%s' not currently supported",
IDENTIFIER_POINTER (DECL_NAME (fndecl)));
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
- if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
+ if (optimize > 0
+ && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
&& ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
&& ! flag_inline_functions)
pending_stack_adjust = 0;
/* reverse them so that we can build them in the right order. */
cleanups = nreverse (cleanups);
+ /* All cleanups must be on the function_obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
+
while (cleanups)
{
if (new_cleanups)
cleanups = TREE_CHAIN (cleanups);
}
+
+ pop_obstacks ();
}
return new_cleanups;
emit_move_insn (flag, const1_rtx);
emit_insns (seq2);
+ /* All cleanups must be on the function_obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
+
/* convert flag, which is an rtx, into a tree. */
cond = make_node (RTL_EXPR);
TREE_TYPE (cond) = integer_type_node;
cleanups, integer_zero_node);
new_cleanups = fold (new_cleanups);
+ pop_obstacks ();
+
/* Now add in the conditionalized cleanups. */
cleanups_this_call
= tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
emit_move_insn (flag, const1_rtx);
emit_insns (seq2);
+ /* All cleanups must be on the function_obstack. */
+ push_obstacks_nochange ();
+ resume_temporary_allocation ();
+
/* convert flag, which is an rtx, into a tree. */
cond = make_node (RTL_EXPR);
TREE_TYPE (cond) = integer_type_node;
cleanups, integer_zero_node);
new_cleanups = fold (new_cleanups);
+ pop_obstacks ();
+
/* Now add in the conditionalized cleanups. */
cleanups_this_call
= tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);