#define CASE_VECTOR_PC_RELATIVE 0
#endif
+/* Hook called by safe_from_p for language-specific tree codes. It is
+ up to the language front-end to install a hook if it has any such
+ codes that safe_from_p needs to know about. Since safe_from_p will
+ recursively explore the TREE_OPERANDs of an expression, this hook
+ should not reexamine those pieces. This routine may recursively
+ call safe_from_p; it should always pass `0' as the TOP_P
+ parameter. */
+int (*lang_safe_from_p) PARAMS ((rtx, tree));
+
/* If this is nonzero, we do not bother generating VOLATILE
around volatile memory references, and we are willing to
output indirect addresses. If cse is to follow, we reject
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
HOST_WIDE_INT, enum machine_mode,
- tree, tree, unsigned int, int));
+ tree, tree, unsigned int, int,
+ int));
static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
-static int safe_from_p PARAMS ((rtx, tree, int));
static int fixed_type_p PARAMS ((tree));
static rtx var_rtx PARAMS ((tree));
static int readonly_fields_p PARAMS ((tree));
if (to_real)
{
- rtx value;
+ rtx value, insns;
if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
{
/* This conversion is not implemented yet. */
abort ();
- value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
+ start_sequence ();
+ value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
1, from, from_mode);
- emit_move_insn (to, value);
+ insns = get_insns ();
+ end_sequence ();
+ emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
+ from));
return;
}
else
{
#ifdef HAVE_extendpsisi2
- if (HAVE_extendpsisi2)
+ if (! unsignedp && HAVE_extendpsisi2)
{
emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
return;
}
#endif /* HAVE_extendpsisi2 */
+#ifdef HAVE_zero_extendpsisi2
+ if (unsignedp && HAVE_zero_extendpsisi2)
+ {
+ emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
+ return;
+ }
+#endif /* HAVE_zero_extendpsisi2 */
abort ();
}
}
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
- emit_library_call (bcopy_libfunc, 0,
+ emit_library_call (bcopy_libfunc, LCT_NORMAL,
VOIDmode, 3, y, Pmode, x, Pmode,
convert_to_mode (TYPE_MODE (integer_type_node), size,
TREE_UNSIGNED (integer_type_node)),
#endif
rtx retval = 0;
- if (GET_MODE (object) == BLKmode)
+ /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
+ just move a zero. Otherwise, do this a piece at a time. */
+ if (GET_MODE (object) != BLKmode
+ && GET_CODE (size) == CONST_INT
+ && GET_MODE_SIZE (GET_MODE (object)) == INTVAL (size))
+ emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
+ else
{
object = protect_from_queue (object, 1);
size = protect_from_queue (size, 0);
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
- emit_library_call (bzero_libfunc, 0,
+ emit_library_call (bzero_libfunc, LCT_NORMAL,
VOIDmode, 2, object, Pmode, size,
TYPE_MODE (integer_type_node));
#endif
}
}
- else
- emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
return retval;
}
in_check_memory_usage = 1;
temp = get_push_address (INTVAL (size) - used);
if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
- temp, Pmode,
- XEXP (xinner, 0), Pmode,
+ emit_library_call (chkr_copy_bitmap_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
+ Pmode, XEXP (xinner, 0), Pmode,
GEN_INT (INTVAL (size) - used),
TYPE_MODE (sizetype));
else
- emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
- temp, Pmode,
- GEN_INT (INTVAL (size) - used),
+ emit_library_call (chkr_set_right_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
+ Pmode, GEN_INT (INTVAL (size) - used),
TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_RW),
TYPE_MODE (integer_type_node));
in_check_memory_usage = 1;
target = copy_to_reg (temp);
if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_copy_bitmap_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
target, Pmode,
XEXP (xinner, 0), Pmode,
size, TYPE_MODE (sizetype));
else
- emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_set_right_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
target, Pmode,
size, TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_RW),
to force it to pop the bcopy-arguments right away. */
NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
- emit_library_call (memcpy_libfunc, 0,
+ emit_library_call (memcpy_libfunc, LCT_NORMAL,
VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype));
#else
- emit_library_call (bcopy_libfunc, 0,
+ emit_library_call (bcopy_libfunc, LCT_NORMAL,
VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
convert_to_mode (TYPE_MODE (integer_type_node),
size,
target = get_push_address (GET_MODE_SIZE (mode));
if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
- target, Pmode,
- XEXP (x, 0), Pmode,
+ emit_library_call (chkr_copy_bitmap_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
+ Pmode, XEXP (x, 0), Pmode,
GEN_INT (GET_MODE_SIZE (mode)),
TYPE_MODE (sizetype));
else
- emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
- target, Pmode,
- GEN_INT (GET_MODE_SIZE (mode)),
+ emit_library_call (chkr_set_right_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
+ Pmode, GEN_INT (GET_MODE_SIZE (mode)),
TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_RW),
TYPE_MODE (integer_type_node));
/* Check the access right of the pointer. */
in_check_memory_usage = 1;
if (size)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- to_addr, Pmode,
+ emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
+ VOIDmode, 3, to_addr, Pmode,
GEN_INT (size), TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_WO),
TYPE_MODE (integer_type_node));
/* Copy the rights of the bitmap. */
if (current_function_check_memory_usage)
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
- XEXP (to_rtx, 0), Pmode,
+ emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
+ VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
XEXP (from_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype));
#ifdef TARGET_MEM_FUNCTIONS
- emit_library_call (memcpy_libfunc, 0,
+ emit_library_call (memcpy_libfunc, LCT_NORMAL,
VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
XEXP (from_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
TYPE_MODE (sizetype));
#else
- emit_library_call (bcopy_libfunc, 0,
+ emit_library_call (bcopy_libfunc, LCT_NORMAL,
VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
XEXP (to_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (integer_type_node),
{
in_check_memory_usage = 1;
if (GET_CODE (temp) == MEM)
- emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
- XEXP (target, 0), Pmode,
+ emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
+ VOIDmode, 3, XEXP (target, 0), Pmode,
XEXP (temp, 0), Pmode,
expr_size (exp), TYPE_MODE (sizetype));
else
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- XEXP (target, 0), Pmode,
+ emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
+ VOIDmode, 3, XEXP (target, 0), Pmode,
expr_size (exp), TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_WO),
TYPE_MODE (integer_type_node));
/* Be sure we can write on ADDR. */
in_check_memory_usage = 1;
if (current_function_check_memory_usage)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
addr, Pmode,
size, TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_WO),
TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
TYPE is the type of the CONSTRUCTOR, not the element type.
ALIGN and CLEARED are as for store_constructor.
+ ALIAS_SET is the alias set to use for any stores.
This provides a recursive shortcut back to store_constructor when it isn't
necessary to go through store_field. This is so that we can pass through
static void
store_constructor_field (target, bitsize, bitpos,
- mode, exp, type, align, cleared)
+ mode, exp, type, align, cleared, alias_set)
rtx target;
unsigned HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
tree exp, type;
unsigned int align;
int cleared;
+ int alias_set;
{
if (TREE_CODE (exp) == CONSTRUCTOR
&& bitpos % BITS_PER_UNIT == 0
? BLKmode : VOIDmode,
plus_constant (XEXP (target, 0),
bitpos / BITS_PER_UNIT));
+
+ MEM_ALIAS_SET (target) = alias_set;
store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
}
else
store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
- int_size_in_bytes (type), 0);
+ int_size_in_bytes (type), alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
/* If the constructor has fewer fields than the structure
or if we are initializing the structure to mostly zeros,
- clear the whole structure first. */
+ clear the whole structure first. Don't do this if TARGET is a
+ register whose mode size isn't equal to SIZE since clear_storage
+ can't handle this case. */
else if (size > 0
&& ((list_length (CONSTRUCTOR_ELTS (exp))
!= fields_length (type))
- || mostly_zeros_p (exp)))
+ || mostly_zeros_p (exp))
+ && (GET_CODE (target) != REG
+ || GET_MODE_SIZE (GET_MODE (target)) == size))
{
if (! cleared)
clear_storage (target, GEN_INT (size), align);
}
#endif
store_constructor_field (to_rtx, bitsize, bitpos, mode,
- TREE_VALUE (elt), type, align, cleared);
+ TREE_VALUE (elt), type, align, cleared,
+ DECL_NONADDRESSABLE_P (field)
+ ? MEM_ALIAS_SET (to_rtx)
+ : get_alias_set (TREE_TYPE (field)));
}
}
else if (TREE_CODE (type) == ARRAY_TYPE)
register int i;
int need_to_clear;
tree domain = TYPE_DOMAIN (type);
- HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
- HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
tree elttype = TREE_TYPE (type);
+ int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
+ && host_integerp (TYPE_MAX_VALUE (domain), 0));
+ HOST_WIDE_INT minelt;
+ HOST_WIDE_INT maxelt;
+
+ /* If we have constant bounds for the range of the type, get them. */
+ if (const_bounds_p)
+ {
+ minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
+ maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
+ }
/* If the constructor has fewer elements than the array,
clear the whole array first. Similarly if this is
else
{
HOST_WIDE_INT count = 0, zero_count = 0;
- need_to_clear = 0;
+ need_to_clear = ! const_bounds_p;
+
/* This loop is a more accurate version of the loop in
mostly_zeros_p (it handles RANGE_EXPR in an index).
It is also needed to check for missing elements. */
for (elt = CONSTRUCTOR_ELTS (exp);
- elt != NULL_TREE;
+ elt != NULL_TREE && ! need_to_clear;
elt = TREE_CHAIN (elt))
{
tree index = TREE_PURPOSE (elt);
}
else
this_node_count = 1;
+
count += this_node_count;
if (mostly_zeros_p (TREE_VALUE (elt)))
zero_count += this_node_count;
}
+
/* Clear the entire array first if there are any missing elements,
or if the incidence of zero elements is >= 75%. */
- if (count < maxelt - minelt + 1
- || 4 * zero_count >= 3 * count)
+ if (! need_to_clear
+ && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
need_to_clear = 1;
}
+
if (need_to_clear && size > 0)
{
if (! cleared)
tree position;
/* If the range is constant and "small", unroll the loop. */
- if (host_integerp (lo_index, 0)
+ if (const_bounds_p
+ && host_integerp (lo_index, 0)
&& host_integerp (hi_index, 0)
&& (lo = tree_low_cst (lo_index, 0),
hi = tree_low_cst (hi_index, 0),
for (; lo <= hi; lo++)
{
bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
- store_constructor_field (target, bitsize, bitpos, mode,
- value, type, align, cleared);
+ store_constructor_field
+ (target, bitsize, bitpos, mode, value, type, align,
+ cleared,
+ TYPE_NONALIASED_COMPONENT (type)
+ ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
}
}
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
store_constructor_field (target, bitsize, bitpos, mode, value,
- type, align, cleared);
+ type, align, cleared,
+ TYPE_NONALIASED_COMPONENT (type)
+ ? MEM_ALIAS_SET (target) :
+ get_alias_set (elttype));
+
}
}
}
&& (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
&& (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
{
- emit_library_call (memset_libfunc, 0,
+ emit_library_call (memset_libfunc, LCT_NORMAL,
VOIDmode, 3,
plus_constant (XEXP (targetx, 0),
startb / BITS_PER_UNIT),
else
#endif
emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
- 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
- bitlength_rtx, TYPE_MODE (sizetype),
+ LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
+ Pmode, bitlength_rtx, TYPE_MODE (sizetype),
startbit_rtx, TYPE_MODE (sizetype),
endbit_rtx, TYPE_MODE (sizetype));
align >>= 1;
emit_block_move (target, temp,
- GEN_INT ((bitsize + BITS_PER_UNIT - 1)
- / BITS_PER_UNIT),
+ bitsize == -1 ? expr_size (exp)
+ : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
+ / BITS_PER_UNIT),
align);
return value_mode == VOIDmode ? const0_rtx : target;
It is always safe for this routine to return zero since it merely
searches for optimization opportunities. */
-static int
+int
safe_from_p (x, exp, top_p)
rtx x;
tree exp;
if (exp_rtl)
break;
- nops = TREE_CODE_LENGTH (TREE_CODE (exp));
+ nops = first_rtl_op (TREE_CODE (exp));
for (i = 0; i < nops; i++)
if (TREE_OPERAND (exp, i) != 0
&& ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
return 0;
+
+ /* If this is a language-specific tree code, it may require
+ special handling. */
+ if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
+ && lang_safe_from_p
+ && !(*lang_safe_from_p) (x, exp))
+ return 0;
}
/* If we have an rtl, find any enclosed object. Then see if we conflict
enum expand_modifier ro_modifier;
/* Handle ERROR_MARK before anybody tries to access its type. */
- if (TREE_CODE (exp) == ERROR_MARK)
+ if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
{
op0 = CONST0_RTX (tmode);
if (op0 != 0)
in_check_memory_usage = 1;
if (memory_usage != MEMORY_USE_DONT)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
XEXP (DECL_RTL (exp), 0), Pmode,
GEN_INT (int_size_in_bytes (type)),
TYPE_MODE (sizetype),
if (memory_usage != MEMORY_USE_DONT)
{
in_check_memory_usage = 1;
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- op0, Pmode,
- GEN_INT (int_size_in_bytes (type)),
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
+ Pmode, GEN_INT (int_size_in_bytes (type)),
TYPE_MODE (sizetype),
GEN_INT (memory_usage),
TYPE_MODE (integer_type_node));
else if (TREE_CODE (init) == STRING_CST
&& 0 > compare_tree_int (index,
TREE_STRING_LENGTH (init)))
- return (GEN_INT
- (TREE_STRING_POINTER
- (init)[TREE_INT_CST_LOW (index)]));
+ {
+ tree type = TREE_TYPE (TREE_TYPE (init));
+ enum machine_mode mode = TYPE_MODE (type);
+
+ if (GET_MODE_CLASS (mode) == MODE_INT
+ && GET_MODE_SIZE (mode) == 1)
+ return (GEN_INT
+ (TREE_STRING_POINTER
+ (init)[TREE_INT_CST_LOW (index)]));
+ }
}
}
}
/* Check the access right of the pointer. */
in_check_memory_usage = 1;
if (size > BITS_PER_UNIT)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
- to, Pmode,
- GEN_INT (size / BITS_PER_UNIT),
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
+ Pmode, GEN_INT (size / BITS_PER_UNIT),
TYPE_MODE (sizetype),
GEN_INT (memory_usage),
TYPE_MODE (integer_type_node));
target = assign_temp (type, 0, 1, 1);
emit_block_move (target, op0,
- GEN_INT ((bitsize + BITS_PER_UNIT - 1)
- / BITS_PER_UNIT),
+ bitsize == -1 ? expr_size (exp)
+ : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
+ / BITS_PER_UNIT),
BITS_PER_UNIT);
return target;
/* Get a reference to just this component. */
if (modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
- op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
- (bitpos / BITS_PER_UNIT)));
+ {
+ rtx new = gen_rtx_MEM (mode1,
+ plus_constant (XEXP (op0, 0),
+ (bitpos / BITS_PER_UNIT)));
+
+ MEM_COPY_ATTRIBUTES (new, op0);
+ op0 = new;
+ }
else
op0 = change_address (op0, mode1,
plus_constant (XEXP (op0, 0),
&& (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
== FUNCTION_DECL)
&& DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
- return expand_builtin (exp, target, subtarget, tmode, ignore);
+ {
+ if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+ == BUILT_IN_FRONTEND)
+ return (*lang_expand_expr) (exp, original_target, tmode, modifier);
+ else
+ return expand_builtin (exp, target, subtarget, tmode, ignore);
+ }
/* If this call was expanded already by preexpand_calls,
just return the result we got. */
/* Check the access right of the pointer. */
in_check_memory_usage = 1;
if (size > BITS_PER_UNIT)
- emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
+ emit_library_call (chkr_check_addr_libfunc,
+ LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
TYPE_MODE (sizetype),
GEN_INT (MEMORY_USE_RO),