pending_chain = 0;
pending_stack_adjust = 0;
- arg_space_so_far = 0;
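+ /* Net amount by which the stack pointer has been adjusted so far
+ in this function.  */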
+ stack_pointer_delta = 0;
inhibit_defer_pop = 0;
saveregs_value = 0;
apply_args_value = 0;
if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
- align = MOVE_MAX;
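+ /* ALIGN is measured in bits, so express the MOVE_MAX cap in bits too.  */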
+ align = MOVE_MAX * BITS_PER_UNIT;
while (max_size > 1)
{
if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
|| align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
- align = MOVE_MAX;
+ align = MOVE_MAX * BITS_PER_UNIT;
/* First move what we can in the largest integer mode, then go to
successively smaller modes. */
&& where_pad != none && where_pad != stack_direction)
anti_adjust_stack (GEN_INT (extra));
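+ /* The pushes emitted by move_by_pieces below adjust the stack
+ pointer directly, so account for them here.  */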
+ stack_pointer_delta += INTVAL (size) - used;
move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
INTVAL (size) - used, align);
#ifdef PUSH_ROUNDING
if (args_addr == 0 && PUSH_ARGS)
- addr = gen_push_operand ();
+ {
+ addr = gen_push_operand ();
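+ /* A push insn moves the stack pointer by the rounded size of the mode.  */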
+ stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
+ }
else
#endif
{
rtx offset_rtx;
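+ /* OFFSET is a byte offset, already in sizetype.  */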
if (contains_placeholder_p (offset))
- offset = build (WITH_RECORD_EXPR, bitsizetype,
+ offset = build (WITH_RECORD_EXPR, sizetype,
offset, make_tree (TREE_TYPE (exp), target));
- offset = size_binop (EXACT_DIV_EXPR, offset, bitsize_unit_node);
- offset = convert (sizetype, offset);
-
offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
if (GET_CODE (to_rtx) != MEM)
abort ();
gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
force_reg (ptr_mode,
offset_rtx)));
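+ /* With a variable offset, the best alignment we know is that
+ of the offset itself.  */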
+ align = DECL_OFFSET_ALIGN (field);
}
if (TREE_READONLY (field))
}
#endif
store_constructor_field (to_rtx, bitsize, bitpos, mode,
- TREE_VALUE (elt), type,
- MIN (align,
- DECL_ALIGN (TREE_PURPOSE (elt))),
- cleared);
+ TREE_VALUE (elt), type, align, cleared);
}
}
else if (TREE_CODE (type) == ARRAY_TYPE)
&& contains_placeholder_p (this_offset))
this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
- offset = size_binop (PLUS_EXPR, offset, DECL_FIELD_OFFSET (field));
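+ /* Use THIS_OFFSET, which may have been wrapped in a WITH_RECORD_EXPR above.  */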
+ offset = size_binop (PLUS_EXPR, offset, this_offset);
bit_offset = size_binop (PLUS_EXPR, bit_offset,
DECL_FIELD_BIT_OFFSET (field));
if (! host_integerp (offset, 0))
alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
}
+
else if (TREE_CODE (exp) == ARRAY_REF)
{
tree index = TREE_OPERAND (exp, 1);
tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
+ tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
/* We assume all arrays have sizes that are a multiple of a byte.
First subtract the lower bound, if any, in the type of the
index, then convert to sizetype and multiply by the size of
the array element.  */
index = fold (build (MINUS_EXPR, TREE_TYPE (index),
index, low_bound));
+ /* If the index has a self-referential type, pass it to a
+ WITH_RECORD_EXPR; if the component size is self-referential,
+ pass our component to one.  */
if (! TREE_CONSTANT (index)
&& contains_placeholder_p (index))
index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
+ if (! TREE_CONSTANT (unit_size)
+ && contains_placeholder_p (unit_size))
+ unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
+ TREE_OPERAND (exp, 0));
offset = size_binop (PLUS_EXPR, offset,
size_binop (MULT_EXPR,
convert (sizetype, index),
- TYPE_SIZE_UNIT (TREE_TYPE (exp))));
+ unit_size));
}
+
else if (TREE_CODE (exp) != NON_LVALUE_EXPR
&& ! ((TREE_CODE (exp) == NOP_EXPR
|| TREE_CODE (exp) == CONVERT_EXPR)
if (temp != 0)
{
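+ /* mark_reg_pointer takes the known alignment in bits.  */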
if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
- mark_reg_pointer (XEXP (temp, 0),
- DECL_ALIGN (exp) / BITS_PER_UNIT);
+ mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
return temp;
}
op0 = validize_mem (op0);
if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
- mark_reg_pointer (XEXP (op0, 0), alignment / BITS_PER_UNIT);
+ mark_reg_pointer (XEXP (op0, 0), alignment);
op0 = extract_bit_field (op0, bitsize, bitpos,
unsignedp, target, ext_mode, ext_mode,
MEM_ALIAS_SET (op0) = get_alias_set (exp);
if (GET_CODE (XEXP (op0, 0)) == REG)
- mark_reg_pointer (XEXP (op0, 0), alignment / BITS_PER_UNIT);
+ mark_reg_pointer (XEXP (op0, 0), alignment);
MEM_SET_IN_STRUCT_P (op0, 1);
MEM_VOLATILE_P (op0) |= volatilep;
* BITS_PER_UNIT),
GET_MODE_BITSIZE (mode)),
0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
- VOIDmode, 0, 1, int_size_in_bytes (type), 0);
+ VOIDmode, 0, BITS_PER_UNIT,
+ int_size_in_bytes (type), 0);
else
abort ();
if (GET_CODE (op0) == REG
&& ! REG_USERVAR_P (op0))
- mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
+ mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
/* If we might have had a temp slot, add an equivalent address
for it. */
alignment >>= 1;
if (GET_CODE (XEXP (op0, 0)) == REG)
- mark_reg_pointer (XEXP (op0, 0), alignment / BITS_PER_UNIT);
+ mark_reg_pointer (XEXP (op0, 0), alignment);
MEM_IN_STRUCT_P (op0) = 1;
MEM_VOLATILE_P (op0) |= volatilep;
&& EXIT_IGNORE_STACK
&& ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
&& ! flag_inline_functions)
- pending_stack_adjust = 0;
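+ /* Adjust stack_pointer_delta as if the deferred pops had been emitted.  */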
+ {
+ stack_pointer_delta -= pending_stack_adjust;
+ pending_stack_adjust = 0;
+ }
#endif
}