gcc_assert (GET_CODE (dst) == PARALLEL);
- if (!SCALAR_INT_MODE_P (m)
- && !MEM_P (orig_src) && GET_CODE (orig_src) != CONCAT)
+ if (m != VOIDmode
+ && !SCALAR_INT_MODE_P (m)
+ && !MEM_P (orig_src)
+ && GET_CODE (orig_src) != CONCAT)
{
enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
if (imode == BLKmode)
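(As an aside: a minimal standalone C sketch, ordinary user code rather than GCC internals, of the idea behind this fallback. When the source is not in a scalar integer mode, the copy can go through an integer mode of the same width; the float/uint32_t pairing below is an assumed example, simulated with memcpy.)

  #include <stdint.h>
  #include <stdio.h>
  #include <string.h>

  int
  main (void)
  {
    float f = 1.5f;    /* value in a non-integer mode (SFmode) */
    uint32_t bits;     /* integer mode of the same width (SImode) */

    /* Bit-copy through the integer mode, much as the fallback above
       does via int_mode_for_mode and a pseudo in that mode.  */
    memcpy (&bits, &f, sizeof bits);
    printf ("0x%08x\n", (unsigned) bits);  /* 0x3fc00000 on IEEE 754 targets */
    return 0;
  }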
/* Compute cumulative bit-offset for nested component-refs and array-refs,
and find the ultimate containing object. */
while (1)
{
- if (TREE_CODE (exp) == BIT_FIELD_REF)
- bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
- else if (TREE_CODE (exp) == COMPONENT_REF)
+ switch (TREE_CODE (exp))
{
- tree field = TREE_OPERAND (exp, 1);
- tree this_offset = component_ref_field_offset (exp);
+ case BIT_FIELD_REF:
+ bit_offset = size_binop (PLUS_EXPR, bit_offset,
+ TREE_OPERAND (exp, 2));
+ break;
- /* If this field hasn't been filled in yet, don't go
- past it. This should only happen when folding expressions
- made during type construction. */
- if (this_offset == 0)
- break;
+ case COMPONENT_REF:
+ {
+ tree field = TREE_OPERAND (exp, 1);
+ tree this_offset = component_ref_field_offset (exp);
- offset = size_binop (PLUS_EXPR, offset, this_offset);
- bit_offset = size_binop (PLUS_EXPR, bit_offset,
- DECL_FIELD_BIT_OFFSET (field));
+ /* If this field hasn't been filled in yet, don't go past it.
+ This should only happen when folding expressions made during
+ type construction. */
+ if (this_offset == 0)
+ goto done;
- /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
- }
+ offset = size_binop (PLUS_EXPR, offset, this_offset);
+ bit_offset = size_binop (PLUS_EXPR, bit_offset,
+ DECL_FIELD_BIT_OFFSET (field));
- else if (TREE_CODE (exp) == ARRAY_REF
- || TREE_CODE (exp) == ARRAY_RANGE_REF)
- {
- tree index = TREE_OPERAND (exp, 1);
- tree low_bound = array_ref_low_bound (exp);
- tree unit_size = array_ref_element_size (exp);
-
- /* We assume all arrays have sizes that are a multiple of a byte.
- First subtract the lower bound, if any, in the type of the
- index, then convert to sizetype and multiply by the size of the
- array element. */
- if (! integer_zerop (low_bound))
- index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
- index, low_bound));
-
- offset = size_binop (PLUS_EXPR, offset,
- size_binop (MULT_EXPR,
- convert (sizetype, index),
- unit_size));
- }
+ /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
+ }
+ break;
- /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
- conversions that don't change the mode, and all view conversions
- except those that need to "step up" the alignment. */
- else if (TREE_CODE (exp) != NON_LVALUE_EXPR
- && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
- && ! ((TYPE_ALIGN (TREE_TYPE (exp))
- > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
- && STRICT_ALIGNMENT
- && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
- < BIGGEST_ALIGNMENT)
- && (TYPE_ALIGN_OK (TREE_TYPE (exp))
- || TYPE_ALIGN_OK (TREE_TYPE
- (TREE_OPERAND (exp, 0))))))
- && ! ((TREE_CODE (exp) == NOP_EXPR
- || TREE_CODE (exp) == CONVERT_EXPR)
- && (TYPE_MODE (TREE_TYPE (exp))
- == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
- break;
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ {
+ tree index = TREE_OPERAND (exp, 1);
+ tree low_bound = array_ref_low_bound (exp);
+ tree unit_size = array_ref_element_size (exp);
+
+ /* We assume all arrays have sizes that are a multiple of a byte.
+ First subtract the lower bound, if any, in the type of the
+ index, then convert to sizetype and multiply by the size of
+ the array element. */
+ if (! integer_zerop (low_bound))
+ index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
+ index, low_bound));
+
+ offset = size_binop (PLUS_EXPR, offset,
+ size_binop (MULT_EXPR,
+ convert (sizetype, index),
+ unit_size));
+ }
+ break;
+
+ case REALPART_EXPR:
+ /* The real part lives at bit offset zero within the complex
+ value, so there is nothing to accumulate here.  */
+ break;
+
+ case IMAGPART_EXPR:
+ /* The imaginary part follows the real part; add its position to
+ the offset accumulated so far rather than overwriting it.  */
+ bit_offset = size_binop (PLUS_EXPR, bit_offset,
+ build_int_cst (bitsizetype, *pbitsize));
+ break;
+
+ /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
+ conversions that don't change the mode, and all view conversions
+ except those that need to "step up" the alignment. */
+
+ case NON_LVALUE_EXPR:
+ break;
+
+ case NOP_EXPR:
+ case CONVERT_EXPR:
+ if (TYPE_MODE (TREE_TYPE (exp))
+ != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ goto done;
+ break;
+
+ case VIEW_CONVERT_EXPR:
+ if ((TYPE_ALIGN (TREE_TYPE (exp))
+ > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ && STRICT_ALIGNMENT
+ && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
+ < BIGGEST_ALIGNMENT)
+ && (TYPE_ALIGN_OK (TREE_TYPE (exp))
+ || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ goto done;
+ break;
+
+ default:
+ goto done;
+ }
/* If any reference in the chain is volatile, the effect is volatile. */
if (TREE_THIS_VOLATILE (exp))
*pvolatilep = 1;

exp = TREE_OPERAND (exp, 0);
}
+ done:
/* If OFFSET is constant, see if we can return the whole thing as a
constant bit position. Otherwise, split it up. */
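(For illustration, a self-contained C program, not GCC code, mirroring the arithmetic the ARRAY_REF and IMAGPART_EXPR cases above perform: the byte offset of an element is (index - low_bound) * unit_size, with low_bound nonzero only in languages such as Fortran or Ada, and the imaginary part sits one part-size past the real part. Struct S and the index values are assumptions made up for the example.)

  #include <complex.h>
  #include <stddef.h>
  #include <stdio.h>

  struct S { char pad; double complex c[4]; };

  int
  main (void)
  {
    /* Byte offset of s.c[index], as the ARRAY_REF case computes it.  */
    size_t index = 2, low_bound = 0;              /* low_bound is 0 in C */
    size_t unit_size = sizeof (double complex);
    size_t offset = offsetof (struct S, c) + (index - low_bound) * unit_size;

    /* The IMAGPART_EXPR case then adds the part size in bits.  */
    size_t bit_offset = sizeof (double) * 8;      /* 64 where double is 8 bytes */

    printf ("byte offset %zu, imagpart at +%zu bits\n", offset, bit_offset);
    return 0;
  }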
case ARRAY_RANGE_REF:
case NON_LVALUE_EXPR:
case VIEW_CONVERT_EXPR:
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
return 1;
/* ??? Sure they are handled, but get_inner_reference may return
{
tree array = TREE_OPERAND (exp, 0);
- tree low_bound = array_ref_low_bound (exp);
- tree index = convert (sizetype, TREE_OPERAND (exp, 1));
- HOST_WIDE_INT i;
-
- gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
-
- /* Optimize the special-case of a zero lower bound.
-
- We convert the low_bound to sizetype to avoid some problems
- with constant folding. (E.g. suppose the lower bound is 1,
- and its mode is QI. Without the conversion, (ARRAY
- +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
- +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
-
- if (! integer_zerop (low_bound))
- index = size_diffop (index, convert (sizetype, low_bound));
+ tree index = TREE_OPERAND (exp, 1);
/* Fold an expression like: "foo"[2].
This is not done in fold so it won't happen inside &.
&& modifier != EXPAND_MEMORY
&& TREE_CODE (array) == CONSTRUCTOR
&& ! TREE_SIDE_EFFECTS (array)
- && TREE_CODE (index) == INTEGER_CST
- && 0 > compare_tree_int (index,
- list_length (CONSTRUCTOR_ELTS
- (TREE_OPERAND (exp, 0)))))
+ && TREE_CODE (index) == INTEGER_CST)
{
tree elem;
- for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
- i = TREE_INT_CST_LOW (index);
- elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
+ for (elem = CONSTRUCTOR_ELTS (array);
+ (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
+ elem = TREE_CHAIN (elem))
;
- if (elem)
+ if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
modifier);
}
}
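(In user-level terms, the accesses this hunk folds look like the sketch below, plain C rather than GCC internals. Matching on TREE_PURPOSE instead of counting list positions also copes with designated initializers, whose CONSTRUCTOR_ELTS need not appear in index order.)

  #include <stdio.h>

  int
  main (void)
  {
    char c = "foo"[2];   /* constant array, constant index: folds to 'o' */

    /* Designators can list elements out of order, so the element for a
       given index is found by its recorded position, not by counting.  */
    static const int t[3] = { [2] = 30, [0] = 10 };

    printf ("%c %d\n", c, t[2]);   /* prints: o 30 */
    return 0;
  }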
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
- op0 = REDUCE_BIT_FIELD (op0);
if (GET_MODE (op0) == mode)
- return op0;
+ ;
/* If OP0 is a constant, just convert it into the proper mode. */
- if (CONSTANT_P (op0))
+ else if (CONSTANT_P (op0))
{
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
enum machine_mode inner_mode = TYPE_MODE (inner_type);
if (modifier == EXPAND_INITIALIZER)
- return simplify_gen_subreg (mode, op0, inner_mode,
- subreg_lowpart_offset (mode,
- inner_mode));
+ op0 = simplify_gen_subreg (mode, op0, inner_mode,
+ subreg_lowpart_offset (mode,
+ inner_mode));
else
- return convert_modes (mode, inner_mode, op0,
- TYPE_UNSIGNED (inner_type));
+ op0 = convert_modes (mode, inner_mode, op0,
+ TYPE_UNSIGNED (inner_type));
}
- if (modifier == EXPAND_INITIALIZER)
- return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
+ else if (modifier == EXPAND_INITIALIZER)
+ op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
- if (target == 0)
- return
- convert_to_mode (mode, op0,
- TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
+ else if (target == 0)
+ op0 = convert_to_mode (mode, op0,
+ TYPE_UNSIGNED (TREE_TYPE
+ (TREE_OPERAND (exp, 0))));
else
- convert_move (target, op0,
- TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
- return target;
+ {
+ convert_move (target, op0,
+ TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
+ op0 = target;
+ }
+
+ return REDUCE_BIT_FIELD (op0);
case VIEW_CONVERT_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
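(Finally, a standalone C sketch, again user code rather than GCC internals, of the distinction the unsignedp flag above draws: widening uses SIGN_EXTEND when the source type is signed and ZERO_EXTEND when it is unsigned.)

  #include <stdint.h>
  #include <stdio.h>

  int
  main (void)
  {
    int8_t  s = -1;      /* 0xff viewed as a signed QImode value */
    uint8_t u = 0xff;    /* the same bits, unsigned */

    int32_t from_signed   = s;   /* SIGN_EXTEND: all-ones, i.e. -1 */
    int32_t from_unsigned = u;   /* ZERO_EXTEND: 255 */

    printf ("%d %d\n", from_signed, from_unsigned);   /* prints: -1 255 */
    return 0;
  }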