/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988, 1992 Free Software Foundation, Inc.
+ Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
This file is part of GNU CC.
#ifdef PUSH_ROUNDING
-#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNARD)
+#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED /* If it's last to first */
#endif
end_sequence ();
emit_no_conflict_block (insns, to, from, NULL_RTX,
- gen_rtx (equiv_code, to_mode, from));
+ gen_rtx (equiv_code, to_mode, copy_rtx (from)));
return;
}
here because if SIZE is less than the mode mask, as it is
returned by the macro, it will definitely be less than the
actual mode mask. */
- && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
+ && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
&& (insn_operand_predicate[(int) code][0] == 0
|| (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
&& (insn_operand_predicate[(int) code][1] == 0
VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
- size, TYPE_MODE (sizetype));
+ TYPE_MODE (sizetype));
#else
emit_library_call (bcopy_libfunc, 0,
VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
- size, TYPE_MODE (sizetype));
+ TYPE_MODE (sizetype));
#endif
OK_DEFER_POP;
}
preserve_temp_slots (result);
free_temp_slots ();
- return result;
+ /* If we aren't returning a result, just pass on what expand_expr
+ returned; it was probably const0_rtx. Otherwise, convert RESULT
+ to the proper mode. */
+ return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
+ TREE_UNSIGNED (TREE_TYPE (to)))
+ : result);
+ }
+
+ /* If the rhs is a function call and its value is not an aggregate,
+ call the function before we start to compute the lhs.
+ This is needed for correct code for cases such as
+ val = setjmp (buf) on machines where reference to val
+ requires loading up part of an address in a separate insn. */
+ if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
+ {
+ rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
+ if (to_rtx == 0)
+ to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
+ emit_move_insn (to_rtx, value);
+ preserve_temp_slots (to_rtx);
+ free_temp_slots ();
+ return to_rtx;
}
/* Ordinary treatment. Expand TO to get a REG or MEM rtx.
XEXP (from_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
- size, TYPE_MODE (sizetype));
+ TYPE_MODE (sizetype));
#else
emit_library_call (bcopy_libfunc, 0,
VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
XEXP (to_rtx, 0), Pmode,
convert_to_mode (TYPE_MODE (sizetype),
size, TREE_UNSIGNED (sizetype)),
- size, TYPE_MODE (sizetype));
+ TYPE_MODE (sizetype));
#endif
preserve_temp_slots (to_rtx);
OK_DEFER_POP;
return target;
}
- else if (suggest_reg && GET_CODE (target) == MEM
+ else if (suggest_reg && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
&& GET_MODE (target) != BLKmode)
/* If target is in memory and caller wants value in a register instead,
arrange that. Pass TARGET as target for expand_expr so that,
if EXP is another assignment, SUGGEST_REG will be nonzero for it.
- We know expand_expr will not use the target in that case. */
+ We know expand_expr will not use the target in that case.
+ Don't do this if TARGET is volatile because we are supposed
+ to write it and then read it. */
{
temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
GET_MODE (target), 0);
So copy the value through a temporary and use that temp
as the result. */
{
+ /* ??? There may be a bug here in the case of a target
+ that is volatile, but I'm too sleepy today to write anything
+ to handle it. */
if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
{
/* Expand EXP into a new pseudo. */
{
temp = expand_expr (exp, target, GET_MODE (target), 0);
/* DO return TARGET if it's a specified hardware register.
- expand_return relies on this. */
+ expand_return relies on this.
+ DO return TARGET if it's a volatile mem ref; ANSI requires this. */
if (!(target && GET_CODE (target) == REG
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
- && CONSTANT_P (temp))
+ && CONSTANT_P (temp)
+ && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
dont_return_target = 1;
}
{
/* Compute the size of the data to copy from the string. */
tree copy_size
- = fold (build (MIN_EXPR, sizetype,
- size_binop (CEIL_DIV_EXPR,
- TYPE_SIZE (TREE_TYPE (exp)),
- size_int (BITS_PER_UNIT)),
- convert (sizetype,
- build_int_2 (TREE_STRING_LENGTH (exp), 0))));
+ = size_binop (MIN_EXPR,
+ size_binop (CEIL_DIV_EXPR,
+ TYPE_SIZE (TREE_TYPE (exp)),
+ size_int (BITS_PER_UNIT)),
+ convert (sizetype,
+ build_int_2 (TREE_STRING_LENGTH (exp), 0)));
rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
VOIDmode, 0);
rtx label = 0;
}
#endif
- if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
+ if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
{
register tree elt;
/* Inform later passes that the whole union value is dead. */
- if (TREE_CODE (type) == UNION_TYPE)
+ if (TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
/* If we are building a static constructor into a register,
if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
|| (GET_CODE (target) == REG && TREE_STATIC (exp)))
- clear_storage (target, maxelt - minelt + 1);
+ clear_storage (target, int_size_in_bytes (type));
else
/* Inform later passes that the old value is dead. */
emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
if (mode == VOIDmode
|| (mode != BLKmode && ! direct_store[(int) mode])
|| GET_CODE (target) == REG
- || GET_CODE (target) == SUBREG)
+ || GET_CODE (target) == SUBREG
+ /* If the field isn't aligned enough to fetch as a unit,
+ fetch it as a bit field. */
+#ifdef STRICT_ALIGNMENT
+ || align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
+ || bitpos % GET_MODE_ALIGNMENT (mode) != 0
+#endif
+ )
{
rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* Store the value in the bitfield. */
? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
: TREE_OPERAND (exp, 2));
+ /* If this field hasn't been filled in yet, don't go
+ past it. This should only happen when folding expressions
+ made during type construction. */
+ if (pos == 0)
+ break;
+
if (TREE_CODE (pos) == PLUS_EXPR)
{
tree constant, var;
/* If this was a bit-field, see if there is a mode that allows direct
access in case EXP is in memory. */
- if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
+ if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
{
mode = mode_for_size (*pbitsize, MODE_INT, 0);
if (mode == BLKmode)
switch (TREE_CODE (exp))
{
case ADDR_EXPR:
- return staticp (TREE_OPERAND (exp, 0));
+ return (staticp (TREE_OPERAND (exp, 0))
+ || safe_from_p (x, TREE_OPERAND (exp, 0)));
case INDIRECT_REF:
if (GET_CODE (x) == MEM)
/* Use subtarget as the target for operand 0 of a binary operation. */
rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
rtx original_target = target;
- int ignore = target == const0_rtx;
+ int ignore = (target == const0_rtx
+ || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
+ || code == CONVERT_EXPR || code == REFERENCE_EXPR)
+ && TREE_CODE (type) == VOID_TYPE));
tree context;
/* Don't use hard regs as subtargets, because the combiner
if (preserve_subexpressions_p ())
subtarget = 0;
- if (ignore) target = 0, original_target = 0;
+ /* If we are going to ignore this result, we need only do something
+ if there is a side-effect somewhere in the expression. If there
+ is, short-circuit the most common cases here. */
+
+ if (ignore)
+ {
+ if (! TREE_SIDE_EFFECTS (exp))
+ return const0_rtx;
+
+ /* Ensure we reference a volatile object even if value is ignored. */
+ if (TREE_THIS_VOLATILE (exp)
+ && TREE_CODE (exp) != FUNCTION_DECL
+ && mode != VOIDmode && mode != BLKmode)
+ {
+ temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
+ if (GET_CODE (temp) == MEM)
+ temp = copy_to_reg (temp);
+ return const0_rtx;
+ }
+
+ if (TREE_CODE_CLASS (code) == '1')
+ return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
+ VOIDmode, modifier);
+ else if (TREE_CODE_CLASS (code) == '2'
+ || TREE_CODE_CLASS (code) == '<')
+ {
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
+ expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
+ return const0_rtx;
+ }
+ else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
+ && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
+ /* If the second operand has no side effects, just evaluate
+ the first. */
+ return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
+ VOIDmode, modifier);
+
+ target = 0, original_target = 0;
+ }
/* If will do cse, generate all results into pseudo registers
since 1) that allows cse to find more things
&& (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
target = subtarget;
- /* Ensure we reference a volatile object even if value is ignored. */
- if (ignore && TREE_THIS_VOLATILE (exp)
- && TREE_CODE (exp) != FUNCTION_DECL
- && mode != VOIDmode && mode != BLKmode)
- {
- target = gen_reg_rtx (mode);
- temp = expand_expr (exp, target, VOIDmode, modifier);
- if (temp != target)
- emit_move_insn (target, temp);
- return target;
- }
-
switch (code)
{
case LABEL_DECL:
case RESULT_DECL:
if (DECL_RTL (exp) == 0)
abort ();
- /* Ensure variable marked as used
- even if it doesn't go through a parser. */
- TREE_USED (exp) = 1;
+ /* Ensure variable marked as used even if it doesn't go through
+ a parser. If it hasn't been used yet, write out an external
+ definition. */
+ if (! TREE_USED (exp))
+ {
+ assemble_external (exp);
+ TREE_USED (exp) = 1;
+ }
+
/* Handle variables inherited from containing functions. */
context = decl_function_context (exp);
}
SAVE_EXPR_RTL (exp) = temp;
- store_expr (TREE_OPERAND (exp, 0), temp, 0);
if (!optimize && GET_CODE (temp) == REG)
save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
save_expr_regs);
+
+ /* If the mode of TEMP does not match that of the expression, it
+ must be a promoted value. We pass store_expr a SUBREG of the
+ wanted mode but mark it so that we know that it was already
+ extended. Note that `unsignedp' was modified above in
+ this case. */
+
+ if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
+ {
+ temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
+ SUBREG_PROMOTED_VAR_P (temp) = 1;
+ SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
+ }
+
+ store_expr (TREE_OPERAND (exp, 0), temp, 0);
}
/* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
return SAVE_EXPR_RTL (exp);
case EXIT_EXPR:
- /* Exit the current loop if the body-expression is true. */
- {
- rtx label = gen_label_rtx ();
- do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
- expand_exit_loop (NULL_PTR);
- emit_label (label);
- }
+ expand_exit_loop_if_false (NULL_PTR,
+ invert_truthvalue (TREE_OPERAND (exp, 0)));
return const0_rtx;
case LOOP_EXPR:
return RTL_EXPR_RTL (exp);
case CONSTRUCTOR:
+ /* If we don't need the result, just ensure we evaluate any
+ subexpressions. */
+ if (ignore)
+ {
+ tree elt;
+ for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
+ expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
+ return const0_rtx;
+ }
/* All elts simple constants => refer to a constant in memory. But
if this is a non-BLKmode mode, let it store a field at a time
since that should make a CONST_INT or CONST_DOUBLE when we
- fold. */
- if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
+ fold. If we are making an initializer and all operands are
+ constant, put it in memory as well. */
+ else if ((TREE_STATIC (exp)
+ && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
+ || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
{
rtx constructor = output_constant_def (exp);
if (modifier != EXPAND_CONST_ADDRESS
return constructor;
}
- if (ignore)
- {
- tree elt;
- for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
- expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
- return const0_rtx;
- }
else
{
if (target == 0 || ! safe_from_p (target, exp))
enum tree_code c = TREE_CODE (type);
target
= assign_stack_temp (mode, int_size_in_bytes (type), 0);
- if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
+ if (c == RECORD_TYPE || c == UNION_TYPE
+ || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
MEM_IN_STRUCT_P (target) = 1;
}
}
|| TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
|| TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
|| TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
+ || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
|| (TREE_CODE (exp1) == ADDR_EXPR
&& (exp2 = TREE_OPERAND (exp1, 0))
&& (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
|| TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
- || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
+ || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
+ || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
MEM_IN_STRUCT_P (temp) = 1;
MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep);
+ /* If we got back the original object, something is wrong. Perhaps
+ we are evaluating an expression too early. In any event, don't
+ infinitely recurse. */
+ if (tem == exp)
+ abort ();
+
/* In some cases, we will be offsetting OP0's address by a constant.
So get it as a sum, if possible. If we will be using it
directly in an insn, we validate it. */
case OFFSET_REF:
{
- tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
+ tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
temp = gen_rtx (MEM, mode, memory_address (mode, op0));
case NOP_EXPR:
case CONVERT_EXPR:
case REFERENCE_EXPR:
- if (TREE_CODE (type) == VOID_TYPE || ignore)
- {
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
- return const0_rtx;
- }
if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
if (TREE_CODE (type) == UNION_TYPE)
address.
If this is an EXPAND_SUM call, always return the sum. */
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
- && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
- || mode == Pmode))
+ if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
+ || mode == Pmode)
{
- op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
- EXPAND_SUM);
- op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
- if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
- op1 = force_operand (op1, target);
- return op1;
- }
+ if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
+ {
+ op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
+ EXPAND_SUM);
+ op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
+ if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+ op1 = force_operand (op1, target);
+ return op1;
+ }
- else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
- && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
- || mode == Pmode))
- {
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
- EXPAND_SUM);
- op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
- if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
- op0 = force_operand (op0, target);
- return op0;
+ else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
+ && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
+ {
+ op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
+ EXPAND_SUM);
+ if (! CONSTANT_P (op0))
+ {
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
+ VOIDmode, modifier);
+ goto both_summands;
+ }
+ op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
+ if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+ op0 = force_operand (op0, target);
+ return op0;
+ }
}
/* No sense saving up arithmetic to be done
And force_operand won't know whether to sign-extend or
zero-extend. */
if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
- || mode != Pmode) goto binop;
+ || mode != Pmode)
+ goto binop;
preexpand_calls (exp);
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
+ both_summands:
/* Make sure any term that's a sum with a constant comes last. */
if (GET_CODE (op0) == PLUS
&& CONSTANT_P (XEXP (op0, 1)))
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
- if (target == 0 || ! safe_from_p (target, exp)
- /* Make sure we don't have a hard reg (such as function's return
- value) live across basic blocks, if not optimizing. */
- || (!optimize && GET_CODE (target) == REG
- && REGNO (target) < FIRST_PSEUDO_REGISTER))
+ if (! ignore
+ && (target == 0 || ! safe_from_p (target, exp)
+ /* Make sure we don't have a hard reg (such as function's return
+ value) live across basic blocks, if not optimizing. */
+ || (!optimize && GET_CODE (target) == REG
+ && REGNO (target) < FIRST_PSEUDO_REGISTER)))
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
- emit_clr_insn (target);
+
+ if (target)
+ emit_clr_insn (target);
+
op1 = gen_label_rtx ();
jumpifnot (exp, op1);
- emit_0_to_1_insn (target);
+
+ if (target)
+ emit_0_to_1_insn (target);
+
emit_label (op1);
- return target;
+ return ignore ? const0_rtx : target;
case TRUTH_NOT_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
&& integer_zerop (TREE_OPERAND (exp, 2))
&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
{
+ if (ignore)
+ {
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
+ modifier);
+ return const0_rtx;
+ }
+
op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
if (GET_MODE (op0) == mode)
return op0;
intermediate target unless it is safe. If no target, use a
temporary. */
- if (mode == VOIDmode || ignore)
+ if (ignore)
temp = 0;
else if (original_target
&& safe_from_p (original_target, TREE_OPERAND (exp, 0)))
/* If we had X ? A + 1 : A and we can do the test of X as a store-flag
operation, do this as A + (X != 0). Similarly for other simple
binary operators. */
- if (singleton && binary_op
+ if (temp && singleton && binary_op
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
&& (TREE_CODE (binary_op) == PLUS_EXPR
|| TREE_CODE (binary_op) == MINUS_EXPR
}
else
expand_expr (singleton,
- ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
+ ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
if (cleanups_this_call)
{
sorry ("aggregate value in COND_EXPR");
is the actual stack address that we want to initialize.
The function we call will perform the cleanup in this case. */
+ /* If we have already assigned it space, use that space,
+ not the target that we were passed in, as our target
+ parameter is only a hint. */
+ if (DECL_RTL (slot) != 0)
+ {
+ target = DECL_RTL (slot);
+ /* If we have already expanded the slot, don't do
+ it again. (mrs) */
+ if (TREE_OPERAND (exp, 1) == NULL_TREE)
+ return target;
+ }
+
DECL_RTL (slot) = target;
}
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
(modifier == EXPAND_INITIALIZER
? modifier : EXPAND_CONST_ADDRESS));
+
+ /* We would like the object in memory. If it is a constant,
+ we can have it be statically allocated into memory. For
+ a non-constant (REG or SUBREG), we need to allocate some
+ memory and store the value into it. */
+
+ if (CONSTANT_P (op0))
+ op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
+ op0);
+
+ if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
+ {
+ /* If this object is in a register, it must not
+ be BLKmode. */
+ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
+ enum machine_mode inner_mode = TYPE_MODE (inner_type);
+ rtx memloc
+ = assign_stack_temp (inner_mode,
+ int_size_in_bytes (inner_type), 1);
+
+ emit_move_insn (memloc, op0);
+ op0 = memloc;
+ }
+
if (GET_CODE (op0) != MEM)
abort ();
return GEN_INT (method_type_class);
if (code == RECORD_TYPE)
return GEN_INT (record_type_class);
- if (code == UNION_TYPE)
+ if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
return GEN_INT (union_type_class);
if (code == ARRAY_TYPE)
return GEN_INT (array_type_class);
int icode;
enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
int op0_is_copy = 0;
+ int single_insn = 0;
/* Stabilize any component ref that might need to be
evaluated more than once below. */
|| TREE_CODE (exp) == PREDECREMENT_EXPR)
this_optab = sub_optab;
+ /* For a preincrement, see if we can do this with a single instruction. */
+ if (!post)
+ {
+ icode = (int) this_optab->handlers[(int) mode].insn_code;
+ if (icode != (int) CODE_FOR_nothing
+ /* Make sure that OP0 is valid for operands 0 and 1
+ of the insn we want to queue. */
+ && (*insn_operand_predicate[icode][0]) (op0, mode)
+ && (*insn_operand_predicate[icode][1]) (op0, mode)
+ && (*insn_operand_predicate[icode][2]) (op1, mode))
+ single_insn = 1;
+ }
+
/* If OP0 is not the actual lvalue, but rather a copy in a register,
then we cannot just increment OP0. We must therefore contrive to
increment the original value. Then, for postincrement, we can return
- OP0 since it is a copy of the old value. For preincrement, we want
- to always expand here, since this generates better or equivalent code. */
- if (!post || op0_is_copy)
+ OP0 since it is a copy of the old value. For preincrement, expand here
+ unless we can do it with a single insn. */
+ if (op0_is_copy || (!post && !single_insn))
{
/* This is the easiest way to increment the value wherever it is.
Problems with multiple evaluation of INCREMENTED are prevented
#if 0
/* There's no need to do this now that combine.c can eliminate lots of
sign extensions. This can be less efficient in certain cases on other
- machines.
+ machines. */
/* If this is a signed equality comparison, we can do it as an
unsigned comparison since zero-extension is cheaper than sign
code = unsignedp ? LTU : LT;
break;
case LE_EXPR:
- if (integer_all_onesp (arg1))
- arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
+ if (! unsignedp && integer_all_onesp (arg1))
+ arg1 = integer_zero_node, code = LT;
else
code = unsignedp ? LEU : LE;
break;
case GT_EXPR:
- if (integer_all_onesp (arg1))
- arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
+ if (! unsignedp && integer_all_onesp (arg1))
+ arg1 = integer_zero_node, code = GE;
else
code = unsignedp ? GTU : GT;
break;
&& integer_pow2p (TREE_OPERAND (arg0, 1))
&& TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
{
+ tree inner = TREE_OPERAND (arg0, 0);
int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
NULL_RTX, VOIDmode, 0)));
+ int ops_unsignedp;
+
+ /* If INNER is a right shift of a constant and it plus BITNUM does
+ not overflow, adjust BITNUM and INNER. */
+
+ if (TREE_CODE (inner) == RSHIFT_EXPR
+ && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
+ && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
+ < TYPE_PRECISION (type)))
+ {
+ bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
+ inner = TREE_OPERAND (inner, 0);
+ }
+
+ /* If we are going to be able to omit the AND below, we must do our
+ operations as unsigned. If we must use the AND, we have a choice.
+ Normally unsigned is faster, but for some machines signed is. */
+ ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
+#ifdef BYTE_LOADS_SIGN_EXTEND
+ : 0
+#else
+ : 1
+#endif
+ );
if (subtarget == 0 || GET_CODE (subtarget) != REG
|| GET_MODE (subtarget) != operand_mode
- || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
+ || ! safe_from_p (subtarget, inner))
subtarget = 0;
- op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
+ op0 = expand_expr (inner, subtarget, VOIDmode, 0);
if (bitnum != 0)
op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
- size_int (bitnum), target, 1);
+ size_int (bitnum), target, ops_unsignedp);
if (GET_MODE (op0) != mode)
- op0 = convert_to_mode (mode, op0, 1);
+ op0 = convert_to_mode (mode, op0, ops_unsignedp);
+ if ((code == EQ && ! invert) || (code == NE && invert))
+ op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
+ ops_unsignedp, OPTAB_LIB_WIDEN);
+
+ /* Put the AND last so it can combine with more things. */
if (bitnum != TYPE_PRECISION (type) - 1)
op0 = expand_and (op0, const1_rtx, target);
- if ((code == EQ && ! invert) || (code == NE && invert))
- op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
- OPTAB_LIB_WIDEN);
-
return op0;
}