/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988, 1992 Free Software Foundation, Inc.
+ Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
This file is part of GNU CC.
#ifdef PUSH_ROUNDING
-#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNARD)
+#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED /* If it's last to first */
#endif
preserve_temp_slots (result);
free_temp_slots ();
- return convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
- TREE_UNSIGNED (TREE_TYPE (to)));
+ /* If we aren't returning a result, just pass on what expand_expr
+ returned; it was probably const0_rtx. Otherwise, convert RESULT
+ to the proper mode. */
+ return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
+ TREE_UNSIGNED (TREE_TYPE (to)))
+ : result);
+ }
+
+ /* If the rhs is a function call and its value is not an aggregate,
+ call the function before we start to compute the lhs.
+ This is needed for correct code for cases such as
+ val = setjmp (buf) on machines where reference to val
+ requires loading up part of an address in a separate insn. */
+ if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
+ {
+ rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
+ if (to_rtx == 0)
+ to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
+ emit_move_insn (to_rtx, value);
+ preserve_temp_slots (to_rtx);
+ free_temp_slots ();
+ return to_rtx;
}
/* Ordinary treatment. Expand TO to get a REG or MEM rtx.
OK_DEFER_POP;
return target;
}
- else if (suggest_reg && GET_CODE (target) == MEM
+ else if (suggest_reg && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
&& GET_MODE (target) != BLKmode)
/* If target is in memory and caller wants value in a register instead,
arrange that. Pass TARGET as target for expand_expr so that,
if EXP is another assignment, SUGGEST_REG will be nonzero for it.
- We know expand_expr will not use the target in that case. */
+ We know expand_expr will not use the target in that case.
+ Don't do this if TARGET is volatile because we are supposed
+ to write it and then read it. */
{
temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
GET_MODE (target), 0);
So copy the value through a temporary and use that temp
as the result. */
{
+ /* ??? There may be a bug here in the case of a target
+	 that is volatile, but I'm too sleepy today to write anything
+ to handle it. */
if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
{
/* Expand EXP into a new pseudo. */
{
temp = expand_expr (exp, target, GET_MODE (target), 0);
/* DO return TARGET if it's a specified hardware register.
- expand_return relies on this. */
+ expand_return relies on this.
+ DO return TARGET if it's a volatile mem ref; ANSI requires this. */
if (!(target && GET_CODE (target) == REG
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
- && CONSTANT_P (temp))
+ && CONSTANT_P (temp)
+ && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
dont_return_target = 1;
}
}
#endif
- if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
+ if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
{
register tree elt;
/* Inform later passes that the whole union value is dead. */
- if (TREE_CODE (type) == UNION_TYPE)
+ if (TREE_CODE (type) == UNION_TYPE
+ || TREE_CODE (type) == QUAL_UNION_TYPE)
emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
/* If we are building a static constructor into a register,
if (mode == VOIDmode
|| (mode != BLKmode && ! direct_store[(int) mode])
|| GET_CODE (target) == REG
- || GET_CODE (target) == SUBREG)
+ || GET_CODE (target) == SUBREG
+ /* If the field isn't aligned enough to fetch as a unit,
+ fetch it as a bit field. */
+#ifdef STRICT_ALIGNMENT
+ || align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
+ || bitpos % GET_MODE_ALIGNMENT (mode) != 0
+#endif
+ )
{
rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* Store the value in the bitfield. */
switch (TREE_CODE (exp))
{
case ADDR_EXPR:
- return staticp (TREE_OPERAND (exp, 0));
+ return (staticp (TREE_OPERAND (exp, 0))
+ || safe_from_p (x, TREE_OPERAND (exp, 0)));
case INDIRECT_REF:
if (GET_CODE (x) == MEM)
/* Use subtarget as the target for operand 0 of a binary operation. */
rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
rtx original_target = target;
- int ignore = target == const0_rtx;
+ int ignore = (target == const0_rtx
+ || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
+ || code == CONVERT_EXPR || code == REFERENCE_EXPR)
+ && TREE_CODE (type) == VOID_TYPE));
tree context;
/* Don't use hard regs as subtargets, because the combiner
if (preserve_subexpressions_p ())
subtarget = 0;
- if (ignore) target = 0, original_target = 0;
+ /* If we are going to ignore this result, we need only do something
+ if there is a side-effect somewhere in the expression. If there
+ is, short-circuit the most common cases here. */
+
+ if (ignore)
+ {
+ if (! TREE_SIDE_EFFECTS (exp))
+ return const0_rtx;
+
+ /* Ensure we reference a volatile object even if value is ignored. */
+ if (TREE_THIS_VOLATILE (exp)
+ && TREE_CODE (exp) != FUNCTION_DECL
+ && mode != VOIDmode && mode != BLKmode)
+ {
+ temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
+ if (GET_CODE (temp) == MEM)
+ temp = copy_to_reg (temp);
+ return const0_rtx;
+ }
+
+ if (TREE_CODE_CLASS (code) == '1')
+ return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
+ VOIDmode, modifier);
+ else if (TREE_CODE_CLASS (code) == '2'
+ || TREE_CODE_CLASS (code) == '<')
+ {
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
+ expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
+ return const0_rtx;
+ }
+ else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
+ && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
+ /* If the second operand has no side effects, just evaluate
+ the first. */
+ return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
+ VOIDmode, modifier);
+
+ target = 0, original_target = 0;
+ }
/* If will do cse, generate all results into pseudo registers
since 1) that allows cse to find more things
&& (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
target = subtarget;
- /* Ensure we reference a volatile object even if value is ignored. */
- if (ignore && TREE_THIS_VOLATILE (exp)
- && TREE_CODE (exp) != FUNCTION_DECL
- && mode != VOIDmode && mode != BLKmode)
- {
- target = gen_reg_rtx (mode);
- temp = expand_expr (exp, target, VOIDmode, modifier);
- if (temp != target)
- emit_move_insn (target, temp);
- return target;
- }
-
switch (code)
{
case LABEL_DECL:
case RESULT_DECL:
if (DECL_RTL (exp) == 0)
abort ();
- /* Ensure variable marked as used
- even if it doesn't go through a parser. */
- TREE_USED (exp) = 1;
+ /* Ensure variable marked as used even if it doesn't go through
+	 a parser.  If it hasn't been used yet, write out an external
+ definition. */
+ if (! TREE_USED (exp))
+ {
+ assemble_external (exp);
+ TREE_USED (exp) = 1;
+ }
+
/* Handle variables inherited from containing functions. */
context = decl_function_context (exp);
return SAVE_EXPR_RTL (exp);
case EXIT_EXPR:
- /* Exit the current loop if the body-expression is true. */
- {
- rtx label = gen_label_rtx ();
- do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
- expand_exit_loop (NULL_PTR);
- emit_label (label);
- }
+ expand_exit_loop_if_false (NULL_PTR,
+ invert_truthvalue (TREE_OPERAND (exp, 0)));
return const0_rtx;
case LOOP_EXPR:
return RTL_EXPR_RTL (exp);
case CONSTRUCTOR:
+ /* If we don't need the result, just ensure we evaluate any
+ subexpressions. */
+ if (ignore)
+ {
+ tree elt;
+ for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
+ expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
+ return const0_rtx;
+ }
/* All elts simple constants => refer to a constant in memory. But
if this is a non-BLKmode mode, let it store a field at a time
since that should make a CONST_INT or CONST_DOUBLE when we
- fold. */
- if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
+ fold. If we are making an initializer and all operands are
+ constant, put it in memory as well. */
+ else if ((TREE_STATIC (exp)
+ && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
+ || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
{
rtx constructor = output_constant_def (exp);
if (modifier != EXPAND_CONST_ADDRESS
return constructor;
}
- if (ignore)
- {
- tree elt;
- for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
- expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
- return const0_rtx;
- }
else
{
if (target == 0 || ! safe_from_p (target, exp))
case NOP_EXPR:
case CONVERT_EXPR:
case REFERENCE_EXPR:
- if (TREE_CODE (type) == VOID_TYPE || ignore)
- {
- expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
- return const0_rtx;
- }
if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
if (TREE_CODE (type) == UNION_TYPE)
address.
If this is an EXPAND_SUM call, always return the sum. */
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
- && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
- || mode == Pmode))
+ if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
+ || mode == Pmode)
{
- op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
- EXPAND_SUM);
- op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
- if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
- op1 = force_operand (op1, target);
- return op1;
- }
+ if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
+ {
+ op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
+ EXPAND_SUM);
+ op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
+ if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+ op1 = force_operand (op1, target);
+ return op1;
+ }
- else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
- && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
- && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
- || mode == Pmode))
- {
- op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
- EXPAND_SUM);
- op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
- if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
- op0 = force_operand (op0, target);
- return op0;
+ else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
+ && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
+ {
+ op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
+ EXPAND_SUM);
+ if (! CONSTANT_P (op0))
+ {
+ op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
+ VOIDmode, modifier);
+ goto both_summands;
+ }
+ op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
+ if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+ op0 = force_operand (op0, target);
+ return op0;
+ }
}
/* No sense saving up arithmetic to be done
And force_operand won't know whether to sign-extend or
zero-extend. */
if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
- || mode != Pmode) goto binop;
+ || mode != Pmode)
+ goto binop;
preexpand_calls (exp);
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
+ both_summands:
/* Make sure any term that's a sum with a constant comes last. */
if (GET_CODE (op0) == PLUS
&& CONSTANT_P (XEXP (op0, 1)))
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
- if (target == 0 || ! safe_from_p (target, exp)
- /* Make sure we don't have a hard reg (such as function's return
- value) live across basic blocks, if not optimizing. */
- || (!optimize && GET_CODE (target) == REG
- && REGNO (target) < FIRST_PSEUDO_REGISTER))
+ if (! ignore
+ && (target == 0 || ! safe_from_p (target, exp)
+ /* Make sure we don't have a hard reg (such as function's return
+ value) live across basic blocks, if not optimizing. */
+ || (!optimize && GET_CODE (target) == REG
+ && REGNO (target) < FIRST_PSEUDO_REGISTER)))
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
- emit_clr_insn (target);
+
+ if (target)
+ emit_clr_insn (target);
+
op1 = gen_label_rtx ();
jumpifnot (exp, op1);
- emit_0_to_1_insn (target);
+
+ if (target)
+ emit_0_to_1_insn (target);
+
emit_label (op1);
- return target;
+ return ignore ? const0_rtx : target;
case TRUTH_NOT_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
&& integer_zerop (TREE_OPERAND (exp, 2))
&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
{
+ if (ignore)
+ {
+ expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
+ modifier);
+ return const0_rtx;
+ }
+
op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
if (GET_MODE (op0) == mode)
return op0;
intermediate target unless it is safe. If no target, use a
temporary. */
- if (mode == VOIDmode || ignore)
+ if (ignore)
temp = 0;
else if (original_target
&& safe_from_p (original_target, TREE_OPERAND (exp, 0)))
/* If we had X ? A + 1 : A and we can do the test of X as a store-flag
operation, do this as A + (X != 0). Similarly for other simple
binary operators. */
- if (singleton && binary_op
+ if (temp && singleton && binary_op
&& ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
&& (TREE_CODE (binary_op) == PLUS_EXPR
|| TREE_CODE (binary_op) == MINUS_EXPR
}
else
expand_expr (singleton,
- ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
+ ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
if (cleanups_this_call)
{
sorry ("aggregate value in COND_EXPR");
is the actual stack address that we want to initialize.
The function we call will perform the cleanup in this case. */
+ /* If we have already assigned it space, use that space,
+	       not the target that we were passed in, as our target
+ parameter is only a hint. */
+ if (DECL_RTL (slot) != 0)
+ {
+ target = DECL_RTL (slot);
+		/* If we have already expanded the slot, don't do
+		   it again.  (mrs) */
+ if (TREE_OPERAND (exp, 1) == NULL_TREE)
+ return target;
+ }
+
DECL_RTL (slot) = target;
}
int icode;
enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
int op0_is_copy = 0;
+ int single_insn = 0;
/* Stabilize any component ref that might need to be
evaluated more than once below. */
|| TREE_CODE (exp) == PREDECREMENT_EXPR)
this_optab = sub_optab;
+ /* For a preincrement, see if we can do this with a single instruction. */
+ if (!post)
+ {
+ icode = (int) this_optab->handlers[(int) mode].insn_code;
+ if (icode != (int) CODE_FOR_nothing
+ /* Make sure that OP0 is valid for operands 0 and 1
+ of the insn we want to queue. */
+ && (*insn_operand_predicate[icode][0]) (op0, mode)
+ && (*insn_operand_predicate[icode][1]) (op0, mode)
+ && (*insn_operand_predicate[icode][2]) (op1, mode))
+ single_insn = 1;
+ }
+
/* If OP0 is not the actual lvalue, but rather a copy in a register,
then we cannot just increment OP0. We must therefore contrive to
increment the original value. Then, for postincrement, we can return
- OP0 since it is a copy of the old value. For preincrement, we want
- to always expand here, since this generates better or equivalent code. */
- if (!post || op0_is_copy)
+ OP0 since it is a copy of the old value. For preincrement, expand here
+ unless we can do it with a single insn. */
+ if (op0_is_copy || (!post && !single_insn))
{
/* This is the easiest way to increment the value wherever it is.
Problems with multiple evaluation of INCREMENTED are prevented
&& integer_pow2p (TREE_OPERAND (arg0, 1))
&& TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
{
+ tree inner = TREE_OPERAND (arg0, 0);
int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
NULL_RTX, VOIDmode, 0)));
+ int ops_unsignedp;
+
+ /* If INNER is a right shift of a constant and it plus BITNUM does
+ not overflow, adjust BITNUM and INNER. */
+
+ if (TREE_CODE (inner) == RSHIFT_EXPR
+ && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
+ && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
+ < TYPE_PRECISION (type)))
+ {
+ bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
+ inner = TREE_OPERAND (inner, 0);
+ }
+
+ /* If we are going to be able to omit the AND below, we must do our
+ operations as unsigned. If we must use the AND, we have a choice.
+ Normally unsigned is faster, but for some machines signed is. */
+ ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
+#ifdef BYTE_LOADS_SIGN_EXTEND
+ : 0
+#else
+ : 1
+#endif
+ );
if (subtarget == 0 || GET_CODE (subtarget) != REG
|| GET_MODE (subtarget) != operand_mode
- || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
+ || ! safe_from_p (subtarget, inner))
subtarget = 0;
- op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
+ op0 = expand_expr (inner, subtarget, VOIDmode, 0);
if (bitnum != 0)
op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
- size_int (bitnum), target, 1);
+ size_int (bitnum), target, ops_unsignedp);
if (GET_MODE (op0) != mode)
- op0 = convert_to_mode (mode, op0, 1);
+ op0 = convert_to_mode (mode, op0, ops_unsignedp);
+
+ if ((code == EQ && ! invert) || (code == NE && invert))
+ op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
+ ops_unsignedp, OPTAB_LIB_WIDEN);
+ /* Put the AND last so it can combine with more things. */
if (bitnum != TYPE_PRECISION (type) - 1)
op0 = expand_and (op0, const1_rtx, target);
- if ((code == EQ && ! invert) || (code == NE && invert))
- op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
- OPTAB_LIB_WIDEN);
-
return op0;
}