pending_stack_adjust = 0;
}
+/* Discard any pending stack adjustment. This avoids relying on the
+ RTL optimizers to remove useless adjustments when we know the
+ stack pointer value is dead. */
+void discard_pending_stack_adjust (void)
+{
+ stack_pointer_delta -= pending_stack_adjust;
+ pending_stack_adjust = 0;
+}
+
/* When exiting from function, if safe, clear out any pending stack adjust
so the adjustment won't get done.
if (optimize > 0
&& (! flag_omit_frame_pointer || current_function_calls_alloca)
&& EXIT_IGNORE_STACK
- && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
- && ! flag_inline_functions)
- {
- stack_pointer_delta -= pending_stack_adjust,
- pending_stack_adjust = 0;
- }
+ && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
+ discard_pending_stack_adjust ();
}
/* Pop any previously-pushed arguments that have not been popped yet. */
do_jump always does any pending stack adjust except when it does not
actually perform a jump. An example where there is no jump
- is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
-
- This function is responsible for optimizing cases such as
- &&, || and comparison operators in EXP. */
+ is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. */
void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
enum tree_code code = TREE_CODE (exp);
- /* Some cases need to create a label to jump to
- in order to properly fall through.
- These cases set DROP_THROUGH_LABEL nonzero. */
- rtx drop_through_label = 0;
rtx temp;
int i;
tree type;
case COMPOUND_EXPR:
case COND_EXPR:
/* Lowered by gimplify.c. */
- abort ();
+ gcc_unreachable ();
case COMPONENT_REF:
case BIT_FIELD_REF:
{
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
- || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
- abort ();
- else if (integer_zerop (TREE_OPERAND (exp, 1)))
+ gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
+ != MODE_COMPLEX_FLOAT);
+ gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
+ != MODE_COMPLEX_INT);
+
+ if (integer_zerop (TREE_OPERAND (exp, 1)))
do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
&& !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
{
tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
- if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
- || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
- abort ();
- else if (integer_zerop (TREE_OPERAND (exp, 1)))
+ gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
+ != MODE_COMPLEX_FLOAT);
+ gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
+ != MODE_COMPLEX_INT);
+
+ if (integer_zerop (TREE_OPERAND (exp, 1)))
do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
&& !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
tree op0 = save_expr (TREE_OPERAND (exp, 0));
tree op1 = save_expr (TREE_OPERAND (exp, 1));
tree cmp0, cmp1;
+ rtx drop_through_label = 0;
/* If the target doesn't support combined unordered
compares, decompose into two comparisons. */
+ if (if_true_label == 0)
+ drop_through_label = if_true_label = gen_label_rtx ();
+
cmp0 = fold (build2 (tcode1, TREE_TYPE (exp), op0, op1));
cmp1 = fold (build2 (tcode2, TREE_TYPE (exp), op0, op1));
- exp = build2 (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
- do_jump (exp, if_false_label, if_true_label);
+ do_jump (cmp0, 0, if_true_label);
+ do_jump (cmp1, if_false_label, if_true_label);
+
+ if (drop_through_label)
+ {
+ do_pending_stack_adjust ();
+ emit_label (drop_through_label);
+ }
}
}
break;
&& ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
/* Note swapping the labels gives us not-equal. */
do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
- else if (GET_MODE (temp) != VOIDmode)
+ else
{
+ gcc_assert (GET_MODE (temp) != VOIDmode);
+
/* The RTL optimizers prefer comparisons against pseudos. */
if (GET_CODE (temp) == SUBREG)
{
GET_MODE (temp), NULL_RTX,
if_false_label, if_true_label);
}
- else
- abort ();
- }
-
- if (drop_through_label)
- {
- /* If do_jump produces code that might be jumped around,
- do any stack adjusts from that code, before the place
- where control merges in. */
- do_pending_stack_adjust ();
- emit_label (drop_through_label);
}
}
\f
}
\f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
- (including code to compute the values to be compared)
- and set (CC0) according to the result.
- The decision as to signed or unsigned comparison must be made by the caller.
+ MODE is the machine mode of the comparison, not of the result.
+ (including code to compute the values to be compared) and set CC0
+ according to the result. The decision as to signed or unsigned
+ comparison must be made by the caller.
We force a stack adjustment unless there are currently
things pushed on the stack that aren't yet used.
do_pending_stack_adjust ();
code = unsignedp ? unsigned_condition (code) : code;
- if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
- op0, op1)))
+ tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
+ if (tem)
{
if (CONSTANT_P (tem))
return tem;
- code = GET_CODE (tem);
- mode = GET_MODE (tem);
- op0 = XEXP (tem, 0);
- op1 = XEXP (tem, 1);
- unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
+ if (COMPARISON_P (tem))
+ {
+ code = GET_CODE (tem);
+ op0 = XEXP (tem, 0);
+ op1 = XEXP (tem, 1);
+ mode = GET_MODE (op0);
+ unsignedp = (code == GTU || code == LTU
+ || code == GEU || code == LEU);
+ }
}
emit_cmp_insn (op0, op1, code, size, mode, unsignedp);