+
+ /* If the two arms are identical, we don't need the comparison.
+    The condition must be free of side effects, since discarding
+    the comparison would otherwise discard those effects too.  */
+
+ if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
+ && ! side_effects_p (XEXP (x, 0)))
+ return XEXP (x, 1);
+
+ /* Look for cases where we have (abs x) or (neg (abs X)).  The
+    pattern is (if_then_else (cond X ...) X (neg X)): the false arm
+    is the negation of the true arm, and the condition's first
+    operand is X itself.  NOTE(review): the condition's second
+    operand is not checked here -- presumably the caller guarantees
+    a comparison against zero; confirm.  X must be free of side
+    effects since it appears more than once in the replacement.  */
+
+ if (GET_MODE_CLASS (mode) == MODE_INT
+ && GET_CODE (XEXP (x, 2)) == NEG
+ && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
+ && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
+ && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
+ && ! side_effects_p (XEXP (x, 1)))
+ switch (GET_CODE (XEXP (x, 0)))
+ {
+ case GT:
+ case GE:
+ /* (if_then_else (gt X ...) X (neg X)) is (abs X).  */
+ x = gen_unary (ABS, mode, XEXP (x, 1));
+ goto restart;
+ case LT:
+ case LE:
+ /* (if_then_else (lt X ...) X (neg X)) is (neg (abs X)).  */
+ x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
+ goto restart;
+ /* Any other comparison code leaves the expression unchanged.  */
+ }
+
+ /* Look for MIN or MAX: (if_then_else (cond X Y) X Y), where the
+    condition compares the two arms directly.  Not done for floating
+    point modes -- NOTE(review): presumably because of NaN and
+    signed-zero semantics; confirm.  The condition must be free of
+    side effects since it is deleted by the transformation.  */
+
+ if (! FLOAT_MODE_P (mode)
+ && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
+ && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
+ && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
+ && ! side_effects_p (XEXP (x, 0)))
+ switch (GET_CODE (XEXP (x, 0)))
+ {
+ case GE:
+ case GT:
+ /* X >= Y ? X : Y is the signed maximum.  */
+ x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
+ goto restart;
+ case LE:
+ case LT:
+ /* X <= Y ? X : Y is the signed minimum.  */
+ x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
+ goto restart;
+ case GEU:
+ case GTU:
+ /* Unsigned comparison gives the unsigned maximum.  */
+ x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
+ goto restart;
+ case LEU:
+ case LTU:
+ /* Unsigned comparison gives the unsigned minimum.  */
+ x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
+ goto restart;
+ }
+
+ /* If we have something like (if_then_else (ne A 0) (OP X C) X),
+    A is known to be either 0 or 1, and OP is an identity when its
+    second operand is zero, this can be done as (OP X (mult A C)).
+    Similarly if A is known to be 0 or -1 and also similarly if we have
+    a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
+    we don't destroy it). */
+
+ if (mode != VOIDmode
+ && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
+ && XEXP (XEXP (x, 0), 1) == const0_rtx
+ && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
+ || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
+ == GET_MODE_BITSIZE (mode))))
+ {
+ /* NZ is the arm selected when A is nonzero; Z is the other arm.  */
+ rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
+ ? XEXP (x, 1) : XEXP (x, 2));
+ rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
+ /* DIR is 1 when A is known to be 0 or 1, -1 when A is known to be
+    0 or -1; we multiply C by DIR so that A * (C * DIR) equals C
+    whenever A takes its nonzero value.  */
+ rtx dir = (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
+ ? const1_rtx : constm1_rtx);
+ /* C stays 0 unless NZ has a usable (OP Z C) shape below.  */
+ rtx c = 0;
+ /* M is the mode in which OP is performed: MODE, or the narrower
+    inner mode when NZ is a sign/zero extension.  */
+ enum machine_mode m = mode;
+ /* OP is only set on the paths that also set C; EXTEND_OP of 0
+    means "no extension needed" (0 is not a valid extension code,
+    so it serves as the sentinel).  */
+ enum rtx_code op, extend_op = 0;
+
+ /* Case 1: NZ is directly (OP Z C) for an OP that is an identity
+    when its second operand is zero.  */
+ if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
+ || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
+ || GET_CODE (nz) == ASHIFT
+ || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
+ && rtx_equal_p (XEXP (nz, 0), z))
+ c = XEXP (nz, 1), op = GET_CODE (nz);
+ /* Case 2: NZ is (sign_extend (OP (subreg Z) C)) and Z already has
+    enough sign-bit copies that the extension does not change it.  */
+ else if (GET_CODE (nz) == SIGN_EXTEND
+ && (GET_CODE (XEXP (nz, 0)) == PLUS
+ || GET_CODE (XEXP (nz, 0)) == MINUS
+ || GET_CODE (XEXP (nz, 0)) == IOR
+ || GET_CODE (XEXP (nz, 0)) == XOR
+ || GET_CODE (XEXP (nz, 0)) == ASHIFT
+ || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
+ || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
+ && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
+ && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
+ && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
+ && (num_sign_bit_copies (z, GET_MODE (z))
+ >= (GET_MODE_BITSIZE (mode)
+ - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
+ {
+ c = XEXP (XEXP (nz, 0), 1);
+ op = GET_CODE (XEXP (nz, 0));
+ extend_op = SIGN_EXTEND;
+ m = GET_MODE (XEXP (nz, 0));
+ }
+ /* Case 3: as case 2, but for ZERO_EXTEND; Z must have no bits set
+    above the inner mode so the extension does not change it.  */
+ else if (GET_CODE (nz) == ZERO_EXTEND
+ && (GET_CODE (XEXP (nz, 0)) == PLUS
+ || GET_CODE (XEXP (nz, 0)) == MINUS
+ || GET_CODE (XEXP (nz, 0)) == IOR
+ || GET_CODE (XEXP (nz, 0)) == XOR
+ || GET_CODE (XEXP (nz, 0)) == ASHIFT
+ || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
+ || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
+ && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+ && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
+ && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
+ && ((nonzero_bits (z, GET_MODE (z))
+ & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
+ == 0))
+ {
+ c = XEXP (XEXP (nz, 0), 1);
+ op = GET_CODE (XEXP (nz, 0));
+ extend_op = ZERO_EXTEND;
+ m = GET_MODE (XEXP (nz, 0));
+ }
+
+ /* If we found a usable form, build (OP Z (mult A (mult C DIR))),
+    re-applying the extension if one was stripped.  A, C, and Z are
+    each used once in the replacement, but Z and C were conditional
+    before, so they must be free of side effects.  */
+ if (c && ! side_effects_p (c) && ! side_effects_p (z))
+ {
+ temp
+ = gen_binary (MULT, m,
+ gen_lowpart_for_combine (m,
+ XEXP (XEXP (x, 0), 0)),
+ gen_binary (MULT, m, c, dir));
+
+ temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
+
+ if (extend_op != 0)
+ temp = gen_unary (extend_op, mode, temp);
+
+ return temp;
+ }
+ }
+
+ /* If we have (if_then_else (ne A 0) C1 0) and either A is known to
+    be 0 or 1 and C1 is a single bit or A is known to be 0 or -1 and
+    C1 is the negation of a single bit, we can convert this operation
+    to a shift. We can actually do this in more general cases, but it
+    doesn't seem worth it.
+
+    In the first case A << i is C1 exactly when A is 1 and 0 when A
+    is 0; in the second, A << i is -(1 << i) == C1 when A is -1.
+    I is the bit position, computed by exact_log2 below.  */
+
+ if (GET_CODE (XEXP (x, 0)) == NE && XEXP (XEXP (x, 0), 1) == const0_rtx
+ && XEXP (x, 2) == const0_rtx && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && ((1 == nonzero_bits (XEXP (XEXP (x, 0), 0), mode)
+ && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
+ || ((num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
+ == GET_MODE_BITSIZE (mode))
+ && (i = exact_log2 (- INTVAL (XEXP (x, 1)))) >= 0)))
+ return
+ simplify_shift_const (NULL_RTX, ASHIFT, mode,
+ gen_lowpart_for_combine (mode,
+ XEXP (XEXP (x, 0), 0)),
+ i);