/* Optimize by combining instructions for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
#include "real.h"
#include "toplev.h"
#include "target.h"
+#include "optabs.h"
+#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file. */
#include "output.h"
+#include "params.h"
/* Number of attempts to combine instructions in this function. */
{
/* Sanity check that we're replacing oldval with a CONST_INT
that is a valid sign-extension for the original mode. */
- if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
- GET_MODE (oldval)))
- abort ();
+ gcc_assert (INTVAL (newval)
+ == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));
/* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
CONST_INT is not valid, because after the replacement, the
original mode would be gone.  Unfortunately, we can't tell
when do_SUBST is called to replace the operand thereof, so we
perform this test on oldval instead, checking whether an
invalid replacement took place before we got here. */
- if ((GET_CODE (oldval) == SUBREG
- && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
- || (GET_CODE (oldval) == ZERO_EXTEND
- && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
- abort ();
+ gcc_assert (!(GET_CODE (oldval) == SUBREG
+ && GET_CODE (SUBREG_REG (oldval)) == CONST_INT));
+ gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
+ && GET_CODE (XEXP (oldval, 0)) == CONST_INT));
}
if (undobuf.frees)
new_i2_cost = 0;
}
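+ /* If the combination also required changing another insn (recorded
+ in undobuf.other_insn), include that insn's old and new cost in
+ the comparison.  If either cost is unknown, for instance because
+ the old cost was never recorded, clear OLD_COST so the combination
+ is not rejected on cost grounds.  */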
+ if (undobuf.other_insn)
+ {
+ int old_other_cost, new_other_cost;
+
+ old_other_cost = (INSN_UID (undobuf.other_insn) <= last_insn_cost
+ ? uid_insn_cost[INSN_UID (undobuf.other_insn)] : 0);
+ new_other_cost = insn_rtx_cost (PATTERN (undobuf.other_insn));
+ if (old_other_cost > 0 && new_other_cost > 0)
+ {
+ old_cost += old_other_cost;
+ new_cost += new_other_cost;
+ }
+ else
+ old_cost = 0;
+ }
+
/* Disallow this recombination if both new_cost and old_cost are
greater than zero, and new_cost is greater than old_cost. */
- if (!undobuf.other_insn
- && old_cost > 0
+ if (old_cost > 0
&& new_cost > old_cost)
{
if (dump_file)
/* Don't substitute into an incremented register. */
|| FIND_REG_INC_NOTE (i3, dest)
|| (succ && FIND_REG_INC_NOTE (succ, dest))
+ /* Don't substitute into a non-local goto; this confuses the CFG. */
+ || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
#if 0
/* Don't combine the end of a libcall into anything. */
/* ??? This gives worse code, and appears to be unnecessary, since no
/* If the clobber represents an earlyclobber operand, we must not
substitute an expression containing the clobbered register.
- As we do not analyse the constraint strings here, we have to
+ As we do not analyze the constraint strings here, we have to
make the conservative assumption. However, if the register is
a fixed hard reg, the clobber cannot represent any operand;
we leave it up to the machine description to either accept or
{
/* New patterns for I3 and I2, respectively. */
rtx newpat, newi2pat = 0;
+ rtvec newpat_vec_with_clobbers = 0;
int substed_i2 = 0, substed_i1 = 0;
/* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
int added_sets_1, added_sets_2;
{
/* We don't handle the case of the target word being wider
than a host wide int. */
- if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
- abort ();
+ gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD);
lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
lo |= (INTVAL (SET_SRC (PATTERN (i3)))
else
/* We don't handle the case of the higher word not fitting
entirely in either hi or lo. */
- abort ();
+ gcc_unreachable ();
combine_merges++;
subst_insn = i3;
/* Note which hard regs this insn has as inputs. */
mark_used_regs_combine (newpat);
+ /* If recog_for_combine fails, it strips existing clobbers. If we'll
+ consider splitting this pattern, we might need these clobbers. */
+ if (i1 && GET_CODE (newpat) == PARALLEL
+ && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
+ {
+ int len = XVECLEN (newpat, 0);
+
+ newpat_vec_with_clobbers = rtvec_alloc (len);
+ for (i = 0; i < len; i++)
+ RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
+ }
+
/* Is the result of combination a valid instruction? */
insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
}
}
+ /* If recog_for_combine has discarded clobbers, try to use them
+ again for the split. */
+ if (m_split == 0 && newpat_vec_with_clobbers)
+ m_split
+ = split_insns (gen_rtx_PARALLEL (VOIDmode,
+ newpat_vec_with_clobbers), i3);
+
if (m_split && NEXT_INSN (m_split) == NULL_RTX)
{
m_split = PATTERN (m_split);
SUBST (*split, newdest);
i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
+ /* recog_for_combine might have added CLOBBERs to newi2pat.
+ Make sure NEWPAT does not depend on the clobbered regs. */
+ if (GET_CODE (newi2pat) == PARALLEL)
+ for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
+ if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
+ {
+ rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
+ if (reg_overlap_mentioned_p (reg, newpat))
+ {
+ undo_all ();
+ return 0;
+ }
+ }
+
/* If the split point was a MULT and we didn't have one before,
don't use one now. */
if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
The simplest way to remove the link is to point it at I1,
which we know will be a NOTE. */
- ni2dest = SET_DEST (newi2pat);
+ /* newi2pat is usually a SET here; however, recog_for_combine might
+ have added some clobbers. */
+ if (GET_CODE (newi2pat) == PARALLEL)
+ ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
+ else
+ ni2dest = SET_DEST (newi2pat);
+
for (insn = NEXT_INSN (i3);
insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
|| insn != BB_HEAD (this_basic_block->next_bb));
/* If this is a register being set, ignore it. */
new = XEXP (x, i);
if (in_dest
- && (code == SUBREG || code == STRICT_LOW_PART
- || code == ZERO_EXTRACT)
&& i == 0
- && REG_P (new))
+ && (((code == SUBREG || code == ZERO_EXTRACT)
+ && REG_P (new))
+ || code == STRICT_LOW_PART))
;
else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
{
x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
new, GET_MODE (XEXP (x, 0)));
- if (! x)
- abort ();
+ gcc_assert (x);
}
else
SUBST (XEXP (x, i), new);
rtx op1 = XEXP (x, 1);
int len;
- if (GET_CODE (op1) != PARALLEL)
- abort ();
+ gcc_assert (GET_CODE (op1) == PARALLEL);
len = XVECLEN (op1, 0);
if (len == 1
&& GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
if (GET_CODE (op0) == AND)
{
- x = apply_distributive_law
+ rtx tmp = apply_distributive_law
(gen_binary (AND, mode,
gen_binary (IOR, mode, XEXP (op0, 0), op1),
gen_binary (IOR, mode, XEXP (op0, 1),
copy_rtx (op1))));
- if (GET_CODE (x) != IOR)
- return x;
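+ /* Only keep the distributed form when it is no longer an IOR and
+ is actually cheaper than the expression we started with. */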
+ if (GET_CODE (tmp) != IOR
+ && rtx_cost (tmp, SET) < rtx_cost (x, SET))
+ return tmp;
}
if (GET_CODE (op1) == AND)
{
- x = apply_distributive_law
+ rtx tmp = apply_distributive_law
(gen_binary (AND, mode,
gen_binary (IOR, mode, XEXP (op1, 0), op0),
gen_binary (IOR, mode, XEXP (op1, 1),
copy_rtx (op0))));
- if (GET_CODE (x) != IOR)
- return x;
+ if (GET_CODE (tmp) != IOR
+ && rtx_cost (tmp, SET) < rtx_cost (x, SET))
+ return tmp;
}
/* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
break;
default:
- abort ();
+ gcc_unreachable ();
}
return x;
case ZERO_EXTRACT:
unsignedp = 1;
+
+ /* ... fall through ... */
+
case SIGN_EXTRACT:
/* If the operand is a CLOBBER, just return it. */
if (GET_CODE (XEXP (x, 0)) == CLOBBER)
&& (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
return gen_lowpart (mode, x);
- /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
- MASK are already known to be zero in X, we need not do anything. */
- if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
- return x;
-
switch (code)
{
case CLOBBER:
in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
which is equal to STORE_FLAG_VALUE. */
if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
+ && GET_MODE (XEXP (x, 0)) == mode
&& exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
&& (nonzero_bits (XEXP (x, 0), mode)
== (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
return x;
}
- else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
- && subreg_lowpart_p (XEXP (src, 0))
- && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
- && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
- && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
- && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
- && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
+ if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
+ && subreg_lowpart_p (XEXP (src, 0))
+ && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
+ < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
+ && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
+ && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
+ && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
+ && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
{
assign = make_extraction (VOIDmode, dest, 0,
XEXP (SUBREG_REG (XEXP (src, 0)), 1),
/* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
one-bit field. */
- else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
- && XEXP (XEXP (src, 0), 0) == const1_rtx
- && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
+ if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
+ && XEXP (XEXP (src, 0), 0) == const1_rtx
+ && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
{
assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
1, 1, 1, 0);
return x;
}
+ /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
+ SRC is an AND with all bits of that field set, then we can discard
+ the AND. */
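+ /* For example, a 4-bit field gives a ZE_MASK of 0xf: an AND mask
+ of 0xff covers the whole field and the AND can be dropped, while
+ an AND mask of 0x36 is reduced to 0x36 & 0xf, i.e. 0x6. */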
+ if (GET_CODE (dest) == ZERO_EXTRACT
+ && GET_CODE (XEXP (dest, 1)) == CONST_INT
+ && GET_CODE (src) == AND
+ && GET_CODE (XEXP (src, 1)) == CONST_INT)
+ {
+ HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
+ unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
+ unsigned HOST_WIDE_INT ze_mask;
+
+ if (width >= HOST_BITS_PER_WIDE_INT)
+ ze_mask = -1;
+ else
+ ze_mask = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
+
+ /* Complete overlap. We can remove the source AND. */
+ if ((and_mask & ze_mask) == ze_mask)
+ return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
+
+ /* Partial overlap. We can reduce the source AND. */
+ if ((and_mask & ze_mask) != and_mask)
+ {
+ mode = GET_MODE (src);
+ src = gen_rtx_AND (mode, XEXP (src, 0),
+ gen_int_mode (and_mask & ze_mask, mode));
+ return gen_rtx_SET (VOIDmode, dest, src);
+ }
+ }
+
/* The other case we handle is assignments into a constant-position
field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
a mask that has all one bits except for a group of zero bits and
/* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
to VAROP and return the new constant. */
if (GET_CODE (varop) == CONST_INT)
- return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
+ return gen_int_mode (INTVAL (varop) & constop, mode);
/* See what bits may be nonzero in VAROP. Unlike the general case of
a call to nonzero_bits, here we don't care about bits outside
varop = XEXP (varop, 0);
continue;
}
+
+ /* Check for 'PLUS signbit', which is the canonical form of 'XOR
+ signbit', and attempt to change the PLUS to an XOR and move it to
+ the outer operation, as is done above for logical shifts in the
+ AND/IOR/XOR case.  See that handling for the reasoning. */
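+ /* For instance, with 32-bit operands,
+ (lshiftrt (plus X (const_int 0x80000000)) (const_int 3)) becomes
+ (xor (lshiftrt X (const_int 3)) (const_int 0x10000000)): adding
+ the sign bit cannot carry into the lower bits, so the PLUS acts
+ as an XOR whose constant can be shifted separately and applied
+ outside the shift. */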
+ if (code == LSHIFTRT
+ && GET_CODE (XEXP (varop, 1)) == CONST_INT
+ && mode_signbit_p (result_mode, XEXP (varop, 1))
+ && (new = simplify_binary_operation (code, result_mode,
+ XEXP (varop, 1),
+ GEN_INT (count))) != 0
+ && GET_CODE (new) == CONST_INT
+ && merge_outer_ops (&outer_op, &outer_const, XOR,
+ INTVAL (new), result_mode, &complement_p))
+ {
+ varop = XEXP (varop, 0);
+ continue;
+ }
+
break;
case MINUS:
An insn containing that will not be recognized. */
static rtx
-gen_lowpart_for_combine (enum machine_mode mode, rtx x)
+gen_lowpart_for_combine (enum machine_mode omode, rtx x)
{
+ enum machine_mode imode = GET_MODE (x);
+ unsigned int osize = GET_MODE_SIZE (omode);
+ unsigned int isize = GET_MODE_SIZE (imode);
rtx result;
- if (GET_MODE (x) == mode)
+ if (omode == imode)
return x;
- /* Return identity if this is a CONST or symbolic
- reference. */
- if (mode == Pmode
+ /* Return identity if this is a CONST or symbolic reference. */
+ if (omode == Pmode
&& (GET_CODE (x) == CONST
|| GET_CODE (x) == SYMBOL_REF
|| GET_CODE (x) == LABEL_REF))
/* We can only support MODE being wider than a word if X is a
constant integer or has a mode the same size. */
-
- if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
- && ! ((GET_MODE (x) == VOIDmode
+ if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
+ && ! ((imode == VOIDmode
&& (GET_CODE (x) == CONST_INT
|| GET_CODE (x) == CONST_DOUBLE))
- || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
- return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
+ || isize == osize))
+ goto fail;
/* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
won't know what to do. So we will strip off the SUBREG here and
if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
{
x = SUBREG_REG (x);
- if (GET_MODE (x) == mode)
+
+ /* In case we fall through to the address adjustments further
+ below, update the recorded mode and size of X (imode and isize),
+ since we have just changed X. */
+ imode = GET_MODE (x);
+
+ if (imode == omode)
return x;
+
+ isize = GET_MODE_SIZE (imode);
}
- result = gen_lowpart_common (mode, x);
+ result = gen_lowpart_common (omode, x);
+
#ifdef CANNOT_CHANGE_MODE_CLASS
- if (result != 0
- && GET_CODE (result) == SUBREG
- && REG_P (SUBREG_REG (result))
- && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER)
- bitmap_set_bit (&subregs_of_mode, REGNO (SUBREG_REG (result))
- * MAX_MACHINE_MODE
- + GET_MODE (result));
+ if (result != 0 && GET_CODE (result) == SUBREG)
+ record_subregs_of_mode (result);
#endif
if (result)
/* Refuse to work on a volatile memory ref or one with a mode-dependent
address. */
if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
- return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
+ goto fail;
/* If we want to refer to something bigger than the original memref,
generate a paradoxical subreg instead. That will force a reload
of the original memref X. */
- if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
- return gen_rtx_SUBREG (mode, x, 0);
+ if (isize < osize)
+ return gen_rtx_SUBREG (omode, x, 0);
if (WORDS_BIG_ENDIAN)
- offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
- - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
+ offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
+ /* Adjust the address so that the address-after-the-data is
+ unchanged. */
if (BYTES_BIG_ENDIAN)
- {
- /* Adjust the address so that the address-after-the-data is
- unchanged. */
- offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
- - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
- }
+ offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
- return adjust_address_nv (x, mode, offset);
+ return adjust_address_nv (x, omode, offset);
}
/* If X is a comparison operator, rewrite it in a new mode. This
probably won't match, but may allow further simplifications. */
else if (COMPARISON_P (x))
- return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
+ return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
/* If we couldn't simplify X any other way, just enclose it in a
SUBREG. Normally, this SUBREG won't match, but some patterns may
{
int offset = 0;
rtx res;
- enum machine_mode sub_mode = GET_MODE (x);
- offset = subreg_lowpart_offset (mode, sub_mode);
- if (sub_mode == VOIDmode)
+ offset = subreg_lowpart_offset (omode, imode);
+ if (imode == VOIDmode)
{
- sub_mode = int_mode_for_mode (mode);
- x = gen_lowpart_common (sub_mode, x);
- if (x == 0)
- return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
+ imode = int_mode_for_mode (omode);
+ x = gen_lowpart_common (imode, x);
+ if (x == NULL)
+ goto fail;
}
- res = simplify_gen_subreg (mode, x, sub_mode, offset);
+ res = simplify_gen_subreg (omode, x, imode, offset);
if (res)
return res;
- return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
}
+
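+ /* Returning a CLOBBER signals to the callers in combine that no
+ lowpart of X in OMODE could be generated. */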
+ fail:
+ return gen_rtx_CLOBBER (imode, const0_rtx);
}
\f
/* These routines make binary and unary operations by first seeing if they
break;
case SIGN_EXTEND:
- /* Can simplify (compare (zero/sign_extend FOO) CONST)
- to (compare FOO CONST) if CONST fits in FOO's mode and we
- are either testing inequality or have an unsigned comparison
- with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
- if (! unsigned_comparison_p
- && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
- <= HOST_BITS_PER_WIDE_INT)
+ /* Can simplify (compare (zero/sign_extend FOO) CONST) to
+ (compare FOO CONST) if CONST fits in FOO's mode and we
+ are either testing inequality or have an unsigned
+ comparison with ZERO_EXTEND or a signed comparison with
+ SIGN_EXTEND. But don't do it if we don't have a compare
+ insn of the given mode, since we'd have to revert it
+ later on, and then we wouldn't know whether to sign- or
+ zero-extend. */
+ mode = GET_MODE (XEXP (op0, 0));
+ if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
+ && ! unsigned_comparison_p
+ && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
&& ((unsigned HOST_WIDE_INT) const_op
- < (((unsigned HOST_WIDE_INT) 1
- << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
+ < (((unsigned HOST_WIDE_INT) 1
+ << (GET_MODE_BITSIZE (mode) - 1))))
+ && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
{
op0 = XEXP (op0, 0);
continue;
break;
case SUBREG:
- /* Check for the case where we are comparing A - C1 with C2,
- both constants are smaller than 1/2 the maximum positive
- value in MODE, and the comparison is equality or unsigned.
- In that case, if A is either zero-extended to MODE or has
- sufficient sign bits so that the high-order bit in MODE
- is a copy of the sign in the inner mode, we can prove that it is
- safe to do the operation in the wider mode. This simplifies
- many range checks. */
+ /* Check for the case where we are comparing A - C1 with C2, that is
+
+ (subreg:MODE (plus (A) (-C1))) op (C2)
+
+ with C1 a constant, and try to lift the SUBREG, i.e. to do the
+ comparison in the wider mode. One of the following two conditions
+ must be true in order for this to be valid:
+
+ 1. The mode extension results in the same bit pattern being added
+ on both sides and the comparison is equality or unsigned. As
+ C2 has been truncated to fit in MODE, the pattern can only be
+ all 0s or all 1s.
+
+ 2. The mode extension results in the sign bit being copied on
+ each side.
+
+ The difficulty here is that we have predicates for A but not for
+ (A - C1) so we need to check that C1 is within proper bounds so
+ as to perturb A as little as possible. */
if (mode_width <= HOST_BITS_PER_WIDE_INT
&& subreg_lowpart_p (op0)
+ && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
&& GET_CODE (SUBREG_REG (op0)) == PLUS
- && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
- && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
- && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
- < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
- && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
- && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
- GET_MODE (SUBREG_REG (op0)))
- & ~GET_MODE_MASK (mode))
- || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
- GET_MODE (SUBREG_REG (op0)))
- > (unsigned int)
- (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
- - GET_MODE_BITSIZE (mode)))))
- {
- op0 = SUBREG_REG (op0);
- continue;
+ && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
+ {
+ enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
+ rtx a = XEXP (SUBREG_REG (op0), 0);
+ HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
+
+ if ((c1 > 0
+ && (unsigned HOST_WIDE_INT) c1
+ < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
+ && (equality_comparison_p || unsigned_comparison_p)
+ /* (A - C1) zero-extends if it is positive and sign-extends
+ if it is negative, C2 both zero- and sign-extends. */
+ && ((0 == (nonzero_bits (a, inner_mode)
+ & ~GET_MODE_MASK (mode))
+ && const_op >= 0)
+ /* (A - C1) sign-extends if it is positive and 1-extends
+ if it is negative, C2 both sign- and 1-extends. */
+ || (num_sign_bit_copies (a, inner_mode)
+ > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
+ - mode_width)
+ && const_op < 0)))
+ || ((unsigned HOST_WIDE_INT) c1
+ < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
+ /* (A - C1) always sign-extends, like C2. */
+ && num_sign_bit_copies (a, inner_mode)
+ > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
+ - mode_width - 1)))
+ {
+ op0 = SUBREG_REG (op0);
+ continue;
+ }
}
/* If the inner mode is narrower and we are extracting the low part,
/* ... fall through ... */
case ZERO_EXTEND:
- if ((unsigned_comparison_p || equality_comparison_p)
- && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
- <= HOST_BITS_PER_WIDE_INT)
- && ((unsigned HOST_WIDE_INT) const_op
- < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
+ mode = GET_MODE (XEXP (op0, 0));
+ if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
+ && (unsigned_comparison_p || equality_comparison_p)
+ && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
+ && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
{
op0 = XEXP (op0, 0);
continue;
return gen_binary (reversed_code, mode, op0, op1);
}
\f
+/* Utility function for record_value_for_reg. Count number of
+ rtxs in X. */
+static int
+count_rtxs (rtx x)
+{
+ enum rtx_code code = GET_CODE (x);
+ const char *fmt;
+ int i, ret = 1;
+
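+ /* Note that an operand shared between the two arms of a binary
+ rtx, or repeated inside one of the arms, is deliberately counted
+ twice below. */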
+ if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
+ || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
+ {
+ rtx x0 = XEXP (x, 0);
+ rtx x1 = XEXP (x, 1);
+
+ if (x0 == x1)
+ return 1 + 2 * count_rtxs (x0);
+
+ if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
+ || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
+ && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
+ return 2 + 2 * count_rtxs (x0)
+ + count_rtxs (x0 == XEXP (x1, 0)
+ ? XEXP (x1, 1) : XEXP (x1, 0));
+
+ if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
+ || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
+ && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
+ return 2 + 2 * count_rtxs (x1)
+ + count_rtxs (x1 == XEXP (x0, 0)
+ ? XEXP (x0, 1) : XEXP (x0, 0));
+ }
+
+ fmt = GET_RTX_FORMAT (code);
+ for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
+ if (fmt[i] == 'e')
+ ret += count_rtxs (XEXP (x, i));
+
+ return ret;
+}
+\f
/* Utility function for following routine. Called when X is part of a value
being stored into last_set_value. Sets last_set_table_tick
for each register mentioned. Similar to mention_regs in cse.c */
&& GET_CODE (XEXP (tem, 0)) == CLOBBER
&& GET_CODE (XEXP (tem, 1)) == CLOBBER)
tem = XEXP (tem, 0);
+ else if (count_occurrences (value, reg, 1) >= 2)
+ {
+ /* If there are two or more occurrences of REG in VALUE,
+ prevent the value from growing too much. */
+ if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
+ tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
+ }
value = replace_rtx (copy_rtx (value), reg, tem);
}
while (GET_CODE (testreg) == SUBREG
|| GET_CODE (testreg) == ZERO_EXTRACT
- || GET_CODE (testreg) == SIGN_EXTRACT
|| GET_CODE (testreg) == STRICT_LOW_PART)
testreg = XEXP (testreg, 0);
case REG_NON_LOCAL_GOTO:
if (JUMP_P (i3))
place = i3;
- else if (i2 && JUMP_P (i2))
- place = i2;
else
- abort ();
+ {
+ gcc_assert (i2 && JUMP_P (i2));
+ place = i2;
+ }
break;
case REG_EH_REGION:
place = i3;
else if (i2 && CALL_P (i2))
place = i2;
- else if (flag_non_call_exceptions)
+ else
{
+ gcc_assert (flag_non_call_exceptions);
if (may_trap_p (i3))
place = i3;
else if (i2 && may_trap_p (i2))
can now prove that the instructions can't trap. Drop the
note in this case. */
}
- else
- abort ();
break;
- case REG_ALWAYS_RETURN:
case REG_NORETURN:
case REG_SETJMP:
/* These notes must remain with the call. It should not be
possible for both I2 and I3 to be a call. */
if (CALL_P (i3))
place = i3;
- else if (i2 && CALL_P (i2))
- place = i2;
else
- abort ();
+ {
+ gcc_assert (i2 && CALL_P (i2));
+ place = i2;
+ }
break;
case REG_UNUSED:
a JUMP_LABEL instead or decrement LABEL_NUSES. */
if (place && JUMP_P (place))
{
- if (!JUMP_LABEL (place))
+ rtx label = JUMP_LABEL (place);
+
+ if (!label)
JUMP_LABEL (place) = XEXP (note, 0);
- else if (JUMP_LABEL (place) != XEXP (note, 0))
- abort ();
- else if (LABEL_P (JUMP_LABEL (place)))
- LABEL_NUSES (JUMP_LABEL (place))--;
+ else
+ {
+ gcc_assert (label == XEXP (note, 0));
+ if (LABEL_P (label))
+ LABEL_NUSES (label)--;
+ }
place = 0;
}
if (place2 && JUMP_P (place2))
{
- if (!JUMP_LABEL (place2))
+ rtx label = JUMP_LABEL (place2);
+
+ if (!label)
JUMP_LABEL (place2) = XEXP (note, 0);
- else if (JUMP_LABEL (place2) != XEXP (note, 0))
- abort ();
- else if (LABEL_P (JUMP_LABEL (place2)))
- LABEL_NUSES (JUMP_LABEL (place2))--;
+ else
+ {
+ gcc_assert (label == XEXP (note, 0));
+ if (LABEL_P (label))
+ LABEL_NUSES (label)--;
+ }
place2 = 0;
}
break;
default:
/* Any other notes should not be present at this point in the
compilation. */
- abort ();
+ gcc_unreachable ();
}
if (place)
reg = SET_DEST (set);
while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
- || GET_CODE (reg) == SIGN_EXTRACT
|| GET_CODE (reg) == STRICT_LOW_PART)
reg = XEXP (reg, 0);
&& NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
insn = NEXT_INSN (insn);
- if (INSN_UID (insn) > max_uid_cuid)
- abort ();
+ gcc_assert (INSN_UID (insn) <= max_uid_cuid);
return INSN_CUID (insn);
}