/* Optimize by combining instructions for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
/* This module is essentially the "combiner" phase of the U. of Arizona
Portable Optimizer, but redone to work on our list-structured
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
-#include "insn-codes.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
BITS_PER_WORD would invoke undefined behavior. Work around it. */
#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
- (((unsigned HOST_WIDE_INT)(val) << (BITS_PER_WORD - 1)) << 1)
+ (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)
/* Maximum register number, which is the size of the tables below. */
rtx f;
unsigned int nregs;
{
- register rtx insn, next;
+ rtx insn, next;
#ifdef HAVE_cc0
- register rtx prev;
+ rtx prev;
#endif
- register int i;
- register rtx links, nextlinks;
+ int i;
+ rtx links, nextlinks;
int new_direct_jump_p = 0;
rtx link = XEXP (links, 0);
/* If the linked insn has been replaced by a note, then there
- is no point in persuing this chain any further. */
+ is no point in pursuing this chain any further. */
if (GET_CODE (link) == NOTE)
- break;
+ continue;
for (nextlinks = LOG_LINKS (link);
nextlinks;
nextlinks = XEXP (nextlinks, 1))
- if ((next = try_combine (insn, XEXP (links, 0),
+ if ((next = try_combine (insn, link,
XEXP (nextlinks, 0),
&new_direct_jump_p)) != 0)
goto retry;
if (need_refresh)
{
- compute_bb_for_insn (get_max_uid ());
update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
PROP_DEATH_NOTES);
}
for the SH4 port. */
case USE:
/* Combining an isolated USE doesn't make sense.
- We depend here on combinable_i3_pat to reject them. */
+ We depend here on combinable_i3pat to reject them. */
/* The code below this loop only verifies that the inputs of
the SET in INSN do not change. We call reg_set_between_p
- to verify that the REG in the USE does not change betweeen
+ to verify that the REG in the USE does not change between
I3 and INSN.
If the USE in INSN was for a pseudo register, the matching
insn pattern will likely match any register; combining this
with any other USE would only be safe if we knew that the
used registers have identical values, or if there was
something to tell them apart, e.g. different modes. For
- now, we forgo such compilcated tests and simply disallow
+ now, we forgo such complicated tests and simply disallow
combining of USES of pseudo registers with any other USE. */
if (GET_CODE (XEXP (elt, 0)) == REG
&& GET_CODE (PATTERN (i3)) == PARALLEL)
for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
- return 0;
+ return 0;
}
/* If INSN is an asm, and DEST is a hard register, reject, since it has
static rtx
try_combine (i3, i2, i1, new_direct_jump_p)
- register rtx i3, i2, i1;
- register int *new_direct_jump_p;
+ rtx i3, i2, i1;
+ int *new_direct_jump_p;
{
/* New patterns for I3 and I2, respectively. */
rtx newpat, newi2pat = 0;
int maxreg;
rtx temp;
- register rtx link;
+ rtx link;
int i;
/* Exit early if one of the insns involved can't be used for
abort ();
lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
- lo |= INTVAL (SET_SRC (PATTERN (i3)));
+ lo |= (INTVAL (SET_SRC (PATTERN (i3)))
+ & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
}
else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
hi = INTVAL (SET_SRC (PATTERN (i3)));
isn't mentioned in any SETs in NEWPAT that are field assignments. */
if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
- 0, (rtx*)0))
+ 0, (rtx*) 0))
{
undo_all ();
return 0;
XVECEXP (newpat, 0, 0) = old;
}
- if (added_sets_1)
- XVECEXP (newpat, 0, --total_sets)
- = (GET_CODE (PATTERN (i1)) == PARALLEL
- ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
+ if (added_sets_1)
+ XVECEXP (newpat, 0, --total_sets)
+ = (GET_CODE (PATTERN (i1)) == PARALLEL
+ ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
- if (added_sets_2)
- {
- /* If there is no I1, use I2's body as is. We used to also not do
- the subst call below if I2 was substituted into I3,
- but that could lose a simplification. */
- if (i1 == 0)
- XVECEXP (newpat, 0, --total_sets) = i2pat;
- else
- /* See comment where i2pat is assigned. */
- XVECEXP (newpat, 0, --total_sets)
- = subst (i2pat, i1dest, i1src, 0, 0);
- }
+ if (added_sets_2)
+ {
+ /* If there is no I1, use I2's body as is. We used to also not do
+ the subst call below if I2 was substituted into I3,
+ but that could lose a simplification. */
+ if (i1 == 0)
+ XVECEXP (newpat, 0, --total_sets) = i2pat;
+ else
+ /* See comment where i2pat is assigned. */
+ XVECEXP (newpat, 0, --total_sets)
+ = subst (i2pat, i1dest, i1src, 0, 0);
+ }
}
/* We come here when we are replacing a destination in I2 with the
}
}
+ /* If we've split a jump pattern, we'll wind up with a sequence even
+ with one instruction. We can handle that below, so extract it. */
+ if (m_split && GET_CODE (m_split) == SEQUENCE
+ && XVECLEN (m_split, 0) == 1)
+ m_split = PATTERN (XVECEXP (m_split, 0, 0));
+
if (m_split && GET_CODE (m_split) != SEQUENCE)
{
insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
&& GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
&& GET_CODE (SET_SRC (x)) == CONST_INT
&& ((INTVAL (XEXP (SET_DEST (x), 1))
- + INTVAL (XEXP (SET_DEST (x), 2)))
+ + INTVAL (XEXP (SET_DEST (x), 2)))
<= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
&& ! side_effects_p (XEXP (SET_DEST (x), 0)))
{
&& GET_CODE (XEXP (SET_SRC (x), 0)) == REG
&& (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
&& GET_CODE (SET_DEST (x)) == REG
- && (split = find_single_use (SET_DEST (x), insn, (rtx*)0)) != 0
+ && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
&& (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
&& XEXP (*split, 0) == SET_DEST (x)
&& XEXP (*split, 1) == const0_rtx)
static rtx
subst (x, from, to, in_dest, unique_copy)
- register rtx x, from, to;
+ rtx x, from, to;
int in_dest;
int unique_copy;
{
- register enum rtx_code code = GET_CODE (x);
+ enum rtx_code code = GET_CODE (x);
enum machine_mode op0_mode = VOIDmode;
- register const char *fmt;
- register int len, i;
+ const char *fmt;
+ int len, i;
rtx new;
/* Two expressions are equal if they are identical copies of a shared
{
if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
{
if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
just make the comparison operation. */
if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
x = gen_binary (cond_code, mode, cond, cop1);
- else if (true_rtx == const0_rtx && false_rtx == const_true_rtx)
+ else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
+ && reverse_condition (cond_code) != UNKNOWN)
x = gen_binary (reverse_condition (cond_code),
mode, cond, cop1);
&& subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x))
return gen_lowpart_for_combine (mode, SUBREG_REG (x));
+ if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
+ break;
{
rtx temp;
temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
simplify_gen_unary (NEG, mode,
simplify_gen_unary (ABS, mode, true_rtx, mode),
mode);
- default:
- break;
+ default:
+ break;
}
/* Look for MIN or MAX. */
if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
&& GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
{
- int byte_offset = SUBREG_BYTE (XEXP (SET_DEST (x), 0));
-
inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
- pos = GEN_INT (BITS_PER_WORD * (byte_offset / UNITS_PER_WORD));
+ pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
}
else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
&& GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
/* Get the mode to use should INNER not be a MEM, the mode for the position,
and the mode for the result. */
-#ifdef HAVE_insv
- if (in_dest)
+ if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
{
- wanted_inner_reg_mode
- = insn_data[(int) CODE_FOR_insv].operand[0].mode;
- if (wanted_inner_reg_mode == VOIDmode)
- wanted_inner_reg_mode = word_mode;
-
- pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
- if (pos_mode == VOIDmode)
- pos_mode = word_mode;
-
- extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
- if (extraction_mode == VOIDmode)
- extraction_mode = word_mode;
+ wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
+ pos_mode = mode_for_extraction (EP_insv, 2);
+ extraction_mode = mode_for_extraction (EP_insv, 3);
}
-#endif
-#ifdef HAVE_extzv
- if (! in_dest && unsignedp)
+ if (! in_dest && unsignedp
+ && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
{
- wanted_inner_reg_mode
- = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
- if (wanted_inner_reg_mode == VOIDmode)
- wanted_inner_reg_mode = word_mode;
-
- pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
- if (pos_mode == VOIDmode)
- pos_mode = word_mode;
-
- extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
- if (extraction_mode == VOIDmode)
- extraction_mode = word_mode;
+ wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
+ pos_mode = mode_for_extraction (EP_extzv, 3);
+ extraction_mode = mode_for_extraction (EP_extzv, 0);
}
-#endif
-#ifdef HAVE_extv
- if (! in_dest && ! unsignedp)
+ if (! in_dest && ! unsignedp
+ && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
{
- wanted_inner_reg_mode
- = insn_data[(int) CODE_FOR_extv].operand[1].mode;
- if (wanted_inner_reg_mode == VOIDmode)
- wanted_inner_reg_mode = word_mode;
-
- pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
- if (pos_mode == VOIDmode)
- pos_mode = word_mode;
-
- extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
- if (extraction_mode == VOIDmode)
- extraction_mode = word_mode;
+ wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
+ pos_mode = mode_for_extraction (EP_extv, 3);
+ extraction_mode = mode_for_extraction (EP_extv, 0);
}
-#endif
/* Never narrow an object, since that might not be safe. */
rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
/* If we know that no extraneous bits are set, and that the high
- bit is not set, convert extraction to cheaper one - eighter
+ bit is not set, convert extraction to cheaper one - either
SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
cases. */
if (flag_expensive_optimizations
NULL_RTX, 0);
/* If we have something other than a SUBREG, we might have
- done an expansion, so rerun outselves. */
+ done an expansion, so rerun ourselves. */
if (GET_CODE (newer) != SUBREG)
newer = make_compound_operation (newer, in_code);
need it. */
if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
- && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
+ && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
+ == (HOST_WIDE_INT) mask))
x = XEXP (x, 0);
/* If it remains an AND, try making another AND with the bits
binop:
/* For most binary operations, just propagate into the operation and
- change the mode if we have an operation of that mode. */
+ change the mode if we have an operation of that mode. */
op0 = gen_lowpart_for_combine (op_mode,
force_to_mode (XEXP (x, 0), mode, mask,
{
int i = -1;
- /* If the considered data is wider then HOST_WIDE_INT, we can't
+ /* If the considered data is wider than HOST_WIDE_INT, we can't
represent a mask for all its bits in a single scalar.
But we only care about the lower bits, so calculate these. */
case NEG:
/* If we just want the low-order bit, the NEG isn't needed since it
- won't change the low-order bit. */
+ won't change the low-order bit. */
if (mask == 1)
return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
}
}
- /* Similarly for MULT, AND and UMIN, execpt that for these the result
+ /* Similarly for MULT, AND and UMIN, except that for these the result
is always zero. */
if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
&& (code == MULT || code == AND || code == UMIN)
if (side_effects_p (x))
return x;
- if (cond == EQ && rtx_equal_p (x, reg) && !FLOAT_MODE_P (cond))
+ /* If either operand of the condition is a floating point value,
+ then we have to avoid collapsing an EQ comparison. */
+ if (cond == EQ
+ && rtx_equal_p (x, reg)
+ && ! FLOAT_MODE_P (GET_MODE (x))
+ && ! FLOAT_MODE_P (GET_MODE (val)))
return val;
+
if (cond == UNEQ && rtx_equal_p (x, reg))
return val;
MODE. */
nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
- nonzero = trunc_int_for_mode (nonzero, mode);
/* Turn off all bits in the constant that are known to already be zero.
Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
simplify_and_const_int (NULL_RTX, GET_MODE (varop),
XEXP (varop, 1), constop))));
+ /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
+ the AND and see if one of the operands simplifies to zero. If so, we
+ may eliminate it. */
+
+ if (GET_CODE (varop) == PLUS
+ && exact_log2 (constop + 1) >= 0)
+ {
+ rtx o0, o1;
+
+ o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
+ o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
+ if (o0 == const0_rtx)
+ return o1;
+ if (o1 == const0_rtx)
+ return o0;
+ }
+
/* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
if we already had one (just check for the simplest cases). */
if (x && GET_CODE (XEXP (x, 0)) == SUBREG
/* If we are only masking insignificant bits, return VAROP. */
if (constop == nonzero)
x = varop;
-
- /* Otherwise, return an AND. See how much, if any, of X we can use. */
- else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
- x = gen_binary (AND, mode, varop, GEN_INT (constop));
-
else
{
- if (GET_CODE (XEXP (x, 1)) != CONST_INT
- || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
- SUBST (XEXP (x, 1), GEN_INT (constop));
+ /* Otherwise, return an AND. */
+ constop = trunc_int_for_mode (constop, mode);
+ /* See how much, if any, of X we can use. */
+ if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
+ x = gen_binary (AND, mode, varop, GEN_INT (constop));
- SUBST (XEXP (x, 0), varop);
+ else
+ {
+ if (GET_CODE (XEXP (x, 1)) != CONST_INT
+ || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
+ SUBST (XEXP (x, 1), GEN_INT (constop));
+
+ SUBST (XEXP (x, 0), varop);
+ }
}
return x;
return nonzero_bits (tem, mode);
}
else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
- return reg_nonzero_bits[REGNO (x)] & nonzero;
+ {
+ unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)];
+
+ if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width)
+ /* We don't know anything about the upper bits. */
+ mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
+ return nonzero & mask;
+ }
else
return nonzero;
if (tem != 0)
return num_sign_bit_copies (tem, mode);
- if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
+ if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0
+ && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth)
return reg_sign_bit_copies[REGNO (x)];
break;
if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
&& (code == PLUS || code == MINUS)
&& GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0)))
- result = MAX ((GET_MODE_BITSIZE (Pmode)
- - GET_MODE_BITSIZE (ptr_mode) + 1),
+ result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
+ - GET_MODE_BITSIZE (ptr_mode) + 1),
result);
#endif
return result;
return num_sign_bit_copies (XEXP (x, 0), mode);
case UMOD:
- /* The result must be <= the scond operand. */
+ /* The result must be <= the second operand. */
return num_sign_bit_copies (XEXP (x, 1), mode);
case DIV:
are ASHIFTRT and ROTATE, which are always done in their original mode, */
static rtx
-simplify_shift_const (x, code, result_mode, varop, input_count)
+simplify_shift_const (x, code, result_mode, varop, orig_count)
rtx x;
enum rtx_code code;
enum machine_mode result_mode;
rtx varop;
- int input_count;
+ int orig_count;
{
enum rtx_code orig_code = code;
- int orig_count = input_count;
unsigned int count;
int signed_count;
enum machine_mode mode = result_mode;
int complement_p = 0;
rtx new;
+ /* Make sure and truncate the "natural" shift on the way in. We don't
+ want to do this inside the loop as it makes it more difficult to
+ combine shifts. */
+#ifdef SHIFT_COUNT_TRUNCATED
+ if (SHIFT_COUNT_TRUNCATED)
+ orig_count &= GET_MODE_BITSIZE (mode) - 1;
+#endif
+
/* If we were given an invalid count, don't do anything except exactly
what was requested. */
- if (input_count < 0 || input_count > (int) GET_MODE_BITSIZE (mode))
+ if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
{
if (x)
return x;
- return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (input_count));
+ return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
}
- count = input_count;
-
- /* Make sure and truncate the "natural" shift on the way in. We don't
- want to do this inside the loop as it makes it more difficult to
- combine shifts. */
-#ifdef SHIFT_COUNT_TRUNCATED
- if (SHIFT_COUNT_TRUNCATED)
- count %= GET_MODE_BITSIZE (mode);
-#endif
+ count = orig_count;
/* Unless one of the branches of the `if' in this loop does a `continue',
we will `break' the loop after the `if'. */
if (new != 0)
x = new;
else
- {
- if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
- x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
-
- SUBST (XEXP (x, 0), varop);
- SUBST (XEXP (x, 1), const_rtx);
- }
+ x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);
/* If we have an outer operation and we just made a shift, it is
possible that we could have simplified the shift were it not
rtx insn;
rtx *pnotes;
{
- register rtx pat = *pnewpat;
+ rtx pat = *pnewpat;
int insn_code_number;
int num_clobbers_to_add = 0;
int i;
static rtx
gen_lowpart_for_combine (mode, x)
enum machine_mode mode;
- register rtx x;
+ rtx x;
{
rtx result;
if (GET_CODE (x) == MEM)
{
- register int offset = 0;
+ int offset = 0;
/* Refuse to work on a volatile memory ref or one with a mode-dependent
address. */
{
if (BITS_BIG_ENDIAN)
{
-#ifdef HAVE_extzv
- mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
- if (mode == VOIDmode)
- mode = word_mode;
- i = (GET_MODE_BITSIZE (mode) - 1 - i);
-#else
- i = BITS_PER_WORD - 1 - i;
-#endif
+ enum machine_mode new_mode
+ = mode_for_extraction (EP_extzv, 1);
+ if (new_mode == MAX_MACHINE_MODE)
+ i = BITS_PER_WORD - 1 - i;
+ else
+ {
+ mode = new_mode;
+ i = (GET_MODE_BITSIZE (mode) - 1 - i);
+ }
}
op0 = XEXP (op0, 2);
if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
+ && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
&& (code == NE || code == EQ)
&& ((GET_MODE_SIZE (GET_MODE (op0))
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
+ && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
&& (code == NE || code == EQ)
&& (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
<= HOST_BITS_PER_WIDE_INT)
combine_reversed_comparison_code (exp)
rtx exp;
{
- enum rtx_code code1 = reversed_comparison_code (exp, NULL);
- rtx x;
-
- if (code1 != UNKNOWN
- || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
- return code1;
- /* Otherwise try and find where the condition codes were last set and
- use that. */
- x = get_last_value (XEXP (exp, 0));
- if (!x || GET_CODE (x) != COMPARE)
- return UNKNOWN;
- return reversed_comparison_code_parts (GET_CODE (exp),
- XEXP (x, 0), XEXP (x, 1), NULL);
+ enum rtx_code code1 = reversed_comparison_code (exp, NULL);
+ rtx x;
+
+ if (code1 != UNKNOWN
+ || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC)
+ return code1;
+ /* Otherwise try and find where the condition codes were last set and
+ use that. */
+ x = get_last_value (XEXP (exp, 0));
+ if (!x || GET_CODE (x) != COMPARE)
+ return UNKNOWN;
+ return reversed_comparison_code_parts (GET_CODE (exp),
+ XEXP (x, 0), XEXP (x, 1), NULL);
}
/* Return comparison with reversed code of EXP and operands OP0 and OP1.
Return NULL_RTX in case we fail to do the reversal. */
update_table_tick (x)
rtx x;
{
- register enum rtx_code code = GET_CODE (x);
- register const char *fmt = GET_RTX_FORMAT (code);
- register int i;
+ enum rtx_code code = GET_CODE (x);
+ const char *fmt = GET_RTX_FORMAT (code);
+ int i;
if (code == REG)
{
record_dead_and_set_regs (insn)
rtx insn;
{
- register rtx link;
+ rtx link;
unsigned int i;
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (GET_CODE (insn) == CALL_INSN)
{
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (call_used_regs[i])
+ if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
{
reg_last_set_value[i] = 0;
reg_last_set_mode[i] = 0;
}
last_call_cuid = mem_last_set = INSN_CUID (insn);
+
+ /* Don't bother recording what this insn does. It might set the
+ return value register, but we can't combine into a call
+ pattern anyway, so there's no point trying (and it may cause
+ a crash, if e.g. we wind up asking for last_set_value of a
+ SUBREG of the return value register). */
+ return;
}
note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
it's either a hard register, set more than once, or it's a live
at the beginning of the function, return 0.
- Because if it's not live at the beginnning of the function then the reg
+ Because if it's not live at the beginning of the function then the reg
is always set before being used (is never used without being set).
And, if it's set only once, and it's always set before use, then all
uses must have the same last value, even if it's not from this basic
static int
use_crosses_set_p (x, from_cuid)
- register rtx x;
+ rtx x;
int from_cuid;
{
- register const char *fmt;
- register int i;
- register enum rtx_code code = GET_CODE (x);
+ const char *fmt;
+ int i;
+ enum rtx_code code = GET_CODE (x);
if (code == REG)
{
{
if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
return 1;
{
/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
the address. */
- register rtx testreg = SET_DEST (x);
+ rtx testreg = SET_DEST (x);
while (GET_CODE (testreg) == SUBREG
|| GET_CODE (testreg) == ZERO_EXTRACT
/* Recursively scan the operands of this expression. */
{
- register const char *fmt = GET_RTX_FORMAT (code);
+ const char *fmt = GET_RTX_FORMAT (code);
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
mark_used_regs_combine (XEXP (x, i));
else if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = 0; j < XVECLEN (x, i); j++)
mark_used_regs_combine (XVECEXP (x, i, j));
unsigned int regno;
rtx insn;
{
- register rtx note = find_regno_note (insn, REG_DEAD, regno);
+ rtx note = find_regno_note (insn, REG_DEAD, regno);
if (note)
{
rtx to_insn;
rtx *pnotes;
{
- register const char *fmt;
- register int len, i;
- register enum rtx_code code = GET_CODE (x);
+ const char *fmt;
+ int len, i;
+ enum rtx_code code = GET_CODE (x);
if (code == REG)
{
unsigned int regno = REGNO (x);
- register rtx where_dead = reg_last_death[regno];
- register rtx before_dead, after_dead;
+ rtx where_dead = reg_last_death[regno];
+ rtx before_dead, after_dead;
/* Don't move the register if it gets killed in between from and to */
if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
{
if (fmt[i] == 'E')
{
- register int j;
+ int j;
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
to_insn, pnotes);
place = i3;
break;
+ case REG_VTABLE_REF:
+ /* ??? Should remain with *a particular* memory load. Given the
+ nature of vtable data, the last insn seems relatively safe. */
+ place = i3;
+ break;
+
case REG_NON_LOCAL_GOTO:
if (GET_CODE (i3) == JUMP_INSN)
place = i3;
else if (i2 && GET_CODE (i2) == JUMP_INSN)
place = i2;
else
- abort();
+ abort ();
break;
case REG_EH_REGION:
else
place = i2;
}
+
+ /* Don't attach REG_LABEL note to a JUMP_INSN which has
+ JUMP_LABEL already. Instead, decrement LABEL_NUSES. */
+ if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
+ {
+ if (JUMP_LABEL (place) != XEXP (note, 0))
+ abort ();
+ if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
+ LABEL_NUSES (JUMP_LABEL (place))--;
+ place = 0;
+ }
+ if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
+ {
+ if (JUMP_LABEL (place2) != XEXP (note, 0))
+ abort ();
+ if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
+ LABEL_NUSES (JUMP_LABEL (place2))--;
+ place2 = 0;
+ }
break;
case REG_NONNEG: