/* Optimize by combining instructions for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
#include "real.h"
#include "toplev.h"
#include "target.h"
+#include "optabs.h"
+#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file. */
#include "output.h"
+#include "params.h"
/* Number of attempts to combine instructions in this function. */
int last_set_label;
/* These fields are maintained in parallel with last_set_value and are
- used to store the mode in which the register was last set, te bits
+ used to store the mode in which the register was last set, the bits
that were known to be zero when it was last set, and the number of
sign bit copies it was known to have when it was last set. */
those blocks as starting points. */
static sbitmap refresh_blocks;
\f
-/* The following array records the combine_insn_cost for every insn
+/* The following array records the insn_rtx_cost for every insn
in the instruction stream. */
static int *uid_insn_cost;
{
/* Sanity check that we're replacing oldval with a CONST_INT
that is a valid sign-extension for the original mode. */
- if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
- GET_MODE (oldval)))
- abort ();
+ gcc_assert (INTVAL (newval)
+ == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));
/* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
CONST_INT is not valid, because after the replacement, the
original mode would be gone. Unfortunately, we can't tell
when do_SUBST is called to replace the operand thereof, so we
perform this test on oldval instead, checking whether an
invalid replacement took place before we got here. */
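/* For example, (subreg:QI (const_int 7) 0) and
   (zero_extend:DI (const_int 7)) are malformed RTL: a CONST_INT
   carries no mode of its own, so there is no inner mode left to
   extend or take a lowpart of. */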
- if ((GET_CODE (oldval) == SUBREG
- && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
- || (GET_CODE (oldval) == ZERO_EXTEND
- && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
- abort ();
+ gcc_assert (!(GET_CODE (oldval) == SUBREG
+ && GET_CODE (SUBREG_REG (oldval)) == CONST_INT));
+ gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
+ && GET_CODE (XEXP (oldval, 0)) == CONST_INT));
}
if (undobuf.frees)
#define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))
\f
-/* Calculate the rtx_cost of a single instruction. A return value of zero
- indicates an instruction without a known cost. */
-
-static int
-combine_insn_cost (rtx pat)
-{
- int i, cost;
- rtx set;
-
- /* Extract the single set rtx from the instruction pattern.
- We can't use single_set since we only have the pattern. */
- if (GET_CODE (pat) == SET)
- set = pat;
- else if (GET_CODE (pat) == PARALLEL)
- {
- set = NULL_RTX;
- for (i = 0; i < XVECLEN (pat, 0); i++)
- {
- rtx x = XVECEXP (pat, 0, i);
- if (GET_CODE (x) == SET)
- {
- if (set)
- return 0;
- set = x;
- }
- }
- if (!set)
- return 0;
- }
- else
- return 0;
-
- cost = rtx_cost (SET_SRC (set), SET);
- return cost > 0 ? cost : COSTS_N_INSNS (1);
-}
-
/* Subroutine of try_combine. Determine whether the combine replacement
- patterns NEWPAT and NEWI2PAT are cheaper according to combine_insn_cost
+ patterns NEWPAT and NEWI2PAT are cheaper according to insn_rtx_cost
than the original instruction sequence I1, I2 and I3. Note that I1
and/or NEWI2PAT may be NULL_RTX. This function returns false if the
costs of all instructions can be estimated, and the replacements are
int new_i2_cost, new_i3_cost;
int old_cost, new_cost;
- /* Lookup the original combine_insn_costs. */
+ /* Look up the original insn_rtx_costs. */
i2_cost = INSN_UID (i2) <= last_insn_cost
? uid_insn_cost[INSN_UID (i2)] : 0;
i3_cost = INSN_UID (i3) <= last_insn_cost
i1_cost = 0;
}
- /* Calculate the replacement combine_insn_costs. */
- new_i3_cost = combine_insn_cost (newpat);
+ /* Calculate the replacement insn_rtx_costs. */
+ new_i3_cost = insn_rtx_cost (newpat);
if (newi2pat)
{
- new_i2_cost = combine_insn_cost (newi2pat);
+ new_i2_cost = insn_rtx_cost (newi2pat);
new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
? new_i2_cost + new_i3_cost : 0;
}
new_i2_cost = 0;
}
+ if (undobuf.other_insn)
+ {
+ int old_other_cost, new_other_cost;
+
+ old_other_cost = (INSN_UID (undobuf.other_insn) <= last_insn_cost
+ ? uid_insn_cost[INSN_UID (undobuf.other_insn)] : 0);
+ new_other_cost = insn_rtx_cost (PATTERN (undobuf.other_insn));
+ if (old_other_cost > 0 && new_other_cost > 0)
+ {
+ old_cost += old_other_cost;
+ new_cost += new_other_cost;
+ }
+ else
+ old_cost = 0;
+ }
+
/* Disallow this recombination if both new_cost and old_cost are
greater than zero, and new_cost is greater than old_cost. */
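/* For example, if I2 and I3 each cost 4 and NEWPAT alone costs 12,
   old_cost is 8 and new_cost is 12, so the combination is
   rejected. */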
- if (!undobuf.other_insn
- && old_cost > 0
+ if (old_cost > 0
&& new_cost > old_cost)
{
if (dump_file)
refresh_blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (refresh_blocks);
- /* Allocate array of current combine_insn_costs. */
+ /* Allocate array of current insn_rtx_costs. */
uid_insn_cost = xcalloc (max_uid_cuid + 1, sizeof (int));
last_insn_cost = max_uid_cuid;
NULL);
#endif
- /* Record the current combine_insn_cost of this instruction. */
- uid_insn_cost[INSN_UID (insn)] = combine_insn_cost (PATTERN (insn));
+ /* Record the current insn_rtx_cost of this instruction. */
+ if (NONJUMP_INSN_P (insn))
+ uid_insn_cost[INSN_UID (insn)] = insn_rtx_cost (PATTERN (insn));
if (dump_file)
fprintf (dump_file, "insn_cost %d: %d\n",
INSN_UID (insn), uid_insn_cost[INSN_UID (insn)]);
}
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
label_tick++;
}
{
next = 0;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
label_tick++;
else if (INSN_P (insn))
/* If the linked insn has been replaced by a note, then there
is no point in pursuing this chain any further. */
- if (GET_CODE (link) == NOTE)
+ if (NOTE_P (link))
continue;
for (nextlinks = LOG_LINKS (link);
We need this special code because data flow connections
via CC0 do not get entered in LOG_LINKS. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (prev = prev_nonnote_insn (insn)) != 0
- && GET_CODE (prev) == INSN
+ && NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev)))
{
if ((next = try_combine (insn, prev,
}
/* Do the same for an insn that explicitly references CC0. */
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& (prev = prev_nonnote_insn (insn)) != 0
- && GET_CODE (prev) == INSN
+ && NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev))
&& GET_CODE (PATTERN (insn)) == SET
&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
explicitly references CC0. If so, try this insn, that insn,
and its predecessor if it sets CC0. */
for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
- if (GET_CODE (XEXP (links, 0)) == INSN
+ if (NONJUMP_INSN_P (XEXP (links, 0))
&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
- && GET_CODE (prev) == INSN
+ && NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev))
&& (next = try_combine (insn, XEXP (links, 0),
prev, &new_direct_jump_p)) != 0)
}
}
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
record_dead_and_set_regs (insn);
retry:
/* Can't merge a function call. */
|| GET_CODE (src) == CALL
/* Don't eliminate a function call argument. */
- || (GET_CODE (i3) == CALL_INSN
+ || (CALL_P (i3)
&& (find_reg_fusage (i3, USE, dest)
|| (REG_P (dest)
&& REGNO (dest) < FIRST_PSEUDO_REGISTER
/* Don't substitute into an incremented register. */
|| FIND_REG_INC_NOTE (i3, dest)
|| (succ && FIND_REG_INC_NOTE (succ, dest))
+ /* Don't substitute into a non-local goto; this confuses the CFG. */
+ || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
#if 0
/* Don't combine the end of a libcall into anything. */
/* ??? This gives worse code, and appears to be unnecessary, since no
if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
{
/* Don't substitute for a register intended as a clobberable
- operand. */
+ operand. */
rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
if (rtx_equal_p (reg, dest))
return 0;
/* If the clobber represents an earlyclobber operand, we must not
substitute an expression containing the clobbered register.
- As we do not analyse the constraint strings here, we have to
+ As we do not analyze the constraint strings here, we have to
make the conservative assumption. However, if the register is
a fixed hard reg, the clobber cannot represent any operand;
we leave it up to the machine description to either accept or
#ifdef AUTO_INC_DEC
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_INC
- && (GET_CODE (i3) == JUMP_INSN
+ && (JUMP_P (i3)
|| reg_used_between_p (XEXP (link, 0), insn, i3)
|| reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
return 0;
but that would be much slower, and this ought to be equivalent. */
p = prev_nonnote_insn (insn);
- if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
+ if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
&& ! all_adjacent)
return 0;
#endif
{
/* New patterns for I3 and I2, respectively. */
rtx newpat, newi2pat = 0;
+ rtvec newpat_vec_with_clobbers = 0;
int substed_i2 = 0, substed_i1 = 0;
/* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
int added_sets_1, added_sets_2;
int i3_subst_into_i2 = 0;
/* Notes that I1, I2 or I3 is a MULT operation. */
int have_mult = 0;
+ int swap_i2i3 = 0;
int maxreg;
rtx temp;
where I2 and I3 are adjacent to avoid making difficult register
usage tests. */
- if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
+ if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
&& REG_P (SET_SRC (PATTERN (i3)))
&& REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
&& find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
{
/* We don't handle the case of the target word being wider
than a host wide int. */
- if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD)
- abort ();
+ gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD);
lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
lo |= (INTVAL (SET_SRC (PATTERN (i3)))
else
/* We don't handle the case of the higher word not fitting
entirely in either hi or lo. */
- abort ();
+ gcc_unreachable ();
combine_merges++;
subst_insn = i3;
/* Note which hard regs this insn has as inputs. */
mark_used_regs_combine (newpat);
+ /* If recog_for_combine fails, it strips existing clobbers. If we'll
+ consider splitting this pattern, we might need these clobbers. */
+ if (i1 && GET_CODE (newpat) == PARALLEL
+ && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
+ {
+ int len = XVECLEN (newpat, 0);
+
+ newpat_vec_with_clobbers = rtvec_alloc (len);
+ for (i = 0; i < len; i++)
+ RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
+ }
+
/* Is the result of combination a valid instruction? */
insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
}
}
+ /* If recog_for_combine has discarded clobbers, try to use them
+ again for the split. */
+ if (m_split == 0 && newpat_vec_with_clobbers)
+ m_split
+ = split_insns (gen_rtx_PARALLEL (VOIDmode,
+ newpat_vec_with_clobbers), i3);
+
if (m_split && NEXT_INSN (m_split) == NULL_RTX)
{
m_split = PATTERN (m_split);
SUBST (*split, newdest);
i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
+ /* recog_for_combine might have added CLOBBERs to newi2pat.
+ Make sure NEWPAT does not depend on the clobbered regs. */
+ if (GET_CODE (newi2pat) == PARALLEL)
+ for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
+ if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
+ {
+ rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
+ if (reg_overlap_mentioned_p (reg, newpat))
+ {
+ undo_all ();
+ return 0;
+ }
+ }
+
/* If the split point was a MULT and we didn't have one before,
don't use one now. */
if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
if (insn_code_number >= 0)
- {
- rtx insn;
- rtx link;
-
- /* If we will be able to accept this, we have made a change to the
- destination of I3. This requires us to do a few adjustments. */
- PATTERN (i3) = newpat;
- adjust_for_new_dest (i3);
-
- /* I3 now uses what used to be its destination and which is
- now I2's destination. That means we need a LOG_LINK from
- I3 to I2. But we used to have one, so we still will.
-
- However, some later insn might be using I2's dest and have
- a LOG_LINK pointing at I3. We must remove this link.
- The simplest way to remove the link is to point it at I1,
- which we know will be a NOTE. */
-
- for (insn = NEXT_INSN (i3);
- insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
- || insn != BB_HEAD (this_basic_block->next_bb));
- insn = NEXT_INSN (insn))
- {
- if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
- {
- for (link = LOG_LINKS (insn); link;
- link = XEXP (link, 1))
- if (XEXP (link, 0) == i3)
- XEXP (link, 0) = i1;
-
- break;
- }
- }
- }
+ swap_i2i3 = 1;
}
/* Similarly, check for a case where we have a PARALLEL of two independent
they are adjacent to each other or not. */
{
rtx p = prev_nonnote_insn (i3);
- if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
+ if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
&& sets_cc0_p (newi2pat))
{
undo_all ();
}
#endif
- /* Only allow this combination if combine_insn_costs reports that the
+ /* Only allow this combination if insn_rtx_costs reports that the
replacement instructions are cheaper than the originals. */
if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat))
{
/* We now know that we can do this combination. Merge the insns and
update the status of registers and LOG_LINKS. */
+ if (swap_i2i3)
+ {
+ rtx insn;
+ rtx link;
+ rtx ni2dest;
+
+ /* I3 now uses what used to be its destination and which is now
+ I2's destination. This requires us to do a few adjustments. */
+ PATTERN (i3) = newpat;
+ adjust_for_new_dest (i3);
+
+ /* We need a LOG_LINK from I3 to I2. But we used to have one,
+ so we still will.
+
+ However, some later insn might be using I2's dest and have
+ a LOG_LINK pointing at I3. We must remove this link.
+ The simplest way to remove the link is to point it at I1,
+ which we know will be a NOTE. */
+
+ /* newi2pat is usually a SET here; however, recog_for_combine might
+ have added some clobbers. */
+ if (GET_CODE (newi2pat) == PARALLEL)
+ ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
+ else
+ ni2dest = SET_DEST (newi2pat);
+
+ for (insn = NEXT_INSN (i3);
+ insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
+ || insn != BB_HEAD (this_basic_block->next_bb));
+ insn = NEXT_INSN (insn))
+ {
+ if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
+ {
+ for (link = LOG_LINKS (insn); link;
+ link = XEXP (link, 1))
+ if (XEXP (link, 0) == i3)
+ XEXP (link, 0) = i1;
+
+ break;
+ }
+ }
+ }
+
{
rtx i3notes, i2notes, i1notes = 0;
rtx i3links, i2links, i1links = 0;
INSN_CODE (i3) = insn_code_number;
PATTERN (i3) = newpat;
- if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
+ if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
{
rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
mark_jump_label (PATTERN (i3), i3, 0);
if ((temp = next_nonnote_insn (i3)) == NULL_RTX
- || GET_CODE (temp) != BARRIER)
+ || !BARRIER_P (temp))
emit_barrier_after (i3);
}
*new_direct_jump_p = 1;
if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
- || GET_CODE (temp) != BARRIER)
+ || !BARRIER_P (temp))
emit_barrier_after (undobuf.other_insn);
}
if (seq
&& NEXT_INSN (seq) != NULL_RTX
&& NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
- && GET_CODE (seq) == INSN
+ && NONJUMP_INSN_P (seq)
&& GET_CODE (PATTERN (seq)) == SET
&& SET_DEST (PATTERN (seq)) == reg
&& ! reg_mentioned_p (reg,
SET_SRC (PATTERN (seq)))
- && GET_CODE (NEXT_INSN (seq)) == INSN
+ && NONJUMP_INSN_P (NEXT_INSN (seq))
&& GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
&& SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
&& memory_address_p (GET_MODE (x),
/* If this is a register being set, ignore it. */
new = XEXP (x, i);
if (in_dest
- && (code == SUBREG || code == STRICT_LOW_PART
- || code == ZERO_EXTRACT)
&& i == 0
- && REG_P (new))
+ && (((code == SUBREG || code == ZERO_EXTRACT)
+ && REG_P (new))
+ || code == STRICT_LOW_PART))
;
else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
{
x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
new, GET_MODE (XEXP (x, 0)));
- if (! x)
- abort ();
+ gcc_assert (x);
}
else
SUBST (XEXP (x, i), new);
rtx op1 = XEXP (x, 1);
int len;
- if (GET_CODE (op1) != PARALLEL)
- abort ();
+ gcc_assert (GET_CODE (op1) == PARALLEL);
len = XVECLEN (op1, 0);
if (len == 1
&& GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
rtx f = make_compound_operation (false_rtx, SET);
rtx cond_op0 = XEXP (cond, 0);
rtx cond_op1 = XEXP (cond, 1);
- enum rtx_code op = NIL, extend_op = NIL;
+ enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
enum machine_mode m = mode;
rtx z = 0, c1 = NULL_RTX;
temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
temp = gen_binary (op, m, gen_lowpart (m, z), temp);
- if (extend_op != NIL)
+ if (extend_op != UNKNOWN)
temp = simplify_gen_unary (extend_op, mode, temp, m);
return temp;
if (GET_CODE (src) == COMPARE)
op0 = XEXP (src, 0), op1 = XEXP (src, 1);
else
- op0 = src, op1 = const0_rtx;
+ op0 = src, op1 = CONST0_RTX (GET_MODE (src));
tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
op0, op1);
zero_extend to avoid the reload that would otherwise be required. */
if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
- && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
+ && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
&& SUBREG_BYTE (src) == 0
&& (GET_MODE_SIZE (GET_MODE (src))
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
if (GET_CODE (op0) == AND)
{
- x = apply_distributive_law
+ rtx tmp = apply_distributive_law
(gen_binary (AND, mode,
gen_binary (IOR, mode, XEXP (op0, 0), op1),
gen_binary (IOR, mode, XEXP (op0, 1),
copy_rtx (op1))));
- if (GET_CODE (x) != IOR)
- return x;
+ if (GET_CODE (tmp) != IOR
+ && rtx_cost (tmp, SET) < rtx_cost (x, SET))
+ return tmp;
}
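/* For example, (ior (and A B) C) distributes to
   (and (ior A C) (ior B C)); the rewritten form is kept only when
   rtx_cost reports it to be strictly cheaper. */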
if (GET_CODE (op1) == AND)
{
- x = apply_distributive_law
+ rtx tmp = apply_distributive_law
(gen_binary (AND, mode,
gen_binary (IOR, mode, XEXP (op1, 0), op0),
gen_binary (IOR, mode, XEXP (op1, 1),
copy_rtx (op0))));
- if (GET_CODE (x) != IOR)
- return x;
+ if (GET_CODE (tmp) != IOR
+ && rtx_cost (tmp, SET) < rtx_cost (x, SET))
+ return tmp;
}
/* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
break;
default:
- abort ();
+ gcc_unreachable ();
}
return x;
case ZERO_EXTRACT:
unsignedp = 1;
+
+ /* ... fall through ... */
+
case SIGN_EXTRACT:
/* If the operand is a CLOBBER, just return it. */
if (GET_CODE (XEXP (x, 0)) == CLOBBER)
&& (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
return gen_lowpart (mode, x);
- /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
- MASK are already known to be zero in X, we need not do anything. */
- if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0)
- return x;
-
switch (code)
{
case CLOBBER:
/* We can only change the mode of the shift if we can do arithmetic
in the mode of the shift and INNER_MASK is no wider than the
- width of OP_MODE. */
- if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
- || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0)
+ width of X's mode. */
+ if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
op_mode = GET_MODE (x);
inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);
in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
which is equal to STORE_FLAG_VALUE. */
if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
+ && GET_MODE (XEXP (x, 0)) == mode
&& exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
&& (nonzero_bits (XEXP (x, 0), mode)
== (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
return x;
}
- else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
- && subreg_lowpart_p (XEXP (src, 0))
- && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
- < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
- && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
- && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
- && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
- && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
+ if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
+ && subreg_lowpart_p (XEXP (src, 0))
+ && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
+ < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
+ && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
+ && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
+ && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
+ && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
{
assign = make_extraction (VOIDmode, dest, 0,
XEXP (SUBREG_REG (XEXP (src, 0)), 1),
/* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
one-bit field. */
- else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
- && XEXP (XEXP (src, 0), 0) == const1_rtx
- && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
+ if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
+ && XEXP (XEXP (src, 0), 0) == const1_rtx
+ && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
{
assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
1, 1, 1, 0);
return x;
}
+ /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
+ SRC is an AND with all bits of that field set, then we can discard
+ the AND. */
+ if (GET_CODE (dest) == ZERO_EXTRACT
+ && GET_CODE (XEXP (dest, 1)) == CONST_INT
+ && GET_CODE (src) == AND
+ && GET_CODE (XEXP (src, 1)) == CONST_INT)
+ {
+ HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
+ unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
+ unsigned HOST_WIDE_INT ze_mask;
+
+ if (width >= HOST_BITS_PER_WIDE_INT)
+ ze_mask = -1;
+ else
+ ze_mask = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
+
+ /* Complete overlap. We can remove the source AND. */
+ if ((and_mask & ze_mask) == ze_mask)
+ return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
+
+ /* Partial overlap. We can reduce the source AND. */
+ if ((and_mask & ze_mask) != and_mask)
+ {
+ mode = GET_MODE (src);
+ src = gen_rtx_AND (mode, XEXP (src, 0),
+ gen_int_mode (and_mask & ze_mask, mode));
+ return gen_rtx_SET (VOIDmode, dest, src);
+ }
+ }
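+ /* For example, writing to (zero_extract:SI D (const_int 8) POS)
+ gives ze_mask 0xff: an AND mask of 0x1ff covers the whole field
+ and is dropped, while a mask of 0x10f is reduced to 0x0f. */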
+
/* The other case we handle is assignments into a constant-position
field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
a mask that has all one bits except for a group of zero bits and
/* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
to VAROP and return the new constant. */
if (GET_CODE (varop) == CONST_INT)
- return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode));
+ return gen_int_mode (INTVAL (varop) & constop, mode);
/* See what bits may be nonzero in VAROP. Unlike the general case of
a call to nonzero_bits, here we don't care about bits outside
the width of this mode matter. It is assumed that the width of this mode
is smaller than or equal to HOST_BITS_PER_WIDE_INT.
- If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
+ If *POP0 or OP1 is UNKNOWN, it means no operation is required. Only NEG, PLUS,
IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
result is simply *PCONST0.
if (op0 == AND)
const1 &= const0;
- /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
+ /* If OP0 or OP1 is UNKNOWN, this is easy. Similarly if they are the same or
if OP0 is SET. */
- if (op1 == NIL || op0 == SET)
+ if (op1 == UNKNOWN || op0 == SET)
return 1;
- else if (op0 == NIL)
+ else if (op0 == UNKNOWN)
op0 = op1, const0 = const1;
else if (op0 == op1)
const0 += const1;
break;
case NEG:
- op0 = NIL;
+ op0 = UNKNOWN;
break;
default:
break;
const0 &= GET_MODE_MASK (mode);
if (const0 == 0
&& (op0 == IOR || op0 == XOR || op0 == PLUS))
- op0 = NIL;
+ op0 = UNKNOWN;
else if (const0 == 0 && op0 == AND)
op0 = SET;
else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
&& op0 == AND)
- op0 = NIL;
+ op0 = UNKNOWN;
/* ??? Slightly redundant with the above mask, but not entirely.
Moving this above means we'd have to sign-extend the mode mask
unsigned int mode_words
= (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
/* We form (outer_op (code varop count) (outer_const)). */
- enum rtx_code outer_op = NIL;
+ enum rtx_code outer_op = UNKNOWN;
HOST_WIDE_INT outer_const = 0;
rtx const_rtx;
int complement_p = 0;
varop = XEXP (varop, 0);
continue;
}
+
+ /* Check for 'PLUS signbit', which is the canonical form of 'XOR
+ signbit', and attempt to change the PLUS to an XOR and move it to
+ the outer operation as is done above in the AND/IOR/XOR case
+ for logical shifts. See the logical-shift handling above for
+ the reasoning behind this transformation. */
+ if (code == LSHIFTRT
+ && GET_CODE (XEXP (varop, 1)) == CONST_INT
+ && mode_signbit_p (result_mode, XEXP (varop, 1))
+ && (new = simplify_binary_operation (code, result_mode,
+ XEXP (varop, 1),
+ GEN_INT (count))) != 0
+ && GET_CODE (new) == CONST_INT
+ && merge_outer_ops (&outer_op, &outer_const, XOR,
+ INTVAL (new), result_mode, &complement_p))
+ {
+ varop = XEXP (varop, 0);
+ continue;
+ }
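+ /* For example, (lshiftrt:SI (plus:SI X (const_int 0x80000000))
+ (const_int 31)) becomes (xor:SI (lshiftrt:SI X (const_int 31))
+ (const_int 1)), since adding the sign bit can only flip it. */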
+
break;
case MINUS:
/* We have now finished analyzing the shift. The result should be
a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
- OUTER_OP is non-NIL, it is an operation that needs to be applied
+ OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
to the result of the shift. OUTER_CONST is the relevant constant,
but we must turn off all bits turned off in the shift.
for the outer operation. So try to do the simplification
recursively. */
- if (outer_op != NIL && GET_CODE (x) == code
+ if (outer_op != UNKNOWN && GET_CODE (x) == code
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
INTVAL (XEXP (x, 1)));
if (complement_p)
x = simplify_gen_unary (NOT, result_mode, x, result_mode);
- if (outer_op != NIL)
+ if (outer_op != UNKNOWN)
{
if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
outer_const = trunc_int_for_mode (outer_const, result_mode);
An insn containing that will not be recognized. */
static rtx
-gen_lowpart_for_combine (enum machine_mode mode, rtx x)
+gen_lowpart_for_combine (enum machine_mode omode, rtx x)
{
+ enum machine_mode imode = GET_MODE (x);
+ unsigned int osize = GET_MODE_SIZE (omode);
+ unsigned int isize = GET_MODE_SIZE (imode);
rtx result;
- if (GET_MODE (x) == mode)
+ if (omode == imode)
return x;
- /* Return identity if this is a CONST or symbolic
- reference. */
- if (mode == Pmode
+ /* Return identity if this is a CONST or symbolic reference. */
+ if (omode == Pmode
&& (GET_CODE (x) == CONST
|| GET_CODE (x) == SYMBOL_REF
|| GET_CODE (x) == LABEL_REF))
/* We can only support MODE being wider than a word if X is a
constant integer or has a mode the same size. */
-
- if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
- && ! ((GET_MODE (x) == VOIDmode
+ if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
+ && ! ((imode == VOIDmode
&& (GET_CODE (x) == CONST_INT
|| GET_CODE (x) == CONST_DOUBLE))
- || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
- return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
+ || isize == osize))
+ goto fail;
/* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
won't know what to do. So we will strip off the SUBREG here and
if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
{
x = SUBREG_REG (x);
- if (GET_MODE (x) == mode)
+
+ /* In case we fall through to the address adjustments further
+ below, update the known mode and size of X (imode and isize),
+ since we just changed X. */
+ imode = GET_MODE (x);
+
+ if (imode == omode)
return x;
+
+ isize = GET_MODE_SIZE (imode);
}
- result = gen_lowpart_common (mode, x);
+ result = gen_lowpart_common (omode, x);
+
#ifdef CANNOT_CHANGE_MODE_CLASS
- if (result != 0
- && GET_CODE (result) == SUBREG
- && REG_P (SUBREG_REG (result))
- && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER)
- bitmap_set_bit (&subregs_of_mode, REGNO (SUBREG_REG (result))
- * MAX_MACHINE_MODE
- + GET_MODE (result));
+ if (result != 0 && GET_CODE (result) == SUBREG)
+ record_subregs_of_mode (result);
#endif
if (result)
/* Refuse to work on a volatile memory ref or one with a mode-dependent
address. */
if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
- return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
+ goto fail;
/* If we want to refer to something bigger than the original memref,
generate a paradoxical subreg instead. That will force a reload
of the original memref X. */
- if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
- return gen_rtx_SUBREG (mode, x, 0);
+ if (isize < osize)
+ return gen_rtx_SUBREG (omode, x, 0);
if (WORDS_BIG_ENDIAN)
- offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
- - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
+ offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
+ /* Adjust the address so that the address-after-the-data is
+ unchanged. */
if (BYTES_BIG_ENDIAN)
- {
- /* Adjust the address so that the address-after-the-data is
- unchanged. */
- offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
- - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
- }
+ offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
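+ /* For example, the HImode lowpart of an SImode mem on a
+ big-endian target with 4-byte words lands at byte offset 2,
+ leaving the address just past the data unchanged. */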
- return adjust_address_nv (x, mode, offset);
+ return adjust_address_nv (x, omode, offset);
}
/* If X is a comparison operator, rewrite it in a new mode. This
probably won't match, but may allow further simplifications. */
else if (COMPARISON_P (x))
- return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
+ return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
/* If we couldn't simplify X any other way, just enclose it in a
SUBREG. Normally, this SUBREG won't match, but some patterns may
{
int offset = 0;
rtx res;
- enum machine_mode sub_mode = GET_MODE (x);
- offset = subreg_lowpart_offset (mode, sub_mode);
- if (sub_mode == VOIDmode)
+ offset = subreg_lowpart_offset (omode, imode);
+ if (imode == VOIDmode)
{
- sub_mode = int_mode_for_mode (mode);
- x = gen_lowpart_common (sub_mode, x);
- if (x == 0)
- return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
+ imode = int_mode_for_mode (omode);
+ x = gen_lowpart_common (imode, x);
+ if (x == NULL)
+ goto fail;
}
- res = simplify_gen_subreg (mode, x, sub_mode, offset);
+ res = simplify_gen_subreg (omode, x, imode, offset);
if (res)
return res;
- return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
}
+
+ fail:
+ return gen_rtx_CLOBBER (imode, const0_rtx);
}
\f
/* These routines make binary and unary operations by first seeing if they
break;
case GEU:
- /* >= C is equivalent to < (C - 1). */
+ /* >= C is equivalent to > (C - 1). */
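+ /* For example, (geu:SI X (const_int 5)) becomes
+ (gtu:SI X (const_int 4)). */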
if (const_op > 1)
{
const_op -= 1;
break;
case SIGN_EXTEND:
- /* Can simplify (compare (zero/sign_extend FOO) CONST)
- to (compare FOO CONST) if CONST fits in FOO's mode and we
- are either testing inequality or have an unsigned comparison
- with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
- if (! unsigned_comparison_p
- && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
- <= HOST_BITS_PER_WIDE_INT)
+ /* Can simplify (compare (zero/sign_extend FOO) CONST) to
+ (compare FOO CONST) if CONST fits in FOO's mode and we
+ are either testing inequality or have an unsigned
+ comparison with ZERO_EXTEND or a signed comparison with
+ SIGN_EXTEND. But don't do it if we don't have a compare
+ insn of the given mode, since we'd have to revert it
+ later on, and then we wouldn't know whether to sign- or
+ zero-extend. */
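+ /* For example, (compare (sign_extend:SI (reg:QI R)) (const_int 63))
+ can be narrowed to (compare (reg:QI R) (const_int 63)) when QImode
+ has a compare insn, since 63 fits in QImode's positive range. */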
+ mode = GET_MODE (XEXP (op0, 0));
+ if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
+ && ! unsigned_comparison_p
+ && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
&& ((unsigned HOST_WIDE_INT) const_op
- < (((unsigned HOST_WIDE_INT) 1
- << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
+ < (((unsigned HOST_WIDE_INT) 1
+ << (GET_MODE_BITSIZE (mode) - 1))))
+ && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
{
op0 = XEXP (op0, 0);
continue;
break;
case SUBREG:
- /* Check for the case where we are comparing A - C1 with C2,
- both constants are smaller than 1/2 the maximum positive
- value in MODE, and the comparison is equality or unsigned.
- In that case, if A is either zero-extended to MODE or has
- sufficient sign bits so that the high-order bit in MODE
- is a copy of the sign in the inner mode, we can prove that it is
- safe to do the operation in the wider mode. This simplifies
- many range checks. */
+ /* Check for the case where we are comparing A - C1 with C2, that is
+
+ (subreg:MODE (plus (A) (-C1))) op (C2)
+
+ with C1 a constant, and try to lift the SUBREG, i.e. to do the
+ comparison in the wider mode. One of the following two conditions
+ must be true in order for this to be valid:
+
+ 1. The mode extension results in the same bit pattern being added
+ on both sides and the comparison is equality or unsigned. As
+ C2 has been truncated to fit in MODE, the pattern can only be
+ all 0s or all 1s.
+
+ 2. The mode extension results in the sign bit being copied on
+ each side.
+
+ The difficulty here is that we have predicates for A but not for
+ (A - C1) so we need to check that C1 is within proper bounds so
+ as to perturb A as little as possible. */
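+ /* For example, (eq (subreg:QI (plus:SI A (const_int -1)) 0)
+ (const_int 10)) can be lifted to
+ (eq (plus:SI A (const_int -1)) (const_int 10)) when the upper
+ bits of A are known to be zero (condition 1). */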
if (mode_width <= HOST_BITS_PER_WIDE_INT
&& subreg_lowpart_p (op0)
+ && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
&& GET_CODE (SUBREG_REG (op0)) == PLUS
- && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
- && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
- && (-INTVAL (XEXP (SUBREG_REG (op0), 1))
- < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2))
- && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
- && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
- GET_MODE (SUBREG_REG (op0)))
- & ~GET_MODE_MASK (mode))
- || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
- GET_MODE (SUBREG_REG (op0)))
- > (unsigned int)
- (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
- - GET_MODE_BITSIZE (mode)))))
- {
- op0 = SUBREG_REG (op0);
- continue;
+ && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
+ {
+ enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
+ rtx a = XEXP (SUBREG_REG (op0), 0);
+ HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
+
+ if ((c1 > 0
+ && (unsigned HOST_WIDE_INT) c1
+ < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
+ && (equality_comparison_p || unsigned_comparison_p)
+ /* (A - C1) zero-extends if it is positive and sign-extends
+ if it is negative, C2 both zero- and sign-extends. */
+ && ((0 == (nonzero_bits (a, inner_mode)
+ & ~GET_MODE_MASK (mode))
+ && const_op >= 0)
+ /* (A - C1) sign-extends if it is positive and 1-extends
+ if it is negative, C2 both sign- and 1-extends. */
+ || (num_sign_bit_copies (a, inner_mode)
+ > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
+ - mode_width)
+ && const_op < 0)))
+ || ((unsigned HOST_WIDE_INT) c1
+ < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
+ /* (A - C1) always sign-extends, like C2. */
+ && num_sign_bit_copies (a, inner_mode)
+ > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
+ - mode_width - 1)))
+ {
+ op0 = SUBREG_REG (op0);
+ continue;
+ }
}
/* If the inner mode is narrower and we are extracting the low part,
/* ... fall through ... */
case ZERO_EXTEND:
- if ((unsigned_comparison_p || equality_comparison_p)
- && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
- <= HOST_BITS_PER_WIDE_INT)
- && ((unsigned HOST_WIDE_INT) const_op
- < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
+ mode = GET_MODE (XEXP (op0, 0));
+ if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
+ && (unsigned_comparison_p || equality_comparison_p)
+ && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
+ && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
{
op0 = XEXP (op0, 0);
continue;
those bits.
3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
- NIL. In that case we know those bits are zeros or ones. We must
+ UNKNOWN. In that case we know those bits are zeros or ones. We must
also be sure that they are the same as the upper bits of op1.
We can never remove a SUBREG for a non-equality comparison because
return gen_binary (reversed_code, mode, op0, op1);
}
\f
+/* Utility function for record_value_for_reg. Count number of
+ rtxs in X. */
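+/* For example, (plus:SI (reg:SI A) (reg:SI A)) counts as
+   1 + 2 * 1 = 3, the size the expression would have if the shared
+   operand were duplicated outright. */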
+static int
+count_rtxs (rtx x)
+{
+ enum rtx_code code = GET_CODE (x);
+ const char *fmt;
+ int i, ret = 1;
+
+ if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
+ || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
+ {
+ rtx x0 = XEXP (x, 0);
+ rtx x1 = XEXP (x, 1);
+
+ if (x0 == x1)
+ return 1 + 2 * count_rtxs (x0);
+
+ if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
+ || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
+ && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
+ return 2 + 2 * count_rtxs (x0)
+ + count_rtxs (x0 == XEXP (x1, 0)
+ ? XEXP (x1, 1) : XEXP (x1, 0));
+
+ if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
+ || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
+ && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
+ return 2 + 2 * count_rtxs (x1)
+ + count_rtxs (x1 == XEXP (x0, 0)
+ ? XEXP (x0, 1) : XEXP (x0, 0));
+ }
+
+ fmt = GET_RTX_FORMAT (code);
+ for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
+ if (fmt[i] == 'e')
+ ret += count_rtxs (XEXP (x, i));
+
+ return ret;
+}
+\f
/* Utility function for following routine. Called when X is part of a value
being stored into last_set_value. Sets last_set_table_tick
for each register mentioned. Similar to mention_regs in cse.c */
&& GET_CODE (XEXP (tem, 0)) == CLOBBER
&& GET_CODE (XEXP (tem, 1)) == CLOBBER)
tem = XEXP (tem, 0);
+ else if (count_occurrences (value, reg, 1) >= 2)
+ {
+ /* If there are two or more occurrences of REG in VALUE,
+ prevent the value from growing too much. */
+ if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
+ tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
+ }
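+ /* Otherwise replace_rtx below would substitute a copy of TEM for
+ each occurrence of REG, and repeated recordings could grow the
+ saved value without bound. */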
value = replace_rtx (copy_rtx (value), reg, tem);
}
record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
/* If this is a memory reference, make sure that there were
no stores after it that might have clobbered the value. We don't
have alias info, so we assume any store invalidates it. */
- else if (MEM_P (x) && ! RTX_UNCHANGING_P (x)
+ else if (MEM_P (x) && !MEM_READONLY_P (x)
&& INSN_CUID (insn) <= mem_last_set)
{
if (replace)
/* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
beginning of function. */
- for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
+ for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
insn = prev_nonnote_insn (insn))
{
note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
while (GET_CODE (testreg) == SUBREG
|| GET_CODE (testreg) == ZERO_EXTRACT
- || GET_CODE (testreg) == SIGN_EXTRACT
|| GET_CODE (testreg) == STRICT_LOW_PART)
testreg = XEXP (testreg, 0);
/* Just get rid of this note, as it is unused later anyway. */
break;
- case REG_VTABLE_REF:
- /* ??? Should remain with *a particular* memory load. Given the
- nature of vtable data, the last insn seems relatively safe. */
- place = i3;
- break;
-
case REG_NON_LOCAL_GOTO:
- if (GET_CODE (i3) == JUMP_INSN)
+ if (JUMP_P (i3))
place = i3;
- else if (i2 && GET_CODE (i2) == JUMP_INSN)
- place = i2;
else
- abort ();
+ {
+ gcc_assert (i2 && JUMP_P (i2));
+ place = i2;
+ }
break;
case REG_EH_REGION:
/* These notes must remain with the call or trapping instruction. */
- if (GET_CODE (i3) == CALL_INSN)
+ if (CALL_P (i3))
place = i3;
- else if (i2 && GET_CODE (i2) == CALL_INSN)
+ else if (i2 && CALL_P (i2))
place = i2;
- else if (flag_non_call_exceptions)
+ else
{
+ gcc_assert (flag_non_call_exceptions);
if (may_trap_p (i3))
place = i3;
else if (i2 && may_trap_p (i2))
can now prove that the instructions can't trap. Drop the
note in this case. */
}
- else
- abort ();
break;
- case REG_ALWAYS_RETURN:
case REG_NORETURN:
case REG_SETJMP:
/* These notes must remain with the call. It should not be
possible for both I2 and I3 to be a call. */
- if (GET_CODE (i3) == CALL_INSN)
+ if (CALL_P (i3))
place = i3;
- else if (i2 && GET_CODE (i2) == CALL_INSN)
- place = i2;
else
- abort ();
+ {
+ gcc_assert (i2 && CALL_P (i2));
+ place = i2;
+ }
break;
case REG_UNUSED:
place = i2;
}
- /* Don't attach REG_LABEL note to a JUMP_INSN which has
- JUMP_LABEL already. Instead, decrement LABEL_NUSES. */
- if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
+ /* Don't attach REG_LABEL note to a JUMP_INSN. Add
+ a JUMP_LABEL instead or decrement LABEL_NUSES. */
+ if (place && JUMP_P (place))
{
- if (JUMP_LABEL (place) != XEXP (note, 0))
- abort ();
- if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
- LABEL_NUSES (JUMP_LABEL (place))--;
+ rtx label = JUMP_LABEL (place);
+
+ if (!label)
+ JUMP_LABEL (place) = XEXP (note, 0);
+ else
+ {
+ gcc_assert (label == XEXP (note, 0));
+ if (LABEL_P (label))
+ LABEL_NUSES (label)--;
+ }
place = 0;
}
- if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
+ if (place2 && JUMP_P (place2))
{
- if (JUMP_LABEL (place2) != XEXP (note, 0))
- abort ();
- if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
- LABEL_NUSES (JUMP_LABEL (place2))--;
+ rtx label = JUMP_LABEL (place2);
+
+ if (!label)
+ JUMP_LABEL (place2) = XEXP (note, 0);
+ else
+ {
+ gcc_assert (label == XEXP (note, 0));
+ if (LABEL_P (label))
+ LABEL_NUSES (label)--;
+ }
place2 = 0;
}
break;
/* If the insn previously containing this note still exists,
put it back where it was. Otherwise move it to the previous
insn. Adjust the corresponding REG_LIBCALL note. */
- if (GET_CODE (from_insn) != NOTE)
+ if (!NOTE_P (from_insn))
place = from_insn;
else
{
case REG_LIBCALL:
/* This is handled similarly to REG_RETVAL. */
- if (GET_CODE (from_insn) != NOTE)
+ if (!NOTE_P (from_insn))
place = from_insn;
else
{
use of A and put the death note there. */
if (from_insn
- && GET_CODE (from_insn) == CALL_INSN
+ && CALL_P (from_insn)
&& find_reg_fusage (from_insn, USE, XEXP (note, 0)))
place = from_insn;
else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
}
}
else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
- || (GET_CODE (tem) == CALL_INSN
+ || (CALL_P (tem)
&& find_reg_fusage (tem, USE, XEXP (note, 0))))
{
place = tem;
default:
/* Any other notes should not be present at this point in the
compilation. */
- abort ();
+ gcc_unreachable ();
}
if (place)
replace I3, I2, and I1 by I3 and I2. But in that case the
destination of I2 also remains unchanged. */
- if (GET_CODE (XEXP (link, 0)) == NOTE
+ if (NOTE_P (XEXP (link, 0))
|| (set = single_set (XEXP (link, 0))) == 0)
continue;
reg = SET_DEST (set);
while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
- || GET_CODE (reg) == SIGN_EXTRACT
|| GET_CODE (reg) == STRICT_LOW_PART)
reg = XEXP (reg, 0);
place = insn;
break;
}
- else if (GET_CODE (insn) == CALL_INSN
+ else if (CALL_P (insn)
&& find_reg_fusage (insn, USE, reg))
{
place = insn;
insn_cuid (rtx insn)
{
while (insn != 0 && INSN_UID (insn) > max_uid_cuid
- && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
+ && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
insn = NEXT_INSN (insn);
- if (INSN_UID (insn) > max_uid_cuid)
- abort ();
+ gcc_assert (INSN_UID (insn) <= max_uid_cuid);
return INSN_CUID (insn);
}