switch (code)
{
case MEM:
- return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
+ return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
- case QUEUED:
- return 1;
-
- case ADDRESSOF:
case CONST:
case CONST_INT:
case CONST_DOUBLE:
/* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
/* The arg pointer varies if it is not a fixed register. */
- || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
- || RTX_UNCHANGING_P (x))
+ || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
/* ??? When call-clobbered, the value is stable modulo the restore
switch (code)
{
case MEM:
- return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
-
- case QUEUED:
- return 1;
+ return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
case CONST:
case CONST_INT:
case LABEL_REF:
return 0;
- case ADDRESSOF:
- /* This will resolve to some offset from the frame pointer. */
- return 0;
-
case REG:
/* Note that we have to test for the actual rtx used for the frame
and arg pointers and not just the register number in case we have
case LABEL_REF:
return 0;
- case ADDRESSOF:
- /* This will resolve to some offset from the frame pointer. */
- return 0;
-
case REG:
/* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
case LABEL_REF:
return true;
- case ADDRESSOF:
- /* This will resolve to some offset from the frame pointer. */
- return true;
-
case REG:
/* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
{
if (INSN_P (x))
{
- if (GET_CODE (x) == CALL_INSN)
+ if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
if (beg == end)
return 0;
for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
- if (GET_CODE (p) == CODE_LABEL)
+ if (LABEL_P (p))
return 0;
return 1;
}
{
rtx p;
for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
- if (GET_CODE (p) == JUMP_INSN)
+ if (JUMP_P (p))
return 0;
return 1;
}
for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& (find_reg_fusage (insn, USE, reg)
|| find_reg_fusage (insn, CLOBBER, reg)))))
return 1;
for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (reg_referenced_p (reg, PATTERN (insn))
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& find_reg_fusage (insn, USE, reg))))
return 1;
return 0;
check if a side-effect of the insn clobbers REG. */
if (INSN_P (insn)
&& (FIND_REG_INC_NOTE (insn, reg)
- || (GET_CODE (insn) == CALL_INSN
- /* We'd like to test call_used_regs here, but rtlanal.c can't
- reference that variable due to its use in genattrtab. So
- we'll just be more conservative.
-
- ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
- information holds all clobbered registers. */
+ || (CALL_P (insn)
&& ((REG_P (reg)
- && REGNO (reg) < FIRST_PSEUDO_REGISTER)
+ && REGNO (reg) < FIRST_PSEUDO_REGISTER
+ && TEST_HARD_REG_BIT (regs_invalidated_by_call,
+ REGNO (reg)))
|| MEM_P (reg)
|| find_reg_fusage (insn, CLOBBER, reg)))))
return 1;
return 1;
case MEM:
- if (RTX_UNCHANGING_P (x))
+ if (MEM_READONLY_P (x))
return 0;
if (modified_between_p (XEXP (x, 0), start, end))
return 1;
return 1;
case MEM:
- if (RTX_UNCHANGING_P (x))
+ if (MEM_READONLY_P (x))
return 0;
if (modified_in_p (XEXP (x, 0), insn))
return 1;
{
rtx tmp;
- if (! INSN_P (x) || ! INSN_P (y))
- abort ();
+ gcc_assert (INSN_P (x));
+ gcc_assert (INSN_P (y));
tmp = PATTERN (y);
note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
{
rtx p;
- for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
+ for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
p = PREV_INSN (p))
if (INSN_P (p))
{
}
default:
-#ifdef ENABLE_CHECKING
- if (!CONSTANT_P (x))
- abort ();
-#endif
-
+ gcc_assert (CONSTANT_P (x));
return 0;
}
}
if (GET_CODE (x) == CC0)
return 1;
- if (!REG_P (x))
- abort ();
+ gcc_assert (REG_P (x));
regno = REGNO (x);
last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
if (find_regno_note (insn, REG_DEAD, test_regno))
return 1;
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& find_regno_fusage (insn, CLOBBER, test_regno))
return 1;
{
/* If it's not a CALL_INSN, it can't possibly have a
CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
- if (GET_CODE (insn) != CALL_INSN)
+ if (!CALL_P (insn))
return 0;
- if (! datum)
- abort ();
+ gcc_assert (datum);
if (!REG_P (datum))
{
to pseudo registers, so don't bother checking. */
if (regno >= FIRST_PSEUDO_REGISTER
- || GET_CODE (insn) != CALL_INSN )
+ || !CALL_P (insn) )
return 0;
for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
{
rtx link;
- if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
+ if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
return 0;
/* Look for the note that differentiates const and pure functions. */
return;
}
- abort ();
+ gcc_unreachable ();
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
x = simplify_subreg (GET_MODE (x), new,
GET_MODE (SUBREG_REG (x)),
SUBREG_BYTE (x));
- if (! x)
- abort ();
+ gcc_assert (x);
}
else
SUBREG_REG (x) = new;
{
x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
new, GET_MODE (XEXP (x, 0)));
- if (! x)
- abort ();
+ gcc_assert (x);
}
else
XEXP (x, 0) = new;
/* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
field. This is not handled by for_each_rtx because it doesn't
handle unprinted ('0') fields. */
- if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
+ if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
JUMP_LABEL (l) = new_label;
if ((GET_CODE (l) == LABEL_REF
return y == NULL_RTX;
/* Return true if a label_ref *BODY refers to label Y. */
- if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
+ if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
return XEXP (*body, 0) == y;
/* If *BODY is a reference to pool constant traverse the constant. */
{
rtx label, table;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (label = JUMP_LABEL (insn)) != NULL_RTX
&& (table = next_active_insn (label)) != NULL_RTX
- && GET_CODE (table) == JUMP_INSN
+ && JUMP_P (table)
&& (GET_CODE (PATTERN (table)) == ADDR_VEC
|| GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
{
computed_jump_p (rtx insn)
{
int i;
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
rtx pat = PATTERN (insn);
if (code == CONST_DOUBLE)
return -6;
op = avoid_constant_pool_reference (op);
+ code = GET_CODE (op);
switch (GET_RTX_CLASS (code))
{
while (r)
{
- if (GET_CODE (r) == NOTE)
+ if (NOTE_P (r))
{
switch (NOTE_LINE_NUMBER (r))
{
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
{
- if (loc == &in->u.fld[i].rtx)
+ if (loc == &in->u.fld[i].rt_rtx)
return 1;
if (fmt[i] == 'e')
{
if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
/* If the subreg crosses a word boundary ensure that
it also begins and ends on a word boundary. */
- if ((subreg_byte % UNITS_PER_WORD
- + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
- && (subreg_byte % UNITS_PER_WORD
- || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))
- abort ();
+ gcc_assert (!((subreg_byte % UNITS_PER_WORD
+ + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
+ && (subreg_byte % UNITS_PER_WORD
+ || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
if (WORDS_BIG_ENDIAN)
word = (GET_MODE_SIZE (inner_mode)
int mode_multiple, nregs_multiple;
int y_offset;
- if (xregno >= FIRST_PSEUDO_REGISTER)
- abort ();
+ gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
nregs_xmode = hard_regno_nregs[xregno][xmode];
nregs_ymode = hard_regno_nregs[xregno][ymode];
/* size of ymode must not be greater than the size of xmode. */
mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
- if (mode_multiple == 0)
- abort ();
+ gcc_assert (mode_multiple != 0);
y_offset = offset / GET_MODE_SIZE (ymode);
nregs_multiple = nregs_xmode / nregs_ymode;
int mode_multiple, nregs_multiple;
int y_offset;
- if (xregno >= FIRST_PSEUDO_REGISTER)
- abort ();
+ gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
nregs_xmode = hard_regno_nregs[xregno][xmode];
nregs_ymode = hard_regno_nregs[xregno][ymode];
if (offset == subreg_lowpart_offset (ymode, xmode))
return true;
-#ifdef ENABLE_CHECKING
/* This should always pass, otherwise we don't know how to verify the
constraint. These conditions may be relaxed but subreg_offset would
need to be redesigned. */
- if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
- || GET_MODE_SIZE (ymode) % nregs_ymode
- || nregs_xmode % nregs_ymode)
- abort ();
-#endif
+ gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
+ gcc_assert ((GET_MODE_SIZE (ymode) % nregs_ymode) == 0);
+ gcc_assert ((nregs_xmode % nregs_ymode) == 0);
/* The XMODE value can be seen as a vector of NREGS_XMODE
values. The subreg must represent a lowpart of given field.
/* size of ymode must not be greater than the size of xmode. */
mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
- if (mode_multiple == 0)
- abort ();
+ gcc_assert (mode_multiple != 0);
y_offset = offset / GET_MODE_SIZE (ymode);
nregs_multiple = nregs_xmode / nregs_ymode;
-#ifdef ENABLE_CHECKING
- if (offset % GET_MODE_SIZE (ymode)
- || mode_multiple % nregs_multiple)
- abort ();
-#endif
+
+ gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
+ gcc_assert ((mode_multiple % nregs_multiple) == 0);
+
return (!(y_offset % (mode_multiple / nregs_multiple)));
}
if (GET_CODE (XEXP (p, 0)) == USE
&& REG_P (XEXP (XEXP (p, 0), 0)))
{
- if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
- abort ();
+ gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
/* We only care about registers which can hold function
arguments. */
/* It is possible that some loads got CSEed from one call to
another. Stop in that case. */
- if (GET_CODE (before) == CALL_INSN)
+ if (CALL_P (before))
break;
/* Our caller needs either ensure that we will find all sets
(in case code has not been optimized yet), or take care
for possible labels in a way by setting boundary to preceding
CODE_LABEL. */
- if (GET_CODE (before) == CODE_LABEL)
+ if (LABEL_P (before))
{
- if (before != boundary)
- abort ();
+ gcc_assert (before == boundary);
break;
}
return false;
/* We can move CALL_INSN, but we need to check that all caller clobbered
regs are dead. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return false;
/* In future we will handle hoisting of libcall sequences, but
give up for now. */
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
return true;
}
x = *xp;
}
- if (!REG_P (x))
- abort ();
+ gcc_assert (REG_P (x));
/* We've verified that hard registers are dead, so we may keep the side
effect. Otherwise replace it by new pseudo. */
rtx pat;
int i;
rtx note;
+ int applied;
insn = emit_copy_of_insn_after (insn, after);
pat = PATTERN (insn);
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
- if (!apply_change_group ())
- abort ();
+ applied = apply_change_group ();
+ gcc_assert (applied);
return insn;
}
/* We cannot insert instructions on an abnormal critical edge.
It will be easier to find the culprit if we die now. */
- if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
- abort ();
+ gcc_assert (!(e->flags & EDGE_ABNORMAL) || !EDGE_CRITICAL_P (e));
/* Do not use emit_insn_on_edge as we want to preserve notes and similar
stuff. We also emit CALL_INSNS and friends. */
int
address_cost (rtx x, enum machine_mode mode)
{
- /* The address_cost target hook does not deal with ADDRESSOF nodes. But,
- during CSE, such nodes are present. Using an ADDRESSOF node which
- refers to the address of a REG is a good thing because we can then
- turn (MEM (ADDRESSOF (REG))) into just plain REG. */
-
- if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
- return -1;
-
/* We may be asked for cost of various unusual addresses, such as operands
of push instruction. It is not worthwhile to complicate writing
of the target hook by such cases. */
result_low = MIN (low0, low1);
break;
default:
- abort ();
+ gcc_unreachable ();
}
if (result_width < mode_width)
return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
+
+/* Calculate the rtx_cost of a single instruction. A return value of
+ zero indicates an instruction pattern without a known cost. */
+
+int
+insn_rtx_cost (rtx pat)
+{
+ int i, cost;
+ rtx set;
+
+ /* Extract the single set rtx from the instruction pattern.
+ We can't use single_set since we only have the pattern. */
+ if (GET_CODE (pat) == SET)
+ set = pat;
+ else if (GET_CODE (pat) == PARALLEL)
+ {
+ set = NULL_RTX;
+ for (i = 0; i < XVECLEN (pat, 0); i++)
+ {
+ rtx x = XVECEXP (pat, 0, i);
+ if (GET_CODE (x) == SET)
+ {
+ /* More than one SET in the PARALLEL: there is no single
+ cost to report, so treat the cost as unknown. */
+ if (set)
+ return 0;
+ set = x;
+ }
+ }
+ if (!set)
+ return 0;
+ }
+ else
+ return 0;
+
+ /* rtx_cost may return zero when it has no information about the
+ expression; fall back to the cost of one instruction so callers
+ can distinguish "unknown" (0) from "known but cheap". */
+ cost = rtx_cost (SET_SRC (set), SET);
+ return cost > 0 ? cost : COSTS_N_INSNS (1);
+}