case MEM:
return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
- case QUEUED:
- return 1;
-
- case ADDRESSOF:
case CONST:
case CONST_INT:
case CONST_DOUBLE:
case MEM:
return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
- case QUEUED:
- return 1;
-
case CONST:
case CONST_INT:
case CONST_DOUBLE:
case LABEL_REF:
return 0;
- case ADDRESSOF:
- /* This will resolve to some offset from the frame pointer. */
- return 0;
-
case REG:
/* Note that we have to test for the actual rtx used for the frame
and arg pointers and not just the register number in case we have
case LABEL_REF:
return 0;
- case ADDRESSOF:
- /* This will resolve to some offset from the frame pointer. */
- return 0;
-
case REG:
/* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
case LABEL_REF:
return true;
- case ADDRESSOF:
- /* This will resolve to some offset from the frame pointer. */
- return true;
-
case REG:
/* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
}
/* If X isn't a MEM then this isn't a tablejump we understand. */
- if (GET_CODE (x) != MEM)
+ if (!MEM_P (x))
return NULL_RTX;
/* Strip off the MEM. */
{
if (INSN_P (x))
{
- if (GET_CODE (x) == CALL_INSN)
+ if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
return 0;
case MEM:
- if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
+ if (MEM_P (find) && rtx_equal_p (x, find))
return 1;
break;
if (beg == end)
return 0;
for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
- if (GET_CODE (p) == CODE_LABEL)
+ if (LABEL_P (p))
return 0;
return 1;
}
{
rtx p;
for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
- if (GET_CODE (p) == JUMP_INSN)
+ if (JUMP_P (p))
return 0;
return 1;
}
for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& (find_reg_fusage (insn, USE, reg)
|| find_reg_fusage (insn, CLOBBER, reg)))))
return 1;
return 0;
case CLOBBER:
- if (GET_CODE (XEXP (body, 0)) == MEM)
+ if (MEM_P (XEXP (body, 0)))
if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
return 1;
return 0;
for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (reg_referenced_p (reg, PATTERN (insn))
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& find_reg_fusage (insn, USE, reg))))
return 1;
return 0;
check if a side-effect of the insn clobbers REG. */
if (INSN_P (insn)
&& (FIND_REG_INC_NOTE (insn, reg)
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
/* We'd like to test call_used_regs here, but rtlanal.c can't
reference that variable due to its use in genattrtab. So
we'll just be more conservative.
information holds all clobbered registers. */
&& ((REG_P (reg)
&& REGNO (reg) < FIRST_PSEUDO_REGISTER)
- || GET_CODE (reg) == MEM
+ || MEM_P (reg)
|| find_reg_fusage (insn, CLOBBER, reg)))))
return 1;
{
struct set_of_data *data = (struct set_of_data *) (data1);
if (rtx_equal_p (x, data->pat)
- || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
+ || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
data->found = pat;
}
if (dst == pc_rtx && src == pc_rtx)
return 1;
- if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
+ if (MEM_P (dst) && MEM_P (src))
return rtx_equal_p (dst, src) && !side_effects_p (dst);
if (GET_CODE (dst) == SIGN_EXTRACT
{
rtx p;
- for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
+ for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
p = PREV_INSN (p))
if (INSN_P (p))
{
const char *fmt;
int i;
- if (GET_CODE (in) == MEM)
+ if (MEM_P (in))
return 1;
fmt = GET_RTX_FORMAT (GET_CODE (in));
return;
case CLOBBER:
- if (GET_CODE (XEXP (body, 0)) == MEM)
+ if (MEM_P (XEXP (body, 0)))
(*fun) (&XEXP (XEXP (body, 0), 0), data);
return;
while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
dest = XEXP (dest, 0);
- if (GET_CODE (dest) == MEM)
+ if (MEM_P (dest))
(*fun) (&XEXP (dest, 0), data);
}
return;
if (find_regno_note (insn, REG_DEAD, test_regno))
return 1;
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& find_regno_fusage (insn, CLOBBER, test_regno))
return 1;
{
/* If it's not a CALL_INSN, it can't possibly have a
CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
- if (GET_CODE (insn) != CALL_INSN)
+ if (!CALL_P (insn))
return 0;
if (! datum)
to pseudo registers, so don't bother checking. */
if (regno >= FIRST_PSEUDO_REGISTER
- || GET_CODE (insn) != CALL_INSN )
+ || !CALL_P (insn) )
return 0;
for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
{
rtx link;
- if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
+ if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
return 0;
/* Look for the note that differentiates const and pure functions. */
rtx u, m;
if (GET_CODE (u = XEXP (link, 0)) == USE
- && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
+ && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
&& GET_CODE (XEXP (m, 0)) == SCRATCH)
return 1;
}
if (replace_dest)
SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
- else if (GET_CODE (SET_DEST (x)) == MEM
+ else if (MEM_P (SET_DEST (x))
|| GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
/* Even if we are not to replace destinations, replace register if it
is CONTAINED in destination (destination is memory or
/* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
field. This is not handled by for_each_rtx because it doesn't
handle unprinted ('0') fields. */
- if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
+ if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
JUMP_LABEL (l) = new_label;
if ((GET_CODE (l) == LABEL_REF
return y == NULL_RTX;
/* Return true if a label_ref *BODY refers to label Y. */
- if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
+ if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
return XEXP (*body, 0) == y;
/* If *BODY is a reference to pool constant traverse the constant. */
{
rtx label, table;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (label = JUMP_LABEL (insn)) != NULL_RTX
&& (table = next_active_insn (label)) != NULL_RTX
- && GET_CODE (table) == JUMP_INSN
+ && JUMP_P (table)
&& (GET_CODE (PATTERN (table)) == ADDR_VEC
|| GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
{
computed_jump_p (rtx insn)
{
int i;
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
rtx pat = PATTERN (insn);
while (r)
{
- if (GET_CODE (r) == NOTE)
+ if (NOTE_P (r))
{
switch (NOTE_LINE_NUMBER (r))
{
/* It is possible that some loads got CSEed from one call to
another. Stop in that case. */
- if (GET_CODE (before) == CALL_INSN)
+ if (CALL_P (before))
break;
/* Our caller needs either ensure that we will find all sets
(in case code has not been optimized yet), or take care
for possible labels in a way by setting boundary to preceding
CODE_LABEL. */
- if (GET_CODE (before) == CODE_LABEL)
+ if (LABEL_P (before))
{
if (before != boundary)
abort ();
return false;
/* We can move CALL_INSN, but we need to check that all caller clobbered
regs are dead. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return false;
/* In future we will handle hoisting of libcall sequences, but
give up for now. */
int
address_cost (rtx x, enum machine_mode mode)
{
- /* The address_cost target hook does not deal with ADDRESSOF nodes. But,
- during CSE, such nodes are present. Using an ADDRESSOF node which
- refers to the address of a REG is a good thing because we can then
- turn (MEM (ADDRESSOF (REG))) into just plain REG. */
-
- if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
- return -1;
-
/* We may be asked for cost of various unusual addresses, such as operands
of push instruction. It is not worthwhile to complicate writing
of the target hook by such cases. */
<< (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
!= 0))
: LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
- || GET_CODE (SUBREG_REG (x)) != MEM)
+ || !MEM_P (SUBREG_REG (x)))
#endif
{
/* On many CISC machines, accessing an object in a wider mode
if ((GET_MODE_SIZE (GET_MODE (x))
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
&& LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
- && GET_CODE (SUBREG_REG (x)) == MEM)
+ && MEM_P (SUBREG_REG (x)))
return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
known_x, known_mode, known_ret);
#endif
return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
+
+/* Calculate the rtx_cost of a single instruction.  A return value of
+   zero indicates an instruction pattern without a known cost.  */
+
+int
+insn_rtx_cost (rtx pat)
+{
+  int i, cost;
+  rtx set;
+
+  /* Extract the single set rtx from the instruction pattern.
+     We can't use single_set since we only have the pattern.  */
+  if (GET_CODE (pat) == SET)
+    set = pat;
+  else if (GET_CODE (pat) == PARALLEL)
+    {
+      set = NULL_RTX;
+      for (i = 0; i < XVECLEN (pat, 0); i++)
+	{
+	  rtx x = XVECEXP (pat, 0, i);
+	  if (GET_CODE (x) == SET)
+	    {
+	      /* A second SET means there is no single cost to report;
+		 punt rather than guess which SET dominates.  */
+	      if (set)
+		return 0;
+	      set = x;
+	    }
+	}
+      /* No SET at all, e.g. a USE/CLOBBER-only PARALLEL.  */
+      if (!set)
+	return 0;
+    }
+  else
+    return 0;
+
+  /* rtx_cost may return zero for a trivially cheap SET_SRC (such as a
+     plain register); report at least one instruction's worth so that
+     zero remains reserved for "cost unknown" per the contract above.  */
+  cost = rtx_cost (SET_SRC (set), SET);
+  return cost > 0 ? cost : COSTS_N_INSNS (1);
+}