/* Definitions for computing resource usage of specific insns.
- Copyright (C) 1999 Free Software Foundation, Inc.
+ Copyright (C) 1999, 2000 Free Software Foundation, Inc.
This file is part of GNU CC.
Boston, MA 02111-1307, USA. */
#include "config.h"
+#include "system.h"
+#include "toplev.h"
#include "rtl.h"
+#include "tm_p.h"
#include "hard-reg-set.h"
-#include "system.h"
#include "basic-block.h"
+#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "resource.h"
+#include "except.h"
+#include "insn-attr.h"
/* This structure is used to record liveness information at the targets or
fallthrough insns of branches. We will most likely need the information
static HARD_REG_SET pending_dead_regs;
\f
-static void update_live_status PROTO ((rtx, rtx));
-static int find_basic_block PROTO ((rtx));
-static rtx next_insn_no_annul PROTO ((rtx));
-static rtx find_dead_or_set_registers PROTO ((rtx, struct resources*,
+static void update_live_status PARAMS ((rtx, rtx, void *));
+static int find_basic_block PARAMS ((rtx));
+static rtx next_insn_no_annul PARAMS ((rtx));
+static rtx find_dead_or_set_registers PARAMS ((rtx, struct resources*,
rtx*, int, struct resources,
struct resources));
\f
It deadens any CLOBBERed registers and livens any SET registers. */
static void
-update_live_status (dest, x)
+update_live_status (dest, x, data)
rtx dest;
rtx x;
+ void *data ATTRIBUTE_UNUSED;
{
int first_regno, last_regno;
int i;
register struct resources *res;
register int include_delayed_effects;
{
- register enum rtx_code code = GET_CODE (x);
- register int i, j;
- register char *format_ptr;
+ enum rtx_code code = GET_CODE (x);
+ int i, j;
+ unsigned int r;
+ register const char *format_ptr;
/* Handle leaf items for which we set resource flags. Also, special-case
CALL, SET and CLOBBER operators. */
mark_referenced_resources (SUBREG_REG (x), res, 0);
else
{
- int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
- int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
- for (i = regno; i < last_regno; i++)
- SET_HARD_REG_BIT (res->regs, i);
+ unsigned int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
+ unsigned int last_regno
+ = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
+
+ for (r = regno; r < last_regno; r++)
+ SET_HARD_REG_BIT (res->regs, r);
}
return;
case REG:
- for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
- SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
+ for (r = 0; r < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); r++)
+ SET_HARD_REG_BIT (res->regs, REGNO (x) + r);
return;
case MEM:
res->unch_memory = 1;
else
res->memory = 1;
- res->volatil = MEM_VOLATILE_P (x);
+ res->volatil |= MEM_VOLATILE_P (x);
/* Mark registers used to access memory. */
mark_referenced_resources (XEXP (x, 0), res, 0);
break;
case ASM_OPERANDS:
- res->volatil = MEM_VOLATILE_P (x);
+ res->volatil |= MEM_VOLATILE_P (x);
/* For all ASM_OPERANDS, we must traverse the vector of input operands.
We can not just fall through here since then we would be confused
mark_referenced_resources (SET_SRC (x), res, 0);
x = SET_DEST (x);
- if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
+ if (GET_CODE (x) == SIGN_EXTRACT
+ || GET_CODE (x) == ZERO_EXTRACT
+ || GET_CODE (x) == STRICT_LOW_PART)
mark_referenced_resources (x, res, 0);
else if (GET_CODE (x) == SUBREG)
x = SUBREG_REG (x);
rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
if (GET_CODE (slot_pat) == SET
&& rtx_equal_p (SET_DEST (slot_pat),
- SET_DEST (XEXP (link, 0))))
+ XEXP (XEXP (link, 0), 0)))
break;
}
if (i >= seq_size)
- mark_referenced_resources (SET_DEST (XEXP (link, 0)),
+ mark_referenced_resources (XEXP (XEXP (link, 0), 0),
res, 0);
}
}
rtx this_jump_insn = insn;
next = NEXT_INSN (insn);
+
+ /* If this instruction can throw an exception, then we don't
+ know where we might end up next. That means that we have to
+ assume that whatever we have already marked as live really is
+ live. */
+ if (can_throw (insn))
+ break;
+
switch (GET_CODE (insn))
{
case CODE_LABEL:
underlying insn. Any registers set by the underlying insn
are live since the insn is being done somewhere else. */
if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
- mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
+ mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
+ MARK_SRC_DEST_CALL);
/* All other USE insns are to be ignored. */
continue;
{
if (jump_count++ < 10)
{
- if (simplejump_p (this_jump_insn)
+ if (any_uncondjump_p (this_jump_insn)
|| GET_CODE (PATTERN (this_jump_insn)) == RETURN)
{
next = JUMP_LABEL (this_jump_insn);
*jump_target = JUMP_LABEL (this_jump_insn);
}
}
- else if (condjump_p (this_jump_insn)
- || condjump_in_parallel_p (this_jump_insn))
+ else if (any_condjump_p (this_jump_insn))
{
struct resources target_set, target_res;
struct resources fallthrough_res;
= ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
target_set = set;
- mark_set_resources (insn, &target_set, 0, 1);
+ mark_set_resources (insn, &target_set, 0,
+ MARK_SRC_DEST_CALL);
for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
= ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
- mark_set_resources (insn, &set, 0, 1);
+ mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
}
else
{
- mark_set_resources (insn, &set, 0, 1);
+ mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
target_set = set;
}
}
mark_referenced_resources (insn, &needed, 1);
- mark_set_resources (insn, &set, 0, 1);
+ mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
COPY_HARD_REG_SET (scratch, set.regs);
AND_COMPL_HARD_REG_SET (scratch, needed.regs);
\f
/* Given X, a part of an insn, and a pointer to a `struct resource',
RES, indicate which resources are modified by the insn. If
- INCLUDE_DELAYED_EFFECTS is nonzero, also mark resources potentially
- set by the called routine.
+ MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
+ set by the called routine. If MARK_TYPE is MARK_DEST, only mark SET_DESTs.
If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
objects are being referenced instead of set.
our computation and thus may be placed in a delay slot. */
void
-mark_set_resources (x, res, in_dest, include_delayed_effects)
+mark_set_resources (x, res, in_dest, mark_type)
register rtx x;
register struct resources *res;
int in_dest;
- int include_delayed_effects;
+ enum mark_resource_type mark_type;
{
- register enum rtx_code code;
- register int i, j;
- register char *format_ptr;
+ enum rtx_code code;
+ int i, j;
+ unsigned int r;
+ const char *format_ptr;
restart:
that aren't saved across calls, global registers and anything
explicitly CLOBBERed immediately after the CALL_INSN. */
- if (include_delayed_effects)
+ if (mark_type == MARK_SRC_DEST_CALL)
{
rtx next = NEXT_INSN (x);
rtx prev = PREV_INSN (x);
rtx link;
res->cc = res->memory = 1;
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (call_used_regs[i] || global_regs[i])
- SET_HARD_REG_BIT (res->regs, i);
+ for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
+ if (call_used_regs[r] || global_regs[r])
+ SET_HARD_REG_BIT (res->regs, r);
/* If X is part of a delay slot sequence, then NEXT should be
the first insn after the sequence. */
for (link = CALL_INSN_FUNCTION_USAGE (x);
link; link = XEXP (link, 1))
if (GET_CODE (XEXP (link, 0)) == CLOBBER)
- mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);
+ mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
+ MARK_SRC_DEST);
/* Check for a NOTE_INSN_SETJMP. If it exists, then we must
assume that this call can clobber any register. */
and doesn't actually do anything, so we ignore it. */
#ifdef INSN_SETS_ARE_DELAYED
- if (! include_delayed_effects
+ if (mark_type != MARK_SRC_DEST_CALL
&& INSN_SETS_ARE_DELAYED (x))
return;
#endif
effects of the calling routine. */
mark_set_resources (SET_DEST (x), res,
- (include_delayed_effects
+ (mark_type == MARK_SRC_DEST_CALL
|| GET_CODE (SET_SRC (x)) != CALL),
- 0);
+ mark_type);
- mark_set_resources (SET_SRC (x), res, 0, 0);
+ if (mark_type != MARK_DEST)
+ mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
return;
case CLOBBER:
- mark_set_resources (XEXP (x, 0), res, 1, 0);
+ mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
return;
case SEQUENCE:
for (i = 0; i < XVECLEN (x, 0); i++)
if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
&& INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
- mark_set_resources (XVECEXP (x, 0, i), res, 0,
- include_delayed_effects);
+ mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
return;
case POST_INC:
case PRE_INC:
case POST_DEC:
case PRE_DEC:
- mark_set_resources (XEXP (x, 0), res, 1, 0);
+ mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
return;
+ case SIGN_EXTRACT:
case ZERO_EXTRACT:
- mark_set_resources (XEXP (x, 0), res, in_dest, 0);
- mark_set_resources (XEXP (x, 1), res, 0, 0);
- mark_set_resources (XEXP (x, 2), res, 0, 0);
+ if (! (mark_type == MARK_DEST && in_dest))
+ {
+ mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
+ mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
+ mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
+ }
return;
case MEM:
if (in_dest)
{
res->memory = 1;
- res->unch_memory = RTX_UNCHANGING_P (x);
- res->volatil = MEM_VOLATILE_P (x);
+ res->unch_memory |= RTX_UNCHANGING_P (x);
+ res->volatil |= MEM_VOLATILE_P (x);
}
- mark_set_resources (XEXP (x, 0), res, 0, 0);
+ mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
return;
case SUBREG:
if (in_dest)
{
if (GET_CODE (SUBREG_REG (x)) != REG)
- mark_set_resources (SUBREG_REG (x), res,
- in_dest, include_delayed_effects);
+ mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
else
{
- int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
- int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
- for (i = regno; i < last_regno; i++)
- SET_HARD_REG_BIT (res->regs, i);
+ unsigned int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
+ unsigned int last_regno
+ = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
+
+ for (r = regno; r < last_regno; r++)
+ SET_HARD_REG_BIT (res->regs, r);
}
}
return;
case REG:
if (in_dest)
- for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
- SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
+ for (r = 0; r < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); r++)
+ SET_HARD_REG_BIT (res->regs, REGNO (x) + r);
+ return;
+
+ case STRICT_LOW_PART:
+ if (! (mark_type == MARK_DEST && in_dest))
+ {
+ mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
+ return;
+ }
+
+ case UNSPEC_VOLATILE:
+ case ASM_INPUT:
+ /* Traditional asm's are always volatile. */
+ res->volatil = 1;
+ return;
+
+ case TRAP_IF:
+ res->volatil = 1;
+ break;
+
+ case ASM_OPERANDS:
+ res->volatil |= MEM_VOLATILE_P (x);
+
+ /* For all ASM_OPERANDS, we must traverse the vector of input operands.
+ We can not just fall through here since then we would be confused
+ by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
+ traditional asms unlike their normal usage. */
+
+ for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
+ mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
+ MARK_SRC_DEST);
return;
default:
switch (*format_ptr++)
{
case 'e':
- mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
+ mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
break;
case 'E':
for (j = 0; j < XVECLEN (x, i); j++)
- mark_set_resources (XVECEXP (x, i, j), res, in_dest,
- include_delayed_effects);
+ mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
break;
}
}
TARGET. Otherwise, we must assume everything is live. */
if (b != -1)
{
- regset regs_live = basic_block_live_at_start[b];
- int j;
- int regno;
+ regset regs_live = BASIC_BLOCK (b)->global_live_at_start;
+ unsigned int j;
+ unsigned int regno;
rtx start_insn, stop_insn;
/* Compute hard regs live at start of block -- this is the real hard regs
EXECUTE_IF_SET_IN_REG_SET
(regs_live, FIRST_PSEUDO_REGISTER, i,
{
- if ((regno = reg_renumber[i]) >= 0)
- for (j = regno;
- j < regno + HARD_REGNO_NREGS (regno,
- PSEUDO_REGNO_MODE (i));
- j++)
- SET_HARD_REG_BIT (current_live_regs, j);
+ if (reg_renumber[i] >= 0)
+ {
+ regno = reg_renumber[i];
+ for (j = regno;
+ j < regno + HARD_REGNO_NREGS (regno,
+ PSEUDO_REGNO_MODE (i));
+ j++)
+ SET_HARD_REG_BIT (current_live_regs, j);
+ }
});
/* Get starting and ending insn, handling the case where each might
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
&& ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
-#ifdef PIC_OFFSET_TABLE_REGNUM
+#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
&& ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
)
SET_HARD_REG_BIT (pending_dead_regs, i);
}
- note_stores (PATTERN (real_insn), update_live_status);
+ note_stores (PATTERN (real_insn), update_live_status, NULL);
/* If any registers were unused after this insn, kill them.
These notes will always be accurate. */
/* If we hit an unconditional branch, we have another way of finding out
what is live: we can see what is live at the branch target and include
- anything used but not set before the branch. The only things that are
- live are those that are live using the above test and the test below. */
+ anything used but not set before the branch. We add the live
+ resources found using the test below to those found until now. */
if (jump_insn)
{
AND_COMPL_HARD_REG_SET (scratch, set.regs);
IOR_HARD_REG_SET (new_resources.regs, scratch);
- mark_set_resources (insn, &set, 0, 1);
+ mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
}
- AND_HARD_REG_SET (res->regs, new_resources.regs);
+ IOR_HARD_REG_SET (res->regs, new_resources.regs);
}
if (tinfo != NULL)
start_of_epilogue_needs = end_of_function_needs;
while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
- mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
+ mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
+ MARK_SRC_DEST_CALL);
/* Allocate and initialize the tables used by mark_target_live_regs. */
- target_hash_table
- = (struct target_info **) xmalloc ((TARGET_HASH_PRIME
- * sizeof (struct target_info *)));
- bzero ((char *) target_hash_table,
- TARGET_HASH_PRIME * sizeof (struct target_info *));
-
- bb_ticks = (int *) xmalloc (n_basic_blocks * sizeof (int));
- bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));
+ target_hash_table = (struct target_info **)
+ xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
+ bb_ticks = (int *) xcalloc (n_basic_blocks, sizeof (int));
}
\f
/* Free up the resources allocated to mark_target_live_regs (). This
mark_referenced_resources (trial, &end_of_function_needs,
include_delayed_effects);
}
-\f
-/* Try to find an available hard register of mode MODE at
- CURRENT_INSN, matching the register class in CLASS_STR. Registers
- that already have bits set in REG_SET will not be considered.
-
- If an appropriate register is available, it will be returned and the
- corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
- returned. */
-
-rtx
-find_free_register (current_insn, class_str, mode, reg_set)
- rtx current_insn;
- char *class_str;
- int mode;
- HARD_REG_SET *reg_set;
-{
- int i, j;
- struct resources used;
- unsigned char clet = class_str[0];
- enum reg_class class
- = (clet == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (clet));
-
- mark_target_live_regs (get_insns (), current_insn, &used);
-
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- {
- int success = 1;
-
- if (! TEST_HARD_REG_BIT (reg_class_contents[class], i))
- continue;
- for (j = HARD_REGNO_NREGS (i, mode) - 1; j >= 0; j--)
- {
- if (TEST_HARD_REG_BIT (*reg_set, i + j)
- || TEST_HARD_REG_BIT (used.regs, i + j))
- {
- success = 0;
- break;
- }
- }
- if (success)
- {
- for (j = HARD_REGNO_NREGS (i, mode) - 1; j >= 0; j--)
- {
- SET_HARD_REG_BIT (*reg_set, i + j);
- }
- return gen_rtx_REG (mode, i);
- }
- }
- return NULL_RTX;
-}