/* Allocate registers for pseudo-registers that span basic blocks.
- Copyright (C) 1987, 1988, 1991, 1994 Free Software Foundation, Inc.
+ Copyright (C) 1987, 88, 91, 94, 96, 1997 Free Software Foundation, Inc.
This file is part of GNU CC.
You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
+the Free Software Foundation, 59 Temple Place - Suite 330,
+Boston, MA 02111-1307, USA. */
-#include <stdio.h>
#include "config.h"
+#include "system.h"
+
+#include "machmode.h"
+#include "hard-reg-set.h"
#include "rtl.h"
#include "flags.h"
#include "basic-block.h"
-#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "output.h"
+#include "toplev.h"
/* This pass of the compiler performs global register allocation.
It assigns hard register numbers to all the pseudo registers
/* Indexed by (pseudo) reg number, gives the allocno, or -1
for pseudo registers already allocated by local_allocate. */
/* NOTE(review): this hunk drops `static', exporting reg_allocno from
   this file -- confirm a matching extern declaration was added to a
   shared header.  */
-static int *reg_allocno;
+int *reg_allocno;
/* Indexed by allocno, gives the reg number. */
/* NOTE(review): the comment above appears to belong to an elided
   declaration (allocno_reg?); regs_set/n_regs_set look like they
   record the registers set by the insn currently being scanned --
   verify against the complete file.  */
static rtx *regs_set;
static int n_regs_set;
-/* All register that can be eliminated. */
+/* All registers that can be eliminated. */
static HARD_REG_SET eliminable_regset;
/* Forward declarations of this file's static helpers (K&R-era PROTO
   macro).  allocno_compare is changed to the qsort-compatible
   const-generic-pointer signature.  */
-static int allocno_compare PROTO((int *, int *));
+static int allocno_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
static void global_conflicts PROTO((void));
static void expand_preferences PROTO((void));
static void prune_preferences PROTO((void));
/* Entry point of this pass: assign hard registers to the pseudo-regs
   left unallocated by local-alloc, then run reload and return its
   value (old-style definition, implicit int return).
   NOTE(review): this span is a diff hunk with elided context --
   several loop headers and statements are missing from this view.  */
global_alloc (file)
FILE *file;
{
+  int retval;
#ifdef ELIMINABLE_REGS
static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
-  register int i;
/* need_fp is nonzero when the frame pointer must be kept: the user
   did not request -fomit-frame-pointer, the target requires a frame
   pointer, or alloca is called and EXIT_IGNORE_STACK holds.  */
+  int need_fp
+    = (! flag_omit_frame_pointer
+#ifdef EXIT_IGNORE_STACK
+       || (current_function_calls_alloca && EXIT_IGNORE_STACK)
+#endif
+       || FRAME_POINTER_REQUIRED);
+
+  register size_t i;
rtx x;
max_allocno = 0;
/* Build eliminable_regset / no_global_alloc_regs from the target's
   ELIMINABLE_REGS table.  (Loop header elided in this excerpt.)  */
SET_HARD_REG_BIT (eliminable_regset, eliminables[i].from);
if (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
- || (eliminables[i].from == HARD_FRAME_POINTER_REGNUM
- && (! flag_omit_frame_pointer || FRAME_POINTER_REQUIRED)))
+ || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp))
SET_HARD_REG_BIT (no_global_alloc_regs, eliminables[i].from);
}
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ SET_HARD_REG_BIT (eliminable_regset, HARD_FRAME_POINTER_REGNUM);
+ if (need_fp)
SET_HARD_REG_BIT (no_global_alloc_regs, HARD_FRAME_POINTER_REGNUM);
#endif
+
#else
SET_HARD_REG_BIT (eliminable_regset, FRAME_POINTER_REGNUM);
-
- /* If we know we will definitely not be eliminating the frame pointer,
- don't allocate it. */
- if (! flag_omit_frame_pointer || FRAME_POINTER_REQUIRED)
+ if (need_fp)
SET_HARD_REG_BIT (no_global_alloc_regs, FRAME_POINTER_REGNUM);
#endif
/* Note that reg_live_length[i] < 0 indicates a "constant" reg
that we are supposed to refrain from putting in a hard reg.
-2 means do make an allocno but don't allocate it. */
/* Establish the pseudo-reg -> allocno mapping.  Throughout this hunk
   the raw reg_n_refs[] style arrays are converted to the
   REG_N_REFS()-style accessor macros.  */
- if (reg_n_refs[i] != 0 && reg_renumber[i] < 0 && reg_live_length[i] != -1
+ if (REG_N_REFS (i) != 0 && reg_renumber[i] < 0 && REG_LIVE_LENGTH (i) != -1
/* Don't allocate pseudos that cross calls,
if this function receives a nonlocal goto. */
&& (! current_function_has_nonlocal_label
- || reg_n_calls_crossed[i] == 0))
+ || REG_N_CALLS_CROSSED (i) == 0))
{
if (reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
reg_allocno[i] = reg_allocno[reg_may_share[i]];
else
reg_allocno[i] = max_allocno++;
- if (reg_live_length[i] == 0)
+ if (REG_LIVE_LENGTH (i) == 0)
abort ();
}
else
/* Accumulate per-allocno statistics (size, refs, calls crossed,
   maximum live length) over the regs sharing each allocno.  */
int allocno = reg_allocno[i];
allocno_reg[allocno] = i;
allocno_size[allocno] = PSEUDO_REGNO_SIZE (i);
- allocno_calls_crossed[allocno] += reg_n_calls_crossed[i];
- allocno_n_refs[allocno] += reg_n_refs[i];
- if (allocno_live_length[allocno] < reg_live_length[i])
- allocno_live_length[allocno] = reg_live_length[i];
+ allocno_calls_crossed[allocno] += REG_N_CALLS_CROSSED (i);
+ allocno_n_refs[allocno] += REG_N_REFS (i);
+ if (allocno_live_length[allocno] < REG_LIVE_LENGTH (i))
+ allocno_live_length[allocno] = REG_LIVE_LENGTH (i);
}
/* Calculate amount of usage of each hard reg by pseudos
for (j = regno; j < endregno; j++)
{
- local_reg_n_refs[j] += reg_n_refs[i];
- local_reg_live_length[j] += reg_live_length[i];
+ local_reg_n_refs[j] += REG_N_REFS (i);
+ local_reg_live_length[j] += REG_LIVE_LENGTH (i);
}
}
if (regs_ever_live[i])
local_reg_n_refs[i] = 0;
+ /* Likewise for regs used in a SCRATCH. */
+ for (i = 0; i < scratch_list_length; i++)
+ if (scratch_list[i])
+ {
+ int regno = REGNO (scratch_list[i]);
+ int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch_list[i]));
+ int j;
+
+ for (j = regno; j < lim; j++)
+ local_reg_n_refs[j] = 0;
+ }
+
/* Allocate the space for the conflict and preference tables and
initialize them. */
allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
- conflicts = (INT_TYPE *) alloca (max_allocno * allocno_row_words
- * sizeof (INT_TYPE));
+ /* We used to use alloca here, but the size of what it would try to
+ allocate would occasionally cause it to exceed the stack limit and
+ cause unpredictable core dumps. Some examples were > 2Mb in size. */
+ conflicts = (INT_TYPE *) xmalloc (max_allocno * allocno_row_words
+ * sizeof (INT_TYPE));
bzero ((char *) conflicts,
max_allocno * allocno_row_words * sizeof (INT_TYPE));
except for parameters marked with reg_live_length[regno] == -2. */
/* Allocate in priority order: first within each allocno's preferred
   class, then (if that fails) in its alternate class.  */
for (i = 0; i < max_allocno; i++)
- if (reg_live_length[allocno_reg[allocno_order[i]]] >= 0)
+ if (REG_LIVE_LENGTH (allocno_reg[allocno_order[i]]) >= 0)
{
/* If we have more than one register class,
first try allocating in the class that is cheapest
for this pseudo-reg. If that fails, try any reg. */
if (N_REG_CLASSES > 1)
{
- find_reg (allocno_order[i], HARD_CONST (0), 0, 0, 0);
+ find_reg (allocno_order[i], 0, 0, 0, 0);
if (reg_renumber[allocno_reg[allocno_order[i]]] >= 0)
continue;
}
if (reg_alternate_class (allocno_reg[allocno_order[i]]) != NO_REGS)
- find_reg (allocno_order[i], HARD_CONST (0), 1, 0, 0);
+ find_reg (allocno_order[i], 0, 1, 0, 0);
}
}
for the sake of debugging information. */
if (n_basic_blocks > 0)
#endif
/* Run reload, then release the heap-allocated conflict matrix
   (pairs with the xmalloc above) before returning reload's value.  */
- return reload (get_insns (), 1, file);
+ retval = reload (get_insns (), 1, file);
+
+ free (conflicts);
+ return retval;
}
/* Sort predicate for ordering the allocnos.
Returns -1 (1) if *v1 should be allocated before (after) *v2. */
/* qsort comparator: higher-priority allocnos sort first.  Priority is
   (floor_log2(refs) * refs / live_length) * 10000 * size, computed in
   double to avoid intermediate overflow; ties are broken by allocno
   number so qsort's output is deterministic.  This hunk converts the
   parameters from int* to qsort's const generic-pointer signature,
   dereferenced once into locals v1/v2.  */
static int
-allocno_compare (v1, v2)
- int *v1, *v2;
+allocno_compare (v1p, v2p)
+ const GENERIC_PTR v1p;
+ const GENERIC_PTR v2p;
{
+ int v1 = *(int *)v1p, v2 = *(int *)v2p;
/* Note that the quotient will never be bigger than
the value of floor_log2 times the maximum number of
times a register can occur in one insn (surely less than 100).
Multiplying this by 10000 can't overflow. */
register int pri1
- = (((double) (floor_log2 (allocno_n_refs[*v1]) * allocno_n_refs[*v1])
- / allocno_live_length[*v1])
- * 10000 * allocno_size[*v1]);
+ = (((double) (floor_log2 (allocno_n_refs[v1]) * allocno_n_refs[v1])
+ / allocno_live_length[v1])
+ * 10000 * allocno_size[v1]);
register int pri2
- = (((double) (floor_log2 (allocno_n_refs[*v2]) * allocno_n_refs[*v2])
- / allocno_live_length[*v2])
- * 10000 * allocno_size[*v2]);
+ = (((double) (floor_log2 (allocno_n_refs[v2]) * allocno_n_refs[v2])
+ / allocno_live_length[v2])
+ * 10000 * allocno_size[v2]);
if (pri2 - pri1)
return pri2 - pri1;
/* If regs are equally good, sort by allocno,
so that the results of qsort leave nothing to chance. */
- return *v1 - *v2;
+ return v1 - v2;
}
\f
/* Scan the rtl code and record all conflicts and register preferences in the
are explicitly marked in basic_block_live_at_start. */
/* NOTE(review): interior of global_conflicts -- the function header is
   elided from this excerpt.  The hunk replaces the open-coded scan of
   regset words/bits with the REG_SET_TO_HARD_REG_SET and
   EXECUTE_IF_SET_IN_REG_SET macros of the new regset interface.  */
{
- register int offset;
- REGSET_ELT_TYPE bit;
register regset old = basic_block_live_at_start[b];
int ax = 0;
-#ifdef HARD_REG_SET
- hard_regs_live = old[0];
-#else
- COPY_HARD_REG_SET (hard_regs_live, old);
-#endif
- for (offset = 0, i = 0; offset < regset_size; offset++)
- if (old[offset] == 0)
- i += REGSET_ELT_BITS;
- else
- for (bit = 1; bit; bit <<= 1, i++)
- {
- if (i >= max_regno)
- break;
- if (old[offset] & bit)
- {
- register int a = reg_allocno[i];
- if (a >= 0)
- {
- SET_ALLOCNO_LIVE (a);
- block_start_allocnos[ax++] = a;
- }
- else if ((a = reg_renumber[i]) >= 0)
- mark_reg_live_nc (a, PSEUDO_REGNO_MODE (i));
- }
- }
/* For each pseudo live at block start: mark its allocno live and
   remember it in block_start_allocnos; a pseudo already given a hard
   reg by local-alloc is instead marked live as that hard reg.  */
+ REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
+ EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i,
+ {
+ register int a = reg_allocno[i];
+ if (a >= 0)
+ {
+ SET_ALLOCNO_LIVE (a);
+ block_start_allocnos[ax++] = a;
+ }
+ else if ((a = reg_renumber[i]) >= 0)
+ mark_reg_live_nc
+ (a, PSEUDO_REGNO_MODE (i));
+ });
/* Record that each allocno now live conflicts with each other
allocno now live, and with each hard reg now live. */
record_conflicts (block_start_allocnos, ax);
+
+#ifdef STACK_REGS
+ /* Pseudos can't go in stack regs at the start of a basic block
+ that can be reached through a computed goto, since reg-stack
+ can't handle computed gotos. */
+ if (basic_block_computed_jump_target[b])
+ for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
+ record_one_conflict (ax);
+#endif
}
insn = basic_block_head[b];
we want to give the lower-priority allocno the first chance for
these registers). */
/* NOTE(review): fragment of prune_preferences (header elided).  The
   hunk makes the conflict test symmetric by checking CONFLICTP in
   both directions -- presumably because the conflict matrix is only
   filled in one orientation; confirm against the full file.  */
for (j = i + 1; j < max_allocno; j++)
- if (CONFLICTP (allocno, allocno_order[j]))
+ if (CONFLICTP (allocno, allocno_order[j])
+ || CONFLICTP (allocno_order[j], allocno))
{
COPY_HARD_REG_SET (temp,
hard_reg_full_preferences[allocno_order[j]]);
/* NOTE(review): fragments of find_reg (header and large parts of the
   body elided).  Three independent changes visible in this hunk:
   1. Under CLASS_CANNOT_CHANGE_SIZE, regs whose size is changed by
      SUBREG accesses are excluded from that class.
   2. The caller-save retry builds a new_losers set that adds
      losing_caller_save_reg_set instead of passing `losers' through.
   3. The local-reg usage comparison is split into explicit double
      temporaries (tmp1/tmp2) to avoid x87 excess-precision issues.  */
IOR_HARD_REG_SET (used1, hard_reg_conflicts[allocno]);
+#ifdef CLASS_CANNOT_CHANGE_SIZE
+ if (REG_CHANGES_SIZE (allocno_reg[allocno]))
+ IOR_HARD_REG_SET (used1,
+ reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE]);
+#endif
+
/* Try each hard reg to see if it fits. Do this in two passes.
In the first pass, skip registers that are preferred by some other pseudo
to give it a better chance of getting one of those registers. Only if
&& CALLER_SAVE_PROFITABLE (allocno_n_refs[allocno],
allocno_calls_crossed[allocno]))
{
- find_reg (allocno, losers, alt_regs_p, 1, retrying);
+ HARD_REG_SET new_losers;
+ if (! losers)
+ CLEAR_HARD_REG_SET (new_losers);
+ else
+ COPY_HARD_REG_SET (new_losers, losers);
+
+ IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
+ find_reg (allocno, new_losers, alt_regs_p, 1, retrying);
if (reg_renumber[allocno_reg[allocno]] >= 0)
{
caller_save_needed = 1;
/* Don't use a reg no good for this pseudo. */
&& ! TEST_HARD_REG_BIT (used2, regno)
&& HARD_REGNO_MODE_OK (regno, mode)
- && (((double) local_reg_n_refs[regno]
- / local_reg_live_length[regno])
- < ((double) allocno_n_refs[allocno]
- / allocno_live_length[allocno])))
+#ifdef CLASS_CANNOT_CHANGE_SIZE
+ && ! (REG_CHANGES_SIZE (allocno_reg[allocno])
+ && (TEST_HARD_REG_BIT
+ (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
+ regno)))
+#endif
+ )
{
- /* Hard reg REGNO was used less in total by local regs
- than it would be used by this one allocno! */
- int k;
- for (k = 0; k < max_regno; k++)
- if (reg_renumber[k] >= 0)
- {
- int r = reg_renumber[k];
- int endregno
- = r + HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (k));
+ /* We explicitly evaluate the divide results into temporary
+ variables so as to avoid excess precision problems that occur
+ on a i386-unknown-sysv4.2 (unixware) host. */
+
+ double tmp1 = ((double) local_reg_n_refs[regno]
+ / local_reg_live_length[regno]);
+ double tmp2 = ((double) allocno_n_refs[allocno]
+ / allocno_live_length[allocno]);
+
+ if (tmp1 < tmp2)
+ {
+ /* Hard reg REGNO was used less in total by local regs
+ than it would be used by this one allocno! */
+ int k;
+ for (k = 0; k < max_regno; k++)
+ if (reg_renumber[k] >= 0)
+ {
+ int r = reg_renumber[k];
+ int endregno
+ = r + HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (k));
- if (regno >= r && regno < endregno)
- reg_renumber[k] = -1;
- }
+ if (regno >= r && regno < endregno)
+ reg_renumber[k] = -1;
+ }
- best_reg = regno;
- break;
+ best_reg = regno;
+ break;
+ }
}
}
}
/* NOTE(review): the enclosing function's header is elided; this body
   matches mark_elimination in GCC's global.c -- when hard reg FROM is
   eliminated in favor of TO, each basic block's live-at-start set is
   updated accordingly.  The hunk replaces open-coded REGSET_ELT_BITS
   bit twiddling with the REGNO_REG_SET_P / CLEAR_REGNO_REG_SET /
   SET_REGNO_REG_SET macros.  */
int i;
for (i = 0; i < n_basic_blocks; i++)
- if ((basic_block_live_at_start[i][from / REGSET_ELT_BITS]
- & ((REGSET_ELT_TYPE) 1 << (from % REGSET_ELT_BITS))) != 0)
+ if (REGNO_REG_SET_P (basic_block_live_at_start[i], from))
{
- basic_block_live_at_start[i][from / REGSET_ELT_BITS]
- &= ~ ((REGSET_ELT_TYPE) 1 << (from % REGSET_ELT_BITS));
- basic_block_live_at_start[i][to / REGSET_ELT_BITS]
- |= ((REGSET_ELT_TYPE) 1 << (to % REGSET_ELT_BITS));
+ CLEAR_REGNO_REG_SET (basic_block_live_at_start[i], from);
+ SET_REGNO_REG_SET (basic_block_live_at_start[i], to);
}
}
\f