/* Allocate registers for pseudo-registers that span basic blocks.
Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
- 1999, 2000, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-
#include "machmode.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
-#include "basic-block.h"
#include "regs.h"
#include "function.h"
#include "insn-config.h"
#include "reload.h"
#include "output.h"
#include "toplev.h"
+#include "tree-pass.h"
+#include "timevar.h"
/* This pass of the compiler performs global register allocation.
It assigns hard register numbers to all the pseudo registers
/* Number of calls crossed by each allocno. */
int calls_crossed;
+ /* Number of calls that might throw, crossed by each allocno. */
+ int throwing_calls_crossed;
+
/* Number of refs to each allocno. */
int n_refs;
static INT_TYPE *conflicts;
-/* Number of ints require to hold max_allocno bits.
+/* Number of ints required to hold max_allocno bits.
This is the length of a row in `conflicts'. */
static int allocno_row_words;
static void allocate_bb_info (void);
static void free_bb_info (void);
-static void check_earlyclobber (rtx);
-static bool regclass_intersect (enum reg_class, enum reg_class);
+static bool check_earlyclobber (rtx);
static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
static int mark_reg_use_for_earlyclobber (rtx *, void *);
static void calculate_local_reg_bb_info (void);
static void set_up_bb_rts_numbers (void);
static int rpost_cmp (const void *, const void *);
-static bool modify_bb_reg_pav (basic_block, basic_block, bool);
static void calculate_reg_pav (void);
static void modify_reg_pav (void);
static void make_accurate_live_analysis (void);
Return value is nonzero if reload failed
and we must not do any more for this function. */
-int
+static int
global_alloc (FILE *file)
{
int retval;
&& (! current_function_has_nonlocal_label
|| REG_N_CALLS_CROSSED (i) == 0))
{
- if (reg_renumber[i] < 0 && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
+ if (reg_renumber[i] < 0
+ && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
reg_allocno[i] = reg_allocno[reg_may_share[i]];
else
reg_allocno[i] = max_allocno++;
- if (REG_LIVE_LENGTH (i) == 0)
- abort ();
+ gcc_assert (REG_LIVE_LENGTH (i));
}
else
reg_allocno[i] = -1;
allocno[num].reg = i;
allocno[num].size = PSEUDO_REGNO_SIZE (i);
allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
+ allocno[num].throwing_calls_crossed
+ += REG_N_THROWING_CALLS_CROSSED (i);
allocno[num].n_refs += REG_N_REFS (i);
allocno[num].freq += REG_FREQ (i);
if (allocno[num].live_length < REG_LIVE_LENGTH (i))
free (allocno_order);
}
- /* Do the reloads now while the allocno data still exist, so that we can
+ /* Do the reloads now while the allocno data still exists, so that we can
try to assign new hard regs to any pseudo regs that are spilled. */
#if 0 /* We need to eliminate regs even if there is no rtl code,
for the sake of debugging information. */
- if (n_basic_blocks > 0)
+ if (n_basic_blocks > NUM_FIXED_BLOCKS)
#endif
{
build_insn_chain (get_insns ());
static void
global_conflicts (void)
{
- int i;
+ unsigned i;
basic_block b;
rtx insn;
int *block_start_allocnos;
since one hard reg can be used with various sizes.
Therefore, we must require that all the hard regs
implicitly live as part of a multi-word hard reg
- are explicitly marked in basic_block_live_at_start. */
+ be explicitly marked in basic_block_live_at_start. */
{
- regset old = b->global_live_at_start;
+ regset old = b->il.rtl->global_live_at_start;
int ax = 0;
+ reg_set_iterator rsi;
REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
- EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i,
- {
- int a = reg_allocno[i];
- if (a >= 0)
- {
- SET_ALLOCNO_LIVE (a);
- block_start_allocnos[ax++] = a;
- }
- else if ((a = reg_renumber[i]) >= 0)
- mark_reg_live_nc
- (a, PSEUDO_REGNO_MODE (i));
- });
+ EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i, rsi)
+ {
+ int a = reg_allocno[i];
+ if (a >= 0)
+ {
+ SET_ALLOCNO_LIVE (a);
+ block_start_allocnos[ax++] = a;
+ }
+ else if ((a = reg_renumber[i]) >= 0)
+ mark_reg_live_nc (a, PSEUDO_REGNO_MODE (i));
+ }
/* Record that each allocno now live conflicts with each hard reg
now live.
- It is not necessary to mark any conflicts between pseudos as
+ It is not necessary to mark any conflicts between pseudos at
this point, even for pseudos which are live at the start of
the basic block.
evaluates X.
3. Either X or Y is not evaluated on the path to P
- (ie it is used uninitialized) and thus the
+ (i.e. it is used uninitialized) and thus the
conflict can be ignored.
In cases #1 and #2 the conflict will be recorded when we
/* Pseudos can't go in stack regs at the start of a basic block that
is reached by an abnormal edge. Likewise for call clobbered regs,
- because because caller-save, fixup_abnormal_edges, and possibly
- the table driven EH machinery are not quite ready to handle such
- regs live across such edges. */
+ because caller-save, fixup_abnormal_edges and possibly the table
+ driven EH machinery are not quite ready to handle such regs live
+ across such edges. */
{
edge e;
+ edge_iterator ei;
- for (e = b->pred; e ; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, b->preds)
if (e->flags & EDGE_ABNORMAL)
break;
{
/* Did not find a register. If it would be profitable to
allocate a call-clobbered register and save and restore it
- around calls, do that. */
+ around calls, do that. Don't do this if it crosses any calls
+ that might throw. */
if (! accept_call_clobbered
&& allocno[num].calls_crossed != 0
+ && allocno[num].throwing_calls_crossed == 0
&& CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
allocno[num].calls_crossed))
{
}
}
\f
-/* Like mark_reg_set except notice just CLOBBERs; ignore SETs. */
+/* Like mark_reg_store except notice just CLOBBERs; ignore SETs. */
static void
mark_reg_clobber (rtx reg, rtx setter, void *data)
FOR_EACH_BB (bb)
{
- regset r = bb->global_live_at_start;
+ regset r = bb->il.rtl->global_live_at_start;
if (REGNO_REG_SET_P (r, from))
{
CLEAR_REGNO_REG_SET (r, from);
struct insn_chain **p = &reload_insn_chain;
struct insn_chain *prev = 0;
basic_block b = ENTRY_BLOCK_PTR->next_bb;
- regset_head live_relevant_regs_head;
- live_relevant_regs = INITIALIZE_REG_SET (live_relevant_regs_head);
+ live_relevant_regs = ALLOC_REG_SET (®_obstack);
for (; first; first = NEXT_INSN (first))
{
if (first == BB_HEAD (b))
{
- int i;
+ unsigned i;
+ bitmap_iterator bi;
CLEAR_REG_SET (live_relevant_regs);
- EXECUTE_IF_SET_IN_BITMAP
- (b->global_live_at_start, 0, i,
- {
- if (i < FIRST_PSEUDO_REGISTER
- ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
- : reg_renumber[i] >= 0)
- SET_REGNO_REG_SET (live_relevant_regs, i);
- });
+ EXECUTE_IF_SET_IN_BITMAP (b->il.rtl->global_live_at_start, 0, i, bi)
+ {
+ if (i < FIRST_PSEUDO_REGISTER
+ ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
+ : reg_renumber[i] >= 0)
+ SET_REGNO_REG_SET (live_relevant_regs, i);
+ }
}
if (!NOTE_P (first) && !BARRIER_P (first))
the previous real insn is a JUMP_INSN. */
if (b == EXIT_BLOCK_PTR)
{
- for (first = NEXT_INSN (first) ; first; first = NEXT_INSN (first))
- if (INSN_P (first)
- && GET_CODE (PATTERN (first)) != USE
- && ! ((GET_CODE (PATTERN (first)) == ADDR_VEC
- || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
- && prev_real_insn (first) != 0
- && JUMP_P (prev_real_insn (first))))
- abort ();
+#ifdef ENABLE_CHECKING
+ for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
+ gcc_assert (!INSN_P (first)
+ || GET_CODE (PATTERN (first)) == USE
+ || ((GET_CODE (PATTERN (first)) == ADDR_VEC
+ || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
+ && prev_real_insn (first) != 0
+ && JUMP_P (prev_real_insn (first))));
+#endif
break;
}
}
/* Registers correspondingly killed (clobbered) and defined but not
killed afterward in the basic block. */
bitmap killed, avloc;
- /* Registers partially available correspondingly at the start and
- end of the basic block. */
- bitmap pavin, pavout;
+ /* Registers partially available and living (in other words whose
+ values were calculated and used) correspondingly at the start
+ and end of the basic block. */
+ bitmap live_pavin, live_pavout;
};
/* Macros for accessing data flow information of basic blocks. */
#define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
/* The function allocates the info structures of each basic block. It
- also initialized PAVIN and PAVOUT as if all hard registers were
- partially available. */
+ also initializes LIVE_PAVIN and LIVE_PAVOUT as if all hard
+ registers were partially available. */
static void
allocate_bb_info (void)
bitmap init;
alloc_aux_for_blocks (sizeof (struct bb_info));
- init = BITMAP_XMALLOC ();
+ init = BITMAP_ALLOC (NULL);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
bitmap_set_bit (init, i);
FOR_EACH_BB (bb)
{
bb_info = bb->aux;
- bb_info->earlyclobber = BITMAP_XMALLOC ();
- bb_info->avloc = BITMAP_XMALLOC ();
- bb_info->killed = BITMAP_XMALLOC ();
- bb_info->pavin = BITMAP_XMALLOC ();
- bb_info->pavout = BITMAP_XMALLOC ();
- bitmap_copy (bb_info->pavin, init);
- bitmap_copy (bb_info->pavout, init);
+ bb_info->earlyclobber = BITMAP_ALLOC (NULL);
+ bb_info->avloc = BITMAP_ALLOC (NULL);
+ bb_info->killed = BITMAP_ALLOC (NULL);
+ bb_info->live_pavin = BITMAP_ALLOC (NULL);
+ bb_info->live_pavout = BITMAP_ALLOC (NULL);
+ bitmap_copy (bb_info->live_pavin, init);
+ bitmap_copy (bb_info->live_pavout, init);
}
- BITMAP_XFREE (init);
+ BITMAP_FREE (init);
}
/* The function frees the allocated info of all basic blocks. */
FOR_EACH_BB (bb)
{
bb_info = BB_INFO (bb);
- BITMAP_XFREE (bb_info->pavout);
- BITMAP_XFREE (bb_info->pavin);
- BITMAP_XFREE (bb_info->killed);
- BITMAP_XFREE (bb_info->avloc);
- BITMAP_XFREE (bb_info->earlyclobber);
+ BITMAP_FREE (bb_info->live_pavout);
+ BITMAP_FREE (bb_info->live_pavin);
+ BITMAP_FREE (bb_info->killed);
+ BITMAP_FREE (bb_info->avloc);
+ BITMAP_FREE (bb_info->earlyclobber);
}
free_aux_for_blocks ();
}
/* Classes of registers which could be early clobbered in the current
insn. */
-static varray_type earlyclobber_regclass;
+DEF_VEC_I(int);
+DEF_VEC_ALLOC_I(int,heap);
-/* The function stores classes of registers which could be early
- clobbered in INSN. */
+static VEC(int,heap) *earlyclobber_regclass;
-static void
+/* This function finds and stores register classes that could be early
+ clobbered in INSN. If any earlyclobber classes are found, the function
+ returns TRUE; in all other cases it returns FALSE. */
+
+static bool
check_earlyclobber (rtx insn)
{
int opno;
+ bool found = false;
extract_insn (insn);
- VARRAY_POP_ALL (earlyclobber_regclass);
+ VEC_truncate (int, earlyclobber_regclass, 0);
for (opno = 0; opno < recog_data.n_operands; opno++)
{
char c;
case ',':
if (amp_p && class != NO_REGS)
{
- for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1;
- i >= 0; i--)
- if (VARRAY_INT (earlyclobber_regclass, i) == (int) class)
- break;
- if (i < 0)
- VARRAY_PUSH_INT (earlyclobber_regclass, (int) class);
+ int rc;
+
+ found = true;
+ for (i = 0;
+ VEC_iterate (int, earlyclobber_regclass, i, rc);
+ i++)
+ {
+ if (rc == (int) class)
+ goto found_rc;
+ }
+
+ /* We use VEC_quick_push here because
+ earlyclobber_regclass holds no more than
+ N_REG_CLASSES elements. */
+ VEC_quick_push (int, earlyclobber_regclass, (int) class);
+ found_rc:
+ ;
}
amp_p = false;
p += CONSTRAINT_LEN (c, p);
}
}
-}
-/* The function returns true if register classes C1 and C2 inetrsect. */
-
-static bool
-regclass_intersect (enum reg_class c1, enum reg_class c2)
-{
- HARD_REG_SET rs, zero;
-
- CLEAR_HARD_REG_SET (zero);
- COPY_HARD_REG_SET(rs, reg_class_contents [c1]);
- AND_HARD_REG_SET (rs, reg_class_contents [c2]);
- GO_IF_HARD_REG_EQUAL (zero, rs, yes);
- return true;
- yes:
- return false;
+ return found;
}
/* The function checks that pseudo-register *X has a class
intersecting with the class of pseudo-register could be early
- clobbered in the same insn. */
+ clobbered in the same insn.
+ This function is a no-op if earlyclobber_regclass is empty. */
static int
mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
basic_block bb = data;
struct bb_info *bb_info = BB_INFO (bb);
- if (GET_CODE (*x) == REG && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
+ if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
{
+ int rc;
+
regno = REGNO (*x);
if (bitmap_bit_p (bb_info->killed, regno)
|| bitmap_bit_p (bb_info->avloc, regno))
return 0;
pref_class = reg_preferred_class (regno);
alt_class = reg_alternate_class (regno);
- for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1; i >= 0; i--)
- if (regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
- pref_class)
- || (VARRAY_INT (earlyclobber_regclass, i) != NO_REGS
- && regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
- alt_class)))
- {
- bitmap_set_bit (bb_info->earlyclobber, regno);
- break;
- }
+ for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
+ {
+ if (reg_classes_intersect_p (rc, pref_class)
+ || (rc != NO_REGS
+ && reg_classes_intersect_p (rc, alt_class)))
+ {
+ bitmap_set_bit (bb_info->earlyclobber, regno);
+ break;
+ }
+ }
}
return 0;
}
basic_block bb;
rtx insn, bound;
- VARRAY_INT_INIT (earlyclobber_regclass, 20,
- "classes of registers early clobbered in an insn");
+ /* We know that earlyclobber_regclass holds no more than
+ N_REG_CLASSES elements. See check_earlyclobber. */
+ earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
FOR_EACH_BB (bb)
{
bound = NEXT_INSN (BB_END (bb));
if (INSN_P (insn))
{
note_stores (PATTERN (insn), mark_reg_change, bb);
- check_earlyclobber (insn);
- note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
+ if (check_earlyclobber (insn))
+ note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
}
}
+ VEC_free (int, heap, earlyclobber_regclass);
}
/* The function sets up reverse post-order number of each basic
int i;
int *rts_order;
- rts_order = xmalloc (sizeof (int) * n_basic_blocks);
- flow_reverse_top_sort_order_compute (rts_order);
- for (i = 0; i < n_basic_blocks; i++)
+ rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
+ post_order_compute (rts_order, false);
+ for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
free (rts_order);
}
return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
}
-/* The function calculates partial availability of registers. The
- function calculates partial availability at the end of basic block
- BB by propagating partial availability at end of predecessor basic
- block PRED. The function returns true if the partial availability
- at the end of BB has been changed or if CHANGED_P. We have the
- following equations:
+/* Temporary bitmap used for live_pavin, live_pavout calculation. */
+static bitmap temp_bitmap;
- bb.pavin = empty for entry block | union (pavout of predecessors)
- bb.pavout = union (bb.pavin - b.killed, bb.avloc) */
+/* The function calculates partial register availability according to
+ the following equations:
-static bool
-modify_bb_reg_pav (basic_block bb, basic_block pred, bool changed_p)
-{
- struct bb_info *bb_info;
- bitmap bb_pavin, bb_pavout;
-
- bb_info = BB_INFO (bb);
- bb_pavin = bb_info->pavin;
- bb_pavout = bb_info->pavout;
- if (pred->index != ENTRY_BLOCK)
- bitmap_a_or_b (bb_pavin, bb_pavin, BB_INFO (pred)->pavout);
- changed_p |= bitmap_union_of_diff (bb_pavout, bb_info->avloc,
- bb_pavin, bb_info->killed);
- return changed_p;
-}
-
-/* The function calculates partial register availability. */
+ bb.live_pavin
+ = empty for entry block
+ | union (live_pavout of predecessors) & global_live_at_start
+ bb.live_pavout = union (bb.live_pavin - bb.killed, bb.avloc)
+ & global_live_at_end */
static void
calculate_reg_pav (void)
{
basic_block bb, succ;
edge e;
- bool changed_p;
int i, nel;
- varray_type bbs, new_bbs, temp;
+ VEC(basic_block,heap) *bbs, *new_bbs, *temp;
basic_block *bb_array;
sbitmap wset;
- VARRAY_BB_INIT (bbs, n_basic_blocks, "basic blocks");
- VARRAY_BB_INIT (new_bbs, n_basic_blocks, "basic blocks for the next iter.");
+ bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
+ new_bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
+ temp_bitmap = BITMAP_ALLOC (NULL);
FOR_EACH_BB (bb)
{
- VARRAY_PUSH_BB (bbs, bb);
+ VEC_quick_push (basic_block, bbs, bb);
}
wset = sbitmap_alloc (n_basic_blocks + 1);
- while (VARRAY_ACTIVE_SIZE (bbs))
+ while (VEC_length (basic_block, bbs))
{
- bb_array = &VARRAY_BB (bbs, 0);
- nel = VARRAY_ACTIVE_SIZE (bbs);
+ bb_array = VEC_address (basic_block, bbs);
+ nel = VEC_length (basic_block, bbs);
qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
sbitmap_zero (wset);
for (i = 0; i < nel; i++)
{
+ edge_iterator ei;
+ struct bb_info *bb_info;
+ bitmap bb_live_pavin, bb_live_pavout;
+
bb = bb_array [i];
- changed_p = 0;
- for (e = bb->pred; e; e = e->pred_next)
- changed_p = modify_bb_reg_pav (bb, e->src, changed_p);
- if (changed_p)
- for (e = bb->succ; e; e = e->succ_next)
- {
- succ = e->dest;
- if (succ->index != EXIT_BLOCK && !TEST_BIT (wset, succ->index))
- {
- SET_BIT (wset, succ->index);
- VARRAY_PUSH_BB (new_bbs, succ);
- }
- }
+ bb_info = BB_INFO (bb);
+ bb_live_pavin = bb_info->live_pavin;
+ bb_live_pavout = bb_info->live_pavout;
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ {
+ basic_block pred = e->src;
+
+ if (pred->index != ENTRY_BLOCK)
+ bitmap_ior_into (bb_live_pavin, BB_INFO (pred)->live_pavout);
+ }
+ bitmap_and_into (bb_live_pavin, bb->il.rtl->global_live_at_start);
+ bitmap_ior_and_compl (temp_bitmap, bb_info->avloc,
+ bb_live_pavin, bb_info->killed);
+ bitmap_and_into (temp_bitmap, bb->il.rtl->global_live_at_end);
+ if (! bitmap_equal_p (temp_bitmap, bb_live_pavout))
+ {
+ bitmap_copy (bb_live_pavout, temp_bitmap);
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ {
+ succ = e->dest;
+ if (succ->index != EXIT_BLOCK
+ && !TEST_BIT (wset, succ->index))
+ {
+ SET_BIT (wset, succ->index);
+ VEC_quick_push (basic_block, new_bbs, succ);
+ }
+ }
+ }
}
temp = bbs;
bbs = new_bbs;
new_bbs = temp;
- VARRAY_POP_ALL (new_bbs);
+ VEC_truncate (basic_block, new_bbs, 0);
}
sbitmap_free (wset);
+ BITMAP_FREE (temp_bitmap);
+ VEC_free (basic_block, heap, new_bbs);
+ VEC_free (basic_block, heap, bbs);
}
/* The function modifies partial availability information for two
CLEAR_HARD_REG_SET (stack_hard_regs);
for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
SET_HARD_REG_BIT(stack_hard_regs, i);
- stack_regs = BITMAP_XMALLOC ();
+ stack_regs = BITMAP_ALLOC (NULL);
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
{
COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
insn if the pseudo-register is used first time in given BB
and not lived at the BB start. To prevent this we don't
change life information for such pseudo-registers. */
- bitmap_a_or_b (bb_info->pavin, bb_info->pavin, bb_info->earlyclobber);
+ bitmap_ior_into (bb_info->live_pavin, bb_info->earlyclobber);
#ifdef STACK_REGS
/* We can not use the same stack register for uninitialized
pseudo-register and another living pseudo-register because if the
uninitialized pseudo-register dies, subsequent pass reg-stack
will be confused (it will believe that the other register
dies). */
- bitmap_a_or_b (bb_info->pavin, bb_info->pavin, stack_regs);
+ bitmap_ior_into (bb_info->live_pavin, stack_regs);
#endif
}
#ifdef STACK_REGS
- BITMAP_XFREE (stack_regs);
+ BITMAP_FREE (stack_regs);
#endif
}
/* The following function makes live information more accurate by
modifying global_live_at_start and global_live_at_end of basic
- blocks. After the function call a register lives at a program
- point only if it is initialized on a path from CFG entry to the
- program point. The standard GCC life analysis permits registers to
- live uninitialized. */
+ blocks.
+
+ The standard GCC life analysis permits registers to live
+ uninitialized, for example:
+
+ R is never used
+ .....
+ Loop:
+ R is defined
+ ...
+ R is used.
+
+ With normal life_analysis, R would be live before "Loop:".
+ The result is that R causes many interferences that do not
+ serve any purpose.
+
+ After the function call a register lives at a program point
+ only if it is initialized on a path from CFG entry to the
+ program point. */
static void
make_accurate_live_analysis (void)
{
bb_info = BB_INFO (bb);
- bitmap_a_and_b (bb->global_live_at_start, bb->global_live_at_start,
- bb_info->pavin);
- bitmap_a_and_b (bb->global_live_at_end, bb->global_live_at_end,
- bb_info->pavout);
+ bitmap_and_into (bb->il.rtl->global_live_at_start, bb_info->live_pavin);
+ bitmap_and_into (bb->il.rtl->global_live_at_end, bb_info->live_pavout);
}
free_bb_info ();
}
+/* Run the old register allocator. Sets reload_completed as a side
+ effect: true unless global allocation followed by reload failed.
+ (Note: the function itself returns nothing.) */
+static void
+rest_of_handle_global_alloc (void)
+{
+ bool failure;
+
+ /* If optimizing, allocate remaining pseudo-regs. Do the reload
+ pass fixing up any insns that are invalid. */
+
+ if (optimize)
+ failure = global_alloc (dump_file);
+ else
+ {
+ build_insn_chain (get_insns ());
+ failure = reload (get_insns (), 0);
+ }
+
+ if (dump_enabled_p (pass_global_alloc.static_pass_number))
+ {
+ timevar_push (TV_DUMP);
+ dump_global_regs (dump_file);
+ timevar_pop (TV_DUMP);
+ }
+
+ gcc_assert (reload_completed || failure);
+ reload_completed = !failure;
+}
+
+struct tree_opt_pass pass_global_alloc =
+{
+ "greg", /* name */
+ NULL, /* gate */
+ rest_of_handle_global_alloc, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_GLOBAL_ALLOC, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_ggc_collect, /* todo_flags_finish */
+ 'g' /* letter */
+};
+