You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
#include "reload.h"
#include "output.h"
#include "toplev.h"
+#include "tree-pass.h"
+#include "timevar.h"
/* This pass of the compiler performs global register allocation.
It assigns hard register numbers to all the pseudo registers
/* Number of calls crossed by each allocno. */
int calls_crossed;
+ /* Number of calls that might throw crossed by each allocno. */
+ int throwing_calls_crossed;
+
/* Number of refs to each allocno. */
int n_refs;
static INT_TYPE *conflicts;
-/* Number of ints require to hold max_allocno bits.
+/* Number of ints required to hold max_allocno bits.
This is the length of a row in `conflicts'. */
static int allocno_row_words;
Return value is nonzero if reload failed
and we must not do any more for this function. */
-int
+static int
global_alloc (FILE *file)
{
int retval;
allocno[num].reg = i;
allocno[num].size = PSEUDO_REGNO_SIZE (i);
allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
+ allocno[num].throwing_calls_crossed
+ += REG_N_THROWING_CALLS_CROSSED (i);
allocno[num].n_refs += REG_N_REFS (i);
allocno[num].freq += REG_FREQ (i);
if (allocno[num].live_length < REG_LIVE_LENGTH (i))
free (allocno_order);
}
- /* Do the reloads now while the allocno data still exist, so that we can
+ /* Do the reloads now while the allocno data still exists, so that we can
try to assign new hard regs to any pseudo regs that are spilled. */
#if 0 /* We need to eliminate regs even if there is no rtl code,
for the sake of debugging information. */
- if (n_basic_blocks > 0)
+ if (n_basic_blocks > NUM_FIXED_BLOCKS)
#endif
{
build_insn_chain (get_insns ());
be explicitly marked in basic_block_live_at_start. */
{
- regset old = b->global_live_at_start;
+ regset old = b->il.rtl->global_live_at_start;
int ax = 0;
reg_set_iterator rsi;
/* Record that each allocno now live conflicts with each hard reg
now live.
- It is not necessary to mark any conflicts between pseudos as
+ It is not necessary to mark any conflicts between pseudos at
this point, even for pseudos which are live at the start of
the basic block.
/* Pseudos can't go in stack regs at the start of a basic block that
is reached by an abnormal edge. Likewise for call clobbered regs,
- because because caller-save, fixup_abnormal_edges, and possibly
- the table driven EH machinery are not quite ready to handle such
- regs live across such edges. */
+ because caller-save, fixup_abnormal_edges and possibly the table
+ driven EH machinery are not quite ready to handle such regs live
+ across such edges. */
{
edge e;
edge_iterator ei;
{
/* Did not find a register. If it would be profitable to
allocate a call-clobbered register and save and restore it
- around calls, do that. */
+ around calls, do that. Don't do this if it crosses any calls
+ that might throw. */
if (! accept_call_clobbered
&& allocno[num].calls_crossed != 0
+ && allocno[num].throwing_calls_crossed == 0
&& CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
allocno[num].calls_crossed))
{
}
}
\f
-/* Like mark_reg_set except notice just CLOBBERs; ignore SETs. */
+/* Like mark_reg_store except notice just CLOBBERs; ignore SETs. */
static void
mark_reg_clobber (rtx reg, rtx setter, void *data)
FOR_EACH_BB (bb)
{
- regset r = bb->global_live_at_start;
+ regset r = bb->il.rtl->global_live_at_start;
if (REGNO_REG_SET_P (r, from))
{
CLEAR_REGNO_REG_SET (r, from);
CLEAR_REG_SET (live_relevant_regs);
- EXECUTE_IF_SET_IN_BITMAP (b->global_live_at_start, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (b->il.rtl->global_live_at_start, 0, i, bi)
{
if (i < FIRST_PSEUDO_REGISTER
? ! TEST_HARD_REG_BIT (eliminable_regset, i)
/* Classes of registers which could be early clobbered in the current
insn. */
-DEF_VEC_P(int);
-DEF_VEC_ALLOC_P(int,heap);
+DEF_VEC_I(int);
+DEF_VEC_ALLOC_I(int,heap);
static VEC(int,heap) *earlyclobber_regclass;
int i;
int *rts_order;
- rts_order = xmalloc (sizeof (int) * n_basic_blocks);
- flow_reverse_top_sort_order_compute (rts_order);
- for (i = 0; i < n_basic_blocks; i++)
+ rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
+ post_order_compute (rts_order, false);
+ for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
free (rts_order);
}
basic_block bb, succ;
edge e;
int i, nel;
- varray_type bbs, new_bbs, temp;
+ VEC(basic_block,heap) *bbs, *new_bbs, *temp;
basic_block *bb_array;
sbitmap wset;
- VARRAY_BB_INIT (bbs, n_basic_blocks, "basic blocks");
- VARRAY_BB_INIT (new_bbs, n_basic_blocks, "basic blocks for the next iter.");
+ bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
+ new_bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
temp_bitmap = BITMAP_ALLOC (NULL);
FOR_EACH_BB (bb)
{
- VARRAY_PUSH_BB (bbs, bb);
+ VEC_quick_push (basic_block, bbs, bb);
}
wset = sbitmap_alloc (n_basic_blocks + 1);
- while (VARRAY_ACTIVE_SIZE (bbs))
+ while (VEC_length (basic_block, bbs))
{
- bb_array = &VARRAY_BB (bbs, 0);
- nel = VARRAY_ACTIVE_SIZE (bbs);
+ bb_array = VEC_address (basic_block, bbs);
+ nel = VEC_length (basic_block, bbs);
qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
sbitmap_zero (wset);
for (i = 0; i < nel; i++)
if (pred->index != ENTRY_BLOCK)
bitmap_ior_into (bb_live_pavin, BB_INFO (pred)->live_pavout);
}
- bitmap_and_into (bb_live_pavin, bb->global_live_at_start);
+ bitmap_and_into (bb_live_pavin, bb->il.rtl->global_live_at_start);
bitmap_ior_and_compl (temp_bitmap, bb_info->avloc,
bb_live_pavin, bb_info->killed);
- bitmap_and_into (temp_bitmap, bb->global_live_at_end);
+ bitmap_and_into (temp_bitmap, bb->il.rtl->global_live_at_end);
if (! bitmap_equal_p (temp_bitmap, bb_live_pavout))
{
bitmap_copy (bb_live_pavout, temp_bitmap);
&& !TEST_BIT (wset, succ->index))
{
SET_BIT (wset, succ->index);
- VARRAY_PUSH_BB (new_bbs, succ);
+ VEC_quick_push (basic_block, new_bbs, succ);
}
}
}
temp = bbs;
bbs = new_bbs;
new_bbs = temp;
- VARRAY_POP_ALL (new_bbs);
+ VEC_truncate (basic_block, new_bbs, 0);
}
sbitmap_free (wset);
BITMAP_FREE (temp_bitmap);
+ VEC_free (basic_block, heap, new_bbs);
+ VEC_free (basic_block, heap, bbs);
}
/* The function modifies partial availability information for two
{
bb_info = BB_INFO (bb);
- bitmap_and_into (bb->global_live_at_start, bb_info->live_pavin);
- bitmap_and_into (bb->global_live_at_end, bb_info->live_pavout);
+ bitmap_and_into (bb->il.rtl->global_live_at_start, bb_info->live_pavin);
+ bitmap_and_into (bb->il.rtl->global_live_at_end, bb_info->live_pavout);
}
free_bb_info ();
}
+/* Run the old (global + reload) register allocator.  Despite the
+ historical wording, this returns nothing: success of reload is
+ recorded in reload_completed instead. */
+static void
+rest_of_handle_global_alloc (void)
+{
+ bool failure;
+
+ /* If optimizing, allocate remaining pseudo-regs. Do the reload
+ pass fixing up any insns that are invalid. */
+
+ if (optimize)
+ failure = global_alloc (dump_file);
+ else
+ {
+ build_insn_chain (get_insns ());
+ failure = reload (get_insns (), 0);
+ }
+
+ if (dump_enabled_p (pass_global_alloc.static_pass_number))
+ {
+ timevar_push (TV_DUMP);
+ dump_global_regs (dump_file);
+ timevar_pop (TV_DUMP);
+ }
+
+ /* global_alloc/reload return nonzero on failure; reload_completed
+ must only be set when reload actually succeeded. */
+ gcc_assert (reload_completed || failure);
+ reload_completed = !failure;
+}
+
+/* Pass descriptor hooking the old global register allocator into the
+ tree-pass pass manager; executed via rest_of_handle_global_alloc. */
+struct tree_opt_pass pass_global_alloc =
+{
+ "greg", /* name */
+ NULL, /* gate */
+ rest_of_handle_global_alloc, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_GLOBAL_ALLOC, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_ggc_collect, /* todo_flags_finish */
+ 'g' /* letter */
+};
+