/* Perform branch target register load optimizations.
- Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
+ Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "function.h"
#include "except.h"
#include "tm_p.h"
+#include "toplev.h"
+#include "tree-pass.h"
/* Target register optimizations - these are performed after reload. */
as appropriate. */
char other_btr_uses_before_def;
char other_btr_uses_after_use;
+ /* We set own_end when we have moved a definition into a dominator.
+ Thus, when a later combination removes this definition again, we know
+ to clear out btrs_live_at_end again. */
+ char own_end;
bitmap live_range;
} *btr_def;
static void build_btr_def_use_webs (fibheap_t);
static int block_at_edge_of_live_range_p (int, btr_def);
static void clear_btr_from_live_range (btr_def def);
-static void add_btr_to_live_range (btr_def);
+static void add_btr_to_live_range (btr_def, int);
static void augment_live_range (bitmap, HARD_REG_SET *, basic_block,
- basic_block);
+ basic_block, int);
static int choose_btr (HARD_REG_SET);
static void combine_btr_defs (btr_def, HARD_REG_SET *);
static void btr_def_live_range (btr_def, HARD_REG_SET *);
defs_uses_info info;
sbitmap_vector_zero (bb_gen, n_basic_blocks);
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
{
basic_block bb = BASIC_BLOCK (i);
int reg;
CLEAR_HARD_REG_SET (info.btrs_written_in_block);
for (reg = first_btr; reg <= last_btr; reg++)
if (TEST_HARD_REG_BIT (all_btrs, reg)
- && REGNO_REG_SET_P (bb->global_live_at_start, reg))
+ && REGNO_REG_SET_P (bb->il.rtl->global_live_at_start, reg))
SET_HARD_REG_BIT (info.btrs_live_in_block, reg);
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
SET_BIT (btr_defset[regno - first_btr], insn_uid);
note_other_use_this_block (regno, info.users_this_bb);
}
+ /* Check for the blockage emitted by expand_nl_goto_receiver. */
+ else if (current_function_has_nonlocal_label
+ && GET_CODE (PATTERN (insn)) == ASM_INPUT)
+ {
+ btr_user user;
+
+ /* Do the equivalent of calling note_other_use_this_block
+ for every target register. */
+ for (user = info.users_this_bb; user != NULL;
+ user = user->next)
+ if (user->use)
+ user->other_use_this_block = 1;
+ IOR_HARD_REG_SET (info.btrs_written_in_block, all_btrs);
+ IOR_HARD_REG_SET (info.btrs_live_in_block, all_btrs);
+ sbitmap_zero (info.bb_gen);
+ }
else
{
if (btr_referenced_p (PATTERN (insn), NULL))
COPY_HARD_REG_SET (btrs_live[i], info.btrs_live_in_block);
COPY_HARD_REG_SET (btrs_written[i], info.btrs_written_in_block);
- REG_SET_TO_HARD_REG_SET (btrs_live_at_end[i], bb->global_live_at_end);
+ REG_SET_TO_HARD_REG_SET (btrs_live_at_end[i], bb->il.rtl->global_live_at_end);
/* If this block ends in a jump insn, add any uses or even clobbers
of branch target registers that it might have. */
for (insn = BB_END (bb); insn != BB_HEAD (bb) && ! INSN_P (insn); )
/* For each basic block, form the set BB_KILL - the set
of definitions that the block kills. */
sbitmap_vector_zero (bb_kill, n_basic_blocks);
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
{
for (regno = first_btr; regno <= last_btr; regno++)
if (TEST_HARD_REG_BIT (all_btrs, regno)
int changed;
sbitmap bb_in = sbitmap_alloc (max_uid);
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
sbitmap_copy (bb_out[i], bb_gen[i]);
changed = 1;
while (changed)
{
changed = 0;
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
{
sbitmap_union_of_preds (bb_in, bb_out, i);
changed |= sbitmap_union_of_diff_cg (bb_out[i], bb_gen[i],
/* Link uses to the uses lists of all of their reaching defs.
Count up the number of reaching defs of each use. */
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
{
basic_block bb = BASIC_BLOCK (i);
rtx insn;
{
/* Find all the reaching defs for this use. */
sbitmap reaching_defs_of_reg = sbitmap_alloc(max_uid);
- int uid;
+ unsigned int uid = 0;
+ sbitmap_iterator sbi;
if (user->use)
sbitmap_a_and_b (
reaching_defs,
btr_defset[reg - first_btr]);
}
- EXECUTE_IF_SET_IN_SBITMAP (reaching_defs_of_reg, 0, uid,
+ EXECUTE_IF_SET_IN_SBITMAP (reaching_defs_of_reg, 0, uid, sbi)
{
btr_def def = def_array[uid];
def->other_btr_uses_after_use = 1;
user->next = def->uses;
def->uses = user;
- });
+ }
sbitmap_free (reaching_defs_of_reg);
}
build_btr_def_use_webs (fibheap_t all_btr_defs)
{
const int max_uid = get_max_uid ();
- btr_def *def_array = xcalloc (max_uid, sizeof (btr_def));
- btr_user *use_array = xcalloc (max_uid, sizeof (btr_user));
+ btr_def *def_array = XCNEWVEC (btr_def, max_uid);
+ btr_user *use_array = XCNEWVEC (btr_user, max_uid);
sbitmap *btr_defset = sbitmap_vector_alloc (
(last_btr - first_btr) + 1, max_uid);
sbitmap *bb_gen = sbitmap_vector_alloc (n_basic_blocks, max_uid);
- HARD_REG_SET *btrs_written = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
+ HARD_REG_SET *btrs_written = XCNEWVEC (HARD_REG_SET, n_basic_blocks);
sbitmap *bb_kill;
sbitmap *bb_out;
dump_btrs_live (bb);
}
}
+ if (def->own_end)
+ CLEAR_HARD_REG_BIT (btrs_live_at_end[def->bb->index], def->btr);
}
/* We are adding the def/use web DEF. Add the target register used
in this web to the live set of all of the basic blocks that contain
- the live range of the web. */
+ the live range of the web.
+ If OWN_END is set, also show that the register is live from our
+ definitions at the end of the basic block where it is defined. */
static void
-add_btr_to_live_range (btr_def def)
+add_btr_to_live_range (btr_def def, int own_end)
{
unsigned bb;
bitmap_iterator bi;
if (dump_file)
dump_btrs_live (bb);
}
+ if (own_end)
+ {
+ SET_HARD_REG_BIT (btrs_live_at_end[def->bb->index], def->btr);
+ def->own_end = 1;
+ }
}
/* Update a live range to contain the basic block NEW_BLOCK, and all
all other blocks in the existing live range.
Also add to the set BTRS_LIVE_IN_RANGE all target registers that
are live in the blocks that we add to the live range.
+ If FULL_RANGE is set, include the full live range of NEW_BB;
+ otherwise, if NEW_BB dominates HEAD_BB, only add registers that
+ are live at the end of NEW_BB for NEW_BB itself.
It is a precondition that either NEW_BLOCK dominates HEAD,or
HEAD dom NEW_BLOCK. This is used to speed up the
implementation of this function. */
static void
augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
- basic_block head_bb, basic_block new_bb)
+ basic_block head_bb, basic_block new_bb, int full_range)
{
basic_block *worklist, *tos;
- tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+ tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
if (dominated_by_p (CDI_DOMINATORS, new_bb, head_bb))
- *tos++ = new_bb;
+ {
+ if (new_bb == head_bb)
+ {
+ if (full_range)
+ IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[new_bb->index]);
+ free (tos);
+ return;
+ }
+ *tos++ = new_bb;
+ }
else
{
edge e;
gcc_assert (dominated_by_p (CDI_DOMINATORS, head_bb, new_bb));
+ IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[head_bb->index]);
bitmap_set_bit (live_range, new_block);
- if (flag_btr_bb_exclusive)
+ /* A previous btr migration could have caused a register to be
+ live just at the end of new_block which we need in full, so
+ use btrs_live_at_end even if full_range is set. */
+ IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live_at_end[new_block]);
+ if (full_range)
IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[new_block]);
- else
- {
- IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live_at_end[new_block]);
- IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[head_bb->index]);
- }
if (dump_file)
{
fprintf (dump_file,
bitmap_set_bit (live_range, bb->index);
IOR_HARD_REG_SET (*btrs_live_in_range,
btrs_live[bb->index]);
+ /* A previous btr migration could have caused a register to be
+ live just at the end of a block which we need in full. */
+ IOR_HARD_REG_SET (*btrs_live_in_range,
+ btrs_live_at_end[bb->index]);
if (dump_file)
{
fprintf (dump_file,
{
btr_user user;
- def->live_range = BITMAP_XMALLOC ();
+ def->live_range = BITMAP_ALLOC (NULL);
bitmap_set_bit (def->live_range, def->bb->index);
COPY_HARD_REG_SET (*btrs_live_in_range,
for (user = def->uses; user != NULL; user = user->next)
augment_live_range (def->live_range, btrs_live_in_range,
- def->bb, user->bb);
+ def->bb, user->bb,
+ (flag_btr_bb_exclusive
+ || user->insn != BB_END (def->bb)
+ || !JUMP_P (user->insn)));
}
else
{
target registers live over the merged range. */
int btr;
HARD_REG_SET combined_btrs_live;
- bitmap combined_live_range = BITMAP_XMALLOC ();
+ bitmap combined_live_range = BITMAP_ALLOC (NULL);
btr_user user;
if (other_def->live_range == NULL)
for (user = other_def->uses; user != NULL; user = user->next)
augment_live_range (combined_live_range, &combined_btrs_live,
- def->bb, user->bb);
+ def->bb, user->bb,
+ (flag_btr_bb_exclusive
+ || user->insn != BB_END (def->bb)
+ || !JUMP_P (user->insn)));
btr = choose_btr (combined_btrs_live);
if (btr != -1)
clear_btr_from_live_range (other_def);
other_def->uses = NULL;
bitmap_copy (def->live_range, combined_live_range);
- if (other_def->other_btr_uses_after_use)
+ if (other_def->btr == btr && other_def->other_btr_uses_after_use)
def->other_btr_uses_after_use = 1;
COPY_HARD_REG_SET (*btrs_live_in_range, combined_btrs_live);
delete_insn (other_def->insn);
}
- BITMAP_XFREE (combined_live_range);
+ BITMAP_FREE (combined_live_range);
}
}
}
def->bb = new_def_bb;
def->luid = 0;
def->cost = basic_block_freq (new_def_bb);
- def->other_btr_uses_before_def
- = TEST_HARD_REG_BIT (btrs_live[b->index], btr) ? 1 : 0;
bitmap_copy (def->live_range, live_range);
combine_btr_defs (def, btrs_live_in_range);
btr = def->btr;
- add_btr_to_live_range (def);
+ def->other_btr_uses_before_def
+ = TEST_HARD_REG_BIT (btrs_live[b->index], btr) ? 1 : 0;
+ add_btr_to_live_range (def, 1);
if (LABEL_P (insp))
insp = NEXT_INSN (insp);
/* N.B.: insp is expected to be NOTE_INSN_BASIC_BLOCK now. Some
}
btr_def_live_range (def, &btrs_live_in_range);
- live_range = BITMAP_XMALLOC ();
+ live_range = BITMAP_ALLOC (NULL);
bitmap_copy (live_range, def->live_range);
#ifdef INSN_SCHEDULING
|| (try_freq == def_basic_block_freq && btr_used_near_def))
{
int btr;
- augment_live_range (live_range, &btrs_live_in_range, def->bb, try);
+ augment_live_range (live_range, &btrs_live_in_range, def->bb, try,
+ flag_btr_bb_exclusive);
if (dump_file)
{
fprintf (dump_file, "Now btrs live in range are: ");
if (dump_file)
fprintf (dump_file, "failed to move\n");
}
- BITMAP_XFREE (live_range);
+ BITMAP_FREE (live_range);
return !give_up;
}
{
int i;
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < n_basic_blocks; i++)
{
basic_block bb = BASIC_BLOCK (i);
fprintf(dump_file,
}
}
else
- {
- if (def->live_range)
- BITMAP_XFREE (def->live_range);
- }
+ BITMAP_FREE (def->live_range);
}
free (btrs_live);
cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
#endif
- life_analysis (NULL, 0);
+ life_analysis (0);
/* Dominator info is also needed for migrate_btr_def. */
calculate_dominance_info (CDI_DOMINATORS);
PROP_DEATH_NOTES | PROP_REG_INFO);
}
}
+\f
+/* Gate for the post-reload branch target register load optimization
+   pass: run only when optimizing and -fbranch-target-load-optimize2
+   was requested.  */
+static bool
+gate_handle_branch_target_load_optimize (void)
+{
+  return (optimize > 0 && flag_branch_target_load_optimize2);
+}
+
+
+/* Execute function for the pass: invoke the branch target register
+   load optimizer after the epilogue has been generated.  Warns (once
+   per compilation, via the static WARNED flag) when both the pre- and
+   post-reload variants of the pass are enabled, since running the
+   optimization twice is not a supported configuration.  Returns 0
+   (no extra TODO flags).  */
+static unsigned int
+rest_of_handle_branch_target_load_optimize (void)
+{
+  static int warned = 0;
+
+  /* Leave this a warning for now so that it is possible to experiment
+     with running this pass twice.  In 3.6, we should either make this
+     an error, or use separate dump files.  */
+  if (flag_branch_target_load_optimize
+      && flag_branch_target_load_optimize2
+      && !warned)
+    {
+      warning (0, "branch target register load optimization is not intended "
+		  "to be run twice");
+
+      warned = 1;
+    }
+
+  branch_target_load_optimize (epilogue_completed);
+  return 0;
+}
+
+/* Pass descriptor for the post-reload branch target register load
+   optimization; registered with the pass manager and dumped under
+   the name "btl".  */
+struct tree_opt_pass pass_branch_target_load_optimize =
+{
+  "btl",                                /* name */
+  gate_handle_branch_target_load_optimize,      /* gate */
+  rest_of_handle_branch_target_load_optimize,   /* execute */
+  NULL,                                 /* sub */
+  NULL,                                 /* next */
+  0,                                    /* static_pass_number */
+  0,                                    /* tv_id */
+  0,                                    /* properties_required */
+  0,                                    /* properties_provided */
+  0,                                    /* properties_destroyed */
+  0,                                    /* todo_flags_start */
+  TODO_dump_func |
+  TODO_ggc_collect,                     /* todo_flags_finish */
+  'd'                                   /* letter */
+};
+