/* Allocate registers for pseudo-registers that span basic blocks.
Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
- 1999, 2000, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
static void allocate_bb_info (void);
static void free_bb_info (void);
static bool check_earlyclobber (rtx);
-static bool regclass_intersect (enum reg_class, enum reg_class);
static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
static int mark_reg_use_for_earlyclobber (rtx *, void *);
static void calculate_local_reg_bb_info (void);
killed afterward in the basic block. */
bitmap killed, avloc;
/* Registers partially available and living (in other words whose
- values were calclualted and used) correspondingly at the start
+ values were calculated and used) respectively at the start
and end of the basic block. */
bitmap live_pavin, live_pavout;
};
bitmap init;
alloc_aux_for_blocks (sizeof (struct bb_info));
- init = BITMAP_XMALLOC ();
+ init = BITMAP_ALLOC (NULL);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
bitmap_set_bit (init, i);
FOR_EACH_BB (bb)
{
bb_info = bb->aux;
- bb_info->earlyclobber = BITMAP_XMALLOC ();
- bb_info->avloc = BITMAP_XMALLOC ();
- bb_info->killed = BITMAP_XMALLOC ();
- bb_info->live_pavin = BITMAP_XMALLOC ();
- bb_info->live_pavout = BITMAP_XMALLOC ();
+ bb_info->earlyclobber = BITMAP_ALLOC (NULL);
+ bb_info->avloc = BITMAP_ALLOC (NULL);
+ bb_info->killed = BITMAP_ALLOC (NULL);
+ bb_info->live_pavin = BITMAP_ALLOC (NULL);
+ bb_info->live_pavout = BITMAP_ALLOC (NULL);
bitmap_copy (bb_info->live_pavin, init);
bitmap_copy (bb_info->live_pavout, init);
}
- BITMAP_XFREE (init);
+ BITMAP_FREE (init);
}
/* The function frees the allocated info of all basic blocks. */
FOR_EACH_BB (bb)
{
bb_info = BB_INFO (bb);
- BITMAP_XFREE (bb_info->live_pavout);
- BITMAP_XFREE (bb_info->live_pavin);
- BITMAP_XFREE (bb_info->killed);
- BITMAP_XFREE (bb_info->avloc);
- BITMAP_XFREE (bb_info->earlyclobber);
+ BITMAP_FREE (bb_info->live_pavout);
+ BITMAP_FREE (bb_info->live_pavin);
+ BITMAP_FREE (bb_info->killed);
+ BITMAP_FREE (bb_info->avloc);
+ BITMAP_FREE (bb_info->earlyclobber);
}
free_aux_for_blocks ();
}
/* Classes of registers which could be early clobbered in the current
insn. */
-static varray_type earlyclobber_regclass;
+DEF_VEC_P(int);
+DEF_VEC_ALLOC_P(int,heap);
+
+static VEC(int,heap) *earlyclobber_regclass;
/* This function finds and stores register classes that could be early
clobbered in INSN. If any earlyclobber classes are found, the function
extract_insn (insn);
- VARRAY_POP_ALL (earlyclobber_regclass);
+ VEC_truncate (int, earlyclobber_regclass, 0);
for (opno = 0; opno < recog_data.n_operands; opno++)
{
char c;
case ',':
if (amp_p && class != NO_REGS)
{
+ int rc;
+
found = true;
- for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1;
- i >= 0; i--)
- if (VARRAY_INT (earlyclobber_regclass, i) == (int) class)
- break;
- if (i < 0)
- VARRAY_PUSH_INT (earlyclobber_regclass, (int) class);
+ for (i = 0;
+ VEC_iterate (int, earlyclobber_regclass, i, rc);
+ i++)
+ {
+ if (rc == (int) class)
+ goto found_rc;
+ }
+
+ /* We use VEC_quick_push here because
+ earlyclobber_regclass holds no more than
+ N_REG_CLASSES elements. */
+ VEC_quick_push (int, earlyclobber_regclass, (int) class);
+ found_rc:
+ ;
}
amp_p = false;
return found;
}
-/* The function returns true if register classes C1 and C2 intersect. */
-
-static bool
-regclass_intersect (enum reg_class c1, enum reg_class c2)
-{
- HARD_REG_SET rs, zero;
-
- CLEAR_HARD_REG_SET (zero);
- COPY_HARD_REG_SET(rs, reg_class_contents [c1]);
- AND_HARD_REG_SET (rs, reg_class_contents [c2]);
- GO_IF_HARD_REG_EQUAL (zero, rs, yes);
- return true;
- yes:
- return false;
-}
-
/* The function checks that pseudo-register *X has a class
intersecting with the class of pseudo-register could be early
clobbered in the same insn.
basic_block bb = data;
struct bb_info *bb_info = BB_INFO (bb);
- if (GET_CODE (*x) == REG && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
+ if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
{
+ int rc;
+
regno = REGNO (*x);
if (bitmap_bit_p (bb_info->killed, regno)
|| bitmap_bit_p (bb_info->avloc, regno))
return 0;
pref_class = reg_preferred_class (regno);
alt_class = reg_alternate_class (regno);
- for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1; i >= 0; i--)
- if (regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
- pref_class)
- || (VARRAY_INT (earlyclobber_regclass, i) != NO_REGS
- && regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
- alt_class)))
- {
- bitmap_set_bit (bb_info->earlyclobber, regno);
- break;
- }
+ for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
+ {
+ if (reg_classes_intersect_p (rc, pref_class)
+ || (rc != NO_REGS
+ && reg_classes_intersect_p (rc, alt_class)))
+ {
+ bitmap_set_bit (bb_info->earlyclobber, regno);
+ break;
+ }
+ }
}
return 0;
}
basic_block bb;
rtx insn, bound;
- VARRAY_INT_INIT (earlyclobber_regclass, 20,
- "classes of registers early clobbered in an insn");
+ /* We know that earlyclobber_regclass holds no more than
+ N_REG_CLASSES elements. See check_earlyclobber. */
+ earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
FOR_EACH_BB (bb)
{
bound = NEXT_INSN (BB_END (bb));
note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
}
}
+ VEC_free (int, heap, earlyclobber_regclass);
}
/* The function sets up reverse post-order number of each basic
/* Temporary bitmap used for live_pavin, live_pavout calculation. */
static bitmap temp_bitmap;
+DEF_VEC_P(basic_block);
+DEF_VEC_ALLOC_P(basic_block,heap);
+
/* The function calculates partial register availability according to
the following equations:
basic_block bb, succ;
edge e;
int i, nel;
- varray_type bbs, new_bbs, temp;
+ VEC(basic_block,heap) *bbs, *new_bbs, *temp;
basic_block *bb_array;
sbitmap wset;
- VARRAY_BB_INIT (bbs, n_basic_blocks, "basic blocks");
- VARRAY_BB_INIT (new_bbs, n_basic_blocks, "basic blocks for the next iter.");
- temp_bitmap = BITMAP_XMALLOC ();
+ bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
+ new_bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
+ temp_bitmap = BITMAP_ALLOC (NULL);
FOR_EACH_BB (bb)
{
- VARRAY_PUSH_BB (bbs, bb);
+ VEC_quick_push (basic_block, bbs, bb);
}
wset = sbitmap_alloc (n_basic_blocks + 1);
- while (VARRAY_ACTIVE_SIZE (bbs))
+ while (VEC_length (basic_block, bbs))
{
- bb_array = &VARRAY_BB (bbs, 0);
- nel = VARRAY_ACTIVE_SIZE (bbs);
+ bb_array = VEC_address (basic_block, bbs);
+ nel = VEC_length (basic_block, bbs);
qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
sbitmap_zero (wset);
for (i = 0; i < nel; i++)
&& !TEST_BIT (wset, succ->index))
{
SET_BIT (wset, succ->index);
- VARRAY_PUSH_BB (new_bbs, succ);
+ VEC_quick_push (basic_block, new_bbs, succ);
}
}
}
temp = bbs;
bbs = new_bbs;
new_bbs = temp;
- VARRAY_POP_ALL (new_bbs);
+ VEC_truncate (basic_block, new_bbs, 0);
}
sbitmap_free (wset);
- BITMAP_XFREE (temp_bitmap);
+ BITMAP_FREE (temp_bitmap);
+ VEC_free (basic_block, heap, new_bbs);
+ VEC_free (basic_block, heap, bbs);
}
/* The function modifies partial availability information for two
CLEAR_HARD_REG_SET (stack_hard_regs);
for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
SET_HARD_REG_BIT(stack_hard_regs, i);
- stack_regs = BITMAP_XMALLOC ();
+ stack_regs = BITMAP_ALLOC (NULL);
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
{
COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
#endif
}
#ifdef STACK_REGS
- BITMAP_XFREE (stack_regs);
+ BITMAP_FREE (stack_regs);
#endif
}