#include "regs.h"
#include "function.h"
#include "insn-config.h"
+#include "recog.h"
#include "reload.h"
#include "output.h"
#include "toplev.h"
static void allocate_bb_info (void);
static void free_bb_info (void);
+static void check_earlyclobber (rtx);
+static bool regclass_intersect (enum reg_class, enum reg_class);
+static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
+static int mark_reg_use_for_earlyclobber (rtx *, void *);
static void calculate_local_reg_bb_info (void);
static void set_up_bb_rts_numbers (void);
static int rpost_cmp (const void *, const void *);
static bool modify_bb_reg_pav (basic_block, basic_block, bool);
static void calculate_reg_pav (void);
+static void modify_reg_pav (void);
static void make_accurate_live_analysis (void);
\f
&& (! current_function_has_nonlocal_label
|| REG_N_CALLS_CROSSED (i) == 0))
{
- if (reg_renumber[i] < 0 && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
+ if (reg_renumber[i] < 0
+ && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
reg_allocno[i] = reg_allocno[reg_may_share[i]];
else
reg_allocno[i] = max_allocno++;
- if (REG_LIVE_LENGTH (i) == 0)
- abort ();
+ gcc_assert (REG_LIVE_LENGTH (i));
}
else
reg_allocno[i] = -1;
{
regset old = b->global_live_at_start;
int ax = 0;
+ reg_set_iterator rsi;
REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
- EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i,
- {
- int a = reg_allocno[i];
- if (a >= 0)
- {
- SET_ALLOCNO_LIVE (a);
- block_start_allocnos[ax++] = a;
- }
- else if ((a = reg_renumber[i]) >= 0)
- mark_reg_live_nc
- (a, PSEUDO_REGNO_MODE (i));
- });
+ EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i, rsi)
+ {
+ int a = reg_allocno[i];
+ if (a >= 0)
+ {
+ SET_ALLOCNO_LIVE (a);
+ block_start_allocnos[ax++] = a;
+ }
+ else if ((a = reg_renumber[i]) >= 0)
+ mark_reg_live_nc (a, PSEUDO_REGNO_MODE (i));
+ }
/* Record that each allocno now live conflicts with each hard reg
now live.
evaluates X.
3. Either X or Y is not evaluated on the path to P
- (ie it is used uninitialized) and thus the
+ (i.e. it is used uninitialized) and thus the
conflict can be ignored.
In cases #1 and #2 the conflict will be recorded when we
regs live across such edges. */
{
edge e;
+ edge_iterator ei;
- for (e = b->pred; e ; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, b->preds)
if (e->flags & EDGE_ABNORMAL)
break;
if (first == BB_HEAD (b))
{
int i;
+ bitmap_iterator bi;
CLEAR_REG_SET (live_relevant_regs);
- EXECUTE_IF_SET_IN_BITMAP
- (b->global_live_at_start, 0, i,
- {
- if (i < FIRST_PSEUDO_REGISTER
- ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
- : reg_renumber[i] >= 0)
- SET_REGNO_REG_SET (live_relevant_regs, i);
- });
+ EXECUTE_IF_SET_IN_BITMAP (b->global_live_at_start, 0, i, bi)
+ {
+ if (i < FIRST_PSEUDO_REGISTER
+ ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
+ : reg_renumber[i] >= 0)
+ SET_REGNO_REG_SET (live_relevant_regs, i);
+ }
}
- if (GET_CODE (first) != NOTE && GET_CODE (first) != BARRIER)
+ if (!NOTE_P (first) && !BARRIER_P (first))
{
c = new_insn_chain ();
c->prev = prev;
the previous real insn is a JUMP_INSN. */
if (b == EXIT_BLOCK_PTR)
{
- for (first = NEXT_INSN (first) ; first; first = NEXT_INSN (first))
- if (INSN_P (first)
- && GET_CODE (PATTERN (first)) != USE
- && ! ((GET_CODE (PATTERN (first)) == ADDR_VEC
- || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
- && prev_real_insn (first) != 0
- && GET_CODE (prev_real_insn (first)) == JUMP_INSN))
- abort ();
+#ifdef ENABLE_CHECKING
+ for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
+ gcc_assert (!INSN_P (first)
+ || GET_CODE (PATTERN (first)) == USE
+ || ((GET_CODE (PATTERN (first)) == ADDR_VEC
+ || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
+ && prev_real_insn (first) != 0
+ && JUMP_P (prev_real_insn (first))));
+#endif
break;
}
}
{
/* The basic block reverse post-order number. */
int rts_number;
+  /* Registers that are used uninitialized in an insn that also
+     contains an early-clobbered register might be assigned the same
+     hard register; they are tracked here.  */
+ bitmap earlyclobber;
/* Registers correspondingly killed (clobbered) and defined but not
killed afterward in the basic block. */
bitmap killed, avloc;
FOR_EACH_BB (bb)
{
bb_info = bb->aux;
+ bb_info->earlyclobber = BITMAP_XMALLOC ();
bb_info->avloc = BITMAP_XMALLOC ();
bb_info->killed = BITMAP_XMALLOC ();
bb_info->pavin = BITMAP_XMALLOC ();
BITMAP_XFREE (bb_info->pavin);
BITMAP_XFREE (bb_info->killed);
BITMAP_XFREE (bb_info->avloc);
+ BITMAP_XFREE (bb_info->earlyclobber);
}
free_aux_for_blocks ();
}
bitmap_clear_bit (bb_info->avloc, regno);
}
+/* Classes of registers which could be early clobbered in the current
+   insn.  Filled by check_earlyclobber and consumed by
+   mark_reg_use_for_earlyclobber.  */
+
+static varray_type earlyclobber_regclass;
+
+/* Parse the operand constraints of INSN and record in
+   EARLYCLOBBER_REGCLASS each register class that appears in an
+   alternative carrying the early-clobber marker '&'.  Duplicate
+   classes are recorded only once.  */
+
+static void
+check_earlyclobber (rtx insn)
+{
+  int opno;
+
+  extract_insn (insn);
+
+  /* Discard the classes accumulated for the previous insn.  */
+  VARRAY_POP_ALL (earlyclobber_regclass);
+  for (opno = 0; opno < recog_data.n_operands; opno++)
+    {
+      char c;
+      bool amp_p;
+      int i;
+      enum reg_class class;
+      const char *p = recog_data.constraints[opno];
+
+      /* CLASS and AMP_P track, within one constraint alternative, the
+	 register class letter seen and whether '&' was seen.  */
+      class = NO_REGS;
+      amp_p = false;
+      for (;;)
+	{
+	  c = *p;
+	  switch (c)
+	    {
+	    case '=': case '+': case '?':
+	    case '#': case '!':
+	    case '*': case '%':
+	    case 'm': case '<': case '>': case 'V': case 'o':
+	    case 'E': case 'F': case 'G': case 'H':
+	    case 's': case 'i': case 'n':
+	    case 'I': case 'J': case 'K': case 'L':
+	    case 'M': case 'N': case 'O': case 'P':
+	    case 'X':
+	    case '0': case '1': case '2': case '3': case '4':
+	    case '5': case '6': case '7': case '8': case '9':
+	      /* These don't say anything we care about.  */
+	      break;
+
+	    case '&':
+	      amp_p = true;
+	      break;
+	    /* ',' ends one alternative, '\0' ends the whole constraint
+	       string: commit the class seen in the alternative if it
+	       was early clobbered, then reset for the next one.  */
+	    case '\0':
+	    case ',':
+	      if (amp_p && class != NO_REGS)
+		{
+		  /* Push CLASS unless it is already recorded.  */
+		  for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1;
+		       i >= 0; i--)
+		    if (VARRAY_INT (earlyclobber_regclass, i) == (int) class)
+		      break;
+		  if (i < 0)
+		    VARRAY_PUSH_INT (earlyclobber_regclass, (int) class);
+		}
+
+	      amp_p = false;
+	      class = NO_REGS;
+	      break;
+
+	    case 'r':
+	      class = GENERAL_REGS;
+	      break;
+
+	    default:
+	      /* A machine-specific register-class constraint letter.  */
+	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
+	      break;
+	    }
+	  if (c == '\0')
+	    break;
+	  p += CONSTRAINT_LEN (c, p);
+	}
+    }
+}
+
+/* Return true if register classes C1 and C2 have at least one hard
+   register in common.  */
+
+static bool
+regclass_intersect (enum reg_class c1, enum reg_class c2)
+{
+  HARD_REG_SET rs, zero;
+
+  CLEAR_HARD_REG_SET (zero);
+  COPY_HARD_REG_SET(rs, reg_class_contents [c1]);
+  AND_HARD_REG_SET (rs, reg_class_contents [c2]);
+  /* GO_IF_HARD_REG_EQUAL branches to the label when RS equals the
+     empty set, i.e. when the intersection is empty; falling through
+     means the classes intersect.  */
+  GO_IF_HARD_REG_EQUAL (zero, rs, yes);
+  return true;
+ yes:
+  return false;
+}
+
+/* A for_each_rtx callback: check whether *X is a pseudo-register
+   whose preferred or alternate class intersects the class of some
+   pseudo-register early clobbered in the same insn (the classes
+   collected in EARLYCLOBBER_REGCLASS by check_earlyclobber).  If so,
+   mark it in the earlyclobber bitmap of the basic block DATA.
+   Pseudos already killed or defined earlier in the block are skipped.
+   Always returns 0 so the traversal continues.  */
+
+static int
+mark_reg_use_for_earlyclobber (rtx *x, void *data)
+{
+  enum reg_class pref_class, alt_class;
+  int i, regno;
+  basic_block bb = data;
+  struct bb_info *bb_info = BB_INFO (bb);
+
+  if (GET_CODE (*x) == REG && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
+    {
+      regno = REGNO (*x);
+      /* A pseudo already set in this block before the use is not an
+	 uninitialized use; leave it alone.  */
+      if (bitmap_bit_p (bb_info->killed, regno)
+	  || bitmap_bit_p (bb_info->avloc, regno))
+	return 0;
+      pref_class = reg_preferred_class (regno);
+      alt_class = reg_alternate_class (regno);
+      for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1; i >= 0; i--)
+	if (regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
+				pref_class)
+	    || (VARRAY_INT (earlyclobber_regclass, i) != NO_REGS
+		&& regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
+				       alt_class)))
+	  {
+	    bitmap_set_bit (bb_info->earlyclobber, regno);
+	    break;
+	  }
+    }
+  return 0;
+}
+
+/* Walk all subexpressions of *X with for_each_rtx, applying
+   mark_reg_use_for_earlyclobber to each; DATA is the containing basic
+   block.  Suitable as a note_uses callback.  */
+
+static void
+mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
+{
+  for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
+}
+
/* The function calculates local info for each basic block. */
static void
basic_block bb;
rtx insn, bound;
+ VARRAY_INT_INIT (earlyclobber_regclass, 20,
+ "classes of registers early clobbered in an insn");
FOR_EACH_BB (bb)
{
bound = NEXT_INSN (BB_END (bb));
for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
if (INSN_P (insn))
- note_stores (PATTERN (insn), mark_reg_change, bb);
+ {
+ note_stores (PATTERN (insn), mark_reg_change, bb);
+ check_earlyclobber (insn);
+ note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
+ }
}
}
sbitmap_zero (wset);
for (i = 0; i < nel; i++)
{
+ edge_iterator ei;
+
bb = bb_array [i];
changed_p = 0;
- for (e = bb->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, bb->preds)
changed_p = modify_bb_reg_pav (bb, e->src, changed_p);
if (changed_p)
- for (e = bb->succ; e; e = e->succ_next)
+ FOR_EACH_EDGE (e, ei, bb->succs)
{
succ = e->dest;
if (succ->index != EXIT_BLOCK && !TEST_BIT (wset, succ->index))
sbitmap_free (wset);
}
+/* Adjust the partial availability information (pavin) of every basic
+   block for two special cases.  Without these adjustments the
+   subsequent passes that rely on the accurate live information built
+   from partial availability would work incorrectly; see the comments
+   in the loop body for the two cases.  */
+
+static void
+modify_reg_pav (void)
+{
+  basic_block bb;
+  struct bb_info *bb_info;
+#ifdef STACK_REGS
+  int i;
+  HARD_REG_SET zero, stack_hard_regs, used;
+  bitmap stack_regs;
+
+  /* Collect in STACK_REGS every pseudo-register whose preferred or
+     alternate class contains a stack hard register.  */
+  CLEAR_HARD_REG_SET (zero);
+  CLEAR_HARD_REG_SET (stack_hard_regs);
+  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
+    SET_HARD_REG_BIT(stack_hard_regs, i);
+  stack_regs = BITMAP_XMALLOC ();
+  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
+    {
+      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
+      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
+      AND_HARD_REG_SET (used, stack_hard_regs);
+      /* Skip pseudo I if its classes share no stack hard register.  */
+      GO_IF_HARD_REG_EQUAL(used, zero, skip);
+      bitmap_set_bit (stack_regs, i);
+    skip:
+      ;
+    }
+#endif
+  FOR_EACH_BB (bb)
+    {
+      bb_info = BB_INFO (bb);
+
+      /* Case 1: reload can assign the same hard register to an
+	 uninitialized pseudo-register and an early clobbered
+	 pseudo-register in an insn if the pseudo-register is used for
+	 the first time in the given BB and is not live at the BB
+	 start.  To prevent this we do not change the life information
+	 for such pseudo-registers.  */
+      bitmap_a_or_b (bb_info->pavin, bb_info->pavin, bb_info->earlyclobber);
+#ifdef STACK_REGS
+      /* Case 2: we cannot use the same stack register for an
+	 uninitialized pseudo-register and another live
+	 pseudo-register, because if the uninitialized pseudo-register
+	 dies, the subsequent pass reg-stack will be confused (it will
+	 believe that the other register dies).  */
+      bitmap_a_or_b (bb_info->pavin, bb_info->pavin, stack_regs);
+#endif
+    }
+#ifdef STACK_REGS
+  BITMAP_XFREE (stack_regs);
+#endif
+}
+
/* The following function makes live information more accurate by
modifying global_live_at_start and global_live_at_end of basic
blocks. After the function call a register lives at a program
calculate_local_reg_bb_info ();
set_up_bb_rts_numbers ();
calculate_reg_pav ();
+ modify_reg_pav ();
FOR_EACH_BB (bb)
{
bb_info = BB_INFO (bb);