1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 2007 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "hard-reg-set.h"
33 #include "insn-config.h"
38 #include "tree-pass.h"
44 #include "sparseset.h"
46 /* Externs defined in regs.h. */
/* Array of per-pseudo allocation records; allocno[i].reg is the regno
   of the pseudo assigned to allocno i (see conflict_p below).  */
49 struct allocno *allocno;
/* Packed triangular bit matrix of allocno<->allocno conflicts; a bit is
   addressed via CONFLICT_BITNUM split into a word index and a bit mask
   with HOST_BITS_PER_WIDEST_FAST_INT (see conflict_p/set_conflict).  */
50 HOST_WIDEST_FAST_INT *conflicts;
/* Adjacency-list storage for the interference graph.  NOTE(review):
   presumably maintained by add_neighbor, whose body is not visible in
   this listing — confirm against the full file.  */
54 alloc_pool adjacency_pool;
55 adjacency_t **adjacency;
/* VEC-of-df_ref support (pre-C++ gcc vector API).  */
57 typedef struct df_ref * df_ref_t;
59 DEF_VEC_ALLOC_P(df_ref_t,heap);
61 /* Macros to determine the bit number within the triangular bit matrix for
62 the two allocnos Low and HIGH, with LOW strictly less than HIGH. */
/* NOTE(review): despite the comment above, both macros accept their
   allocno arguments in either order and pick the smaller one as the
   row themselves.  CONFLICT_BITNUM_FAST takes partial_bitnum[I]
   pre-loaded by the caller (see set_conflicts) to hoist the table
   lookup out of a loop.  Arguments are evaluated more than once, so
   they must be side-effect free.  */
64 #define CONFLICT_BITNUM(I, J) \
65 (((I) < (J)) ? (partial_bitnum[I] + (J)) : (partial_bitnum[J] + (I)))
67 #define CONFLICT_BITNUM_FAST(I, I_PARTIAL_BITNUM, J) \
68 (((I) < (J)) ? ((I_PARTIAL_BITNUM) + (J)) : (partial_bitnum[J] + (I)))
/* Return true if ALLOCNO1 conflicts with ALLOCNO2 by testing the
   corresponding bit in the packed CONFLICTS matrix.
   NOTE(review): this listing elides lines (the return type, some local
   declarations, the early return for the self-conflict case, and the
   matching #endif's); comments below describe only the visible code.  */
71 conflict_p (int allocno1, int allocno2)
74 HOST_WIDEST_FAST_INT word, mask;
/* Checking-only sanity tests: indices in range, and the two pseudos are
   either global (block 0) or local to the same basic block.  */
76 #ifdef ENABLE_CHECKING
79 gcc_assert (allocno1 >= 0 && allocno1 < max_allocno);
80 gcc_assert (allocno2 >= 0 && allocno2 < max_allocno);
82 blk1 = regno_basic_block (allocno[allocno1].reg);
83 blk2 = regno_basic_block (allocno[allocno2].reg);
84 gcc_assert (blk1 == 0 || blk2 == 0 || blk1 == blk2);
87 if (allocno1 == allocno2)
88 /* By definition, an allocno does not conflict with itself. */
91 bitnum = CONFLICT_BITNUM (allocno1, allocno2);
93 #ifdef ENABLE_CHECKING
94 gcc_assert (bitnum >= 0 && bitnum < max_bitnum);
/* Split the flat bit number into a word and a mask within the word.  */
97 word = conflicts[bitnum / HOST_BITS_PER_WIDEST_FAST_INT];
98 mask = (HOST_WIDEST_FAST_INT) 1 << (bitnum % HOST_BITS_PER_WIDEST_FAST_INT);
99 return (word & mask) != 0;
102 /* Add conflict edges between ALLOCNO1 and ALLOCNO2. */
/* NOTE(review): lines are elided in this listing (return type, the
   early return for allocno1 == allocno2, braces, #endif's).  */
105 set_conflict (int allocno1, int allocno2)
108 HOST_WIDEST_FAST_INT word, mask;
/* Same checking-only sanity tests as conflict_p.  */
110 #ifdef ENABLE_CHECKING
113 gcc_assert (allocno1 >= 0 && allocno1 < max_allocno);
114 gcc_assert (allocno2 >= 0 && allocno2 < max_allocno);
116 blk1 = regno_basic_block (allocno[allocno1].reg);
117 blk2 = regno_basic_block (allocno[allocno2].reg);
118 gcc_assert (blk1 == 0 || blk2 == 0 || blk1 == blk2);
121 /* By definition, an allocno does not conflict with itself. */
122 if (allocno1 == allocno2)
125 bitnum = CONFLICT_BITNUM (allocno1, allocno2);
127 #ifdef ENABLE_CHECKING
128 gcc_assert (bitnum >= 0 && bitnum < max_bitnum);
131 index = bitnum / HOST_BITS_PER_WIDEST_FAST_INT;
132 word = conflicts[index];
133 mask = (HOST_WIDEST_FAST_INT) 1 << (bitnum % HOST_BITS_PER_WIDEST_FAST_INT);
/* Only touch the adjacency lists when the bit was not already set, so
   each (symmetric) edge is recorded exactly once per direction.  */
135 if ((word & mask) == 0)
137 conflicts[index] = word | mask;
138 add_neighbor (allocno1, allocno2);
139 add_neighbor (allocno2, allocno1);
143 /* Add conflict edges between ALLOCNO1 and all allocnos currently live. */
/* Bulk form of set_conflict: iterates the LIVE sparseset and sets one
   matrix bit (plus adjacency entries) per live allocno.  NOTE(review):
   this listing elides lines, including the skip of i == allocno1.  */
146 set_conflicts (int allocno1, sparseset live)
150 HOST_WIDEST_FAST_INT word, mask;
151 int partial_bitnum_allocno1;
153 #ifdef ENABLE_CHECKING
154 gcc_assert (allocno1 >= 0 && allocno1 < max_allocno);
/* Hoist the partial_bitnum lookup for ALLOCNO1 out of the loop; the
   _FAST macro below reuses it for every live partner.  */
157 partial_bitnum_allocno1 = partial_bitnum[allocno1];
159 EXECUTE_IF_SET_IN_SPARSESET (live, i)
161 /* By definition, an allocno does not conflict with itself. */
165 #ifdef ENABLE_CHECKING
166 gcc_assert (i >= 0 && i < max_allocno);
169 bitnum = CONFLICT_BITNUM_FAST (allocno1, partial_bitnum_allocno1, i);
171 #ifdef ENABLE_CHECKING
172 gcc_assert (bitnum >= 0 && bitnum < max_bitnum);
175 index = bitnum / HOST_BITS_PER_WIDEST_FAST_INT;
176 word = conflicts[index];
177 mask = (HOST_WIDEST_FAST_INT) 1 << (bitnum % HOST_BITS_PER_WIDEST_FAST_INT);
/* As in set_conflict, update adjacency only for a newly-set bit.  */
179 if ((word & mask) == 0)
181 conflicts[index] = word | mask;
182 add_neighbor (allocno1, i);
183 add_neighbor (i, allocno1);
189 /* Add a conflict between R1 and R2. */
/* Dispatch on which of R1/R2 have allocnos: pseudo<->pseudo conflicts
   go into the bit matrix, pseudo<->hard-reg conflicts into the
   allocno's hard_reg_conflicts set.  MODE1/MODE2 size the hard-reg
   spans.  Finally recurses through reg_renumber so conflicts are also
   recorded against the hard regs local_alloc chose.  */
192 record_one_conflict_between_regnos (enum machine_mode mode1, int r1,
193 enum machine_mode mode2, int r2)
195 int allocno1 = reg_allocno[r1];
196 int allocno2 = reg_allocno[r2];
199 fprintf (dump_file, " rocbr adding %d<=>%d\n", r1, r2);
201 if (allocno1 >= 0 && allocno2 >= 0)
202 set_conflict (allocno1, allocno2);
203 else if (allocno1 >= 0)
205 if (r2 < FIRST_PSEUDO_REGISTER)
206 add_to_hard_reg_set (&allocno[allocno1].hard_reg_conflicts, mode2, r2);
208 else if (allocno2 >= 0)
210 if (r1 < FIRST_PSEUDO_REGISTER)
211 add_to_hard_reg_set (&allocno[allocno2].hard_reg_conflicts, mode1, r1);
214 /* Now, recursively handle the reg_renumber cases. */
/* NOTE(review): recursion terminates because reg_renumber[hard reg]
   is presumably < 0 — confirm against the full file.  */
215 if (reg_renumber[r1] >= 0)
216 record_one_conflict_between_regnos (mode1, reg_renumber[r1], mode2, r2);
218 if (reg_renumber[r2] >= 0)
219 record_one_conflict_between_regnos (mode1, r1, mode2, reg_renumber[r2]);
223 /* Record a conflict between register REGNO and everything currently
224 live. REGNO must not be a pseudo reg that was allocated by
225 local_alloc; such numbers must be translated through reg_renumber
226 before calling here. */
/* NOTE(review): elided lines include the function header, the else
   joining the two branches, and the dump_file guards around the
   diagnostic fprintf's below.  */
229 record_one_conflict (sparseset allocnos_live,
230 HARD_REG_SET *hard_regs_live, int regno)
234 if (regno < FIRST_PSEUDO_REGISTER)
235 /* When a hard register becomes live, record conflicts with live
/* Hard-reg case: mark REGNO as conflicting with every live allocno.  */
237 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
239 SET_HARD_REG_BIT (allocno[i].hard_reg_conflicts, regno);
241 fprintf (dump_file, " roc adding %d<=>%d\n", allocno[i].reg, regno);
244 /* When a pseudo-register becomes live, record conflicts first
245 with hard regs, then with other pseudo regs. */
247 int ialloc = reg_allocno[regno];
/* Dump only the conflicts that are new (not already recorded).  */
251 fprintf (dump_file, " roc adding %d<=>(", regno);
252 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
253 if (TEST_HARD_REG_BIT (*hard_regs_live, i)
254 && !TEST_HARD_REG_BIT (allocno[ialloc].hard_reg_conflicts, i))
255 fprintf (dump_file, "%d ", i);
257 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
259 if (!conflict_p (ialloc, i))
260 fprintf (dump_file, "%d ", allocno[i].reg);
262 fprintf (dump_file, ")\n");
/* The actual recording: all live hard regs, then all live allocnos.  */
265 IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, *hard_regs_live);
266 set_conflicts (ialloc, allocnos_live);
271 /* Handle the case where REG is set by the insn being scanned, during
272 the backward scan to accumulate conflicts. Record a conflict with
273 all other registers already live.
275 REG might actually be something other than a register; if so, we do
/* NOTE(review): elided lines include the tail of the comment above,
   the function's return type, braces, and the loop header for the
   hard-reg walk that record_one_conflict is called from below.  */
279 mark_reg_store (sparseset allocnos_live,
280 HARD_REG_SET *hard_regs_live,
283 rtx reg = DF_REF_REG (ref);
284 unsigned int regno = DF_REF_REGNO (ref);
285 enum machine_mode mode = GET_MODE (reg);
287 /* Either this is one of the max_allocno pseudo regs not allocated,
288 or it is or has a hardware reg. First handle the pseudo-regs. */
289 if (regno >= FIRST_PSEUDO_REGISTER && reg_allocno[regno] >= 0)
290 record_one_conflict (allocnos_live, hard_regs_live, regno);
/* Translate a locally-allocated pseudo to its hard register.  */
292 if (reg_renumber[regno] >= 0)
293 regno = reg_renumber[regno];
295 /* Handle hardware regs (and pseudos allocated to hard regs). */
296 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
298 unsigned int start = regno;
299 unsigned int last = end_hard_regno (mode, regno);
/* For a non-EXTRACT subreg, narrow [start, last) to just the hard
   regs the subreg actually covers.  */
300 if ((GET_CODE (reg) == SUBREG) && !DF_REF_FLAGS_IS_SET (ref, DF_REF_EXTRACT))
302 start += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
303 SUBREG_BYTE (reg), GET_MODE (reg));
304 last = start + subreg_nregs_with_regno (regno, reg);
309 record_one_conflict (allocnos_live, hard_regs_live, regno++);
314 /* Return true if REGNO with MODE can be assigned to a register in
/* ... class RC (tail of comment elided in this listing).  For a
   pseudo, test whether RC intersects its preferred or alternate
   class; for a hard reg, test direct membership of the MODE-sized
   span in RC.  */
318 may_overlap_class_p (enum machine_mode mode, unsigned int regno,
321 if (regno >= FIRST_PSEUDO_REGISTER)
323 enum reg_class pref_class = reg_preferred_class (regno);
324 enum reg_class alt_class = reg_alternate_class (regno);
325 return (reg_classes_intersect_p (rc, pref_class)
326 || reg_classes_intersect_p (rc, alt_class));
329 return in_hard_reg_set_p (reg_class_contents[rc], mode, regno);
333 /* SRC is an input operand to an instruction in which register DEST is
334 an output operand. SRC may be bound to a member of class SRC_CLASS
335 and DEST may be bound to an earlyclobbered register that overlaps
336 SRC_CLASS. If SRC is a register that might be allocated a member
337 of SRC_CLASS, add a conflict between it and DEST. */
340 add_conflicts_for_earlyclobber (rtx dest, enum reg_class src_class, rtx src)
/* Strip a subreg wrapper so the REG_P/REGNO tests below (the REG_P
   guard itself is elided from this listing) see the inner reg.  */
342 if (GET_CODE (src) == SUBREG)
343 src = SUBREG_REG (src);
345 && may_overlap_class_p (GET_MODE (src), REGNO (src), src_class))
346 record_one_conflict_between_regnos (GET_MODE (src), REGNO (src),
347 GET_MODE (dest), REGNO (dest));
351 /* Look at the defs in INSN and determine if any of them are marked as
352 early clobber. If they are marked as early clobber, add a conflict
353 between any input operand that could be allocated to the same
/* ... register class (tail of comment elided).  Walks every
   (alternative, output-operand) pair; for earlyclobber outputs with a
   real class, conflicts each overlapping input against the output.  */
357 set_conflicts_for_earlyclobber (rtx insn)
/* Populate recog_data / recog_op_alt for INSN's constraints.  */
364 preprocess_constraints ();
367 fprintf (dump_file, " starting early clobber conflicts.\n");
369 for (alt = 0; alt < recog_data.n_alternatives; alt++)
370 for (def = 0; def < recog_data.n_operands; def++)
371 if ((recog_op_alt[def][alt].earlyclobber)
372 && (recog_op_alt[def][alt].cl != NO_REGS))
374 rtx dreg = recog_data.operand[def];
375 enum machine_mode dmode = recog_data.operand_mode[def];
376 if (GET_CODE (dreg) == SUBREG)
377 dreg = SUBREG_REG (dreg);
379 && may_overlap_class_p (dmode, REGNO (dreg), recog_op_alt[def][alt].cl))
381 for (use = 0; use < recog_data.n_operands; use++)
383 && recog_data.operand_type[use] != OP_OUT
384 && reg_classes_intersect_p (recog_op_alt[def][alt].cl,
385 recog_op_alt[use][alt].cl))
387 add_conflicts_for_earlyclobber (dreg,
388 recog_op_alt[use][alt].cl,
389 recog_data.operand[use]);
390 /* Reload may end up swapping commutative operands,
391 so you have to take both orderings into account.
392 The constraints for the two operands can be
393 completely different. (Indeed, if the
394 constraints for the two operands are the same
395 for all alternatives, there's no point marking
396 them as commutative.) */
/* NOTE(review): the bound below looks like an off-by-two — it accesses
   operand[use + 1], so the guard should be use < n_operands - 1
   (upstream GCC uses that form); "n_operands + 1" is always true and
   can read past the operand array.  Verify against the full file.  */
397 if (use < recog_data.n_operands + 1
398 && recog_data.constraints[use][0] == '%')
399 add_conflicts_for_earlyclobber (dreg,
400 recog_op_alt[use][alt].cl,
401 recog_data.operand[use + 1]);
406 fprintf (dump_file, " finished early clobber conflicts.\n");
410 /* Init LIVE_SUBREGS[ALLOCNUM] and LIVE_SUBREGS_USED[ALLOCNUM] using
411 REG to the number of nregs, and INIT_VALUE to get the
412 initialization. ALLOCNUM need not be the regno of REG. */
415 ra_init_live_subregs (bool init_value,
416 sbitmap *live_subregs,
417 int *live_subregs_used,
/* SIZE is one bit per byte of the full (inner) pseudo.  */
421 unsigned int regno = REGNO (SUBREG_REG (reg));
422 int size = GET_MODE_SIZE (GET_MODE (regno_reg_rtx[regno]));
424 gcc_assert (size > 0);
426 /* Been there, done that. */
/* Nonzero live_subregs_used means the bitmap is already tracking this
   allocno, so there is nothing to (re)initialize.  */
427 if (live_subregs_used[allocnum])
430 /* Create a new one with zeros. */
/* The sbitmap is allocated lazily and reused across blocks; it is
   freed at the end of global_conflicts.  */
431 if (live_subregs[allocnum] == NULL)
432 live_subregs[allocnum] = sbitmap_alloc (size);
434 /* If the entire reg was live before blasting into subregs, we need
435 to init all of the subregs to ones else init to 0. */
437 sbitmap_ones (live_subregs[allocnum]);
439 sbitmap_zero (live_subregs[allocnum]);
441 /* Set the number of bits that we really want. */
442 live_subregs_used[allocnum] = size;
446 /* Set REG to be not live in the sets ALLOCNOS_LIVE, LIVE_SUBREGS,
447 HARD_REGS_LIVE. If EXTRACT is false, assume that the entire reg is
448 set not live even if REG is a subreg. */
/* NOTE(review): this listing elides lines (the reg/extract parameters,
   the loop clearing bits from start to last, else arms, braces).  */
451 clear_reg_in_live (sparseset allocnos_live,
452 sbitmap *live_subregs,
453 int *live_subregs_used,
454 HARD_REG_SET *hard_regs_live,
/* Work with the inner regno when REG is a subreg.  */
458 unsigned int regno = (GET_CODE (reg) == SUBREG)
459 ? REGNO (SUBREG_REG (reg)): REGNO (reg);
460 int allocnum = reg_allocno[regno];
/* Pseudo with a subreg def: kill only the covered byte range.  */
464 if ((GET_CODE (reg) == SUBREG) && !extract)
467 unsigned int start = SUBREG_BYTE (reg);
468 unsigned int last = start + GET_MODE_SIZE (GET_MODE (reg));
470 ra_init_live_subregs (sparseset_bit_p (allocnos_live, allocnum),
471 live_subregs, live_subregs_used, allocnum, reg);
473 /* Ignore the paradoxical bits. */
474 if ((int)last > live_subregs_used[allocnum])
475 last = live_subregs_used[allocnum];
479 RESET_BIT (live_subregs[allocnum], start);
/* If no byte of the pseudo remains live, stop tracking subregs and
   drop the allocno from the live set entirely.  */
483 if (sbitmap_empty_p (live_subregs[allocnum]))
485 live_subregs_used[allocnum] = 0;
486 sparseset_clear_bit (allocnos_live, allocnum);
489 /* Set the allocnos live here because that bit has to be
490 true to get us to look at the live_subregs fields. */
491 sparseset_set_bit (allocnos_live, allocnum);
495 /* Resetting the live_subregs_used is effectively saying do not use the
496 subregs because we are writing the whole pseudo. */
497 live_subregs_used[allocnum] = 0;
498 sparseset_clear_bit (allocnos_live, allocnum);
502 if (regno >= FIRST_PSEUDO_REGISTER)
505 /* Handle hardware regs (and pseudos allocated to hard regs). */
506 if (! fixed_regs[regno])
508 unsigned int start = regno;
509 if ((GET_CODE (reg) == SUBREG) && !extract)
/* NOTE(review): adding the raw byte offset SUBREG_BYTE to a hard
   regno looks suspect — the sibling set_reg_in_live uses
   subreg_regno_offset for the same computation.  Verify whether this
   is intentional or a latent bug.  */
512 start += SUBREG_BYTE (reg);
513 last = start + subreg_nregs_with_regno (regno, reg);
518 CLEAR_HARD_REG_BIT (*hard_regs_live, regno);
/* Whole-reg (or EXTRACT) case: clear every hard reg REG occupies.  */
523 remove_from_hard_reg_set (hard_regs_live, GET_MODE (reg), regno);
529 /* Set REG to be live in the sets ALLOCNOS_LIVE, LIVE_SUBREGS,
530 HARD_REGS_LIVE. If EXTRACT is false, assume that the entire reg is
531 set live even if REG is a subreg. */
/* Mirror image of clear_reg_in_live.  NOTE(review): this listing
   elides lines (the reg/extract parameters, the bit-setting loop,
   else arms, braces).  */
534 set_reg_in_live (sparseset allocnos_live,
535 sbitmap *live_subregs,
536 int *live_subregs_used,
537 HARD_REG_SET *hard_regs_live,
/* Work with the inner regno when REG is a subreg.  */
541 unsigned int regno = (GET_CODE (reg) == SUBREG)
542 ? REGNO (SUBREG_REG (reg)): REGNO (reg);
543 int allocnum = reg_allocno[regno];
/* Pseudo with a subreg ref: mark live only the covered byte range.  */
547 if ((GET_CODE (reg) == SUBREG) && !extract)
549 unsigned int start = SUBREG_BYTE (reg);
550 unsigned int last = start + GET_MODE_SIZE (GET_MODE (reg));
552 ra_init_live_subregs (sparseset_bit_p (allocnos_live, allocnum),
553 live_subregs, live_subregs_used, allocnum, reg);
555 /* Ignore the paradoxical bits. */
556 if ((int)last > live_subregs_used[allocnum])
557 last = live_subregs_used[allocnum];
561 SET_BIT (live_subregs[allocnum], start);
566 /* Resetting the live_subregs_used is effectively saying do not use the
567 subregs because we are writing the whole pseudo. */
568 live_subregs_used[allocnum] = 0;
/* The allocno is live in either case.  */
570 sparseset_set_bit (allocnos_live, allocnum);
573 if (regno >= FIRST_PSEUDO_REGISTER)
576 /* Handle hardware regs (and pseudos allocated to hard regs). */
577 if (! fixed_regs[regno])
579 if ((GET_CODE (reg) == SUBREG) && !extract)
581 unsigned int start = regno;
/* Translate the subreg's byte offset into a hard-reg offset.  */
584 start += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
585 SUBREG_BYTE (reg), GET_MODE (reg));
586 last = start + subreg_nregs_with_regno (regno, reg);
591 SET_HARD_REG_BIT (*hard_regs_live, regno);
/* Whole-reg (or EXTRACT) case: set every hard reg REG occupies.  */
596 add_to_hard_reg_set (hard_regs_live, GET_MODE (reg), regno);
601 /* Add hard reg conflicts to RENUMBERS_LIVE assuming that pseudo in
602 allocno[ALLOCNUM] is allocated to a set of hard regs starting at
/* ... RENUMBER (line elided).  */
605 We are smart about the case where only subregs of REG have been
606 set, as indicated by LIVE_SUBREGS[ALLOCNUM] and
607 LIVE_SUBREGS_USED[ALLOCNUM]. See global_conflicts for description
608 of LIVE_SUBREGS and LIVE_SUBREGS_USED. */
611 set_renumbers_live (HARD_REG_SET *renumbers_live,
612 sbitmap *live_subregs,
613 int *live_subregs_used,
614 int allocnum, int renumber)
616 /* The width of the pseudo. */
617 int nbytes = live_subregs_used[allocnum];
618 int regno = allocno[allocnum].reg;
619 enum machine_mode mode = GET_MODE (regno_reg_rtx[regno]);
622 fprintf (dump_file, " set_renumbers_live %d->%d ",
/* Subreg-tracking path (taken when nbytes > 0; the branch condition
   itself is elided from this listing).  */
628 sbitmap live_subs = live_subregs[allocnum];
630 /* First figure out how many hard regs we are considering using. */
631 int target_nregs = hard_regno_nregs[renumber][mode];
633 /* Now figure out the number of bytes per hard reg. Note that
634 this may be different that what would be obtained by looking
635 at the mode in the pseudo. For instance, a complex number
636 made up of 2 32-bit parts gets mapped to 2 hard regs, even if
637 the hardregs are 64-bit floating point values. */
/* NOTE(review): integer division; assumes target_nregs > 0 and that
   nbytes is a multiple of target_nregs — verify for odd modes.  */
638 int target_width = nbytes / target_nregs;
641 fprintf (dump_file, "target_nregs=%d target_width=%d nbytes=%d",
642 target_nregs, target_width, nbytes);
/* Mark hard reg RENUMBER+i live iff any byte it backs is live.  */
644 for (i = 0; i < target_nregs; i++)
648 for (j = 0; j < target_width; j++)
650 int reg_start = i * target_width;
651 if (reg_start + j >= nbytes)
653 set |= TEST_BIT (live_subs, reg_start + j);
657 SET_HARD_REG_BIT (*renumbers_live, renumber + i);
/* Whole-pseudo path: mark the full MODE-sized hard-reg span live.  */
661 add_to_hard_reg_set (renumbers_live, mode, renumber);
664 fprintf (dump_file, "\n");
667 /* Dump out a REF with its reg_renumber range to FILE using
/* ... PREFIX and SUFFIX (comment tail and several parameter lines are
   elided from this listing).  Debug-dump helper only.  */
671 dump_ref (FILE *file,
676 sbitmap *live_subregs,
677 int *live_subregs_used
680 int allocnum = reg_allocno[regno];
682 fprintf (file, "%s %d", prefix, regno);
/* If subreg tracking is active for this allocno, print the live
   byte indexes in brackets.  */
684 && live_subregs_used[allocnum] > 0)
689 for (j = 0; j < live_subregs_used[allocnum]; j++)
690 if (TEST_BIT (live_subregs[allocnum], j))
/* NOTE(review): these two lines write to dump_file while the rest of
   the function writes to FILE — harmless when they are the same
   stream, but inconsistent; verify intent.  */
692 fprintf (dump_file, "%c%d", s, j);
695 fprintf (dump_file, "]");
698 if (reg_renumber[regno] >= 0)
700 enum machine_mode mode = GET_MODE (reg);
704 regno = reg_renumber[regno];
707 last = end_hard_regno (mode, regno);
708 if (GET_CODE (reg) == SUBREG)
710 start += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
711 SUBREG_BYTE (reg), GET_MODE (reg));
712 last = start + subreg_nregs_with_regno (regno, reg);
715 if (start == last - 1)
716 fprintf (file, "(%d)", start);
718 fprintf (file, "(%d:%d..%d)", regno, start, last-1);
/* NOTE(review): non-literal format string — if SUFFIX ever contains a
   '%', this is undefined behavior; fputs (suffix, file) would be the
   safe form.  SUFFIX is caller-controlled ("", "\n") in the visible
   call sites, so this is currently benign.  */
720 fprintf (file, suffix);
724 /* Scan the rtl code and record all conflicts and register preferences in the
725 conflict matrices and preference tables. */
/* Driver: for each basic block, start from DF_LIVE_OUT and simulate
   the insns in reverse, maintaining three live sets (hard regs,
   renumbered hard regs, pseudo allocnos plus per-byte subreg maps) and
   recording interferences as defs are processed.  NOTE(review): this
   listing elides many lines (locals, braces, FOR_EACH_BB, dump_file
   guards, else arms), so comments describe only the visible code.  */
728 global_conflicts (void)
734 /* Regs that have allocnos can be in either
735 hard_regs_live (if regno < FIRST_PSEUDO_REGISTER) or
736 allocnos_live (if regno >= FIRST_PSEUDO_REGISTER) or
737 both if local_alloc has preallocated it and reg_renumber >= 0. */
739 HARD_REG_SET hard_regs_live;
740 HARD_REG_SET renumbers_live;
741 sparseset allocnos_live;
742 bitmap live = BITMAP_ALLOC (NULL);
743 VEC (df_ref_t, heap) *clobbers = NULL;
744 VEC (df_ref_t, heap) *dying_regs = NULL;
746 /* live_subregs is a vector used to keep accurate information about
747 which hardregs are live in multiword pseudos. live_subregs and
748 live_subregs_used are indexed by reg_allocno. The live_subreg
749 entry for a particular pseudo is a bitmap with one bit per byte
750 of the register. It is only used if the corresponding element is
751 non zero in live_subregs_used. The value in live_subregs_used is
752 number of bytes that the pseudo can occupy. */
753 sbitmap *live_subregs = XCNEWVEC (sbitmap, max_allocno);
754 int *live_subregs_used = XNEWVEC (int, max_allocno);
758 fprintf (dump_file, "fixed registers : ");
759 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
761 fprintf (dump_file, "%d ", i);
762 fprintf (dump_file, "\n");
765 allocnos_live = sparseset_alloc (max_allocno);
/* Per-block setup: live-out set with artificial refs applied, then
   reset all three live representations.  */
771 bitmap_copy (live, DF_LIVE_OUT (bb));
772 df_simulate_artificial_refs_at_end (bb, live);
774 sparseset_clear (allocnos_live);
775 memset (live_subregs_used, 0, max_allocno * sizeof (int));
776 CLEAR_HARD_REG_SET (hard_regs_live);
777 CLEAR_HARD_REG_SET (renumbers_live);
779 /* Initialize allocnos_live and hard_regs_live for bottom of block. */
780 EXECUTE_IF_SET_IN_BITMAP (live, 0, i, bi)
782 if (i >= FIRST_PSEUDO_REGISTER)
785 SET_HARD_REG_BIT (hard_regs_live, i);
788 EXECUTE_IF_SET_IN_BITMAP (live, FIRST_PSEUDO_REGISTER, i, bi)
790 int allocnum = reg_allocno[i];
794 int renumber = reg_renumber[i];
795 rtx reg = regno_reg_rtx[i];
797 set_reg_in_live (allocnos_live, live_subregs, live_subregs_used,
798 &hard_regs_live, reg, false);
799 if (renumber >= 0 && renumber < FIRST_PSEUDO_REGISTER)
800 set_renumbers_live (&renumbers_live, live_subregs, live_subregs_used,
806 fprintf (dump_file, "\nstarting basic block %d\n\n", bb->index);
/* Backward scan: process defs before uses so an insn's outputs
   interfere with everything live across (but not merely into) it.  */
808 FOR_BB_INSNS_REVERSE (bb, insn)
810 unsigned int uid = INSN_UID (insn);
811 struct df_ref **def_rec;
812 struct df_ref **use_rec;
819 fprintf (dump_file, "insn = %d live = hardregs [", uid);
821 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
822 if (TEST_HARD_REG_BIT (hard_regs_live, i))
823 fprintf (dump_file, "%d ", i);
825 fprintf (dump_file, "] renumbered [");
826 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
827 if (TEST_HARD_REG_BIT (renumbers_live, i))
828 fprintf (dump_file, "%d ", i);
830 fprintf (dump_file, "] pseudos [");
831 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
833 dump_ref (dump_file, " ", "", regno_reg_rtx[allocno[i].reg],
834 allocno[i].reg, live_subregs, live_subregs_used);
836 fprintf (dump_file, "]\n");
839 /* Add the defs into live. Most of them will already be
840 there, the ones that are missing are the unused ones and
841 the clobbers. We do this in order to make sure that
842 interferences are added between every def and everything
843 that is live across the insn. These defs will be removed
845 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
847 struct df_ref *def = *def_rec;
849 /* FIXME: Ignoring may clobbers is technically the wrong
850 thing to do. However the old version of the this
851 code ignores may clobbers (and instead has many
852 places in the register allocator to handle these
853 constraints). It is quite likely that with a new
854 allocator, the correct thing to do is to not ignore
855 the constraints and then do not put in the large
856 number of special checks. */
857 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
859 rtx reg = DF_REF_REG (def);
860 set_reg_in_live (allocnos_live, live_subregs, live_subregs_used,
861 &hard_regs_live, reg,
862 DF_REF_FLAGS_IS_SET (def, DF_REF_EXTRACT));
864 dump_ref (dump_file, " adding def", "\n",
865 reg, DF_REF_REGNO (def), live_subregs, live_subregs_used);
869 /* Add the hardregs into renumbers_live to build the
870 interferences. Renumbers_live will be rebuilt in the
871 next step from scratch, so corrupting it here is no
873 IOR_HARD_REG_SET (renumbers_live, hard_regs_live);
875 /* Add the interferences for the defs. */
876 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
878 struct df_ref *def = *def_rec;
879 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
880 mark_reg_store (allocnos_live, &renumbers_live, def);
883 /* Remove the defs from the live sets. Leave the partial
884 and conditional defs in the set because they do not
/* Also collect must-clobbers for the dying-regs pass below.  */
886 VEC_truncate (df_ref_t, clobbers, 0);
887 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
889 struct df_ref *def = *def_rec;
891 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
893 rtx reg = DF_REF_REG (def);
895 clear_reg_in_live (allocnos_live, live_subregs, live_subregs_used,
896 &hard_regs_live, reg,
897 DF_REF_FLAGS_IS_SET (def, DF_REF_EXTRACT));
899 dump_ref (dump_file, " clearing def", "\n",
900 reg, DF_REF_REGNO (def), live_subregs, live_subregs_used);
903 if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
904 VEC_safe_push (df_ref_t, heap, clobbers, def);
907 /* Go thru all of the live pseudos and reset renumbers_live.
908 We must start from scratch here because there could have
909 been several pseudos alive that have the same
910 reg_renumber and if we see a clobber for one of them, we
911 do not want to kill the renumbers from the other
913 CLEAR_HARD_REG_SET (renumbers_live);
914 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
916 unsigned int regno = allocno[i].reg;
917 int renumber = reg_renumber[regno];
919 if (renumber >= 0 && renumber < FIRST_PSEUDO_REGISTER)
920 set_renumbers_live (&renumbers_live, live_subregs, live_subregs_used,
924 /* Add the uses to the live sets. Keep track of the regs
925 that are dying inside the insn, this set will be useful
927 VEC_truncate (df_ref_t, dying_regs, 0);
928 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
930 struct df_ref *use = *use_rec;
931 unsigned int regno = DF_REF_REGNO (use);
933 int renumber = reg_renumber[regno];
934 int allocnum = reg_allocno[regno];
935 bool renumbering = false;
936 rtx reg = DF_REF_REG (use);
938 /* DF_REF_READ_WRITE on a use means that this use is
939 fabricated from a def that is a partial set to a
940 multiword reg. Here, we only model the subreg case
941 precisely so we do not need to look at the fabricated
942 use unless that set also happens to wrapped in a
944 if (DF_REF_FLAGS_IS_SET (use, DF_REF_READ_WRITE)
945 && (!DF_REF_FLAGS_IS_SET (use, DF_REF_EXTRACT))
946 && DF_REF_FLAGS_IS_SET (use, DF_REF_SUBREG))
950 dump_ref (dump_file, " seeing use", "\n",
951 reg, regno, live_subregs, live_subregs_used)_
955 if (GET_CODE (reg) == SUBREG
956 && !DF_REF_FLAGS_IS_SET (use, DF_REF_EXTRACT))
958 unsigned int start = SUBREG_BYTE (reg);
959 unsigned int last = start + GET_MODE_SIZE (GET_MODE (reg));
961 ra_init_live_subregs (sparseset_bit_p (allocnos_live, allocnum),
962 live_subregs, live_subregs_used, allocnum, reg);
964 /* Ignore the paradoxical bits. */
965 if ((int)last > live_subregs_used[allocnum])
966 last = live_subregs_used[allocnum];
/* A use of a byte not currently live means the reg (or part of
   it) dies in this insn in the backward scan.  */
970 if (!TEST_BIT (live_subregs[allocnum], start))
973 fprintf (dump_file, " dying pseudo subreg %d[%d]\n", regno, start);
974 SET_BIT (live_subregs[allocnum], start);
981 sparseset_set_bit (allocnos_live, allocnum);
982 if (renumber >= 0 && renumber < FIRST_PSEUDO_REGISTER)
983 set_renumbers_live (&renumbers_live, live_subregs, live_subregs_used,
987 else if (!sparseset_bit_p (allocnos_live, allocnum))
990 fprintf (dump_file, " dying pseudo\n");
992 /* Resetting the live_subregs_used is
993 effectively saying do not use the subregs
994 because we are reading the whole pseudo. */
995 live_subregs_used[allocnum] = 0;
996 sparseset_set_bit (allocnos_live, allocnum);
997 if (renumber >= 0 && renumber < FIRST_PSEUDO_REGISTER)
998 set_renumbers_live (&renumbers_live, live_subregs, live_subregs_used,
1004 if (renumber >= 0 && renumber < FIRST_PSEUDO_REGISTER)
/* Hard-reg use: compute the span of hard regs it covers.  */
1010 if (regno < FIRST_PSEUDO_REGISTER)
1012 unsigned int start = regno;
1014 if (GET_CODE (reg) == SUBREG)
1016 start += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
1017 SUBREG_BYTE (reg), GET_MODE (reg));
1018 last = start + subreg_nregs_with_regno (regno, reg);
1021 last = end_hard_regno (GET_MODE (reg), regno);
1024 while (regno < last)
1026 if ((!TEST_HARD_REG_BIT (hard_regs_live, regno))
1027 && (!TEST_HARD_REG_BIT (renumbers_live, regno))
1028 && ! fixed_regs[regno])
1031 fprintf (dump_file, " dying hard reg %d\n", regno);
1033 SET_HARD_REG_BIT (renumbers_live, regno);
1035 SET_HARD_REG_BIT (hard_regs_live, regno);
1043 VEC_safe_push (df_ref_t, heap, dying_regs, use);
1046 /* These three cases are all closely related, they all deal
1047 with some set of outputs of the insn need to conflict
1048 with some of the registers that are used by the insn but
1049 die within the insn. If no registers die within the insn,
1050 the tests can be skipped. */
1052 if (VEC_length (df_ref_t, dying_regs) > 0)
1055 /* There appears to be an ambiguity as to what a clobber
1056 means in an insn. In some cases, the clobber happens
1057 within the processing of the insn and in some cases
1058 it happens at the end of processing the insn. There
1059 is currently no way to distinguish these two cases so
1060 this code causes real clobbers to interfere with
1061 registers that die within an insn.
1063 This is consistent with the prior version of
1064 interference graph builder but is was discovered
1065 while developing this version of the code, that on
1066 some architectures such as the x86-64, the clobbers
1067 only appear to happen at the end of the insn.
1068 However, the ppc-32 contains clobbers for which these
1069 interferences are necessary.
1071 FIXME: We should consider either adding a new kind of
1072 clobber, or adding a flag to the clobber distinguish
1074 for (k = VEC_length (df_ref_t, clobbers) - 1; k >= 0; k--)
1076 struct df_ref *def = VEC_index (df_ref_t, clobbers, k);
1079 for (j = VEC_length (df_ref_t, dying_regs) - 1; j >= 0; j--)
1081 struct df_ref *use = VEC_index (df_ref_t, dying_regs, j);
1082 record_one_conflict_between_regnos (GET_MODE (DF_REF_REG (def)),
1084 GET_MODE (DF_REF_REG (use)),
1085 DF_REF_REGNO (use));
1089 /* Early clobbers, by definition, need to not only
1090 clobber the registers that are live accross the insn
1091 but need to clobber the registers that die within the
1092 insn. The clobbering for registers live across the
1093 insn is handled above. */
1094 set_conflicts_for_earlyclobber (insn);
1096 /* If INSN is a store with multiple outputs, then any
1097 reg that dies here and is used inside of the address
1098 of the output must conflict with the other outputs.
1100 FIXME: There has been some discussion as to whether
1101 this is right place to handle this issue. This is a
1102 hold over from an early version global conflicts.
1104 1) There is some evidence that code only deals with a
1105 bug that is only on the m68k. The conditions of this
1106 test are such that this case only triggers for a very
1107 peculiar insn, one that is a parallel where one of
1108 the sets is a store and the other sets a reg that is
1109 used in the address of the store. See
1110 http://gcc.gnu.org/ml/gcc-patches/1998-12/msg00259.html
1112 2) The situation that this is addressing is a bug in
1113 the part of reload that handles stores, adding this
1114 conflict only hides the problem. (Of course no one
1115 really wants to fix reload so it is understandable
1116 why a bandaid was just added here.)
1118 Just because an output is unused does not mean the
1119 compiler can assume the side effect will not occur.
1120 Consider if REG appears in the address of an output
1121 and we reload the output. If we allocate REG to the
1122 same hard register as an unused output we could set
1123 the hard register before the output reload insn.
1125 3) This could actually be handled by making the other
1126 (non store) operand of the insn be an early clobber.
1127 This would insert the same conflict, even if it is
1128 not technically an early clobber. */
1130 /* It is unsafe to use !single_set here since it will ignore an
1132 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1135 for (j = VEC_length (df_ref_t, dying_regs) - 1; j >= 0; j--)
1137 int used_in_output = 0;
1138 struct df_ref *use = VEC_index (df_ref_t, dying_regs, j);
1139 rtx reg = DF_REF_REG (use);
1140 int uregno = DF_REF_REGNO (use);
1141 enum machine_mode umode = GET_MODE (DF_REF_REG (use));
/* First pass: does the dying reg appear inside a non-reg
   (i.e. memory address) destination?  */
1144 for (k = XVECLEN (PATTERN (insn), 0) - 1; k >= 0; k--)
1146 rtx set = XVECEXP (PATTERN (insn), 0, k);
1147 if (GET_CODE (set) == SET
1148 && !REG_P (SET_DEST (set))
1149 && !rtx_equal_p (reg, SET_DEST (set))
1150 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Second pass: if so, conflict it with every register
   destination of the parallel.  */
1154 for (k = XVECLEN (PATTERN (insn), 0) - 1; k >= 0; k--)
1156 rtx set = XVECEXP (PATTERN (insn), 0, k);
1157 if (GET_CODE (set) == SET
1158 && REG_P (SET_DEST (set))
1159 && !rtx_equal_p (reg, SET_DEST (set)))
1160 record_one_conflict_between_regnos (GET_MODE (SET_DEST (set)),
1161 REGNO (SET_DEST (set)),
1169 /* Add the renumbers live to the hard_regs_live for the next few
1170 calls. All of this gets recomputed at the top of the loop so
1171 there is no harm. */
1172 IOR_HARD_REG_SET (hard_regs_live, renumbers_live);
1174 #ifdef EH_RETURN_DATA_REGNO
1175 if (bb_has_eh_pred (bb))
1181 unsigned int regno = EH_RETURN_DATA_REGNO (i);
1182 if (regno == INVALID_REGNUM)
1184 record_one_conflict (allocnos_live, &hard_regs_live, regno);
1189 if (bb_has_abnormal_pred (bb))
1193 /* Pseudos can't go in stack regs at the start of a basic block that
1194 is reached by an abnormal edge. Likewise for call clobbered regs,
1195 because caller-save, fixup_abnormal_edges and possibly the table
1196 driven EH machinery are not quite ready to handle such regs live
1197 across such edges. */
1198 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
1200 allocno[i].no_stack_reg = 1;
1203 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
1204 record_one_conflict (allocnos_live, &hard_regs_live, i);
1207 /* No need to record conflicts for call clobbered regs if we have
1208 nonlocal labels around, as we don't ever try to allocate such
1209 regs in this case. */
1210 if (! current_function_has_nonlocal_label)
1211 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1212 if (call_used_regs [i])
1213 record_one_conflict (allocnos_live, &hard_regs_live, i);
/* Teardown: release per-allocno subreg bitmaps and the vectors.  */
1217 for (i = 0; i < (unsigned int)max_allocno; i++)
1218 if (live_subregs[i])
1219 free (live_subregs[i]);
1222 free (allocnos_live);
1223 free (live_subregs)_
1224 free (live_subregs_used);
1225 VEC_free (df_ref_t, heap, dying_regs);
1226 VEC_free (df_ref_t, heap, clobbers);