1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
28 #include "hard-reg-set.h"
34 #include "insn-config.h"
39 #include "tree-pass.h"
46 /* This pass of the compiler performs global register allocation.
47 It assigns hard register numbers to all the pseudo registers
48 that were not handled in local_alloc. Assignments are recorded
49 in the vector reg_renumber, not by changing the rtl code.
50 (Such changes are made by final). The entry point is
51 the function global_alloc.
53 After allocation is complete, the reload pass is run as a subroutine
54 of this pass, so that when a pseudo reg loses its hard reg due to
55 spilling it is possible to make a second attempt to find a hard
56 reg for it. The reload pass is independent in other respects
57 and it is run even when stupid register allocation is in use.
59 1. Assign allocation-numbers (allocnos) to the pseudo-registers
60 still needing allocations and to the pseudo-registers currently
61 allocated by local-alloc which may be spilled by reload.
62 Set up tables reg_allocno and allocno_reg to map
63 reg numbers to allocnos and vice versa.
64 max_allocno gets the number of allocnos in use.
66 2. Allocate a max_allocno by max_allocno conflict bit matrix and
67 clear it. This is called "conflict".
69 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix for
70 conflicts between allocnos and explicit hard register use (which
71 includes use of pseudo-registers allocated by local_alloc). This
72 is the hard_reg_conflicts inside each allocno.
74 3. For each basic block
75 walk forward through the block, recording which
76 pseudo-registers and which hardware registers are live.
77 Build the conflict matrix among the pseudo-registers,
78 and another between pseudo-registers and hardware registers.
79 Also record the preferred hardware registers
80 for each pseudo-register.
82 4. Sort a table of the allocnos into order of
83 desirability of the variables.
85 5. Allocate the variables in that order; each if possible into
86 a preferred register, else into another register. */
88 /* A vector of the integers from 0 to max_allocno-1,
89 sorted in the order of first-to-be-allocated first. */
91 static int *allocno_order;
93 /* Two macros to test or store 1 in an element of `conflicts'. */
/* The conflict table is a row-major bit matrix: row I describes allocno I,
   one bit per allocno J.  allocno_row_words (defined elsewhere in this
   file) is the row width in HOST_WIDE_INTs.  */
95 #define CONFLICTP(I, J) \
96 (conflicts[(I) * allocno_row_words + (unsigned) (J) / HOST_BITS_PER_WIDE_INT] \
97 & ((HOST_WIDE_INT) 1 << ((unsigned) (J) % HOST_BITS_PER_WIDE_INT)))
99 /* Set of registers that global-alloc isn't supposed to use. */
101 static HARD_REG_SET no_global_alloc_regs;
103 /* Set of registers used so far. */
105 static HARD_REG_SET regs_used_so_far;
107 /* Number of refs to each hard reg, as used by local alloc.
108 It is zero for a reg that contains global pseudos or is explicitly used. */
110 static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];
112 /* Frequency of uses of given hard reg. */
113 static int local_reg_freq[FIRST_PSEUDO_REGISTER];
115 /* Guess at live length of each hard reg, as used by local alloc.
116 This is actually the sum of the live lengths of the specific regs. */
118 static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
120 /* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
121 element I, and hard register number J.  TABLE names a HARD_REG_SET
122 field of struct allocno.  */
123 #define SET_REGBIT(TABLE, I, J) SET_HARD_REG_BIT (allocno[I].TABLE, J)
125 /* This is turned off because it doesn't work right for DImode.
126 (And it is only used for DImode, so the other cases are worthless.)
127 The problem is that it isn't true that there is NO possibility of conflict;
128 only that there is no conflict if the two pseudos get the exact same regs.
129 If they were allocated with a partial overlap, there would be a conflict.
130 We can't safely turn off the conflict unless we have another way to
131 prevent the partial overlap.
133 Idea: change hard_reg_conflicts so that instead of recording which
134 hard regs the allocno may not overlap, it records where the allocno
135 may not start. Change both where it is used and where it is updated.
136 Then there is a way to record that (reg:DI 108) may start at 10
137 but not at 9 or 11. There is still the question of how to record
138 this semi-conflict between two pseudos. */
140 /* Reg pairs for which conflict after the current insn
141 is inhibited by a REG_NO_CONFLICT note.
142 If the table gets full, we ignore any other notes--that is conservative. */
143 #define NUM_NO_CONFLICT_PAIRS 4
144 /* Number of pairs in use in this insn. */
145 int n_no_conflict_pairs;
146 static struct { int allocno1, allocno2;}
147 no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
150 /* Return true if *LOC contains an asm. */
/* NOTE(review): callback shape matches a for_each_rtx walker (see its use
   in insn_contains_asm below); several lines of this definition (return
   type, body braces, return statements) are elided from this excerpt.  */
153 insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
157 if (GET_CODE (*loc) == ASM_OPERANDS)
163 /* Return true if INSN contains an ASM.  Walks every subexpression of
164 INSN via for_each_rtx, testing each with insn_contains_asm_1.  */
166 insn_contains_asm (rtx insn)
168 return for_each_rtx (&insn, insn_contains_asm_1, NULL);
/* Fill REGS_ASM_CLOBBERED: set a 1 for every hard register (including
   every register of a multi-register hard reg) that is defined by an
   insn containing an asm.  NOTE(review): some lines of this definition
   (return type, braces, the basic-block loop header) are elided from
   this excerpt.  */
173 compute_regs_asm_clobbered (char *regs_asm_clobbered)
177 memset (regs_asm_clobbered, 0, sizeof (char) * FIRST_PSEUDO_REGISTER);
182 FOR_BB_INSNS_REVERSE (bb, insn)
184 struct df_ref **def_rec;
185 if (insn_contains_asm (insn))
186 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
188 struct df_ref *def = *def_rec;
189 unsigned int dregno = DF_REF_REGNO (def);
190 if (dregno < FIRST_PSEUDO_REGISTER)
193 enum machine_mode mode = GET_MODE (DF_REF_REAL_REG (def));
194 unsigned int end = dregno
195 + hard_regno_nregs[dregno][mode] - 1;
/* Mark every hard register occupied by this multi-word def.  */
196 for (i = dregno; i <= end; ++i)
197 regs_asm_clobbered[i] = 1;
205 /* All registers that can be eliminated. */
207 static HARD_REG_SET eliminable_regset;
/* Forward declarations for the static helpers of this pass.  */
209 static int allocno_compare (const void *, const void *);
210 static void mirror_conflicts (void);
211 static void expand_preferences (void);
212 static void prune_preferences (void);
213 static void set_preferences (void);
214 static void find_reg (int, HARD_REG_SET, int, int, int);
215 static void dump_conflicts (FILE *);
216 static void build_insn_chain (void);
219 /* Look through the list of eliminable registers. Set ELIM_SET to the
220 set of registers which may be eliminated. Set NO_GLOBAL_SET to the
221 set of registers which may not be used across blocks.
223 This will normally be called with ELIM_SET as the file static
224 variable eliminable_regset, and NO_GLOBAL_SET as the file static
225 variable NO_GLOBAL_ALLOC_REGS.
   NOTE(review): portions of this definition (return type, braces, the
   need_fp declaration, some conditionals) are elided from this excerpt.  */
228 compute_regsets (HARD_REG_SET *elim_set,
229 HARD_REG_SET *no_global_set)
232 /* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
233 Unlike regs_ever_live, elements of this array corresponding to
234 eliminable regs like the frame pointer are set if an asm sets them. */
235 char *regs_asm_clobbered = alloca (FIRST_PSEUDO_REGISTER * sizeof (char));
237 #ifdef ELIMINABLE_REGS
238 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
/* The frame pointer is needed when it cannot be omitted.  */
242 = (! flag_omit_frame_pointer
243 || (current_function_calls_alloca && EXIT_IGNORE_STACK)
244 || FRAME_POINTER_REQUIRED);
246 max_regno = max_reg_num ();
251 /* A machine may have certain hard registers that
252 are safe to use only within a basic block. */
254 CLEAR_HARD_REG_SET (*no_global_set);
255 CLEAR_HARD_REG_SET (*elim_set);
257 compute_regs_asm_clobbered (regs_asm_clobbered);
258 /* Build the regset of all eliminable registers and show we can't use those
259 that we already know won't be eliminated. */
260 #ifdef ELIMINABLE_REGS
261 for (i = 0; i < ARRAY_SIZE (eliminables); i++)
/* cannot_elim: this elimination is impossible, or it targets the stack
   pointer while a frame pointer is required.  */
264 = (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
265 || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));
267 if (!regs_asm_clobbered[eliminables[i].from])
269 SET_HARD_REG_BIT (*elim_set, eliminables[i].from);
272 SET_HARD_REG_BIT (*no_global_set, eliminables[i].from);
274 else if (cannot_elim)
275 error ("%s cannot be used in asm here",
276 reg_names[eliminables[i].from]);
278 df_set_regs_ever_live (eliminables[i].from, true);
280 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
/* Targets with a soft frame pointer: treat the hard frame pointer the
   same way as the ELIMINABLE_REGS entries above.  */
281 if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
283 SET_HARD_REG_BIT (*elim_set, HARD_FRAME_POINTER_REGNUM);
285 SET_HARD_REG_BIT (*no_global_set, HARD_FRAME_POINTER_REGNUM);
288 error ("%s cannot be used in asm here",
289 reg_names[HARD_FRAME_POINTER_REGNUM]);
291 df_set_regs_ever_live (HARD_FRAME_POINTER_REGNUM, true);
295 if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
297 SET_HARD_REG_BIT (*elim_set, FRAME_POINTER_REGNUM);
299 SET_HARD_REG_BIT (*no_global_set, FRAME_POINTER_REGNUM);
302 error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
304 df_set_regs_ever_live (FRAME_POINTER_REGNUM, true);
308 /* Perform allocation of pseudo-registers not allocated by local_alloc.
310 Return value is nonzero if reload failed
311 and we must not do any more for this function.
   NOTE(review): the function signature and several interior lines of this
   pass entry point are elided from this excerpt.  */
319 compute_regsets (&eliminable_regset, &no_global_alloc_regs);
321 /* Track which registers have already been used. Start with registers
322 explicitly in the rtl, then registers allocated by local register
325 CLEAR_HARD_REG_SET (regs_used_so_far);
326 #ifdef LEAF_REGISTERS
327 /* If we are doing the leaf function optimization, and this is a leaf
328 function, it means that the registers that take work to save are those
329 that need a register window. So prefer the ones that can be used in
332 const char *cheap_regs;
333 const char *const leaf_regs = LEAF_REGISTERS;
335 if (only_leaf_regs_used () && leaf_function_p ())
336 cheap_regs = leaf_regs;
338 cheap_regs = call_used_regs;
339 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
340 if (df_regs_ever_live_p (i) || cheap_regs[i])
341 SET_HARD_REG_BIT (regs_used_so_far, i);
344 /* We consider registers that do not have to be saved over calls as if
345 they were already used since there is no cost in using them. */
346 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
347 if (df_regs_ever_live_p (i) || call_used_regs[i])
348 SET_HARD_REG_BIT (regs_used_so_far, i);
/* Also mark every hard register already handed out by local-alloc.  */
351 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
352 if (reg_renumber[i] >= 0)
353 SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);
355 /* Establish mappings from register number to allocation number
356 and vice versa. In the process, count the allocnos. */
358 reg_allocno = XNEWVEC (int, max_regno);
360 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
364 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
365 /* Note that reg_live_length[i] < 0 indicates a "constant" reg
366 that we are supposed to refrain from putting in a hard reg.
367 -2 means do make an allocno but don't allocate it. */
368 if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
369 /* Don't allocate pseudos that cross calls,
370 if this function receives a nonlocal goto. */
371 && (! current_function_has_nonlocal_label
372 || REG_N_CALLS_CROSSED (i) == 0))
374 reg_allocno[i] = max_allocno++;
375 gcc_assert (REG_LIVE_LENGTH (i));
380 allocno = XCNEWVEC (struct allocno, max_allocno);
/* Accumulate the per-pseudo statistics into the allocno records.  */
382 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
383 if (reg_allocno[i] >= 0)
385 int num = reg_allocno[i];
386 allocno[num].reg = i;
387 allocno[num].size = PSEUDO_REGNO_SIZE (i);
388 allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
389 allocno[num].throwing_calls_crossed
390 += REG_N_THROWING_CALLS_CROSSED (i);
391 allocno[num].n_refs += REG_N_REFS (i);
392 allocno[num].freq += REG_FREQ (i);
393 if (allocno[num].live_length < REG_LIVE_LENGTH (i))
394 allocno[num].live_length = REG_LIVE_LENGTH (i);
397 /* Calculate amount of usage of each hard reg by pseudos
398 allocated by local-alloc. This is to see if we want to
400 memset (local_reg_live_length, 0, sizeof local_reg_live_length);
401 memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
402 memset (local_reg_freq, 0, sizeof local_reg_freq);
403 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
404 if (reg_renumber[i] >= 0)
406 int regno = reg_renumber[i];
407 int endregno = end_hard_regno (PSEUDO_REGNO_MODE (i), regno);
410 for (j = regno; j < endregno; j++)
412 local_reg_n_refs[j] += REG_N_REFS (i);
413 local_reg_freq[j] += REG_FREQ (i);
414 local_reg_live_length[j] += REG_LIVE_LENGTH (i);
418 /* We can't override local-alloc for a reg used not just by local-alloc. */
419 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
420 if (df_regs_ever_live_p (i))
421 local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;
/* Dump the per-register statistics when a dump file is requested.  */
425 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
427 fprintf (dump_file, "%d REG_N_REFS=%d, REG_FREQ=%d, REG_LIVE_LENGTH=%d\n",
428 (int)i, REG_N_REFS (i), REG_FREQ (i), REG_LIVE_LENGTH (i));
430 fprintf (dump_file, "regs_ever_live =");
431 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
432 if (df_regs_ever_live_p (i))
433 fprintf (dump_file, " %d", (int)i);
434 fprintf (dump_file, "\n");
436 allocno_row_words = (max_allocno + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
438 /* We used to use alloca here, but the size of what it would try to
439 allocate would occasionally cause it to exceed the stack limit and
440 cause unpredictable core dumps. Some examples were > 2Mb in size. */
441 conflicts = XCNEWVEC (HOST_WIDE_INT, max_allocno * allocno_row_words);
443 /* If there is work to be done (at least one reg to allocate),
444 perform global conflict analysis and allocate the regs. */
448 /* Scan all the insns and compute the conflicts among allocnos
449 and between allocnos and hard regs. */
453 /* There is just too much going on in the register allocators to
454 keep things up to date. At the end we have to rescan anyway
455 because things change when the reload_completed flag is set.
456 So we just turn off scanning and we will rescan by hand.
458 However, we needed to do the rescanning before this point to
459 get the new insns inserted by local_alloc scanned for
461 df_set_flags (DF_NO_INSN_RESCAN);
465 /* Eliminate conflicts between pseudos and eliminable registers. If
466 the register is not eliminated, the pseudo won't really be able to
467 live in the eliminable register, so the conflict doesn't matter.
468 If we do eliminate the register, the conflict will no longer exist.
469 So in either case, we can ignore the conflict. Likewise for
474 for (i = 0; i < (size_t) max_allocno; i++)
476 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
478 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
480 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
484 /* Try to expand the preferences by merging them between allocnos. */
486 expand_preferences ();
488 /* Determine the order to allocate the remaining pseudo registers. */
490 allocno_order = XNEWVEC (int, max_allocno);
491 for (i = 0; i < (size_t) max_allocno; i++)
492 allocno_order[i] = i;
494 /* Default the size to 1, since allocno_compare uses it to divide by.
495 Also convert allocno_live_length of zero to -1. A length of zero
496 can occur when all the registers for that allocno have reg_live_length
497 equal to -2. In this case, we want to make an allocno, but not
498 allocate it. So avoid the divide-by-zero and set it to a low
501 for (i = 0; i < (size_t) max_allocno; i++)
503 if (allocno[i].size == 0)
505 if (allocno[i].live_length == 0)
506 allocno[i].live_length = -1;
509 qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);
511 prune_preferences ();
514 dump_conflicts (dump_file);
516 /* Try allocating them, one by one, in that order,
517 except for parameters marked with reg_live_length[regno] == -2. */
519 for (i = 0; i < (size_t) max_allocno; i++)
520 if (reg_renumber[allocno[allocno_order[i]].reg] < 0
521 && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
523 if (!dbg_cnt (global_alloc_at_reg))
525 /* If we have more than one register class,
526 first try allocating in the class that is cheapest
527 for this pseudo-reg. If that fails, try any reg. */
528 if (N_REG_CLASSES > 1)
530 find_reg (allocno_order[i], 0, 0, 0, 0);
531 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
534 if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
535 find_reg (allocno_order[i], 0, 1, 0, 0);
538 free (allocno_order);
541 /* Do the reloads now while the allocno data still exists, so that we can
542 try to assign new hard regs to any pseudo regs that are spilled. */
544 #if 0 /* We need to eliminate regs even if there is no rtl code,
545 for the sake of debugging information. */
546 if (n_basic_blocks > NUM_FIXED_BLOCKS)
550 retval = reload (get_insns (), 1);
561 /* Sort predicate for ordering the allocnos.
562 Returns -1 (1) if *v1 should be allocated before (after) *v2.
   NOTE(review): the priority-variable declarations and the return
   statements of this qsort comparator are elided from this excerpt.  */
565 allocno_compare (const void *v1p, const void *v2p)
567 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
568 /* Note that the quotient will never be bigger than
569 the value of floor_log2 times the maximum number of
570 times a register can occur in one insn (surely less than 100)
571 weighted by the frequency (maximally REG_FREQ_MAX).
572 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
574 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
575 / allocno[v1].live_length)
576 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
578 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
579 / allocno[v2].live_length)
580 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
584 /* If regs are equally good, sort by allocno,
585 so that the results of qsort leave nothing to chance. */
589 /* Expand the preference information by looking for cases where one allocno
590 dies in an insn that sets an allocno. If those two allocnos don't conflict,
591 merge any preferences between those allocnos. */
594 expand_preferences (void)
600 /* We only try to handle the most common cases here. Most of the cases
601 where this wins are reg-reg copies. */
603 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
605 && (set = single_set (insn)) != 0
606 && REG_P (SET_DEST (set))
607 && reg_allocno[REGNO (SET_DEST (set))] >= 0)
608 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
609 if (REG_NOTE_KIND (link) == REG_DEAD
610 && REG_P (XEXP (link, 0))
611 && reg_allocno[REGNO (XEXP (link, 0))] >= 0
612 && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
613 reg_allocno[REGNO (XEXP (link, 0))]))
615 int a1 = reg_allocno[REGNO (SET_DEST (set))];
616 int a2 = reg_allocno[REGNO (XEXP (link, 0))];
/* Merge copy preferences only when the dying reg is itself the
   source of the set, i.e. this really is a reg-reg copy.  */
618 if (XEXP (link, 0) == SET_SRC (set))
620 IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
621 allocno[a2].hard_reg_copy_preferences);
622 IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
623 allocno[a1].hard_reg_copy_preferences);
/* Ordinary and full preferences are merged in both directions
   regardless of whether the insn was a copy.  */
626 IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
627 allocno[a2].hard_reg_preferences);
628 IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
629 allocno[a1].hard_reg_preferences);
630 IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
631 allocno[a2].hard_reg_full_preferences);
632 IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
633 allocno[a1].hard_reg_full_preferences);
638 /* Try to set a preference for an allocno to a hard register.
639 We are passed DEST and SRC which are the operands of a SET. It is known
640 that SRC is a register. If SRC or the first operand of SRC is a register,
641 try to set a preference. If one of the two is a hard register and the other
642 is a pseudo-register, mark the preference.
644 Note that we are not as aggressive as local-alloc in trying to tie a
645 pseudo-register to a hard register.
   NOTE(review): some interior lines (variable declarations, a few else
   arms and closing braces) are elided from this excerpt.  */
648 set_preference (rtx dest, rtx src)
650 unsigned int src_regno, dest_regno, end_regno;
651 /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
652 to compensate for subregs in SRC or DEST. */
/* If SRC's first operand is an expression (format code 'e'), look through
   it; it is then no longer a plain copy.  */
657 if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
658 src = XEXP (src, 0), copy = 0;
660 /* Get the reg number for both SRC and DEST.
661 If neither is a reg, give up. */
664 src_regno = REGNO (src);
665 else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
667 src_regno = REGNO (SUBREG_REG (src));
669 if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
670 offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
671 GET_MODE (SUBREG_REG (src)),
675 offset += (SUBREG_BYTE (src)
676 / REGMODE_NATURAL_SIZE (GET_MODE (src)));
682 dest_regno = REGNO (dest);
683 else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
685 dest_regno = REGNO (SUBREG_REG (dest));
687 if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
688 offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
689 GET_MODE (SUBREG_REG (dest)),
693 offset -= (SUBREG_BYTE (dest)
694 / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
699 /* Convert either or both to hard reg numbers. */
701 if (reg_renumber[src_regno] >= 0)
702 src_regno = reg_renumber[src_regno];
704 if (reg_renumber[dest_regno] >= 0)
705 dest_regno = reg_renumber[dest_regno];
707 /* Now if one is a hard reg and the other is a global pseudo
708 then give the other a preference. */
710 if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
711 && reg_allocno[src_regno] >= 0)
713 dest_regno -= offset;
714 if (dest_regno < FIRST_PSEUDO_REGISTER)
717 SET_REGBIT (hard_reg_copy_preferences,
718 reg_allocno[src_regno], dest_regno);
720 SET_REGBIT (hard_reg_preferences,
721 reg_allocno[src_regno], dest_regno);
/* A full preference covers every hard reg the value would occupy.  */
722 end_regno = end_hard_regno (GET_MODE (dest), dest_regno);
723 for (i = dest_regno; i < end_regno; i++)
724 SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
/* Symmetric case: hard-reg SRC feeding a global pseudo DEST.  */
728 if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
729 && reg_allocno[dest_regno] >= 0)
732 if (src_regno < FIRST_PSEUDO_REGISTER)
735 SET_REGBIT (hard_reg_copy_preferences,
736 reg_allocno[dest_regno], src_regno);
738 SET_REGBIT (hard_reg_preferences,
739 reg_allocno[dest_regno], src_regno);
740 end_regno = end_hard_regno (GET_MODE (src), src_regno);
741 for (i = src_regno; i < end_regno; i++)
742 SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
747 /* Helper function for set_preferences.  note_stores callback: peels a
748 SUBREG wrapper off REG and, for stores that are real SETs (not
749 CLOBBERs), records a preference between the dest and the set source.  */
749 set_preferences_1 (rtx reg, const_rtx setter, void *data ATTRIBUTE_UNUSED)
751 if (GET_CODE (reg) == SUBREG)
752 reg = SUBREG_REG (reg);
758 if (GET_CODE (setter) != CLOBBER)
759 set_preference (reg, SET_SRC (setter));
762 /* Scan all of the insns and initialize the preferences.  Walks every
763 insn of every basic block and records stores via set_preferences_1.  */
765 set_preferences (void)
770 FOR_BB_INSNS_REVERSE (bb, insn)
775 note_stores (PATTERN (insn), set_preferences_1, NULL);
781 /* Prune the preferences for global registers to exclude registers that cannot
784 Compute `regs_someone_prefers', which is a bitmask of the hard registers
785 that are preferred by conflicting registers of lower priority. If possible,
786 we will avoid using these registers. */
789 prune_preferences (void)
793 int *allocno_to_order = XNEWVEC (int, max_allocno);
795 /* Scan from least important to most important.
796 For each allocno, remove from preferences registers that cannot be used,
797 either because of conflicts or register type. Then compute all registers
798 preferred by each lower-priority register that conflicts. */
800 for (i = max_allocno - 1; i >= 0; i--)
804 num = allocno_order[i];
805 allocno_to_order[num] = i;
806 COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);
/* Exclude fixed regs; if the allocno crosses calls, exclude every
   call-used reg as well.  */
808 if (allocno[num].calls_crossed == 0)
809 IOR_HARD_REG_SET (temp, fixed_reg_set);
811 IOR_HARD_REG_SET (temp, call_used_reg_set);
/* Also exclude everything outside the allocno's preferred class.  */
813 IOR_COMPL_HARD_REG_SET
815 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);
817 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
818 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
819 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
822 for (i = max_allocno - 1; i >= 0; i--)
824 /* Merge in the preferences of lower-priority registers (they have
825 already been pruned). If we also prefer some of those registers,
826 don't exclude them unless we are of a smaller size (in which case
827 we want to give the lower-priority allocno the first chance for
829 HARD_REG_SET temp, temp2;
832 num = allocno_order[i];
834 CLEAR_HARD_REG_SET (temp);
835 CLEAR_HARD_REG_SET (temp2);
837 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
840 if (allocno_to_order[allocno2] > i)
842 if (allocno[allocno2].size <= allocno[num].size)
843 IOR_HARD_REG_SET (temp,
844 allocno[allocno2].hard_reg_full_preferences);
846 IOR_HARD_REG_SET (temp2,
847 allocno[allocno2].hard_reg_full_preferences);
851 AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
852 IOR_HARD_REG_SET (temp, temp2);
853 COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
855 free (allocno_to_order);
858 /* Assign a hard register to allocno NUM; look for one that is the beginning
859 of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
860 The registers marked in PREFREGS are tried first.
862 LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
863 be used for this allocation.
865 If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
866 Otherwise ignore that preferred class and use the alternate class.
868 If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
869 will have to be saved and restored at calls.
871 RETRYING is nonzero if this is called from retry_global_alloc.
873 If we find one, record it in reg_renumber.
874 If not, do nothing.
   NOTE(review): many interior lines of this function (braces, else arms,
   loop bodies, the tail of the function) are elided from this excerpt.  */
877 find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
879 int i, best_reg, pass;
880 HARD_REG_SET used, used1, used2;
882 enum reg_class class = (alt_regs_p
883 ? reg_alternate_class (allocno[num].reg)
884 : reg_preferred_class (allocno[num].reg));
885 enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);
/* used1 starts as the set of registers that are off limits: fixed regs,
   or call-clobbered regs when the allocno lives across calls.  */
887 if (accept_call_clobbered)
888 COPY_HARD_REG_SET (used1, call_fixed_reg_set);
889 else if (allocno[num].calls_crossed == 0)
890 COPY_HARD_REG_SET (used1, fixed_reg_set);
892 COPY_HARD_REG_SET (used1, call_used_reg_set);
894 /* Some registers should not be allocated in global-alloc. */
895 IOR_HARD_REG_SET (used1, no_global_alloc_regs);
897 IOR_HARD_REG_SET (used1, losers);
899 IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
/* used2 excludes everything in used1 except this allocno's own
   conflicts; it is consulted by the local-alloc-eviction code below.  */
900 COPY_HARD_REG_SET (used2, used1);
902 IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);
904 #ifdef CANNOT_CHANGE_MODE_CLASS
905 cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
908 /* Try each hard reg to see if it fits. Do this in two passes.
909 In the first pass, skip registers that are preferred by some other pseudo
910 to give it a better chance of getting one of those registers. Only if
911 we can't get a register when excluding those do we take one of them.
912 However, we never allocate a register for the first time in pass 0. */
914 COPY_HARD_REG_SET (used, used1);
915 IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
916 IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);
919 for (i = FIRST_PSEUDO_REGISTER, pass = 0;
920 pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
924 COPY_HARD_REG_SET (used, used1);
925 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
927 #ifdef REG_ALLOC_ORDER
928 int regno = reg_alloc_order[i];
932 if (! TEST_HARD_REG_BIT (used, regno)
933 && HARD_REGNO_MODE_OK (regno, mode)
934 && (allocno[num].calls_crossed == 0
935 || accept_call_clobbered
936 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
/* A multi-word value needs a run of free regs up to LIM.  */
939 int lim = end_hard_regno (mode, regno);
942 && ! TEST_HARD_REG_BIT (used, j));
949 #ifndef REG_ALLOC_ORDER
950 i = j; /* Skip starting points we know will lose */
956 /* See if there is a preferred register with the same class as the register
957 we allocated above. Making this restriction prevents register
958 preferencing from creating worse register allocation.
960 Remove from the preferred registers and conflicting registers. Note that
961 additional conflicts may have been added after `prune_preferences' was
964 First do this for those registers with copy preferences, then all
965 preferred registers. */
967 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
968 if (!hard_reg_set_empty_p (allocno[num].hard_reg_copy_preferences)
971 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
972 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
973 && HARD_REGNO_MODE_OK (i, mode)
974 && (allocno[num].calls_crossed == 0
975 || accept_call_clobbered
976 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
977 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
978 || reg_class_subset_p (REGNO_REG_CLASS (i),
979 REGNO_REG_CLASS (best_reg))
980 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
981 REGNO_REG_CLASS (i))))
984 int lim = end_hard_regno (mode, i);
987 && ! TEST_HARD_REG_BIT (used, j)
988 && (REGNO_REG_CLASS (j)
989 == REGNO_REG_CLASS (best_reg + (j - i))
990 || reg_class_subset_p (REGNO_REG_CLASS (j),
991 REGNO_REG_CLASS (best_reg + (j - i)))
992 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
993 REGNO_REG_CLASS (j))));
/* Same search again, but over all preferred registers rather than
   just the copy preferences.  */
1003 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
1004 if (!hard_reg_set_empty_p (allocno[num].hard_reg_preferences)
1007 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1008 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
1009 && HARD_REGNO_MODE_OK (i, mode)
1010 && (allocno[num].calls_crossed == 0
1011 || accept_call_clobbered
1012 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
1013 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
1014 || reg_class_subset_p (REGNO_REG_CLASS (i),
1015 REGNO_REG_CLASS (best_reg))
1016 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
1017 REGNO_REG_CLASS (i))))
1020 int lim = end_hard_regno (mode, i);
1023 && ! TEST_HARD_REG_BIT (used, j)
1024 && (REGNO_REG_CLASS (j)
1025 == REGNO_REG_CLASS (best_reg + (j - i))
1026 || reg_class_subset_p (REGNO_REG_CLASS (j),
1027 REGNO_REG_CLASS (best_reg + (j - i)))
1028 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
1029 REGNO_REG_CLASS (j))));
1040 /* If we haven't succeeded yet, try with caller-saves.
1041 We need not check to see if the current function has nonlocal
1042 labels because we don't put any pseudos that are live over calls in
1043 registers in that case. */
1045 if (flag_caller_saves && best_reg < 0)
1047 /* Did not find a register. If it would be profitable to
1048 allocate a call-clobbered register and save and restore it
1049 around calls, do that. Don't do this if it crosses any calls
1050 that might throw. */
1051 if (! accept_call_clobbered
1052 && allocno[num].calls_crossed != 0
1053 && allocno[num].throwing_calls_crossed == 0
1054 && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
1055 allocno[num].calls_crossed))
1057 HARD_REG_SET new_losers;
1059 CLEAR_HARD_REG_SET (new_losers);
1061 COPY_HARD_REG_SET (new_losers, losers);
/* Recurse, this time accepting call-clobbered registers.  */
1063 IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
1064 find_reg (num, new_losers, alt_regs_p, 1, retrying);
1065 if (reg_renumber[allocno[num].reg] >= 0)
1067 caller_save_needed = 1;
1073 /* If we haven't succeeded yet,
1074 see if some hard reg that conflicts with us
1075 was utilized poorly by local-alloc.
1076 If so, kick out the regs that were put there by local-alloc
1077 so we can use it instead. */
1078 if (best_reg < 0 && !retrying
1079 /* Let's not bother with multi-reg allocnos. */
1080 && allocno[num].size == 1
1081 && REG_BASIC_BLOCK (allocno[num].reg) == REG_BLOCK_GLOBAL)
1083 /* Count from the end, to find the least-used ones first. */
1084 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1086 #ifdef REG_ALLOC_ORDER
1087 int regno = reg_alloc_order[i];
1092 if (local_reg_n_refs[regno] != 0
1093 /* Don't use a reg no good for this pseudo. */
1094 && ! TEST_HARD_REG_BIT (used2, regno)
1095 && HARD_REGNO_MODE_OK (regno, mode)
1096 /* The code below assumes that we need only a single
1097 register, but the check of allocno[num].size above
1098 was not enough. Sometimes we need more than one
1099 register for a single-word value. */
1100 && hard_regno_nregs[regno][mode] == 1
1101 && (allocno[num].calls_crossed == 0
1102 || accept_call_clobbered
1103 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
1104 #ifdef CANNOT_CHANGE_MODE_CLASS
1105 && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
1109 && (!allocno[num].no_stack_reg
1110 || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
1114 /* We explicitly evaluate the divide results into temporary
1115 variables so as to avoid excess precision problems that occur
1116 on an i386-unknown-sysv4.2 (unixware) host. */
1118 double tmp1 = ((double) local_reg_freq[regno] * local_reg_n_refs[regno]
1119 / local_reg_live_length[regno]);
1120 double tmp2 = ((double) allocno[num].freq * allocno[num].n_refs
1121 / allocno[num].live_length);
1125 /* Hard reg REGNO was used less in total by local regs
1126 than it would be used by this one allocno! */
1130 fprintf (dump_file, "Regno %d better for global %d, ",
1131 regno, allocno[num].reg);
1132 fprintf (dump_file, "fr:%d, ll:%d, nr:%d ",
1133 allocno[num].freq, allocno[num].live_length,
1134 allocno[num].n_refs);
1135 fprintf (dump_file, "(was: fr:%d, ll:%d, nr:%d)\n",
1136 local_reg_freq[regno],
1137 local_reg_live_length[regno],
1138 local_reg_n_refs[regno]);
/* Evict every local-alloc pseudo occupying REGNO.  */
1141 for (k = 0; k < max_regno; k++)
1142 if (reg_renumber[k] >= 0)
1144 int r = reg_renumber[k];
1146 = end_hard_regno (PSEUDO_REGNO_MODE (k), r);
1148 if (regno >= r && regno < endregno)
1152 "Local Reg %d now on stack\n", k);
1153 reg_renumber[k] = -1;
1164 /* Did we find a register? */
1169 HARD_REG_SET this_reg;
1171 /* Yes. Record it as the hard register of this pseudo-reg. */
1172 reg_renumber[allocno[num].reg] = best_reg;
1174 /* Make a set of the hard regs being allocated. */
1175 CLEAR_HARD_REG_SET (this_reg);
1176 lim = end_hard_regno (mode, best_reg);
1177 for (j = best_reg; j < lim; j++)
1179 SET_HARD_REG_BIT (this_reg, j);
1180 SET_HARD_REG_BIT (regs_used_so_far, j);
1181 /* This is no longer a reg used just by local regs. */
1182 local_reg_n_refs[j] = 0;
1183 local_reg_freq[j] = 0;
1185 /* For each other pseudo-reg conflicting with this one,
1186 mark it as conflicting with the hard regs this one occupies. */
1188 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
1190 IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
1195 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1196 Perhaps it had previously seemed not worth a hard reg,
1197 or perhaps its old hard reg has been commandeered for reloads.
1198 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1199 they do not appear to be allocated.
1200 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1203 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
/* Translate the pseudo's register number into its allocno index;
   find_reg works in terms of allocnos, not regnos.  */
1205 int alloc_no = reg_allocno[regno];
1208 /* If we have more than one register class,
1209 first try allocating in the class that is cheapest
1210 for this pseudo-reg. If that fails, try any reg. */
/* Both find_reg calls below pass the final argument (retrying) as 1,
   since this path is only reached when reload re-attempts allocation.
   The second call sets alt_regs_p so the alternate class is tried.  */
1211 if (N_REG_CLASSES > 1)
1212 find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1213 if (reg_renumber[regno] < 0
1214 && reg_alternate_class (regno) != NO_REGS)
1215 find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1217 /* If we found a register, modify the RTL for the register to
1218 show the hard register, and mark that register live. */
1219 if (reg_renumber[regno] >= 0)
/* Success: rewrite the pseudo's REG rtx in place to carry the hard
   register number, and record that home as live.  */
1221 SET_REGNO (regno_reg_rtx[regno], reg_renumber[regno]);
1222 mark_home_live (regno);
1227 /* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true. */
1229 mirror_conflicts (void)
/* RW is the width of one row of the conflict bit matrix in
   HOST_WIDE_INT words; RWB is the same width measured in bits.  */
1232 int rw = allocno_row_words;
1233 int rwb = rw * HOST_BITS_PER_WIDE_INT;
/* P scans the matrix linearly, one source word at a time.  Q0/Q1/Q2
   locate the transposed destination bits in the other rows.  */
1234 HOST_WIDE_INT *p = conflicts;
1235 HOST_WIDE_INT *q0 = conflicts, *q1, *q2;
1236 unsigned HOST_WIDE_INT mask;
/* MASK is the single bit that represents allocno I inside a
   destination word; it shifts left as I advances.  */
1238 for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
1245 for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
1247 unsigned HOST_WIDE_INT word;
/* Walk the set bits of the current source word; Q2 advances one
   full row (RW words) per bit position examined.  */
1249 for (word = (unsigned HOST_WIDE_INT) *p++, q2 = q1; word;
1250 word >>= 1, q2 += rw)
1259 /* Indicate that hard register number FROM was eliminated and replaced with
1260 an offset from hard register number TO. The status of hard registers live
1261 at the start of a basic block is updated by replacing a use of FROM with
1265 mark_elimination (int from, int to)
/* For each basic block, rewrite the dataflow live-in set so that a
   live FROM register becomes a live TO register.  */
1271 regset r = DF_LIVE_IN (bb);
1272 if (REGNO_REG_SET_P (r, from))
1274 CLEAR_REGNO_REG_SET (r, from);
1275 SET_REGNO_REG_SET (r, to);
/* Dump one insn-chain element C to FILE: the insn's UID followed by
   its live_throughout and dead_or_set register bitmaps.  */
1281 print_insn_chain (FILE *file, struct insn_chain *c)
1283 fprintf (file, "insn=%d, ", INSN_UID(c->insn));
1284 bitmap_print (file, &c->live_throughout, "live_throughout: ", ", ");
1285 bitmap_print (file, &c->dead_or_set, "dead_or_set: ", "\n");
/* Dump the entire reload_insn_chain list to FILE, one element per
   line via print_insn_chain.  */
1289 print_insn_chains (FILE *file)
1291 struct insn_chain *c;
1292 for (c = reload_insn_chain; c ; c = c->next)
1293 print_insn_chain (file, c);
1295 /* Walk the insns of the current function and build reload_insn_chain,
1296 and record register life information. */
1298 build_insn_chain (void)
1301 struct insn_chain **p = &reload_insn_chain;
1303 struct insn_chain *c = NULL;
1304 struct insn_chain *next = NULL;
/* LIVE_RELEVANT_REGS tracks the registers reload cares about: all
   non-eliminable hard regs plus pseudos that got a hard reg.  */
1305 bitmap live_relevant_regs = BITMAP_ALLOC (NULL);
1306 bitmap elim_regset = BITMAP_ALLOC (NULL);
1307 /* live_subregs is a vector used to keep accurate information about
1308 which hardregs are live in multiword pseudos. live_subregs and
1309 live_subregs_used are indexed by pseudo number. The live_subreg
1310 entry for a particular pseudo is only used if the corresponding
1311 element is non zero in live_subregs_used. The value in
1312 live_subregs_used is number of bytes that the pseudo can
1314 sbitmap *live_subregs = XCNEWVEC (sbitmap, max_regno);
1315 int *live_subregs_used = XNEWVEC (int, max_regno);
/* Collect the registers being eliminated so they can be removed
   from the live sets recorded on each chain element.  */
1317 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1318 if (TEST_HARD_REG_BIT (eliminable_regset, i))
1319 bitmap_set_bit (elim_regset, i);
/* Process blocks, and within each block the insns, in reverse order
   so liveness can be propagated backwards from the live-out set.  */
1321 FOR_EACH_BB_REVERSE (bb)
1326 CLEAR_REG_SET (live_relevant_regs);
1327 memset (live_subregs_used, 0, max_regno * sizeof (int));
/* Seed liveness from this block's live-out set: first the hard
   registers, then pseudos that were assigned a hard register.  */
1329 EXECUTE_IF_SET_IN_BITMAP (df_get_live_out (bb), 0, i, bi)
1331 if (i >= FIRST_PSEUDO_REGISTER)
1333 bitmap_set_bit (live_relevant_regs, i);
1336 EXECUTE_IF_SET_IN_BITMAP (df_get_live_out (bb), FIRST_PSEUDO_REGISTER, i, bi)
1338 if (reg_renumber[i] >= 0)
1339 bitmap_set_bit (live_relevant_regs, i);
1342 FOR_BB_INSNS_REVERSE (bb, insn)
/* Notes and barriers get no chain element.  */
1344 if (!NOTE_P (insn) && !BARRIER_P (insn))
1346 unsigned int uid = INSN_UID (insn);
1347 struct df_ref **def_rec;
1348 struct df_ref **use_rec;
1350 c = new_insn_chain ();
1357 c->block = bb->index;
/* First pass over this insn: process the definitions, updating
   dead_or_set and killing liveness for full (non-partial) defs.  */
1360 for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
1362 struct df_ref *def = *def_rec;
1363 unsigned int regno = DF_REF_REGNO (def);
1365 /* Ignore may clobbers because these are generated
1366 from calls. However, every other kind of def is
1367 added to dead_or_set. */
1368 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
1370 if (regno < FIRST_PSEUDO_REGISTER)
1372 if (! fixed_regs[regno])
1373 bitmap_set_bit (&c->dead_or_set, regno);
1375 else if (reg_renumber[regno] >= 0)
1376 bitmap_set_bit (&c->dead_or_set, regno);
/* Conditional defs do not kill the register, so only update
   liveness for unconditional defs of relevant registers.  */
1379 if ((regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
1380 && (!DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL)))
1382 rtx reg = DF_REF_REG (def);
1383 /* We can model subregs, but not if they are
1384 wrapped in ZERO_EXTRACTS. */
1385 if (GET_CODE (reg) == SUBREG
1386 && !DF_REF_FLAGS_IS_SET (def, DF_REF_EXTRACT))
/* Subreg def: clear only the bytes actually written, tracked
   per-byte in live_subregs[regno].  */
1388 unsigned int start = SUBREG_BYTE (reg);
1389 unsigned int last = start + GET_MODE_SIZE (GET_MODE (reg));
1391 ra_init_live_subregs (bitmap_bit_p (live_relevant_regs, regno),
1392 live_subregs, live_subregs_used,
1394 /* Ignore the paradoxical bits. */
1395 if ((int)last > live_subregs_used[regno])
1396 last = live_subregs_used[regno];
1398 while (start < last)
1400 RESET_BIT (live_subregs[regno], start);
/* If no bytes remain live, the whole pseudo is dead here.  */
1404 if (sbitmap_empty_p (live_subregs[regno]))
1406 live_subregs_used[regno] = 0;
1407 bitmap_clear_bit (live_relevant_regs, regno);
1410 /* Set live_relevant_regs here because
1411 that bit has to be true to get us to
1412 look at the live_subregs fields. */
1413 bitmap_set_bit (live_relevant_regs, regno);
1417 /* DF_REF_PARTIAL is generated for
1418 subregs, STRICT_LOW_PART, and
1419 ZERO_EXTRACT. We handle the subreg
1420 case above so here we have to keep from
1421 modeling the def as a killing def. */
1422 if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL))
1424 bitmap_clear_bit (live_relevant_regs, regno);
1425 live_subregs_used[regno] = 0;
/* Record what is live across (throughout) this insn, minus the
   registers being eliminated.  */
1431 bitmap_and_compl_into (live_relevant_regs, elim_regset);
1432 bitmap_copy (&c->live_throughout, live_relevant_regs);
/* Second pass: process the uses, making their registers live and
   marking last uses in dead_or_set.  */
1435 for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
1437 struct df_ref *use = *use_rec;
1438 unsigned int regno = DF_REF_REGNO (use);
1439 rtx reg = DF_REF_REG (use);
1441 /* DF_REF_READ_WRITE on a use means that this use
1442 is fabricated from a def that is a partial set
1443 to a multiword reg. Here, we only model the
1444 subreg case that is not wrapped in ZERO_EXTRACT
1445 precisely so we do not need to look at the
1447 if (DF_REF_FLAGS_IS_SET (use, DF_REF_READ_WRITE)
1448 && !DF_REF_FLAGS_IS_SET (use, DF_REF_EXTRACT)
1449 && DF_REF_FLAGS_IS_SET (use, DF_REF_SUBREG))
1452 /* Add the last use of each var to dead_or_set. */
/* Scanning backwards, a use of a register not currently live
   is the last use of that register.  */
1453 if (!bitmap_bit_p (live_relevant_regs, regno))
1455 if (regno < FIRST_PSEUDO_REGISTER)
1457 if (! fixed_regs[regno])
1458 bitmap_set_bit (&c->dead_or_set, regno);
1460 else if (reg_renumber[regno] >= 0)
1461 bitmap_set_bit (&c->dead_or_set, regno);
1464 if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
1466 if (GET_CODE (reg) == SUBREG
1467 && !DF_REF_FLAGS_IS_SET (use, DF_REF_EXTRACT))
/* Subreg use: set live only the bytes actually read.  */
1469 unsigned int start = SUBREG_BYTE (reg);
1470 unsigned int last = start + GET_MODE_SIZE (GET_MODE (reg));
1472 ra_init_live_subregs (bitmap_bit_p (live_relevant_regs, regno),
1473 live_subregs, live_subregs_used,
1476 /* Ignore the paradoxical bits. */
1477 if ((int)last > live_subregs_used[regno])
1478 last = live_subregs_used[regno];
1480 while (start < last)
1482 SET_BIT (live_subregs[regno], start);
1487 /* Resetting the live_subregs_used is
1488 effectively saying do not use the subregs
1489 because we are reading the whole
1491 live_subregs_used[regno] = 0;
1492 bitmap_set_bit (live_relevant_regs, regno);
/* Release the per-pseudo subreg bitmaps and working storage.  */
1499 for (i = 0; i < (unsigned int)max_regno; i++)
1500 if (live_subregs[i])
1501 free (live_subregs[i]);
/* C is the chain element built last, i.e. for the first insn of the
   function, so it becomes the head of reload_insn_chain.  */
1503 reload_insn_chain = c;
1506 free (live_subregs);
1507 free (live_subregs_used);
1508 BITMAP_FREE (live_relevant_regs);
1509 BITMAP_FREE (elim_regset);
1512 print_insn_chains (dump_file);
1515 /* Print debugging trace information if -dg switch is given,
1516 showing the information on which the allocation decisions are based. */
1519 dump_conflicts (FILE *file)
1522 int has_preferences;
/* First count the allocnos that still need a hard register
   (reg_renumber < 0); presumably NREGS accumulates on the elided
   lines — TODO confirm against the full source.  */
1525 for (i = 0; i < max_allocno; i++)
1527 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1531 fprintf (file, ";; %d regs to allocate:", nregs)
1532 for (i = 0; i < max_allocno; i++)
1535 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
/* Print the allocno's representative reg, then any other regnos
   that map to the same allocno (shown as "+N"), then its size when
   it needs more than one hard register.  */
1537 fprintf (file, " %d", allocno[allocno_order[i]].reg);
1538 for (j = 0; j < max_regno; j++)
1539 if (reg_allocno[j] == allocno_order[i]
1540 && j != allocno[allocno_order[i]].reg)
1541 fprintf (file, "+%d", j);
1542 if (allocno[allocno_order[i]].size != 1)
1543 fprintf (file, " (%d)", allocno[allocno_order[i]].size);
1545 fprintf (file, "\n");
/* For every allocno, list its conflicts: first conflicting allocnos
   (by representative reg), then conflicting non-fixed hard regs.  */
1547 for (i = 0; i < max_allocno; i++)
1550 fprintf (file, ";; %d conflicts:", allocno[i].reg);
1551 for (j = 0; j < max_allocno; j++)
1552 if (CONFLICTP (j, i))
1553 fprintf (file, " %d", allocno[j].reg);
1554 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1555 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j) && ! fixed_regs[j])
1556 fprintf (file, " %d", j);
1557 fprintf (file, "\n");
/* Only emit a preferences line when at least one hard register is
   preferred by this allocno.  */
1559 has_preferences = 0;
1560 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1561 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1562 has_preferences = 1;
1564 if (! has_preferences)
1566 fprintf (file, ";; %d preferences:", allocno[i].reg);
1567 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1568 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1569 fprintf (file, " %d", j);
1570 fprintf (file, "\n");
1572 fprintf (file, "\n");
/* Dump to FILE the hard register assigned to each pseudo (its
   "disposition") and the set of hard registers ever live.  */
1576 dump_global_regs (FILE *file)
1580 fprintf (file, ";; Register dispositions:\n");
/* One "pseudo in hardreg" pair per successfully allocated pseudo;
   J apparently counts entries for line wrapping on elided lines —
   TODO confirm against the full source.  */
1581 for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
1582 if (reg_renumber[i] >= 0)
1584 fprintf (file, "%d in %d ", i, reg_renumber[i]);
1586 fprintf (file, "\n");
1589 fprintf (file, "\n\n;; Hard regs used: ");
1590 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1591 if (df_regs_ever_live_p (i))
1592 fprintf (file, " %d", i);
1593 fprintf (file, "\n\n");
1596 /* Run old register allocator. Return TRUE if we must exit
1597 rest_of_compilation upon return. */
1599 rest_of_handle_global_alloc (void)
1603 /* If optimizing, allocate remaining pseudo-regs. Do the reload
1604 pass fixing up any insns that are invalid. */
1605 if (optimize && dbg_cnt (global_alloc_at_func))
1606 failure = global_alloc ();
1609 /* There is just too much going on in the register allocators to
1610 keep things up to date. At the end we have to rescan anyway
1611 because things change when the reload_completed flag is set.
1612 So we just turn off scanning and we will rescan by hand. */
1613 df_set_flags (DF_NO_INSN_RESCAN);
/* This branch (visible only in part; the enclosing else belongs to
   elided lines) sets up regsets and the insn chain, then runs reload
   directly.  */
1614 compute_regsets (&eliminable_regset, &no_global_alloc_regs);
1615 build_insn_chain ();
1616 df_set_flags (DF_NO_INSN_RESCAN);
1617 failure = reload (get_insns (), 0);
/* Dump the final register dispositions when dumping is enabled for
   this pass.  */
1620 if (dump_enabled_p (pass_global_alloc.static_pass_number))
1622 timevar_push (TV_DUMP);
1623 dump_global_regs (dump_file);
1624 timevar_pop (TV_DUMP);
1627 /* FIXME: This appears on the surface to be wrong thing to be doing.
1628 So much of the compiler is designed to check reload_completed to
1629 see if it is running after reload that seems doomed to failure.
1630 We should be returning a value that says that we have found
1631 errors so that nothing but the cleanup passes are run
1633 gcc_assert (reload_completed || failure);
1634 reload_completed = !failure;
1636 /* The world has changed so much that at this point we might as well
1637 just rescan everything. Not that df_rescan_all_insns is not
1638 going to help here because it does not touch the artificial uses
1640 df_finish_pass (true);
1642 df_live_add_problem ();
1643 df_scan_alloc (NULL);
/* Free per-register statistics gathered for allocation.  */
1649 regstat_free_n_sets_and_refs ();
1654 struct tree_opt_pass pass_global_alloc =
1658 rest_of_handle_global_alloc, /* execute */
1661 0, /* static_pass_number */
1662 TV_GLOBAL_ALLOC, /* tv_id */
1663 0, /* properties_required */
1664 0, /* properties_provided */
1665 0, /* properties_destroyed */
1666 0, /* todo_flags_start */
1667 TODO_dump_func | TODO_verify_rtl_sharing
1668 | TODO_ggc_collect, /* todo_flags_finish */