1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
31 #include "hard-reg-set.h"
34 #include "basic-block.h"
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
by copying values temporarily into registers for the insns
that need them.
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
/* Cost of moving a value between two hard registers, if the target
   does not provide its own definition.  */
#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

/* Cost of moving a value between a register and memory, if the target
   does not provide its own definition.  */
#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106 rtx *reg_equiv_address;
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
162 (spill_reg_order prevents these registers from being used to start a
164 static HARD_REG_SET bad_spill_regs;
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
171 /* Index of last register assigned as a spill register. We allocate in
172 a round-robin fashion. */
174 static int last_spill_reg;
176 /* Describes order of preference for putting regs into spill_regs.
177 Contains the numbers of all the hard regs, in order most preferred first.
178 This order is different for each function.
179 It is set up by order_regs_for_reload.
180 Empty elements at the end contain -1. */
181 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
183 /* 1 for a hard register that appears explicitly in the rtl
184 (for example, function value registers, special registers
185 used by insns, structure value pointer registers). */
186 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
188 /* Indicates if a register was counted against the need for
189 groups. 0 means it can count against max_nongroup instead. */
190 static HARD_REG_SET counted_for_groups;
192 /* Indicates if a register was counted against the need for
193 non-groups. 0 means it can become part of a new group.
194 During choose_reload_regs, 1 here means don't use this reg
195 as part of a group, even if it seems to be otherwise ok. */
196 static HARD_REG_SET counted_for_nongroups;
198 /* Indexed by pseudo reg number N,
199 says may not delete stores into the real (memory) home of pseudo N.
200 This is set if we already substituted a memory equivalent in some uses,
201 which happens when we have to eliminate the fp from it. */
202 static char *cannot_omit_stores;
204 /* Nonzero if indirect addressing is supported on the machine; this means
205 that spilling (REG n) does not require reloading it into a register in
206 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
207 value indicates the level of indirect addressing supported, e.g., two
208 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
211 static char spill_indirect_levels;
213 /* Nonzero if indirect addressing is supported when the innermost MEM is
214 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
215 which these are valid is the same as spill_indirect_levels, above. */
217 char indirect_symref_ok;
219 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
221 char double_reg_address_ok;
223 /* Record the stack slot for each spilled hard register. */
225 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
227 /* Width allocated so far for that stack slot. */
229 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
231 /* Indexed by register class and basic block number, nonzero if there is
232 any need for a spill register of that class in that basic block.
233 The pointer is 0 if we did stupid allocation and don't know
234 the structure of basic blocks. */
236 char *basic_block_needs[N_REG_CLASSES];
238 /* First uid used by insns created by reload in this function.
239 Used in find_equiv_reg. */
240 int reload_first_uid;
242 /* Flag set by local-alloc or global-alloc if anything is live in
243 a call-clobbered reg across calls. */
245 int caller_save_needed;
247 /* Set to 1 while reload_as_needed is operating.
248 Required by some machines to handle any generated moves differently. */
250 int reload_in_progress = 0;
252 /* These arrays record the insn_code of insns that may be needed to
253 perform input and output reloads of special objects. They provide a
254 place to pass a scratch register. */
256 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
257 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
259 /* This obstack is used for allocation of rtl during register elimination.
260 The allocated storage can be freed once find_reloads has processed the
263 struct obstack reload_obstack;
264 char *reload_firstobj;
266 #define obstack_chunk_alloc xmalloc
267 #define obstack_chunk_free free
269 /* List of labels that must never be deleted. */
270 extern rtx forced_labels;
272 /* This structure is used to record information about register eliminations.
273 Each array entry describes one possible way of eliminating a register
274 in favor of another. If there is more than one way of eliminating a
275 particular register, the most preferred should be specified first. */
277 static struct elim_table
279 int from; /* Register number to be eliminated. */
280 int to; /* Register number used as replacement. */
281 int initial_offset; /* Initial difference between values. */
282 int can_eliminate; /* Non-zero if this elimination can be done. */
283 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
284 insns made by reload. */
285 int offset; /* Current offset between the two regs. */
286 int max_offset; /* Maximum offset between the two regs. */
287 int previous_offset; /* Offset at end of previous insn. */
288 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
289 rtx from_rtx; /* REG rtx for the register to be eliminated.
290 We cannot simply compare the number since
291 we might then spuriously replace a hard
292 register corresponding to a pseudo
293 assigned to the reg to be eliminated. */
294 rtx to_rtx; /* REG rtx for the replacement. */
297 /* If a set of eliminable registers was specified, define the table from it.
298 Otherwise, default to the normal case of the frame pointer being
299 replaced by the stack pointer. */
301 #ifdef ELIMINABLE_REGS
304 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
307 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
309 /* Record the number of pending eliminations that have an offset not equal
310 to their initial offset. If non-zero, we use a new copy of each
311 replacement result in any insns encountered. */
312 static int num_not_at_initial_offset;
314 /* Count the number of registers that we may be able to eliminate. */
315 static int num_eliminable;
317 /* For each label, we record the offset of each elimination. If we reach
318 a label by more than one path and an offset differs, we cannot do the
319 elimination. This information is indexed by the number of the label.
320 The first table is an array of flags that records whether we have yet
321 encountered a label and the second table is an array of arrays, one
322 entry in the latter array for each elimination. */
324 static char *offsets_known_at;
325 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
327 /* Number of labels in the current function. */
329 static int num_labels;
/* Pairs a hard register number with a count of its uses; used when
   ordering hard registers by how heavily they are used.  */
struct hard_reg_n_uses { int regno; int uses; };
333 static int possible_group_p PROTO((int, int *));
334 static void count_possible_groups PROTO((int *, enum machine_mode *,
336 static int modes_equiv_for_class_p PROTO((enum machine_mode,
339 static void spill_failure PROTO((rtx));
340 static int new_spill_reg PROTO((int, int, int *, int *, int,
342 static void delete_dead_insn PROTO((rtx));
343 static void alter_reg PROTO((int, int));
344 static void mark_scratch_live PROTO((rtx));
345 static void set_label_offsets PROTO((rtx, rtx, int));
346 static int eliminate_regs_in_insn PROTO((rtx, int));
347 static void mark_not_eliminable PROTO((rtx, rtx));
348 static int spill_hard_reg PROTO((int, int, FILE *, int));
349 static void scan_paradoxical_subregs PROTO((rtx));
350 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
351 struct hard_reg_n_uses *));
352 static void order_regs_for_reload PROTO((void));
353 static int compare_spill_regs PROTO((short *, short *));
354 static void reload_as_needed PROTO((rtx, int));
355 static void forget_old_reloads_1 PROTO((rtx, rtx));
356 static int reload_reg_class_lower PROTO((short *, short *));
357 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
359 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
361 static int reload_reg_free_p PROTO((int, int, enum reload_type));
362 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
363 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
364 static int reloads_conflict PROTO((int, int));
365 static int allocate_reload_reg PROTO((int, rtx, int, int));
366 static void choose_reload_regs PROTO((rtx, rtx));
367 static void merge_assigned_reloads PROTO((rtx));
368 static void emit_reload_insns PROTO((rtx));
369 static void delete_output_reload PROTO((rtx, int, rtx));
370 static void inc_for_reload PROTO((rtx, rtx, int));
371 static int constraint_accepts_reg_p PROTO((char *, rtx));
372 static int count_occurrences PROTO((rtx, rtx));
374 /* Initialize the reload pass once per compilation. */
381 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
382 Set spill_indirect_levels to the number of levels such addressing is
383 permitted, zero if it is not permitted at all. */
386 = gen_rtx (MEM, Pmode,
387 gen_rtx (PLUS, Pmode,
388 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
390 spill_indirect_levels = 0;
392 while (memory_address_p (QImode, tem))
394 spill_indirect_levels++;
395 tem = gen_rtx (MEM, Pmode, tem);
398 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
400 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
401 indirect_symref_ok = memory_address_p (QImode, tem);
403 /* See if reg+reg is a valid (and offsettable) address. */
405 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
407 tem = gen_rtx (PLUS, Pmode,
408 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
409 gen_rtx (REG, Pmode, i));
410 /* This way, we make sure that reg+reg is an offsettable address. */
411 tem = plus_constant (tem, 4);
413 if (memory_address_p (QImode, tem))
415 double_reg_address_ok = 1;
420 /* Initialize obstack for our rtl allocation. */
421 gcc_obstack_init (&reload_obstack);
422 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
425 /* Main entry point for the reload pass.
427 FIRST is the first insn of the function being compiled.
429 GLOBAL nonzero means we were called from global_alloc
430 and should attempt to reallocate any pseudoregs that we
431 displace from hard regs we will use for reloads.
432 If GLOBAL is zero, we do not have enough information to do that,
433 so any pseudo reg that is spilled must go to the stack.
435 DUMPFILE is the global-reg debugging dump file stream, or 0.
436 If it is nonzero, messages are written to it to describe
437 which registers are seized as reload regs, which pseudo regs
438 are spilled from them, and where the pseudo regs are reallocated to.
440 Return value is nonzero if reload failed
441 and we must not do any more for this function. */
444 reload (first, global, dumpfile)
450 register int i, j, k;
452 register struct elim_table *ep;
454 int something_changed;
455 int something_needs_reloads;
456 int something_needs_elimination;
457 int new_basic_block_needs;
458 enum reg_class caller_save_spill_class = NO_REGS;
459 int caller_save_group_size = 1;
461 /* Nonzero means we couldn't get enough spill regs. */
464 /* The basic block number currently being processed for INSN. */
467 /* Make sure even insns with volatile mem refs are recognizable. */
470 /* Enable find_equiv_reg to distinguish insns made by reload. */
471 reload_first_uid = get_max_uid ();
473 for (i = 0; i < N_REG_CLASSES; i++)
474 basic_block_needs[i] = 0;
476 #ifdef SECONDARY_MEMORY_NEEDED
477 /* Initialize the secondary memory table. */
478 clear_secondary_mem ();
481 /* Remember which hard regs appear explicitly
482 before we merge into `regs_ever_live' the ones in which
483 pseudo regs have been allocated. */
484 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
486 /* We don't have a stack slot for any spill reg yet. */
487 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
488 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
490 /* Initialize the save area information for caller-save, in case some
494 /* Compute which hard registers are now in use
495 as homes for pseudo registers.
496 This is done here rather than (eg) in global_alloc
497 because this point is reached even if not optimizing. */
499 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
502 for (i = 0; i < scratch_list_length; i++)
504 mark_scratch_live (scratch_list[i]);
506 /* Make sure that the last insn in the chain
507 is not something that needs reloading. */
508 emit_note (NULL_PTR, NOTE_INSN_DELETED);
510 /* Find all the pseudo registers that didn't get hard regs
511 but do have known equivalent constants or memory slots.
512 These include parameters (known equivalent to parameter slots)
513 and cse'd or loop-moved constant memory addresses.
515 Record constant equivalents in reg_equiv_constant
516 so they will be substituted by find_reloads.
517 Record memory equivalents in reg_mem_equiv so they can
518 be substituted eventually by altering the REG-rtx's. */
520 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
521 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
522 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
523 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
524 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
525 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
526 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
527 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
528 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
529 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
530 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
531 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
532 cannot_omit_stores = (char *) alloca (max_regno);
533 bzero (cannot_omit_stores, max_regno);
535 #ifdef SMALL_REGISTER_CLASSES
536 CLEAR_HARD_REG_SET (forbidden_regs);
539 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
540 Also find all paradoxical subregs and find largest such for each pseudo.
541 On machines with small register classes, record hard registers that
542 are used for user variables. These can never be used for spills. */
544 for (insn = first; insn; insn = NEXT_INSN (insn))
546 rtx set = single_set (insn);
548 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
550 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
552 #ifdef LEGITIMATE_PIC_OPERAND_P
553 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
554 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
558 rtx x = XEXP (note, 0);
559 i = REGNO (SET_DEST (set));
560 if (i > LAST_VIRTUAL_REGISTER)
562 if (GET_CODE (x) == MEM)
563 reg_equiv_memory_loc[i] = x;
564 else if (CONSTANT_P (x))
566 if (LEGITIMATE_CONSTANT_P (x))
567 reg_equiv_constant[i] = x;
569 reg_equiv_memory_loc[i]
570 = force_const_mem (GET_MODE (SET_DEST (set)), x);
575 /* If this register is being made equivalent to a MEM
576 and the MEM is not SET_SRC, the equivalencing insn
577 is one with the MEM as a SET_DEST and it occurs later.
578 So don't mark this insn now. */
579 if (GET_CODE (x) != MEM
580 || rtx_equal_p (SET_SRC (set), x))
581 reg_equiv_init[i] = insn;
586 /* If this insn is setting a MEM from a register equivalent to it,
587 this is the equivalencing insn. */
588 else if (set && GET_CODE (SET_DEST (set)) == MEM
589 && GET_CODE (SET_SRC (set)) == REG
590 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
591 && rtx_equal_p (SET_DEST (set),
592 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
593 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
595 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
596 scan_paradoxical_subregs (PATTERN (insn));
599 /* Does this function require a frame pointer? */
601 frame_pointer_needed = (! flag_omit_frame_pointer
602 #ifdef EXIT_IGNORE_STACK
603 /* ?? If EXIT_IGNORE_STACK is set, we will not save
604 and restore sp for alloca. So we can't eliminate
605 the frame pointer in that case. At some point,
606 we should improve this by emitting the
607 sp-adjusting insns for this case. */
608 || (current_function_calls_alloca
609 && EXIT_IGNORE_STACK)
611 || FRAME_POINTER_REQUIRED);
615 /* Initialize the table of registers to eliminate. The way we do this
616 depends on how the eliminable registers were defined. */
617 #ifdef ELIMINABLE_REGS
618 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
620 ep->can_eliminate = ep->can_eliminate_previous
621 = (CAN_ELIMINATE (ep->from, ep->to)
622 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
625 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
626 = ! frame_pointer_needed;
629 /* Count the number of eliminable registers and build the FROM and TO
630 REG rtx's. Note that code in gen_rtx will cause, e.g.,
631 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
632 We depend on this. */
633 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
635 num_eliminable += ep->can_eliminate;
636 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
637 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
640 num_labels = max_label_num () - get_first_label_num ();
642 /* Allocate the tables used to store offset information at labels. */
643 offsets_known_at = (char *) alloca (num_labels);
645 = (int (*)[NUM_ELIMINABLE_REGS])
646 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
648 offsets_known_at -= get_first_label_num ();
649 offsets_at -= get_first_label_num ();
651 /* Alter each pseudo-reg rtx to contain its hard reg number.
652 Assign stack slots to the pseudos that lack hard regs or equivalents.
653 Do not touch virtual registers. */
655 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
658 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
659 because the stack size may be a part of the offset computation for
660 register elimination. */
661 assign_stack_local (BLKmode, 0, 0);
663 /* If we have some registers we think can be eliminated, scan all insns to
664 see if there is an insn that sets one of these registers to something
665 other than itself plus a constant. If so, the register cannot be
666 eliminated. Doing this scan here eliminates an extra pass through the
667 main reload loop in the most common case where register elimination
669 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
670 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
671 || GET_CODE (insn) == CALL_INSN)
672 note_stores (PATTERN (insn), mark_not_eliminable);
674 #ifndef REGISTER_CONSTRAINTS
675 /* If all the pseudo regs have hard regs,
676 except for those that are never referenced,
677 we know that no reloads are needed. */
678 /* But that is not true if there are register constraints, since
679 in that case some pseudos might be in the wrong kind of hard reg. */
681 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
682 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
685 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
689 /* Compute the order of preference for hard registers to spill.
690 Store them by decreasing preference in potential_reload_regs. */
692 order_regs_for_reload ();
694 /* So far, no hard regs have been spilled. */
696 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
697 spill_reg_order[i] = -1;
699 /* Initialize to -1, which means take the first spill register. */
702 /* On most machines, we can't use any register explicitly used in the
703 rtl as a spill register. But on some, we have to. Those will have
704 taken care to keep the life of hard regs as short as possible. */
706 #ifndef SMALL_REGISTER_CLASSES
707 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
710 /* Spill any hard regs that we know we can't eliminate. */
711 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
712 if (! ep->can_eliminate)
713 spill_hard_reg (ep->from, global, dumpfile, 1);
715 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
716 if (frame_pointer_needed)
717 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
721 for (i = 0; i < N_REG_CLASSES; i++)
723 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
724 bzero (basic_block_needs[i], n_basic_blocks);
727 /* From now on, we need to emit any moves without making new pseudos. */
728 reload_in_progress = 1;
730 /* This loop scans the entire function each go-round
731 and repeats until one repetition spills no additional hard regs. */
733 /* This flag is set when a pseudo reg is spilled,
734 to require another pass. Note that getting an additional reload
735 reg does not necessarily imply any pseudo reg was spilled;
736 sometimes we find a reload reg that no pseudo reg was allocated in. */
737 something_changed = 1;
738 /* This flag is set if there are any insns that require reloading. */
739 something_needs_reloads = 0;
740 /* This flag is set if there are any insns that require register
742 something_needs_elimination = 0;
743 while (something_changed)
747 /* For each class, number of reload regs needed in that class.
748 This is the maximum over all insns of the needs in that class
749 of the individual insn. */
750 int max_needs[N_REG_CLASSES];
751 /* For each class, size of group of consecutive regs
752 that is needed for the reloads of this class. */
753 int group_size[N_REG_CLASSES];
754 /* For each class, max number of consecutive groups needed.
755 (Each group contains group_size[CLASS] consecutive registers.) */
756 int max_groups[N_REG_CLASSES];
757 /* For each class, max number needed of regs that don't belong
758 to any of the groups. */
759 int max_nongroups[N_REG_CLASSES];
760 /* For each class, the machine mode which requires consecutive
761 groups of regs of that class.
762 If two different modes ever require groups of one class,
763 they must be the same size and equally restrictive for that class,
764 otherwise we can't handle the complexity. */
765 enum machine_mode group_mode[N_REG_CLASSES];
766 /* Record the insn where each maximum need is first found. */
767 rtx max_needs_insn[N_REG_CLASSES];
768 rtx max_groups_insn[N_REG_CLASSES];
769 rtx max_nongroups_insn[N_REG_CLASSES];
771 int starting_frame_size = get_frame_size ();
772 int previous_frame_pointer_needed = frame_pointer_needed;
773 static char *reg_class_names[] = REG_CLASS_NAMES;
775 something_changed = 0;
776 bzero ((char *) max_needs, sizeof max_needs);
777 bzero ((char *) max_groups, sizeof max_groups);
778 bzero ((char *) max_nongroups, sizeof max_nongroups);
779 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
780 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
781 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
782 bzero ((char *) group_size, sizeof group_size);
783 for (i = 0; i < N_REG_CLASSES; i++)
784 group_mode[i] = VOIDmode;
786 /* Keep track of which basic blocks are needing the reloads. */
789 /* Remember whether any element of basic_block_needs
790 changes from 0 to 1 in this pass. */
791 new_basic_block_needs = 0;
793 /* Reset all offsets on eliminable registers to their initial values. */
794 #ifdef ELIMINABLE_REGS
795 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
797 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
798 ep->previous_offset = ep->offset
799 = ep->max_offset = ep->initial_offset;
802 #ifdef INITIAL_FRAME_POINTER_OFFSET
803 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
805 if (!FRAME_POINTER_REQUIRED)
807 reg_eliminate[0].initial_offset = 0;
809 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
810 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
813 num_not_at_initial_offset = 0;
815 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
817 /* Set a known offset for each forced label to be at the initial offset
818 of each elimination. We do this because we assume that all
819 computed jumps occur from a location where each elimination is
820 at its initial offset. */
822 for (x = forced_labels; x; x = XEXP (x, 1))
824 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
826 /* For each pseudo register that has an equivalent location defined,
827 try to eliminate any eliminable registers (such as the frame pointer)
828 assuming initial offsets for the replacement register, which
831 If the resulting location is directly addressable, substitute
832 the MEM we just got directly for the old REG.
834 If it is not addressable but is a constant or the sum of a hard reg
835 and constant, it is probably not addressable because the constant is
836 out of range, in that case record the address; we will generate
837 hairy code to compute the address in a register each time it is
838 needed. Similarly if it is a hard register, but one that is not
839 valid as an address register.
841 If the location is not addressable, but does not have one of the
842 above forms, assign a stack slot. We have to do this to avoid the
843 potential of producing lots of reloads if, e.g., a location involves
844 a pseudo that didn't get a hard register and has an equivalent memory
845 location that also involves a pseudo that didn't get a hard register.
847 Perhaps at some point we will improve reload_when_needed handling
848 so this problem goes away. But that's very hairy. */
850 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
851 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
853 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
855 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
857 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
858 else if (CONSTANT_P (XEXP (x, 0))
859 || (GET_CODE (XEXP (x, 0)) == REG
860 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
861 || (GET_CODE (XEXP (x, 0)) == PLUS
862 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
863 && (REGNO (XEXP (XEXP (x, 0), 0))
864 < FIRST_PSEUDO_REGISTER)
865 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
866 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
869 /* Make a new stack slot. Then indicate that something
870 changed so we go back and recompute offsets for
871 eliminable registers because the allocation of memory
872 below might change some offset. reg_equiv_{mem,address}
873 will be set up for this pseudo on the next pass around
875 reg_equiv_memory_loc[i] = 0;
876 reg_equiv_init[i] = 0;
878 something_changed = 1;
882 /* If we allocated another pseudo to the stack, redo elimination
884 if (something_changed)
887 /* If caller-saves needs a group, initialize the group to include
888 the size and mode required for caller-saves. */
890 if (caller_save_group_size > 1)
892 group_mode[(int) caller_save_spill_class] = Pmode;
893 group_size[(int) caller_save_spill_class] = caller_save_group_size;
896 /* Compute the most additional registers needed by any instruction.
897 Collect information separately for each class of regs. */
899 for (insn = first; insn; insn = NEXT_INSN (insn))
901 if (global && this_block + 1 < n_basic_blocks
902 && insn == basic_block_head[this_block+1])
905 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
906 might include REG_LABEL), we need to see what effects this
907 has on the known offsets at labels. */
909 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
910 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
911 && REG_NOTES (insn) != 0))
912 set_label_offsets (insn, insn, 0);
914 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
916 /* Nonzero means don't use a reload reg that overlaps
917 the place where a function value can be returned. */
918 rtx avoid_return_reg = 0;
920 rtx old_body = PATTERN (insn);
921 int old_code = INSN_CODE (insn);
922 rtx old_notes = REG_NOTES (insn);
923 int did_elimination = 0;
925 /* To compute the number of reload registers of each class
926 needed for an insn, we must simulate what choose_reload_regs
927 can do. We do this by splitting an insn into an "input" and
928 an "output" part. RELOAD_OTHER reloads are used in both.
929 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
930 which must be live over the entire input section of reloads,
931 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
932 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
935 The registers needed for output are RELOAD_OTHER and
936 RELOAD_FOR_OUTPUT, which are live for the entire output
937 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
938 reloads for each operand.
940 The total number of registers needed is the maximum of the
941 inputs and outputs. */
945 /* [0] is normal, [1] is nongroup. */
946 int regs[2][N_REG_CLASSES];
947 int groups[N_REG_CLASSES];
950 /* Each `struct needs' corresponds to one RELOAD_... type. */
956 struct needs other_addr;
957 struct needs op_addr;
958 struct needs op_addr_reload;
959 struct needs in_addr[MAX_RECOG_OPERANDS];
960 struct needs out_addr[MAX_RECOG_OPERANDS];
963 /* If needed, eliminate any eliminable registers. */
965 did_elimination = eliminate_regs_in_insn (insn, 0);
967 #ifdef SMALL_REGISTER_CLASSES
968 /* Set avoid_return_reg if this is an insn
969 that might use the value of a function call. */
970 if (GET_CODE (insn) == CALL_INSN)
972 if (GET_CODE (PATTERN (insn)) == SET)
973 after_call = SET_DEST (PATTERN (insn));
974 else if (GET_CODE (PATTERN (insn)) == PARALLEL
975 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
976 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
980 else if (after_call != 0
981 && !(GET_CODE (PATTERN (insn)) == SET
982 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
984 if (reg_referenced_p (after_call, PATTERN (insn)))
985 avoid_return_reg = after_call;
988 #endif /* SMALL_REGISTER_CLASSES */
990 /* Analyze the instruction. */
991 find_reloads (insn, 0, spill_indirect_levels, global,
994 /* Remember for later shortcuts which insns had any reloads or
995 register eliminations.
997 One might think that it would be worthwhile to mark insns
998 that need register replacements but not reloads, but this is
999 not safe because find_reloads may do some manipulation of
1000 the insn (such as swapping commutative operands), which would
1001 be lost when we restore the old pattern after register
1002 replacement. So the actions of find_reloads must be redone in
1003 subsequent passes or in reload_as_needed.
1005 However, it is safe to mark insns that need reloads
1006 but not register replacement. */
1008 PUT_MODE (insn, (did_elimination ? QImode
1009 : n_reloads ? HImode
1010 : GET_MODE (insn) == DImode ? DImode
1013 /* Discard any register replacements done. */
1014 if (did_elimination)
1016 obstack_free (&reload_obstack, reload_firstobj);
1017 PATTERN (insn) = old_body;
1018 INSN_CODE (insn) = old_code;
1019 REG_NOTES (insn) = old_notes;
1020 something_needs_elimination = 1;
1023 /* If this insn has no reloads, we need not do anything except
1024 in the case of a CALL_INSN when we have caller-saves and
1025 caller-save needs reloads. */
1028 && ! (GET_CODE (insn) == CALL_INSN
1029 && caller_save_spill_class != NO_REGS))
1032 something_needs_reloads = 1;
1033 bzero ((char *) &insn_needs, sizeof insn_needs);
1035 /* Count each reload once in every class
1036 containing the reload's own class. */
1038 for (i = 0; i < n_reloads; i++)
1040 register enum reg_class *p;
1041 enum reg_class class = reload_reg_class[i];
1043 enum machine_mode mode;
1045 struct needs *this_needs;
1047 /* Don't count the dummy reloads, for which one of the
1048 regs mentioned in the insn can be used for reloading.
1049 Don't count optional reloads.
1050 Don't count reloads that got combined with others. */
1051 if (reload_reg_rtx[i] != 0
1052 || reload_optional[i] != 0
1053 || (reload_out[i] == 0 && reload_in[i] == 0
1054 && ! reload_secondary_p[i]))
1057 /* Show that a reload register of this class is needed
1058 in this basic block. We do not use insn_needs and
1059 insn_groups because they are overly conservative for
1061 if (global && ! basic_block_needs[(int) class][this_block])
1063 basic_block_needs[(int) class][this_block] = 1;
1064 new_basic_block_needs = 1;
1068 mode = reload_inmode[i];
1069 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1070 mode = reload_outmode[i];
1071 size = CLASS_MAX_NREGS (class, mode);
1073 /* If this class doesn't want a group, determine if we have
1074 a nongroup need or a regular need. We have a nongroup
1075 need if this reload conflicts with a group reload whose
1076 class intersects with this reload's class. */
1080 for (j = 0; j < n_reloads; j++)
1081 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1082 (GET_MODE_SIZE (reload_outmode[j])
1083 > GET_MODE_SIZE (reload_inmode[j]))
1087 && (!reload_optional[j])
1088 && (reload_in[j] != 0 || reload_out[j] != 0
1089 || reload_secondary_p[j])
1090 && reloads_conflict (i, j)
1091 && reg_classes_intersect_p (class,
1092 reload_reg_class[j]))
1098 /* Decide which time-of-use to count this reload for. */
1099 switch (reload_when_needed[i])
1102 this_needs = &insn_needs.other;
1104 case RELOAD_FOR_INPUT:
1105 this_needs = &insn_needs.input;
1107 case RELOAD_FOR_OUTPUT:
1108 this_needs = &insn_needs.output;
1110 case RELOAD_FOR_INSN:
1111 this_needs = &insn_needs.insn;
1113 case RELOAD_FOR_OTHER_ADDRESS:
1114 this_needs = &insn_needs.other_addr;
1116 case RELOAD_FOR_INPUT_ADDRESS:
1117 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1119 case RELOAD_FOR_OUTPUT_ADDRESS:
1120 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1122 case RELOAD_FOR_OPERAND_ADDRESS:
1123 this_needs = &insn_needs.op_addr;
1125 case RELOAD_FOR_OPADDR_ADDR:
1126 this_needs = &insn_needs.op_addr_reload;
1132 enum machine_mode other_mode, allocate_mode;
1134 /* Count number of groups needed separately from
1135 number of individual regs needed. */
1136 this_needs->groups[(int) class]++;
1137 p = reg_class_superclasses[(int) class];
1138 while (*p != LIM_REG_CLASSES)
1139 this_needs->groups[(int) *p++]++;
1141 /* Record size and mode of a group of this class. */
1142 /* If more than one size group is needed,
1143 make all groups the largest needed size. */
1144 if (group_size[(int) class] < size)
1146 other_mode = group_mode[(int) class];
1147 allocate_mode = mode;
1149 group_size[(int) class] = size;
1150 group_mode[(int) class] = mode;
1155 allocate_mode = group_mode[(int) class];
1158 /* Crash if two dissimilar machine modes both need
1159 groups of consecutive regs of the same class. */
1161 if (other_mode != VOIDmode && other_mode != allocate_mode
1162 && ! modes_equiv_for_class_p (allocate_mode,
1164 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1169 this_needs->regs[nongroup_need][(int) class] += 1;
1170 p = reg_class_superclasses[(int) class];
1171 while (*p != LIM_REG_CLASSES)
1172 this_needs->regs[nongroup_need][(int) *p++] += 1;
1178 /* All reloads have been counted for this insn;
1179 now merge the various times of use.
1180 This sets insn_needs, etc., to the maximum total number
1181 of registers needed at any point in this insn. */
1183 for (i = 0; i < N_REG_CLASSES; i++)
1185 int in_max, out_max;
1187 /* Compute normal and nongroup needs. */
1188 for (j = 0; j <= 1; j++)
1190 for (in_max = 0, out_max = 0, k = 0;
1191 k < reload_n_operands; k++)
1194 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1196 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1199 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1200 and operand addresses but not things used to reload
1201 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1202 don't conflict with things needed to reload inputs or
1205 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1206 insn_needs.op_addr_reload.regs[j][i]),
1209 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1211 insn_needs.input.regs[j][i]
1212 = MAX (insn_needs.input.regs[j][i]
1213 + insn_needs.op_addr.regs[j][i]
1214 + insn_needs.insn.regs[j][i],
1215 in_max + insn_needs.input.regs[j][i]);
1217 insn_needs.output.regs[j][i] += out_max;
1218 insn_needs.other.regs[j][i]
1219 += MAX (MAX (insn_needs.input.regs[j][i],
1220 insn_needs.output.regs[j][i]),
1221 insn_needs.other_addr.regs[j][i]);
1225 /* Now compute group needs. */
1226 for (in_max = 0, out_max = 0, j = 0;
1227 j < reload_n_operands; j++)
1229 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1231 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1234 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1235 insn_needs.op_addr_reload.groups[i]),
1237 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1239 insn_needs.input.groups[i]
1240 = MAX (insn_needs.input.groups[i]
1241 + insn_needs.op_addr.groups[i]
1242 + insn_needs.insn.groups[i],
1243 in_max + insn_needs.input.groups[i]);
1245 insn_needs.output.groups[i] += out_max;
1246 insn_needs.other.groups[i]
1247 += MAX (MAX (insn_needs.input.groups[i],
1248 insn_needs.output.groups[i]),
1249 insn_needs.other_addr.groups[i]);
1252 /* If this is a CALL_INSN and caller-saves will need
1253 a spill register, act as if the spill register is
1254 needed for this insn. However, the spill register
1255 can be used by any reload of this insn, so we only
1256 need do something if no need for that class has
1259 The assumption that every CALL_INSN will trigger a
1260 caller-save is highly conservative, however, the number
1261 of cases where caller-saves will need a spill register but
1262 a block containing a CALL_INSN won't need a spill register
1263 of that class should be quite rare.
1265 If a group is needed, the size and mode of the group will
1266 have been set up at the beginning of this loop. */
1268 if (GET_CODE (insn) == CALL_INSN
1269 && caller_save_spill_class != NO_REGS)
1271 /* See if this register would conflict with any reload
1272 that needs a group. */
1273 int nongroup_need = 0;
1274 int *caller_save_needs;
1276 for (j = 0; j < n_reloads; j++)
1277 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1278 (GET_MODE_SIZE (reload_outmode[j])
1279 > GET_MODE_SIZE (reload_inmode[j]))
1283 && reg_classes_intersect_p (caller_save_spill_class,
1284 reload_reg_class[j]))
1291 = (caller_save_group_size > 1
1292 ? insn_needs.other.groups
1293 : insn_needs.other.regs[nongroup_need]);
1295 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1297 register enum reg_class *p
1298 = reg_class_superclasses[(int) caller_save_spill_class];
1300 caller_save_needs[(int) caller_save_spill_class]++;
1302 while (*p != LIM_REG_CLASSES)
1303 caller_save_needs[(int) *p++] += 1;
1306 /* Show that this basic block will need a register of
1310 && ! (basic_block_needs[(int) caller_save_spill_class]
1313 basic_block_needs[(int) caller_save_spill_class]
1315 new_basic_block_needs = 1;
1319 #ifdef SMALL_REGISTER_CLASSES
1320 /* If this insn stores the value of a function call,
1321 and that value is in a register that has been spilled,
1322 and if the insn needs a reload in a class
1323 that might use that register as the reload register,
1324 then add an extra need in that class.
1325 This makes sure we have a register available that does
1326 not overlap the return value. */
1328 if (avoid_return_reg)
1330 int regno = REGNO (avoid_return_reg);
1332 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1334 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1336 /* First compute the "basic needs", which counts a
1337 need only in the smallest class in which it
1340 bcopy ((char *) insn_needs.other.regs[0],
1341 (char *) basic_needs, sizeof basic_needs);
1342 bcopy ((char *) insn_needs.other.groups,
1343 (char *) basic_groups, sizeof basic_groups);
1345 for (i = 0; i < N_REG_CLASSES; i++)
1349 if (basic_needs[i] >= 0)
1350 for (p = reg_class_superclasses[i];
1351 *p != LIM_REG_CLASSES; p++)
1352 basic_needs[(int) *p] -= basic_needs[i];
1354 if (basic_groups[i] >= 0)
1355 for (p = reg_class_superclasses[i];
1356 *p != LIM_REG_CLASSES; p++)
1357 basic_groups[(int) *p] -= basic_groups[i];
1360 /* Now count extra regs if there might be a conflict with
1361 the return value register. */
1363 for (r = regno; r < regno + nregs; r++)
1364 if (spill_reg_order[r] >= 0)
1365 for (i = 0; i < N_REG_CLASSES; i++)
1366 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1368 if (basic_needs[i] > 0)
1372 insn_needs.other.regs[0][i]++;
1373 p = reg_class_superclasses[i];
1374 while (*p != LIM_REG_CLASSES)
1375 insn_needs.other.regs[0][(int) *p++]++;
1377 if (basic_groups[i] > 0)
1381 insn_needs.other.groups[i]++;
1382 p = reg_class_superclasses[i];
1383 while (*p != LIM_REG_CLASSES)
1384 insn_needs.other.groups[(int) *p++]++;
1388 #endif /* SMALL_REGISTER_CLASSES */
1390 /* For each class, collect maximum need of any insn. */
1392 for (i = 0; i < N_REG_CLASSES; i++)
1394 if (max_needs[i] < insn_needs.other.regs[0][i])
1396 max_needs[i] = insn_needs.other.regs[0][i];
1397 max_needs_insn[i] = insn;
1399 if (max_groups[i] < insn_needs.other.groups[i])
1401 max_groups[i] = insn_needs.other.groups[i];
1402 max_groups_insn[i] = insn;
1404 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1406 max_nongroups[i] = insn_needs.other.regs[1][i];
1407 max_nongroups_insn[i] = insn;
1411 /* Note that there is a continue statement above. */
1414 /* If we allocated any new memory locations, make another pass
1415 since it might have changed elimination offsets. */
1416 if (starting_frame_size != get_frame_size ())
1417 something_changed = 1;
1420 for (i = 0; i < N_REG_CLASSES; i++)
1422 if (max_needs[i] > 0)
1424 ";; Need %d reg%s of class %s (for insn %d).\n",
1425 max_needs[i], max_needs[i] == 1 ? "" : "s",
1426 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1427 if (max_nongroups[i] > 0)
1429 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1430 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1431 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1432 if (max_groups[i] > 0)
1434 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1435 max_groups[i], max_groups[i] == 1 ? "" : "s",
1436 mode_name[(int) group_mode[i]],
1437 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1440 /* If we have caller-saves, set up the save areas and see if caller-save
1441 will need a spill register. */
1443 if (caller_save_needed
1444 && ! setup_save_areas (&something_changed)
1445 && caller_save_spill_class == NO_REGS)
1447 /* The class we will need depends on whether the machine
1448 supports the sum of two registers for an address; see
1449 find_address_reloads for details. */
1451 caller_save_spill_class
1452 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1453 caller_save_group_size
1454 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1455 something_changed = 1;
1458 /* See if anything that happened changes which eliminations are valid.
1459 For example, on the Sparc, whether or not the frame pointer can
1460 be eliminated can depend on what registers have been used. We need
1461 not check some conditions again (such as flag_omit_frame_pointer)
1462 since they can't have changed. */
1464 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1465 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1466 #ifdef ELIMINABLE_REGS
1467 || ! CAN_ELIMINATE (ep->from, ep->to)
1470 ep->can_eliminate = 0;
1472 /* Look for the case where we have discovered that we can't replace
1473 register A with register B and that means that we will now be
1474 trying to replace register A with register C. This means we can
1475 no longer replace register C with register B and we need to disable
1476 such an elimination, if it exists. This occurs often with A == ap,
1477 B == sp, and C == fp. */
1479 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1481 struct elim_table *op;
1482 register int new_to = -1;
1484 if (! ep->can_eliminate && ep->can_eliminate_previous)
1486 /* Find the current elimination for ep->from, if there is a
1488 for (op = reg_eliminate;
1489 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1490 if (op->from == ep->from && op->can_eliminate)
1496 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1498 for (op = reg_eliminate;
1499 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1500 if (op->from == new_to && op->to == ep->to)
1501 op->can_eliminate = 0;
1505 /* See if any registers that we thought we could eliminate the previous
1506 time are no longer eliminable. If so, something has changed and we
1507 must spill the register. Also, recompute the number of eliminable
1508 registers and see if the frame pointer is needed; it is if there is
1509 no elimination of the frame pointer that we can perform. */
1511 frame_pointer_needed = 1;
1512 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1514 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1515 && ep->to != HARD_FRAME_POINTER_REGNUM)
1516 frame_pointer_needed = 0;
1518 if (! ep->can_eliminate && ep->can_eliminate_previous)
1520 ep->can_eliminate_previous = 0;
1521 spill_hard_reg (ep->from, global, dumpfile, 1);
1522 something_changed = 1;
1527 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1528 /* If we didn't need a frame pointer last time, but we do now, spill
1529 the hard frame pointer. */
1530 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1532 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1533 something_changed = 1;
1537 /* If all needs are met, we win. */
1539 for (i = 0; i < N_REG_CLASSES; i++)
1540 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1542 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1545 /* Not all needs are met; must spill some hard regs. */
1547 /* Put all registers spilled so far back in potential_reload_regs, but
1548 put them at the front, since we've already spilled most of the
1549 pseudos in them (we might have left some pseudos unspilled if they
1550 were in a block that didn't need any spill registers of a conflicting
1551 class. We used to try to mark off the need for those registers,
1552 but doing so properly is very complex and reallocating them is the
1553 simpler approach. First, "pack" potential_reload_regs by pushing
1554 any nonnegative entries towards the end. That will leave room
1555 for the registers we already spilled.
1557 Also, undo the marking of the spill registers from the last time
1558 around in FORBIDDEN_REGS since we will probably be allocating
1561 ??? It is theoretically possible that we might end up not using one
1562 of our previously-spilled registers in this allocation, even though
1563 they are at the head of the list. It's not clear what to do about
1564 this, but it was no better before, when we marked off the needs met
1565 by the previously-spilled registers. With the current code, globals
1566 can be allocated into these registers, but locals cannot. */
1570 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1571 if (potential_reload_regs[i] != -1)
1572 potential_reload_regs[j--] = potential_reload_regs[i];
1574 for (i = 0; i < n_spills; i++)
1576 potential_reload_regs[i] = spill_regs[i];
1577 spill_reg_order[spill_regs[i]] = -1;
1578 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1584 /* Now find more reload regs to satisfy the remaining need
1585 Do it by ascending class number, since otherwise a reg
1586 might be spilled for a big class and might fail to count
1587 for a smaller class even though it belongs to that class.
1589 Count spilled regs in `spills', and add entries to
1590 `spill_regs' and `spill_reg_order'.
1592 ??? Note there is a problem here.
1593 When there is a need for a group in a high-numbered class,
1594 and also need for non-group regs that come from a lower class,
1595 the non-group regs are chosen first. If there aren't many regs,
1596 they might leave no room for a group.
1598 This was happening on the 386. To fix it, we added the code
1599 that calls possible_group_p, so that the lower class won't
1600 break up the last possible group.
1602 Really fixing the problem would require changes above
1603 in counting the regs already spilled, and in choose_reload_regs.
1604 It might be hard to avoid introducing bugs there. */
1606 CLEAR_HARD_REG_SET (counted_for_groups);
1607 CLEAR_HARD_REG_SET (counted_for_nongroups);
1609 for (class = 0; class < N_REG_CLASSES; class++)
1611 /* First get the groups of registers.
1612 If we got single registers first, we might fragment
1614 while (max_groups[class] > 0)
1616 /* If any single spilled regs happen to form groups,
1617 count them now. Maybe we don't really need
1618 to spill another group. */
1619 count_possible_groups (group_size, group_mode, max_groups,
1622 if (max_groups[class] <= 0)
1625 /* Groups of size 2 (the only groups used on most machines)
1626 are treated specially. */
1627 if (group_size[class] == 2)
1629 /* First, look for a register that will complete a group. */
1630 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1634 j = potential_reload_regs[i];
1635 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1637 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1638 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1639 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1640 && HARD_REGNO_MODE_OK (other, group_mode[class])
1641 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1643 /* We don't want one part of another group.
1644 We could get "two groups" that overlap! */
1645 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1647 (j < FIRST_PSEUDO_REGISTER - 1
1648 && (other = j + 1, spill_reg_order[other] >= 0)
1649 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1650 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1651 && HARD_REGNO_MODE_OK (j, group_mode[class])
1652 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1654 && ! TEST_HARD_REG_BIT (counted_for_groups,
1657 register enum reg_class *p;
1659 /* We have found one that will complete a group,
1660 so count off one group as provided. */
1661 max_groups[class]--;
1662 p = reg_class_superclasses[class];
1663 while (*p != LIM_REG_CLASSES)
1665 if (group_size [(int) *p] <= group_size [class])
1666 max_groups[(int) *p]--;
1670 /* Indicate both these regs are part of a group. */
1671 SET_HARD_REG_BIT (counted_for_groups, j);
1672 SET_HARD_REG_BIT (counted_for_groups, other);
1676 /* We can't complete a group, so start one. */
1677 #ifdef SMALL_REGISTER_CLASSES
1678 /* Look for a pair neither of which is explicitly used. */
1679 if (i == FIRST_PSEUDO_REGISTER)
1680 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1683 j = potential_reload_regs[i];
1684 /* Verify that J+1 is a potential reload reg. */
1685 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1686 if (potential_reload_regs[k] == j + 1)
1688 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1689 && k < FIRST_PSEUDO_REGISTER
1690 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1691 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1692 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1693 && HARD_REGNO_MODE_OK (j, group_mode[class])
1694 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1696 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1697 /* Reject J at this stage
1698 if J+1 was explicitly used. */
1699 && ! regs_explicitly_used[j + 1])
1703 /* Now try any group at all
1704 whose registers are not in bad_spill_regs. */
1705 if (i == FIRST_PSEUDO_REGISTER)
1706 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1709 j = potential_reload_regs[i];
1710 /* Verify that J+1 is a potential reload reg. */
1711 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1712 if (potential_reload_regs[k] == j + 1)
1714 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1715 && k < FIRST_PSEUDO_REGISTER
1716 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1717 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1718 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1719 && HARD_REGNO_MODE_OK (j, group_mode[class])
1720 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1722 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1726 /* I should be the index in potential_reload_regs
1727 of the new reload reg we have found. */
1729 if (i >= FIRST_PSEUDO_REGISTER)
1731 /* There are no groups left to spill. */
1732 spill_failure (max_groups_insn[class]);
1738 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1743 /* For groups of more than 2 registers,
1744 look for a sufficient sequence of unspilled registers,
1745 and spill them all at once. */
1746 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1750 j = potential_reload_regs[i];
1752 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1753 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1755 /* Check each reg in the sequence. */
1756 for (k = 0; k < group_size[class]; k++)
1757 if (! (spill_reg_order[j + k] < 0
1758 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1759 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1761 /* We got a full sequence, so spill them all. */
1762 if (k == group_size[class])
1764 register enum reg_class *p;
1765 for (k = 0; k < group_size[class]; k++)
1768 SET_HARD_REG_BIT (counted_for_groups, j + k);
1769 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1770 if (potential_reload_regs[idx] == j + k)
1773 |= new_spill_reg (idx, class,
1774 max_needs, NULL_PTR,
1778 /* We have found one that will complete a group,
1779 so count off one group as provided. */
1780 max_groups[class]--;
1781 p = reg_class_superclasses[class];
1782 while (*p != LIM_REG_CLASSES)
1784 if (group_size [(int) *p]
1785 <= group_size [class])
1786 max_groups[(int) *p]--;
1793 /* We couldn't find any registers for this reload.
1794 Avoid going into an infinite loop. */
1795 if (i >= FIRST_PSEUDO_REGISTER)
1797 /* There are no groups left. */
1798 spill_failure (max_groups_insn[class]);
1805 /* Now similarly satisfy all need for single registers. */
1807 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1809 #ifdef SMALL_REGISTER_CLASSES
1810 /* This should be right for all machines, but only the 386
1811 is known to need it, so this conditional plays safe.
1812 ??? For 2.5, try making this unconditional. */
1813 /* If we spilled enough regs, but they weren't counted
1814 against the non-group need, see if we can count them now.
1815 If so, we can avoid some actual spilling. */
1816 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1817 for (i = 0; i < n_spills; i++)
1818 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1820 && !TEST_HARD_REG_BIT (counted_for_groups,
1822 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1824 && max_nongroups[class] > 0)
1826 register enum reg_class *p;
1828 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1829 max_nongroups[class]--;
1830 p = reg_class_superclasses[class];
1831 while (*p != LIM_REG_CLASSES)
1832 max_nongroups[(int) *p++]--;
1834 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1838 /* Consider the potential reload regs that aren't
1839 yet in use as reload regs, in order of preference.
1840 Find the most preferred one that's in this class. */
1842 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1843 if (potential_reload_regs[i] >= 0
1844 && TEST_HARD_REG_BIT (reg_class_contents[class],
1845 potential_reload_regs[i])
1846 /* If this reg will not be available for groups,
1847 pick one that does not foreclose possible groups.
1848 This is a kludge, and not very general,
1849 but it should be sufficient to make the 386 work,
1850 and the problem should not occur on machines with
1852 && (max_nongroups[class] == 0
1853 || possible_group_p (potential_reload_regs[i], max_groups)))
1856 /* If we couldn't get a register, try to get one even if we
1857 might foreclose possible groups. This may cause problems
1858 later, but that's better than aborting now, since it is
1859 possible that we will, in fact, be able to form the needed
1860 group even with this allocation. */
1862 if (i >= FIRST_PSEUDO_REGISTER
1863 && (asm_noperands (max_needs[class] > 0
1864 ? max_needs_insn[class]
1865 : max_nongroups_insn[class])
1867 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1868 if (potential_reload_regs[i] >= 0
1869 && TEST_HARD_REG_BIT (reg_class_contents[class],
1870 potential_reload_regs[i]))
1873 /* I should be the index in potential_reload_regs
1874 of the new reload reg we have found. */
1876 if (i >= FIRST_PSEUDO_REGISTER)
1878 /* There are no possible registers left to spill. */
1879 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1880 : max_nongroups_insn[class]);
1886 |= new_spill_reg (i, class, max_needs, max_nongroups,
1892 /* If global-alloc was run, notify it of any register eliminations we have
1895 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1896 if (ep->can_eliminate)
1897 mark_elimination (ep->from, ep->to);
1899 /* Insert code to save and restore call-clobbered hard regs
1900 around calls. Tell if what mode to use so that we will process
1901 those insns in reload_as_needed if we have to. */
1903 if (caller_save_needed)
1904 save_call_clobbered_regs (num_eliminable ? QImode
1905 : caller_save_spill_class != NO_REGS ? HImode
1908 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1909 If that insn didn't set the register (i.e., it copied the register to
1910 memory), just delete that insn instead of the equivalencing insn plus
1911 anything now dead. If we call delete_dead_insn on that insn, we may
1912 delete the insn that actually sets the register if the register dies
1913 there and that is incorrect. */
1915 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1916 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1917 && GET_CODE (reg_equiv_init[i]) != NOTE)
1919 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1920 delete_dead_insn (reg_equiv_init[i]);
1923 PUT_CODE (reg_equiv_init[i], NOTE);
1924 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1925 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1929 /* Use the reload registers where necessary
1930 by generating move instructions to move the must-be-register
1931 values into or out of the reload registers. */
1933 if (something_needs_reloads || something_needs_elimination
1934 || (caller_save_needed && num_eliminable)
1935 || caller_save_spill_class != NO_REGS)
1936 reload_as_needed (first, global);
1938 /* If we were able to eliminate the frame pointer, show that it is no
1939 longer live at the start of any basic block. If it is live by
1940 virtue of being in a pseudo, that pseudo will be marked live
1941 and hence the frame pointer will be known to be live via that
1944 if (! frame_pointer_needed)
1945 for (i = 0; i < n_basic_blocks; i++)
1946 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1947 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1948 % REGSET_ELT_BITS));
1950 /* Come here (with failure set nonzero) if we can't get enough spill regs
1951 and we decide not to abort about it. */
1954 reload_in_progress = 0;
1956 /* Now eliminate all pseudo regs by modifying them into
1957 their equivalent memory references.
1958 The REG-rtx's for the pseudos are modified in place,
1959 so all insns that used to refer to them now refer to memory.
1961 For a reg that has a reg_equiv_address, all those insns
1962 were changed by reloading so that no insns refer to it any longer;
1963 but the DECL_RTL of a variable decl may refer to it,
1964 and if so this causes the debugging info to mention the variable. */
1966 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1970 if (reg_equiv_mem[i])
1972 addr = XEXP (reg_equiv_mem[i], 0);
1973 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1975 if (reg_equiv_address[i])
1976 addr = reg_equiv_address[i];
1979 if (reg_renumber[i] < 0)
1981 rtx reg = regno_reg_rtx[i];
1982 XEXP (reg, 0) = addr;
1983 REG_USERVAR_P (reg) = 0;
1984 MEM_IN_STRUCT_P (reg) = in_struct;
1985 PUT_CODE (reg, MEM);
1987 else if (reg_equiv_mem[i])
1988 XEXP (reg_equiv_mem[i], 0) = addr;
1992 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1993 /* Make a pass over all the insns and remove death notes for things that
1994 are no longer registers or no longer die in the insn (e.g., an input
1995 and output pseudo being tied). */
1997 for (insn = first; insn; insn = NEXT_INSN (insn))
1998 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2002 for (note = REG_NOTES (insn); note; note = next)
2004 next = XEXP (note, 1);
2005 if (REG_NOTE_KIND (note) == REG_DEAD
2006 && (GET_CODE (XEXP (note, 0)) != REG
2007 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2008 remove_note (insn, note);
2013 /* Indicate that we no longer have known memory locations or constants. */
2014 reg_equiv_constant = 0;
2015 reg_equiv_memory_loc = 0;
2018 free (scratch_list);
2021 free (scratch_block);
2027 /* Nonzero if, after spilling reg REGNO for non-groups,
2028 it will still be possible to find a group if we still need one. */
/* Return nonzero if, after spilling hard reg REGNO for a non-group
   need, it would still be possible to form a consecutive-register
   group (a pair) for whichever class MAX_GROUPS says still needs one.
   NOTE(review): this listing elides some lines (declarations, braces,
   returns); only the numbered lines below are original text.  */
2031 possible_group_p (regno, max_groups)
   /* Find the one class that still needs groups; NO_REGS if none does,
      in which case spilling REGNO cannot foreclose anything.  */
2036 int class = (int) NO_REGS;
2038 for (i = 0; i < (int) N_REG_CLASSES; i++)
2039 if (max_groups[i] > 0)
2045 if (class == (int) NO_REGS)
2048 /* Consider each pair of consecutive registers.  */
2049 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2051 /* Ignore pairs that include reg REGNO.  */
2052 if (i == regno || i + 1 == regno)
2055 /* Ignore pairs that are outside the class that needs the group.
2056 ??? Here we fail to handle the case where two different classes
2057 independently need groups.  But this never happens with our
2058 current machine descriptions.  */
2059 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2060 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2063 /* A pair of consecutive regs we can still spill does the trick.  */
2064 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2065 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2066 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2069 /* A pair of one already spilled and one we can spill does it
2070 provided the one already spilled is not otherwise reserved.  */
2071 if (spill_reg_order[i] < 0
2072 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2073 && spill_reg_order[i + 1] >= 0
2074 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2075 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
   /* Mirror image of the previous case: second reg spillable, first
      already spilled and not reserved for a group or non-group need.  */
2077 if (spill_reg_order[i + 1] < 0
2078 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2079 && spill_reg_order[i] >= 0
2080 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2081 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2088 /* Count any groups of CLASS that can be formed from the registers recently spilled.
/* Scan the spill registers gathered so far and mark off any complete
   consecutive groups of CLASS against MAX_GROUPS[CLASS] (and its
   superclasses).  GROUP_SIZE and GROUP_MODE give, per class, the size
   and mode a group must satisfy.  Registers used this way are recorded
   in counted_for_groups so they are not counted twice.
   NOTE(review): listing elides some lines; only numbered lines are
   original text.  */
2092 count_possible_groups (group_size, group_mode, max_groups, class)
2094 enum machine_mode *group_mode;
2101 /* Now find all consecutive groups of spilled registers
2102 and mark each group off against the need for such groups.
2103 But don't count them against ordinary need, yet.  */
2105 if (group_size[class] == 0)
2108 CLEAR_HARD_REG_SET (new);
2110 /* Make a mask of all the regs that are spill regs in class I.  */
2111 for (i = 0; i < n_spills; i++)
2112 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2113 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2114 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2115 SET_HARD_REG_BIT (new, spill_regs[i])
2117 /* Find each consecutive group of them.  */
2118 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2119 if (TEST_HARD_REG_BIT (new, i)
2120 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2121 && HARD_REGNO_MODE_OK (i, group_mode[class]))
   /* Check that every following member of the candidate group is also
      an available spill reg; J stops early at the first gap.  */
2123 for (j = 1; j < group_size[class]; j++)
2124 if (! TEST_HARD_REG_BIT (new, i + j))
2127 if (j == group_size[class])
2129 /* We found a group.  Mark it off against this class's need for
2130 groups, and against each superclass too.  */
2131 register enum reg_class *p;
2133 max_groups[class]--;
2134 p = reg_class_superclasses[class];
2135 while (*p != LIM_REG_CLASSES)
   /* A superclass only benefits if its group size is no larger.  */
2137 if (group_size [(int) *p] <= group_size [class])
2138 max_groups[(int) *p]--;
2142 /* Don't count these registers again.  */
2143 for (j = 0; j < group_size[class]; j++)
2144 SET_HARD_REG_BIT (counted_for_groups, i + j);
2147 /* Skip to the last reg in this group.  When i is incremented above,
2148 it will then point to the first reg of the next possible group.  */
2153 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2154 another mode that needs to be reloaded for the same register class CLASS.
2155 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2156 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2158 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2159 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2160 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2161 causes unnecessary failures on machines requiring alignment of register
2162 groups when the two modes are different sizes, because the larger mode has
2163 more strict alignment rules than the smaller mode. */
/* Return zero (fail) if some register in CLASS accommodates
   ALLOCATE_MODE but not OTHER_MODE; otherwise the two modes are
   considered equivalent for CLASS (see the block comment above for
   why only this one direction is tested).
   NOTE(review): the function's return statements are elided in this
   listing; only the numbered lines are original text.  */
2166 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2167 enum machine_mode allocate_mode, other_mode;
2168 enum reg_class class;
2171 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
   /* Fail on any reg of CLASS that allows the larger mode but not
      the smaller one.  */
2173 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2174 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2175 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2181 /* Handle the failure to find a register to spill.
2182 INSN should be one of the insns which needed this particular spill reg. */
/* Report failure to find a spill register.  INSN is one of the insns
   that needed the spill reg.  For an asm insn, emit a user-level error
   (too many reloads demanded by the asm); otherwise this is a compiler
   bug, so die with fatal_insn.  */
2185 spill_failure (insn)
2188 if (asm_noperands (PATTERN (insn)) >= 0)
2189 error_for_asm (insn, "`asm' needs too many reloads");
2191 fatal_insn ("Unable to find a register to spill.", insn);
2194 /* Add a new register to the tables of available spill-registers
2195 (as well as spilling all pseudos allocated to the register).
2196 I is the index of this register in potential_reload_regs.
2197 CLASS is the regclass whose need is being satisfied.
2198 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2199 so that this register can count off against them.
2200 MAX_NONGROUPS is 0 if this register is part of a group.
2201 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
/* Register potential_reload_regs[I] as a new spill register, spill the
   pseudos currently allocated to it, and decrement the need counts
   (MAX_NEEDS, and MAX_NONGROUPS unless this reg is part of a group)
   for CLASS and its superclasses.  GLOBAL and DUMPFILE are as passed
   to `reload'.  NOTE(review): listing elides some lines (the final
   return, some braces); only numbered lines are original text.  */
2204 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2212 register enum reg_class *p;
2214 int regno = potential_reload_regs[i];
2216 if (i >= FIRST_PSEUDO_REGISTER)
2217 abort (); /* Caller failed to find any register.  */
   /* A fixed or forbidden reg can never legitimately be spilled; most
      likely an asm clobbered every usable register.  */
2219 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2220 fatal ("fixed or forbidden register was spilled.\n\
2221 This may be due to a compiler bug or to impossible asm\n\
2222 statements or clauses.");
2224 /* Make reg REGNO an additional reload reg.  */
2226 potential_reload_regs[i] = -1;
2227 spill_regs[n_spills] = regno;
2228 spill_reg_order[regno] = n_spills;
2230 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2232 /* Clear off the needs we just satisfied.  */
2235 p = reg_class_superclasses[class];
2236 while (*p != LIM_REG_CLASSES)
2237 max_needs[(int) *p++]--;
   /* If this reg is not filling a group need, also count it off
      against the non-group needs and mark it reserved for them.  */
2239 if (max_nongroups && max_nongroups[class] > 0)
2241 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2242 max_nongroups[class]--;
2243 p = reg_class_superclasses[class];
2244 while (*p != LIM_REG_CLASSES)
2245 max_nongroups[(int) *p++]--;
2248 /* Spill every pseudo reg that was allocated to this reg
2249 or to something that overlaps this reg.  */
2251 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2253 /* If there are some registers still to eliminate and this register
2254 wasn't ever used before, additional stack space may have to be
2255 allocated to store this register.  Thus, we may have changed the offset
2256 between the stack and frame pointers, so mark that something has changed.
2257 (If new pseudos were spilled, thus requiring more space, VAL would have
2258 been set non-zero by the call to spill_hard_reg above since additional
2259 reloads may be needed in that case.
2261 One might think that we need only set VAL to 1 if this is a call-used
2262 register.  However, the set of registers that must be saved by the
2263 prologue is not identical to the call-used set.  For example, the
2264 register used by the call insn for the return PC is a call-used register,
2265 but must be saved by the prologue.  */
2266 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
   /* The prologue/epilogue may now need to save this reg.  */
2269 regs_ever_live[spill_regs[n_spills]] = 1;
2275 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2276 data that is dead in INSN. */
/* Turn INSN into a deleted note, first recursively deleting any
   previous insn whose only purpose was to compute a value that dies
   in INSN.  */
2279 delete_dead_insn (insn)
2282 rtx prev = prev_real_insn (insn);
2285 /* If the previous insn sets a register that dies in our insn, delete it
   too.  */
2287 if (prev && GET_CODE (PATTERN (prev)) == SET
2288 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2289 && reg_mentioned_p (prev_dest, PATTERN (insn))
2290 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2291 delete_dead_insn (prev);
   /* Delete INSN in place by rewriting it as a NOTE_INSN_DELETED note,
      which keeps the insn chain intact.  */
2293 PUT_CODE (insn, NOTE);
2294 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2295 NOTE_SOURCE_FILE (insn) = 0;
2298 /* Modify the home of pseudo-reg I.
2299 The new home is present in reg_renumber[I].
2301 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2302 or it may be -1, meaning there is none or it is not relevant.
2303 This is used so that all pseudos spilled from a given hard reg
2304 can share one stack slot. */
/* Give pseudo-reg I its new home per reg_renumber[I]: renumber its
   REG rtx, and if it got no hard reg and no equivalent, assign it a
   stack slot (shared with other pseudos spilled from hard reg
   FROM_REG when possible; FROM_REG may be -1 for "none/irrelevant").
   NOTE(review): listing elides some lines (braces, declarations of
   `x', `adjust', `stack_slot'); only numbered lines are original.  */
2307 alter_reg (i, from_reg)
2311 /* When outputting an inline function, this can happen
2312 for a reg that isn't actually used.  */
2313 if (regno_reg_rtx[i] == 0)
2316 /* If the reg got changed to a MEM at rtl-generation time,
   ignore it.  */
2318 if (GET_CODE (regno_reg_rtx[i]) != REG)
2321 /* Modify the reg-rtx to contain the new hard reg
2322 number or else to contain its pseudo reg number.  */
2323 REGNO (regno_reg_rtx[i])
2324 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2326 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2327 allocate a stack slot for it.  */
2329 if (reg_renumber[i] < 0
2330 && reg_n_refs[i] > 0
2331 && reg_equiv_constant[i] == 0
2332 && reg_equiv_memory_loc[i] == 0)
2335 int inherent_size = PSEUDO_REGNO_BYTES (i);
2336 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2339 /* Each pseudo reg has an inherent size which comes from its own mode,
2340 and a total size which provides room for paradoxical subregs
2341 which refer to the pseudo reg in wider modes.
2343 We can use a slot already allocated if it provides both
2344 enough inherent space and enough total space.
2345 Otherwise, we allocate a new slot, making sure that it has no less
2346 inherent space, and no less total space, than the previous slot.  */
2349 /* No known place to spill from => no slot to reuse.  */
2350 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2351 if (BYTES_BIG_ENDIAN)
2352 /* Cancel the big-endian correction done in assign_stack_local.
2353 Get the address of the beginning of the slot.
2354 This is so we can do a big-endian correction unconditionally
   below.  */
2356 adjust = inherent_size - total_size;
2358 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2360 /* Reuse a stack slot if possible.  */
2361 else if (spill_stack_slot[from_reg] != 0
2362 && spill_stack_slot_width[from_reg] >= total_size
2363 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2365 x = spill_stack_slot[from_reg];
2366 /* Allocate a bigger slot.  */
2369 /* Compute maximum size needed, both for inherent size
2370 and for total size.  */
2371 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
   /* Widen MODE/TOTAL_SIZE to cover whatever the old shared slot
      already held, so previous users of the slot still fit.  */
2373 if (spill_stack_slot[from_reg])
2375 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2377 mode = GET_MODE (spill_stack_slot[from_reg]);
2378 if (spill_stack_slot_width[from_reg] > total_size)
2379 total_size = spill_stack_slot_width[from_reg];
2381 /* Make a slot with that size.  */
2382 x = assign_stack_local (mode, total_size, -1);
2384 if (BYTES_BIG_ENDIAN)
2386 /* Cancel the big-endian correction done in assign_stack_local.
2387 Get the address of the beginning of the slot.
2388 This is so we can do a big-endian correction unconditionally
   below.  */
2390 adjust = GET_MODE_SIZE (mode) - total_size;
2392 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2395 plus_constant (XEXP (x, 0), adjust));
   /* Record the new shared slot for later pseudos spilled from
      FROM_REG.  */
2397 spill_stack_slot[from_reg] = stack_slot;
2398 spill_stack_slot_width[from_reg] = total_size;
2401 /* On a big endian machine, the "address" of the slot
2402 is the address of the low part that fits its inherent mode.  */
2403 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2404 adjust += (total_size - inherent_size);
2406 /* If we have any adjustment to make, or if the stack slot is the
2407 wrong mode, make a new stack slot.  */
2408 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2410 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2411 plus_constant (XEXP (x, 0), adjust));
2412 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2415 /* Save the stack slot for later.  */
2416 reg_equiv_memory_loc[i] = x;
2420 /* Mark the slots in regs_ever_live for the hard regs
2421 used by pseudo-reg number REGNO. */
/* Mark as ever-live (regs_ever_live) every hard reg occupied by
   pseudo-reg REGNO in its renumbered home.  NOTE(review): the guard
   for reg_renumber[regno] < 0 is elided in this listing.  */
2424 mark_home_live (regno)
2427 register int i, lim;
2428 i = reg_renumber[regno];
   /* LIM is one past the last hard reg the pseudo occupies, given
      its mode.  */
2431 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2433 regs_ever_live[i++] = 1;
2436 /* Mark the registers used in SCRATCH as being live. */
/* Mark every hard reg covered by the hard-register rtx SCRATCH
   (REGNO (scratch) through the last reg its mode needs) as ever
   live in regs_ever_live.  */
2439 mark_scratch_live (scratch)
2443 int regno = REGNO (scratch);
2444 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2446 for (i = regno; i < lim; i++)
2447 regs_ever_live[i] = 1;
2450 /* This function handles the tracking of elimination offsets around branches.
2452 X is a piece of RTL being scanned.
2454 INSN is the insn that it came from, if any.
2456 INITIAL_P is non-zero if we are to set the offset to be the initial
2457 offset and zero if we are setting the offset of the label to be the
/* Track elimination offsets across labels and jumps.  X is the rtl
   being scanned, INSN the insn it came from (or 0), and INITIAL_P
   nonzero when recording initial offsets rather than current ones.
   Disables eliminations whose offsets disagree between different
   paths reaching the same label.  NOTE(review): listing elides some
   lines (switch cases, braces); only numbered lines are original.  */
2461 set_label_offsets (x, insn, initial_p)
2466 enum rtx_code code = GET_CODE (x);
2469 struct elim_table *p;
   /* Non-local labels can be reached from anywhere; give up on them
      here (the elided code presumably returns).  */
2474 if (LABEL_REF_NONLOCAL_P (x))
2479 /* ... fall through ... */
2482 /* If we know nothing about this label, set the desired offsets.  Note
2483 that this sets the offset at a label to be the offset before a label
2484 if we don't know anything about the label.  This is not correct for
2485 the label after a BARRIER, but is the best guess we can make.  If
2486 we guessed wrong, we will suppress an elimination that might have
2487 been possible had we been able to guess correctly.  */
2489 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2491 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2492 offsets_at[CODE_LABEL_NUMBER (x)][i]
2493 = (initial_p ? reg_eliminate[i].initial_offset
2494 : reg_eliminate[i].offset);
2495 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2498 /* Otherwise, if this is the definition of a label and it is
2499 preceded by a BARRIER, set our offsets to the known offset of
   that label.  */
2503 && (tem = prev_nonnote_insn (insn)) != 0
2504 && GET_CODE (tem) == BARRIER)
   /* After a BARRIER the only way here is via the label, so adopt
      the offsets recorded at the label as current.  */
2506 num_not_at_initial_offset = 0;
2507 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2509 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2510 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2511 if (reg_eliminate[i].can_eliminate
2512 && (reg_eliminate[i].offset
2513 != reg_eliminate[i].initial_offset))
2514 num_not_at_initial_offset++;
2519 /* If neither of the above cases is true, compare each offset
2520 with those previously recorded and suppress any eliminations
2521 where the offsets disagree.  */
2523 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2524 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2525 != (initial_p ? reg_eliminate[i].initial_offset
2526 : reg_eliminate[i].offset))
2527 reg_eliminate[i].can_eliminate = 0;
   /* For an insn, recurse on its pattern, then fall through to scan
      its REG_NOTES.  */
2532 set_label_offsets (PATTERN (insn), insn, initial_p);
2534 /* ... fall through ... */
2538 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2539 and hence must have all eliminations at their initial offsets.  */
2540 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2541 if (REG_NOTE_KIND (tem) == REG_LABEL)
2542 set_label_offsets (XEXP (tem, 0), insn, 1);
2547 /* Each of the labels in the address vector must be at their initial
2548 offsets.  We want the first field for ADDR_VEC and the second
2549 field for ADDR_DIFF_VEC.  */
2551 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2552 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2557 /* We only care about setting PC.  If the source is not RETURN,
2558 IF_THEN_ELSE, or a label, disable any eliminations not at
2559 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2560 isn't one of those possibilities.  For branches to a label,
2561 call ourselves recursively.
2563 Note that this can disable elimination unnecessarily when we have
2564 a non-local goto since it will look like a non-constant jump to
2565 someplace in the current function.  This isn't a significant
2566 problem since such jumps will normally be when all elimination
2567 pairs are back to their initial offsets.  */
2569 if (SET_DEST (x) != pc_rtx)
2572 switch (GET_CODE (SET_SRC (x)))
   /* Direct branch to a label: propagate current offsets to it.  */
2579 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
   /* Conditional branch: handle each arm; a non-label, non-PC,
      non-RETURN arm forces the "unknown target" treatment below.  */
2583 tem = XEXP (SET_SRC (x), 1);
2584 if (GET_CODE (tem) == LABEL_REF)
2585 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2586 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2589 tem = XEXP (SET_SRC (x), 2);
2590 if (GET_CODE (tem) == LABEL_REF)
2591 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2592 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2597 /* If we reach here, all eliminations must be at their initial
2598 offset because we are doing a jump to a variable address.  */
   /* NOTE(review): "®_eliminate" below looks like an extraction
      artifact for "&reg_eliminate" (HTML entity mangling) — verify
      against the pristine source.  */
2599 for (p = reg_eliminate; p < ®_eliminate[NUM_ELIMINABLE_REGS]; p++)
2600 if (p->offset != p->initial_offset)
2601 p->can_eliminate = 0;
2605 /* Used for communication between the next two function to properly share
2606 the vector for an ASM_OPERANDS. */
2608 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2610 /* Scan X and replace any eliminable registers (such as fp) with a
2611 replacement (such as sp), plus an offset.
2613 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2614 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2615 MEM, we are allowed to replace a sum of a register and the constant zero
2616 with the register, which we cannot do outside a MEM. In addition, we need
2617 to record the fact that a register is referenced outside a MEM.
2619 If INSN is an insn, it is the insn containing X. If we replace a REG
2620 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2621 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2622 the REG is being modified.
2624 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2625 That's used when we eliminate in expressions stored in notes.
2626 This means, do not set ref_outside_mem even if the reference
2629 If we see a modification to a register we know about, take the
2630 appropriate action (see case SET, below).
2632 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2633 replacements done assuming all offsets are at their initial values. If
2634 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2635 encounter, return the actual location so that find_reloads will do
2636 the proper thing. */
2639 eliminate_regs (x, mem_mode, insn)
2641 enum machine_mode mem_mode;
2644 enum rtx_code code = GET_CODE (x);
2645 struct elim_table *ep;
2670 /* First handle the case where we encounter a bare register that
2671 is eliminable. Replace it with a PLUS. */
2672 if (regno < FIRST_PSEUDO_REGISTER)
2674 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2676 if (ep->from_rtx == x && ep->can_eliminate)
2679 /* Refs inside notes don't count for this purpose. */
2680 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2681 || GET_CODE (insn) == INSN_LIST)))
2682 ep->ref_outside_mem = 1;
2683 return plus_constant (ep->to_rtx, ep->previous_offset);
2687 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2688 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2690 /* In this case, find_reloads would attempt to either use an
2691 incorrect address (if something is not at its initial offset)
2692 or substitute a replaced address into an insn (which loses
2693 if the offset is changed by some later action). So we simply
2694 return the replaced stack slot (assuming it is changed by
2695 elimination) and ignore the fact that this is actually a
2696 reference to the pseudo. Ensure we make a copy of the
2697 address in case it is shared. */
2698 new = eliminate_regs (reg_equiv_memory_loc[regno],
2700 if (new != reg_equiv_memory_loc[regno])
2702 cannot_omit_stores[regno] = 1;
2703 return copy_rtx (new);
2709 /* If this is the sum of an eliminable register and a constant, rework
2711 if (GET_CODE (XEXP (x, 0)) == REG
2712 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2713 && CONSTANT_P (XEXP (x, 1)))
2715 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2717 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2720 /* Refs inside notes don't count for this purpose. */
2721 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2722 || GET_CODE (insn) == INSN_LIST)))
2723 ep->ref_outside_mem = 1;
2725 /* The only time we want to replace a PLUS with a REG (this
2726 occurs when the constant operand of the PLUS is the negative
2727 of the offset) is when we are inside a MEM. We won't want
2728 to do so at other times because that would change the
2729 structure of the insn in a way that reload can't handle.
2730 We special-case the commonest situation in
2731 eliminate_regs_in_insn, so just replace a PLUS with a
2732 PLUS here, unless inside a MEM. */
2733 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2734 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2737 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2738 plus_constant (XEXP (x, 1),
2739 ep->previous_offset));
2742 /* If the register is not eliminable, we are done since the other
2743 operand is a constant. */
2747 /* If this is part of an address, we want to bring any constant to the
2748 outermost PLUS. We will do this by doing register replacement in
2749 our operands and seeing if a constant shows up in one of them.
2751 We assume here this is part of an address (or a "load address" insn)
2752 since an eliminable register is not likely to appear in any other
2755 If we have (plus (eliminable) (reg)), we want to produce
2756 (plus (plus (replacement) (reg) (const))). If this was part of a
2757 normal add insn, (plus (replacement) (reg)) will be pushed as a
2758 reload. This is the desired action. */
2761 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2762 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2764 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2766 /* If one side is a PLUS and the other side is a pseudo that
2767 didn't get a hard register but has a reg_equiv_constant,
2768 we must replace the constant here since it may no longer
2769 be in the position of any operand. */
2770 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2771 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2772 && reg_renumber[REGNO (new1)] < 0
2773 && reg_equiv_constant != 0
2774 && reg_equiv_constant[REGNO (new1)] != 0)
2775 new1 = reg_equiv_constant[REGNO (new1)];
2776 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2777 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2778 && reg_renumber[REGNO (new0)] < 0
2779 && reg_equiv_constant[REGNO (new0)] != 0)
2780 new0 = reg_equiv_constant[REGNO (new0)];
2782 new = form_sum (new0, new1);
2784 /* As above, if we are not inside a MEM we do not want to
2785 turn a PLUS into something else. We might try to do so here
2786 for an addition of 0 if we aren't optimizing. */
2787 if (! mem_mode && GET_CODE (new) != PLUS)
2788 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2796 /* If this is the product of an eliminable register and a
2797 constant, apply the distribute law and move the constant out
2798 so that we have (plus (mult ..) ..). This is needed in order
2799 to keep load-address insns valid. This case is pathological.
2800 We ignore the possibility of overflow here. */
2801 if (GET_CODE (XEXP (x, 0)) == REG
2802 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2803 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2804 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2806 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2809 /* Refs inside notes don't count for this purpose. */
2810 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2811 || GET_CODE (insn) == INSN_LIST)))
2812 ep->ref_outside_mem = 1;
2815 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2816 ep->previous_offset * INTVAL (XEXP (x, 1)));
2819 /* ... fall through ... */
2824 case DIV: case UDIV:
2825 case MOD: case UMOD:
2826 case AND: case IOR: case XOR:
2827 case ROTATERT: case ROTATE:
2828 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2830 case GE: case GT: case GEU: case GTU:
2831 case LE: case LT: case LEU: case LTU:
2833 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2835 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2837 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2838 return gen_rtx (code, GET_MODE (x), new0, new1);
2843 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2846 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2847 if (new != XEXP (x, 0))
2848 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2851 /* ... fall through ... */
2854 /* Now do eliminations in the rest of the chain. If this was
2855 an EXPR_LIST, this might result in allocating more memory than is
2856 strictly needed, but it simplifies the code. */
2859 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2860 if (new != XEXP (x, 1))
2861 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2869 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2870 if (ep->to_rtx == XEXP (x, 0))
2872 int size = GET_MODE_SIZE (mem_mode);
2874 /* If more bytes than MEM_MODE are pushed, account for them. */
2875 #ifdef PUSH_ROUNDING
2876 if (ep->to_rtx == stack_pointer_rtx)
2877 size = PUSH_ROUNDING (size);
2879 if (code == PRE_DEC || code == POST_DEC)
2885 /* Fall through to generic unary operation case. */
2887 case STRICT_LOW_PART:
2889 case SIGN_EXTEND: case ZERO_EXTEND:
2890 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2891 case FLOAT: case FIX:
2892 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2896 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2897 if (new != XEXP (x, 0))
2898 return gen_rtx (code, GET_MODE (x), new);
2902 /* Similar to above processing, but preserve SUBREG_WORD.
2903 Convert (subreg (mem)) to (mem) if not paradoxical.
2904 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2905 pseudo didn't get a hard reg, we must replace this with the
2906 eliminated version of the memory location because push_reloads
2907 may do the replacement in certain circumstances. */
2908 if (GET_CODE (SUBREG_REG (x)) == REG
2909 && (GET_MODE_SIZE (GET_MODE (x))
2910 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2911 && reg_equiv_memory_loc != 0
2912 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2914 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2917 /* If we didn't change anything, we must retain the pseudo. */
2918 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2919 new = SUBREG_REG (x);
2922 /* Otherwise, ensure NEW isn't shared in case we have to reload
2924 new = copy_rtx (new);
2926 /* In this case, we must show that the pseudo is used in this
2927 insn so that delete_output_reload will do the right thing. */
2928 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2929 && GET_CODE (insn) != INSN_LIST)
2930 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2935 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2937 if (new != XEXP (x, 0))
2939 if (GET_CODE (new) == MEM
2940 && (GET_MODE_SIZE (GET_MODE (x))
2941 <= GET_MODE_SIZE (GET_MODE (new)))
2942 #ifdef LOAD_EXTEND_OP
2943 /* On these machines we will be reloading what is
2944 inside the SUBREG if it originally was a pseudo and
2945 the inner and outer modes are both a word or
2946 smaller. So leave the SUBREG then. */
2947 && ! (GET_CODE (SUBREG_REG (x)) == REG
2948 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2949 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2950 && (GET_MODE_SIZE (GET_MODE (x))
2951 > GET_MODE_SIZE (GET_MODE (new)))
2952 && INTEGRAL_MODE_P (GET_MODE (new))
2953 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
2957 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2958 enum machine_mode mode = GET_MODE (x);
2960 if (BYTES_BIG_ENDIAN)
2961 offset += (MIN (UNITS_PER_WORD,
2962 GET_MODE_SIZE (GET_MODE (new)))
2963 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2965 PUT_MODE (new, mode);
2966 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2970 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2976 /* If clobbering a register that is the replacement register for an
2977 elimination we still think can be performed, note that it cannot
2978 be performed. Otherwise, we need not be concerned about it. */
2979 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2980 if (ep->to_rtx == XEXP (x, 0))
2981 ep->can_eliminate = 0;
2983 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2984 if (new != XEXP (x, 0))
2985 return gen_rtx (code, GET_MODE (x), new);
2991 /* Properly handle sharing input and constraint vectors. */
2992 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2994 /* When we come to a new vector not seen before,
2995 scan all its elements; keep the old vector if none
2996 of them changes; otherwise, make a copy. */
2997 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2998 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2999 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3000 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3003 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3004 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3007 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3008 new_asm_operands_vec = old_asm_operands_vec;
3010 new_asm_operands_vec
3011 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3014 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3015 if (new_asm_operands_vec == old_asm_operands_vec)
3018 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3019 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3020 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3021 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3022 ASM_OPERANDS_SOURCE_FILE (x),
3023 ASM_OPERANDS_SOURCE_LINE (x));
3024 new->volatil = x->volatil;
3029 /* Check for setting a register that we know about. */
3030 if (GET_CODE (SET_DEST (x)) == REG)
3032 /* See if this is setting the replacement register for an
3035 If DEST is the hard frame pointer, we do nothing because we
3036 assume that all assignments to the frame pointer are for
3037 non-local gotos and are being done at a time when they are valid
3038 and do not disturb anything else. Some machines want to
3039 eliminate a fake argument pointer (or even a fake frame pointer)
3040 with either the real frame or the stack pointer. Assignments to
3041 the hard frame pointer must not prevent this elimination. */
3043 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3045 if (ep->to_rtx == SET_DEST (x)
3046 && SET_DEST (x) != hard_frame_pointer_rtx)
3048 /* If it is being incremented, adjust the offset. Otherwise,
3049 this elimination can't be done. */
3050 rtx src = SET_SRC (x);
3052 if (GET_CODE (src) == PLUS
3053 && XEXP (src, 0) == SET_DEST (x)
3054 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3055 ep->offset -= INTVAL (XEXP (src, 1));
3057 ep->can_eliminate = 0;
3060 /* Now check to see we are assigning to a register that can be
3061 eliminated. If so, it must be as part of a PARALLEL, since we
3062 will not have been called if this is a single SET. So indicate
3063 that we can no longer eliminate this reg. */
3064 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3066 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3067 ep->can_eliminate = 0;
3070 /* Now avoid the loop below in this common case. */
3072 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3073 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3075 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3076 write a CLOBBER insn. */
3077 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3078 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3079 && GET_CODE (insn) != INSN_LIST)
3080 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3082 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3083 return gen_rtx (SET, VOIDmode, new0, new1);
3089 /* Our only special processing is to pass the mode of the MEM to our
3090 recursive call and copy the flags. While we are here, handle this
3091 case more efficiently. */
3092 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3093 if (new != XEXP (x, 0))
3095 new = gen_rtx (MEM, GET_MODE (x), new);
3096 new->volatil = x->volatil;
3097 new->unchanging = x->unchanging;
3098 new->in_struct = x->in_struct;
3105 /* Process each of our operands recursively. If any have changed, make a
3107 fmt = GET_RTX_FORMAT (code);
3108 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3112 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3113 if (new != XEXP (x, i) && ! copied)
3115 rtx new_x = rtx_alloc (code);
3116 bcopy ((char *) x, (char *) new_x,
3117 (sizeof (*new_x) - sizeof (new_x->fld)
3118 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3124 else if (*fmt == 'E')
3127 for (j = 0; j < XVECLEN (x, i); j++)
3129 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3130 if (new != XVECEXP (x, i, j) && ! copied_vec)
3132 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3133 &XVECEXP (x, i, 0));
3136 rtx new_x = rtx_alloc (code);
3137 bcopy ((char *) x, (char *) new_x,
3138 (sizeof (*new_x) - sizeof (new_x->fld)
3139 + (sizeof (new_x->fld[0])
3140 * GET_RTX_LENGTH (code))));
3144 XVEC (x, i) = new_v;
3147 XVECEXP (x, i, j) = new;
3155 /* Scan INSN and eliminate all eliminable registers in it.
3157 If REPLACE is nonzero, do the replacement destructively. Also
3158 delete the insn as dead if it is setting an eliminable register.
3160 If REPLACE is zero, do all our allocations in reload_obstack.
3162 If no eliminations were done and this insn doesn't require any elimination
3163 processing (these are not identical conditions: it might be updating sp,
3164 but not referencing fp; this needs to be seen during reload_as_needed so
3165 that the offset between fp and sp can be taken into consideration), zero
3166 is returned. Otherwise, 1 is returned. */
3169 eliminate_regs_in_insn (insn, replace)
3173 rtx old_body = PATTERN (insn);
3174 rtx old_set = single_set (insn);
3177 struct elim_table *ep;
3180 push_obstacks (&reload_obstack, &reload_obstack);
3182 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3183 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3185 /* Check for setting an eliminable register. */
3186 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3187 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3189 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3190 /* If this is setting the frame pointer register to the
3191 hardware frame pointer register and this is an elimination
3192 that will be done (tested above), this insn is really
3193 adjusting the frame pointer downward to compensate for
3194 the adjustment done before a nonlocal goto. */
3195 if (ep->from == FRAME_POINTER_REGNUM
3196 && ep->to == HARD_FRAME_POINTER_REGNUM)
3198 rtx src = SET_SRC (old_set);
3201 if (src == ep->to_rtx)
3203 else if (GET_CODE (src) == PLUS
3204 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3205 offset = INTVAL (XEXP (src, 0)), ok = 1;
3212 = plus_constant (ep->to_rtx, offset - ep->offset);
3214 /* First see if this insn remains valid when we
3215 make the change. If not, keep the INSN_CODE
3216 the same and let reload fix it up. */
3217 validate_change (insn, &SET_SRC (old_set), src, 1);
3218 validate_change (insn, &SET_DEST (old_set),
3220 if (! apply_change_group ())
3222 SET_SRC (old_set) = src;
3223 SET_DEST (old_set) = ep->to_rtx;
3233 /* In this case this insn isn't serving a useful purpose. We
3234 will delete it in reload_as_needed once we know that this
3235 elimination is, in fact, being done.
3237 If REPLACE isn't set, we can't delete this insn, but needn't
3238 process it since it won't be used unless something changes. */
3240 delete_dead_insn (insn);
3245 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3246 in the insn is the negative of the offset in FROM. Substitute
3247 (set (reg) (reg to)) for the insn and change its code.
3249 We have to do this here, rather than in eliminate_regs, so that we can
3250 change the insn code. */
3252 if (GET_CODE (SET_SRC (old_set)) == PLUS
3253 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3254 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3255 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3257 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3258 && ep->can_eliminate)
3260 /* We must stop at the first elimination that will be used.
3261 If this one would replace the PLUS with a REG, do it
3262 now. Otherwise, quit the loop and let eliminate_regs
3263 do its normal replacement. */
3264 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3266 /* We assume here that we don't need a PARALLEL of
3267 any CLOBBERs for this assignment. There's not
3268 much we can do if we do need it. */
3269 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3270 SET_DEST (old_set), ep->to_rtx);
3271 INSN_CODE (insn) = -1;
3280 old_asm_operands_vec = 0;
3282 /* Replace the body of this insn with a substituted form. If we changed
3283 something, return non-zero.
3285 If we are replacing a body that was a (set X (plus Y Z)), try to
3286 re-recognize the insn. We do this in case we had a simple addition
3287 but now can do this as a load-address. This saves an insn in this
3290 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3291 if (new_body != old_body)
3293 /* If we aren't replacing things permanently and we changed something,
3294 make another copy to ensure that all the RTL is new. Otherwise
3295 things can go wrong if find_reload swaps commutative operands
3296 and one is inside RTL that has been copied while the other is not. */
3298 /* Don't copy an asm_operands because (1) there's no need and (2)
3299 copy_rtx can't do it properly when there are multiple outputs. */
3300 if (! replace && asm_noperands (old_body) < 0)
3301 new_body = copy_rtx (new_body);
3303 /* If we had a move insn but now we don't, rerecognize it. This will
3304 cause spurious re-recognition if the old move had a PARALLEL since
3305 the new one still will, but we can't call single_set without
3306 having put NEW_BODY into the insn and the re-recognition won't
3307 hurt in this rare case. */
3309 && ((GET_CODE (SET_SRC (old_set)) == REG
3310 && (GET_CODE (new_body) != SET
3311 || GET_CODE (SET_SRC (new_body)) != REG))
3312 /* If this was a load from or store to memory, compare
3313 the MEM in recog_operand to the one in the insn. If they
3314 are not equal, then rerecognize the insn. */
3316 && ((GET_CODE (SET_SRC (old_set)) == MEM
3317 && SET_SRC (old_set) != recog_operand[1])
3318 || (GET_CODE (SET_DEST (old_set)) == MEM
3319 && SET_DEST (old_set) != recog_operand[0])))
3320 /* If this was an add insn before, rerecognize. */
3321 || GET_CODE (SET_SRC (old_set)) == PLUS))
3323 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3324 /* If recognition fails, store the new body anyway.
3325 It's normal to have recognition failures here
3326 due to bizarre memory addresses; reloading will fix them. */
3327 PATTERN (insn) = new_body;
3330 PATTERN (insn) = new_body;
3335 /* Loop through all elimination pairs. See if any have changed and
3336 recalculate the number not at initial offset.
3338 Compute the maximum offset (minimum offset if the stack does not
3339 grow downward) for each elimination pair.
3341 We also detect cases where register elimination cannot be done,
3342 namely, if a register would be both changed and referenced outside a MEM
3343 in the resulting insn since such an insn is often undefined and, even if
3344 not, we cannot know what meaning will be given to it. Note that it is
3345 valid to have a register used in an address in an insn that changes it
3346 (presumably with a pre- or post-increment or decrement).
3348 If anything changes, return nonzero. */
3350 num_not_at_initial_offset = 0;
3351 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3353 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3354 ep->can_eliminate = 0;
3356 ep->ref_outside_mem = 0;
3358 if (ep->previous_offset != ep->offset)
3361 ep->previous_offset = ep->offset;
3362 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3363 num_not_at_initial_offset++;
3365 #ifdef STACK_GROWS_DOWNWARD
3366 ep->max_offset = MAX (ep->max_offset, ep->offset);
3368 ep->max_offset = MIN (ep->max_offset, ep->offset);
3373 /* If we changed something, perform elimination in REG_NOTES. This is
3374 needed even when REPLACE is zero because a REG_DEAD note might refer
3375 to a register that we eliminate and could cause a different number
3376 of spill registers to be needed in the final reload pass than in
3378 if (val && REG_NOTES (insn) != 0)
3379 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3387 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3388 replacement we currently believe is valid, mark it as not eliminable if X
3389 modifies DEST in any way other than by adding a constant integer to it.
3391 If DEST is the frame pointer, we do nothing because we assume that
3392 all assignments to the hard frame pointer are nonlocal gotos and are being
3393 done at a time when they are valid and do not disturb anything else.
3394 Some machines want to eliminate a fake argument pointer with either the
3395 frame or stack pointer. Assignments to the hard frame pointer must not
3396 prevent this elimination.
3398 Called via note_stores from reload before starting its passes to scan
3399 the insns of the function. */
3402 mark_not_eliminable (dest, x)
3408 /* A SUBREG of a hard register here is just changing its mode. We should
3409 not see a SUBREG of an eliminable hard register, but check just in
3411 if (GET_CODE (dest) == SUBREG)
3412 dest = SUBREG_REG (dest);
/* Assignments to the hard frame pointer are assumed to be valid
   (e.g. nonlocal-goto handling) and must not disqualify an
   elimination; see the header comment above.  */
3414 if (dest == hard_frame_pointer_rtx)
/* If DEST is the target of a still-believed-valid elimination and X is
   anything other than (set DEST (plus DEST (const_int ...))), that
   elimination can never be performed: disable it permanently by
   clearing both can_eliminate and can_eliminate_previous.  */
3417 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3418 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3419 && (GET_CODE (x) != SET
3420 || GET_CODE (SET_SRC (x)) != PLUS
3421 || XEXP (SET_SRC (x), 0) != dest
3422 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3424 reg_eliminate[i].can_eliminate_previous
3425 = reg_eliminate[i].can_eliminate = 0;
3430 /* Kick all pseudos out of hard register REGNO.
3431 If GLOBAL is nonzero, try to find someplace else to put them.
3432 If DUMPFILE is nonzero, log actions taken on that file.
3434 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3435 because we found we can't eliminate some register. In that case, no pseudos
3436 are allowed to be in the register, even if they are only in a block that
3437 doesn't require spill registers, unlike the case when we are spilling this
3438 hard reg to produce another spill register.
3440 Return nonzero if any pseudos needed to be kicked out. */
3443 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3449 enum reg_class class = REGNO_REG_CLASS (regno);
3450 int something_changed = 0;
/* REGNO may no longer be used for reload registers or pseudos.  */
3453 SET_HARD_REG_BIT (forbidden_regs, regno);
3456 regs_ever_live[regno] = 1;
3458 /* Spill every pseudo reg that was allocated to this reg
3459 or to something that overlaps this reg. */
3461 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3462 if (reg_renumber[i] >= 0
3463 && reg_renumber[i] <= regno
3465 + HARD_REGNO_NREGS (reg_renumber[i],
3466 PSEUDO_REGNO_MODE (i))
3469 /* If this register belongs solely to a basic block which needed no
3470 spilling of any class that this register is contained in,
3471 leave it be, unless we are spilling this register because
3472 it was a hard register that can't be eliminated. */
3474 if (! cant_eliminate
3475 && basic_block_needs[0]
3476 && reg_basic_block[i] >= 0
3477 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
/* The block must also have no needs in any superclass of CLASS,
   otherwise the pseudo could still conflict with a reload reg.  */
3481 for (p = reg_class_superclasses[(int) class];
3482 *p != LIM_REG_CLASSES; p++)
3483 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3486 if (*p == LIM_REG_CLASSES)
3490 /* Mark it as no longer having a hard register home. */
3491 reg_renumber[i] = -1;
3492 /* We will need to scan everything again. */
3493 something_changed = 1;
3495 retry_global_alloc (i, forbidden_regs);
3497 alter_reg (i, regno);
3500 if (reg_renumber[i] == -1)
3501 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3503 fprintf (dumpfile, " Register %d now in %d.\n\n",
3504 i, reg_renumber[i]);
/* Likewise invalidate any SCRATCH rtx that was allocated to this hard
   reg, unless its block has no needs in CLASS or any superclass.  */
3507 for (i = 0; i < scratch_list_length; i++)
3509 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3511 if (! cant_eliminate && basic_block_needs[0]
3512 && ! basic_block_needs[(int) class][scratch_block[i]])
3516 for (p = reg_class_superclasses[(int) class];
3517 *p != LIM_REG_CLASSES; p++)
3518 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3521 if (*p == LIM_REG_CLASSES)
/* Turn the rtx back into a bare SCRATCH so a reg is chosen later.  */
3524 PUT_CODE (scratch_list[i], SCRATCH);
3525 scratch_list[i] = 0;
3526 something_changed = 1;
3531 return something_changed;
3534 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3535 Also mark any hard registers used to store user variables as
3536 forbidden from being used for spill registers. */
3539 scan_paradoxical_subregs (x)
3544 register enum rtx_code code = GET_CODE (x);
3549 #ifdef SMALL_REGISTER_CLASSES
/* User-variable hard regs must not be taken for spill regs.  */
3550 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3551 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
/* A paradoxical SUBREG (outer mode wider than the inner reg's mode)
   means the reg may be referenced with the wider width; record that
   width so a large-enough stack slot is allocated if it is spilled.  */
3567 if (GET_CODE (SUBREG_REG (x)) == REG
3568 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3569 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3570 = GET_MODE_SIZE (GET_MODE (x));
/* Recurse over all sub-expressions and sub-vectors of X.  */
3574 fmt = GET_RTX_FORMAT (code);
3575 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3578 scan_paradoxical_subregs (XEXP (x, i));
3579 else if (fmt[i] == 'E')
3582 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3583 scan_paradoxical_subregs (XVECEXP (x, i, j));
/* qsort comparison function: order hard regs by increasing use count
   (fewest pseudo-reg references first), so lightly-used regs are
   preferred as reload regs.  */
3589 hard_reg_use_compare (p1, p2)
3590 struct hard_reg_n_uses *p1, *p2;
3592 int tem = p1->uses - p2->uses;
3593 if (tem != 0) return tem;
3594 /* If regs are equally good, sort by regno,
3595 so that the results of qsort leave nothing to chance. */
3596 return p1->regno - p2->regno;
3599 /* Choose the order to consider regs for use as reload registers
3600 based on how much trouble would be caused by spilling one.
3601 Store them in order of decreasing preference in potential_reload_regs. */
3604 order_regs_for_reload ()
3610 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3612 CLEAR_HARD_REG_SET (bad_spill_regs);
3614 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3615 potential_reload_regs[i] = -1;
3617 /* Count number of uses of each hard reg by pseudo regs allocated to it
3618 and then order them by decreasing use. */
3620 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3622 hard_reg_n_uses[i].uses = 0;
3623 hard_reg_n_uses[i].regno = i;
3626 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3628 int regno = reg_renumber[i];
3631 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3633 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
/* LARGE accumulates total references; used below as a "worse than any
   real use count" penalty base.  */
3635 large += reg_n_refs[i];
3638 /* Now fixed registers (which cannot safely be used for reloading)
3639 get a very high use count so they will be considered least desirable.
3640 Registers used explicitly in the rtl code are almost as bad. */
3642 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3646 hard_reg_n_uses[i].uses += 2 * large + 2;
3647 SET_HARD_REG_BIT (bad_spill_regs, i);
3649 else if (regs_explicitly_used[i])
3651 hard_reg_n_uses[i].uses += large + 1;
3652 #ifndef SMALL_REGISTER_CLASSES
3653 /* ??? We are doing this here because of the potential that
3654 bad code may be generated if a register explicitly used in
3655 an insn was used as a spill register for that insn. But
3656 not using these as spill registers may lose on some machines.
3657 We'll have to see how this works out. */
3658 SET_HARD_REG_BIT (bad_spill_regs, i);
/* The hard frame pointer is never a candidate.  */
3662 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3663 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3665 #ifdef ELIMINABLE_REGS
3666 /* If registers other than the frame pointer are eliminable, mark them as
3668 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3670 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3671 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3675 /* Prefer registers not so far used, for use in temporary loading.
3676 Among them, if REG_ALLOC_ORDER is defined, use that order.
3677 Otherwise, prefer registers not preserved by calls. */
3679 #ifdef REG_ALLOC_ORDER
3680 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3682 int regno = reg_alloc_order[i];
3684 if (hard_reg_n_uses[regno].uses == 0)
3685 potential_reload_regs[o++] = regno;
/* No REG_ALLOC_ORDER: take unused call-clobbered regs first, then
   unused call-saved regs.  */
3688 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3690 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3691 potential_reload_regs[o++] = i;
3693 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3695 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3696 potential_reload_regs[o++] = i;
3700 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3701 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3703 /* Now add the regs that are already used,
3704 preferring those used less often. The fixed and otherwise forbidden
3705 registers will be at the end of this list. */
3707 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3708 if (hard_reg_n_uses[i].uses != 0)
3709 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3712 /* Used in reload_as_needed to sort the spilled regs. */
3715 compare_spill_regs (r1, r2)
3721 /* Reload pseudo-registers into hard regs around each insn as needed.
3722 Additional register load insns are output before the insn that needs it
3723 and perhaps store insns after insns that modify the reloaded pseudo reg.
3725 reg_last_reload_reg and reg_reloaded_contents keep track of
3726 which registers are already available in reload registers.
3727 We update these for the reloads that we perform,
3728 as the insns are scanned. */
3731 reload_as_needed (first, live_known)
/* Clear out per-pass bookkeeping of what each spill reg holds.  */
3741 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3742 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3743 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3744 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3745 reg_has_output_reload = (char *) alloca (max_regno);
3746 for (i = 0; i < n_spills; i++)
3748 reg_reloaded_contents[i] = -1;
3749 reg_reloaded_insn[i] = 0;
3752 /* Reset all offsets on eliminable registers to their initial values. */
3753 #ifdef ELIMINABLE_REGS
3754 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3756 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3757 reg_eliminate[i].initial_offset);
3758 reg_eliminate[i].previous_offset
3759 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3762 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3763 reg_eliminate[0].previous_offset
3764 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3767 num_not_at_initial_offset = 0;
3769 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3770 pack registers with group needs. */
3773 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3774 for (i = 0; i < n_spills; i++)
3775 spill_reg_order[spill_regs[i]] = i;
/* Main walk over the whole insn stream.  */
3778 for (insn = first; insn;)
3780 register rtx next = NEXT_INSN (insn);
3782 /* Notice when we move to a new basic block. */
3783 if (live_known && this_block + 1 < n_basic_blocks
3784 && insn == basic_block_head[this_block+1])
3787 /* If we pass a label, copy the offsets from the label information
3788 into the current offsets of each elimination. */
3789 if (GET_CODE (insn) == CODE_LABEL)
3791 num_not_at_initial_offset = 0;
3792 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3794 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3795 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3796 if (reg_eliminate[i].can_eliminate
3797 && (reg_eliminate[i].offset
3798 != reg_eliminate[i].initial_offset))
3799 num_not_at_initial_offset++;
3803 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3805 rtx avoid_return_reg = 0;
3806 rtx oldpat = PATTERN (insn);
3808 #ifdef SMALL_REGISTER_CLASSES
3809 /* Set avoid_return_reg if this is an insn
3810 that might use the value of a function call. */
3811 if (GET_CODE (insn) == CALL_INSN)
3813 if (GET_CODE (PATTERN (insn)) == SET)
3814 after_call = SET_DEST (PATTERN (insn));
3815 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3816 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3817 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3821 else if (after_call != 0
3822 && !(GET_CODE (PATTERN (insn)) == SET
3823 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3825 if (reg_referenced_p (after_call, PATTERN (insn)))
3826 avoid_return_reg = after_call;
3829 #endif /* SMALL_REGISTER_CLASSES */
3831 /* If this is a USE and CLOBBER of a MEM, ensure that any
3832 references to eliminable registers have been removed. */
3834 if ((GET_CODE (PATTERN (insn)) == USE
3835 || GET_CODE (PATTERN (insn)) == CLOBBER)
3836 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3837 XEXP (XEXP (PATTERN (insn), 0), 0)
3838 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3839 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3841 /* If we need to do register elimination processing, do so.
3842 This might delete the insn, in which case we are done. */
/* NOTE(review): the insn's mode field appears to be reused here as a
   per-insn flag (QImode = needs elimination, VOIDmode below = needs
   no reloads) — set elsewhere in this file; confirm against the
   earlier reload pass.  */
3843 if (num_eliminable && GET_MODE (insn) == QImode)
3845 eliminate_regs_in_insn (insn, 1);
3846 if (GET_CODE (insn) == NOTE)
3853 if (GET_MODE (insn) == VOIDmode)
3855 /* First find the pseudo regs that must be reloaded for this insn.
3856 This info is returned in the tables reload_... (see reload.h).
3857 Also modify the body of INSN by substituting RELOAD
3858 rtx's for those pseudo regs. */
3861 bzero (reg_has_output_reload, max_regno);
3862 CLEAR_HARD_REG_SET (reg_is_output_reload);
3864 find_reloads (insn, 1, spill_indirect_levels, live_known,
3870 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3874 /* If this block has not had spilling done for a
3875 particular class and we have any non-optionals that need a
3876 spill reg in that class, abort. */
3878 for (class = 0; class < N_REG_CLASSES; class++)
3879 if (basic_block_needs[class] != 0
3880 && basic_block_needs[class][this_block] == 0)
3881 for (i = 0; i < n_reloads; i++)
3882 if (class == (int) reload_reg_class[i]
3883 && reload_reg_rtx[i] == 0
3884 && ! reload_optional[i]
3885 && (reload_in[i] != 0 || reload_out[i] != 0
3886 || reload_secondary_p[i] != 0))
3887 fatal_insn ("Non-optional registers need a spill register", insn);
3889 /* Now compute which reload regs to reload them into. Perhaps
3890 reusing reload regs from previous insns, or else output
3891 load insns to reload them. Maybe output store insns too.
3892 Record the choices of reload reg in reload_reg_rtx. */
3893 choose_reload_regs (insn, avoid_return_reg);
3895 #ifdef SMALL_REGISTER_CLASSES
3896 /* Merge any reloads that we didn't combine for fear of
3897 increasing the number of spill registers needed but now
3898 discover can be safely merged. */
3899 merge_assigned_reloads (insn);
3902 /* Generate the insns to reload operands into or out of
3903 their reload regs. */
3904 emit_reload_insns (insn);
3906 /* Substitute the chosen reload regs from reload_reg_rtx
3907 into the insn's body (or perhaps into the bodies of other
3908 load and store insn that we just made for reloading
3909 and that we moved the structure into). */
3912 /* If this was an ASM, make sure that all the reload insns
3913 we have generated are valid. If not, give an error
3916 if (asm_noperands (PATTERN (insn)) >= 0)
3917 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3918 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3919 && (recog_memoized (p) < 0
3920 || (insn_extract (p),
3921 ! constrain_operands (INSN_CODE (p), 1))))
3923 error_for_asm (insn,
3924 "`asm' operand requires impossible reload");
3926 NOTE_SOURCE_FILE (p) = 0;
3927 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3930 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3931 is no longer validly lying around to save a future reload.
3932 Note that this does not detect pseudos that were reloaded
3933 for this insn in order to be stored in
3934 (obeying register constraints). That is correct; such reload
3935 registers ARE still valid. */
3936 note_stores (oldpat, forget_old_reloads_1);
3938 /* There may have been CLOBBER insns placed after INSN. So scan
3939 between INSN and NEXT and use them to forget old reloads. */
3940 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3941 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3942 note_stores (PATTERN (x), forget_old_reloads_1);
3945 /* Likewise for regs altered by auto-increment in this insn.
3946 But note that the reg-notes are not changed by reloading:
3947 they still contain the pseudo-regs, not the spill regs. */
3948 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3949 if (REG_NOTE_KIND (x) == REG_INC)
3951 /* See if this pseudo reg was reloaded in this insn.
3952 If so, its last-reload info is still valid
3953 because it is based on this insn's reload. */
3954 for (i = 0; i < n_reloads; i++)
3955 if (reload_out[i] == XEXP (x, 0))
3959 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3963 /* A reload reg's contents are unknown after a label. */
3964 if (GET_CODE (insn) == CODE_LABEL)
3965 for (i = 0; i < n_spills; i++)
3967 reg_reloaded_contents[i] = -1;
3968 reg_reloaded_insn[i] = 0;
3971 /* Don't assume a reload reg is still good after a call insn
3972 if it is a call-used reg. */
3973 else if (GET_CODE (insn) == CALL_INSN)
3974 for (i = 0; i < n_spills; i++)
3975 if (call_used_regs[spill_regs[i]])
3977 reg_reloaded_contents[i] = -1;
3978 reg_reloaded_insn[i] = 0;
3981 /* In case registers overlap, allow certain insns to invalidate
3982 particular hard registers. */
3984 #ifdef INSN_CLOBBERS_REGNO_P
3985 for (i = 0 ; i < n_spills ; i++)
3986 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3988 reg_reloaded_contents[i] = -1;
3989 reg_reloaded_insn[i] = 0;
4001 /* Discard all record of any value reloaded from X,
4002 or reloaded in X from someplace else;
4003 unless X is an output reload reg of the current insn.
4005 X may be a hard reg (the reload reg)
4006 or it may be a pseudo reg that was reloaded from. */
4009 forget_old_reloads_1 (x, ignored)
4017 /* note_stores does give us subregs of hard regs. */
4018 while (GET_CODE (x) == SUBREG)
4020 offset += SUBREG_WORD (x);
4024 if (GET_CODE (x) != REG)
4027 regno = REGNO (x) + offset;
/* For a pseudo, only the reg_last_reload_reg handling at the end
   applies; for a hard reg, also invalidate any spill regs covered.  */
4029 if (regno >= FIRST_PSEUDO_REGISTER)
4034 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4035 /* Storing into a spilled-reg invalidates its contents.
4036 This can happen if a block-local pseudo is allocated to that reg
4037 and it wasn't spilled because this block's total need is 0.
4038 Then some insn might have an optional reload and use this reg. */
4039 for (i = 0; i < nr; i++)
4040 if (spill_reg_order[regno + i] >= 0
4041 /* But don't do this if the reg actually serves as an output
4042 reload reg in the current instruction. */
4044 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4046 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4047 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4051 /* Since value of X has changed,
4052 forget any value previously copied from it. */
4055 /* But don't forget a copy if this is the output reload
4056 that establishes the copy's validity. */
/* NOTE(review): the regno + nr indexing suggests this sits inside a
   loop (e.g. while (nr-- > 0)) whose header is not visible in this
   excerpt — confirm against the full source.  */
4057 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4058 reg_last_reload_reg[regno + nr] = 0;
4061 /* For each reload, the mode of the reload register. */
4062 static enum machine_mode reload_mode[MAX_RELOADS];
4064 /* For each reload, the largest number of registers it will require. */
4065 static int reload_nregs[MAX_RELOADS];
4067 /* Comparison function for qsort to decide which of two reloads
4068 should be handled first. *P1 and *P2 are the reload numbers. */
/* qsort comparison function deciding which of two reloads should be
   allocated a reload register first.  P1 and P2 point at reload
   numbers; a negative result means *P1 is more urgent than *P2.  */
4071 reload_reg_class_lower (p1, p2)
4074 register int r1 = *p1, r2 = *p2;
4077 /* Consider required reloads before optional ones. */
4078 t = reload_optional[r1] - reload_optional[r2];
4082 /* Count all solitary classes before non-solitary ones. */
4083 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4084 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4088 /* Aside from solitaires, consider all multi-reg groups first. */
4089 t = reload_nregs[r2] - reload_nregs[r1];
4093 /* Consider reloads in order of increasing reg-class number. */
4094 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4098 /* If reloads are equally urgent, sort by reload number,
4099 so that the results of qsort leave nothing to chance. */
4103 /* The following HARD_REG_SETs indicate when each hard register is
4104 used for a reload of various parts of the current insn. */
4106 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4107 static HARD_REG_SET reload_reg_used;
4108 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4109 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4110 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4111 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4112 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4113 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4114 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4115 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4116 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4117 static HARD_REG_SET reload_reg_used_in_op_addr;
4118 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4119 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4120 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4121 static HARD_REG_SET reload_reg_used_in_insn;
4122 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4123 static HARD_REG_SET reload_reg_used_in_other_addr;
4125 /* If reg is in use as a reload reg for any sort of reload. */
4126 static HARD_REG_SET reload_reg_used_at_all;
4128 /* If reg is used as an inherited reload.  We just mark the first register
     of the group.  */
4130 static HARD_REG_SET reload_reg_used_for_inherit;
4132 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4133 TYPE. MODE is used to indicate how many consecutive regs are
/* Mark the hard regs [REGNO, REGNO + HARD_REGNO_NREGS (REGNO, MODE)) as in
   use for a reload of kind TYPE for operand OPNUM.  Each reg is set both in
   the TYPE-specific HARD_REG_SET and in reload_reg_used_at_all.
   NOTE(review): the `switch (type)` line, `break`s, and braces are omitted
   in this excerpt; only the case labels and SET calls are visible.  */
4137 mark_reload_reg_in_use (regno, opnum, type, mode)
4140 enum reload_type type;
4141 enum machine_mode mode;
4143 int nregs = HARD_REGNO_NREGS (regno, mode);
4146 for (i = regno; i < nregs + regno; i++)
     /* Presumably the RELOAD_OTHER case -- its case label is not visible
        in this copy; reload_reg_used is documented above as the
        RELOAD_OTHER set.  */
4151 SET_HARD_REG_BIT (reload_reg_used, i);
4154 case RELOAD_FOR_INPUT_ADDRESS:
4155 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4158 case RELOAD_FOR_OUTPUT_ADDRESS:
4159 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4162 case RELOAD_FOR_OPERAND_ADDRESS:
4163 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4166 case RELOAD_FOR_OPADDR_ADDR:
4167 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4170 case RELOAD_FOR_OTHER_ADDRESS:
4171 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4174 case RELOAD_FOR_INPUT:
4175 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4178 case RELOAD_FOR_OUTPUT:
4179 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4182 case RELOAD_FOR_INSN:
4183 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
/* Unconditionally record the reg in the catch-all set as well.  */
4187 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4191 /* Similarly, but show REGNO is no longer in use for a reload. */
/* Inverse of mark_reload_reg_in_use: clear the hard regs
   [REGNO, REGNO + HARD_REGNO_NREGS (REGNO, MODE)) from the HARD_REG_SET
   matching TYPE/OPNUM.  No line clearing reload_reg_used_at_all appears in
   this excerpt.  The `switch (type)` line, `break`s, and braces are also
   omitted by the extraction.  */
4194 clear_reload_reg_in_use (regno, opnum, type, mode)
4197 enum reload_type type;
4198 enum machine_mode mode;
4200 int nregs = HARD_REGNO_NREGS (regno, mode);
4203 for (i = regno; i < nregs + regno; i++)
     /* Presumably the RELOAD_OTHER case (label not visible here).  */
4208 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4211 case RELOAD_FOR_INPUT_ADDRESS:
4212 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4215 case RELOAD_FOR_OUTPUT_ADDRESS:
4216 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4219 case RELOAD_FOR_OPERAND_ADDRESS:
4220 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4223 case RELOAD_FOR_OPADDR_ADDR:
4224 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4227 case RELOAD_FOR_OTHER_ADDRESS:
4228 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4231 case RELOAD_FOR_INPUT:
4232 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4235 case RELOAD_FOR_OUTPUT:
4236 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4239 case RELOAD_FOR_INSN:
4240 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4246 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4247 specified by OPNUM and TYPE. */
/* Return 1 if hard reg REGNO is free for a reload of kind TYPE for operand
   OPNUM, i.e. it does not conflict with any use already recorded in the
   reload_reg_used* sets above.
   NOTE(review): `return 0;`/`return 1;` lines, braces, and the `switch`
   line are omitted in this excerpt; each visible test is presumably
   followed by a failure return.  */
4250 reload_reg_free_p (regno, opnum, type)
4253 enum reload_type type;
4257 /* In use for a RELOAD_OTHER means it's not available for anything except
4258 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4259 to be used only for inputs. */
4261 if (type != RELOAD_FOR_OTHER_ADDRESS
4262 && TEST_HARD_REG_BIT (reload_reg_used, regno))
     /* Presumably the RELOAD_OTHER case of a switch on TYPE begins here
        (label not visible in this copy).  */
4268 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4269 we can't use it for RELOAD_OTHER. */
4270 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4271 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4272 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4275 for (i = 0; i < reload_n_operands; i++)
4276 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4277 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4278 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4279 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4284 case RELOAD_FOR_INPUT:
4285 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4286 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4289 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4292 /* If it is used for some other input, can't use it. */
4293 for (i = 0; i < reload_n_operands; i++)
4294 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4297 /* If it is used in a later operand's address, can't use it. */
4298 for (i = opnum + 1; i < reload_n_operands; i++)
4299 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4304 case RELOAD_FOR_INPUT_ADDRESS:
4305 /* Can't use a register if it is used for an input address for this
4306 operand or used as an input in an earlier one. */
4307 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4310 for (i = 0; i < opnum; i++)
4311 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4316 case RELOAD_FOR_OUTPUT_ADDRESS:
4317 /* Can't use a register if it is used for an output address for this
4318 operand or used as an output in this or a later operand. */
4319 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4322 for (i = opnum; i < reload_n_operands; i++)
4323 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4328 case RELOAD_FOR_OPERAND_ADDRESS:
4329 for (i = 0; i < reload_n_operands; i++)
4330 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4333 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4334 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4336 case RELOAD_FOR_OPADDR_ADDR:
4337 for (i = 0; i < reload_n_operands; i++)
4338 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4341 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4343 case RELOAD_FOR_OUTPUT:
4344 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4345 outputs, or an operand address for this or an earlier output. */
4346 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4349 for (i = 0; i < reload_n_operands; i++)
4350 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4353 for (i = 0; i <= opnum; i++)
4354 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4359 case RELOAD_FOR_INSN:
4360 for (i = 0; i < reload_n_operands; i++)
4361 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4362 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4365 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4366 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4368 case RELOAD_FOR_OTHER_ADDRESS:
4369 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4374 /* Return 1 if the value in reload reg REGNO, as used by a reload
4375 needed for the part of the insn specified by OPNUM and TYPE,
4376 is not in use for a reload in any prior part of the insn.
4378 We can assume that the reload reg was already tested for availability
4379 at the time it is needed, and we should not check this again,
4380 in case the reg has already been marked in use. */
/* Return 1 if the value in reload reg REGNO (serving OPNUM/TYPE) is not in
   use for a reload in any PRIOR part of the insn -- the ordering here is
   over the temporal parts of an insn's reloads, unlike reload_reg_free_p
   which checks the whole insn.
   NOTE(review): the `switch (type)` line, braces, and failure `return 0;`
   lines are omitted in this excerpt.  */
4383 reload_reg_free_before_p (regno, opnum, type)
4386 enum reload_type type;
4392 case RELOAD_FOR_OTHER_ADDRESS:
4393 /* These always come first. */
4397 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4399 /* If this use is for part of the insn,
4400 check the reg is not in use for any prior part. It is tempting
4401 to try to do this by falling through from objects that occur
4402 later in the insn to ones that occur earlier, but that will not
4403 correctly take into account the fact that here we MUST ignore
4404 things that would prevent the register from being allocated in
4405 the first place, since we know that it was allocated. */
4407 case RELOAD_FOR_OUTPUT_ADDRESS:
4408 /* Earlier reloads are for earlier outputs or their addresses,
4409 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4410 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4412 for (i = 0; i < opnum; i++)
4413 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4414 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4417 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4420 for (i = 0; i < reload_n_operands; i++)
4421 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4422 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4425 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4426 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4427 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4429 case RELOAD_FOR_OUTPUT:
4430 /* This can't be used in the output address for this operand and
4431 anything that can't be used for it, except that we've already
4432 tested for RELOAD_FOR_INSN objects. */
4434 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4437 for (i = 0; i < opnum; i++)
4438 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4439 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4442 for (i = 0; i < reload_n_operands; i++)
4443 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4444 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4445 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4448 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4450 case RELOAD_FOR_OPERAND_ADDRESS:
4451 case RELOAD_FOR_OPADDR_ADDR:
4452 case RELOAD_FOR_INSN:
4453 /* These can't conflict with inputs, or each other, so all we have to
4454 test is input addresses and the addresses of OTHER items. */
4456 for (i = 0; i < reload_n_operands; i++)
4457 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4460 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4462 case RELOAD_FOR_INPUT:
4463 /* The only things earlier are the address for this and
4464 earlier inputs, other inputs (which we know we don't conflict
4465 with), and addresses of RELOAD_OTHER objects. */
4467 for (i = 0; i <= opnum; i++)
4468 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4471 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4473 case RELOAD_FOR_INPUT_ADDRESS:
4474 /* Similarly, all we have to check is for use in earlier inputs'
4476 for (i = 0; i < opnum; i++)
4477 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4480 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4485 /* Return 1 if the value in reload reg REGNO, as used by a reload
4486 needed for the part of the insn specified by OPNUM and TYPE,
4487 is still available in REGNO at the end of the insn.
4489 We can assume that the reload reg was already tested for availability
4490 at the time it is needed, and we should not check this again,
4491 in case the reg has already been marked in use. */
/* Return 1 if the value placed in reload reg REGNO (serving OPNUM/TYPE) is
   still available in that reg at the END of the insn -- i.e. no LATER part
   of the insn reuses the same register.
   NOTE(review): the `switch (type)` line, braces, and the `return 0;`/
   `return 1;` lines are omitted in this excerpt.  */
4494 reload_reg_reaches_end_p (regno, opnum, type)
4497 enum reload_type type;
     /* Presumably the RELOAD_OTHER case (label not visible here).  */
4504 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4505 its value must reach the end. */
4508 /* If this use is for part of the insn,
4509 its value reaches if no subsequent part uses the same register.
4510 Just like the above function, don't try to do this with lots
4513 case RELOAD_FOR_OTHER_ADDRESS:
4514 /* Here we check for everything else, since these don't conflict
4515 with anything else and everything comes later. */
4517 for (i = 0; i < reload_n_operands; i++)
4518 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4519 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4520 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4521 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4524 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4525 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4526 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4528 case RELOAD_FOR_INPUT_ADDRESS:
4529 /* Similar, except that we check only for this and subsequent inputs
4530 and the address of only subsequent inputs and we do not need
4531 to check for RELOAD_OTHER objects since they are known not to
4534 for (i = opnum; i < reload_n_operands; i++)
4535 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4538 for (i = opnum + 1; i < reload_n_operands; i++)
4539 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4542 for (i = 0; i < reload_n_operands; i++)
4543 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4544 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4547 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4550 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4551 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4553 case RELOAD_FOR_INPUT:
4554 /* Similar to input address, except we start at the next operand for
4555 both input and input address and we do not check for
4556 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4559 for (i = opnum + 1; i < reload_n_operands; i++)
4560 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4561 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4564 /* ... fall through ... */
4566 case RELOAD_FOR_OPERAND_ADDRESS:
4567 /* Check outputs and their addresses. */
4569 for (i = 0; i < reload_n_operands; i++)
4570 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4571 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4576 case RELOAD_FOR_OPADDR_ADDR:
4577 for (i = 0; i < reload_n_operands; i++)
4578 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4579 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4582 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4583 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4585 case RELOAD_FOR_INSN:
4586 /* These conflict with other outputs with RELOAD_OTHER. So
4587 we need only check for output addresses. */
4591 /* ... fall through ... */
4593 case RELOAD_FOR_OUTPUT:
4594 case RELOAD_FOR_OUTPUT_ADDRESS:
4595 /* We already know these can't conflict with a later output. So the
4596 only thing to check are later output addresses. */
4597 for (i = opnum + 1; i < reload_n_operands; i++)
4598 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4607 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4610 This function uses the same algorithm as reload_reg_free_p above. */
/* Return 1 if reloads R1 and R2 cannot share a register, deciding by their
   reload_when_needed types and operand numbers (same ordering rules as
   reload_reg_free_p above).
   NOTE(review): the `switch (r1_type)` line and braces are omitted in this
   excerpt.  */
4613 reloads_conflict (r1, r2)
4616 enum reload_type r1_type = reload_when_needed[r1];
4617 enum reload_type r2_type = reload_when_needed[r2];
4618 int r1_opnum = reload_opnum[r1];
4619 int r2_opnum = reload_opnum[r2];
4621 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
4623 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
4626 /* Otherwise, check conflicts differently for each type. */
4630 case RELOAD_FOR_INPUT:
4631 return (r2_type == RELOAD_FOR_INSN
4632 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4633 || r2_type == RELOAD_FOR_OPADDR_ADDR
4634 || r2_type == RELOAD_FOR_INPUT
4635 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4637 case RELOAD_FOR_INPUT_ADDRESS:
4638 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4639 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4641 case RELOAD_FOR_OUTPUT_ADDRESS:
4642 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4643 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4645 case RELOAD_FOR_OPERAND_ADDRESS:
4646 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4647 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4649 case RELOAD_FOR_OPADDR_ADDR:
4650 return (r2_type == RELOAD_FOR_INPUT
4651 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4653 case RELOAD_FOR_OUTPUT:
4654 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4655 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4656 && r2_opnum >= r1_opnum));
4658 case RELOAD_FOR_INSN:
4659 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4660 || r2_type == RELOAD_FOR_INSN
4661 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4663 case RELOAD_FOR_OTHER_ADDRESS:
4664 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
/* Presumably the RELOAD_OTHER / default case: conflicts with everything
   except RELOAD_FOR_OTHER_ADDRESS (its case label is not visible in this
   copy -- confirm against the full source).  */
4667 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4674 /* Vector of reload-numbers showing the order in which the reloads should
     be processed (comment truncated in this copy -- presumably "be
     processed"; it is filled and qsorted in choose_reload_regs).  */
4676 short reload_order[MAX_RELOADS];
4678 /* Indexed by reload number, 1 if incoming value
4679 inherited from previous insns. */
4680 char reload_inherited[MAX_RELOADS];
4682 /* For an inherited reload, this is the insn the reload was inherited from,
4683 if we know it. Otherwise, this is 0. */
4684 rtx reload_inheritance_insn[MAX_RELOADS];
4686 /* If non-zero, this is a place to get the value of the reload,
4687 rather than using reload_in. */
4688 rtx reload_override_in[MAX_RELOADS];
4690 /* For each reload, the index in spill_regs of the spill register used,
4691 or -1 if we did not need one of the spill registers for this reload. */
4692 int reload_spill_index[MAX_RELOADS];
4694 /* Find a spill register to use as a reload register for reload R.
4695 LAST_RELOAD is non-zero if this is the last reload for the insn being
4698 Set reload_reg_rtx[R] to the register allocated.
4700 If NOERROR is nonzero, we return 1 if successful,
4701 or 0 if we couldn't find a spill reg and we didn't change anything. */
/* Find a spill register to serve as the reload register for reload R of
   INSN, set reload_reg_rtx[r] and reload_spill_index[r], and mark it in
   use.  On failure with NOERROR zero, report a fatal error (compiler bug)
   or an asm-operand error and disable the reload.
   NOTE(review): many lines (parameter declarations, braces, `break`s,
   `return`s, and some conditions) are omitted in this excerpt.  */
4704 allocate_reload_reg (r, insn, last_reload, noerror)
4716 /* If we put this reload ahead, thinking it is a group,
4717 then insist on finding a group. Otherwise we can grab a
4718 reg that some other reload needs.
4719 (That can happen when we have a 68000 DATA_OR_FP_REG
4720 which is a group of data regs or one fp reg.)
4721 We need not be so restrictive if there are no more reloads
4724 ??? Really it would be nicer to have smarter handling
4725 for that kind of reg class, where a problem like this is normal.
4726 Perhaps those classes should be avoided for reloading
4727 by use of more alternatives. */
4729 int force_group = reload_nregs[r] > 1 && ! last_reload;
4731 /* If we want a single register and haven't yet found one,
4732 take any reg in the right class and not in use.
4733 If we want a consecutive group, here is where we look for it.
4735 We use two passes so we can first look for reload regs to
4736 reuse, which are already in use for other reloads in this insn,
4737 and only then use additional registers.
4738 I think that maximizing reuse is needed to make sure we don't
4739 run out of reload regs. Suppose we have three reloads, and
4740 reloads A and B can share regs. These need two regs.
4741 Suppose A and B are given different regs.
4742 That leaves none for C. */
4743 for (pass = 0; pass < 2; pass++)
4745 /* I is the index in spill_regs.
4746 We advance it round-robin between insns to use all spill regs
4747 equally, so that inherited reloads have a chance
4748 of leapfrogging each other. Don't do this, however, when we have
4749 group needs and failure would be fatal; if we only have a relatively
4750 small number of spill registers, and more than one of them has
4751 group needs, then by starting in the middle, we may end up
4752 allocating the first one in such a way that we are not left with
4753 sufficient groups to handle the rest. */
4755 if (noerror || ! force_group)
4760 for (count = 0; count < n_spills; count++)
4762 int class = (int) reload_reg_class[r];
4764 i = (i + 1) % n_spills;
     /* Candidate must be free for this OPNUM/TYPE, in the reload's class,
        and able to hold reload_mode[r].  */
4766 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4767 reload_when_needed[r])
4768 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4769 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4770 /* Look first for regs to share, then for unshared. But
4771 don't share regs used for inherited reloads; they are
4772 the ones we want to preserve. */
4774 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4776 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4779 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4780 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4781 (on 68000) got us two FP regs. If NR is 1,
4782 we would reject both of them. */
4784 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4785 /* If we need only one reg, we have already won. */
4788 /* But reject a single reg if we demand a group. */
4793 /* Otherwise check that as many consecutive regs as we need
4795 Also, don't use for a group registers that are
4796 needed for nongroups. */
4797 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
     /* NOTE(review): only the last reg of the group (spill_regs[i]+nr-1)
        is checked in this excerpt; presumably a loop over the whole group
        exists in the omitted lines -- confirm against the full source.  */
4800 regno = spill_regs[i] + nr - 1;
4801 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4802 && spill_reg_order[regno] >= 0
4803 && reload_reg_free_p (regno, reload_opnum[r],
4804 reload_when_needed[r])
4805 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4815 /* If we found something on pass 1, omit pass 2. */
4816 if (count < n_spills)
4820 /* We should have found a spill register by now. */
4821 if (count == n_spills)
4828 /* I is the index in SPILL_REG_RTX of the reload register we are to
4829 allocate. Get an rtx for it and find its register number. */
4831 new = spill_reg_rtx[i];
4833 if (new == 0 || GET_MODE (new) != reload_mode[r])
4834 spill_reg_rtx[i] = new
4835 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4837 regno = true_regnum (new);
4839 /* Detect when the reload reg can't hold the reload mode.
4840 This used to be one `if', but Sequent compiler can't handle that. */
4841 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4843 enum machine_mode test_mode = VOIDmode;
4845 test_mode = GET_MODE (reload_in[r]);
4846 /* If reload_in[r] has VOIDmode, it means we will load it
4847 in whatever mode the reload reg has: to wit, reload_mode[r].
4848 We have already tested that for validity. */
4849 /* Aside from that, we need to test that the expressions
4850 to reload from or into have modes which are valid for this
4851 reload register. Otherwise the reload insns would be invalid. */
4852 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4853 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4854 if (! (reload_out[r] != 0
4855 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4857 /* The reg is OK. */
4860 /* Mark as in use for this insn the reload regs we use
4862 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4863 reload_when_needed[r], reload_mode[r]);
4865 reload_reg_rtx[r] = new;
4866 reload_spill_index[r] = i;
4871 /* The reg is not OK. */
     /* Failure path: decide whose fault the missing spill reg is.  */
4876 if (asm_noperands (PATTERN (insn)) < 0)
4877 /* It's the compiler's fault. */
4878 fatal_insn ("Could not find a spill register", insn);
4880 /* It's the user's fault; the operand's mode and constraint
4881 don't match. Disable this reload so we don't crash in final. */
4882 error_for_asm (insn,
4883 "`asm' operand constraint incompatible with operand size");
4886 reload_reg_rtx[r] = 0;
4887 reload_optional[r] = 1;
4888 reload_secondary_p[r] = 1;
4893 /* Assign hard reg targets for the pseudo-registers we must reload
4894 into hard regs for this insn.
4895 Also output the instructions to copy them in and out of the hard regs.
4897 For machines with register classes, we are responsible for
4898 finding a reload reg in the proper class. */
4901 choose_reload_regs (insn, avoid_return_reg)
4903 rtx avoid_return_reg;
4906 int max_group_size = 1;
4907 enum reg_class group_class = NO_REGS;
4910 rtx save_reload_reg_rtx[MAX_RELOADS];
4911 char save_reload_inherited[MAX_RELOADS];
4912 rtx save_reload_inheritance_insn[MAX_RELOADS];
4913 rtx save_reload_override_in[MAX_RELOADS];
4914 int save_reload_spill_index[MAX_RELOADS];
4915 HARD_REG_SET save_reload_reg_used;
4916 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4917 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4918 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4919 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4920 HARD_REG_SET save_reload_reg_used_in_op_addr;
4921 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4922 HARD_REG_SET save_reload_reg_used_in_insn;
4923 HARD_REG_SET save_reload_reg_used_in_other_addr;
4924 HARD_REG_SET save_reload_reg_used_at_all;
4926 bzero (reload_inherited, MAX_RELOADS);
4927 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4928 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4930 CLEAR_HARD_REG_SET (reload_reg_used);
4931 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4932 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4933 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4934 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4935 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4937 for (i = 0; i < reload_n_operands; i++)
4939 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4940 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4941 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4942 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4945 #ifdef SMALL_REGISTER_CLASSES
4946 /* Don't bother with avoiding the return reg
4947 if we have no mandatory reload that could use it. */
4948 if (avoid_return_reg)
4951 int regno = REGNO (avoid_return_reg);
4953 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4956 for (r = regno; r < regno + nregs; r++)
4957 if (spill_reg_order[r] >= 0)
4958 for (j = 0; j < n_reloads; j++)
4959 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4960 && (reload_in[j] != 0 || reload_out[j] != 0
4961 || reload_secondary_p[j])
4963 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4966 avoid_return_reg = 0;
4968 #endif /* SMALL_REGISTER_CLASSES */
4970 #if 0 /* Not needed, now that we can always retry without inheritance. */
4971 /* See if we have more mandatory reloads than spill regs.
4972 If so, then we cannot risk optimizations that could prevent
4973 reloads from sharing one spill register.
4975 Since we will try finding a better register than reload_reg_rtx
4976 unless it is equal to reload_in or reload_out, count such reloads. */
4980 #ifdef SMALL_REGISTER_CLASSES
4981 int tem = (avoid_return_reg != 0);
4983 for (j = 0; j < n_reloads; j++)
4984 if (! reload_optional[j]
4985 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4986 && (reload_reg_rtx[j] == 0
4987 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4988 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4995 #ifdef SMALL_REGISTER_CLASSES
4996 /* Don't use the subroutine call return reg for a reload
4997 if we are supposed to avoid it. */
4998 if (avoid_return_reg)
5000 int regno = REGNO (avoid_return_reg);
5002 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5005 for (r = regno; r < regno + nregs; r++)
5006 if (spill_reg_order[r] >= 0)
5007 SET_HARD_REG_BIT (reload_reg_used, r);
5009 #endif /* SMALL_REGISTER_CLASSES */
5011 /* In order to be certain of getting the registers we need,
5012 we must sort the reloads into order of increasing register class.
5013 Then our grabbing of reload registers will parallel the process
5014 that provided the reload registers.
5016 Also note whether any of the reloads wants a consecutive group of regs.
5017 If so, record the maximum size of the group desired and what
5018 register class contains all the groups needed by this insn. */
5020 for (j = 0; j < n_reloads; j++)
5022 reload_order[j] = j;
5023 reload_spill_index[j] = -1;
5026 = (reload_inmode[j] == VOIDmode
5027 || (GET_MODE_SIZE (reload_outmode[j])
5028 > GET_MODE_SIZE (reload_inmode[j])))
5029 ? reload_outmode[j] : reload_inmode[j];
5031 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5033 if (reload_nregs[j] > 1)
5035 max_group_size = MAX (reload_nregs[j], max_group_size);
5036 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5039 /* If we have already decided to use a certain register,
5040 don't use it in another way. */
5041 if (reload_reg_rtx[j])
5042 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5043 reload_when_needed[j], reload_mode[j]);
5047 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5049 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5050 sizeof reload_reg_rtx);
5051 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5052 bcopy ((char *) reload_inheritance_insn,
5053 (char *) save_reload_inheritance_insn,
5054 sizeof reload_inheritance_insn);
5055 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5056 sizeof reload_override_in);
5057 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5058 sizeof reload_spill_index);
5059 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5060 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5061 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5062 reload_reg_used_in_op_addr);
5064 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5065 reload_reg_used_in_op_addr_reload);
5067 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5068 reload_reg_used_in_insn);
5069 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5070 reload_reg_used_in_other_addr);
5072 for (i = 0; i < reload_n_operands; i++)
5074 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5075 reload_reg_used_in_output[i]);
5076 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5077 reload_reg_used_in_input[i]);
5078 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5079 reload_reg_used_in_input_addr[i]);
5080 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5081 reload_reg_used_in_output_addr[i]);
5084 /* If -O, try first with inheritance, then turning it off.
5085 If not -O, don't do inheritance.
5086 Using inheritance when not optimizing leads to paradoxes
5087 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5088 because one side of the comparison might be inherited. */
5090 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5092 /* Process the reloads in order of preference just found.
5093 Beyond this point, subregs can be found in reload_reg_rtx.
5095 This used to look for an existing reloaded home for all
5096 of the reloads, and only then perform any new reloads.
5097 But that could lose if the reloads were done out of reg-class order
5098 because a later reload with a looser constraint might have an old
5099 home in a register needed by an earlier reload with a tighter constraint.
5101 To solve this, we make two passes over the reloads, in the order
5102 described above. In the first pass we try to inherit a reload
5103 from a previous insn. If there is a later reload that needs a
5104 class that is a proper subset of the class being processed, we must
5105 also allocate a spill register during the first pass.
5107 Then make a second pass over the reloads to allocate any reloads
5108 that haven't been given registers yet. */
5110 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5112 for (j = 0; j < n_reloads; j++)
5114 register int r = reload_order[j];
5116 /* Ignore reloads that got marked inoperative. */
5117 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5120 /* If find_reloads chose to use reload_in or reload_out as a reload
5121 register, we don't need to choose one. Otherwise, try even if it found
5122 one since we might save an insn if we find the value lying around. */
5123 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5124 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5125 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5128 #if 0 /* No longer needed for correct operation.
5129 It might give better code, or might not; worth an experiment? */
5130 /* If this is an optional reload, we can't inherit from earlier insns
5131 until we are sure that any non-optional reloads have been allocated.
5132 The following code takes advantage of the fact that optional reloads
5133 are at the end of reload_order. */
5134 if (reload_optional[r] != 0)
5135 for (i = 0; i < j; i++)
5136 if ((reload_out[reload_order[i]] != 0
5137 || reload_in[reload_order[i]] != 0
5138 || reload_secondary_p[reload_order[i]])
5139 && ! reload_optional[reload_order[i]]
5140 && reload_reg_rtx[reload_order[i]] == 0)
5141 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5144 /* First see if this pseudo is already available as reloaded
5145 for a previous insn. We cannot try to inherit for reloads
5146 that are smaller than the maximum number of registers needed
5147 for groups unless the register we would allocate cannot be used
5150 We could check here to see if this is a secondary reload for
5151 an object that is already in a register of the desired class.
5152 This would avoid the need for the secondary reload register.
5153 But this is complex because we can't easily determine what
5154 objects might want to be loaded via this reload. So let a register
5155 be allocated here. In `emit_reload_insns' we suppress one of the
5156 loads in the case described above. */
5160 register int regno = -1;
5161 enum machine_mode mode;
5163 if (reload_in[r] == 0)
5165 else if (GET_CODE (reload_in[r]) == REG)
5167 regno = REGNO (reload_in[r]);
5168 mode = GET_MODE (reload_in[r]);
5170 else if (GET_CODE (reload_in_reg[r]) == REG)
5172 regno = REGNO (reload_in_reg[r]);
5173 mode = GET_MODE (reload_in_reg[r]);
5176 /* This won't work, since REGNO can be a pseudo reg number.
5177 Also, it takes much more hair to keep track of all the things
5178 that can invalidate an inherited reload of part of a pseudoreg. */
5179 else if (GET_CODE (reload_in[r]) == SUBREG
5180 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5181 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5184 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5186 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5188 if (reg_reloaded_contents[i] == regno
5189 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5190 >= GET_MODE_SIZE (mode))
5191 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5192 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5194 && (reload_nregs[r] == max_group_size
5195 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5197 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5198 reload_when_needed[r])
5199 && reload_reg_free_before_p (spill_regs[i],
5201 reload_when_needed[r]))
5203 /* If a group is needed, verify that all the subsequent
5204 registers still have their values intact. */
5206 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5209 for (k = 1; k < nr; k++)
5210 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5218 /* We found a register that contains the
5219 value we need. If this register is the
5220 same as an `earlyclobber' operand of the
5221 current insn, just mark it as a place to
5222 reload from since we can't use it as the
5223 reload register itself. */
5225 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5226 if (reg_overlap_mentioned_for_reload_p
5227 (reg_last_reload_reg[regno],
5228 reload_earlyclobbers[i1]))
5231 if (i1 != n_earlyclobbers
5232 /* Don't really use the inherited spill reg
5233 if we need it wider than we've got it. */
5234 || (GET_MODE_SIZE (reload_mode[r])
5235 > GET_MODE_SIZE (mode)))
5236 reload_override_in[r] = reg_last_reload_reg[regno];
5240 /* We can use this as a reload reg. */
5241 /* Mark the register as in use for this part of
5243 mark_reload_reg_in_use (spill_regs[i],
5245 reload_when_needed[r],
5247 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5248 reload_inherited[r] = 1;
5249 reload_inheritance_insn[r]
5250 = reg_reloaded_insn[i];
5251 reload_spill_index[r] = i;
5252 for (k = 0; k < nr; k++)
5253 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5261 /* Here's another way to see if the value is already lying around. */
5263 && reload_in[r] != 0
5264 && ! reload_inherited[r]
5265 && reload_out[r] == 0
5266 && (CONSTANT_P (reload_in[r])
5267 || GET_CODE (reload_in[r]) == PLUS
5268 || GET_CODE (reload_in[r]) == REG
5269 || GET_CODE (reload_in[r]) == MEM)
5270 && (reload_nregs[r] == max_group_size
5271 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5274 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5275 -1, NULL_PTR, 0, reload_mode[r]);
5280 if (GET_CODE (equiv) == REG)
5281 regno = REGNO (equiv);
5282 else if (GET_CODE (equiv) == SUBREG)
5284 /* This must be a SUBREG of a hard register.
5285 Make a new REG since this might be used in an
5286 address and not all machines support SUBREGs
5288 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5289 equiv = gen_rtx (REG, reload_mode[r], regno);
5295 /* If we found a spill reg, reject it unless it is free
5296 and of the desired class. */
5298 && ((spill_reg_order[regno] >= 0
5299 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5300 reload_when_needed[r]))
5301 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5305 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5308 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5311 /* We found a register that contains the value we need.
5312 If this register is the same as an `earlyclobber' operand
5313 of the current insn, just mark it as a place to reload from
5314 since we can't use it as the reload register itself. */
5317 for (i = 0; i < n_earlyclobbers; i++)
5318 if (reg_overlap_mentioned_for_reload_p (equiv,
5319 reload_earlyclobbers[i]))
5321 reload_override_in[r] = equiv;
5326 /* JRV: If the equiv register we have found is explicitly
5327 clobbered in the current insn, mark but don't use, as above. */
5329 if (equiv != 0 && regno_clobbered_p (regno, insn))
5331 reload_override_in[r] = equiv;
5335 /* If we found an equivalent reg, say no code need be generated
5336 to load it, and use it as our reload reg. */
5337 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5339 reload_reg_rtx[r] = equiv;
5340 reload_inherited[r] = 1;
5341 /* If it is a spill reg,
5342 mark the spill reg as in use for this insn. */
5343 i = spill_reg_order[regno];
5346 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5348 mark_reload_reg_in_use (regno, reload_opnum[r],
5349 reload_when_needed[r],
5351 for (k = 0; k < nr; k++)
5352 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5357 /* If we found a register to use already, or if this is an optional
5358 reload, we are done. */
5359 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5362 #if 0 /* No longer needed for correct operation. Might or might not
5363 give better code on the average. Want to experiment? */
5365 /* See if there is a later reload that has a class different from our
5366 class that intersects our class or that requires less register
5367 than our reload. If so, we must allocate a register to this
5368 reload now, since that reload might inherit a previous reload
5369 and take the only available register in our class. Don't do this
5370 for optional reloads since they will force all previous reloads
5371 to be allocated. Also don't do this for reloads that have been
5374 for (i = j + 1; i < n_reloads; i++)
5376 int s = reload_order[i];
5378 if ((reload_in[s] == 0 && reload_out[s] == 0
5379 && ! reload_secondary_p[s])
5380 || reload_optional[s])
5383 if ((reload_reg_class[s] != reload_reg_class[r]
5384 && reg_classes_intersect_p (reload_reg_class[r],
5385 reload_reg_class[s]))
5386 || reload_nregs[s] < reload_nregs[r])
5393 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5397 /* Now allocate reload registers for anything non-optional that
5398 didn't get one yet. */
5399 for (j = 0; j < n_reloads; j++)
5401 register int r = reload_order[j];
5403 /* Ignore reloads that got marked inoperative. */
5404 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5407 /* Skip reloads that already have a register allocated or are
5409 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5412 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5416 /* If that loop got all the way, we have won. */
5421 /* Loop around and try without any inheritance. */
5422 /* First undo everything done by the failed attempt
5423 to allocate with inheritance. */
5424 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5425 sizeof reload_reg_rtx);
5426 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5427 sizeof reload_inherited);
5428 bcopy ((char *) save_reload_inheritance_insn,
5429 (char *) reload_inheritance_insn,
5430 sizeof reload_inheritance_insn);
5431 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5432 sizeof reload_override_in);
5433 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5434 sizeof reload_spill_index);
5435 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5436 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5437 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5438 save_reload_reg_used_in_op_addr);
5439 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5440 save_reload_reg_used_in_op_addr_reload);
5441 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5442 save_reload_reg_used_in_insn);
5443 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5444 save_reload_reg_used_in_other_addr);
5446 for (i = 0; i < reload_n_operands; i++)
5448 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5449 save_reload_reg_used_in_input[i]);
5450 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5451 save_reload_reg_used_in_output[i]);
5452 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5453 save_reload_reg_used_in_input_addr[i]);
5454 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5455 save_reload_reg_used_in_output_addr[i]);
5459 /* If we thought we could inherit a reload, because it seemed that
5460 nothing else wanted the same reload register earlier in the insn,
5461 verify that assumption, now that all reloads have been assigned. */
5463 for (j = 0; j < n_reloads; j++)
5465 register int r = reload_order[j];
5467 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5468 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5470 reload_when_needed[r]))
5471 reload_inherited[r] = 0;
5473 /* If we found a better place to reload from,
5474 validate it in the same fashion, if it is a reload reg. */
5475 if (reload_override_in[r]
5476 && (GET_CODE (reload_override_in[r]) == REG
5477 || GET_CODE (reload_override_in[r]) == SUBREG))
5479 int regno = true_regnum (reload_override_in[r]);
5480 if (spill_reg_order[regno] >= 0
5481 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5482 reload_when_needed[r]))
5483 reload_override_in[r] = 0;
5487 /* Now that reload_override_in is known valid,
5488 actually override reload_in. */
5489 for (j = 0; j < n_reloads; j++)
5490 if (reload_override_in[j])
5491 reload_in[j] = reload_override_in[j];
5493 /* If this reload won't be done because it has been cancelled or is
5494 optional and not inherited, clear reload_reg_rtx so other
5495 routines (such as subst_reloads) don't get confused. */
5496 for (j = 0; j < n_reloads; j++)
5497 if (reload_reg_rtx[j] != 0
5498 && ((reload_optional[j] && ! reload_inherited[j])
5499 || (reload_in[j] == 0 && reload_out[j] == 0
5500 && ! reload_secondary_p[j])))
5502 int regno = true_regnum (reload_reg_rtx[j]);
5504 if (spill_reg_order[regno] >= 0)
5505 clear_reload_reg_in_use (regno, reload_opnum[j],
5506 reload_when_needed[j], reload_mode[j]);
5507 reload_reg_rtx[j] = 0;
5510 /* Record which pseudos and which spill regs have output reloads. */
5511 for (j = 0; j < n_reloads; j++)
5513 register int r = reload_order[j];
5515 i = reload_spill_index[r];
5517 /* I is nonneg if this reload used one of the spill regs.
5518 If reload_reg_rtx[r] is 0, this is an optional reload
5519 that we opted to ignore. */
5520 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5521 && reload_reg_rtx[r] != 0)
5523 register int nregno = REGNO (reload_out[r]);
5526 if (nregno < FIRST_PSEUDO_REGISTER)
5527 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5530 reg_has_output_reload[nregno + nr] = 1;
5534 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5536 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5539 if (reload_when_needed[r] != RELOAD_OTHER
5540 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5541 && reload_when_needed[r] != RELOAD_FOR_INSN)
5547 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5548 reloads of the same item for fear that we might not have enough reload
5549 registers. However, normally they will get the same reload register
5550 and hence actually need not be loaded twice.
5552 Here we check for the most common case of this phenomenon: when we have
5553 a number of reloads for the same object, each of which were allocated
5554 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5555 reload, and is not modified in the insn itself. If we find such,
5556 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5557 This will not increase the number of spill registers needed and will
5558 prevent redundant code. */
5560 #ifdef SMALL_REGISTER_CLASSES
5563 merge_assigned_reloads (insn)
/* Merge reloads of INSN that were given the same reload register and load
   the same value, so the value need only be loaded once.  NOTE(review):
   this is a numbered listing with interior lines elided (the return type,
   parameter declaration, local declarations, and several statements such
   as the `continue's implied by the guard conditions are not visible);
   the comments below describe only what the visible lines establish.  */
5568 /* Scan all the reloads looking for ones that only load values and
5569 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5570 assigned and not modified by INSN. */
5572 for (i = 0; i < n_reloads; i++)
/* Skip reload I unless it is a pure input reload (reload_in set,
   reload_out clear), has a reload register assigned, and that register
   is not clobbered by INSN itself.  */
5574 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5575 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5576 || reg_set_p (reload_reg_rtx[i], insn))
5579 /* Look at all other reloads. Ensure that the only use of this
5580 reload_reg_rtx is in a reload that just loads the same value
5581 as we do. Note that any secondary reloads must be of the identical
5582 class since the values, modes, and result registers are the
5583 same, so we need not do anything with any secondary reloads. */
5585 for (j = 0; j < n_reloads; j++)
/* Only reloads J whose assigned register overlaps I's register matter.  */
5587 if (i == j || reload_reg_rtx[j] == 0
5588 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5592 /* If the reload regs aren't exactly the same (e.g., different modes)
5593 or if the values are different, we can't merge anything with this
5596 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5597 || reload_out[j] != 0 || reload_in[j] == 0
5598 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5602 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5603 we, in fact, found any matching reloads. */
/* Every matching reload J is folded into I: I is promoted to
   RELOAD_OTHER and J's pending replacements are transferred to I
   (the statements cancelling J itself are elided in this listing).  */
5607 for (j = 0; j < n_reloads; j++)
5608 if (i != j && reload_reg_rtx[j] != 0
5609 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5611 reload_when_needed[i] = RELOAD_OTHER;
5613 transfer_replacements (i, j);
5616 /* If this is now RELOAD_OTHER, look for any reloads that load
5617 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5618 if they were for inputs, RELOAD_OTHER for outputs. Note that
5619 this test is equivalent to looking for reloads for this operand
/* NOTE(review): the two uses of reload_when_needed[i] in the loop below
   look suspect -- the guard on the next line already ensures
   reload_when_needed[i] == RELOAD_OTHER, so the `!= RELOAD_OTHER' test
   can never succeed and the `== RELOAD_FOR_INPUT_ADDRESS' comparison can
   never be true.  Presumably both were meant to index with [j]; confirm
   against the upstream source before changing.  */
5622 if (reload_when_needed[i] == RELOAD_OTHER)
5623 for (j = 0; j < n_reloads; j++)
5624 if (reload_in[j] != 0
5625 && reload_when_needed[i] != RELOAD_OTHER
5626 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5628 reload_when_needed[j]
5629 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5630 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5634 #endif /* SMALL_REGISTER_CLASSES */
5636 /* Output insns to reload values in and out of the chosen reload regs. */
5639 emit_reload_insns (insn)
5643 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5644 rtx other_input_address_reload_insns = 0;
5645 rtx other_input_reload_insns = 0;
5646 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5647 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5648 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5649 rtx operand_reload_insns = 0;
5650 rtx other_operand_reload_insns = 0;
5651 rtx following_insn = NEXT_INSN (insn);
5652 rtx before_insn = insn;
5654 /* Values to be put in spill_reg_store are put here first. */
5655 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5657 for (j = 0; j < reload_n_operands; j++)
5658 input_reload_insns[j] = input_address_reload_insns[j]
5659 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5661 /* Now output the instructions to copy the data into and out of the
5662 reload registers. Do these in the order that the reloads were reported,
5663 since reloads of base and index registers precede reloads of operands
5664 and the operands may need the base and index registers reloaded. */
5666 for (j = 0; j < n_reloads; j++)
5669 rtx oldequiv_reg = 0;
5671 if (reload_spill_index[j] >= 0)
5672 new_spill_reg_store[reload_spill_index[j]] = 0;
5675 if (old != 0 && ! reload_inherited[j]
5676 && ! rtx_equal_p (reload_reg_rtx[j], old)
5677 && reload_reg_rtx[j] != 0)
5679 register rtx reloadreg = reload_reg_rtx[j];
5681 enum machine_mode mode;
5684 /* Determine the mode to reload in.
5685 This is very tricky because we have three to choose from.
5686 There is the mode the insn operand wants (reload_inmode[J]).
5687 There is the mode of the reload register RELOADREG.
5688 There is the intrinsic mode of the operand, which we could find
5689 by stripping some SUBREGs.
5690 It turns out that RELOADREG's mode is irrelevant:
5691 we can change that arbitrarily.
5693 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5694 then the reload reg may not support QImode moves, so use SImode.
5695 If foo is in memory due to spilling a pseudo reg, this is safe,
5696 because the QImode value is in the least significant part of a
5697 slot big enough for a SImode. If foo is some other sort of
5698 memory reference, then it is impossible to reload this case,
5699 so previous passes had better make sure this never happens.
5701 Then consider a one-word union which has SImode and one of its
5702 members is a float, being fetched as (SUBREG:SF union:SI).
5703 We must fetch that as SFmode because we could be loading into
5704 a float-only register. In this case OLD's mode is correct.
5706 Consider an immediate integer: it has VOIDmode. Here we need
5707 to get a mode from something else.
5709 In some cases, there is a fourth mode, the operand's
5710 containing mode. If the insn specifies a containing mode for
5711 this operand, it overrides all others.
5713 I am not sure whether the algorithm here is always right,
5714 but it does the right things in those cases. */
5716 mode = GET_MODE (old);
5717 if (mode == VOIDmode)
5718 mode = reload_inmode[j];
5720 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5721 /* If we need a secondary register for this operation, see if
5722 the value is already in a register in that class. Don't
5723 do this if the secondary register will be used as a scratch
5726 if (reload_secondary_in_reload[j] >= 0
5727 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5730 = find_equiv_reg (old, insn,
5731 reload_reg_class[reload_secondary_in_reload[j]],
5732 -1, NULL_PTR, 0, mode);
5735 /* If reloading from memory, see if there is a register
5736 that already holds the same value. If so, reload from there.
5737 We can pass 0 as the reload_reg_p argument because
5738 any other reload has either already been emitted,
5739 in which case find_equiv_reg will see the reload-insn,
5740 or has yet to be emitted, in which case it doesn't matter
5741 because we will use this equiv reg right away. */
5743 if (oldequiv == 0 && optimize
5744 && (GET_CODE (old) == MEM
5745 || (GET_CODE (old) == REG
5746 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5747 && reg_renumber[REGNO (old)] < 0)))
5748 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5749 -1, NULL_PTR, 0, mode);
5753 int regno = true_regnum (oldequiv);
5755 /* If OLDEQUIV is a spill register, don't use it for this
5756 if any other reload needs it at an earlier stage of this insn
5757 or at this stage. */
5758 if (spill_reg_order[regno] >= 0
5759 && (! reload_reg_free_p (regno, reload_opnum[j],
5760 reload_when_needed[j])
5761 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5762 reload_when_needed[j])))
5765 /* If OLDEQUIV is not a spill register,
5766 don't use it if any other reload wants it. */
5767 if (spill_reg_order[regno] < 0)
5770 for (k = 0; k < n_reloads; k++)
5771 if (reload_reg_rtx[k] != 0 && k != j
5772 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5780 /* If it is no cheaper to copy from OLDEQUIV into the
5781 reload register than it would be to move from memory,
5782 don't use it. Likewise, if we need a secondary register
5786 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5787 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5788 reload_reg_class[j])
5789 >= MEMORY_MOVE_COST (mode)))
5790 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5791 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5795 #ifdef SECONDARY_MEMORY_NEEDED
5796 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5797 REGNO_REG_CLASS (regno),
5806 else if (GET_CODE (oldequiv) == REG)
5807 oldequiv_reg = oldequiv;
5808 else if (GET_CODE (oldequiv) == SUBREG)
5809 oldequiv_reg = SUBREG_REG (oldequiv);
5811 /* If we are reloading from a register that was recently stored in
5812 with an output-reload, see if we can prove there was
5813 actually no need to store the old value in it. */
5815 if (optimize && GET_CODE (oldequiv) == REG
5816 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5817 && spill_reg_order[REGNO (oldequiv)] >= 0
5818 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5819 && find_reg_note (insn, REG_DEAD, reload_in[j])
5820 /* This is unsafe if operand occurs more than once in current
5821 insn. Perhaps some occurrences weren't reloaded. */
5822 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5823 delete_output_reload
5824 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5826 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5827 then load RELOADREG from OLDEQUIV. Note that we cannot use
5828 gen_lowpart_common since it can do the wrong thing when
5829 RELOADREG has a multi-word mode. Note that RELOADREG
5830 must always be a REG here. */
5832 if (GET_MODE (reloadreg) != mode)
5833 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5834 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5835 oldequiv = SUBREG_REG (oldequiv);
5836 if (GET_MODE (oldequiv) != VOIDmode
5837 && mode != GET_MODE (oldequiv))
5838 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5840 /* Switch to the right place to emit the reload insns. */
5841 switch (reload_when_needed[j])
5844 where = &other_input_reload_insns;
5846 case RELOAD_FOR_INPUT:
5847 where = &input_reload_insns[reload_opnum[j]];
5849 case RELOAD_FOR_INPUT_ADDRESS:
5850 where = &input_address_reload_insns[reload_opnum[j]];
5852 case RELOAD_FOR_OUTPUT_ADDRESS:
5853 where = &output_address_reload_insns[reload_opnum[j]];
5855 case RELOAD_FOR_OPERAND_ADDRESS:
5856 where = &operand_reload_insns;
5858 case RELOAD_FOR_OPADDR_ADDR:
5859 where = &other_operand_reload_insns;
5861 case RELOAD_FOR_OTHER_ADDRESS:
5862 where = &other_input_address_reload_insns;
5868 push_to_sequence (*where);
5871 /* Auto-increment addresses must be reloaded in a special way. */
5872 if (GET_CODE (oldequiv) == POST_INC
5873 || GET_CODE (oldequiv) == POST_DEC
5874 || GET_CODE (oldequiv) == PRE_INC
5875 || GET_CODE (oldequiv) == PRE_DEC)
5877 /* We are not going to bother supporting the case where an
5878 incremented register can't be copied directly from
5879 OLDEQUIV since this seems highly unlikely. */
5880 if (reload_secondary_in_reload[j] >= 0)
5882 /* Prevent normal processing of this reload. */
5884 /* Output a special code sequence for this case. */
5885 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5888 /* If we are reloading a pseudo-register that was set by the previous
5889 insn, see if we can get rid of that pseudo-register entirely
5890 by redirecting the previous insn into our reload register. */
5892 else if (optimize && GET_CODE (old) == REG
5893 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5894 && dead_or_set_p (insn, old)
5895 /* This is unsafe if some other reload
5896 uses the same reg first. */
5897 && reload_reg_free_before_p (REGNO (reloadreg),
5899 reload_when_needed[j]))
5901 rtx temp = PREV_INSN (insn);
5902 while (temp && GET_CODE (temp) == NOTE)
5903 temp = PREV_INSN (temp);
5905 && GET_CODE (temp) == INSN
5906 && GET_CODE (PATTERN (temp)) == SET
5907 && SET_DEST (PATTERN (temp)) == old
5908 /* Make sure we can access insn_operand_constraint. */
5909 && asm_noperands (PATTERN (temp)) < 0
5910 /* This is unsafe if prev insn rejects our reload reg. */
5911 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5913 /* This is unsafe if operand occurs more than once in current
5914 insn. Perhaps some occurrences aren't reloaded. */
5915 && count_occurrences (PATTERN (insn), old) == 1
5916 /* Don't risk splitting a matching pair of operands. */
5917 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5919 /* Store into the reload register instead of the pseudo. */
5920 SET_DEST (PATTERN (temp)) = reloadreg;
5921 /* If these are the only uses of the pseudo reg,
5922 pretend for GDB it lives in the reload reg we used. */
5923 if (reg_n_deaths[REGNO (old)] == 1
5924 && reg_n_sets[REGNO (old)] == 1)
5926 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5927 alter_reg (REGNO (old), -1);
5933 /* We can't do that, so output an insn to load RELOADREG. */
5937 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5938 rtx second_reload_reg = 0;
5939 enum insn_code icode;
5941 /* If we have a secondary reload, pick up the secondary register
5942 and icode, if any. If OLDEQUIV and OLD are different or
5943 if this is an in-out reload, recompute whether or not we
5944 still need a secondary register and what the icode should
5945 be. If we still need a secondary register and the class or
5946 icode is different, go back to reloading from OLD if using
5947 OLDEQUIV means that we got the wrong type of register. We
5948 cannot have different class or icode due to an in-out reload
5949 because we don't make such reloads when both the input and
5950 output need secondary reload registers. */
5952 if (reload_secondary_in_reload[j] >= 0)
5954 int secondary_reload = reload_secondary_in_reload[j];
5955 rtx real_oldequiv = oldequiv;
5958 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5959 and similarly for OLD.
5960 See comments in get_secondary_reload in reload.c. */
5961 if (GET_CODE (oldequiv) == REG
5962 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5963 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5964 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5966 if (GET_CODE (old) == REG
5967 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5968 && reg_equiv_mem[REGNO (old)] != 0)
5969 real_old = reg_equiv_mem[REGNO (old)];
5971 second_reload_reg = reload_reg_rtx[secondary_reload];
5972 icode = reload_secondary_in_icode[j];
5974 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5975 || (reload_in[j] != 0 && reload_out[j] != 0))
5977 enum reg_class new_class
5978 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5979 mode, real_oldequiv);
5981 if (new_class == NO_REGS)
5982 second_reload_reg = 0;
5985 enum insn_code new_icode;
5986 enum machine_mode new_mode;
5988 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5989 REGNO (second_reload_reg)))
5990 oldequiv = old, real_oldequiv = real_old;
5993 new_icode = reload_in_optab[(int) mode];
5994 if (new_icode != CODE_FOR_nothing
5995 && ((insn_operand_predicate[(int) new_icode][0]
5996 && ! ((*insn_operand_predicate[(int) new_icode][0])
5998 || (insn_operand_predicate[(int) new_icode][1]
5999 && ! ((*insn_operand_predicate[(int) new_icode][1])
6000 (real_oldequiv, mode)))))
6001 new_icode = CODE_FOR_nothing;
6003 if (new_icode == CODE_FOR_nothing)
6006 new_mode = insn_operand_mode[(int) new_icode][2];
6008 if (GET_MODE (second_reload_reg) != new_mode)
6010 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6012 oldequiv = old, real_oldequiv = real_old;
6015 = gen_rtx (REG, new_mode,
6016 REGNO (second_reload_reg));
6022 /* If we still need a secondary reload register, check
6023 to see if it is being used as a scratch or intermediate
6024 register and generate code appropriately. If we need
6025 a scratch register, use REAL_OLDEQUIV since the form of
6026 the insn may depend on the actual address if it is
6029 if (second_reload_reg)
6031 if (icode != CODE_FOR_nothing)
6033 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6034 second_reload_reg));
6039 /* See if we need a scratch register to load the
6040 intermediate register (a tertiary reload). */
6041 enum insn_code tertiary_icode
6042 = reload_secondary_in_icode[secondary_reload];
6044 if (tertiary_icode != CODE_FOR_nothing)
6046 rtx third_reload_reg
6047 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6049 emit_insn ((GEN_FCN (tertiary_icode)
6050 (second_reload_reg, real_oldequiv,
6051 third_reload_reg)));
6054 gen_reload (second_reload_reg, oldequiv,
6056 reload_when_needed[j]);
6058 oldequiv = second_reload_reg;
6064 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6065 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6066 reload_when_needed[j]);
6068 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6069 /* We may have to make a REG_DEAD note for the secondary reload
6070 register in the insns we just made. Find the last insn that
6071 mentioned the register. */
6072 if (! special && second_reload_reg
6073 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6077 for (prev = get_last_insn (); prev;
6078 prev = PREV_INSN (prev))
6079 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6080 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6083 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6092 /* End this sequence. */
6093 *where = get_insns ();
6097 /* Add a note saying the input reload reg
6098 dies in this insn, if anyone cares. */
6099 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6101 && reload_reg_rtx[j] != old
6102 && reload_reg_rtx[j] != 0
6103 && reload_out[j] == 0
6104 && ! reload_inherited[j]
6105 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6107 register rtx reloadreg = reload_reg_rtx[j];
6110 /* We can't abort here because we need to support this for sched.c.
6111 It's not terrible to miss a REG_DEAD note, but we should try
6112 to figure out how to do this correctly. */
6113 /* The code below is incorrect for address-only reloads. */
6114 if (reload_when_needed[j] != RELOAD_OTHER
6115 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6119 /* Add a death note to this insn, for an input reload. */
6121 if ((reload_when_needed[j] == RELOAD_OTHER
6122 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6123 && ! dead_or_set_p (insn, reloadreg))
6125 = gen_rtx (EXPR_LIST, REG_DEAD,
6126 reloadreg, REG_NOTES (insn));
6129 /* When we inherit a reload, the last marked death of the reload reg
6130 may no longer really be a death. */
6131 if (reload_reg_rtx[j] != 0
6132 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6133 && reload_inherited[j])
6135 /* Handle inheriting an output reload.
6136 Remove the death note from the output reload insn. */
6137 if (reload_spill_index[j] >= 0
6138 && GET_CODE (reload_in[j]) == REG
6139 && spill_reg_store[reload_spill_index[j]] != 0
6140 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6141 REG_DEAD, REGNO (reload_reg_rtx[j])))
6142 remove_death (REGNO (reload_reg_rtx[j]),
6143 spill_reg_store[reload_spill_index[j]]);
6144 /* Likewise for input reloads that were inherited. */
6145 else if (reload_spill_index[j] >= 0
6146 && GET_CODE (reload_in[j]) == REG
6147 && spill_reg_store[reload_spill_index[j]] == 0
6148 && reload_inheritance_insn[j] != 0
6149 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6150 REGNO (reload_reg_rtx[j])))
6151 remove_death (REGNO (reload_reg_rtx[j]),
6152 reload_inheritance_insn[j]);
6157 /* We got this register from find_equiv_reg.
6158 Search back for its last death note and get rid of it.
6159 But don't search back too far.
6160 Don't go past a place where this reg is set,
6161 since a death note before that remains valid. */
6162 for (prev = PREV_INSN (insn);
6163 prev && GET_CODE (prev) != CODE_LABEL;
6164 prev = PREV_INSN (prev))
6165 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6166 && dead_or_set_p (prev, reload_reg_rtx[j]))
6168 if (find_regno_note (prev, REG_DEAD,
6169 REGNO (reload_reg_rtx[j])))
6170 remove_death (REGNO (reload_reg_rtx[j]), prev);
6176 /* We might have used find_equiv_reg above to choose an alternate
6177 place from which to reload. If so, and it died, we need to remove
6178 that death and move it to one of the insns we just made. */
6180 if (oldequiv_reg != 0
6181 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6185 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6186 prev = PREV_INSN (prev))
6187 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6188 && dead_or_set_p (prev, oldequiv_reg))
6190 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6192 for (prev1 = this_reload_insn;
6193 prev1; prev1 = PREV_INSN (prev1))
6194 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6195 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6198 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6203 remove_death (REGNO (oldequiv_reg), prev);
6210 /* If we are reloading a register that was recently stored in with an
6211 output-reload, see if we can prove there was
6212 actually no need to store the old value in it. */
6214 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6215 && reload_in[j] != 0
6216 && GET_CODE (reload_in[j]) == REG
6218 /* There doesn't seem to be any reason to restrict this to pseudos
6219 and doing so loses in the case where we are copying from a
6220 register of the wrong class. */
6221 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6223 && spill_reg_store[reload_spill_index[j]] != 0
6224 /* This is unsafe if some other reload uses the same reg first. */
6225 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6226 reload_opnum[j], reload_when_needed[j])
6227 && dead_or_set_p (insn, reload_in[j])
6228 /* This is unsafe if operand occurs more than once in current
6229 insn. Perhaps some occurrences weren't reloaded. */
6230 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6231 delete_output_reload (insn, j,
6232 spill_reg_store[reload_spill_index[j]]);
6234 /* Input-reloading is done. Now do output-reloading,
6235 storing the value from the reload-register after the main insn
6236 if reload_out[j] is nonzero.
6238 ??? At some point we need to support handling output reloads of
6239 JUMP_INSNs or insns that set cc0. */
6240 old = reload_out[j];
6242 && reload_reg_rtx[j] != old
6243 && reload_reg_rtx[j] != 0)
6245 register rtx reloadreg = reload_reg_rtx[j];
6246 register rtx second_reloadreg = 0;
6248 enum machine_mode mode;
6251 /* An output operand that dies right away does need a reload,
6252 but need not be copied from it. Show the new location in the
6254 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6255 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6257 XEXP (note, 0) = reload_reg_rtx[j];
6260 else if (GET_CODE (old) == SCRATCH)
6261 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6262 but we don't want to make an output reload. */
6266 /* Strip off of OLD any size-increasing SUBREGs such as
6267 (SUBREG:SI foo:QI 0). */
6269 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6270 && (GET_MODE_SIZE (GET_MODE (old))
6271 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6272 old = SUBREG_REG (old);
6275 /* If is a JUMP_INSN, we can't support output reloads yet. */
6276 if (GET_CODE (insn) == JUMP_INSN)
6279 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6281 /* Determine the mode to reload in.
6282 See comments above (for input reloading). */
6284 mode = GET_MODE (old);
6285 if (mode == VOIDmode)
6287 /* VOIDmode should never happen for an output. */
6288 if (asm_noperands (PATTERN (insn)) < 0)
6289 /* It's the compiler's fault. */
6290 fatal_insn ("VOIDmode on an output", insn);
6291 error_for_asm (insn, "output operand is constant in `asm'");
6292 /* Prevent crash--use something we know is valid. */
6294 old = gen_rtx (REG, mode, REGNO (reloadreg));
6297 if (GET_MODE (reloadreg) != mode)
6298 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6300 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6302 /* If we need two reload regs, set RELOADREG to the intermediate
6303 one, since it will be stored into OLD. We might need a secondary
6304 register only for an input reload, so check again here. */
6306 if (reload_secondary_out_reload[j] >= 0)
6310 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6311 && reg_equiv_mem[REGNO (old)] != 0)
6312 real_old = reg_equiv_mem[REGNO (old)];
6314 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6318 second_reloadreg = reloadreg;
6319 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6321 /* See if RELOADREG is to be used as a scratch register
6322 or as an intermediate register. */
6323 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6325 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6326 (real_old, second_reloadreg, reloadreg)));
6331 /* See if we need both a scratch and intermediate reload
6334 int secondary_reload = reload_secondary_out_reload[j];
6335 enum insn_code tertiary_icode
6336 = reload_secondary_out_icode[secondary_reload];
6338 if (GET_MODE (reloadreg) != mode)
6339 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6341 if (tertiary_icode != CODE_FOR_nothing)
6344 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6346 /* Copy primary reload reg to secondary reload reg.
6347 (Note that these have been swapped above), then
6348 secondary reload reg to OLD using our insn. */
6350 gen_reload (reloadreg, second_reloadreg,
6351 reload_opnum[j], reload_when_needed[j]);
6352 emit_insn ((GEN_FCN (tertiary_icode)
6353 (real_old, reloadreg, third_reloadreg)));
6358 /* Copy between the reload regs here and then to
6361 gen_reload (reloadreg, second_reloadreg,
6362 reload_opnum[j], reload_when_needed[j]);
6368 /* Output the last reload insn. */
6370 gen_reload (old, reloadreg, reload_opnum[j],
6371 reload_when_needed[j]);
6373 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6374 /* If final will look at death notes for this reg,
6375 put one on the last output-reload insn to use it. Similarly
6376 for any secondary register. */
6377 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6378 for (p = get_last_insn (); p; p = PREV_INSN (p))
6379 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6380 && reg_overlap_mentioned_for_reload_p (reloadreg,
6382 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6383 reloadreg, REG_NOTES (p));
6385 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6387 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6388 for (p = get_last_insn (); p; p = PREV_INSN (p))
6389 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6390 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6392 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6393 second_reloadreg, REG_NOTES (p));
6396 /* Look at all insns we emitted, just to be safe. */
6397 for (p = get_insns (); p; p = NEXT_INSN (p))
6398 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6400 /* If this output reload doesn't come from a spill reg,
6401 clear any memory of reloaded copies of the pseudo reg.
6402 If this output reload comes from a spill reg,
6403 reg_has_output_reload will make this do nothing. */
6404 note_stores (PATTERN (p), forget_old_reloads_1);
6406 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6407 && reload_spill_index[j] >= 0)
6408 new_spill_reg_store[reload_spill_index[j]] = p;
6411 output_reload_insns[reload_opnum[j]] = get_insns ();
6416 /* Now write all the insns we made for reloads in the order expected by
6417 the allocation functions. Prior to the insn being reloaded, we write
6418 the following reloads:
6420 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6422 RELOAD_OTHER reloads.
6424 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6425 the RELOAD_FOR_INPUT reload for the operand.
6427 RELOAD_FOR_OPADDR_ADDRS reloads.
6429 RELOAD_FOR_OPERAND_ADDRESS reloads.
6431 After the insn being reloaded, we write the following:
6433 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6434 the RELOAD_FOR_OUTPUT reload for that operand. */
6436 emit_insns_before (other_input_address_reload_insns, before_insn);
6437 emit_insns_before (other_input_reload_insns, before_insn);
6439 for (j = 0; j < reload_n_operands; j++)
6441 emit_insns_before (input_address_reload_insns[j], before_insn);
6442 emit_insns_before (input_reload_insns[j], before_insn);
6445 emit_insns_before (other_operand_reload_insns, before_insn);
6446 emit_insns_before (operand_reload_insns, before_insn);
6448 for (j = 0; j < reload_n_operands; j++)
6450 emit_insns_before (output_address_reload_insns[j], following_insn);
6451 emit_insns_before (output_reload_insns[j], following_insn);
6454 /* Move death notes from INSN
6455 to output-operand-address and output reload insns. */
6456 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6459 /* Loop over those insns, last ones first. */
6460 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6461 insn1 = PREV_INSN (insn1))
6462 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6464 rtx source = SET_SRC (PATTERN (insn1));
6465 rtx dest = SET_DEST (PATTERN (insn1));
6467 /* The note we will examine next. */
6468 rtx reg_notes = REG_NOTES (insn);
6469 /* The place that pointed to this note. */
6470 rtx *prev_reg_note = ®_NOTES (insn);
6472 /* If the note is for something used in the source of this
6473 reload insn, or in the output address, move the note. */
6476 rtx next_reg_notes = XEXP (reg_notes, 1);
6477 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6478 && GET_CODE (XEXP (reg_notes, 0)) == REG
6479 && ((GET_CODE (dest) != REG
6480 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6482 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6485 *prev_reg_note = next_reg_notes;
6486 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6487 REG_NOTES (insn1) = reg_notes;
6490 prev_reg_note = &XEXP (reg_notes, 1);
6492 reg_notes = next_reg_notes;
6498 /* For all the spill regs newly reloaded in this instruction,
6499 record what they were reloaded from, so subsequent instructions
6500 can inherit the reloads.
6502 Update spill_reg_store for the reloads of this insn.
6503 Copy the elements that were updated in the loop above. */
6505 for (j = 0; j < n_reloads; j++)
6507 register int r = reload_order[j];
6508 register int i = reload_spill_index[r];
6510 /* I is nonneg if this reload used one of the spill regs.
6511 If reload_reg_rtx[r] is 0, this is an optional reload
6512 that we opted to ignore.
6514 Also ignore reloads that don't reach the end of the insn,
6515 since we will eventually see the one that does. */
6517 if (i >= 0 && reload_reg_rtx[r] != 0
6518 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6519 reload_when_needed[r]))
6521 /* First, clear out memory of what used to be in this spill reg.
6522 If consecutive registers are used, clear them all. */
6524 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6527 for (k = 0; k < nr; k++)
6529 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6530 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6533 /* Maybe the spill reg contains a copy of reload_out. */
6534 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6536 register int nregno = REGNO (reload_out[r]);
6537 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6538 : HARD_REGNO_NREGS (nregno,
6539 GET_MODE (reload_reg_rtx[r])));
6541 spill_reg_store[i] = new_spill_reg_store[i];
6542 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6544 /* If NREGNO is a hard register, it may occupy more than
6545 one register. If it does, say what is in the
6546 rest of the registers assuming that both registers
6547 agree on how many words the object takes. If not,
6548 invalidate the subsequent registers. */
6550 if (nregno < FIRST_PSEUDO_REGISTER)
6551 for (k = 1; k < nnr; k++)
6552 reg_last_reload_reg[nregno + k]
6553 = (nr == nnr ? gen_rtx (REG,
6554 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6555 REGNO (reload_reg_rtx[r]) + k)
6558 /* Now do the inverse operation. */
6559 for (k = 0; k < nr; k++)
6561 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6562 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6564 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6568 /* Maybe the spill reg contains a copy of reload_in. Only do
6569 something if there will not be an output reload for
6570 the register being reloaded. */
6571 else if (reload_out[r] == 0
6572 && reload_in[r] != 0
6573 && ((GET_CODE (reload_in[r]) == REG
6574 && ! reg_has_output_reload[REGNO (reload_in[r])]
6575 || (GET_CODE (reload_in_reg[r]) == REG
6576 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6578 register int nregno;
6581 if (GET_CODE (reload_in[r]) == REG)
6582 nregno = REGNO (reload_in[r]);
6584 nregno = REGNO (reload_in_reg[r]);
6586 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6587 : HARD_REGNO_NREGS (nregno,
6588 GET_MODE (reload_reg_rtx[r])));
6590 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6592 if (nregno < FIRST_PSEUDO_REGISTER)
6593 for (k = 1; k < nnr; k++)
6594 reg_last_reload_reg[nregno + k]
6595 = (nr == nnr ? gen_rtx (REG,
6596 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6597 REGNO (reload_reg_rtx[r]) + k)
6600 /* Unless we inherited this reload, show we haven't
6601 recently done a store. */
6602 if (! reload_inherited[r])
6603 spill_reg_store[i] = 0;
6605 for (k = 0; k < nr; k++)
6607 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6608 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6610 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6616 /* The following if-statement was #if 0'd in 1.34 (or before...).
6617 It's reenabled in 1.35 because supposedly nothing else
6618 deals with this problem. */
6620 /* If a register gets output-reloaded from a non-spill register,
6621 that invalidates any previous reloaded copy of it.
6622 But forget_old_reloads_1 won't get to see it, because
6623 it thinks only about the original insn. So invalidate it here. */
6624 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6626 register int nregno = REGNO (reload_out[r]);
6627 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6629 while (num_regs-- > 0)
6630 reg_last_reload_reg[nregno + num_regs] = 0;
6635 /* Emit code to perform a reload from IN (which may be a reload register) to
6636 OUT (which may also be a reload register). IN or OUT is from operand
6637 OPNUM with reload type TYPE.
6639 Returns first insn emitted. */
6642 gen_reload (out, in, opnum, type)
/* NOTE(review): the return type and the declarations of OUT, IN and OPNUM
   are not visible in this excerpt -- confirm against the full file.  */
6646 enum reload_type type;
6648 rtx last = get_last_insn ();
6651 /* If IN is a paradoxical SUBREG, remove it and try to put the
6652 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
6653 if (GET_CODE (in) == SUBREG
6654 && (GET_MODE_SIZE (GET_MODE (in))
6655 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
6656 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
6657 in = SUBREG_REG (in), out = tem;
6658 else if (GET_CODE (out) == SUBREG
6659 && (GET_MODE_SIZE (GET_MODE (out))
6660 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
6661 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
6662 out = SUBREG_REG (out), in = tem;
6664 /* How to do this reload can get quite tricky. Normally, we are being
6665 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6666 register that didn't get a hard register. In that case we can just
6667 call emit_move_insn.
6669 We can also be asked to reload a PLUS that adds a register or a MEM to
6670 another register, constant or MEM. This can occur during frame pointer
6671 elimination and while reloading addresses. This case is handled by
6672 trying to emit a single insn to perform the add. If it is not valid,
6673 we use a two insn sequence.
6675 Finally, we could be called to handle an 'o' constraint by putting
6676 an address into a register. In that case, we first try to do this
6677 with a named pattern of "reload_load_address". If no such pattern
6678 exists, we just emit a SET insn and hope for the best (it will normally
6679 be valid on machines that use 'o').
6681 This entire process is made complex because reload will never
6682 process the insns we generate here and so we must ensure that
6683 they will fit their constraints and also by the fact that parts of
6684 IN might be being reloaded separately and replaced with spill registers.
6685 Because of this, we are, in some sense, just guessing the right approach
6686 here. The one listed above seems to work.
6688 ??? At some point, this whole thing needs to be rethought. */
6690 if (GET_CODE (in) == PLUS
6691 && (GET_CODE (XEXP (in, 0)) == REG
6692 || GET_CODE (XEXP (in, 0)) == MEM)
6693 && (GET_CODE (XEXP (in, 1)) == REG
6694 || CONSTANT_P (XEXP (in, 1))
6695 || GET_CODE (XEXP (in, 1)) == MEM))
6697 /* We need to compute the sum of a register or a MEM and another
6698 register, constant, or MEM, and put it into the reload
6699 register. The best possible way of doing this is if the machine
6700 has a three-operand ADD insn that accepts the required operands.
6702 The simplest approach is to try to generate such an insn and see if it
6703 is recognized and matches its constraints. If so, it can be used.
6705 It might be better not to actually emit the insn unless it is valid,
6706 but we need to pass the insn as an operand to `recog' and
6707 `insn_extract' and it is simpler to emit and then delete the insn if
6708 not valid than to dummy things up. */
6710 rtx op0, op1, tem, insn;
/* Use the possibly-substituted spill registers in place of the original
   PLUS operands.  */
6713 op0 = find_replacement (&XEXP (in, 0));
6714 op1 = find_replacement (&XEXP (in, 1));
6716 /* Since constraint checking is strict, commutativity won't be
6717 checked, so we need to do that here to avoid spurious failure
6718 if the add instruction is two-address and the second operand
6719 of the add is the same as the reload reg, which is frequently
6720 the case. If the insn would be A = B + A, rearrange it so
6721 it will be A = A + B as constrain_operands expects. */
6723 if (GET_CODE (XEXP (in, 1)) == REG
6724 && REGNO (out) == REGNO (XEXP (in, 1)))
6725 tem = op0, op0 = op1, op1 = tem;
6727 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6728 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6730 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
/* Recognize the tentative add; a negative CODE means no matching
   pattern exists for this machine.  */
6731 code = recog_memoized (insn);
6735 insn_extract (insn);
6736 /* We want constrain operands to treat this insn strictly in
6737 its validity determination, i.e., the way it would after reload
6739 if (constrain_operands (code, 1))
/* Reaching here means the single-insn add was not usable; discard it
   and fall back to the conservative two-insn sequence below.  */
6743 delete_insns_since (last);
6745 /* If that failed, we must use a conservative two-insn sequence.
6746 use move to copy constant, MEM, or pseudo register to the reload
6747 register since "move" will be able to handle an arbitrary operand,
6748 unlike add which can't, in general. Then add the registers.
6750 If there is another way to do this for a specific machine, a
6751 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6754 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6755 || (GET_CODE (op1) == REG
6756 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6757 tem = op0, op0 = op1, op1 = tem;
6759 emit_insn (gen_move_insn (out, op0));
6761 /* If OP0 and OP1 are the same, we can use OUT for OP1.
6762 This fixes a problem on the 32K where the stack pointer cannot
6763 be used as an operand of an add insn. */
6765 if (rtx_equal_p (op0, op1))
6768 insn = emit_insn (gen_add2_insn (out, op1));
6770 /* If that failed, copy the address register to the reload register.
6771 Then add the constant to the reload register. */
6773 code = recog_memoized (insn);
6777 insn_extract (insn);
6778 /* We want constrain operands to treat this insn strictly in
6779 its validity determination, i.e., the way it would after reload
6781 if (constrain_operands (code, 1))
/* The add insn was not valid either; discard it and emit
   move-then-add with the operands the other way around.  */
6785 delete_insns_since (last);
6787 emit_insn (gen_move_insn (out, op1));
6788 emit_insn (gen_add2_insn (out, op0));
6791 #ifdef SECONDARY_MEMORY_NEEDED
6792 /* If we need a memory location to do the move, do it that way. */
6793 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6794 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
6795 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6796 REGNO_REG_CLASS (REGNO (out)),
6799 /* Get the memory to use and rewrite both registers to its mode. */
6800 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
6802 if (GET_MODE (loc) != GET_MODE (out))
6803 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
6805 if (GET_MODE (loc) != GET_MODE (in))
6806 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
/* Bounce the value through the secondary memory location.  */
6808 emit_insn (gen_move_insn (loc, in));
6809 emit_insn (gen_move_insn (out, loc));
6813 /* If IN is a simple operand, use gen_move_insn. */
6814 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6815 emit_insn (gen_move_insn (out, in));
6817 #ifdef HAVE_reload_load_address
6818 else if (HAVE_reload_load_address)
6819 emit_insn (gen_reload_load_address (out, in));
6822 /* Otherwise, just write (set OUT IN) and hope for the best. */
6824 emit_insn (gen_rtx (SET, VOIDmode, out, in));
6826 /* Return the first insn emitted.
6827 We can not just return get_last_insn, because there may have
6828 been multiple instructions emitted. Also note that gen_move_insn may
6829 emit more than one insn itself, so we can not assume that there is one
6830 insn emitted per emit_insn_before call. */
6832 return last ? NEXT_INSN (last) : get_insns ();
6835 /* Delete a previously made output-reload
6836 whose result we now believe is not needed.
6837 First we double-check.
6839 INSN is the insn now being processed.
6840 OUTPUT_RELOAD_INSN is the insn of the output reload.
6841 J is the reload-number for this insn. */
6844 delete_output_reload (insn, j, output_reload_insn)
/* NOTE(review): the declarations of INSN and J, and several statements
   controlled by the tests below (returns/breaks/deletes), are elided in
   this excerpt -- confirm against the full file.  */
6847 rtx output_reload_insn;
6851 /* Get the raw pseudo-register referred to. */
6853 rtx reg = reload_in[j];
6854 while (GET_CODE (reg) == SUBREG)
6855 reg = SUBREG_REG (reg);
6857 /* If the pseudo-reg we are reloading is no longer referenced
6858 anywhere between the store into it and here,
6859 and no jumps or labels intervene, then the value can get
6860 here through the reload reg alone.
6861 Otherwise, give up--return. */
6862 for (i1 = NEXT_INSN (output_reload_insn);
6863 i1 != insn; i1 = NEXT_INSN (i1))
6865 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6867 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6868 && reg_mentioned_p (reg, PATTERN (i1)))
/* Stores flagged in cannot_omit_stores must be kept; the bail-out this
   test guards is not visible here -- confirm.  */
6872 if (cannot_omit_stores[REGNO (reg)])
6875 /* If this insn will store in the pseudo again,
6876 the previous store can be removed. */
6877 if (reload_out[j] == reload_in[j])
6878 delete_insn (output_reload_insn);
6880 /* See if the pseudo reg has been completely replaced
6881 with reload regs. If so, delete the store insn
6882 and forget we had a stack slot for the pseudo. */
6883 else if (reg_n_deaths[REGNO (reg)] == 1
6884 && reg_basic_block[REGNO (reg)] >= 0
6885 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6889 /* We know that it was used only between here
6890 and the beginning of the current basic block.
6891 (We also know that the last use before INSN was
6892 the output reload we are thinking of deleting, but never mind that.)
6893 Search that range; see if any ref remains. */
6894 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6896 rtx set = single_set (i2);
6898 /* Uses which just store in the pseudo don't count,
6899 since if they are the only uses, they are dead. */
6900 if (set != 0 && SET_DEST (set) == reg)
6902 if (GET_CODE (i2) == CODE_LABEL
6903 || GET_CODE (i2) == JUMP_INSN)
6905 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6906 && reg_mentioned_p (reg, PATTERN (i2)))
6907 /* Some other ref remains;
6908 we can't do anything. */
6912 /* Delete the now-dead stores into this pseudo. */
6913 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6915 rtx set = single_set (i2);
6917 if (set != 0 && SET_DEST (set) == reg)
6919 if (GET_CODE (i2) == CODE_LABEL
6920 || GET_CODE (i2) == JUMP_INSN)
6924 /* For the debugging info,
6925 say the pseudo lives in this reload reg. */
6926 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
/* Re-run register elimination bookkeeping for REG now that it no
   longer needs a stack slot.  */
6927 alter_reg (REGNO (reg), -1);
6931 /* Output reload-insns to reload VALUE into RELOADREG.
6932 VALUE is an autoincrement or autodecrement RTX whose operand
6933 is a register or memory location;
6934 so reloading involves incrementing that location.
6936 INC_AMOUNT is the number to increment or decrement by (always positive).
6937 This cannot be deduced from VALUE. */
6940 inc_for_reload (reloadreg, value, inc_amount)
/* NOTE(review): the declarations of RELOADREG, VALUE, INC_AMOUNT and of
   locals such as INC, LAST, ADD_INSN and CODE, plus some guarding
   conditionals, are elided in this excerpt -- confirm against the full
   file.  */
6945 /* REG or MEM to be copied and incremented. */
6946 rtx incloc = XEXP (value, 0);
6947 /* Nonzero if increment after copying. */
6948 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6954 /* No hard register is equivalent to this register after
6955 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6956 we could inc/dec that register as well (maybe even using it for
6957 the source), but I'm not sure it's worth worrying about. */
6958 if (GET_CODE (incloc) == REG)
6959 reg_last_reload_reg[REGNO (incloc)] = 0;
/* Decrements are handled as negative increments.  */
6961 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6962 inc_amount = - inc_amount;
6964 inc = GEN_INT (inc_amount);
6966 /* If this is post-increment, first copy the location to the reload reg. */
/* NOTE(review): the `if (post)' guard implied by the comment above is
   not visible in this excerpt.  */
6968 emit_insn (gen_move_insn (reloadreg, incloc));
6970 /* See if we can directly increment INCLOC. Use a method similar to that
6973 last = get_last_insn ();
6974 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6975 gen_rtx (PLUS, GET_MODE (incloc),
6978 code = recog_memoized (add_insn);
6981 insn_extract (add_insn);
6982 if (constrain_operands (code, 1))
6984 /* If this is a pre-increment and we have incremented the value
6985 where it lives, copy the incremented value to RELOADREG to
6986 be used as an address. */
6989 emit_insn (gen_move_insn (reloadreg, incloc));
/* The direct increment of INCLOC was not recognized as valid; discard
   it and increment through RELOADREG below.  */
6995 delete_insns_since (last);
6997 /* If couldn't do the increment directly, must increment in RELOADREG.
6998 The way we do this depends on whether this is pre- or post-increment.
6999 For pre-increment, copy INCLOC to the reload register, increment it
7000 there, then save back. */
7004 emit_insn (gen_move_insn (reloadreg, incloc));
7005 emit_insn (gen_add2_insn (reloadreg, inc));
7006 emit_insn (gen_move_insn (incloc, reloadreg));
7011 Because this might be a jump insn or a compare, and because RELOADREG
7012 may not be available after the insn in an input reload, we must do
7013 the incrementation before the insn being reloaded for.
7015 We have already copied INCLOC to RELOADREG. Increment the copy in
7016 RELOADREG, save that back, then decrement RELOADREG so it has
7017 the original value. */
7019 emit_insn (gen_add2_insn (reloadreg, inc));
7020 emit_insn (gen_move_insn (incloc, reloadreg));
/* Undo the increment in RELOADREG so it holds the pre-increment value
   for use as the address in the insn being reloaded.  */
7021 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7027 /* Return 1 if we are certain that the constraint-string STRING allows
7028 the hard register REG. Return 0 if we can't be sure of this. */
7031 constraint_accepts_reg_p (string, reg)
/* Scan each constraint alternative in STRING; return 1 only when every
   alternative is shown to accept hard register REG (see the comment
   above this function).  NOTE(review): the switch case labels, the
   enclosing loop, and the return statements are elided in this
   excerpt -- confirm against the full file.  */
7036 int regno = true_regnum (reg);
7039 /* Initialize for first alternative. */
7041 /* Check that each alternative contains `g' or `r'. */
7043 switch (c = *string++)
7046 /* If an alternative lacks `g' or `r', we lose. */
7049 /* If an alternative lacks `g' or `r', we lose. */
7052 /* Initialize for next alternative. */
7057 /* Any general reg wins for this alternative. */
7058 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7062 /* Any reg in specified class wins for this alternative. */
7064 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7066 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7072 /* Return the number of places FIND appears within X, but don't count
7073 an occurrence if some SET_DEST is FIND. */
7076 count_occurrences (x, find)
7077 register rtx x, find;
7080 register enum rtx_code code;
7081 register char *format_ptr;
7089 code = GET_CODE (x);
7104 if (SET_DEST (x) == find)
7105 return count_occurrences (SET_SRC (x), find);
7109 format_ptr = GET_RTX_FORMAT (code);
7112 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7114 switch (*format_ptr++)
7117 count += count_occurrences (XEXP (x, i), find);
7121 if (XVEC (x, i) != NULL)
7123 for (j = 0; j < XVECLEN (x, i); j++)
7124 count += count_occurrences (XVECEXP (x, i, j), find);