1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
32 #include "hard-reg-set.h"
35 #include "basic-block.h"
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
164 (spill_reg_order prevents these registers from being used to start a
166 static HARD_REG_SET bad_spill_regs;
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
173 /* This reg set indicates those registers that have been used a spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
178 HARD_REG_SET used_spill_regs;
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
183 static int last_spill_reg;
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
207 /* Indexed by pseudo reg number N,
208 says may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
220 static char spill_indirect_levels;
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
226 char indirect_symref_ok;
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
230 char double_reg_address_ok;
232 /* Record the stack slot for each spilled hard register. */
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
236 /* Width allocated so far for that stack slot. */
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
245 char *basic_block_needs[N_REG_CLASSES];
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
254 int caller_save_needed;
256 /* Set to 1 while reload_as_needed is operating.
257 Required by some machines to handle any generated moves differently. */
259 int reload_in_progress = 0;
261 /* These arrays record the insn_code of insns that may be needed to
262 perform input and output reloads of special objects. They provide a
263 place to pass a scratch register. */
265 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
266 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
268 /* This obstack is used for allocation of rtl during register elimination.
269 The allocated storage can be freed once find_reloads has processed the
272 struct obstack reload_obstack;
273 char *reload_firstobj;
275 #define obstack_chunk_alloc xmalloc
276 #define obstack_chunk_free free
278 /* List of labels that must never be deleted. */
279 extern rtx forced_labels;
281 /* Allocation number table from global register allocation. */
282 extern int *reg_allocno;
284 /* This structure is used to record information about register eliminations.
285 Each array entry describes one possible way of eliminating a register
286 in favor of another. If there is more than one way of eliminating a
287 particular register, the most preferred should be specified first. */
289 static struct elim_table
291 int from; /* Register number to be eliminated. */
292 int to; /* Register number used as replacement. */
293 int initial_offset; /* Initial difference between values. */
294 int can_eliminate; /* Non-zero if this elimination can be done. */
295 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
296 insns made by reload. */
297 int offset; /* Current offset between the two regs. */
298 int max_offset; /* Maximum offset between the two regs. */
299 int previous_offset; /* Offset at end of previous insn. */
300 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
301 rtx from_rtx; /* REG rtx for the register to be eliminated.
302 We cannot simply compare the number since
303 we might then spuriously replace a hard
304 register corresponding to a pseudo
305 assigned to the reg to be eliminated. */
306 rtx to_rtx; /* REG rtx for the replacement. */
309 /* If a set of eliminable registers was specified, define the table from it.
310 Otherwise, default to the normal case of the frame pointer being
311 replaced by the stack pointer. */
313 #ifdef ELIMINABLE_REGS
316 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
319 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
321 /* Record the number of pending eliminations that have an offset not equal
322 to their initial offset. If non-zero, we use a new copy of each
323 replacement result in any insns encountered. */
324 static int num_not_at_initial_offset;
326 /* Count the number of registers that we may be able to eliminate. */
327 static int num_eliminable;
329 /* For each label, we record the offset of each elimination. If we reach
330 a label by more than one path and an offset differs, we cannot do the
331 elimination. This information is indexed by the number of the label.
332 The first table is an array of flags that records whether we have yet
333 encountered a label and the second table is an array of arrays, one
334 entry in the latter array for each elimination. */
336 static char *offsets_known_at;
337 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
339 /* Number of labels in the current function. */
341 static int num_labels;
343 struct hard_reg_n_uses { int regno; int uses; };
345 static int possible_group_p PROTO((int, int *));
346 static void count_possible_groups PROTO((int *, enum machine_mode *,
348 static int modes_equiv_for_class_p PROTO((enum machine_mode,
351 static void spill_failure PROTO((rtx));
352 static int new_spill_reg PROTO((int, int, int *, int *, int,
354 static void delete_dead_insn PROTO((rtx));
355 static void alter_reg PROTO((int, int));
356 static void mark_scratch_live PROTO((rtx));
357 static void set_label_offsets PROTO((rtx, rtx, int));
358 static int eliminate_regs_in_insn PROTO((rtx, int));
359 static void mark_not_eliminable PROTO((rtx, rtx));
360 static int spill_hard_reg PROTO((int, int, FILE *, int));
361 static void scan_paradoxical_subregs PROTO((rtx));
362 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
363 static void order_regs_for_reload PROTO((int));
364 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
365 static void reload_as_needed PROTO((rtx, int));
366 static void forget_old_reloads_1 PROTO((rtx, rtx));
367 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
368 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
370 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
372 static int reload_reg_free_p PROTO((int, int, enum reload_type));
373 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
374 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
375 static int reloads_conflict PROTO((int, int));
376 static int allocate_reload_reg PROTO((int, rtx, int, int));
377 static void choose_reload_regs PROTO((rtx, rtx));
378 static void merge_assigned_reloads PROTO((rtx));
379 static void emit_reload_insns PROTO((rtx));
380 static void delete_output_reload PROTO((rtx, int, rtx));
381 static void inc_for_reload PROTO((rtx, rtx, int));
382 static int constraint_accepts_reg_p PROTO((char *, rtx));
383 static int count_occurrences PROTO((rtx, rtx));
385 /* Initialize the reload pass once per compilation. */
392 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
393 Set spill_indirect_levels to the number of levels such addressing is
394 permitted, zero if it is not permitted at all. */
397 = gen_rtx (MEM, Pmode,
398 gen_rtx (PLUS, Pmode,
399 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
401 spill_indirect_levels = 0;
403 while (memory_address_p (QImode, tem))
405 spill_indirect_levels++;
406 tem = gen_rtx (MEM, Pmode, tem);
409 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
411 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
412 indirect_symref_ok = memory_address_p (QImode, tem);
414 /* See if reg+reg is a valid (and offsettable) address. */
416 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
418 tem = gen_rtx (PLUS, Pmode,
419 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
420 gen_rtx (REG, Pmode, i));
421 /* This way, we make sure that reg+reg is an offsettable address. */
422 tem = plus_constant (tem, 4);
424 if (memory_address_p (QImode, tem))
426 double_reg_address_ok = 1;
431 /* Initialize obstack for our rtl allocation. */
432 gcc_obstack_init (&reload_obstack);
433 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
436 /* Main entry point for the reload pass.
438 FIRST is the first insn of the function being compiled.
440 GLOBAL nonzero means we were called from global_alloc
441 and should attempt to reallocate any pseudoregs that we
442 displace from hard regs we will use for reloads.
443 If GLOBAL is zero, we do not have enough information to do that,
444 so any pseudo reg that is spilled must go to the stack.
446 DUMPFILE is the global-reg debugging dump file stream, or 0.
447 If it is nonzero, messages are written to it to describe
448 which registers are seized as reload regs, which pseudo regs
449 are spilled from them, and where the pseudo regs are reallocated to.
451 Return value is nonzero if reload failed
452 and we must not do any more for this function. */
455 reload (first, global, dumpfile)
461 register int i, j, k;
463 register struct elim_table *ep;
465 int something_changed;
466 int something_needs_reloads;
467 int something_needs_elimination;
468 int new_basic_block_needs;
469 enum reg_class caller_save_spill_class = NO_REGS;
470 int caller_save_group_size = 1;
472 /* Nonzero means we couldn't get enough spill regs. */
475 /* The basic block number currently being processed for INSN. */
478 /* Make sure even insns with volatile mem refs are recognizable. */
481 /* Enable find_equiv_reg to distinguish insns made by reload. */
482 reload_first_uid = get_max_uid ();
484 for (i = 0; i < N_REG_CLASSES; i++)
485 basic_block_needs[i] = 0;
487 #ifdef SECONDARY_MEMORY_NEEDED
488 /* Initialize the secondary memory table. */
489 clear_secondary_mem ();
492 /* Remember which hard regs appear explicitly
493 before we merge into `regs_ever_live' the ones in which
494 pseudo regs have been allocated. */
495 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
497 /* We don't have a stack slot for any spill reg yet. */
498 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
499 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
501 /* Initialize the save area information for caller-save, in case some
505 /* Compute which hard registers are now in use
506 as homes for pseudo registers.
507 This is done here rather than (eg) in global_alloc
508 because this point is reached even if not optimizing. */
510 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
513 for (i = 0; i < scratch_list_length; i++)
515 mark_scratch_live (scratch_list[i]);
517 /* Make sure that the last insn in the chain
518 is not something that needs reloading. */
519 emit_note (NULL_PTR, NOTE_INSN_DELETED);
521 /* Find all the pseudo registers that didn't get hard regs
522 but do have known equivalent constants or memory slots.
523 These include parameters (known equivalent to parameter slots)
524 and cse'd or loop-moved constant memory addresses.
526 Record constant equivalents in reg_equiv_constant
527 so they will be substituted by find_reloads.
528 Record memory equivalents in reg_mem_equiv so they can
529 be substituted eventually by altering the REG-rtx's. */
531 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
532 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
533 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
534 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
535 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
536 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
537 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
538 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
539 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
540 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
541 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
542 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
543 cannot_omit_stores = (char *) alloca (max_regno);
544 bzero (cannot_omit_stores, max_regno);
546 #ifdef SMALL_REGISTER_CLASSES
547 if (SMALL_REGISTER_CLASSES)
548 CLEAR_HARD_REG_SET (forbidden_regs);
551 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
552 Also find all paradoxical subregs and find largest such for each pseudo.
553 On machines with small register classes, record hard registers that
554 are used for user variables. These can never be used for spills.
555 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
556 caller-saved registers must be marked live. */
558 for (insn = first; insn; insn = NEXT_INSN (insn))
560 rtx set = single_set (insn);
562 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
563 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
564 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
565 if (! call_used_regs[i])
566 regs_ever_live[i] = 1;
568 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
570 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
572 #ifdef LEGITIMATE_PIC_OPERAND_P
573 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
574 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
578 rtx x = XEXP (note, 0);
579 i = REGNO (SET_DEST (set));
580 if (i > LAST_VIRTUAL_REGISTER)
582 if (GET_CODE (x) == MEM)
583 reg_equiv_memory_loc[i] = x;
584 else if (CONSTANT_P (x))
586 if (LEGITIMATE_CONSTANT_P (x))
587 reg_equiv_constant[i] = x;
589 reg_equiv_memory_loc[i]
590 = force_const_mem (GET_MODE (SET_DEST (set)), x);
595 /* If this register is being made equivalent to a MEM
596 and the MEM is not SET_SRC, the equivalencing insn
597 is one with the MEM as a SET_DEST and it occurs later.
598 So don't mark this insn now. */
599 if (GET_CODE (x) != MEM
600 || rtx_equal_p (SET_SRC (set), x))
601 reg_equiv_init[i] = insn;
606 /* If this insn is setting a MEM from a register equivalent to it,
607 this is the equivalencing insn. */
608 else if (set && GET_CODE (SET_DEST (set)) == MEM
609 && GET_CODE (SET_SRC (set)) == REG
610 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
611 && rtx_equal_p (SET_DEST (set),
612 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
613 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
615 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
616 scan_paradoxical_subregs (PATTERN (insn));
619 /* Does this function require a frame pointer? */
621 frame_pointer_needed = (! flag_omit_frame_pointer
622 #ifdef EXIT_IGNORE_STACK
623 /* ?? If EXIT_IGNORE_STACK is set, we will not save
624 and restore sp for alloca. So we can't eliminate
625 the frame pointer in that case. At some point,
626 we should improve this by emitting the
627 sp-adjusting insns for this case. */
628 || (current_function_calls_alloca
629 && EXIT_IGNORE_STACK)
631 || FRAME_POINTER_REQUIRED);
635 /* Initialize the table of registers to eliminate. The way we do this
636 depends on how the eliminable registers were defined. */
637 #ifdef ELIMINABLE_REGS
638 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
640 ep->can_eliminate = ep->can_eliminate_previous
641 = (CAN_ELIMINATE (ep->from, ep->to)
642 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
645 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
646 = ! frame_pointer_needed;
649 /* Count the number of eliminable registers and build the FROM and TO
650 REG rtx's. Note that code in gen_rtx will cause, e.g.,
651 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
652 We depend on this. */
653 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
655 num_eliminable += ep->can_eliminate;
656 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
657 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
660 num_labels = max_label_num () - get_first_label_num ();
662 /* Allocate the tables used to store offset information at labels. */
663 offsets_known_at = (char *) alloca (num_labels);
665 = (int (*)[NUM_ELIMINABLE_REGS])
666 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
668 offsets_known_at -= get_first_label_num ();
669 offsets_at -= get_first_label_num ();
671 /* Alter each pseudo-reg rtx to contain its hard reg number.
672 Assign stack slots to the pseudos that lack hard regs or equivalents.
673 Do not touch virtual registers. */
675 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
678 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
679 because the stack size may be a part of the offset computation for
680 register elimination. */
681 assign_stack_local (BLKmode, 0, 0);
683 /* If we have some registers we think can be eliminated, scan all insns to
684 see if there is an insn that sets one of these registers to something
685 other than itself plus a constant. If so, the register cannot be
686 eliminated. Doing this scan here eliminates an extra pass through the
687 main reload loop in the most common case where register elimination
689 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
690 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
691 || GET_CODE (insn) == CALL_INSN)
692 note_stores (PATTERN (insn), mark_not_eliminable);
694 #ifndef REGISTER_CONSTRAINTS
695 /* If all the pseudo regs have hard regs,
696 except for those that are never referenced,
697 we know that no reloads are needed. */
698 /* But that is not true if there are register constraints, since
699 in that case some pseudos might be in the wrong kind of hard reg. */
701 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
702 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
705 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
709 /* Compute the order of preference for hard registers to spill.
710 Store them by decreasing preference in potential_reload_regs. */
712 order_regs_for_reload (global);
714 /* So far, no hard regs have been spilled. */
716 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
717 spill_reg_order[i] = -1;
719 /* Initialize to -1, which means take the first spill register. */
722 /* On most machines, we can't use any register explicitly used in the
723 rtl as a spill register. But on some, we have to. Those will have
724 taken care to keep the life of hard regs as short as possible. */
726 #ifdef SMALL_REGISTER_CLASSES
727 if (! SMALL_REGISTER_CLASSES)
729 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
731 /* Spill any hard regs that we know we can't eliminate. */
732 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
733 if (! ep->can_eliminate)
734 spill_hard_reg (ep->from, global, dumpfile, 1);
736 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
737 if (frame_pointer_needed)
738 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
742 for (i = 0; i < N_REG_CLASSES; i++)
744 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
745 bzero (basic_block_needs[i], n_basic_blocks);
748 /* From now on, we need to emit any moves without making new pseudos. */
749 reload_in_progress = 1;
751 /* This loop scans the entire function each go-round
752 and repeats until one repetition spills no additional hard regs. */
754 /* This flag is set when a pseudo reg is spilled,
755 to require another pass. Note that getting an additional reload
756 reg does not necessarily imply any pseudo reg was spilled;
757 sometimes we find a reload reg that no pseudo reg was allocated in. */
758 something_changed = 1;
759 /* This flag is set if there are any insns that require reloading. */
760 something_needs_reloads = 0;
761 /* This flag is set if there are any insns that require register
763 something_needs_elimination = 0;
764 while (something_changed)
768 /* For each class, number of reload regs needed in that class.
769 This is the maximum over all insns of the needs in that class
770 of the individual insn. */
771 int max_needs[N_REG_CLASSES];
772 /* For each class, size of group of consecutive regs
773 that is needed for the reloads of this class. */
774 int group_size[N_REG_CLASSES];
775 /* For each class, max number of consecutive groups needed.
776 (Each group contains group_size[CLASS] consecutive registers.) */
777 int max_groups[N_REG_CLASSES];
778 /* For each class, max number needed of regs that don't belong
779 to any of the groups. */
780 int max_nongroups[N_REG_CLASSES];
781 /* For each class, the machine mode which requires consecutive
782 groups of regs of that class.
783 If two different modes ever require groups of one class,
784 they must be the same size and equally restrictive for that class,
785 otherwise we can't handle the complexity. */
786 enum machine_mode group_mode[N_REG_CLASSES];
787 /* Record the insn where each maximum need is first found. */
788 rtx max_needs_insn[N_REG_CLASSES];
789 rtx max_groups_insn[N_REG_CLASSES];
790 rtx max_nongroups_insn[N_REG_CLASSES];
792 HOST_WIDE_INT starting_frame_size = get_frame_size ();
793 int previous_frame_pointer_needed = frame_pointer_needed;
794 static char *reg_class_names[] = REG_CLASS_NAMES;
796 something_changed = 0;
797 bzero ((char *) max_needs, sizeof max_needs);
798 bzero ((char *) max_groups, sizeof max_groups);
799 bzero ((char *) max_nongroups, sizeof max_nongroups);
800 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
801 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
802 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
803 bzero ((char *) group_size, sizeof group_size);
804 for (i = 0; i < N_REG_CLASSES; i++)
805 group_mode[i] = VOIDmode;
807 /* Keep track of which basic blocks are needing the reloads. */
810 /* Remember whether any element of basic_block_needs
811 changes from 0 to 1 in this pass. */
812 new_basic_block_needs = 0;
814 /* Reset all offsets on eliminable registers to their initial values. */
815 #ifdef ELIMINABLE_REGS
816 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
818 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
819 ep->previous_offset = ep->offset
820 = ep->max_offset = ep->initial_offset;
823 #ifdef INITIAL_FRAME_POINTER_OFFSET
824 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
826 if (!FRAME_POINTER_REQUIRED)
828 reg_eliminate[0].initial_offset = 0;
830 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
831 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
834 num_not_at_initial_offset = 0;
836 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
838 /* Set a known offset for each forced label to be at the initial offset
839 of each elimination. We do this because we assume that all
840 computed jumps occur from a location where each elimination is
841 at its initial offset. */
843 for (x = forced_labels; x; x = XEXP (x, 1))
845 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
847 /* For each pseudo register that has an equivalent location defined,
848 try to eliminate any eliminable registers (such as the frame pointer)
849 assuming initial offsets for the replacement register, which
852 If the resulting location is directly addressable, substitute
853 the MEM we just got directly for the old REG.
855 If it is not addressable but is a constant or the sum of a hard reg
856 and constant, it is probably not addressable because the constant is
857 out of range, in that case record the address; we will generate
858 hairy code to compute the address in a register each time it is
859 needed. Similarly if it is a hard register, but one that is not
860 valid as an address register.
862 If the location is not addressable, but does not have one of the
863 above forms, assign a stack slot. We have to do this to avoid the
864 potential of producing lots of reloads if, e.g., a location involves
865 a pseudo that didn't get a hard register and has an equivalent memory
866 location that also involves a pseudo that didn't get a hard register.
868 Perhaps at some point we will improve reload_when_needed handling
869 so this problem goes away. But that's very hairy. */
871 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
872 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
874 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
876 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
878 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
879 else if (CONSTANT_P (XEXP (x, 0))
880 || (GET_CODE (XEXP (x, 0)) == REG
881 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
882 || (GET_CODE (XEXP (x, 0)) == PLUS
883 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
884 && (REGNO (XEXP (XEXP (x, 0), 0))
885 < FIRST_PSEUDO_REGISTER)
886 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
887 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
890 /* Make a new stack slot. Then indicate that something
891 changed so we go back and recompute offsets for
892 eliminable registers because the allocation of memory
893 below might change some offset. reg_equiv_{mem,address}
894 will be set up for this pseudo on the next pass around
896 reg_equiv_memory_loc[i] = 0;
897 reg_equiv_init[i] = 0;
899 something_changed = 1;
903 /* If we allocated another pseudo to the stack, redo elimination
905 if (something_changed)
908 /* If caller-saves needs a group, initialize the group to include
909 the size and mode required for caller-saves. */
911 if (caller_save_group_size > 1)
913 group_mode[(int) caller_save_spill_class] = Pmode;
914 group_size[(int) caller_save_spill_class] = caller_save_group_size;
917 /* Compute the most additional registers needed by any instruction.
918 Collect information separately for each class of regs. */
920 for (insn = first; insn; insn = NEXT_INSN (insn))
922 if (global && this_block + 1 < n_basic_blocks
923 && insn == basic_block_head[this_block+1])
926 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
927 might include REG_LABEL), we need to see what effects this
928 has on the known offsets at labels. */
930 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
931 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
932 && REG_NOTES (insn) != 0))
933 set_label_offsets (insn, insn, 0);
935 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
937 /* Nonzero means don't use a reload reg that overlaps
938 the place where a function value can be returned. */
939 rtx avoid_return_reg = 0;
941 rtx old_body = PATTERN (insn);
942 int old_code = INSN_CODE (insn);
943 rtx old_notes = REG_NOTES (insn);
944 int did_elimination = 0;
946 /* To compute the number of reload registers of each class
947 needed for an insn, we must simulate what choose_reload_regs
948 can do. We do this by splitting an insn into an "input" and
949 an "output" part. RELOAD_OTHER reloads are used in both.
950 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
951 which must be live over the entire input section of reloads,
952 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
953 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
956 The registers needed for output are RELOAD_OTHER and
957 RELOAD_FOR_OUTPUT, which are live for the entire output
958 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
959 reloads for each operand.
961 The total number of registers needed is the maximum of the
962 inputs and outputs. */
966 /* [0] is normal, [1] is nongroup. */
967 int regs[2][N_REG_CLASSES];
968 int groups[N_REG_CLASSES];
971 /* Each `struct needs' corresponds to one RELOAD_... type. */
977 struct needs other_addr;
978 struct needs op_addr;
979 struct needs op_addr_reload;
980 struct needs in_addr[MAX_RECOG_OPERANDS];
981 struct needs out_addr[MAX_RECOG_OPERANDS];
984 /* If needed, eliminate any eliminable registers. */
986 did_elimination = eliminate_regs_in_insn (insn, 0);
988 #ifdef SMALL_REGISTER_CLASSES
989 /* Set avoid_return_reg if this is an insn
990 that might use the value of a function call. */
991 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
993 if (GET_CODE (PATTERN (insn)) == SET)
994 after_call = SET_DEST (PATTERN (insn));
995 else if (GET_CODE (PATTERN (insn)) == PARALLEL
996 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
997 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1001 else if (SMALL_REGISTER_CLASSES
1003 && !(GET_CODE (PATTERN (insn)) == SET
1004 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1006 if (reg_referenced_p (after_call, PATTERN (insn)))
1007 avoid_return_reg = after_call;
1010 #endif /* SMALL_REGISTER_CLASSES */
1012 /* Analyze the instruction. */
1013 find_reloads (insn, 0, spill_indirect_levels, global,
1016 /* Remember for later shortcuts which insns had any reloads or
1017 register eliminations.
1019 One might think that it would be worthwhile to mark insns
1020 that need register replacements but not reloads, but this is
1021 not safe because find_reloads may do some manipulation of
1022 the insn (such as swapping commutative operands), which would
1023 be lost when we restore the old pattern after register
1024 replacement. So the actions of find_reloads must be redone in
1025 subsequent passes or in reload_as_needed.
1027 However, it is safe to mark insns that need reloads
1028 but not register replacement. */
1030 PUT_MODE (insn, (did_elimination ? QImode
1031 : n_reloads ? HImode
1032 : GET_MODE (insn) == DImode ? DImode
1035 /* Discard any register replacements done. */
1036 if (did_elimination)
1038 obstack_free (&reload_obstack, reload_firstobj);
1039 PATTERN (insn) = old_body;
1040 INSN_CODE (insn) = old_code;
1041 REG_NOTES (insn) = old_notes;
1042 something_needs_elimination = 1;
1045 /* If this insn has no reloads, we need not do anything except
1046 in the case of a CALL_INSN when we have caller-saves and
1047 caller-save needs reloads. */
1050 && ! (GET_CODE (insn) == CALL_INSN
1051 && caller_save_spill_class != NO_REGS))
1054 something_needs_reloads = 1;
1055 bzero ((char *) &insn_needs, sizeof insn_needs);
1057 /* Count each reload once in every class
1058 containing the reload's own class. */
1060 for (i = 0; i < n_reloads; i++)
1062 register enum reg_class *p;
1063 enum reg_class class = reload_reg_class[i];
1065 enum machine_mode mode;
1067 struct needs *this_needs;
1069 /* Don't count the dummy reloads, for which one of the
1070 regs mentioned in the insn can be used for reloading.
1071 Don't count optional reloads.
1072 Don't count reloads that got combined with others. */
1073 if (reload_reg_rtx[i] != 0
1074 || reload_optional[i] != 0
1075 || (reload_out[i] == 0 && reload_in[i] == 0
1076 && ! reload_secondary_p[i]))
1079 /* Show that a reload register of this class is needed
1080 in this basic block. We do not use insn_needs and
1081 insn_groups because they are overly conservative for
1083 if (global && ! basic_block_needs[(int) class][this_block])
1085 basic_block_needs[(int) class][this_block] = 1;
1086 new_basic_block_needs = 1;
1090 mode = reload_inmode[i];
1091 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1092 mode = reload_outmode[i];
1093 size = CLASS_MAX_NREGS (class, mode);
1095 /* If this class doesn't want a group, determine if we have
1096 a nongroup need or a regular need. We have a nongroup
1097 need if this reload conflicts with a group reload whose
1098 class intersects with this reload's class. */
1102 for (j = 0; j < n_reloads; j++)
1103 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1104 (GET_MODE_SIZE (reload_outmode[j])
1105 > GET_MODE_SIZE (reload_inmode[j]))
1109 && (!reload_optional[j])
1110 && (reload_in[j] != 0 || reload_out[j] != 0
1111 || reload_secondary_p[j])
1112 && reloads_conflict (i, j)
1113 && reg_classes_intersect_p (class,
1114 reload_reg_class[j]))
1120 /* Decide which time-of-use to count this reload for. */
1121 switch (reload_when_needed[i])
1124 this_needs = &insn_needs.other;
1126 case RELOAD_FOR_INPUT:
1127 this_needs = &insn_needs.input;
1129 case RELOAD_FOR_OUTPUT:
1130 this_needs = &insn_needs.output;
1132 case RELOAD_FOR_INSN:
1133 this_needs = &insn_needs.insn;
1135 case RELOAD_FOR_OTHER_ADDRESS:
1136 this_needs = &insn_needs.other_addr;
1138 case RELOAD_FOR_INPUT_ADDRESS:
1139 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1141 case RELOAD_FOR_OUTPUT_ADDRESS:
1142 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1144 case RELOAD_FOR_OPERAND_ADDRESS:
1145 this_needs = &insn_needs.op_addr;
1147 case RELOAD_FOR_OPADDR_ADDR:
1148 this_needs = &insn_needs.op_addr_reload;
1154 enum machine_mode other_mode, allocate_mode;
1156 /* Count number of groups needed separately from
1157 number of individual regs needed. */
1158 this_needs->groups[(int) class]++;
1159 p = reg_class_superclasses[(int) class];
1160 while (*p != LIM_REG_CLASSES)
1161 this_needs->groups[(int) *p++]++;
1163 /* Record size and mode of a group of this class. */
1164 /* If more than one size group is needed,
1165 make all groups the largest needed size. */
1166 if (group_size[(int) class] < size)
1168 other_mode = group_mode[(int) class];
1169 allocate_mode = mode;
1171 group_size[(int) class] = size;
1172 group_mode[(int) class] = mode;
1177 allocate_mode = group_mode[(int) class];
1180 /* Crash if two dissimilar machine modes both need
1181 groups of consecutive regs of the same class. */
1183 if (other_mode != VOIDmode && other_mode != allocate_mode
1184 && ! modes_equiv_for_class_p (allocate_mode,
1186 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1191 this_needs->regs[nongroup_need][(int) class] += 1;
1192 p = reg_class_superclasses[(int) class];
1193 while (*p != LIM_REG_CLASSES)
1194 this_needs->regs[nongroup_need][(int) *p++] += 1;
1200 /* All reloads have been counted for this insn;
1201 now merge the various times of use.
1202 This sets insn_needs, etc., to the maximum total number
1203 of registers needed at any point in this insn. */
1205 for (i = 0; i < N_REG_CLASSES; i++)
1207 int in_max, out_max;
1209 /* Compute normal and nongroup needs. */
1210 for (j = 0; j <= 1; j++)
1212 for (in_max = 0, out_max = 0, k = 0;
1213 k < reload_n_operands; k++)
1216 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1218 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1221 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1222 and operand addresses but not things used to reload
1223 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1224 don't conflict with things needed to reload inputs or
1227 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1228 insn_needs.op_addr_reload.regs[j][i]),
1231 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1233 insn_needs.input.regs[j][i]
1234 = MAX (insn_needs.input.regs[j][i]
1235 + insn_needs.op_addr.regs[j][i]
1236 + insn_needs.insn.regs[j][i],
1237 in_max + insn_needs.input.regs[j][i]);
1239 insn_needs.output.regs[j][i] += out_max;
1240 insn_needs.other.regs[j][i]
1241 += MAX (MAX (insn_needs.input.regs[j][i],
1242 insn_needs.output.regs[j][i]),
1243 insn_needs.other_addr.regs[j][i]);
1247 /* Now compute group needs. */
1248 for (in_max = 0, out_max = 0, j = 0;
1249 j < reload_n_operands; j++)
1251 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1253 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1256 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1257 insn_needs.op_addr_reload.groups[i]),
1259 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1261 insn_needs.input.groups[i]
1262 = MAX (insn_needs.input.groups[i]
1263 + insn_needs.op_addr.groups[i]
1264 + insn_needs.insn.groups[i],
1265 in_max + insn_needs.input.groups[i]);
1267 insn_needs.output.groups[i] += out_max;
1268 insn_needs.other.groups[i]
1269 += MAX (MAX (insn_needs.input.groups[i],
1270 insn_needs.output.groups[i]),
1271 insn_needs.other_addr.groups[i]);
1274 /* If this is a CALL_INSN and caller-saves will need
1275 a spill register, act as if the spill register is
1276 needed for this insn. However, the spill register
1277 can be used by any reload of this insn, so we only
1278 need do something if no need for that class has
1281 The assumption that every CALL_INSN will trigger a
1282 caller-save is highly conservative, however, the number
1283 of cases where caller-saves will need a spill register but
1284 a block containing a CALL_INSN won't need a spill register
1285 of that class should be quite rare.
1287 If a group is needed, the size and mode of the group will
1288 have been set up at the beginning of this loop. */
1290 if (GET_CODE (insn) == CALL_INSN
1291 && caller_save_spill_class != NO_REGS)
1293 /* See if this register would conflict with any reload
1294 that needs a group. */
1295 int nongroup_need = 0;
1296 int *caller_save_needs;
1298 for (j = 0; j < n_reloads; j++)
1299 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1300 (GET_MODE_SIZE (reload_outmode[j])
1301 > GET_MODE_SIZE (reload_inmode[j]))
1305 && reg_classes_intersect_p (caller_save_spill_class,
1306 reload_reg_class[j]))
1313 = (caller_save_group_size > 1
1314 ? insn_needs.other.groups
1315 : insn_needs.other.regs[nongroup_need]);
1317 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1319 register enum reg_class *p
1320 = reg_class_superclasses[(int) caller_save_spill_class];
1322 caller_save_needs[(int) caller_save_spill_class]++;
1324 while (*p != LIM_REG_CLASSES)
1325 caller_save_needs[(int) *p++] += 1;
1328 /* Show that this basic block will need a register of
1332 && ! (basic_block_needs[(int) caller_save_spill_class]
1335 basic_block_needs[(int) caller_save_spill_class]
1337 new_basic_block_needs = 1;
1341 #ifdef SMALL_REGISTER_CLASSES
1342 /* If this insn stores the value of a function call,
1343 and that value is in a register that has been spilled,
1344 and if the insn needs a reload in a class
1345 that might use that register as the reload register,
1346 then add add an extra need in that class.
1347 This makes sure we have a register available that does
1348 not overlap the return value. */
1350 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1352 int regno = REGNO (avoid_return_reg);
1354 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1356 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1358 /* First compute the "basic needs", which counts a
1359 need only in the smallest class in which it
1362 bcopy ((char *) insn_needs.other.regs[0],
1363 (char *) basic_needs, sizeof basic_needs);
1364 bcopy ((char *) insn_needs.other.groups,
1365 (char *) basic_groups, sizeof basic_groups);
1367 for (i = 0; i < N_REG_CLASSES; i++)
1371 if (basic_needs[i] >= 0)
1372 for (p = reg_class_superclasses[i];
1373 *p != LIM_REG_CLASSES; p++)
1374 basic_needs[(int) *p] -= basic_needs[i];
1376 if (basic_groups[i] >= 0)
1377 for (p = reg_class_superclasses[i];
1378 *p != LIM_REG_CLASSES; p++)
1379 basic_groups[(int) *p] -= basic_groups[i];
1382 /* Now count extra regs if there might be a conflict with
1383 the return value register. */
1385 for (r = regno; r < regno + nregs; r++)
1386 if (spill_reg_order[r] >= 0)
1387 for (i = 0; i < N_REG_CLASSES; i++)
1388 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1390 if (basic_needs[i] > 0)
1394 insn_needs.other.regs[0][i]++;
1395 p = reg_class_superclasses[i];
1396 while (*p != LIM_REG_CLASSES)
1397 insn_needs.other.regs[0][(int) *p++]++;
1399 if (basic_groups[i] > 0)
1403 insn_needs.other.groups[i]++;
1404 p = reg_class_superclasses[i];
1405 while (*p != LIM_REG_CLASSES)
1406 insn_needs.other.groups[(int) *p++]++;
1410 #endif /* SMALL_REGISTER_CLASSES */
1412 /* For each class, collect maximum need of any insn. */
1414 for (i = 0; i < N_REG_CLASSES; i++)
1416 if (max_needs[i] < insn_needs.other.regs[0][i])
1418 max_needs[i] = insn_needs.other.regs[0][i];
1419 max_needs_insn[i] = insn;
1421 if (max_groups[i] < insn_needs.other.groups[i])
1423 max_groups[i] = insn_needs.other.groups[i];
1424 max_groups_insn[i] = insn;
1426 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1428 max_nongroups[i] = insn_needs.other.regs[1][i];
1429 max_nongroups_insn[i] = insn;
1433 /* Note that there is a continue statement above. */
1436 /* If we allocated any new memory locations, make another pass
1437 since it might have changed elimination offsets. */
1438 if (starting_frame_size != get_frame_size ())
1439 something_changed = 1;
1442 for (i = 0; i < N_REG_CLASSES; i++)
1444 if (max_needs[i] > 0)
1446 ";; Need %d reg%s of class %s (for insn %d).\n",
1447 max_needs[i], max_needs[i] == 1 ? "" : "s",
1448 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1449 if (max_nongroups[i] > 0)
1451 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1452 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1453 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1454 if (max_groups[i] > 0)
1456 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1457 max_groups[i], max_groups[i] == 1 ? "" : "s",
1458 mode_name[(int) group_mode[i]],
1459 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1462 /* If we have caller-saves, set up the save areas and see if caller-save
1463 will need a spill register. */
1465 if (caller_save_needed)
1467 /* Set the offsets for setup_save_areas. */
1468 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
1470 ep->previous_offset = ep->max_offset;
1472 if ( ! setup_save_areas (&something_changed)
1473 && caller_save_spill_class == NO_REGS)
1475 /* The class we will need depends on whether the machine
1476 supports the sum of two registers for an address; see
1477 find_address_reloads for details. */
1479 caller_save_spill_class
1480 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1481 caller_save_group_size
1482 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1483 something_changed = 1;
1487 /* See if anything that happened changes which eliminations are valid.
1488 For example, on the Sparc, whether or not the frame pointer can
1489 be eliminated can depend on what registers have been used. We need
1490 not check some conditions again (such as flag_omit_frame_pointer)
1491 since they can't have changed. */
1493 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1494 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1495 #ifdef ELIMINABLE_REGS
1496 || ! CAN_ELIMINATE (ep->from, ep->to)
1499 ep->can_eliminate = 0;
1501 /* Look for the case where we have discovered that we can't replace
1502 register A with register B and that means that we will now be
1503 trying to replace register A with register C. This means we can
1504 no longer replace register C with register B and we need to disable
1505 such an elimination, if it exists. This occurs often with A == ap,
1506 B == sp, and C == fp. */
1508 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1510 struct elim_table *op;
1511 register int new_to = -1;
1513 if (! ep->can_eliminate && ep->can_eliminate_previous)
1515 /* Find the current elimination for ep->from, if there is a
1517 for (op = reg_eliminate;
1518 op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
1519 if (op->from == ep->from && op->can_eliminate)
1525 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1527 for (op = reg_eliminate;
1528 op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
1529 if (op->from == new_to && op->to == ep->to)
1530 op->can_eliminate = 0;
1534 /* See if any registers that we thought we could eliminate the previous
1535 time are no longer eliminable. If so, something has changed and we
1536 must spill the register. Also, recompute the number of eliminable
1537 registers and see if the frame pointer is needed; it is if there is
1538 no elimination of the frame pointer that we can perform. */
1540 frame_pointer_needed = 1;
1541 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1543 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1544 && ep->to != HARD_FRAME_POINTER_REGNUM)
1545 frame_pointer_needed = 0;
1547 if (! ep->can_eliminate && ep->can_eliminate_previous)
1549 ep->can_eliminate_previous = 0;
1550 spill_hard_reg (ep->from, global, dumpfile, 1);
1551 something_changed = 1;
1556 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1557 /* If we didn't need a frame pointer last time, but we do now, spill
1558 the hard frame pointer. */
1559 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1561 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1562 something_changed = 1;
1566 /* If all needs are met, we win. */
1568 for (i = 0; i < N_REG_CLASSES; i++)
1569 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1571 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1574 /* Not all needs are met; must spill some hard regs. */
1576 /* Put all registers spilled so far back in potential_reload_regs, but
1577 put them at the front, since we've already spilled most of the
1578 pseudos in them (we might have left some pseudos unspilled if they
1579 were in a block that didn't need any spill registers of a conflicting
1580 class. We used to try to mark off the need for those registers,
1581 but doing so properly is very complex and reallocating them is the
1582 simpler approach. First, "pack" potential_reload_regs by pushing
1583 any nonnegative entries towards the end. That will leave room
1584 for the registers we already spilled.
1586 Also, undo the marking of the spill registers from the last time
1587 around in FORBIDDEN_REGS since we will be probably be allocating
1590 ??? It is theoretically possible that we might end up not using one
1591 of our previously-spilled registers in this allocation, even though
1592 they are at the head of the list. It's not clear what to do about
1593 this, but it was no better before, when we marked off the needs met
1594 by the previously-spilled registers. With the current code, globals
1595 can be allocated into these registers, but locals cannot. */
1599 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1600 if (potential_reload_regs[i] != -1)
1601 potential_reload_regs[j--] = potential_reload_regs[i];
1603 for (i = 0; i < n_spills; i++)
1605 potential_reload_regs[i] = spill_regs[i];
1606 spill_reg_order[spill_regs[i]] = -1;
1607 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1613 /* Now find more reload regs to satisfy the remaining need
1614 Do it by ascending class number, since otherwise a reg
1615 might be spilled for a big class and might fail to count
1616 for a smaller class even though it belongs to that class.
1618 Count spilled regs in `spills', and add entries to
1619 `spill_regs' and `spill_reg_order'.
1621 ??? Note there is a problem here.
1622 When there is a need for a group in a high-numbered class,
1623 and also need for non-group regs that come from a lower class,
1624 the non-group regs are chosen first. If there aren't many regs,
1625 they might leave no room for a group.
1627 This was happening on the 386. To fix it, we added the code
1628 that calls possible_group_p, so that the lower class won't
1629 break up the last possible group.
1631 Really fixing the problem would require changes above
1632 in counting the regs already spilled, and in choose_reload_regs.
1633 It might be hard to avoid introducing bugs there. */
1635 CLEAR_HARD_REG_SET (counted_for_groups);
1636 CLEAR_HARD_REG_SET (counted_for_nongroups);
1638 for (class = 0; class < N_REG_CLASSES; class++)
1640 /* First get the groups of registers.
1641 If we got single registers first, we might fragment
1643 while (max_groups[class] > 0)
1645 /* If any single spilled regs happen to form groups,
1646 count them now. Maybe we don't really need
1647 to spill another group. */
1648 count_possible_groups (group_size, group_mode, max_groups,
1651 if (max_groups[class] <= 0)
1654 /* Groups of size 2 (the only groups used on most machines)
1655 are treated specially. */
1656 if (group_size[class] == 2)
1658 /* First, look for a register that will complete a group. */
1659 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1663 j = potential_reload_regs[i];
1664 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1666 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1667 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1668 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1669 && HARD_REGNO_MODE_OK (other, group_mode[class])
1670 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1672 /* We don't want one part of another group.
1673 We could get "two groups" that overlap! */
1674 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1676 (j < FIRST_PSEUDO_REGISTER - 1
1677 && (other = j + 1, spill_reg_order[other] >= 0)
1678 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1679 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1680 && HARD_REGNO_MODE_OK (j, group_mode[class])
1681 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1683 && ! TEST_HARD_REG_BIT (counted_for_groups,
1686 register enum reg_class *p;
1688 /* We have found one that will complete a group,
1689 so count off one group as provided. */
1690 max_groups[class]--;
1691 p = reg_class_superclasses[class];
1692 while (*p != LIM_REG_CLASSES)
1694 if (group_size [(int) *p] <= group_size [class])
1695 max_groups[(int) *p]--;
1699 /* Indicate both these regs are part of a group. */
1700 SET_HARD_REG_BIT (counted_for_groups, j);
1701 SET_HARD_REG_BIT (counted_for_groups, other);
1705 /* We can't complete a group, so start one. */
1706 #ifdef SMALL_REGISTER_CLASSES
1707 /* Look for a pair neither of which is explicitly used. */
1708 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1709 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1712 j = potential_reload_regs[i];
1713 /* Verify that J+1 is a potential reload reg. */
1714 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1715 if (potential_reload_regs[k] == j + 1)
1717 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1718 && k < FIRST_PSEUDO_REGISTER
1719 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1720 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1721 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1722 && HARD_REGNO_MODE_OK (j, group_mode[class])
1723 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1725 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1726 /* Reject J at this stage
1727 if J+1 was explicitly used. */
1728 && ! regs_explicitly_used[j + 1])
1732 /* Now try any group at all
1733 whose registers are not in bad_spill_regs. */
1734 if (i == FIRST_PSEUDO_REGISTER)
1735 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1738 j = potential_reload_regs[i];
1739 /* Verify that J+1 is a potential reload reg. */
1740 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1741 if (potential_reload_regs[k] == j + 1)
1743 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1744 && k < FIRST_PSEUDO_REGISTER
1745 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1746 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1747 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1748 && HARD_REGNO_MODE_OK (j, group_mode[class])
1749 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1751 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1755 /* I should be the index in potential_reload_regs
1756 of the new reload reg we have found. */
1758 if (i >= FIRST_PSEUDO_REGISTER)
1760 /* There are no groups left to spill. */
1761 spill_failure (max_groups_insn[class]);
1767 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1772 /* For groups of more than 2 registers,
1773 look for a sufficient sequence of unspilled registers,
1774 and spill them all at once. */
1775 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1779 j = potential_reload_regs[i];
1781 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1782 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1784 /* Check each reg in the sequence. */
1785 for (k = 0; k < group_size[class]; k++)
1786 if (! (spill_reg_order[j + k] < 0
1787 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1788 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1790 /* We got a full sequence, so spill them all. */
1791 if (k == group_size[class])
1793 register enum reg_class *p;
1794 for (k = 0; k < group_size[class]; k++)
1797 SET_HARD_REG_BIT (counted_for_groups, j + k);
1798 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1799 if (potential_reload_regs[idx] == j + k)
1802 |= new_spill_reg (idx, class,
1803 max_needs, NULL_PTR,
1807 /* We have found one that will complete a group,
1808 so count off one group as provided. */
1809 max_groups[class]--;
1810 p = reg_class_superclasses[class];
1811 while (*p != LIM_REG_CLASSES)
1813 if (group_size [(int) *p]
1814 <= group_size [class])
1815 max_groups[(int) *p]--;
1822 /* We couldn't find any registers for this reload.
1823 Avoid going into an infinite loop. */
1824 if (i >= FIRST_PSEUDO_REGISTER)
1826 /* There are no groups left. */
1827 spill_failure (max_groups_insn[class]);
1834 /* Now similarly satisfy all need for single registers. */
1836 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1838 /* If we spilled enough regs, but they weren't counted
1839 against the non-group need, see if we can count them now.
1840 If so, we can avoid some actual spilling. */
1841 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1842 for (i = 0; i < n_spills; i++)
1843 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1845 && !TEST_HARD_REG_BIT (counted_for_groups,
1847 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1849 && max_nongroups[class] > 0)
1851 register enum reg_class *p;
1853 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1854 max_nongroups[class]--;
1855 p = reg_class_superclasses[class];
1856 while (*p != LIM_REG_CLASSES)
1857 max_nongroups[(int) *p++]--;
1859 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1862 /* Consider the potential reload regs that aren't
1863 yet in use as reload regs, in order of preference.
1864 Find the most preferred one that's in this class. */
1866 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1867 if (potential_reload_regs[i] >= 0
1868 && TEST_HARD_REG_BIT (reg_class_contents[class],
1869 potential_reload_regs[i])
1870 /* If this reg will not be available for groups,
1871 pick one that does not foreclose possible groups.
1872 This is a kludge, and not very general,
1873 but it should be sufficient to make the 386 work,
1874 and the problem should not occur on machines with
1876 && (max_nongroups[class] == 0
1877 || possible_group_p (potential_reload_regs[i], max_groups)))
1880 /* If we couldn't get a register, try to get one even if we
1881 might foreclose possible groups. This may cause problems
1882 later, but that's better than aborting now, since it is
1883 possible that we will, in fact, be able to form the needed
1884 group even with this allocation. */
1886 if (i >= FIRST_PSEUDO_REGISTER
1887 && (asm_noperands (max_needs[class] > 0
1888 ? max_needs_insn[class]
1889 : max_nongroups_insn[class])
1891 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1892 if (potential_reload_regs[i] >= 0
1893 && TEST_HARD_REG_BIT (reg_class_contents[class],
1894 potential_reload_regs[i]))
1897 /* I should be the index in potential_reload_regs
1898 of the new reload reg we have found. */
1900 if (i >= FIRST_PSEUDO_REGISTER)
1902 /* There are no possible registers left to spill. */
1903 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1904 : max_nongroups_insn[class]);
1910 |= new_spill_reg (i, class, max_needs, max_nongroups,
1916 /* If global-alloc was run, notify it of any register eliminations we have
1919 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1920 if (ep->can_eliminate)
1921 mark_elimination (ep->from, ep->to);
1923 /* Insert code to save and restore call-clobbered hard regs
1924 around calls. Tell if what mode to use so that we will process
1925 those insns in reload_as_needed if we have to. */
1927 if (caller_save_needed)
1928 save_call_clobbered_regs (num_eliminable ? QImode
1929 : caller_save_spill_class != NO_REGS ? HImode
1932 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1933 If that insn didn't set the register (i.e., it copied the register to
1934 memory), just delete that insn instead of the equivalencing insn plus
1935 anything now dead. If we call delete_dead_insn on that insn, we may
1936 delete the insn that actually sets the register if the register die
1937 there and that is incorrect. */
1939 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1940 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1941 && GET_CODE (reg_equiv_init[i]) != NOTE)
1943 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1944 delete_dead_insn (reg_equiv_init[i]);
1947 PUT_CODE (reg_equiv_init[i], NOTE);
1948 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1949 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1953 /* Use the reload registers where necessary
1954 by generating move instructions to move the must-be-register
1955 values into or out of the reload registers. */
1957 if (something_needs_reloads || something_needs_elimination
1958 || (caller_save_needed && num_eliminable)
1959 || caller_save_spill_class != NO_REGS)
1960 reload_as_needed (first, global);
1962 /* If we were able to eliminate the frame pointer, show that it is no
1963 longer live at the start of any basic block. If it ls live by
1964 virtue of being in a pseudo, that pseudo will be marked live
1965 and hence the frame pointer will be known to be live via that
1968 if (! frame_pointer_needed)
1969 for (i = 0; i < n_basic_blocks; i++)
1970 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1971 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1972 % REGSET_ELT_BITS));
1974 /* Come here (with failure set nonzero) if we can't get enough spill regs
1975 and we decide not to abort about it. */
1978 reload_in_progress = 0;
1980 /* Now eliminate all pseudo regs by modifying them into
1981 their equivalent memory references.
1982 The REG-rtx's for the pseudos are modified in place,
1983 so all insns that used to refer to them now refer to memory.
1985 For a reg that has a reg_equiv_address, all those insns
1986 were changed by reloading so that no insns refer to it any longer;
1987 but the DECL_RTL of a variable decl may refer to it,
1988 and if so this causes the debugging info to mention the variable. */
1990 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1994 if (reg_equiv_mem[i])
1996 addr = XEXP (reg_equiv_mem[i], 0);
1997 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1999 if (reg_equiv_address[i])
2000 addr = reg_equiv_address[i];
2003 if (reg_renumber[i] < 0)
2005 rtx reg = regno_reg_rtx[i];
2006 XEXP (reg, 0) = addr;
2007 REG_USERVAR_P (reg) = 0;
2008 MEM_IN_STRUCT_P (reg) = in_struct;
2009 PUT_CODE (reg, MEM);
2011 else if (reg_equiv_mem[i])
2012 XEXP (reg_equiv_mem[i], 0) = addr;
2016 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2017 /* Make a pass over all the insns and remove death notes for things that
2018 are no longer registers or no longer die in the insn (e.g., an input
2019 and output pseudo being tied). */
2021 for (insn = first; insn; insn = NEXT_INSN (insn))
2022 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2026 for (note = REG_NOTES (insn); note; note = next)
2028 next = XEXP (note, 1);
2029 if (REG_NOTE_KIND (note) == REG_DEAD
2030 && (GET_CODE (XEXP (note, 0)) != REG
2031 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2032 remove_note (insn, note);
2037 /* Indicate that we no longer have known memory locations or constants. */
2038 reg_equiv_constant = 0;
2039 reg_equiv_memory_loc = 0;
2042 free (scratch_list);
2045 free (scratch_block);
2048 CLEAR_HARD_REG_SET (used_spill_regs);
2049 for (i = 0; i < n_spills; i++)
2050 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2055 /* Nonzero if, after spilling reg REGNO for non-groups,
2056 it will still be possible to find a group if we still need one. */
2059 possible_group_p (regno, max_groups)
2064 int class = (int) NO_REGS;
2066 for (i = 0; i < (int) N_REG_CLASSES; i++)
2067 if (max_groups[i] > 0)
2073 if (class == (int) NO_REGS)
2076 /* Consider each pair of consecutive registers. */
2077 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2079 /* Ignore pairs that include reg REGNO. */
2080 if (i == regno || i + 1 == regno)
2083 /* Ignore pairs that are outside the class that needs the group.
2084 ??? Here we fail to handle the case where two different classes
2085 independently need groups. But this never happens with our
2086 current machine descriptions. */
2087 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2088 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2091 /* A pair of consecutive regs we can still spill does the trick. */
2092 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2093 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2094 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2097 /* A pair of one already spilled and one we can spill does it
2098 provided the one already spilled is not otherwise reserved. */
2099 if (spill_reg_order[i] < 0
2100 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2101 && spill_reg_order[i + 1] >= 0
2102 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2103 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2105 if (spill_reg_order[i + 1] < 0
2106 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2107 && spill_reg_order[i] >= 0
2108 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2109 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2116 /* Count any groups of CLASS that can be formed from the registers recently
2120 count_possible_groups (group_size, group_mode, max_groups, class)
2122 enum machine_mode *group_mode;
2129 /* Now find all consecutive groups of spilled registers
2130 and mark each group off against the need for such groups.
2131 But don't count them against ordinary need, yet. */
2133 if (group_size[class] == 0)
2136 CLEAR_HARD_REG_SET (new);
2138 /* Make a mask of all the regs that are spill regs in class I. */
2139 for (i = 0; i < n_spills; i++)
2140 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2141 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2142 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2143 SET_HARD_REG_BIT (new, spill_regs[i]);
2145 /* Find each consecutive group of them. */
2146 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2147 if (TEST_HARD_REG_BIT (new, i)
2148 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2149 && HARD_REGNO_MODE_OK (i, group_mode[class]))
2151 for (j = 1; j < group_size[class]; j++)
2152 if (! TEST_HARD_REG_BIT (new, i + j))
2155 if (j == group_size[class])
2157 /* We found a group. Mark it off against this class's need for
2158 groups, and against each superclass too. */
2159 register enum reg_class *p;
2161 max_groups[class]--;
2162 p = reg_class_superclasses[class];
2163 while (*p != LIM_REG_CLASSES)
2165 if (group_size [(int) *p] <= group_size [class])
2166 max_groups[(int) *p]--;
2170 /* Don't count these registers again. */
2171 for (j = 0; j < group_size[class]; j++)
2172 SET_HARD_REG_BIT (counted_for_groups, i + j);
2175 /* Skip to the last reg in this group. When i is incremented above,
2176 it will then point to the first reg of the next possible group. */
2181 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2182 another mode that needs to be reloaded for the same register class CLASS.
2183 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2184 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2186 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2187 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2188 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2189 causes unnecessary failures on machines requiring alignment of register
2190 groups when the two modes are different sizes, because the larger mode has
2191 more strict alignment rules than the smaller mode. */
2194 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2195 enum machine_mode allocate_mode, other_mode;
2196 enum reg_class class;
2199 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2201 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2202 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2203 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2209 /* Handle the failure to find a register to spill.
2210 INSN should be one of the insns which needed this particular spill reg. */
2213 spill_failure (insn)
2216 if (asm_noperands (PATTERN (insn)) >= 0)
2217 error_for_asm (insn, "`asm' needs too many reloads");
2219 fatal_insn ("Unable to find a register to spill.", insn);
2222 /* Add a new register to the tables of available spill-registers
2223 (as well as spilling all pseudos allocated to the register).
2224 I is the index of this register in potential_reload_regs.
2225 CLASS is the regclass whose need is being satisfied.
2226 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2227 so that this register can count off against them.
2228 MAX_NONGROUPS is 0 if this register is part of a group.
2229 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2232 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2240 register enum reg_class *p;
2242 int regno = potential_reload_regs[i];
2244 if (i >= FIRST_PSEUDO_REGISTER)
2245 abort (); /* Caller failed to find any register. */
2247 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2248 fatal ("fixed or forbidden register was spilled.\n\
2249 This may be due to a compiler bug or to impossible asm\n\
2250 statements or clauses.");
2252 /* Make reg REGNO an additional reload reg. */
2254 potential_reload_regs[i] = -1;
2255 spill_regs[n_spills] = regno;
2256 spill_reg_order[regno] = n_spills;
2258 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2260 /* Clear off the needs we just satisfied. */
2263 p = reg_class_superclasses[class];
2264 while (*p != LIM_REG_CLASSES)
2265 max_needs[(int) *p++]--;
2267 if (max_nongroups && max_nongroups[class] > 0)
2269 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2270 max_nongroups[class]--;
2271 p = reg_class_superclasses[class];
2272 while (*p != LIM_REG_CLASSES)
2273 max_nongroups[(int) *p++]--;
2276 /* Spill every pseudo reg that was allocated to this reg
2277 or to something that overlaps this reg. */
2279 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2281 /* If there are some registers still to eliminate and this register
2282 wasn't ever used before, additional stack space may have to be
2283 allocated to store this register. Thus, we may have changed the offset
2284 between the stack and frame pointers, so mark that something has changed.
2285 (If new pseudos were spilled, thus requiring more space, VAL would have
2286 been set non-zero by the call to spill_hard_reg above since additional
2287 reloads may be needed in that case.
2289 One might think that we need only set VAL to 1 if this is a call-used
2290 register. However, the set of registers that must be saved by the
2291 prologue is not identical to the call-used set. For example, the
2292 register used by the call insn for the return PC is a call-used register,
2293 but must be saved by the prologue. */
2294 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2297 regs_ever_live[spill_regs[n_spills]] = 1;
2303 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2304 data that is dead in INSN. */
2307 delete_dead_insn (insn)
2310 rtx prev = prev_real_insn (insn);
2313 /* If the previous insn sets a register that dies in our insn, delete it
2315 if (prev && GET_CODE (PATTERN (prev)) == SET
2316 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2317 && reg_mentioned_p (prev_dest, PATTERN (insn))
2318 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2319 delete_dead_insn (prev);
2321 PUT_CODE (insn, NOTE);
2322 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2323 NOTE_SOURCE_FILE (insn) = 0;
2326 /* Modify the home of pseudo-reg I.
2327 The new home is present in reg_renumber[I].
2329 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2330 or it may be -1, meaning there is none or it is not relevant.
2331 This is used so that all pseudos spilled from a given hard reg
2332 can share one stack slot. */
2335 alter_reg (i, from_reg)
2339 /* When outputting an inline function, this can happen
2340 for a reg that isn't actually used. */
2341 if (regno_reg_rtx[i] == 0)
2344 /* If the reg got changed to a MEM at rtl-generation time,
2346 if (GET_CODE (regno_reg_rtx[i]) != REG)
2349 /* Modify the reg-rtx to contain the new hard reg
2350 number or else to contain its pseudo reg number. */
2351 REGNO (regno_reg_rtx[i])
2352 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2354 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2355 allocate a stack slot for it. */
2357 if (reg_renumber[i] < 0
2358 && reg_n_refs[i] > 0
2359 && reg_equiv_constant[i] == 0
2360 && reg_equiv_memory_loc[i] == 0)
2363 int inherent_size = PSEUDO_REGNO_BYTES (i);
2364 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2367 /* Each pseudo reg has an inherent size which comes from its own mode,
2368 and a total size which provides room for paradoxical subregs
2369 which refer to the pseudo reg in wider modes.
2371 We can use a slot already allocated if it provides both
2372 enough inherent space and enough total space.
2373 Otherwise, we allocate a new slot, making sure that it has no less
2374 inherent space, and no less total space, then the previous slot. */
2377 /* No known place to spill from => no slot to reuse. */
2378 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2379 if (BYTES_BIG_ENDIAN)
2380 /* Cancel the big-endian correction done in assign_stack_local.
2381 Get the address of the beginning of the slot.
2382 This is so we can do a big-endian correction unconditionally
2384 adjust = inherent_size - total_size;
2386 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2388 /* Reuse a stack slot if possible. */
2389 else if (spill_stack_slot[from_reg] != 0
2390 && spill_stack_slot_width[from_reg] >= total_size
2391 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2393 x = spill_stack_slot[from_reg];
2394 /* Allocate a bigger slot. */
2397 /* Compute maximum size needed, both for inherent size
2398 and for total size. */
2399 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2401 if (spill_stack_slot[from_reg])
2403 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2405 mode = GET_MODE (spill_stack_slot[from_reg]);
2406 if (spill_stack_slot_width[from_reg] > total_size)
2407 total_size = spill_stack_slot_width[from_reg];
2409 /* Make a slot with that size. */
2410 x = assign_stack_local (mode, total_size, -1);
2412 if (BYTES_BIG_ENDIAN)
2414 /* Cancel the big-endian correction done in assign_stack_local.
2415 Get the address of the beginning of the slot.
2416 This is so we can do a big-endian correction unconditionally
2418 adjust = GET_MODE_SIZE (mode) - total_size;
2420 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2423 plus_constant (XEXP (x, 0), adjust));
2425 spill_stack_slot[from_reg] = stack_slot;
2426 spill_stack_slot_width[from_reg] = total_size;
2429 /* On a big endian machine, the "address" of the slot
2430 is the address of the low part that fits its inherent mode. */
2431 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2432 adjust += (total_size - inherent_size);
2434 /* If we have any adjustment to make, or if the stack slot is the
2435 wrong mode, make a new stack slot. */
2436 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2438 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2439 plus_constant (XEXP (x, 0), adjust));
2440 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2443 /* Save the stack slot for later. */
2444 reg_equiv_memory_loc[i] = x;
2448 /* Mark the slots in regs_ever_live for the hard regs
2449 used by pseudo-reg number REGNO. */
2452 mark_home_live (regno)
2455 register int i, lim;
2456 i = reg_renumber[regno];
2459 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2461 regs_ever_live[i++] = 1;
2464 /* Mark the registers used in SCRATCH as being live. */
2467 mark_scratch_live (scratch)
2471 int regno = REGNO (scratch);
2472 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2474 for (i = regno; i < lim; i++)
2475 regs_ever_live[i] = 1;
2478 /* This function handles the tracking of elimination offsets around branches.
2480 X is a piece of RTL being scanned.
2482 INSN is the insn that it came from, if any.
2484 INITIAL_P is non-zero if we are to set the offset to be the initial
2485 offset and zero if we are setting the offset of the label to be the
2489 set_label_offsets (x, insn, initial_p)
2494 enum rtx_code code = GET_CODE (x);
2497 struct elim_table *p;
2502 if (LABEL_REF_NONLOCAL_P (x))
2507 /* ... fall through ... */
2510 /* If we know nothing about this label, set the desired offsets. Note
2511 that this sets the offset at a label to be the offset before a label
2512 if we don't know anything about the label. This is not correct for
2513 the label after a BARRIER, but is the best guess we can make. If
2514 we guessed wrong, we will suppress an elimination that might have
2515 been possible had we been able to guess correctly. */
2517 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2519 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2520 offsets_at[CODE_LABEL_NUMBER (x)][i]
2521 = (initial_p ? reg_eliminate[i].initial_offset
2522 : reg_eliminate[i].offset);
2523 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2526 /* Otherwise, if this is the definition of a label and it is
2527 preceded by a BARRIER, set our offsets to the known offset of
2531 && (tem = prev_nonnote_insn (insn)) != 0
2532 && GET_CODE (tem) == BARRIER)
2534 num_not_at_initial_offset = 0;
2535 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2537 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2538 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2539 if (reg_eliminate[i].can_eliminate
2540 && (reg_eliminate[i].offset
2541 != reg_eliminate[i].initial_offset))
2542 num_not_at_initial_offset++;
2547 /* If neither of the above cases is true, compare each offset
2548 with those previously recorded and suppress any eliminations
2549 where the offsets disagree. */
2551 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2552 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2553 != (initial_p ? reg_eliminate[i].initial_offset
2554 : reg_eliminate[i].offset))
2555 reg_eliminate[i].can_eliminate = 0;
2560 set_label_offsets (PATTERN (insn), insn, initial_p);
2562 /* ... fall through ... */
2566 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2567 and hence must have all eliminations at their initial offsets. */
2568 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2569 if (REG_NOTE_KIND (tem) == REG_LABEL)
2570 set_label_offsets (XEXP (tem, 0), insn, 1);
2575 /* Each of the labels in the address vector must be at their initial
2576 offsets. We want the first first for ADDR_VEC and the second
2577 field for ADDR_DIFF_VEC. */
2579 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2580 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2585 /* We only care about setting PC. If the source is not RETURN,
2586 IF_THEN_ELSE, or a label, disable any eliminations not at
2587 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2588 isn't one of those possibilities. For branches to a label,
2589 call ourselves recursively.
2591 Note that this can disable elimination unnecessarily when we have
2592 a non-local goto since it will look like a non-constant jump to
2593 someplace in the current function. This isn't a significant
2594 problem since such jumps will normally be when all elimination
2595 pairs are back to their initial offsets. */
2597 if (SET_DEST (x) != pc_rtx)
2600 switch (GET_CODE (SET_SRC (x)))
2607 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2611 tem = XEXP (SET_SRC (x), 1);
2612 if (GET_CODE (tem) == LABEL_REF)
2613 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2614 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2617 tem = XEXP (SET_SRC (x), 2);
2618 if (GET_CODE (tem) == LABEL_REF)
2619 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2620 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2625 /* If we reach here, all eliminations must be at their initial
2626 offset because we are doing a jump to a variable address. */
2627 for (p = reg_eliminate; p < ®_eliminate[NUM_ELIMINABLE_REGS]; p++)
2628 if (p->offset != p->initial_offset)
2629 p->can_eliminate = 0;
2633 /* Used for communication between the next two function to properly share
2634 the vector for an ASM_OPERANDS. */
2636 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2638 /* Scan X and replace any eliminable registers (such as fp) with a
2639 replacement (such as sp), plus an offset.
2641 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2642 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2643 MEM, we are allowed to replace a sum of a register and the constant zero
2644 with the register, which we cannot do outside a MEM. In addition, we need
2645 to record the fact that a register is referenced outside a MEM.
2647 If INSN is an insn, it is the insn containing X. If we replace a REG
2648 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2649 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2650 that the REG is being modified.
2652 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2653 That's used when we eliminate in expressions stored in notes.
2654 This means, do not set ref_outside_mem even if the reference
2657 If we see a modification to a register we know about, take the
2658 appropriate action (see case SET, below).
2660 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2661 replacements done assuming all offsets are at their initial values. If
2662 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2663 encounter, return the actual location so that find_reloads will do
2664 the proper thing. */
2667 eliminate_regs (x, mem_mode, insn)
2669 enum machine_mode mem_mode;
2672 enum rtx_code code = GET_CODE (x);
2673 struct elim_table *ep;
2698 /* First handle the case where we encounter a bare register that
2699 is eliminable. Replace it with a PLUS. */
2700 if (regno < FIRST_PSEUDO_REGISTER)
2702 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2704 if (ep->from_rtx == x && ep->can_eliminate)
2707 /* Refs inside notes don't count for this purpose. */
2708 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2709 || GET_CODE (insn) == INSN_LIST)))
2710 ep->ref_outside_mem = 1;
2711 return plus_constant (ep->to_rtx, ep->previous_offset);
2715 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2716 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2718 /* In this case, find_reloads would attempt to either use an
2719 incorrect address (if something is not at its initial offset)
2720 or substitute an replaced address into an insn (which loses
2721 if the offset is changed by some later action). So we simply
2722 return the replaced stack slot (assuming it is changed by
2723 elimination) and ignore the fact that this is actually a
2724 reference to the pseudo. Ensure we make a copy of the
2725 address in case it is shared. */
2726 new = eliminate_regs (reg_equiv_memory_loc[regno],
2728 if (new != reg_equiv_memory_loc[regno])
2730 cannot_omit_stores[regno] = 1;
2731 return copy_rtx (new);
2737 /* If this is the sum of an eliminable register and a constant, rework
2739 if (GET_CODE (XEXP (x, 0)) == REG
2740 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2741 && CONSTANT_P (XEXP (x, 1)))
2743 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2745 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2748 /* Refs inside notes don't count for this purpose. */
2749 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2750 || GET_CODE (insn) == INSN_LIST)))
2751 ep->ref_outside_mem = 1;
2753 /* The only time we want to replace a PLUS with a REG (this
2754 occurs when the constant operand of the PLUS is the negative
2755 of the offset) is when we are inside a MEM. We won't want
2756 to do so at other times because that would change the
2757 structure of the insn in a way that reload can't handle.
2758 We special-case the commonest situation in
2759 eliminate_regs_in_insn, so just replace a PLUS with a
2760 PLUS here, unless inside a MEM. */
2761 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2762 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2765 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2766 plus_constant (XEXP (x, 1),
2767 ep->previous_offset));
2770 /* If the register is not eliminable, we are done since the other
2771 operand is a constant. */
2775 /* If this is part of an address, we want to bring any constant to the
2776 outermost PLUS. We will do this by doing register replacement in
2777 our operands and seeing if a constant shows up in one of them.
2779 We assume here this is part of an address (or a "load address" insn)
2780 since an eliminable register is not likely to appear in any other
2783 If we have (plus (eliminable) (reg)), we want to produce
2784 (plus (plus (replacement) (reg) (const))). If this was part of a
2785 normal add insn, (plus (replacement) (reg)) will be pushed as a
2786 reload. This is the desired action. */
2789 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2790 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2792 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2794 /* If one side is a PLUS and the other side is a pseudo that
2795 didn't get a hard register but has a reg_equiv_constant,
2796 we must replace the constant here since it may no longer
2797 be in the position of any operand. */
2798 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2799 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2800 && reg_renumber[REGNO (new1)] < 0
2801 && reg_equiv_constant != 0
2802 && reg_equiv_constant[REGNO (new1)] != 0)
2803 new1 = reg_equiv_constant[REGNO (new1)];
2804 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2805 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2806 && reg_renumber[REGNO (new0)] < 0
2807 && reg_equiv_constant[REGNO (new0)] != 0)
2808 new0 = reg_equiv_constant[REGNO (new0)];
2810 new = form_sum (new0, new1);
2812 /* As above, if we are not inside a MEM we do not want to
2813 turn a PLUS into something else. We might try to do so here
2814 for an addition of 0 if we aren't optimizing. */
2815 if (! mem_mode && GET_CODE (new) != PLUS)
2816 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2824 /* If this is the product of an eliminable register and a
2825 constant, apply the distribute law and move the constant out
2826 so that we have (plus (mult ..) ..). This is needed in order
2827 to keep load-address insns valid. This case is pathological.
2828 We ignore the possibility of overflow here. */
2829 if (GET_CODE (XEXP (x, 0)) == REG
2830 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2831 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2832 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2834 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2837 /* Refs inside notes don't count for this purpose. */
2838 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2839 || GET_CODE (insn) == INSN_LIST)))
2840 ep->ref_outside_mem = 1;
2843 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2844 ep->previous_offset * INTVAL (XEXP (x, 1)));
2847 /* ... fall through ... */
2852 case DIV: case UDIV:
2853 case MOD: case UMOD:
2854 case AND: case IOR: case XOR:
2855 case ROTATERT: case ROTATE:
2856 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2858 case GE: case GT: case GEU: case GTU:
2859 case LE: case LT: case LEU: case LTU:
2861 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2863 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2865 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2866 return gen_rtx (code, GET_MODE (x), new0, new1);
2871 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2874 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2875 if (new != XEXP (x, 0))
2876 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2879 /* ... fall through ... */
2882 /* Now do eliminations in the rest of the chain. If this was
2883 an EXPR_LIST, this might result in allocating more memory than is
2884 strictly needed, but it simplifies the code. */
2887 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2888 if (new != XEXP (x, 1))
2889 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2897 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2898 if (ep->to_rtx == XEXP (x, 0))
2900 int size = GET_MODE_SIZE (mem_mode);
2902 /* If more bytes than MEM_MODE are pushed, account for them. */
2903 #ifdef PUSH_ROUNDING
2904 if (ep->to_rtx == stack_pointer_rtx)
2905 size = PUSH_ROUNDING (size);
2907 if (code == PRE_DEC || code == POST_DEC)
2913 /* Fall through to generic unary operation case. */
2914 case STRICT_LOW_PART:
2916 case SIGN_EXTEND: case ZERO_EXTEND:
2917 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2918 case FLOAT: case FIX:
2919 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2923 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2924 if (new != XEXP (x, 0))
2925 return gen_rtx (code, GET_MODE (x), new);
2929 /* Similar to above processing, but preserve SUBREG_WORD.
2930 Convert (subreg (mem)) to (mem) if not paradoxical.
2931 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2932 pseudo didn't get a hard reg, we must replace this with the
2933 eliminated version of the memory location because push_reloads
2934 may do the replacement in certain circumstances. */
2935 if (GET_CODE (SUBREG_REG (x)) == REG
2936 && (GET_MODE_SIZE (GET_MODE (x))
2937 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2938 && reg_equiv_memory_loc != 0
2939 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2941 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2944 /* If we didn't change anything, we must retain the pseudo. */
2945 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2946 new = SUBREG_REG (x);
2949 /* Otherwise, ensure NEW isn't shared in case we have to reload
2951 new = copy_rtx (new);
2953 /* In this case, we must show that the pseudo is used in this
2954 insn so that delete_output_reload will do the right thing. */
2955 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2956 && GET_CODE (insn) != INSN_LIST)
2957 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2962 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2964 if (new != XEXP (x, 0))
2966 if (GET_CODE (new) == MEM
2967 && (GET_MODE_SIZE (GET_MODE (x))
2968 <= GET_MODE_SIZE (GET_MODE (new)))
2969 #ifdef LOAD_EXTEND_OP
2970 /* On these machines we will be reloading what is
2971 inside the SUBREG if it originally was a pseudo and
2972 the inner and outer modes are both a word or
2973 smaller. So leave the SUBREG then. */
2974 && ! (GET_CODE (SUBREG_REG (x)) == REG
2975 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2976 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2977 && (GET_MODE_SIZE (GET_MODE (x))
2978 > GET_MODE_SIZE (GET_MODE (new)))
2979 && INTEGRAL_MODE_P (GET_MODE (new))
2980 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
2984 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2985 enum machine_mode mode = GET_MODE (x);
2987 if (BYTES_BIG_ENDIAN)
2988 offset += (MIN (UNITS_PER_WORD,
2989 GET_MODE_SIZE (GET_MODE (new)))
2990 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2992 PUT_MODE (new, mode);
2993 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2997 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3003 /* If using a register that is the source of an eliminate we still
3004 think can be performed, note it cannot be performed since we don't
3005 know how this register is used. */
3006 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3007 if (ep->from_rtx == XEXP (x, 0))
3008 ep->can_eliminate = 0;
3010 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3011 if (new != XEXP (x, 0))
3012 return gen_rtx (code, GET_MODE (x), new);
3016 /* If clobbering a register that is the replacement register for an
3017 elimination we still think can be performed, note that it cannot
3018 be performed. Otherwise, we need not be concerned about it. */
3019 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3020 if (ep->to_rtx == XEXP (x, 0))
3021 ep->can_eliminate = 0;
3023 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3024 if (new != XEXP (x, 0))
3025 return gen_rtx (code, GET_MODE (x), new);
3031 /* Properly handle sharing input and constraint vectors. */
3032 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3034 /* When we come to a new vector not seen before,
3035 scan all its elements; keep the old vector if none
3036 of them changes; otherwise, make a copy. */
3037 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3038 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3039 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3040 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3043 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3044 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3047 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3048 new_asm_operands_vec = old_asm_operands_vec;
3050 new_asm_operands_vec
3051 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3054 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3055 if (new_asm_operands_vec == old_asm_operands_vec)
3058 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3059 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3060 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3061 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3062 ASM_OPERANDS_SOURCE_FILE (x),
3063 ASM_OPERANDS_SOURCE_LINE (x));
3064 new->volatil = x->volatil;
3069 /* Check for setting a register that we know about. */
3070 if (GET_CODE (SET_DEST (x)) == REG)
3072 /* See if this is setting the replacement register for an
3075 If DEST is the hard frame pointer, we do nothing because we
3076 assume that all assignments to the frame pointer are for
3077 non-local gotos and are being done at a time when they are valid
3078 and do not disturb anything else. Some machines want to
3079 eliminate a fake argument pointer (or even a fake frame pointer)
3080 with either the real frame or the stack pointer. Assignments to
3081 the hard frame pointer must not prevent this elimination. */
3083 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3085 if (ep->to_rtx == SET_DEST (x)
3086 && SET_DEST (x) != hard_frame_pointer_rtx)
3088 /* If it is being incremented, adjust the offset. Otherwise,
3089 this elimination can't be done. */
3090 rtx src = SET_SRC (x);
3092 if (GET_CODE (src) == PLUS
3093 && XEXP (src, 0) == SET_DEST (x)
3094 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3095 ep->offset -= INTVAL (XEXP (src, 1));
3097 ep->can_eliminate = 0;
3100 /* Now check to see we are assigning to a register that can be
3101 eliminated. If so, it must be as part of a PARALLEL, since we
3102 will not have been called if this is a single SET. So indicate
3103 that we can no longer eliminate this reg. */
3104 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3106 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3107 ep->can_eliminate = 0;
3110 /* Now avoid the loop below in this common case. */
3112 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3113 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3115 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3116 write a CLOBBER insn. */
3117 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3118 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3119 && GET_CODE (insn) != INSN_LIST)
3120 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3122 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3123 return gen_rtx (SET, VOIDmode, new0, new1);
3129 /* Our only special processing is to pass the mode of the MEM to our
3130 recursive call and copy the flags. While we are here, handle this
3131 case more efficiently. */
3132 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3133 if (new != XEXP (x, 0))
3135 new = gen_rtx (MEM, GET_MODE (x), new);
3136 new->volatil = x->volatil;
3137 new->unchanging = x->unchanging;
3138 new->in_struct = x->in_struct;
3145 /* Process each of our operands recursively. If any have changed, make a
3147 fmt = GET_RTX_FORMAT (code);
3148 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3152 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3153 if (new != XEXP (x, i) && ! copied)
3155 rtx new_x = rtx_alloc (code);
3156 bcopy ((char *) x, (char *) new_x,
3157 (sizeof (*new_x) - sizeof (new_x->fld)
3158 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3164 else if (*fmt == 'E')
3167 for (j = 0; j < XVECLEN (x, i); j++)
3169 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3170 if (new != XVECEXP (x, i, j) && ! copied_vec)
3172 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3176 rtx new_x = rtx_alloc (code);
3177 bcopy ((char *) x, (char *) new_x,
3178 (sizeof (*new_x) - sizeof (new_x->fld)
3179 + (sizeof (new_x->fld[0])
3180 * GET_RTX_LENGTH (code))));
3184 XVEC (x, i) = new_v;
3187 XVECEXP (x, i, j) = new;
3195 /* Scan INSN and eliminate all eliminable registers in it.
3197 If REPLACE is nonzero, do the replacement destructively. Also
3198 delete the insn as dead it if it is setting an eliminable register.
3200 If REPLACE is zero, do all our allocations in reload_obstack.
3202 If no eliminations were done and this insn doesn't require any elimination
3203 processing (these are not identical conditions: it might be updating sp,
3204 but not referencing fp; this needs to be seen during reload_as_needed so
3205 that the offset between fp and sp can be taken into consideration), zero
3206 is returned. Otherwise, 1 is returned. */
3209 eliminate_regs_in_insn (insn, replace)
3213 rtx old_body = PATTERN (insn);
3214 rtx old_set = single_set (insn);
3217 struct elim_table *ep;
3220 push_obstacks (&reload_obstack, &reload_obstack);
3222 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3223 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3225 /* Check for setting an eliminable register. */
3226 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3227 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3229 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3230 /* If this is setting the frame pointer register to the
3231 hardware frame pointer register and this is an elimination
3232 that will be done (tested above), this insn is really
3233 adjusting the frame pointer downward to compensate for
3234 the adjustment done before a nonlocal goto. */
3235 if (ep->from == FRAME_POINTER_REGNUM
3236 && ep->to == HARD_FRAME_POINTER_REGNUM)
3238 rtx src = SET_SRC (old_set);
3241 if (src == ep->to_rtx)
3243 else if (GET_CODE (src) == PLUS
3244 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3245 offset = INTVAL (XEXP (src, 0)), ok = 1;
3252 = plus_constant (ep->to_rtx, offset - ep->offset);
3254 /* First see if this insn remains valid when we
3255 make the change. If not, keep the INSN_CODE
3256 the same and let reload fit it up. */
3257 validate_change (insn, &SET_SRC (old_set), src, 1);
3258 validate_change (insn, &SET_DEST (old_set),
3260 if (! apply_change_group ())
3262 SET_SRC (old_set) = src;
3263 SET_DEST (old_set) = ep->to_rtx;
3273 /* In this case this insn isn't serving a useful purpose. We
3274 will delete it in reload_as_needed once we know that this
3275 elimination is, in fact, being done.
3277 If REPLACE isn't set, we can't delete this insn, but needn't
3278 process it since it won't be used unless something changes. */
3280 delete_dead_insn (insn);
3285 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3286 in the insn is the negative of the offset in FROM. Substitute
3287 (set (reg) (reg to)) for the insn and change its code.
3289 We have to do this here, rather than in eliminate_regs, do that we can
3290 change the insn code. */
3292 if (GET_CODE (SET_SRC (old_set)) == PLUS
3293 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3294 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3295 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3297 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3298 && ep->can_eliminate)
3300 /* We must stop at the first elimination that will be used.
3301 If this one would replace the PLUS with a REG, do it
3302 now. Otherwise, quit the loop and let eliminate_regs
3303 do its normal replacement. */
3304 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3306 /* We assume here that we don't need a PARALLEL of
3307 any CLOBBERs for this assignment. There's not
3308 much we can do if we do need it. */
3309 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3310 SET_DEST (old_set), ep->to_rtx);
3311 INSN_CODE (insn) = -1;
3320 old_asm_operands_vec = 0;
3322 /* Replace the body of this insn with a substituted form. If we changed
3323 something, return non-zero.
3325 If we are replacing a body that was a (set X (plus Y Z)), try to
3326 re-recognize the insn. We do this in case we had a simple addition
3327 but now can do this as a load-address. This saves an insn in this
3330 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3331 if (new_body != old_body)
3333 /* If we aren't replacing things permanently and we changed something,
3334 make another copy to ensure that all the RTL is new. Otherwise
3335 things can go wrong if find_reload swaps commutative operands
3336 and one is inside RTL that has been copied while the other is not. */
3338 /* Don't copy an asm_operands because (1) there's no need and (2)
3339 copy_rtx can't do it properly when there are multiple outputs. */
3340 if (! replace && asm_noperands (old_body) < 0)
3341 new_body = copy_rtx (new_body);
3343 /* If we had a move insn but now we don't, rerecognize it. This will
3344 cause spurious re-recognition if the old move had a PARALLEL since
3345 the new one still will, but we can't call single_set without
3346 having put NEW_BODY into the insn and the re-recognition won't
3347 hurt in this rare case. */
3349 && ((GET_CODE (SET_SRC (old_set)) == REG
3350 && (GET_CODE (new_body) != SET
3351 || GET_CODE (SET_SRC (new_body)) != REG))
3352 /* If this was a load from or store to memory, compare
3353 the MEM in recog_operand to the one in the insn. If they
3354 are not equal, then rerecognize the insn. */
3356 && ((GET_CODE (SET_SRC (old_set)) == MEM
3357 && SET_SRC (old_set) != recog_operand[1])
3358 || (GET_CODE (SET_DEST (old_set)) == MEM
3359 && SET_DEST (old_set) != recog_operand[0])))
3360 /* If this was an add insn before, rerecognize. */
3361 || GET_CODE (SET_SRC (old_set)) == PLUS))
3363 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3364 /* If recognition fails, store the new body anyway.
3365 It's normal to have recognition failures here
3366 due to bizarre memory addresses; reloading will fix them. */
3367 PATTERN (insn) = new_body;
3370 PATTERN (insn) = new_body;
3375 /* Loop through all elimination pairs. See if any have changed and
3376 recalculate the number not at initial offset.
3378 Compute the maximum offset (minimum offset if the stack does not
3379 grow downward) for each elimination pair.
3381 We also detect a cases where register elimination cannot be done,
3382 namely, if a register would be both changed and referenced outside a MEM
3383 in the resulting insn since such an insn is often undefined and, even if
3384 not, we cannot know what meaning will be given to it. Note that it is
3385 valid to have a register used in an address in an insn that changes it
3386 (presumably with a pre- or post-increment or decrement).
3388 If anything changes, return nonzero. */
3390 num_not_at_initial_offset = 0;
3391 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3393 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3394 ep->can_eliminate = 0;
3396 ep->ref_outside_mem = 0;
3398 if (ep->previous_offset != ep->offset)
3401 ep->previous_offset = ep->offset;
3402 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3403 num_not_at_initial_offset++;
3405 #ifdef STACK_GROWS_DOWNWARD
3406 ep->max_offset = MAX (ep->max_offset, ep->offset);
3408 ep->max_offset = MIN (ep->max_offset, ep->offset);
3413 /* If we changed something, perform elimination in REG_NOTES. This is
3414 needed even when REPLACE is zero because a REG_DEAD note might refer
3415 to a register that we eliminate and could cause a different number
3416 of spill registers to be needed in the final reload pass than in
3418 if (val && REG_NOTES (insn) != 0)
3419 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3427 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3428 replacement we currently believe is valid, mark it as not eliminable if X
3429 modifies DEST in any way other than by adding a constant integer to it.
3431 If DEST is the frame pointer, we do nothing because we assume that
3432 all assignments to the hard frame pointer are nonlocal gotos and are being
3433 done at a time when they are valid and do not disturb anything else.
3434 Some machines want to eliminate a fake argument pointer with either the
3435 frame or stack pointer. Assignments to the hard frame pointer must not
3436 prevent this elimination.
3438 Called via note_stores from reload before starting its passes to scan
3439 the insns of the function. */
3442 mark_not_eliminable (dest, x)
3448 /* A SUBREG of a hard register here is just changing its mode. We should
3449 not see a SUBREG of an eliminable hard register, but check just in
3451 if (GET_CODE (dest) == SUBREG)
3452 dest = SUBREG_REG (dest);
3454 if (dest == hard_frame_pointer_rtx)
3457 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3458 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3459 && (GET_CODE (x) != SET
3460 || GET_CODE (SET_SRC (x)) != PLUS
3461 || XEXP (SET_SRC (x), 0) != dest
3462 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3464 reg_eliminate[i].can_eliminate_previous
3465 = reg_eliminate[i].can_eliminate = 0;
3470 /* Kick all pseudos out of hard register REGNO.
3471 If GLOBAL is nonzero, try to find someplace else to put them.
3472 If DUMPFILE is nonzero, log actions taken on that file.
3474 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3475 because we found we can't eliminate some register. In the case, no pseudos
3476 are allowed to be in the register, even if they are only in a block that
3477 doesn't require spill registers, unlike the case when we are spilling this
3478 hard reg to produce another spill register.
3480 Return nonzero if any pseudos needed to be kicked out. */
3483 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3489 enum reg_class class = REGNO_REG_CLASS (regno);
3490 int something_changed = 0;
3493 SET_HARD_REG_BIT (forbidden_regs, regno);
3496 regs_ever_live[regno] = 1;
3498 /* Spill every pseudo reg that was allocated to this reg
3499 or to something that overlaps this reg. */
3501 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3502 if (reg_renumber[i] >= 0
3503 && reg_renumber[i] <= regno
3505 + HARD_REGNO_NREGS (reg_renumber[i],
3506 PSEUDO_REGNO_MODE (i))
3509 /* If this register belongs solely to a basic block which needed no
3510 spilling of any class that this register is contained in,
3511 leave it be, unless we are spilling this register because
3512 it was a hard register that can't be eliminated. */
3514 if (! cant_eliminate
3515 && basic_block_needs[0]
3516 && reg_basic_block[i] >= 0
3517 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3521 for (p = reg_class_superclasses[(int) class];
3522 *p != LIM_REG_CLASSES; p++)
3523 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3526 if (*p == LIM_REG_CLASSES)
3530 /* Mark it as no longer having a hard register home. */
3531 reg_renumber[i] = -1;
3532 /* We will need to scan everything again. */
3533 something_changed = 1;
3535 retry_global_alloc (i, forbidden_regs);
3537 alter_reg (i, regno);
3540 if (reg_renumber[i] == -1)
3541 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3543 fprintf (dumpfile, " Register %d now in %d.\n\n",
3544 i, reg_renumber[i]);
3547 for (i = 0; i < scratch_list_length; i++)
3549 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3551 if (! cant_eliminate && basic_block_needs[0]
3552 && ! basic_block_needs[(int) class][scratch_block[i]])
3556 for (p = reg_class_superclasses[(int) class];
3557 *p != LIM_REG_CLASSES; p++)
3558 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3561 if (*p == LIM_REG_CLASSES)
3564 PUT_CODE (scratch_list[i], SCRATCH);
3565 scratch_list[i] = 0;
3566 something_changed = 1;
3571 return something_changed;
3574 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3575 Also mark any hard registers used to store user variables as
3576 forbidden from being used for spill registers. */
3579 scan_paradoxical_subregs (x)
3584 register enum rtx_code code = GET_CODE (x);
3589 #ifdef SMALL_REGISTER_CLASSES
3590 if (SMALL_REGISTER_CLASSES
3591 && REGNO (x) < FIRST_PSEUDO_REGISTER
3592 && REG_USERVAR_P (x))
3593 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3609 if (GET_CODE (SUBREG_REG (x)) == REG
3610 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3611 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3612 = GET_MODE_SIZE (GET_MODE (x));
3616 fmt = GET_RTX_FORMAT (code);
3617 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3620 scan_paradoxical_subregs (XEXP (x, i));
3621 else if (fmt[i] == 'E')
3624 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3625 scan_paradoxical_subregs (XVECEXP (x, i, j));
3631 hard_reg_use_compare (p1p, p2p)
3632 const GENERIC_PTR p1p;
3633 const GENERIC_PTR p2p;
3635 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3636 *p2 = (struct hard_reg_n_uses *)p2p;
3637 int tem = p1->uses - p2->uses;
3638 if (tem != 0) return tem;
3639 /* If regs are equally good, sort by regno,
3640 so that the results of qsort leave nothing to chance. */
3641 return p1->regno - p2->regno;
3644 /* Choose the order to consider regs for use as reload registers
3645 based on how much trouble would be caused by spilling one.
3646 Store them in order of decreasing preference in potential_reload_regs. */
3649 order_regs_for_reload (global)
3656 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3658 CLEAR_HARD_REG_SET (bad_spill_regs);
3660 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3661 potential_reload_regs[i] = -1;
3663 /* Count number of uses of each hard reg by pseudo regs allocated to it
3664 and then order them by decreasing use. */
3666 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3668 hard_reg_n_uses[i].uses = 0;
3669 hard_reg_n_uses[i].regno = i;
3672 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3674 int regno = reg_renumber[i];
3677 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3680 /* If allocated by local-alloc, show more uses since
3681 we're not going to be able to reallocate it, but
3682 we might if allocated by global alloc. */
3683 if (global && reg_allocno[i] < 0)
3684 hard_reg_n_uses[regno].uses += (reg_n_refs[i] + 1) / 2;
3686 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3689 large += reg_n_refs[i];
3692 /* Now fixed registers (which cannot safely be used for reloading)
3693 get a very high use count so they will be considered least desirable.
3694 Registers used explicitly in the rtl code are almost as bad. */
3696 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3700 hard_reg_n_uses[i].uses += 2 * large + 2;
3701 SET_HARD_REG_BIT (bad_spill_regs, i);
3703 else if (regs_explicitly_used[i])
3705 hard_reg_n_uses[i].uses += large + 1;
3706 /* ??? We are doing this here because of the potential that
3707 bad code may be generated if a register explicitly used in
3708 an insn was used as a spill register for that insn. But
3709 not using these are spill registers may lose on some machine.
3710 We'll have to see how this works out. */
3711 #ifdef SMALL_REGISTER_CLASSES
3712 if (! SMALL_REGISTER_CLASSES)
3714 SET_HARD_REG_BIT (bad_spill_regs, i);
3717 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3718 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3720 #ifdef ELIMINABLE_REGS
3721 /* If registers other than the frame pointer are eliminable, mark them as
3723 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3725 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3726 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3730 /* Prefer registers not so far used, for use in temporary loading.
3731 Among them, if REG_ALLOC_ORDER is defined, use that order.
3732 Otherwise, prefer registers not preserved by calls. */
3734 #ifdef REG_ALLOC_ORDER
3735 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3737 int regno = reg_alloc_order[i];
3739 if (hard_reg_n_uses[regno].uses == 0)
3740 potential_reload_regs[o++] = regno;
3743 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3745 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3746 potential_reload_regs[o++] = i;
3748 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3750 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3751 potential_reload_regs[o++] = i;
3755 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3756 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3758 /* Now add the regs that are already used,
3759 preferring those used less often. The fixed and otherwise forbidden
3760 registers will be at the end of this list. */
3762 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3763 if (hard_reg_n_uses[i].uses != 0)
3764 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3767 /* Used in reload_as_needed to sort the spilled regs. */
3770 compare_spill_regs (r1p, r2p)
3771 const GENERIC_PTR r1p;
3772 const GENERIC_PTR r2p;
3774 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3778 /* Reload pseudo-registers into hard regs around each insn as needed.
3779 Additional register load insns are output before the insn that needs it
3780 and perhaps store insns after insns that modify the reloaded pseudo reg.
3782 reg_last_reload_reg and reg_reloaded_contents keep track of
3783 which registers are already available in reload registers.
3784 We update these for the reloads that we perform,
3785 as the insns are scanned. */
3788 reload_as_needed (first, live_known)
3798 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3799 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3800 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3801 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3802 reg_has_output_reload = (char *) alloca (max_regno);
3803 for (i = 0; i < n_spills; i++)
3805 reg_reloaded_contents[i] = -1;
3806 reg_reloaded_insn[i] = 0;
3809 /* Reset all offsets on eliminable registers to their initial values. */
3810 #ifdef ELIMINABLE_REGS
3811 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3813 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3814 reg_eliminate[i].initial_offset);
3815 reg_eliminate[i].previous_offset
3816 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3819 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3820 reg_eliminate[0].previous_offset
3821 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3824 num_not_at_initial_offset = 0;
3826 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3827 pack registers with group needs. */
3830 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3831 for (i = 0; i < n_spills; i++)
3832 spill_reg_order[spill_regs[i]] = i;
3835 for (insn = first; insn;)
3837 register rtx next = NEXT_INSN (insn);
3839 /* Notice when we move to a new basic block. */
3840 if (live_known && this_block + 1 < n_basic_blocks
3841 && insn == basic_block_head[this_block+1])
3844 /* If we pass a label, copy the offsets from the label information
3845 into the current offsets of each elimination. */
3846 if (GET_CODE (insn) == CODE_LABEL)
3848 num_not_at_initial_offset = 0;
3849 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3851 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3852 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3853 if (reg_eliminate[i].can_eliminate
3854 && (reg_eliminate[i].offset
3855 != reg_eliminate[i].initial_offset))
3856 num_not_at_initial_offset++;
3860 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3862 rtx avoid_return_reg = 0;
3863 rtx oldpat = PATTERN (insn);
3865 #ifdef SMALL_REGISTER_CLASSES
3866 /* Set avoid_return_reg if this is an insn
3867 that might use the value of a function call. */
3868 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
3870 if (GET_CODE (PATTERN (insn)) == SET)
3871 after_call = SET_DEST (PATTERN (insn));
3872 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3873 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3874 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3878 else if (SMALL_REGISTER_CLASSES
3880 && !(GET_CODE (PATTERN (insn)) == SET
3881 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3883 if (reg_referenced_p (after_call, PATTERN (insn)))
3884 avoid_return_reg = after_call;
3887 #endif /* SMALL_REGISTER_CLASSES */
3889 /* If this is a USE and CLOBBER of a MEM, ensure that any
3890 references to eliminable registers have been removed. */
3892 if ((GET_CODE (PATTERN (insn)) == USE
3893 || GET_CODE (PATTERN (insn)) == CLOBBER)
3894 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3895 XEXP (XEXP (PATTERN (insn), 0), 0)
3896 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3897 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3899 /* If we need to do register elimination processing, do so.
3900 This might delete the insn, in which case we are done. */
3901 if (num_eliminable && GET_MODE (insn) == QImode)
3903 eliminate_regs_in_insn (insn, 1);
3904 if (GET_CODE (insn) == NOTE)
3911 if (GET_MODE (insn) == VOIDmode)
3913 /* First find the pseudo regs that must be reloaded for this insn.
3914 This info is returned in the tables reload_... (see reload.h).
3915 Also modify the body of INSN by substituting RELOAD
3916 rtx's for those pseudo regs. */
3919 bzero (reg_has_output_reload, max_regno);
3920 CLEAR_HARD_REG_SET (reg_is_output_reload);
3922 find_reloads (insn, 1, spill_indirect_levels, live_known,
3928 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3932 /* If this block has not had spilling done for a
3933 particular clas and we have any non-optionals that need a
3934 spill reg in that class, abort. */
3936 for (class = 0; class < N_REG_CLASSES; class++)
3937 if (basic_block_needs[class] != 0
3938 && basic_block_needs[class][this_block] == 0)
3939 for (i = 0; i < n_reloads; i++)
3940 if (class == (int) reload_reg_class[i]
3941 && reload_reg_rtx[i] == 0
3942 && ! reload_optional[i]
3943 && (reload_in[i] != 0 || reload_out[i] != 0
3944 || reload_secondary_p[i] != 0))
3945 fatal_insn ("Non-optional registers need a spill register", insn);
3947 /* Now compute which reload regs to reload them into. Perhaps
3948 reusing reload regs from previous insns, or else output
3949 load insns to reload them. Maybe output store insns too.
3950 Record the choices of reload reg in reload_reg_rtx. */
3951 choose_reload_regs (insn, avoid_return_reg);
3953 #ifdef SMALL_REGISTER_CLASSES
3954 /* Merge any reloads that we didn't combine for fear of
3955 increasing the number of spill registers needed but now
3956 discover can be safely merged. */
3957 if (SMALL_REGISTER_CLASSES)
3958 merge_assigned_reloads (insn);
3961 /* Generate the insns to reload operands into or out of
3962 their reload regs. */
3963 emit_reload_insns (insn);
3965 /* Substitute the chosen reload regs from reload_reg_rtx
3966 into the insn's body (or perhaps into the bodies of other
3967 load and store insn that we just made for reloading
3968 and that we moved the structure into). */
3971 /* If this was an ASM, make sure that all the reload insns
3972 we have generated are valid. If not, give an error
3975 if (asm_noperands (PATTERN (insn)) >= 0)
3976 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3977 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3978 && (recog_memoized (p) < 0
3979 || (insn_extract (p),
3980 ! constrain_operands (INSN_CODE (p), 1))))
3982 error_for_asm (insn,
3983 "`asm' operand requires impossible reload");
3985 NOTE_SOURCE_FILE (p) = 0;
3986 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3989 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3990 is no longer validly lying around to save a future reload.
3991 Note that this does not detect pseudos that were reloaded
3992 for this insn in order to be stored in
3993 (obeying register constraints). That is correct; such reload
3994 registers ARE still valid. */
3995 note_stores (oldpat, forget_old_reloads_1);
3997 /* There may have been CLOBBER insns placed after INSN. So scan
3998 between INSN and NEXT and use them to forget old reloads. */
3999 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4000 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4001 note_stores (PATTERN (x), forget_old_reloads_1);
4004 /* Likewise for regs altered by auto-increment in this insn.
4005 But note that the reg-notes are not changed by reloading:
4006 they still contain the pseudo-regs, not the spill regs. */
4007 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4008 if (REG_NOTE_KIND (x) == REG_INC)
4010 /* See if this pseudo reg was reloaded in this insn.
4011 If so, its last-reload info is still valid
4012 because it is based on this insn's reload. */
4013 for (i = 0; i < n_reloads; i++)
4014 if (reload_out[i] == XEXP (x, 0))
4018 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4022 /* A reload reg's contents are unknown after a label. */
4023 if (GET_CODE (insn) == CODE_LABEL)
4024 for (i = 0; i < n_spills; i++)
4026 reg_reloaded_contents[i] = -1;
4027 reg_reloaded_insn[i] = 0;
4030 /* Don't assume a reload reg is still good after a call insn
4031 if it is a call-used reg. */
4032 else if (GET_CODE (insn) == CALL_INSN)
4033 for (i = 0; i < n_spills; i++)
4034 if (call_used_regs[spill_regs[i]])
4036 reg_reloaded_contents[i] = -1;
4037 reg_reloaded_insn[i] = 0;
4040 /* In case registers overlap, allow certain insns to invalidate
4041 particular hard registers. */
4043 #ifdef INSN_CLOBBERS_REGNO_P
4044 for (i = 0 ; i < n_spills ; i++)
4045 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4047 reg_reloaded_contents[i] = -1;
4048 reg_reloaded_insn[i] = 0;
4060 /* Discard all record of any value reloaded from X,
4061 or reloaded in X from someplace else;
4062 unless X is an output reload reg of the current insn.
4064 X may be a hard reg (the reload reg)
4065 or it may be a pseudo reg that was reloaded from. */
4068 forget_old_reloads_1 (x, ignored)
4076 /* note_stores does give us subregs of hard regs. */
4077 while (GET_CODE (x) == SUBREG)
4079 offset += SUBREG_WORD (x);
4083 if (GET_CODE (x) != REG)
4086 regno = REGNO (x) + offset;
4088 if (regno >= FIRST_PSEUDO_REGISTER)
4093 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4094 /* Storing into a spilled-reg invalidates its contents.
4095 This can happen if a block-local pseudo is allocated to that reg
4096 and it wasn't spilled because this block's total need is 0.
4097 Then some insn might have an optional reload and use this reg. */
4098 for (i = 0; i < nr; i++)
4099 if (spill_reg_order[regno + i] >= 0
4100 /* But don't do this if the reg actually serves as an output
4101 reload reg in the current instruction. */
4103 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4105 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4106 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4110 /* Since value of X has changed,
4111 forget any value previously copied from it. */
4114 /* But don't forget a copy if this is the output reload
4115 that establishes the copy's validity. */
4116 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4117 reg_last_reload_reg[regno + nr] = 0;
4120 /* For each reload, the mode of the reload register. */
4121 static enum machine_mode reload_mode[MAX_RELOADS];
4123 /* For each reload, the largest number of registers it will require. */
4124 static int reload_nregs[MAX_RELOADS];
4126 /* Comparison function for qsort to decide which of two reloads
4127 should be handled first. *P1 and *P2 are the reload numbers. */
4130 reload_reg_class_lower (r1p, r2p)
4131 const GENERIC_PTR r1p;
4132 const GENERIC_PTR r2p;
4134 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4137 /* Consider required reloads before optional ones. */
4138 t = reload_optional[r1] - reload_optional[r2];
4142 /* Count all solitary classes before non-solitary ones. */
4143 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4144 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4148 /* Aside from solitaires, consider all multi-reg groups first. */
4149 t = reload_nregs[r2] - reload_nregs[r1];
4153 /* Consider reloads in order of increasing reg-class number. */
4154 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4158 /* If reloads are equally urgent, sort by reload number,
4159 so that the results of qsort leave nothing to chance. */
4163 /* The following HARD_REG_SETs indicate when each hard register is
4164 used for a reload of various parts of the current insn. */
4166 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4167 static HARD_REG_SET reload_reg_used;
4168 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4169 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4170 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4171 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4172 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4173 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4174 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4175 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4176 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4177 static HARD_REG_SET reload_reg_used_in_op_addr;
4178 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4179 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4180 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4181 static HARD_REG_SET reload_reg_used_in_insn;
4182 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4183 static HARD_REG_SET reload_reg_used_in_other_addr;
4185 /* If reg is in use as a reload reg for any sort of reload. */
4186 static HARD_REG_SET reload_reg_used_at_all;
4188 /* If reg is use as an inherited reload. We just mark the first register
4190 static HARD_REG_SET reload_reg_used_for_inherit;
4192 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4193 TYPE. MODE is used to indicate how many consecutive regs are
4197 mark_reload_reg_in_use (regno, opnum, type, mode)
4200 enum reload_type type;
4201 enum machine_mode mode;
4203 int nregs = HARD_REGNO_NREGS (regno, mode);
4206 for (i = regno; i < nregs + regno; i++)
4211 SET_HARD_REG_BIT (reload_reg_used, i);
4214 case RELOAD_FOR_INPUT_ADDRESS:
4215 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4218 case RELOAD_FOR_OUTPUT_ADDRESS:
4219 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4222 case RELOAD_FOR_OPERAND_ADDRESS:
4223 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4226 case RELOAD_FOR_OPADDR_ADDR:
4227 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4230 case RELOAD_FOR_OTHER_ADDRESS:
4231 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4234 case RELOAD_FOR_INPUT:
4235 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4238 case RELOAD_FOR_OUTPUT:
4239 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4242 case RELOAD_FOR_INSN:
4243 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4247 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4251 /* Similarly, but show REGNO is no longer in use for a reload. */
4254 clear_reload_reg_in_use (regno, opnum, type, mode)
4257 enum reload_type type;
4258 enum machine_mode mode;
4260 int nregs = HARD_REGNO_NREGS (regno, mode);
4263 for (i = regno; i < nregs + regno; i++)
4268 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4271 case RELOAD_FOR_INPUT_ADDRESS:
4272 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4275 case RELOAD_FOR_OUTPUT_ADDRESS:
4276 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4279 case RELOAD_FOR_OPERAND_ADDRESS:
4280 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4283 case RELOAD_FOR_OPADDR_ADDR:
4284 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4287 case RELOAD_FOR_OTHER_ADDRESS:
4288 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4291 case RELOAD_FOR_INPUT:
4292 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4295 case RELOAD_FOR_OUTPUT:
4296 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4299 case RELOAD_FOR_INSN:
4300 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4306 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4307 specified by OPNUM and TYPE. */
4310 reload_reg_free_p (regno, opnum, type)
4313 enum reload_type type;
4317 /* In use for a RELOAD_OTHER means it's not available for anything. */
4318 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4324 /* In use for anything means we can't use it for RELOAD_OTHER. */
4325 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4326 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4327 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4330 for (i = 0; i < reload_n_operands; i++)
4331 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4332 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4333 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4334 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4339 case RELOAD_FOR_INPUT:
4340 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4341 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4344 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4347 /* If it is used for some other input, can't use it. */
4348 for (i = 0; i < reload_n_operands; i++)
4349 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4352 /* If it is used in a later operand's address, can't use it. */
4353 for (i = opnum + 1; i < reload_n_operands; i++)
4354 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4359 case RELOAD_FOR_INPUT_ADDRESS:
4360 /* Can't use a register if it is used for an input address for this
4361 operand or used as an input in an earlier one. */
4362 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4365 for (i = 0; i < opnum; i++)
4366 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4371 case RELOAD_FOR_OUTPUT_ADDRESS:
4372 /* Can't use a register if it is used for an output address for this
4373 operand or used as an output in this or a later operand. */
4374 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4377 for (i = opnum; i < reload_n_operands; i++)
4378 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4383 case RELOAD_FOR_OPERAND_ADDRESS:
4384 for (i = 0; i < reload_n_operands; i++)
4385 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4388 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4389 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4391 case RELOAD_FOR_OPADDR_ADDR:
4392 for (i = 0; i < reload_n_operands; i++)
4393 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4396 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4398 case RELOAD_FOR_OUTPUT:
4399 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4400 outputs, or an operand address for this or an earlier output. */
4401 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4404 for (i = 0; i < reload_n_operands; i++)
4405 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4408 for (i = 0; i <= opnum; i++)
4409 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4414 case RELOAD_FOR_INSN:
4415 for (i = 0; i < reload_n_operands; i++)
4416 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4417 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4420 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4421 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4423 case RELOAD_FOR_OTHER_ADDRESS:
4424 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4429 /* Return 1 if the value in reload reg REGNO, as used by a reload
4430 needed for the part of the insn specified by OPNUM and TYPE,
4431 is not in use for a reload in any prior part of the insn.
4433 We can assume that the reload reg was already tested for availability
4434 at the time it is needed, and we should not check this again,
4435 in case the reg has already been marked in use. */
4438 reload_reg_free_before_p (regno, opnum, type)
4441 enum reload_type type;
4447 case RELOAD_FOR_OTHER_ADDRESS:
4448 /* These always come first. */
4452 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4454 /* If this use is for part of the insn,
4455 check the reg is not in use for any prior part. It is tempting
4456 to try to do this by falling through from objecs that occur
4457 later in the insn to ones that occur earlier, but that will not
4458 correctly take into account the fact that here we MUST ignore
4459 things that would prevent the register from being allocated in
4460 the first place, since we know that it was allocated. */
4462 case RELOAD_FOR_OUTPUT_ADDRESS:
4463 /* Earlier reloads are for earlier outputs or their addresses,
4464 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4465 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4467 for (i = 0; i < opnum; i++)
4468 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4469 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4472 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4475 for (i = 0; i < reload_n_operands; i++)
4476 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4477 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4480 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4481 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4482 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4484 case RELOAD_FOR_OUTPUT:
4485 /* This can't be used in the output address for this operand and
4486 anything that can't be used for it, except that we've already
4487 tested for RELOAD_FOR_INSN objects. */
4489 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4492 for (i = 0; i < opnum; i++)
4493 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4494 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4497 for (i = 0; i < reload_n_operands; i++)
4498 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4499 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4500 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4503 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4505 case RELOAD_FOR_OPERAND_ADDRESS:
4506 case RELOAD_FOR_OPADDR_ADDR:
4507 case RELOAD_FOR_INSN:
4508 /* These can't conflict with inputs, or each other, so all we have to
4509 test is input addresses and the addresses of OTHER items. */
4511 for (i = 0; i < reload_n_operands; i++)
4512 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4515 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4517 case RELOAD_FOR_INPUT:
4518 /* The only things earlier are the address for this and
4519 earlier inputs, other inputs (which we know we don't conflict
4520 with), and addresses of RELOAD_OTHER objects. */
4522 for (i = 0; i <= opnum; i++)
4523 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4526 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4528 case RELOAD_FOR_INPUT_ADDRESS:
4529 /* Similarly, all we have to check is for use in earlier inputs'
4531 for (i = 0; i < opnum; i++)
4532 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4535 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4540 /* Return 1 if the value in reload reg REGNO, as used by a reload
4541 needed for the part of the insn specified by OPNUM and TYPE,
4542 is still available in REGNO at the end of the insn.
4544 We can assume that the reload reg was already tested for availability
4545 at the time it is needed, and we should not check this again,
4546 in case the reg has already been marked in use. */
4549 reload_reg_reaches_end_p (regno, opnum, type)
4552 enum reload_type type;
4559 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4560 its value must reach the end. */
4563 /* If this use is for part of the insn,
4564 its value reaches if no subsequent part uses the same register.
4565 Just like the above function, don't try to do this with lots
4568 case RELOAD_FOR_OTHER_ADDRESS:
4569 /* Here we check for everything else, since these don't conflict
4570 with anything else and everything comes later. */
4572 for (i = 0; i < reload_n_operands; i++)
4573 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4574 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4575 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4576 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4579 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4580 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4581 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4583 case RELOAD_FOR_INPUT_ADDRESS:
4584 /* Similar, except that we check only for this and subsequent inputs
4585 and the address of only subsequent inputs and we do not need
4586 to check for RELOAD_OTHER objects since they are known not to
4589 for (i = opnum; i < reload_n_operands; i++)
4590 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4593 for (i = opnum + 1; i < reload_n_operands; i++)
4594 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4597 for (i = 0; i < reload_n_operands; i++)
4598 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4599 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4602 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4605 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4606 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4608 case RELOAD_FOR_INPUT:
4609 /* Similar to input address, except we start at the next operand for
4610 both input and input address and we do not check for
4611 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4614 for (i = opnum + 1; i < reload_n_operands; i++)
4615 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4616 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4619 /* ... fall through ... */
4621 case RELOAD_FOR_OPERAND_ADDRESS:
4622 /* Check outputs and their addresses. */
4624 for (i = 0; i < reload_n_operands; i++)
4625 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4626 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4631 case RELOAD_FOR_OPADDR_ADDR:
4632 for (i = 0; i < reload_n_operands; i++)
4633 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4634 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4637 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4638 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4640 case RELOAD_FOR_INSN:
4641 /* These conflict with other outputs with RELOAD_OTHER. So
4642 we need only check for output addresses. */
4646 /* ... fall through ... */
4648 case RELOAD_FOR_OUTPUT:
4649 case RELOAD_FOR_OUTPUT_ADDRESS:
4650 /* We already know these can't conflict with a later output. So the
4651 only thing to check are later output addresses. */
4652 for (i = opnum + 1; i < reload_n_operands; i++)
4653 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4662 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4665 This function uses the same algorithm as reload_reg_free_p above. */
4668 reloads_conflict (r1, r2)
4671 enum reload_type r1_type = reload_when_needed[r1];
4672 enum reload_type r2_type = reload_when_needed[r2];
4673 int r1_opnum = reload_opnum[r1];
4674 int r2_opnum = reload_opnum[r2];
4676 /* RELOAD_OTHER conflicts with everything. */
4677 if (r2_type == RELOAD_OTHER)
4680 /* Otherwise, check conflicts differently for each type. */
4684 case RELOAD_FOR_INPUT:
4685 return (r2_type == RELOAD_FOR_INSN
4686 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4687 || r2_type == RELOAD_FOR_OPADDR_ADDR
4688 || r2_type == RELOAD_FOR_INPUT
4689 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4691 case RELOAD_FOR_INPUT_ADDRESS:
4692 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4693 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4695 case RELOAD_FOR_OUTPUT_ADDRESS:
4696 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4697 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4699 case RELOAD_FOR_OPERAND_ADDRESS:
4700 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4701 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4703 case RELOAD_FOR_OPADDR_ADDR:
4704 return (r2_type == RELOAD_FOR_INPUT
4705 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4707 case RELOAD_FOR_OUTPUT:
4708 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4709 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4710 && r2_opnum >= r1_opnum));
4712 case RELOAD_FOR_INSN:
4713 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4714 || r2_type == RELOAD_FOR_INSN
4715 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4717 case RELOAD_FOR_OTHER_ADDRESS:
4718 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4728 /* Vector of reload-numbers showing the order in which the reloads should
4730 short reload_order[MAX_RELOADS];
4732 /* Indexed by reload number, 1 if incoming value
4733 inherited from previous insns. */
4734 char reload_inherited[MAX_RELOADS];
4736 /* For an inherited reload, this is the insn the reload was inherited from,
4737 if we know it. Otherwise, this is 0. */
4738 rtx reload_inheritance_insn[MAX_RELOADS];
4740 /* If non-zero, this is a place to get the value of the reload,
4741 rather than using reload_in. */
4742 rtx reload_override_in[MAX_RELOADS];
4744 /* For each reload, the index in spill_regs of the spill register used,
4745 or -1 if we did not need one of the spill registers for this reload. */
4746 int reload_spill_index[MAX_RELOADS];
4748 /* Find a spill register to use as a reload register for reload R.
4749 LAST_RELOAD is non-zero if this is the last reload for the insn being
4752 Set reload_reg_rtx[R] to the register allocated.
4754 If NOERROR is nonzero, we return 1 if successful,
4755 or 0 if we couldn't find a spill reg and we didn't change anything. */
4758 allocate_reload_reg (r, insn, last_reload, noerror)
4770 /* If we put this reload ahead, thinking it is a group,
4771 then insist on finding a group. Otherwise we can grab a
4772 reg that some other reload needs.
4773 (That can happen when we have a 68000 DATA_OR_FP_REG
4774 which is a group of data regs or one fp reg.)
4775 We need not be so restrictive if there are no more reloads
4778 ??? Really it would be nicer to have smarter handling
4779 for that kind of reg class, where a problem like this is normal.
4780 Perhaps those classes should be avoided for reloading
4781 by use of more alternatives. */
4783 int force_group = reload_nregs[r] > 1 && ! last_reload;
4785 /* If we want a single register and haven't yet found one,
4786 take any reg in the right class and not in use.
4787 If we want a consecutive group, here is where we look for it.
4789 We use two passes so we can first look for reload regs to
4790 reuse, which are already in use for other reloads in this insn,
4791 and only then use additional registers.
4792 I think that maximizing reuse is needed to make sure we don't
4793 run out of reload regs. Suppose we have three reloads, and
4794 reloads A and B can share regs. These need two regs.
4795 Suppose A and B are given different regs.
4796 That leaves none for C. */
4797 for (pass = 0; pass < 2; pass++)
4799 /* I is the index in spill_regs.
4800 We advance it round-robin between insns to use all spill regs
4801 equally, so that inherited reloads have a chance
4802 of leapfrogging each other. Don't do this, however, when we have
4803 group needs and failure would be fatal; if we only have a relatively
4804 small number of spill registers, and more than one of them has
4805 group needs, then by starting in the middle, we may end up
4806 allocating the first one in such a way that we are not left with
4807 sufficient groups to handle the rest. */
4809 if (noerror || ! force_group)
4814 for (count = 0; count < n_spills; count++)
4816 int class = (int) reload_reg_class[r];
4818 i = (i + 1) % n_spills;
4820 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4821 reload_when_needed[r])
4822 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4823 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4824 /* Look first for regs to share, then for unshared. But
4825 don't share regs used for inherited reloads; they are
4826 the ones we want to preserve. */
4828 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4830 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4833 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4834 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4835 (on 68000) got us two FP regs. If NR is 1,
4836 we would reject both of them. */
4838 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4839 /* If we need only one reg, we have already won. */
4842 /* But reject a single reg if we demand a group. */
4847 /* Otherwise check that as many consecutive regs as we need
4849 Also, don't use for a group registers that are
4850 needed for nongroups. */
4851 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4854 regno = spill_regs[i] + nr - 1;
4855 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4856 && spill_reg_order[regno] >= 0
4857 && reload_reg_free_p (regno, reload_opnum[r],
4858 reload_when_needed[r])
4859 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4869 /* If we found something on pass 1, omit pass 2. */
4870 if (count < n_spills)
4874 /* We should have found a spill register by now. */
4875 if (count == n_spills)
4882 /* I is the index in SPILL_REG_RTX of the reload register we are to
4883 allocate. Get an rtx for it and find its register number. */
4885 new = spill_reg_rtx[i];
4887 if (new == 0 || GET_MODE (new) != reload_mode[r])
4888 spill_reg_rtx[i] = new
4889 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4891 regno = true_regnum (new);
4893 /* Detect when the reload reg can't hold the reload mode.
4894 This used to be one `if', but Sequent compiler can't handle that. */
4895 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4897 enum machine_mode test_mode = VOIDmode;
4899 test_mode = GET_MODE (reload_in[r]);
4900 /* If reload_in[r] has VOIDmode, it means we will load it
4901 in whatever mode the reload reg has: to wit, reload_mode[r].
4902 We have already tested that for validity. */
4903 /* Aside from that, we need to test that the expressions
4904 to reload from or into have modes which are valid for this
4905 reload register. Otherwise the reload insns would be invalid. */
4906 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4907 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4908 if (! (reload_out[r] != 0
4909 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4911 /* The reg is OK. */
4914 /* Mark as in use for this insn the reload regs we use
4916 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4917 reload_when_needed[r], reload_mode[r]);
4919 reload_reg_rtx[r] = new;
4920 reload_spill_index[r] = i;
4925 /* The reg is not OK. */
4930 if (asm_noperands (PATTERN (insn)) < 0)
4931 /* It's the compiler's fault. */
4932 fatal_insn ("Could not find a spill register", insn);
4934 /* It's the user's fault; the operand's mode and constraint
4935 don't match. Disable this reload so we don't crash in final. */
4936 error_for_asm (insn,
4937 "`asm' operand constraint incompatible with operand size");
4940 reload_reg_rtx[r] = 0;
4941 reload_optional[r] = 1;
4942 reload_secondary_p[r] = 1;
4947 /* Assign hard reg targets for the pseudo-registers we must reload
4948 into hard regs for this insn.
4949 Also output the instructions to copy them in and out of the hard regs.
4951 For machines with register classes, we are responsible for
4952 finding a reload reg in the proper class. */
4955 choose_reload_regs (insn, avoid_return_reg)
4957 rtx avoid_return_reg;
4960 int max_group_size = 1;
4961 enum reg_class group_class = NO_REGS;
4964 rtx save_reload_reg_rtx[MAX_RELOADS];
4965 char save_reload_inherited[MAX_RELOADS];
4966 rtx save_reload_inheritance_insn[MAX_RELOADS];
4967 rtx save_reload_override_in[MAX_RELOADS];
4968 int save_reload_spill_index[MAX_RELOADS];
4969 HARD_REG_SET save_reload_reg_used;
4970 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4971 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4972 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4973 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4974 HARD_REG_SET save_reload_reg_used_in_op_addr;
4975 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4976 HARD_REG_SET save_reload_reg_used_in_insn;
4977 HARD_REG_SET save_reload_reg_used_in_other_addr;
4978 HARD_REG_SET save_reload_reg_used_at_all;
4980 bzero (reload_inherited, MAX_RELOADS);
4981 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4982 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4984 CLEAR_HARD_REG_SET (reload_reg_used);
4985 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4986 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4987 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4988 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4989 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4991 for (i = 0; i < reload_n_operands; i++)
4993 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4994 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4995 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4996 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4999 #ifdef SMALL_REGISTER_CLASSES
5000 /* Don't bother with avoiding the return reg
5001 if we have no mandatory reload that could use it. */
5002 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5005 int regno = REGNO (avoid_return_reg);
5007 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5010 for (r = regno; r < regno + nregs; r++)
5011 if (spill_reg_order[r] >= 0)
5012 for (j = 0; j < n_reloads; j++)
5013 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5014 && (reload_in[j] != 0 || reload_out[j] != 0
5015 || reload_secondary_p[j])
5017 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5020 avoid_return_reg = 0;
5022 #endif /* SMALL_REGISTER_CLASSES */
5024 #if 0 /* Not needed, now that we can always retry without inheritance. */
5025 /* See if we have more mandatory reloads than spill regs.
5026 If so, then we cannot risk optimizations that could prevent
5027 reloads from sharing one spill register.
5029 Since we will try finding a better register than reload_reg_rtx
5030 unless it is equal to reload_in or reload_out, count such reloads. */
5034 #ifdef SMALL_REGISTER_CLASSES
5035 if (SMALL_REGISTER_CLASSES)
5036 tem = (avoid_return_reg != 0);
5038 for (j = 0; j < n_reloads; j++)
5039 if (! reload_optional[j]
5040 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5041 && (reload_reg_rtx[j] == 0
5042 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5043 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5050 #ifdef SMALL_REGISTER_CLASSES
5051 /* Don't use the subroutine call return reg for a reload
5052 if we are supposed to avoid it. */
5053 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5055 int regno = REGNO (avoid_return_reg);
5057 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5060 for (r = regno; r < regno + nregs; r++)
5061 if (spill_reg_order[r] >= 0)
5062 SET_HARD_REG_BIT (reload_reg_used, r);
5064 #endif /* SMALL_REGISTER_CLASSES */
5066 /* In order to be certain of getting the registers we need,
5067 we must sort the reloads into order of increasing register class.
5068 Then our grabbing of reload registers will parallel the process
5069 that provided the reload registers.
5071 Also note whether any of the reloads wants a consecutive group of regs.
5072 If so, record the maximum size of the group desired and what
5073 register class contains all the groups needed by this insn. */
5075 for (j = 0; j < n_reloads; j++)
5077 reload_order[j] = j;
5078 reload_spill_index[j] = -1;
5081 = (reload_inmode[j] == VOIDmode
5082 || (GET_MODE_SIZE (reload_outmode[j])
5083 > GET_MODE_SIZE (reload_inmode[j])))
5084 ? reload_outmode[j] : reload_inmode[j];
5086 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5088 if (reload_nregs[j] > 1)
5090 max_group_size = MAX (reload_nregs[j], max_group_size);
5091 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5094 /* If we have already decided to use a certain register,
5095 don't use it in another way. */
5096 if (reload_reg_rtx[j])
5097 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5098 reload_when_needed[j], reload_mode[j]);
5102 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5104 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5105 sizeof reload_reg_rtx);
5106 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5107 bcopy ((char *) reload_inheritance_insn,
5108 (char *) save_reload_inheritance_insn,
5109 sizeof reload_inheritance_insn);
5110 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5111 sizeof reload_override_in);
5112 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5113 sizeof reload_spill_index);
5114 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5115 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5116 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5117 reload_reg_used_in_op_addr);
5119 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5120 reload_reg_used_in_op_addr_reload);
5122 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5123 reload_reg_used_in_insn);
5124 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5125 reload_reg_used_in_other_addr);
5127 for (i = 0; i < reload_n_operands; i++)
5129 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5130 reload_reg_used_in_output[i]);
5131 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5132 reload_reg_used_in_input[i]);
5133 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5134 reload_reg_used_in_input_addr[i]);
5135 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5136 reload_reg_used_in_output_addr[i]);
5139 /* If -O, try first with inheritance, then turning it off.
5140 If not -O, don't do inheritance.
5141 Using inheritance when not optimizing leads to paradoxes
5142 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5143 because one side of the comparison might be inherited. */
5145 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5147 /* Process the reloads in order of preference just found.
5148 Beyond this point, subregs can be found in reload_reg_rtx.
5150 This used to look for an existing reloaded home for all
5151 of the reloads, and only then perform any new reloads.
5152 But that could lose if the reloads were done out of reg-class order
5153 because a later reload with a looser constraint might have an old
5154 home in a register needed by an earlier reload with a tighter constraint.
5156 To solve this, we make two passes over the reloads, in the order
5157 described above. In the first pass we try to inherit a reload
5158 from a previous insn. If there is a later reload that needs a
5159 class that is a proper subset of the class being processed, we must
5160 also allocate a spill register during the first pass.
5162 Then make a second pass over the reloads to allocate any reloads
5163 that haven't been given registers yet. */
5165 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5167 for (j = 0; j < n_reloads; j++)
5169 register int r = reload_order[j];
5171 /* Ignore reloads that got marked inoperative. */
5172 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5175 /* If find_reloads chose a to use reload_in or reload_out as a reload
5176 register, we don't need to chose one. Otherwise, try even if it found
5177 one since we might save an insn if we find the value lying around. */
5178 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5179 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5180 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5183 #if 0 /* No longer needed for correct operation.
5184 It might give better code, or might not; worth an experiment? */
5185 /* If this is an optional reload, we can't inherit from earlier insns
5186 until we are sure that any non-optional reloads have been allocated.
5187 The following code takes advantage of the fact that optional reloads
5188 are at the end of reload_order. */
5189 if (reload_optional[r] != 0)
5190 for (i = 0; i < j; i++)
5191 if ((reload_out[reload_order[i]] != 0
5192 || reload_in[reload_order[i]] != 0
5193 || reload_secondary_p[reload_order[i]])
5194 && ! reload_optional[reload_order[i]]
5195 && reload_reg_rtx[reload_order[i]] == 0)
5196 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5199 /* First see if this pseudo is already available as reloaded
5200 for a previous insn. We cannot try to inherit for reloads
5201 that are smaller than the maximum number of registers needed
5202 for groups unless the register we would allocate cannot be used
5205 We could check here to see if this is a secondary reload for
5206 an object that is already in a register of the desired class.
5207 This would avoid the need for the secondary reload register.
5208 But this is complex because we can't easily determine what
5209 objects might want to be loaded via this reload. So let a register
5210 be allocated here. In `emit_reload_insns' we suppress one of the
5211 loads in the case described above. */
5215 register int regno = -1;
5216 enum machine_mode mode;
5218 if (reload_in[r] == 0)
5220 else if (GET_CODE (reload_in[r]) == REG)
5222 regno = REGNO (reload_in[r]);
5223 mode = GET_MODE (reload_in[r]);
5225 else if (GET_CODE (reload_in_reg[r]) == REG)
5227 regno = REGNO (reload_in_reg[r]);
5228 mode = GET_MODE (reload_in_reg[r]);
5231 /* This won't work, since REGNO can be a pseudo reg number.
5232 Also, it takes much more hair to keep track of all the things
5233 that can invalidate an inherited reload of part of a pseudoreg. */
5234 else if (GET_CODE (reload_in[r]) == SUBREG
5235 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5236 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5239 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5241 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5243 if (reg_reloaded_contents[i] == regno
5244 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5245 >= GET_MODE_SIZE (mode))
5246 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5247 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5249 && (reload_nregs[r] == max_group_size
5250 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5252 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5253 reload_when_needed[r])
5254 && reload_reg_free_before_p (spill_regs[i],
5256 reload_when_needed[r]))
5258 /* If a group is needed, verify that all the subsequent
5259 registers still have their values intact. */
5261 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5264 for (k = 1; k < nr; k++)
5265 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5273 /* We found a register that contains the
5274 value we need. If this register is the
5275 same as an `earlyclobber' operand of the
5276 current insn, just mark it as a place to
5277 reload from since we can't use it as the
5278 reload register itself. */
5280 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5281 if (reg_overlap_mentioned_for_reload_p
5282 (reg_last_reload_reg[regno],
5283 reload_earlyclobbers[i1]))
5286 if (i1 != n_earlyclobbers
5287 /* Don't really use the inherited spill reg
5288 if we need it wider than we've got it. */
5289 || (GET_MODE_SIZE (reload_mode[r])
5290 > GET_MODE_SIZE (mode)))
5291 reload_override_in[r] = reg_last_reload_reg[regno];
5295 /* We can use this as a reload reg. */
5296 /* Mark the register as in use for this part of
5298 mark_reload_reg_in_use (spill_regs[i],
5300 reload_when_needed[r],
5302 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5303 reload_inherited[r] = 1;
5304 reload_inheritance_insn[r]
5305 = reg_reloaded_insn[i];
5306 reload_spill_index[r] = i;
5307 for (k = 0; k < nr; k++)
5308 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5316 /* Here's another way to see if the value is already lying around. */
5318 && reload_in[r] != 0
5319 && ! reload_inherited[r]
5320 && reload_out[r] == 0
5321 && (CONSTANT_P (reload_in[r])
5322 || GET_CODE (reload_in[r]) == PLUS
5323 || GET_CODE (reload_in[r]) == REG
5324 || GET_CODE (reload_in[r]) == MEM)
5325 && (reload_nregs[r] == max_group_size
5326 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5329 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5330 -1, NULL_PTR, 0, reload_mode[r]);
5335 if (GET_CODE (equiv) == REG)
5336 regno = REGNO (equiv);
5337 else if (GET_CODE (equiv) == SUBREG)
5339 /* This must be a SUBREG of a hard register.
5340 Make a new REG since this might be used in an
5341 address and not all machines support SUBREGs
5343 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5344 equiv = gen_rtx (REG, reload_mode[r], regno);
5350 /* If we found a spill reg, reject it unless it is free
5351 and of the desired class. */
5353 && ((spill_reg_order[regno] >= 0
5354 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5355 reload_when_needed[r]))
5356 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5360 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5363 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5366 /* We found a register that contains the value we need.
5367 If this register is the same as an `earlyclobber' operand
5368 of the current insn, just mark it as a place to reload from
5369 since we can't use it as the reload register itself. */
5372 for (i = 0; i < n_earlyclobbers; i++)
5373 if (reg_overlap_mentioned_for_reload_p (equiv,
5374 reload_earlyclobbers[i]))
5376 reload_override_in[r] = equiv;
5381 /* JRV: If the equiv register we have found is
5382 explicitly clobbered in the current insn, mark but
5383 don't use, as above. */
5385 if (equiv != 0 && regno_clobbered_p (regno, insn))
5387 reload_override_in[r] = equiv;
5391 /* If we found an equivalent reg, say no code need be generated
5392 to load it, and use it as our reload reg. */
5393 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5395 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5397 reload_reg_rtx[r] = equiv;
5398 reload_inherited[r] = 1;
5400 /* If any of the hard registers in EQUIV are spill
5401 registers, mark them as in use for this insn. */
5402 for (k = 0; k < nr; k++)
5404 i = spill_reg_order[regno + k];
5407 mark_reload_reg_in_use (regno, reload_opnum[r],
5408 reload_when_needed[r],
5410 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5417 /* If we found a register to use already, or if this is an optional
5418 reload, we are done. */
5419 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5422 #if 0 /* No longer needed for correct operation. Might or might not
5423 give better code on the average. Want to experiment? */
5425 /* See if there is a later reload that has a class different from our
5426 class that intersects our class or that requires less register
5427 than our reload. If so, we must allocate a register to this
5428 reload now, since that reload might inherit a previous reload
5429 and take the only available register in our class. Don't do this
5430 for optional reloads since they will force all previous reloads
5431 to be allocated. Also don't do this for reloads that have been
5434 for (i = j + 1; i < n_reloads; i++)
5436 int s = reload_order[i];
5438 if ((reload_in[s] == 0 && reload_out[s] == 0
5439 && ! reload_secondary_p[s])
5440 || reload_optional[s])
5443 if ((reload_reg_class[s] != reload_reg_class[r]
5444 && reg_classes_intersect_p (reload_reg_class[r],
5445 reload_reg_class[s]))
5446 || reload_nregs[s] < reload_nregs[r])
5453 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5457 /* Now allocate reload registers for anything non-optional that
5458 didn't get one yet. */
5459 for (j = 0; j < n_reloads; j++)
5461 register int r = reload_order[j];
5463 /* Ignore reloads that got marked inoperative. */
5464 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5467 /* Skip reloads that already have a register allocated or are
5469 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5472 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5476 /* If that loop got all the way, we have won. */
5481 /* Loop around and try without any inheritance. */
5482 /* First undo everything done by the failed attempt
5483 to allocate with inheritance. */
5484 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5485 sizeof reload_reg_rtx);
5486 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5487 sizeof reload_inherited);
5488 bcopy ((char *) save_reload_inheritance_insn,
5489 (char *) reload_inheritance_insn,
5490 sizeof reload_inheritance_insn);
5491 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5492 sizeof reload_override_in);
5493 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5494 sizeof reload_spill_index);
5495 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5496 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5497 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5498 save_reload_reg_used_in_op_addr);
5499 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5500 save_reload_reg_used_in_op_addr_reload);
5501 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5502 save_reload_reg_used_in_insn);
5503 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5504 save_reload_reg_used_in_other_addr);
5506 for (i = 0; i < reload_n_operands; i++)
5508 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5509 save_reload_reg_used_in_input[i]);
5510 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5511 save_reload_reg_used_in_output[i]);
5512 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5513 save_reload_reg_used_in_input_addr[i]);
5514 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5515 save_reload_reg_used_in_output_addr[i]);
5519 /* If we thought we could inherit a reload, because it seemed that
5520 nothing else wanted the same reload register earlier in the insn,
5521 verify that assumption, now that all reloads have been assigned. */
5523 for (j = 0; j < n_reloads; j++)
5525 register int r = reload_order[j];
5527 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5528 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5530 reload_when_needed[r]))
5531 reload_inherited[r] = 0;
5533 /* If we found a better place to reload from,
5534 validate it in the same fashion, if it is a reload reg. */
5535 if (reload_override_in[r]
5536 && (GET_CODE (reload_override_in[r]) == REG
5537 || GET_CODE (reload_override_in[r]) == SUBREG))
5539 int regno = true_regnum (reload_override_in[r]);
5540 if (spill_reg_order[regno] >= 0
5541 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5542 reload_when_needed[r]))
5543 reload_override_in[r] = 0;
5547 /* Now that reload_override_in is known valid,
5548 actually override reload_in. */
5549 for (j = 0; j < n_reloads; j++)
5550 if (reload_override_in[j])
5551 reload_in[j] = reload_override_in[j];
5553 /* If this reload won't be done because it has been cancelled or is
5554 optional and not inherited, clear reload_reg_rtx so other
5555 routines (such as subst_reloads) don't get confused. */
5556 for (j = 0; j < n_reloads; j++)
5557 if (reload_reg_rtx[j] != 0
5558 && ((reload_optional[j] && ! reload_inherited[j])
5559 || (reload_in[j] == 0 && reload_out[j] == 0
5560 && ! reload_secondary_p[j])))
5562 int regno = true_regnum (reload_reg_rtx[j]);
5564 if (spill_reg_order[regno] >= 0)
5565 clear_reload_reg_in_use (regno, reload_opnum[j],
5566 reload_when_needed[j], reload_mode[j]);
5567 reload_reg_rtx[j] = 0;
5570 /* Record which pseudos and which spill regs have output reloads. */
5571 for (j = 0; j < n_reloads; j++)
5573 register int r = reload_order[j];
5575 i = reload_spill_index[r];
5577 /* I is nonneg if this reload used one of the spill regs.
5578 If reload_reg_rtx[r] is 0, this is an optional reload
5579 that we opted to ignore. */
5580 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5581 && reload_reg_rtx[r] != 0)
5583 register int nregno = REGNO (reload_out[r]);
5586 if (nregno < FIRST_PSEUDO_REGISTER)
5587 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5590 reg_has_output_reload[nregno + nr] = 1;
5594 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5596 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5599 if (reload_when_needed[r] != RELOAD_OTHER
5600 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5601 && reload_when_needed[r] != RELOAD_FOR_INSN)
5607 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5608 reloads of the same item for fear that we might not have enough reload
5609 registers. However, normally they will get the same reload register
5610 and hence actually need not be loaded twice.
5612 Here we check for the most common case of this phenomenon: when we have
5613 a number of reloads for the same object, each of which were allocated
5614 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5615 reload, and is not modified in the insn itself. If we find such,
5616 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5617 This will not increase the number of spill registers needed and will
5618 prevent redundant code. */
5620 #ifdef SMALL_REGISTER_CLASSES
5623 merge_assigned_reloads (insn)
5628 /* Scan all the reloads looking for ones that only load values and
5629 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5630 assigned and not modified by INSN. */
5632 for (i = 0; i < n_reloads; i++)
5634 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5635 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5636 || reg_set_p (reload_reg_rtx[i], insn))
5639 /* Look at all other reloads. Ensure that the only use of this
5640 reload_reg_rtx is in a reload that just loads the same value
5641 as we do. Note that any secondary reloads must be of the identical
5642 class since the values, modes, and result registers are the
5643 same, so we need not do anything with any secondary reloads. */
5645 for (j = 0; j < n_reloads; j++)
5647 if (i == j || reload_reg_rtx[j] == 0
5648 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5652 /* If the reload regs aren't exactly the same (e.g, different modes)
5653 or if the values are different, we can't merge anything with this
5656 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5657 || reload_out[j] != 0 || reload_in[j] == 0
5658 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5662 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5663 we, in fact, found any matching reloads. */
5667 for (j = 0; j < n_reloads; j++)
5668 if (i != j && reload_reg_rtx[j] != 0
5669 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5671 reload_when_needed[i] = RELOAD_OTHER;
5673 transfer_replacements (i, j);
5676 /* If this is now RELOAD_OTHER, look for any reloads that load
5677 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5678 if they were for inputs, RELOAD_OTHER for outputs. Note that
5679 this test is equivalent to looking for reloads for this operand
5682 if (reload_when_needed[i] == RELOAD_OTHER)
5683 for (j = 0; j < n_reloads; j++)
5684 if (reload_in[j] != 0
5685 && reload_when_needed[i] != RELOAD_OTHER
5686 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5688 reload_when_needed[j]
5689 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5690 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5694 #endif /* SMALL_RELOAD_CLASSES */
5696 /* Output insns to reload values in and out of the chosen reload regs. */
5699 emit_reload_insns (insn)
5703 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5704 rtx other_input_address_reload_insns = 0;
5705 rtx other_input_reload_insns = 0;
5706 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5707 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5708 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5709 rtx operand_reload_insns = 0;
5710 rtx other_operand_reload_insns = 0;
5711 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5712 rtx following_insn = NEXT_INSN (insn);
5713 rtx before_insn = insn;
5715 /* Values to be put in spill_reg_store are put here first. */
5716 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5718 for (j = 0; j < reload_n_operands; j++)
5719 input_reload_insns[j] = input_address_reload_insns[j]
5720 = output_reload_insns[j] = output_address_reload_insns[j]
5721 = other_output_reload_insns[j] = 0;
5723 /* Now output the instructions to copy the data into and out of the
5724 reload registers. Do these in the order that the reloads were reported,
5725 since reloads of base and index registers precede reloads of operands
5726 and the operands may need the base and index registers reloaded. */
5728 for (j = 0; j < n_reloads; j++)
5731 rtx oldequiv_reg = 0;
5732 rtx this_reload_insn = 0;
5734 if (reload_spill_index[j] >= 0)
5735 new_spill_reg_store[reload_spill_index[j]] = 0;
5738 if (old != 0 && ! reload_inherited[j]
5739 && ! rtx_equal_p (reload_reg_rtx[j], old)
5740 && reload_reg_rtx[j] != 0)
5742 register rtx reloadreg = reload_reg_rtx[j];
5744 enum machine_mode mode;
5747 /* Determine the mode to reload in.
5748 This is very tricky because we have three to choose from.
5749 There is the mode the insn operand wants (reload_inmode[J]).
5750 There is the mode of the reload register RELOADREG.
5751 There is the intrinsic mode of the operand, which we could find
5752 by stripping some SUBREGs.
5753 It turns out that RELOADREG's mode is irrelevant:
5754 we can change that arbitrarily.
5756 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5757 then the reload reg may not support QImode moves, so use SImode.
5758 If foo is in memory due to spilling a pseudo reg, this is safe,
5759 because the QImode value is in the least significant part of a
5760 slot big enough for a SImode. If foo is some other sort of
5761 memory reference, then it is impossible to reload this case,
5762 so previous passes had better make sure this never happens.
5764 Then consider a one-word union which has SImode and one of its
5765 members is a float, being fetched as (SUBREG:SF union:SI).
5766 We must fetch that as SFmode because we could be loading into
5767 a float-only register. In this case OLD's mode is correct.
5769 Consider an immediate integer: it has VOIDmode. Here we need
5770 to get a mode from something else.
5772 In some cases, there is a fourth mode, the operand's
5773 containing mode. If the insn specifies a containing mode for
5774 this operand, it overrides all others.
5776 I am not sure whether the algorithm here is always right,
5777 but it does the right things in those cases. */
5779 mode = GET_MODE (old);
5780 if (mode == VOIDmode)
5781 mode = reload_inmode[j];
5783 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5784 /* If we need a secondary register for this operation, see if
5785 the value is already in a register in that class. Don't
5786 do this if the secondary register will be used as a scratch
5789 if (reload_secondary_in_reload[j] >= 0
5790 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5793 = find_equiv_reg (old, insn,
5794 reload_reg_class[reload_secondary_in_reload[j]],
5795 -1, NULL_PTR, 0, mode);
5798 /* If reloading from memory, see if there is a register
5799 that already holds the same value. If so, reload from there.
5800 We can pass 0 as the reload_reg_p argument because
5801 any other reload has either already been emitted,
5802 in which case find_equiv_reg will see the reload-insn,
5803 or has yet to be emitted, in which case it doesn't matter
5804 because we will use this equiv reg right away. */
5806 if (oldequiv == 0 && optimize
5807 && (GET_CODE (old) == MEM
5808 || (GET_CODE (old) == REG
5809 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5810 && reg_renumber[REGNO (old)] < 0)))
5811 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5812 -1, NULL_PTR, 0, mode);
5816 int regno = true_regnum (oldequiv);
5818 /* If OLDEQUIV is a spill register, don't use it for this
5819 if any other reload needs it at an earlier stage of this insn
5820 or at this stage. */
5821 if (spill_reg_order[regno] >= 0
5822 && (! reload_reg_free_p (regno, reload_opnum[j],
5823 reload_when_needed[j])
5824 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5825 reload_when_needed[j])))
5828 /* If OLDEQUIV is not a spill register,
5829 don't use it if any other reload wants it. */
5830 if (spill_reg_order[regno] < 0)
5833 for (k = 0; k < n_reloads; k++)
5834 if (reload_reg_rtx[k] != 0 && k != j
5835 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5843 /* If it is no cheaper to copy from OLDEQUIV into the
5844 reload register than it would be to move from memory,
5845 don't use it. Likewise, if we need a secondary register
5849 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5850 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5851 reload_reg_class[j])
5852 >= MEMORY_MOVE_COST (mode)))
5853 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5854 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5858 #ifdef SECONDARY_MEMORY_NEEDED
5859 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5860 REGNO_REG_CLASS (regno),
5869 else if (GET_CODE (oldequiv) == REG)
5870 oldequiv_reg = oldequiv;
5871 else if (GET_CODE (oldequiv) == SUBREG)
5872 oldequiv_reg = SUBREG_REG (oldequiv);
5874 /* If we are reloading from a register that was recently stored in
5875 with an output-reload, see if we can prove there was
5876 actually no need to store the old value in it. */
5878 if (optimize && GET_CODE (oldequiv) == REG
5879 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5880 && spill_reg_order[REGNO (oldequiv)] >= 0
5881 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5882 && find_reg_note (insn, REG_DEAD, reload_in[j])
5883 /* This is unsafe if operand occurs more than once in current
5884 insn. Perhaps some occurrences weren't reloaded. */
5885 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5886 delete_output_reload
5887 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5889 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5890 then load RELOADREG from OLDEQUIV. Note that we cannot use
5891 gen_lowpart_common since it can do the wrong thing when
5892 RELOADREG has a multi-word mode. Note that RELOADREG
5893 must always be a REG here. */
5895 if (GET_MODE (reloadreg) != mode)
5896 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5897 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5898 oldequiv = SUBREG_REG (oldequiv);
5899 if (GET_MODE (oldequiv) != VOIDmode
5900 && mode != GET_MODE (oldequiv))
5901 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5903 /* Switch to the right place to emit the reload insns. */
5904 switch (reload_when_needed[j])
5907 where = &other_input_reload_insns;
5909 case RELOAD_FOR_INPUT:
5910 where = &input_reload_insns[reload_opnum[j]];
5912 case RELOAD_FOR_INPUT_ADDRESS:
5913 where = &input_address_reload_insns[reload_opnum[j]];
5915 case RELOAD_FOR_OUTPUT_ADDRESS:
5916 where = &output_address_reload_insns[reload_opnum[j]];
5918 case RELOAD_FOR_OPERAND_ADDRESS:
5919 where = &operand_reload_insns;
5921 case RELOAD_FOR_OPADDR_ADDR:
5922 where = &other_operand_reload_insns;
5924 case RELOAD_FOR_OTHER_ADDRESS:
5925 where = &other_input_address_reload_insns;
5931 push_to_sequence (*where);
5934 /* Auto-increment addresses must be reloaded in a special way. */
5935 if (GET_CODE (oldequiv) == POST_INC
5936 || GET_CODE (oldequiv) == POST_DEC
5937 || GET_CODE (oldequiv) == PRE_INC
5938 || GET_CODE (oldequiv) == PRE_DEC)
5940 /* We are not going to bother supporting the case where a
5941 incremented register can't be copied directly from
5942 OLDEQUIV since this seems highly unlikely. */
5943 if (reload_secondary_in_reload[j] >= 0)
5945 /* Prevent normal processing of this reload. */
5947 /* Output a special code sequence for this case. */
5948 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5951 /* If we are reloading a pseudo-register that was set by the previous
5952 insn, see if we can get rid of that pseudo-register entirely
5953 by redirecting the previous insn into our reload register. */
5955 else if (optimize && GET_CODE (old) == REG
5956 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5957 && dead_or_set_p (insn, old)
5958 /* This is unsafe if some other reload
5959 uses the same reg first. */
5960 && reload_reg_free_before_p (REGNO (reloadreg),
5962 reload_when_needed[j]))
5964 rtx temp = PREV_INSN (insn);
5965 while (temp && GET_CODE (temp) == NOTE)
5966 temp = PREV_INSN (temp);
5968 && GET_CODE (temp) == INSN
5969 && GET_CODE (PATTERN (temp)) == SET
5970 && SET_DEST (PATTERN (temp)) == old
5971 /* Make sure we can access insn_operand_constraint. */
5972 && asm_noperands (PATTERN (temp)) < 0
5973 /* This is unsafe if prev insn rejects our reload reg. */
5974 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5976 /* This is unsafe if operand occurs more than once in current
5977 insn. Perhaps some occurrences aren't reloaded. */
5978 && count_occurrences (PATTERN (insn), old) == 1
5979 /* Don't risk splitting a matching pair of operands. */
5980 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5982 /* Store into the reload register instead of the pseudo. */
5983 SET_DEST (PATTERN (temp)) = reloadreg;
5984 /* If these are the only uses of the pseudo reg,
5985 pretend for GDB it lives in the reload reg we used. */
5986 if (reg_n_deaths[REGNO (old)] == 1
5987 && reg_n_sets[REGNO (old)] == 1)
5989 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5990 alter_reg (REGNO (old), -1);
5996 /* We can't do that, so output an insn to load RELOADREG. */
6000 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6001 rtx second_reload_reg = 0;
6002 enum insn_code icode;
6004 /* If we have a secondary reload, pick up the secondary register
6005 and icode, if any. If OLDEQUIV and OLD are different or
6006 if this is an in-out reload, recompute whether or not we
6007 still need a secondary register and what the icode should
6008 be. If we still need a secondary register and the class or
6009 icode is different, go back to reloading from OLD if using
6010 OLDEQUIV means that we got the wrong type of register. We
6011 cannot have different class or icode due to an in-out reload
6012 because we don't make such reloads when both the input and
6013 output need secondary reload registers. */
6015 if (reload_secondary_in_reload[j] >= 0)
6017 int secondary_reload = reload_secondary_in_reload[j];
6018 rtx real_oldequiv = oldequiv;
6021 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6022 and similarly for OLD.
6023 See comments in get_secondary_reload in reload.c. */
6024 if (GET_CODE (oldequiv) == REG
6025 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6026 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6027 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6029 if (GET_CODE (old) == REG
6030 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6031 && reg_equiv_mem[REGNO (old)] != 0)
6032 real_old = reg_equiv_mem[REGNO (old)];
6034 second_reload_reg = reload_reg_rtx[secondary_reload];
6035 icode = reload_secondary_in_icode[j];
6037 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6038 || (reload_in[j] != 0 && reload_out[j] != 0))
6040 enum reg_class new_class
6041 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6042 mode, real_oldequiv);
6044 if (new_class == NO_REGS)
6045 second_reload_reg = 0;
6048 enum insn_code new_icode;
6049 enum machine_mode new_mode;
6051 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6052 REGNO (second_reload_reg)))
6053 oldequiv = old, real_oldequiv = real_old;
6056 new_icode = reload_in_optab[(int) mode];
6057 if (new_icode != CODE_FOR_nothing
6058 && ((insn_operand_predicate[(int) new_icode][0]
6059 && ! ((*insn_operand_predicate[(int) new_icode][0])
6061 || (insn_operand_predicate[(int) new_icode][1]
6062 && ! ((*insn_operand_predicate[(int) new_icode][1])
6063 (real_oldequiv, mode)))))
6064 new_icode = CODE_FOR_nothing;
6066 if (new_icode == CODE_FOR_nothing)
6069 new_mode = insn_operand_mode[(int) new_icode][2];
6071 if (GET_MODE (second_reload_reg) != new_mode)
6073 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6075 oldequiv = old, real_oldequiv = real_old;
6078 = gen_rtx (REG, new_mode,
6079 REGNO (second_reload_reg));
6085 /* If we still need a secondary reload register, check
6086 to see if it is being used as a scratch or intermediate
6087 register and generate code appropriately. If we need
6088 a scratch register, use REAL_OLDEQUIV since the form of
6089 the insn may depend on the actual address if it is
6092 if (second_reload_reg)
6094 if (icode != CODE_FOR_nothing)
6096 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6097 second_reload_reg));
6102 /* See if we need a scratch register to load the
6103 intermediate register (a tertiary reload). */
6104 enum insn_code tertiary_icode
6105 = reload_secondary_in_icode[secondary_reload];
6107 if (tertiary_icode != CODE_FOR_nothing)
6109 rtx third_reload_reg
6110 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6112 emit_insn ((GEN_FCN (tertiary_icode)
6113 (second_reload_reg, real_oldequiv,
6114 third_reload_reg)));
6117 gen_reload (second_reload_reg, oldequiv,
6119 reload_when_needed[j]);
6121 oldequiv = second_reload_reg;
6127 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6128 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6129 reload_when_needed[j]);
6131 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6132 /* We may have to make a REG_DEAD note for the secondary reload
6133 register in the insns we just made. Find the last insn that
6134 mentioned the register. */
6135 if (! special && second_reload_reg
6136 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6140 for (prev = get_last_insn (); prev;
6141 prev = PREV_INSN (prev))
6142 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6143 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6146 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6155 this_reload_insn = get_last_insn ();
6156 /* End this sequence. */
6157 *where = get_insns ();
6161 /* Add a note saying the input reload reg
6162 dies in this insn, if anyone cares. */
6163 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6165 && reload_reg_rtx[j] != old
6166 && reload_reg_rtx[j] != 0
6167 && reload_out[j] == 0
6168 && ! reload_inherited[j]
6169 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6171 register rtx reloadreg = reload_reg_rtx[j];
6174 /* We can't abort here because we need to support this for sched.c.
6175 It's not terrible to miss a REG_DEAD note, but we should try
6176 to figure out how to do this correctly. */
6177 /* The code below is incorrect for address-only reloads. */
6178 if (reload_when_needed[j] != RELOAD_OTHER
6179 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6183 /* Add a death note to this insn, for an input reload. */
6185 if ((reload_when_needed[j] == RELOAD_OTHER
6186 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6187 && ! dead_or_set_p (insn, reloadreg))
6189 = gen_rtx (EXPR_LIST, REG_DEAD,
6190 reloadreg, REG_NOTES (insn));
6193 /* When we inherit a reload, the last marked death of the reload reg
6194 may no longer really be a death. */
6195 if (reload_reg_rtx[j] != 0
6196 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6197 && reload_inherited[j])
6199 /* Handle inheriting an output reload.
6200 Remove the death note from the output reload insn. */
6201 if (reload_spill_index[j] >= 0
6202 && GET_CODE (reload_in[j]) == REG
6203 && spill_reg_store[reload_spill_index[j]] != 0
6204 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6205 REG_DEAD, REGNO (reload_reg_rtx[j])))
6206 remove_death (REGNO (reload_reg_rtx[j]),
6207 spill_reg_store[reload_spill_index[j]]);
6208 /* Likewise for input reloads that were inherited. */
6209 else if (reload_spill_index[j] >= 0
6210 && GET_CODE (reload_in[j]) == REG
6211 && spill_reg_store[reload_spill_index[j]] == 0
6212 && reload_inheritance_insn[j] != 0
6213 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6214 REGNO (reload_reg_rtx[j])))
6215 remove_death (REGNO (reload_reg_rtx[j]),
6216 reload_inheritance_insn[j]);
6221 /* We got this register from find_equiv_reg.
6222 Search back for its last death note and get rid of it.
6223 But don't search back too far.
6224 Don't go past a place where this reg is set,
6225 since a death note before that remains valid. */
6226 for (prev = PREV_INSN (insn);
6227 prev && GET_CODE (prev) != CODE_LABEL;
6228 prev = PREV_INSN (prev))
6229 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6230 && dead_or_set_p (prev, reload_reg_rtx[j]))
6232 if (find_regno_note (prev, REG_DEAD,
6233 REGNO (reload_reg_rtx[j])))
6234 remove_death (REGNO (reload_reg_rtx[j]), prev);
6240 /* We might have used find_equiv_reg above to choose an alternate
6241 place from which to reload. If so, and it died, we need to remove
6242 that death and move it to one of the insns we just made. */
6244 if (oldequiv_reg != 0
6245 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6249 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6250 prev = PREV_INSN (prev))
6251 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6252 && dead_or_set_p (prev, oldequiv_reg))
6254 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6256 for (prev1 = this_reload_insn;
6257 prev1; prev1 = PREV_INSN (prev1))
6258 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6259 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6262 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6267 remove_death (REGNO (oldequiv_reg), prev);
6274 /* If we are reloading a register that was recently stored in with an
6275 output-reload, see if we can prove there was
6276 actually no need to store the old value in it. */
6278 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6279 && reload_in[j] != 0
6280 && GET_CODE (reload_in[j]) == REG
6282 /* There doesn't seem to be any reason to restrict this to pseudos
6283 and doing so loses in the case where we are copying from a
6284 register of the wrong class. */
6285 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6287 && spill_reg_store[reload_spill_index[j]] != 0
6288 /* This is unsafe if some other reload uses the same reg first. */
6289 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6290 reload_opnum[j], reload_when_needed[j])
6291 && dead_or_set_p (insn, reload_in[j])
6292 /* This is unsafe if operand occurs more than once in current
6293 insn. Perhaps some occurrences weren't reloaded. */
6294 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6295 delete_output_reload (insn, j,
6296 spill_reg_store[reload_spill_index[j]]);
6298 /* Input-reloading is done. Now do output-reloading,
6299 storing the value from the reload-register after the main insn
6300 if reload_out[j] is nonzero.
6302 ??? At some point we need to support handling output reloads of
6303 JUMP_INSNs or insns that set cc0. */
6304 old = reload_out[j];
6306 && reload_reg_rtx[j] != old
6307 && reload_reg_rtx[j] != 0)
6309 register rtx reloadreg = reload_reg_rtx[j];
6310 register rtx second_reloadreg = 0;
6312 enum machine_mode mode;
6315 /* An output operand that dies right away does need a reload,
6316 but need not be copied from it. Show the new location in the
6318 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6319 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6321 XEXP (note, 0) = reload_reg_rtx[j];
6324 /* Likewise for a SUBREG of an operand that dies. */
6325 else if (GET_CODE (old) == SUBREG
6326 && GET_CODE (SUBREG_REG (old)) == REG
6327 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6330 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6334 else if (GET_CODE (old) == SCRATCH)
6335 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6336 but we don't want to make an output reload. */
6340 /* Strip off of OLD any size-increasing SUBREGs such as
6341 (SUBREG:SI foo:QI 0). */
6343 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6344 && (GET_MODE_SIZE (GET_MODE (old))
6345 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6346 old = SUBREG_REG (old);
6349 /* If is a JUMP_INSN, we can't support output reloads yet. */
6350 if (GET_CODE (insn) == JUMP_INSN)
6353 if (reload_when_needed[j] == RELOAD_OTHER)
6356 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6358 /* Determine the mode to reload in.
6359 See comments above (for input reloading). */
6361 mode = GET_MODE (old);
6362 if (mode == VOIDmode)
6364 /* VOIDmode should never happen for an output. */
6365 if (asm_noperands (PATTERN (insn)) < 0)
6366 /* It's the compiler's fault. */
6367 fatal_insn ("VOIDmode on an output", insn);
6368 error_for_asm (insn, "output operand is constant in `asm'");
6369 /* Prevent crash--use something we know is valid. */
6371 old = gen_rtx (REG, mode, REGNO (reloadreg));
6374 if (GET_MODE (reloadreg) != mode)
6375 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6377 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6379 /* If we need two reload regs, set RELOADREG to the intermediate
6380 one, since it will be stored into OLD. We might need a secondary
6381 register only for an input reload, so check again here. */
6383 if (reload_secondary_out_reload[j] >= 0)
6387 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6388 && reg_equiv_mem[REGNO (old)] != 0)
6389 real_old = reg_equiv_mem[REGNO (old)];
6391 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6395 second_reloadreg = reloadreg;
6396 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6398 /* See if RELOADREG is to be used as a scratch register
6399 or as an intermediate register. */
6400 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6402 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6403 (real_old, second_reloadreg, reloadreg)));
6408 /* See if we need both a scratch and intermediate reload
6411 int secondary_reload = reload_secondary_out_reload[j];
6412 enum insn_code tertiary_icode
6413 = reload_secondary_out_icode[secondary_reload];
6415 if (GET_MODE (reloadreg) != mode)
6416 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6418 if (tertiary_icode != CODE_FOR_nothing)
6421 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6424 /* Copy primary reload reg to secondary reload reg.
6425 (Note that these have been swapped above, then
6426 secondary reload reg to OLD using our insn. */
6428 /* If REAL_OLD is a paradoxical SUBREG, remove it
6429 and try to put the opposite SUBREG on
6431 if (GET_CODE (real_old) == SUBREG
6432 && (GET_MODE_SIZE (GET_MODE (real_old))
6433 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6434 && 0 != (tem = gen_lowpart_common
6435 (GET_MODE (SUBREG_REG (real_old)),
6437 real_old = SUBREG_REG (real_old), reloadreg = tem;
6439 gen_reload (reloadreg, second_reloadreg,
6440 reload_opnum[j], reload_when_needed[j]);
6441 emit_insn ((GEN_FCN (tertiary_icode)
6442 (real_old, reloadreg, third_reloadreg)));
6447 /* Copy between the reload regs here and then to
6450 gen_reload (reloadreg, second_reloadreg,
6451 reload_opnum[j], reload_when_needed[j]);
6457 /* Output the last reload insn. */
6459 gen_reload (old, reloadreg, reload_opnum[j],
6460 reload_when_needed[j]);
6462 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6463 /* If final will look at death notes for this reg,
6464 put one on the last output-reload insn to use it. Similarly
6465 for any secondary register. */
6466 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6467 for (p = get_last_insn (); p; p = PREV_INSN (p))
6468 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6469 && reg_overlap_mentioned_for_reload_p (reloadreg,
6471 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6472 reloadreg, REG_NOTES (p));
6474 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6475 if (! special && second_reloadreg
6476 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6477 for (p = get_last_insn (); p; p = PREV_INSN (p))
6478 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6479 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6481 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6482 second_reloadreg, REG_NOTES (p));
6485 /* Look at all insns we emitted, just to be safe. */
6486 for (p = get_insns (); p; p = NEXT_INSN (p))
6487 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6489 /* If this output reload doesn't come from a spill reg,
6490 clear any memory of reloaded copies of the pseudo reg.
6491 If this output reload comes from a spill reg,
6492 reg_has_output_reload will make this do nothing. */
6493 note_stores (PATTERN (p), forget_old_reloads_1);
6495 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6496 && reload_spill_index[j] >= 0)
6497 new_spill_reg_store[reload_spill_index[j]] = p;
6500 if (reload_when_needed[j] == RELOAD_OTHER)
6502 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6503 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6506 output_reload_insns[reload_opnum[j]] = get_insns ();
6512 /* Now write all the insns we made for reloads in the order expected by
6513 the allocation functions. Prior to the insn being reloaded, we write
6514 the following reloads:
6516 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6518 RELOAD_OTHER reloads.
6520 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6521 the RELOAD_FOR_INPUT reload for the operand.
6523 RELOAD_FOR_OPADDR_ADDRS reloads.
6525 RELOAD_FOR_OPERAND_ADDRESS reloads.
6527 After the insn being reloaded, we write the following:
6529 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6530 the RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6531 reloads for the operand. The RELOAD_OTHER output reloads are output
6532 in descending order by reload number. */
6534 emit_insns_before (other_input_address_reload_insns, before_insn);
6535 emit_insns_before (other_input_reload_insns, before_insn);
6537 for (j = 0; j < reload_n_operands; j++)
6539 emit_insns_before (input_address_reload_insns[j], before_insn);
6540 emit_insns_before (input_reload_insns[j], before_insn);
6543 emit_insns_before (other_operand_reload_insns, before_insn);
6544 emit_insns_before (operand_reload_insns, before_insn);
6546 for (j = 0; j < reload_n_operands; j++)
6548 emit_insns_before (output_address_reload_insns[j], following_insn);
6549 emit_insns_before (output_reload_insns[j], following_insn);
6550 emit_insns_before (other_output_reload_insns[j], following_insn);
6553 /* Move death notes from INSN
6554 to output-operand-address and output reload insns. */
6555 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6558 /* Loop over those insns, last ones first. */
6559 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6560 insn1 = PREV_INSN (insn1))
6561 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6563 rtx source = SET_SRC (PATTERN (insn1));
6564 rtx dest = SET_DEST (PATTERN (insn1));
6566 /* The note we will examine next. */
6567 rtx reg_notes = REG_NOTES (insn);
6568 /* The place that pointed to this note. */
6569 rtx *prev_reg_note = ®_NOTES (insn);
6571 /* If the note is for something used in the source of this
6572 reload insn, or in the output address, move the note. */
6575 rtx next_reg_notes = XEXP (reg_notes, 1);
6576 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6577 && GET_CODE (XEXP (reg_notes, 0)) == REG
6578 && ((GET_CODE (dest) != REG
6579 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6581 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6584 *prev_reg_note = next_reg_notes;
6585 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6586 REG_NOTES (insn1) = reg_notes;
6589 prev_reg_note = &XEXP (reg_notes, 1);
6591 reg_notes = next_reg_notes;
6597 /* For all the spill regs newly reloaded in this instruction,
6598 record what they were reloaded from, so subsequent instructions
6599 can inherit the reloads.
6601 Update spill_reg_store for the reloads of this insn.
6602 Copy the elements that were updated in the loop above. */
6604 for (j = 0; j < n_reloads; j++)
6606 register int r = reload_order[j];
6607 register int i = reload_spill_index[r];
6609 /* I is nonneg if this reload used one of the spill regs.
6610 If reload_reg_rtx[r] is 0, this is an optional reload
6611 that we opted to ignore. */
6613 if (i >= 0 && reload_reg_rtx[r] != 0)
6616 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6618 int part_reaches_end = 0;
6619 int all_reaches_end = 1;
6621 /* For a multi register reload, we need to check if all or part
6622 of the value lives to the end. */
6623 for (k = 0; k < nr; k++)
6625 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6626 reload_when_needed[r]))
6627 part_reaches_end = 1;
6629 all_reaches_end = 0;
6632 /* Ignore reloads that don't reach the end of the insn in
6634 if (all_reaches_end)
6636 /* First, clear out memory of what used to be in this spill reg.
6637 If consecutive registers are used, clear them all. */
6639 for (k = 0; k < nr; k++)
6641 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6642 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6645 /* Maybe the spill reg contains a copy of reload_out. */
6646 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6648 register int nregno = REGNO (reload_out[r]);
6649 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6650 : HARD_REGNO_NREGS (nregno,
6651 GET_MODE (reload_reg_rtx[r])));
6653 spill_reg_store[i] = new_spill_reg_store[i];
6654 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6656 /* If NREGNO is a hard register, it may occupy more than
6657 one register. If it does, say what is in the
6658 rest of the registers assuming that both registers
6659 agree on how many words the object takes. If not,
6660 invalidate the subsequent registers. */
6662 if (nregno < FIRST_PSEUDO_REGISTER)
6663 for (k = 1; k < nnr; k++)
6664 reg_last_reload_reg[nregno + k]
6667 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6668 REGNO (reload_reg_rtx[r]) + k)
6671 /* Now do the inverse operation. */
6672 for (k = 0; k < nr; k++)
6674 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6675 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6678 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6682 /* Maybe the spill reg contains a copy of reload_in. Only do
6683 something if there will not be an output reload for
6684 the register being reloaded. */
6685 else if (reload_out[r] == 0
6686 && reload_in[r] != 0
6687 && ((GET_CODE (reload_in[r]) == REG
6688 && ! reg_has_output_reload[REGNO (reload_in[r])])
6689 || (GET_CODE (reload_in_reg[r]) == REG
6690 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6692 register int nregno;
6695 if (GET_CODE (reload_in[r]) == REG)
6696 nregno = REGNO (reload_in[r]);
6698 nregno = REGNO (reload_in_reg[r]);
6700 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6701 : HARD_REGNO_NREGS (nregno,
6702 GET_MODE (reload_reg_rtx[r])));
6704 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6706 if (nregno < FIRST_PSEUDO_REGISTER)
6707 for (k = 1; k < nnr; k++)
6708 reg_last_reload_reg[nregno + k]
6711 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6712 REGNO (reload_reg_rtx[r]) + k)
6715 /* Unless we inherited this reload, show we haven't
6716 recently done a store. */
6717 if (! reload_inherited[r])
6718 spill_reg_store[i] = 0;
6720 for (k = 0; k < nr; k++)
6722 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6723 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6726 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6732 /* However, if part of the reload reaches the end, then we must
6733 invalidate the old info for the part that survives to the end. */
6734 else if (part_reaches_end)
6736 for (k = 0; k < nr; k++)
6737 if (reload_reg_reaches_end_p (spill_regs[i] + k,
6739 reload_when_needed[r]))
6741 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6742 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6747 /* The following if-statement was #if 0'd in 1.34 (or before...).
6748 It's reenabled in 1.35 because supposedly nothing else
6749 deals with this problem. */
6751 /* If a register gets output-reloaded from a non-spill register,
6752 that invalidates any previous reloaded copy of it.
6753 But forget_old_reloads_1 won't get to see it, because
6754 it thinks only about the original insn. So invalidate it here. */
6755 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6757 register int nregno = REGNO (reload_out[r]);
6758 if (nregno >= FIRST_PSEUDO_REGISTER)
6759 reg_last_reload_reg[nregno] = 0;
6762 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
6764 while (num_regs-- > 0)
6765 reg_last_reload_reg[nregno + num_regs] = 0;
6771 /* Emit code to perform a reload from IN (which may be a reload register) to
6772 OUT (which may also be a reload register). IN or OUT is from operand
6773 OPNUM with reload type TYPE.
6775 Returns first insn emitted. */
6778 gen_reload (out, in, opnum, type)
6782 enum reload_type type;
6784 rtx last = get_last_insn ();
6787 /* If IN is a paradoxical SUBREG, remove it and try to put the
6788 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
6789 if (GET_CODE (in) == SUBREG
6790 && (GET_MODE_SIZE (GET_MODE (in))
6791 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
6792 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
6793 in = SUBREG_REG (in), out = tem;
6794 else if (GET_CODE (out) == SUBREG
6795 && (GET_MODE_SIZE (GET_MODE (out))
6796 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
6797 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
6798 out = SUBREG_REG (out), in = tem;
6800 /* How to do this reload can get quite tricky. Normally, we are being
6801 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6802 register that didn't get a hard register. In that case we can just
6803 call emit_move_insn.
6805 We can also be asked to reload a PLUS that adds a register or a MEM to
6806 another register, constant or MEM. This can occur during frame pointer
6807 elimination and while reloading addresses. This case is handled by
6808 trying to emit a single insn to perform the add. If it is not valid,
6809 we use a two insn sequence.
6811 Finally, we could be called to handle an 'o' constraint by putting
6812 an address into a register. In that case, we first try to do this
6813 with a named pattern of "reload_load_address". If no such pattern
6814 exists, we just emit a SET insn and hope for the best (it will normally
6815 be valid on machines that use 'o').
6817 This entire process is made complex because reload will never
6818 process the insns we generate here and so we must ensure that
6819 they will fit their constraints and also by the fact that parts of
6820 IN might be being reloaded separately and replaced with spill registers.
6821 Because of this, we are, in some sense, just guessing the right approach
6822 here. The one listed above seems to work.
6824 ??? At some point, this whole thing needs to be rethought. */
6826 if (GET_CODE (in) == PLUS
6827 && (GET_CODE (XEXP (in, 0)) == REG
6828 || GET_CODE (XEXP (in, 0)) == SUBREG
6829 || GET_CODE (XEXP (in, 0)) == MEM)
6830 && (GET_CODE (XEXP (in, 1)) == REG
6831 || GET_CODE (XEXP (in, 1)) == SUBREG
6832 || CONSTANT_P (XEXP (in, 1))
6833 || GET_CODE (XEXP (in, 1)) == MEM))
6835 /* We need to compute the sum of a register or a MEM and another
6836 register, constant, or MEM, and put it into the reload
6837 register. The best possible way of doing this is if the machine
6838 has a three-operand ADD insn that accepts the required operands.
6840 The simplest approach is to try to generate such an insn and see if it
6841 is recognized and matches its constraints. If so, it can be used.
6843 It might be better not to actually emit the insn unless it is valid,
6844 but we need to pass the insn as an operand to `recog' and
6845 `insn_extract' and it is simpler to emit and then delete the insn if
6846 not valid than to dummy things up. */
6848 rtx op0, op1, tem, insn;
6851 op0 = find_replacement (&XEXP (in, 0));
6852 op1 = find_replacement (&XEXP (in, 1));
6854 /* Since constraint checking is strict, commutativity won't be
6855 checked, so we need to do that here to avoid spurious failure
6856 if the add instruction is two-address and the second operand
6857 of the add is the same as the reload reg, which is frequently
6858 the case. If the insn would be A = B + A, rearrange it so
6859 it will be A = A + B as constrain_operands expects. */
6861 if (GET_CODE (XEXP (in, 1)) == REG
6862 && REGNO (out) == REGNO (XEXP (in, 1)))
6863 tem = op0, op0 = op1, op1 = tem;
6865 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6866 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6868 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
6869 code = recog_memoized (insn);
6873 insn_extract (insn);
6874 /* We want constrain operands to treat this insn strictly in
6875 its validity determination, i.e., the way it would after reload
6877 if (constrain_operands (code, 1))
6881 delete_insns_since (last);
6883 /* If that failed, we must use a conservative two-insn sequence.
6884 use move to copy constant, MEM, or pseudo register to the reload
6885 register since "move" will be able to handle an arbitrary operand,
6886 unlike add which can't, in general. Then add the registers.
6888 If there is another way to do this for a specific machine, a
6889 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6892 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
6893 || (GET_CODE (op1) == REG
6894 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6895 tem = op0, op0 = op1, op1 = tem;
6897 gen_reload (out, op0, opnum, type);
6899 /* If OP0 and OP1 are the same, we can use OUT for OP1.
6900 This fixes a problem on the 32K where the stack pointer cannot
6901 be used as an operand of an add insn. */
6903 if (rtx_equal_p (op0, op1))
6906 insn = emit_insn (gen_add2_insn (out, op1));
6908 /* If that failed, copy the address register to the reload register.
6909 Then add the constant to the reload register. */
6911 code = recog_memoized (insn);
6915 insn_extract (insn);
6916 /* We want constrain operands to treat this insn strictly in
6917 its validity determination, i.e., the way it would after reload
6919 if (constrain_operands (code, 1))
6923 delete_insns_since (last);
6925 gen_reload (out, op1, opnum, type);
6926 emit_insn (gen_add2_insn (out, op0));
6929 #ifdef SECONDARY_MEMORY_NEEDED
6930 /* If we need a memory location to do the move, do it that way. */
6931 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6932 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
6933 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6934 REGNO_REG_CLASS (REGNO (out)),
6937 /* Get the memory to use and rewrite both registers to its mode. */
6938 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
6940 if (GET_MODE (loc) != GET_MODE (out))
6941 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
6943 if (GET_MODE (loc) != GET_MODE (in))
6944 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6946 gen_reload (loc, in, opnum, type);
6947 gen_reload (out, loc, opnum, type);
6951 /* If IN is a simple operand, use gen_move_insn. */
6952 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6953 emit_insn (gen_move_insn (out, in));
6955 #ifdef HAVE_reload_load_address
6956 else if (HAVE_reload_load_address)
6957 emit_insn (gen_reload_load_address (out, in));
6960 /* Otherwise, just write (set OUT IN) and hope for the best. */
6962 emit_insn (gen_rtx (SET, VOIDmode, out, in));
6964 /* Return the first insn emitted.
6965 We can not just return get_last_insn, because there may have
6966 been multiple instructions emitted. Also note that gen_move_insn may
6967 emit more than one insn itself, so we can not assume that there is one
6968 insn emitted per emit_insn_before call. */
6970 return last ? NEXT_INSN (last) : get_insns ();
6973 /* Delete a previously made output-reload
6974 whose result we now believe is not needed.
6975 First we double-check.
6977 INSN is the insn now being processed.
6978 OUTPUT_RELOAD_INSN is the insn of the output reload.
6979 J is the reload-number for this insn. */
6982 delete_output_reload (insn, j, output_reload_insn)
6985 rtx output_reload_insn;
6989 /* Get the raw pseudo-register referred to. */
6991 rtx reg = reload_in[j];
6992 while (GET_CODE (reg) == SUBREG)
6993 reg = SUBREG_REG (reg);
6995 /* If the pseudo-reg we are reloading is no longer referenced
6996 anywhere between the store into it and here,
6997 and no jumps or labels intervene, then the value can get
6998 here through the reload reg alone.
6999 Otherwise, give up--return. */
7000 for (i1 = NEXT_INSN (output_reload_insn);
7001 i1 != insn; i1 = NEXT_INSN (i1))
7003 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7005 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7006 && reg_mentioned_p (reg, PATTERN (i1)))
7010 if (cannot_omit_stores[REGNO (reg)])
7013 /* If this insn will store in the pseudo again,
7014 the previous store can be removed. */
7015 if (reload_out[j] == reload_in[j])
7016 delete_insn (output_reload_insn);
7018 /* See if the pseudo reg has been completely replaced
7019 with reload regs. If so, delete the store insn
7020 and forget we had a stack slot for the pseudo. */
7021 else if (reg_n_deaths[REGNO (reg)] == 1
7022 && reg_basic_block[REGNO (reg)] >= 0
7023 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7027 /* We know that it was used only between here
7028 and the beginning of the current basic block.
7029 (We also know that the last use before INSN was
7030 the output reload we are thinking of deleting, but never mind that.)
7031 Search that range; see if any ref remains. */
7032 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7034 rtx set = single_set (i2);
7036 /* Uses which just store in the pseudo don't count,
7037 since if they are the only uses, they are dead. */
7038 if (set != 0 && SET_DEST (set) == reg)
7040 if (GET_CODE (i2) == CODE_LABEL
7041 || GET_CODE (i2) == JUMP_INSN)
7043 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7044 && reg_mentioned_p (reg, PATTERN (i2)))
7045 /* Some other ref remains;
7046 we can't do anything. */
7050 /* Delete the now-dead stores into this pseudo. */
7051 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7053 rtx set = single_set (i2);
7055 if (set != 0 && SET_DEST (set) == reg)
7057 /* This might be a basic block head,
7058 thus don't use delete_insn. */
7059 PUT_CODE (i2, NOTE);
7060 NOTE_SOURCE_FILE (i2) = 0;
7061 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7063 if (GET_CODE (i2) == CODE_LABEL
7064 || GET_CODE (i2) == JUMP_INSN)
7068 /* For the debugging info,
7069 say the pseudo lives in this reload reg. */
7070 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7071 alter_reg (REGNO (reg), -1);
7075 /* Output reload-insns to reload VALUE into RELOADREG.
7076 VALUE is an autoincrement or autodecrement RTX whose operand
7077 is a register or memory location;
7078 so reloading involves incrementing that location.
7080 INC_AMOUNT is the number to increment or decrement by (always positive).
7081 This cannot be deduced from VALUE. */
7084 inc_for_reload (reloadreg, value, inc_amount)
7089 /* REG or MEM to be copied and incremented. */
7090 rtx incloc = XEXP (value, 0);
7091 /* Nonzero if increment after copying. */
7092 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7098 /* No hard register is equivalent to this register after
7099 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7100 we could inc/dec that register as well (maybe even using it for
7101 the source), but I'm not sure it's worth worrying about. */
7102 if (GET_CODE (incloc) == REG)
7103 reg_last_reload_reg[REGNO (incloc)] = 0;
7105 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7106 inc_amount = - inc_amount;
7108 inc = GEN_INT (inc_amount);
7110 /* If this is post-increment, first copy the location to the reload reg. */
7112 emit_insn (gen_move_insn (reloadreg, incloc));
7114 /* See if we can directly increment INCLOC. Use a method similar to that
7117 last = get_last_insn ();
7118 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7119 gen_rtx (PLUS, GET_MODE (incloc),
7122 code = recog_memoized (add_insn);
7125 insn_extract (add_insn);
7126 if (constrain_operands (code, 1))
7128 /* If this is a pre-increment and we have incremented the value
7129 where it lives, copy the incremented value to RELOADREG to
7130 be used as an address. */
7133 emit_insn (gen_move_insn (reloadreg, incloc));
7139 delete_insns_since (last);
7141 /* If couldn't do the increment directly, must increment in RELOADREG.
7142 The way we do this depends on whether this is pre- or post-increment.
7143 For pre-increment, copy INCLOC to the reload register, increment it
7144 there, then save back. */
7148 emit_insn (gen_move_insn (reloadreg, incloc));
7149 emit_insn (gen_add2_insn (reloadreg, inc));
7150 emit_insn (gen_move_insn (incloc, reloadreg));
7155 Because this might be a jump insn or a compare, and because RELOADREG
7156 may not be available after the insn in an input reload, we must do
7157 the incrementation before the insn being reloaded for.
7159 We have already copied INCLOC to RELOADREG. Increment the copy in
7160 RELOADREG, save that back, then decrement RELOADREG so it has
7161 the original value. */
7163 emit_insn (gen_add2_insn (reloadreg, inc));
7164 emit_insn (gen_move_insn (incloc, reloadreg));
7165 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7171 /* Return 1 if we are certain that the constraint-string STRING allows
7172 the hard register REG. Return 0 if we can't be sure of this. */
7175 constraint_accepts_reg_p (string, reg)
7180 int regno = true_regnum (reg);
7183 /* Initialize for first alternative. */
7185 /* Check that each alternative contains `g' or `r'. */
7187 switch (c = *string++)
7190 /* If an alternative lacks `g' or `r', we lose. */
7193 /* If an alternative lacks `g' or `r', we lose. */
7196 /* Initialize for next alternative. */
7201 /* Any general reg wins for this alternative. */
7202 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7206 /* Any reg in specified class wins for this alternative. */
7208 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7210 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7216 /* Return the number of places FIND appears within X, but don't count
7217 an occurrence if some SET_DEST is FIND. */
7220 count_occurrences (x, find)
7221 register rtx x, find;
7224 register enum rtx_code code;
7225 register char *format_ptr;
7233 code = GET_CODE (x);
7248 if (SET_DEST (x) == find)
7249 return count_occurrences (SET_SRC (x), find);
7253 format_ptr = GET_RTX_FORMAT (code);
7256 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7258 switch (*format_ptr++)
7261 count += count_occurrences (XEXP (x, i), find);
7265 if (XVEC (x, i) != NULL)
7267 for (j = 0; j < XVECLEN (x, i); j++)
7268 count += count_occurrences (XVECEXP (x, i, j), find);