1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
32 #include "hard-reg-set.h"
35 #include "basic-block.h"
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
164 (spill_reg_order prevents these registers from being used to start a
166 static HARD_REG_SET bad_spill_regs;
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
173 /* This reg set indicates those registers that have been used a spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
178 HARD_REG_SET used_spill_regs;
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
183 static int last_spill_reg;
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
207 /* Indexed by pseudo reg number N,
208 says may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
220 static char spill_indirect_levels;
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
226 char indirect_symref_ok;
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
230 char double_reg_address_ok;
232 /* Record the stack slot for each spilled hard register. */
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
236 /* Width allocated so far for that stack slot. */
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
245 char *basic_block_needs[N_REG_CLASSES];
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
254 int caller_save_needed;
256 /* The register class to use for a base register when reloading an
257 address. This is normally BASE_REG_CLASS, but it may be different
258 when using SMALL_REGISTER_CLASSES and passing parameters in
260 enum reg_class reload_address_base_reg_class;
262 /* The register class to use for an index register when reloading an
263 address. This is normally INDEX_REG_CLASS, but it may be different
264 when using SMALL_REGISTER_CLASSES and passing parameters in
266 enum reg_class reload_address_index_reg_class;
268 /* Set to 1 while reload_as_needed is operating.
269 Required by some machines to handle any generated moves differently. */
271 int reload_in_progress = 0;
273 /* These arrays record the insn_code of insns that may be needed to
274 perform input and output reloads of special objects. They provide a
275 place to pass a scratch register. */
277 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
280 /* This obstack is used for allocation of rtl during register elimination.
281 The allocated storage can be freed once find_reloads has processed the
284 struct obstack reload_obstack;
285 char *reload_firstobj;
287 #define obstack_chunk_alloc xmalloc
288 #define obstack_chunk_free free
290 /* List of labels that must never be deleted. */
291 extern rtx forced_labels;
293 /* Allocation number table from global register allocation. */
294 extern int *reg_allocno;
296 /* This structure is used to record information about register eliminations.
297 Each array entry describes one possible way of eliminating a register
298 in favor of another. If there is more than one way of eliminating a
299 particular register, the most preferred should be specified first. */
301 static struct elim_table
303 int from; /* Register number to be eliminated. */
304 int to; /* Register number used as replacement. */
305 int initial_offset; /* Initial difference between values. */
306 int can_eliminate; /* Non-zero if this elimination can be done. */
307 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
308 insns made by reload. */
309 int offset; /* Current offset between the two regs. */
310 int max_offset; /* Maximum offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
321 /* If a set of eliminable registers was specified, define the table from it.
322 Otherwise, default to the normal case of the frame pointer being
323 replaced by the stack pointer. */
325 #ifdef ELIMINABLE_REGS
328 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
331 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
333 /* Record the number of pending eliminations that have an offset not equal
334 to their initial offset. If non-zero, we use a new copy of each
335 replacement result in any insns encountered. */
336 static int num_not_at_initial_offset;
338 /* Count the number of registers that we may be able to eliminate. */
339 static int num_eliminable;
341 /* For each label, we record the offset of each elimination. If we reach
342 a label by more than one path and an offset differs, we cannot do the
343 elimination. This information is indexed by the number of the label.
344 The first table is an array of flags that records whether we have yet
345 encountered a label and the second table is an array of arrays, one
346 entry in the latter array for each elimination. */
348 static char *offsets_known_at;
349 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
351 /* Number of labels in the current function. */
353 static int num_labels;
355 struct hard_reg_n_uses { int regno; int uses; };
357 static int possible_group_p PROTO((int, int *));
358 static void count_possible_groups PROTO((int *, enum machine_mode *,
360 static int modes_equiv_for_class_p PROTO((enum machine_mode,
363 static void spill_failure PROTO((rtx));
364 static int new_spill_reg PROTO((int, int, int *, int *, int,
366 static void delete_dead_insn PROTO((rtx));
367 static void alter_reg PROTO((int, int));
368 static void mark_scratch_live PROTO((rtx));
369 static void set_label_offsets PROTO((rtx, rtx, int));
370 static int eliminate_regs_in_insn PROTO((rtx, int));
371 static void mark_not_eliminable PROTO((rtx, rtx));
372 static int spill_hard_reg PROTO((int, int, FILE *, int));
373 static void scan_paradoxical_subregs PROTO((rtx));
374 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void order_regs_for_reload PROTO((int));
376 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
377 static void reload_as_needed PROTO((rtx, int));
378 static void forget_old_reloads_1 PROTO((rtx, rtx));
379 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
380 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
382 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
384 static int reload_reg_free_p PROTO((int, int, enum reload_type));
385 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
386 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
387 static int reloads_conflict PROTO((int, int));
388 static int allocate_reload_reg PROTO((int, rtx, int, int));
389 static void choose_reload_regs PROTO((rtx, rtx));
390 static void merge_assigned_reloads PROTO((rtx));
391 static void emit_reload_insns PROTO((rtx));
392 static void delete_output_reload PROTO((rtx, int, rtx));
393 static void inc_for_reload PROTO((rtx, rtx, int));
394 static int constraint_accepts_reg_p PROTO((char *, rtx));
395 static int count_occurrences PROTO((rtx, rtx));
396 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
397 static int reload_cse_mem_conflict_p PROTO((rtx, rtx, enum machine_mode,
399 static void reload_cse_invalidate_mem PROTO((rtx));
400 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
401 static void reload_cse_regs PROTO((rtx));
402 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
403 static int reload_cse_noop_set_p PROTO((rtx));
404 static void reload_cse_simplify_set PROTO((rtx, rtx));
405 static void reload_cse_check_clobber PROTO((rtx, rtx));
406 static void reload_cse_record_set PROTO((rtx, rtx));
408 /* Initialize the reload pass once per compilation. */
415 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
416 Set spill_indirect_levels to the number of levels such addressing is
417 permitted, zero if it is not permitted at all. */
420 = gen_rtx (MEM, Pmode,
421 gen_rtx (PLUS, Pmode,
422 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
424 spill_indirect_levels = 0;
426 while (memory_address_p (QImode, tem))
428 spill_indirect_levels++;
429 tem = gen_rtx (MEM, Pmode, tem);
432 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
434 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
435 indirect_symref_ok = memory_address_p (QImode, tem);
437 /* See if reg+reg is a valid (and offsettable) address. */
439 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
441 tem = gen_rtx (PLUS, Pmode,
442 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
443 gen_rtx (REG, Pmode, i));
444 /* This way, we make sure that reg+reg is an offsettable address. */
445 tem = plus_constant (tem, 4);
447 if (memory_address_p (QImode, tem))
449 double_reg_address_ok = 1;
454 /* Initialize obstack for our rtl allocation. */
455 gcc_obstack_init (&reload_obstack);
456 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
458 /* Decide which register class should be used when reloading
459 addresses. If we are using SMALL_REGISTER_CLASSES, and any
460 parameters are passed in registers, then we do not want to use
461 those registers when reloading an address. Otherwise, if a
462 function argument needs a reload, we may wind up clobbering
463 another argument to the function which was already computed. If
464 we find a subset class which simply avoids those registers, we
465 use it instead. ??? It would be better to only use the
466 restricted class when we actually are loading function arguments,
467 but that is hard to determine. */
468 reload_address_base_reg_class = BASE_REG_CLASS;
469 reload_address_index_reg_class = INDEX_REG_CLASS;
470 #ifdef SMALL_REGISTER_CLASSES
471 if (SMALL_REGISTER_CLASSES)
474 HARD_REG_SET base, index;
477 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
478 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
479 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
481 if (FUNCTION_ARG_REGNO_P (regno))
483 CLEAR_HARD_REG_BIT (base, regno);
484 CLEAR_HARD_REG_BIT (index, regno);
488 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
490 for (p = reg_class_subclasses[BASE_REG_CLASS];
491 *p != LIM_REG_CLASSES;
494 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
497 reload_address_base_reg_class = *p;
502 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
504 for (p = reg_class_subclasses[INDEX_REG_CLASS];
505 *p != LIM_REG_CLASSES;
508 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
511 reload_address_index_reg_class = *p;
516 #endif /* SMALL_REGISTER_CLASSES */
519 /* Main entry point for the reload pass.
521 FIRST is the first insn of the function being compiled.
523 GLOBAL nonzero means we were called from global_alloc
524 and should attempt to reallocate any pseudoregs that we
525 displace from hard regs we will use for reloads.
526 If GLOBAL is zero, we do not have enough information to do that,
527 so any pseudo reg that is spilled must go to the stack.
529 DUMPFILE is the global-reg debugging dump file stream, or 0.
530 If it is nonzero, messages are written to it to describe
531 which registers are seized as reload regs, which pseudo regs
532 are spilled from them, and where the pseudo regs are reallocated to.
534 Return value is nonzero if reload failed
535 and we must not do any more for this function. */
538 reload (first, global, dumpfile)
544 register int i, j, k;
546 register struct elim_table *ep;
548 int something_changed;
549 int something_needs_reloads;
550 int something_needs_elimination;
551 int new_basic_block_needs;
552 enum reg_class caller_save_spill_class = NO_REGS;
553 int caller_save_group_size = 1;
555 /* Nonzero means we couldn't get enough spill regs. */
558 /* The basic block number currently being processed for INSN. */
561 /* Make sure even insns with volatile mem refs are recognizable. */
564 /* Enable find_equiv_reg to distinguish insns made by reload. */
565 reload_first_uid = get_max_uid ();
567 for (i = 0; i < N_REG_CLASSES; i++)
568 basic_block_needs[i] = 0;
570 #ifdef SECONDARY_MEMORY_NEEDED
571 /* Initialize the secondary memory table. */
572 clear_secondary_mem ();
575 /* Remember which hard regs appear explicitly
576 before we merge into `regs_ever_live' the ones in which
577 pseudo regs have been allocated. */
578 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
580 /* We don't have a stack slot for any spill reg yet. */
581 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
582 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
584 /* Initialize the save area information for caller-save, in case some
588 /* Compute which hard registers are now in use
589 as homes for pseudo registers.
590 This is done here rather than (eg) in global_alloc
591 because this point is reached even if not optimizing. */
593 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
596 for (i = 0; i < scratch_list_length; i++)
598 mark_scratch_live (scratch_list[i]);
600 /* Make sure that the last insn in the chain
601 is not something that needs reloading. */
602 emit_note (NULL_PTR, NOTE_INSN_DELETED);
604 /* Find all the pseudo registers that didn't get hard regs
605 but do have known equivalent constants or memory slots.
606 These include parameters (known equivalent to parameter slots)
607 and cse'd or loop-moved constant memory addresses.
609 Record constant equivalents in reg_equiv_constant
610 so they will be substituted by find_reloads.
611 Record memory equivalents in reg_mem_equiv so they can
612 be substituted eventually by altering the REG-rtx's. */
614 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
615 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
616 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
617 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
618 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
619 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
620 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
621 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
622 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
623 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
624 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
625 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
626 cannot_omit_stores = (char *) alloca (max_regno);
627 bzero (cannot_omit_stores, max_regno);
629 #ifdef SMALL_REGISTER_CLASSES
630 if (SMALL_REGISTER_CLASSES)
631 CLEAR_HARD_REG_SET (forbidden_regs);
634 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
635 Also find all paradoxical subregs and find largest such for each pseudo.
636 On machines with small register classes, record hard registers that
637 are used for user variables. These can never be used for spills.
638 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
639 caller-saved registers must be marked live. */
641 for (insn = first; insn; insn = NEXT_INSN (insn))
643 rtx set = single_set (insn);
645 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
646 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
647 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
648 if (! call_used_regs[i])
649 regs_ever_live[i] = 1;
651 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
653 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
655 #ifdef LEGITIMATE_PIC_OPERAND_P
656 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
657 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
661 rtx x = XEXP (note, 0);
662 i = REGNO (SET_DEST (set));
663 if (i > LAST_VIRTUAL_REGISTER)
665 if (GET_CODE (x) == MEM)
666 reg_equiv_memory_loc[i] = x;
667 else if (CONSTANT_P (x))
669 if (LEGITIMATE_CONSTANT_P (x))
670 reg_equiv_constant[i] = x;
672 reg_equiv_memory_loc[i]
673 = force_const_mem (GET_MODE (SET_DEST (set)), x);
678 /* If this register is being made equivalent to a MEM
679 and the MEM is not SET_SRC, the equivalencing insn
680 is one with the MEM as a SET_DEST and it occurs later.
681 So don't mark this insn now. */
682 if (GET_CODE (x) != MEM
683 || rtx_equal_p (SET_SRC (set), x))
684 reg_equiv_init[i] = insn;
689 /* If this insn is setting a MEM from a register equivalent to it,
690 this is the equivalencing insn. */
691 else if (set && GET_CODE (SET_DEST (set)) == MEM
692 && GET_CODE (SET_SRC (set)) == REG
693 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
694 && rtx_equal_p (SET_DEST (set),
695 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
696 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
698 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
699 scan_paradoxical_subregs (PATTERN (insn));
702 /* Does this function require a frame pointer? */
704 frame_pointer_needed = (! flag_omit_frame_pointer
705 #ifdef EXIT_IGNORE_STACK
706 /* ?? If EXIT_IGNORE_STACK is set, we will not save
707 and restore sp for alloca. So we can't eliminate
708 the frame pointer in that case. At some point,
709 we should improve this by emitting the
710 sp-adjusting insns for this case. */
711 || (current_function_calls_alloca
712 && EXIT_IGNORE_STACK)
714 || FRAME_POINTER_REQUIRED);
718 /* Initialize the table of registers to eliminate. The way we do this
719 depends on how the eliminable registers were defined. */
720 #ifdef ELIMINABLE_REGS
721 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
723 ep->can_eliminate = ep->can_eliminate_previous
724 = (CAN_ELIMINATE (ep->from, ep->to)
725 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
728 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
729 = ! frame_pointer_needed;
732 /* Count the number of eliminable registers and build the FROM and TO
733 REG rtx's. Note that code in gen_rtx will cause, e.g.,
734 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
735 We depend on this. */
736 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
738 num_eliminable += ep->can_eliminate;
739 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
740 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
743 num_labels = max_label_num () - get_first_label_num ();
745 /* Allocate the tables used to store offset information at labels. */
746 offsets_known_at = (char *) alloca (num_labels);
748 = (int (*)[NUM_ELIMINABLE_REGS])
749 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
751 offsets_known_at -= get_first_label_num ();
752 offsets_at -= get_first_label_num ();
754 /* Alter each pseudo-reg rtx to contain its hard reg number.
755 Assign stack slots to the pseudos that lack hard regs or equivalents.
756 Do not touch virtual registers. */
758 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
761 /* If we have some registers we think can be eliminated, scan all insns to
762 see if there is an insn that sets one of these registers to something
763 other than itself plus a constant. If so, the register cannot be
764 eliminated. Doing this scan here eliminates an extra pass through the
765 main reload loop in the most common case where register elimination
767 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
768 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
769 || GET_CODE (insn) == CALL_INSN)
770 note_stores (PATTERN (insn), mark_not_eliminable);
772 #ifndef REGISTER_CONSTRAINTS
773 /* If all the pseudo regs have hard regs,
774 except for those that are never referenced,
775 we know that no reloads are needed. */
776 /* But that is not true if there are register constraints, since
777 in that case some pseudos might be in the wrong kind of hard reg. */
779 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
780 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
783 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
787 /* Compute the order of preference for hard registers to spill.
788 Store them by decreasing preference in potential_reload_regs. */
790 order_regs_for_reload (global);
792 /* So far, no hard regs have been spilled. */
794 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
795 spill_reg_order[i] = -1;
797 /* Initialize to -1, which means take the first spill register. */
800 /* On most machines, we can't use any register explicitly used in the
801 rtl as a spill register. But on some, we have to. Those will have
802 taken care to keep the life of hard regs as short as possible. */
804 #ifdef SMALL_REGISTER_CLASSES
805 if (! SMALL_REGISTER_CLASSES)
807 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
809 /* Spill any hard regs that we know we can't eliminate. */
810 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
811 if (! ep->can_eliminate)
812 spill_hard_reg (ep->from, global, dumpfile, 1);
814 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
815 if (frame_pointer_needed)
816 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
820 for (i = 0; i < N_REG_CLASSES; i++)
822 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
823 bzero (basic_block_needs[i], n_basic_blocks);
826 /* From now on, we need to emit any moves without making new pseudos. */
827 reload_in_progress = 1;
829 /* This loop scans the entire function each go-round
830 and repeats until one repetition spills no additional hard regs. */
832 /* This flag is set when a pseudo reg is spilled,
833 to require another pass. Note that getting an additional reload
834 reg does not necessarily imply any pseudo reg was spilled;
835 sometimes we find a reload reg that no pseudo reg was allocated in. */
836 something_changed = 1;
837 /* This flag is set if there are any insns that require reloading. */
838 something_needs_reloads = 0;
839 /* This flag is set if there are any insns that require register
841 something_needs_elimination = 0;
842 while (something_changed)
846 /* For each class, number of reload regs needed in that class.
847 This is the maximum over all insns of the needs in that class
848 of the individual insn. */
849 int max_needs[N_REG_CLASSES];
850 /* For each class, size of group of consecutive regs
851 that is needed for the reloads of this class. */
852 int group_size[N_REG_CLASSES];
853 /* For each class, max number of consecutive groups needed.
854 (Each group contains group_size[CLASS] consecutive registers.) */
855 int max_groups[N_REG_CLASSES];
856 /* For each class, max number needed of regs that don't belong
857 to any of the groups. */
858 int max_nongroups[N_REG_CLASSES];
859 /* For each class, the machine mode which requires consecutive
860 groups of regs of that class.
861 If two different modes ever require groups of one class,
862 they must be the same size and equally restrictive for that class,
863 otherwise we can't handle the complexity. */
864 enum machine_mode group_mode[N_REG_CLASSES];
865 /* Record the insn where each maximum need is first found. */
866 rtx max_needs_insn[N_REG_CLASSES];
867 rtx max_groups_insn[N_REG_CLASSES];
868 rtx max_nongroups_insn[N_REG_CLASSES];
870 HOST_WIDE_INT starting_frame_size;
871 int previous_frame_pointer_needed = frame_pointer_needed;
872 static char *reg_class_names[] = REG_CLASS_NAMES;
874 something_changed = 0;
875 bzero ((char *) max_needs, sizeof max_needs);
876 bzero ((char *) max_groups, sizeof max_groups);
877 bzero ((char *) max_nongroups, sizeof max_nongroups);
878 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
879 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
880 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
881 bzero ((char *) group_size, sizeof group_size);
882 for (i = 0; i < N_REG_CLASSES; i++)
883 group_mode[i] = VOIDmode;
885 /* Keep track of which basic blocks are needing the reloads. */
888 /* Remember whether any element of basic_block_needs
889 changes from 0 to 1 in this pass. */
890 new_basic_block_needs = 0;
892 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
893 here because the stack size may be a part of the offset computation
894 for register elimination, and there might have been new stack slots
895 created in the last iteration of this loop. */
896 assign_stack_local (BLKmode, 0, 0);
898 starting_frame_size = get_frame_size ();
900 /* Reset all offsets on eliminable registers to their initial values. */
901 #ifdef ELIMINABLE_REGS
902 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
904 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
905 ep->previous_offset = ep->offset
906 = ep->max_offset = ep->initial_offset;
909 #ifdef INITIAL_FRAME_POINTER_OFFSET
910 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
912 if (!FRAME_POINTER_REQUIRED)
914 reg_eliminate[0].initial_offset = 0;
916 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
917 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
920 num_not_at_initial_offset = 0;
922 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
924 /* Set a known offset for each forced label to be at the initial offset
925 of each elimination. We do this because we assume that all
926 computed jumps occur from a location where each elimination is
927 at its initial offset. */
929 for (x = forced_labels; x; x = XEXP (x, 1))
931 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
933 /* For each pseudo register that has an equivalent location defined,
934 try to eliminate any eliminable registers (such as the frame pointer)
935 assuming initial offsets for the replacement register, which
938 If the resulting location is directly addressable, substitute
939 the MEM we just got directly for the old REG.
941 If it is not addressable but is a constant or the sum of a hard reg
942 and constant, it is probably not addressable because the constant is
943 out of range, in that case record the address; we will generate
944 hairy code to compute the address in a register each time it is
945 needed. Similarly if it is a hard register, but one that is not
946 valid as an address register.
948 If the location is not addressable, but does not have one of the
949 above forms, assign a stack slot. We have to do this to avoid the
950 potential of producing lots of reloads if, e.g., a location involves
951 a pseudo that didn't get a hard register and has an equivalent memory
952 location that also involves a pseudo that didn't get a hard register.
954 Perhaps at some point we will improve reload_when_needed handling
955 so this problem goes away. But that's very hairy. */
957 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
958 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
960 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX, 0);
962 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
964 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
965 else if (CONSTANT_P (XEXP (x, 0))
966 || (GET_CODE (XEXP (x, 0)) == REG
967 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
968 || (GET_CODE (XEXP (x, 0)) == PLUS
969 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
970 && (REGNO (XEXP (XEXP (x, 0), 0))
971 < FIRST_PSEUDO_REGISTER)
972 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
973 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
976 /* Make a new stack slot. Then indicate that something
977 changed so we go back and recompute offsets for
978 eliminable registers because the allocation of memory
979 below might change some offset. reg_equiv_{mem,address}
980 will be set up for this pseudo on the next pass around
982 reg_equiv_memory_loc[i] = 0;
983 reg_equiv_init[i] = 0;
985 something_changed = 1;
989 /* If we allocated another pseudo to the stack, redo elimination
991 if (something_changed)
994 /* If caller-saves needs a group, initialize the group to include
995 the size and mode required for caller-saves. */
997 if (caller_save_group_size > 1)
999 group_mode[(int) caller_save_spill_class] = Pmode;
1000 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1003 /* Compute the most additional registers needed by any instruction.
1004 Collect information separately for each class of regs. */
1006 for (insn = first; insn; insn = NEXT_INSN (insn))
1008 if (global && this_block + 1 < n_basic_blocks
1009 && insn == basic_block_head[this_block+1])
1012 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1013 might include REG_LABEL), we need to see what effects this
1014 has on the known offsets at labels. */
1016 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1017 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1018 && REG_NOTES (insn) != 0))
1019 set_label_offsets (insn, insn, 0);
1021 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1023 /* Nonzero means don't use a reload reg that overlaps
1024 the place where a function value can be returned. */
1025 rtx avoid_return_reg = 0;
1027 rtx old_body = PATTERN (insn);
1028 int old_code = INSN_CODE (insn);
1029 rtx old_notes = REG_NOTES (insn);
1030 int did_elimination = 0;
1032 /* To compute the number of reload registers of each class
1033 needed for an insn, we must simulate what choose_reload_regs
1034 can do. We do this by splitting an insn into an "input" and
1035 an "output" part. RELOAD_OTHER reloads are used in both.
1036 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1037 which must be live over the entire input section of reloads,
1038 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1039 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1042 The registers needed for output are RELOAD_OTHER and
1043 RELOAD_FOR_OUTPUT, which are live for the entire output
1044 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1045 reloads for each operand.
1047 The total number of registers needed is the maximum of the
1048 inputs and outputs. */
1052 /* [0] is normal, [1] is nongroup. */
1053 int regs[2][N_REG_CLASSES];
1054 int groups[N_REG_CLASSES];
1057 /* Each `struct needs' corresponds to one RELOAD_... type. */
1061 struct needs output;
1063 struct needs other_addr;
1064 struct needs op_addr;
1065 struct needs op_addr_reload;
1066 struct needs in_addr[MAX_RECOG_OPERANDS];
1067 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1068 struct needs out_addr[MAX_RECOG_OPERANDS];
1069 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1072 /* If needed, eliminate any eliminable registers. */
1074 did_elimination = eliminate_regs_in_insn (insn, 0);
1076 #ifdef SMALL_REGISTER_CLASSES
1077 /* Set avoid_return_reg if this is an insn
1078 that might use the value of a function call. */
1079 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1081 if (GET_CODE (PATTERN (insn)) == SET)
1082 after_call = SET_DEST (PATTERN (insn));
1083 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1084 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1085 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1089 else if (SMALL_REGISTER_CLASSES
1091 && !(GET_CODE (PATTERN (insn)) == SET
1092 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1094 if (reg_referenced_p (after_call, PATTERN (insn)))
1095 avoid_return_reg = after_call;
1098 #endif /* SMALL_REGISTER_CLASSES */
1100 /* Analyze the instruction. */
1101 find_reloads (insn, 0, spill_indirect_levels, global,
1104 /* Remember for later shortcuts which insns had any reloads or
1105 register eliminations.
1107 One might think that it would be worthwhile to mark insns
1108 that need register replacements but not reloads, but this is
1109 not safe because find_reloads may do some manipulation of
1110 the insn (such as swapping commutative operands), which would
1111 be lost when we restore the old pattern after register
1112 replacement. So the actions of find_reloads must be redone in
1113 subsequent passes or in reload_as_needed.
1115 However, it is safe to mark insns that need reloads
1116 but not register replacement. */
1118 PUT_MODE (insn, (did_elimination ? QImode
1119 : n_reloads ? HImode
1120 : GET_MODE (insn) == DImode ? DImode
1123 /* Discard any register replacements done. */
1124 if (did_elimination)
1126 obstack_free (&reload_obstack, reload_firstobj);
1127 PATTERN (insn) = old_body;
1128 INSN_CODE (insn) = old_code;
1129 REG_NOTES (insn) = old_notes;
1130 something_needs_elimination = 1;
1133 /* If this insn has no reloads, we need not do anything except
1134 in the case of a CALL_INSN when we have caller-saves and
1135 caller-save needs reloads. */
1138 && ! (GET_CODE (insn) == CALL_INSN
1139 && caller_save_spill_class != NO_REGS))
1142 something_needs_reloads = 1;
1143 bzero ((char *) &insn_needs, sizeof insn_needs);
1145 /* Count each reload once in every class
1146 containing the reload's own class. */
1148 for (i = 0; i < n_reloads; i++)
1150 register enum reg_class *p;
1151 enum reg_class class = reload_reg_class[i];
1153 enum machine_mode mode;
1155 struct needs *this_needs;
1157 /* Don't count the dummy reloads, for which one of the
1158 regs mentioned in the insn can be used for reloading.
1159 Don't count optional reloads.
1160 Don't count reloads that got combined with others. */
1161 if (reload_reg_rtx[i] != 0
1162 || reload_optional[i] != 0
1163 || (reload_out[i] == 0 && reload_in[i] == 0
1164 && ! reload_secondary_p[i]))
1167 /* Show that a reload register of this class is needed
1168 in this basic block. We do not use insn_needs and
1169 insn_groups because they are overly conservative for
1171 if (global && ! basic_block_needs[(int) class][this_block])
1173 basic_block_needs[(int) class][this_block] = 1;
1174 new_basic_block_needs = 1;
1178 mode = reload_inmode[i];
1179 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1180 mode = reload_outmode[i];
1181 size = CLASS_MAX_NREGS (class, mode);
1183 /* If this class doesn't want a group, determine if we have
1184 a nongroup need or a regular need. We have a nongroup
1185 need if this reload conflicts with a group reload whose
1186 class intersects with this reload's class. */
1190 for (j = 0; j < n_reloads; j++)
1191 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1192 (GET_MODE_SIZE (reload_outmode[j])
1193 > GET_MODE_SIZE (reload_inmode[j]))
1197 && (!reload_optional[j])
1198 && (reload_in[j] != 0 || reload_out[j] != 0
1199 || reload_secondary_p[j])
1200 && reloads_conflict (i, j)
1201 && reg_classes_intersect_p (class,
1202 reload_reg_class[j]))
1208 /* Decide which time-of-use to count this reload for. */
1209 switch (reload_when_needed[i])
1212 this_needs = &insn_needs.other;
1214 case RELOAD_FOR_INPUT:
1215 this_needs = &insn_needs.input;
1217 case RELOAD_FOR_OUTPUT:
1218 this_needs = &insn_needs.output;
1220 case RELOAD_FOR_INSN:
1221 this_needs = &insn_needs.insn;
1223 case RELOAD_FOR_OTHER_ADDRESS:
1224 this_needs = &insn_needs.other_addr;
1226 case RELOAD_FOR_INPUT_ADDRESS:
1227 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1229 case RELOAD_FOR_INPADDR_ADDRESS:
1230 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1232 case RELOAD_FOR_OUTPUT_ADDRESS:
1233 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1235 case RELOAD_FOR_OUTADDR_ADDRESS:
1236 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1238 case RELOAD_FOR_OPERAND_ADDRESS:
1239 this_needs = &insn_needs.op_addr;
1241 case RELOAD_FOR_OPADDR_ADDR:
1242 this_needs = &insn_needs.op_addr_reload;
1248 enum machine_mode other_mode, allocate_mode;
1250 /* Count number of groups needed separately from
1251 number of individual regs needed. */
1252 this_needs->groups[(int) class]++;
1253 p = reg_class_superclasses[(int) class];
1254 while (*p != LIM_REG_CLASSES)
1255 this_needs->groups[(int) *p++]++;
1257 /* Record size and mode of a group of this class. */
1258 /* If more than one size group is needed,
1259 make all groups the largest needed size. */
1260 if (group_size[(int) class] < size)
1262 other_mode = group_mode[(int) class];
1263 allocate_mode = mode;
1265 group_size[(int) class] = size;
1266 group_mode[(int) class] = mode;
1271 allocate_mode = group_mode[(int) class];
1274 /* Crash if two dissimilar machine modes both need
1275 groups of consecutive regs of the same class. */
1277 if (other_mode != VOIDmode && other_mode != allocate_mode
1278 && ! modes_equiv_for_class_p (allocate_mode,
1280 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1285 this_needs->regs[nongroup_need][(int) class] += 1;
1286 p = reg_class_superclasses[(int) class];
1287 while (*p != LIM_REG_CLASSES)
1288 this_needs->regs[nongroup_need][(int) *p++] += 1;
1294 /* All reloads have been counted for this insn;
1295 now merge the various times of use.
1296 This sets insn_needs, etc., to the maximum total number
1297 of registers needed at any point in this insn. */
1299 for (i = 0; i < N_REG_CLASSES; i++)
1301 int in_max, out_max;
1303 /* Compute normal and nongroup needs. */
1304 for (j = 0; j <= 1; j++)
1306 for (in_max = 0, out_max = 0, k = 0;
1307 k < reload_n_operands; k++)
1310 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1313 insn_needs.in_addr_addr[k].regs[j][i]);
1315 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1318 insn_needs.out_addr_addr[k].regs[j][i]);
1321 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1322 and operand addresses but not things used to reload
1323 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1324 don't conflict with things needed to reload inputs or
1327 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1328 insn_needs.op_addr_reload.regs[j][i]),
1331 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1333 insn_needs.input.regs[j][i]
1334 = MAX (insn_needs.input.regs[j][i]
1335 + insn_needs.op_addr.regs[j][i]
1336 + insn_needs.insn.regs[j][i],
1337 in_max + insn_needs.input.regs[j][i]);
1339 insn_needs.output.regs[j][i] += out_max;
1340 insn_needs.other.regs[j][i]
1341 += MAX (MAX (insn_needs.input.regs[j][i],
1342 insn_needs.output.regs[j][i]),
1343 insn_needs.other_addr.regs[j][i]);
1347 /* Now compute group needs. */
1348 for (in_max = 0, out_max = 0, j = 0;
1349 j < reload_n_operands; j++)
1351 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1352 in_max = MAX (in_max,
1353 insn_needs.in_addr_addr[j].groups[i]);
1355 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1357 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1360 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1361 insn_needs.op_addr_reload.groups[i]),
1363 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1365 insn_needs.input.groups[i]
1366 = MAX (insn_needs.input.groups[i]
1367 + insn_needs.op_addr.groups[i]
1368 + insn_needs.insn.groups[i],
1369 in_max + insn_needs.input.groups[i]);
1371 insn_needs.output.groups[i] += out_max;
1372 insn_needs.other.groups[i]
1373 += MAX (MAX (insn_needs.input.groups[i],
1374 insn_needs.output.groups[i]),
1375 insn_needs.other_addr.groups[i]);
1378 /* If this is a CALL_INSN and caller-saves will need
1379 a spill register, act as if the spill register is
1380 needed for this insn. However, the spill register
1381 can be used by any reload of this insn, so we only
1382 need do something if no need for that class has
1385 The assumption that every CALL_INSN will trigger a
1386 caller-save is highly conservative, however, the number
1387 of cases where caller-saves will need a spill register but
1388 a block containing a CALL_INSN won't need a spill register
1389 of that class should be quite rare.
1391 If a group is needed, the size and mode of the group will
1392 have been set up at the beginning of this loop. */
1394 if (GET_CODE (insn) == CALL_INSN
1395 && caller_save_spill_class != NO_REGS)
1397 /* See if this register would conflict with any reload
1398 that needs a group. */
1399 int nongroup_need = 0;
1400 int *caller_save_needs;
1402 for (j = 0; j < n_reloads; j++)
1403 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1404 (GET_MODE_SIZE (reload_outmode[j])
1405 > GET_MODE_SIZE (reload_inmode[j]))
1409 && reg_classes_intersect_p (caller_save_spill_class,
1410 reload_reg_class[j]))
1417 = (caller_save_group_size > 1
1418 ? insn_needs.other.groups
1419 : insn_needs.other.regs[nongroup_need]);
1421 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1423 register enum reg_class *p
1424 = reg_class_superclasses[(int) caller_save_spill_class];
1426 caller_save_needs[(int) caller_save_spill_class]++;
1428 while (*p != LIM_REG_CLASSES)
1429 caller_save_needs[(int) *p++] += 1;
1432 /* Show that this basic block will need a register of
1436 && ! (basic_block_needs[(int) caller_save_spill_class]
1439 basic_block_needs[(int) caller_save_spill_class]
1441 new_basic_block_needs = 1;
1445 #ifdef SMALL_REGISTER_CLASSES
1446 /* If this insn stores the value of a function call,
1447 and that value is in a register that has been spilled,
1448 and if the insn needs a reload in a class
1449 that might use that register as the reload register,
1450 then add an extra need in that class.
1451 This makes sure we have a register available that does
1452 not overlap the return value. */
1454 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1456 int regno = REGNO (avoid_return_reg);
1458 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1460 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1462 /* First compute the "basic needs", which counts a
1463 need only in the smallest class in which it
1466 bcopy ((char *) insn_needs.other.regs[0],
1467 (char *) basic_needs, sizeof basic_needs);
1468 bcopy ((char *) insn_needs.other.groups,
1469 (char *) basic_groups, sizeof basic_groups);
1471 for (i = 0; i < N_REG_CLASSES; i++)
1475 if (basic_needs[i] >= 0)
1476 for (p = reg_class_superclasses[i];
1477 *p != LIM_REG_CLASSES; p++)
1478 basic_needs[(int) *p] -= basic_needs[i];
1480 if (basic_groups[i] >= 0)
1481 for (p = reg_class_superclasses[i];
1482 *p != LIM_REG_CLASSES; p++)
1483 basic_groups[(int) *p] -= basic_groups[i];
1486 /* Now count extra regs if there might be a conflict with
1487 the return value register. */
1489 for (r = regno; r < regno + nregs; r++)
1490 if (spill_reg_order[r] >= 0)
1491 for (i = 0; i < N_REG_CLASSES; i++)
1492 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1494 if (basic_needs[i] > 0)
1498 insn_needs.other.regs[0][i]++;
1499 p = reg_class_superclasses[i];
1500 while (*p != LIM_REG_CLASSES)
1501 insn_needs.other.regs[0][(int) *p++]++;
1503 if (basic_groups[i] > 0)
1507 insn_needs.other.groups[i]++;
1508 p = reg_class_superclasses[i];
1509 while (*p != LIM_REG_CLASSES)
1510 insn_needs.other.groups[(int) *p++]++;
1514 #endif /* SMALL_REGISTER_CLASSES */
1516 /* For each class, collect maximum need of any insn. */
1518 for (i = 0; i < N_REG_CLASSES; i++)
1520 if (max_needs[i] < insn_needs.other.regs[0][i])
1522 max_needs[i] = insn_needs.other.regs[0][i];
1523 max_needs_insn[i] = insn;
1525 if (max_groups[i] < insn_needs.other.groups[i])
1527 max_groups[i] = insn_needs.other.groups[i];
1528 max_groups_insn[i] = insn;
1530 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1532 max_nongroups[i] = insn_needs.other.regs[1][i];
1533 max_nongroups_insn[i] = insn;
1537 /* Note that there is a continue statement above. */
1540 /* If we allocated any new memory locations, make another pass
1541 since it might have changed elimination offsets. */
1542 if (starting_frame_size != get_frame_size ())
1543 something_changed = 1;
1546 for (i = 0; i < N_REG_CLASSES; i++)
1548 if (max_needs[i] > 0)
1550 ";; Need %d reg%s of class %s (for insn %d).\n",
1551 max_needs[i], max_needs[i] == 1 ? "" : "s",
1552 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1553 if (max_nongroups[i] > 0)
1555 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1556 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1557 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1558 if (max_groups[i] > 0)
1560 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1561 max_groups[i], max_groups[i] == 1 ? "" : "s",
1562 mode_name[(int) group_mode[i]],
1563 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1566 /* If we have caller-saves, set up the save areas and see if caller-save
1567 will need a spill register. */
1569 if (caller_save_needed)
1571 /* Set the offsets for setup_save_areas. */
1572 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1574 ep->previous_offset = ep->max_offset;
1576 if ( ! setup_save_areas (&something_changed)
1577 && caller_save_spill_class == NO_REGS)
1579 /* The class we will need depends on whether the machine
1580 supports the sum of two registers for an address; see
1581 find_address_reloads for details. */
1583 caller_save_spill_class
1584 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1585 caller_save_group_size
1586 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1587 something_changed = 1;
1591 /* See if anything that happened changes which eliminations are valid.
1592 For example, on the Sparc, whether or not the frame pointer can
1593 be eliminated can depend on what registers have been used. We need
1594 not check some conditions again (such as flag_omit_frame_pointer)
1595 since they can't have changed. */
1597 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1598 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1599 #ifdef ELIMINABLE_REGS
1600 || ! CAN_ELIMINATE (ep->from, ep->to)
1603 ep->can_eliminate = 0;
1605 /* Look for the case where we have discovered that we can't replace
1606 register A with register B and that means that we will now be
1607 trying to replace register A with register C. This means we can
1608 no longer replace register C with register B and we need to disable
1609 such an elimination, if it exists. This occurs often with A == ap,
1610 B == sp, and C == fp. */
1612 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1614 struct elim_table *op;
1615 register int new_to = -1;
1617 if (! ep->can_eliminate && ep->can_eliminate_previous)
1619 /* Find the current elimination for ep->from, if there is a
1621 for (op = reg_eliminate;
1622 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1623 if (op->from == ep->from && op->can_eliminate)
1629 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1631 for (op = reg_eliminate;
1632 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1633 if (op->from == new_to && op->to == ep->to)
1634 op->can_eliminate = 0;
1638 /* See if any registers that we thought we could eliminate the previous
1639 time are no longer eliminable. If so, something has changed and we
1640 must spill the register. Also, recompute the number of eliminable
1641 registers and see if the frame pointer is needed; it is if there is
1642 no elimination of the frame pointer that we can perform. */
1644 frame_pointer_needed = 1;
1645 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1647 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1648 && ep->to != HARD_FRAME_POINTER_REGNUM)
1649 frame_pointer_needed = 0;
1651 if (! ep->can_eliminate && ep->can_eliminate_previous)
1653 ep->can_eliminate_previous = 0;
1654 spill_hard_reg (ep->from, global, dumpfile, 1);
1655 something_changed = 1;
1660 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1661 /* If we didn't need a frame pointer last time, but we do now, spill
1662 the hard frame pointer. */
1663 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1665 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1666 something_changed = 1;
1670 /* If all needs are met, we win. */
1672 for (i = 0; i < N_REG_CLASSES; i++)
1673 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1675 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1678 /* Not all needs are met; must spill some hard regs. */
1680 /* Put all registers spilled so far back in potential_reload_regs, but
1681 put them at the front, since we've already spilled most of the
1682 pseudos in them (we might have left some pseudos unspilled if they
1683 were in a block that didn't need any spill registers of a conflicting
1684 class. We used to try to mark off the need for those registers,
1685 but doing so properly is very complex and reallocating them is the
1686 simpler approach. First, "pack" potential_reload_regs by pushing
1687 any nonnegative entries towards the end. That will leave room
1688 for the registers we already spilled.
1690 Also, undo the marking of the spill registers from the last time
1691 around in FORBIDDEN_REGS since we will probably be allocating
1694 ??? It is theoretically possible that we might end up not using one
1695 of our previously-spilled registers in this allocation, even though
1696 they are at the head of the list. It's not clear what to do about
1697 this, but it was no better before, when we marked off the needs met
1698 by the previously-spilled registers. With the current code, globals
1699 can be allocated into these registers, but locals cannot. */
1703 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1704 if (potential_reload_regs[i] != -1)
1705 potential_reload_regs[j--] = potential_reload_regs[i];
1707 for (i = 0; i < n_spills; i++)
1709 potential_reload_regs[i] = spill_regs[i];
1710 spill_reg_order[spill_regs[i]] = -1;
1711 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1717 /* Now find more reload regs to satisfy the remaining need
1718 Do it by ascending class number, since otherwise a reg
1719 might be spilled for a big class and might fail to count
1720 for a smaller class even though it belongs to that class.
1722 Count spilled regs in `spills', and add entries to
1723 `spill_regs' and `spill_reg_order'.
1725 ??? Note there is a problem here.
1726 When there is a need for a group in a high-numbered class,
1727 and also need for non-group regs that come from a lower class,
1728 the non-group regs are chosen first. If there aren't many regs,
1729 they might leave no room for a group.
1731 This was happening on the 386. To fix it, we added the code
1732 that calls possible_group_p, so that the lower class won't
1733 break up the last possible group.
1735 Really fixing the problem would require changes above
1736 in counting the regs already spilled, and in choose_reload_regs.
1737 It might be hard to avoid introducing bugs there. */
1739 CLEAR_HARD_REG_SET (counted_for_groups);
1740 CLEAR_HARD_REG_SET (counted_for_nongroups);
1742 for (class = 0; class < N_REG_CLASSES; class++)
1744 /* First get the groups of registers.
1745 If we got single registers first, we might fragment
1747 while (max_groups[class] > 0)
1749 /* If any single spilled regs happen to form groups,
1750 count them now. Maybe we don't really need
1751 to spill another group. */
1752 count_possible_groups (group_size, group_mode, max_groups,
1755 if (max_groups[class] <= 0)
1758 /* Groups of size 2 (the only groups used on most machines)
1759 are treated specially. */
1760 if (group_size[class] == 2)
1762 /* First, look for a register that will complete a group. */
1763 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1767 j = potential_reload_regs[i];
1768 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1770 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1771 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1772 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1773 && HARD_REGNO_MODE_OK (other, group_mode[class])
1774 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1776 /* We don't want one part of another group.
1777 We could get "two groups" that overlap! */
1778 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1780 (j < FIRST_PSEUDO_REGISTER - 1
1781 && (other = j + 1, spill_reg_order[other] >= 0)
1782 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1783 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1784 && HARD_REGNO_MODE_OK (j, group_mode[class])
1785 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1787 && ! TEST_HARD_REG_BIT (counted_for_groups,
1790 register enum reg_class *p;
1792 /* We have found one that will complete a group,
1793 so count off one group as provided. */
1794 max_groups[class]--;
1795 p = reg_class_superclasses[class];
1796 while (*p != LIM_REG_CLASSES)
1798 if (group_size [(int) *p] <= group_size [class])
1799 max_groups[(int) *p]--;
1803 /* Indicate both these regs are part of a group. */
1804 SET_HARD_REG_BIT (counted_for_groups, j);
1805 SET_HARD_REG_BIT (counted_for_groups, other);
1809 /* We can't complete a group, so start one. */
1810 #ifdef SMALL_REGISTER_CLASSES
1811 /* Look for a pair neither of which is explicitly used. */
1812 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1813 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1816 j = potential_reload_regs[i];
1817 /* Verify that J+1 is a potential reload reg. */
1818 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1819 if (potential_reload_regs[k] == j + 1)
1821 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1822 && k < FIRST_PSEUDO_REGISTER
1823 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1824 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1825 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1826 && HARD_REGNO_MODE_OK (j, group_mode[class])
1827 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1829 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1830 /* Reject J at this stage
1831 if J+1 was explicitly used. */
1832 && ! regs_explicitly_used[j + 1])
1836 /* Now try any group at all
1837 whose registers are not in bad_spill_regs. */
1838 if (i == FIRST_PSEUDO_REGISTER)
1839 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1842 j = potential_reload_regs[i];
1843 /* Verify that J+1 is a potential reload reg. */
1844 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1845 if (potential_reload_regs[k] == j + 1)
1847 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1848 && k < FIRST_PSEUDO_REGISTER
1849 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1850 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1851 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1852 && HARD_REGNO_MODE_OK (j, group_mode[class])
1853 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1855 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1859 /* I should be the index in potential_reload_regs
1860 of the new reload reg we have found. */
1862 if (i >= FIRST_PSEUDO_REGISTER)
1864 /* There are no groups left to spill. */
1865 spill_failure (max_groups_insn[class]);
1871 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1876 /* For groups of more than 2 registers,
1877 look for a sufficient sequence of unspilled registers,
1878 and spill them all at once. */
1879 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1883 j = potential_reload_regs[i];
1885 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1886 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1888 /* Check each reg in the sequence. */
1889 for (k = 0; k < group_size[class]; k++)
1890 if (! (spill_reg_order[j + k] < 0
1891 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1892 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1894 /* We got a full sequence, so spill them all. */
1895 if (k == group_size[class])
1897 register enum reg_class *p;
1898 for (k = 0; k < group_size[class]; k++)
1901 SET_HARD_REG_BIT (counted_for_groups, j + k);
1902 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1903 if (potential_reload_regs[idx] == j + k)
1906 |= new_spill_reg (idx, class,
1907 max_needs, NULL_PTR,
1911 /* We have found one that will complete a group,
1912 so count off one group as provided. */
1913 max_groups[class]--;
1914 p = reg_class_superclasses[class];
1915 while (*p != LIM_REG_CLASSES)
1917 if (group_size [(int) *p]
1918 <= group_size [class])
1919 max_groups[(int) *p]--;
1926 /* We couldn't find any registers for this reload.
1927 Avoid going into an infinite loop. */
1928 if (i >= FIRST_PSEUDO_REGISTER)
1930 /* There are no groups left. */
1931 spill_failure (max_groups_insn[class]);
1938 /* Now similarly satisfy all need for single registers. */
1940 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1942 /* If we spilled enough regs, but they weren't counted
1943 against the non-group need, see if we can count them now.
1944 If so, we can avoid some actual spilling. */
1945 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1946 for (i = 0; i < n_spills; i++)
1947 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1949 && !TEST_HARD_REG_BIT (counted_for_groups,
1951 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1953 && max_nongroups[class] > 0)
1955 register enum reg_class *p;
1957 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1958 max_nongroups[class]--;
1959 p = reg_class_superclasses[class];
1960 while (*p != LIM_REG_CLASSES)
1961 max_nongroups[(int) *p++]--;
1963 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1966 /* Consider the potential reload regs that aren't
1967 yet in use as reload regs, in order of preference.
1968 Find the most preferred one that's in this class. */
1970 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1971 if (potential_reload_regs[i] >= 0
1972 && TEST_HARD_REG_BIT (reg_class_contents[class],
1973 potential_reload_regs[i])
1974 /* If this reg will not be available for groups,
1975 pick one that does not foreclose possible groups.
1976 This is a kludge, and not very general,
1977 but it should be sufficient to make the 386 work,
1978 and the problem should not occur on machines with
1980 && (max_nongroups[class] == 0
1981 || possible_group_p (potential_reload_regs[i], max_groups)))
1984 /* If we couldn't get a register, try to get one even if we
1985 might foreclose possible groups. This may cause problems
1986 later, but that's better than aborting now, since it is
1987 possible that we will, in fact, be able to form the needed
1988 group even with this allocation. */
1990 if (i >= FIRST_PSEUDO_REGISTER
1991 && (asm_noperands (max_needs[class] > 0
1992 ? max_needs_insn[class]
1993 : max_nongroups_insn[class])
1995 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1996 if (potential_reload_regs[i] >= 0
1997 && TEST_HARD_REG_BIT (reg_class_contents[class],
1998 potential_reload_regs[i]))
2001 /* I should be the index in potential_reload_regs
2002 of the new reload reg we have found. */
2004 if (i >= FIRST_PSEUDO_REGISTER)
2006 /* There are no possible registers left to spill. */
2007 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
2008 : max_nongroups_insn[class]);
2014 |= new_spill_reg (i, class, max_needs, max_nongroups,
2020 /* If global-alloc was run, notify it of any register eliminations we have
2023 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2024 if (ep->can_eliminate)
2025 mark_elimination (ep->from, ep->to);
2027 /* Insert code to save and restore call-clobbered hard regs
2028 around calls. Tell if what mode to use so that we will process
2029 those insns in reload_as_needed if we have to. */
2031 if (caller_save_needed)
2032 save_call_clobbered_regs (num_eliminable ? QImode
2033 : caller_save_spill_class != NO_REGS ? HImode
2036 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2037 If that insn didn't set the register (i.e., it copied the register to
2038 memory), just delete that insn instead of the equivalencing insn plus
2039 anything now dead. If we call delete_dead_insn on that insn, we may
2040 delete the insn that actually sets the register if the register die
2041 there and that is incorrect. */
2043 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2044 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2045 && GET_CODE (reg_equiv_init[i]) != NOTE)
2047 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2048 delete_dead_insn (reg_equiv_init[i]);
2051 PUT_CODE (reg_equiv_init[i], NOTE);
2052 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2053 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2057 /* Use the reload registers where necessary
2058 by generating move instructions to move the must-be-register
2059 values into or out of the reload registers. */
2061 if (something_needs_reloads || something_needs_elimination
2062 || (caller_save_needed && num_eliminable)
2063 || caller_save_spill_class != NO_REGS)
2064 reload_as_needed (first, global);
2066 /* If we were able to eliminate the frame pointer, show that it is no
2067 longer live at the start of any basic block. If it ls live by
2068 virtue of being in a pseudo, that pseudo will be marked live
2069 and hence the frame pointer will be known to be live via that
2072 if (! frame_pointer_needed)
2073 for (i = 0; i < n_basic_blocks; i++)
2074 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
2075 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
2076 % REGSET_ELT_BITS));
2078 /* Come here (with failure set nonzero) if we can't get enough spill regs
2079 and we decide not to abort about it. */
2082 reload_in_progress = 0;
2084 /* Now eliminate all pseudo regs by modifying them into
2085 their equivalent memory references.
2086 The REG-rtx's for the pseudos are modified in place,
2087 so all insns that used to refer to them now refer to memory.
2089 For a reg that has a reg_equiv_address, all those insns
2090 were changed by reloading so that no insns refer to it any longer;
2091 but the DECL_RTL of a variable decl may refer to it,
2092 and if so this causes the debugging info to mention the variable. */
2094 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2098 if (reg_equiv_mem[i])
2100 addr = XEXP (reg_equiv_mem[i], 0);
2101 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2103 if (reg_equiv_address[i])
2104 addr = reg_equiv_address[i];
2107 if (reg_renumber[i] < 0)
2109 rtx reg = regno_reg_rtx[i];
2110 XEXP (reg, 0) = addr;
2111 REG_USERVAR_P (reg) = 0;
2112 MEM_IN_STRUCT_P (reg) = in_struct;
2113 PUT_CODE (reg, MEM);
2115 else if (reg_equiv_mem[i])
2116 XEXP (reg_equiv_mem[i], 0) = addr;
2120 /* Do a very simple CSE pass over just the hard registers. */
2122 reload_cse_regs (first);
2124 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2125 /* Make a pass over all the insns and remove death notes for things that
2126 are no longer registers or no longer die in the insn (e.g., an input
2127 and output pseudo being tied). */
2129 for (insn = first; insn; insn = NEXT_INSN (insn))
2130 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2134 for (note = REG_NOTES (insn); note; note = next)
2136 next = XEXP (note, 1);
2137 if (REG_NOTE_KIND (note) == REG_DEAD
2138 && (GET_CODE (XEXP (note, 0)) != REG
2139 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2140 remove_note (insn, note);
2145 /* Indicate that we no longer have known memory locations or constants. */
2146 reg_equiv_constant = 0;
2147 reg_equiv_memory_loc = 0;
2150 free (scratch_list);
2153 free (scratch_block);
2156 CLEAR_HARD_REG_SET (used_spill_regs);
2157 for (i = 0; i < n_spills; i++)
2158 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2163 /* Nonzero if, after spilling reg REGNO for non-groups,
2164    it will still be possible to find a group if we still need one.  */
/* Old-style (K&R) definition: REGNO is a hard register number just chosen
   for a non-group reload; MAX_GROUPS is indexed by register class and
   counts groups still needed.  NOTE(review): several interior lines of
   this function are elided in this view.  */
2167 possible_group_p (regno, max_groups)
/* CLASS is the class (if any) that still needs a group; NO_REGS means
   none found yet.  */
2172   int class = (int) NO_REGS;
/* Find the first class that still needs at least one group.
   ??? Only one class is handled; see the comment further down.  */
2174   for (i = 0; i < (int) N_REG_CLASSES; i++)
2175     if (max_groups[i] > 0)
/* If no class needs groups, spilling REGNO cannot foreclose anything.  */
2181   if (class == (int) NO_REGS)
2184   /* Consider each pair of consecutive registers.  */
2185   for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2187       /* Ignore pairs that include reg REGNO.  */
2188       if (i == regno || i + 1 == regno)
2191       /* Ignore pairs that are outside the class that needs the group.
2192 	 ??? Here we fail to handle the case where two different classes
2193 	 independently need groups.  But this never happens with our
2194 	 current machine descriptions.  */
2195       if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2196 	     && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2199       /* A pair of consecutive regs we can still spill does the trick.  */
2200       if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2201 	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2202 	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2205       /* A pair of one already spilled and one we can spill does it
2206 	 provided the one already spilled is not otherwise reserved.  */
2207       if (spill_reg_order[i] < 0
2208 	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2209 	  && spill_reg_order[i + 1] >= 0
2210 	  && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2211 	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
/* Symmetric case: upper reg already spilled (and unreserved), lower reg
   still spillable.  */
2213       if (spill_reg_order[i + 1] < 0
2214 	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2215 	  && spill_reg_order[i] >= 0
2216 	  && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2217 	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i)
2224 /* Count any groups of CLASS that can be formed from the registers recently
/* K&R definition.  GROUP_SIZE and GROUP_MODE are per-class vectors of the
   required group length and mode; MAX_GROUPS is the per-class count of
   groups still needed and is decremented here as groups are found.
   NOTE(review): some interior lines are elided in this view.  */
2228 count_possible_groups (group_size, group_mode, max_groups, class)
2230      enum machine_mode *group_mode;
2237   /* Now find all consecutive groups of spilled registers
2238      and mark each group off against the need for such groups.
2239      But don't count them against ordinary need, yet.  */
/* Nothing to do if this class does not use groups at all.  */
2241   if (group_size[class] == 0)
2244   CLEAR_HARD_REG_SET (new);
2246   /* Make a mask of all the regs that are spill regs in class I.  */
/* Only count spill regs not already reserved for groups or non-groups.  */
2247   for (i = 0; i < n_spills; i++)
2248     if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2249 	&& ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2250 	&& ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2251       SET_HARD_REG_BIT (new, spill_regs[i]);
2253   /* Find each consecutive group of them.  */
2254   for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2255     if (TEST_HARD_REG_BIT (new, i)
2256 	&& i + group_size[class] <= FIRST_PSEUDO_REGISTER
2257 	&& HARD_REGNO_MODE_OK (i, group_mode[class]))
/* Check the remaining group_size-1 regs are all in the candidate mask.  */
2259 	for (j = 1; j < group_size[class]; j++)
2260 	  if (! TEST_HARD_REG_BIT (new, i + j))
2263 	if (j == group_size[class])
2265 	    /* We found a group.  Mark it off against this class's need for
2266 	       groups, and against each superclass too.  */
2267 	    register enum reg_class *p;
2269 	    max_groups[class]--;
2270 	    p = reg_class_superclasses[class];
2271 	    while (*p != LIM_REG_CLASSES)
/* A superclass is satisfied only if its groups are no longer than ours.  */
2273 		if (group_size [(int) *p] <= group_size [class])
2274 		  max_groups[(int) *p]--;
2278 	    /* Don't count these registers again.  */
2279 	    for (j = 0; j < group_size[class]; j++)
2280 	      SET_HARD_REG_BIT (counted_for_groups, i + j);
2283 	/* Skip to the last reg in this group.  When i is incremented above,
2284 	   it will then point to the first reg of the next possible group.  */
2289 /* ALLOCATE_MODE is a register mode that needs to be reloaded.  OTHER_MODE is
2290    another mode that needs to be reloaded for the same register class CLASS.
2291    If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2292    ALLOCATE_MODE will never be smaller than OTHER_MODE.
2294    This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2295    ALLOCATE_MODE.  This test is unnecessary, because we will never try to put
2296    something of mode ALLOCATE_MODE into an OTHER_MODE register.  Testing this
2297    causes unnecessary failures on machines requiring alignment of register
2298    groups when the two modes are different sizes, because the larger mode has
2299    more strict alignment rules than the smaller mode.  */
/* K&R definition; returns nonzero (success) unless some register of CLASS
   accepts ALLOCATE_MODE but rejects OTHER_MODE.  NOTE(review): the return
   statements are elided in this view.  */
2302 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2303      enum machine_mode allocate_mode, other_mode;
2304      enum reg_class class;
/* Scan every hard register in CLASS looking for a counterexample.  */
2307   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2309       if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2310 	  && HARD_REGNO_MODE_OK (regno, allocate_mode)
2311 	  && ! HARD_REGNO_MODE_OK (regno, other_mode))
2317 /* Handle the failure to find a register to spill.
2318    INSN should be one of the insns which needed this particular spill reg.  */
2321 spill_failure (insn)
/* If the offending insn is an asm, report a user-level error and keep
   compiling; otherwise this is a compiler failure and we abort with
   fatal_insn.  */
2324   if (asm_noperands (PATTERN (insn)) >= 0)
2325     error_for_asm (insn, "`asm' needs too many reloads");
2327     fatal_insn ("Unable to find a register to spill.", insn);
2330 /* Add a new register to the tables of available spill-registers
2331     (as well as spilling all pseudos allocated to the register).
2332    I is the index of this register in potential_reload_regs.
2333    CLASS is the regclass whose need is being satisfied.
2334    MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2335     so that this register can count off against them.
2336     MAX_NONGROUPS is 0 if this register is part of a group.
2337    GLOBAL and DUMPFILE are the same as the args that `reload' got.  */
/* NOTE(review): some interior lines (declarations, braces, the return)
   are elided in this view; the visible return value VAL comes from
   spill_hard_reg below, possibly forced nonzero at the end.  */
2340 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2348   register enum reg_class *p;
2350   int regno = potential_reload_regs[i];
/* Index out of range means the caller never found a usable register.  */
2352   if (i >= FIRST_PSEUDO_REGISTER)
2353     abort ();	/* Caller failed to find any register.  */
/* Spilling a fixed or forbidden register is unrecoverable.  */
2355   if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2356     fatal ("fixed or forbidden register was spilled.\n\
2357 This may be due to a compiler bug or to impossible asm\n\
2358 statements or clauses.");
2360   /* Make reg REGNO an additional reload reg.  */
2362   potential_reload_regs[i] = -1;
2363   spill_regs[n_spills] = regno;
2364   spill_reg_order[regno] = n_spills;
2366     fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2368   /* Clear off the needs we just satisfied.  */
/* Decrement the need of CLASS and of every superclass of CLASS.  */
2371   p = reg_class_superclasses[class];
2372   while (*p != LIM_REG_CLASSES)
2373     max_needs[(int) *p++]--;
/* If a non-group need remains, let this register satisfy it too and mark
   it reserved so group formation won't reuse it.  */
2375   if (max_nongroups && max_nongroups[class] > 0)
2377       SET_HARD_REG_BIT (counted_for_nongroups, regno);
2378       max_nongroups[class]--;
2379       p = reg_class_superclasses[class];
2380       while (*p != LIM_REG_CLASSES)
2381 	max_nongroups[(int) *p++]--;
2384   /* Spill every pseudo reg that was allocated to this reg
2385      or to something that overlaps this reg.  */
2387   val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2389   /* If there are some registers still to eliminate and this register
2390      wasn't ever used before, additional stack space may have to be
2391      allocated to store this register.  Thus, we may have changed the offset
2392      between the stack and frame pointers, so mark that something has changed.
2393      (If new pseudos were spilled, thus requiring more space, VAL would have
2394      been set non-zero by the call to spill_hard_reg above since additional
2395      reloads may be needed in that case.
2397      One might think that we need only set VAL to 1 if this is a call-used
2398      register.  However, the set of registers that must be saved by the
2399      prologue is not identical to the call-used set.  For example, the
2400      register used by the call insn for the return PC is a call-used register,
2401      but must be saved by the prologue.  */
2402   if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2405   regs_ever_live[spill_regs[n_spills]] = 1;
2411 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2412    data that is dead in INSN.  */
2415 delete_dead_insn (insn)
2418   rtx prev = prev_real_insn (insn);
2421   /* If the previous insn sets a register that dies in our insn, delete it
/* Recurse: the feeding insn is only needed for the value that dies here.  */
2423   if (prev && GET_CODE (PATTERN (prev)) == SET
2424       && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2425       && reg_mentioned_p (prev_dest, PATTERN (insn))
2426       && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2427     delete_dead_insn (prev);
/* Turn INSN itself into a deleted-insn note rather than unlinking it.  */
2429   PUT_CODE (insn, NOTE);
2430   NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2431   NOTE_SOURCE_FILE (insn) = 0;
2434 /* Modify the home of pseudo-reg I.
2435    The new home is present in reg_renumber[I].
2437    FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2438    or it may be -1, meaning there is none or it is not relevant.
2439    This is used so that all pseudos spilled from a given hard reg
2440    can share one stack slot.  */
/* NOTE(review): several interior lines (declarations, braces, early
   returns) are elided in this view.  */
2443 alter_reg (i, from_reg)
2447   /* When outputting an inline function, this can happen
2448      for a reg that isn't actually used.  */
2449   if (regno_reg_rtx[i] == 0)
2452   /* If the reg got changed to a MEM at rtl-generation time,
/* ... nothing to do for it here (remainder of comment elided).  */
2454   if (GET_CODE (regno_reg_rtx[i]) != REG)
2457   /* Modify the reg-rtx to contain the new hard reg
2458      number or else to contain its pseudo reg number.  */
2459   REGNO (regno_reg_rtx[i])
2460     = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2462   /* If we have a pseudo that is needed but has no hard reg or equivalent,
2463      allocate a stack slot for it.  */
2465   if (reg_renumber[i] < 0
2466       && reg_n_refs[i] > 0
2467       && reg_equiv_constant[i] == 0
2468       && reg_equiv_memory_loc[i] == 0)
2471       int inherent_size = PSEUDO_REGNO_BYTES (i);
2472       int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2475       /* Each pseudo reg has an inherent size which comes from its own mode,
2476 	 and a total size which provides room for paradoxical subregs
2477 	 which refer to the pseudo reg in wider modes.
2479 	 We can use a slot already allocated if it provides both
2480 	 enough inherent space and enough total space.
2481 	 Otherwise, we allocate a new slot, making sure that it has no less
2482 	 inherent space, and no less total space, then the previous slot.  */
2485       /* No known place to spill from => no slot to reuse.  */
2486 	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2487 				  inherent_size == total_size ? 0 : -1);
2488 	  if (BYTES_BIG_ENDIAN)
2489 	    /* Cancel the big-endian correction done in assign_stack_local.
2490 	       Get the address of the beginning of the slot.
2491 	       This is so we can do a big-endian correction unconditionally
2493 	    adjust = inherent_size - total_size;
2495 	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2497       /* Reuse a stack slot if possible.  */
2498       else if (spill_stack_slot[from_reg] != 0
2499 	       && spill_stack_slot_width[from_reg] >= total_size
2500 	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2502 	x = spill_stack_slot[from_reg];
2503       /* Allocate a bigger slot.  */
2506 	  /* Compute maximum size needed, both for inherent size
2507 	     and for total size.  */
2508 	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
/* Grow MODE and TOTAL_SIZE to cover the slot previously shared by
   pseudos spilled from FROM_REG, so the new slot can replace it.  */
2510 	  if (spill_stack_slot[from_reg])
2512 	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2514 		mode = GET_MODE (spill_stack_slot[from_reg]);
2515 	      if (spill_stack_slot_width[from_reg] > total_size)
2516 		total_size = spill_stack_slot_width[from_reg];
2518 	  /* Make a slot with that size.  */
2519 	  x = assign_stack_local (mode, total_size,
2520 				  inherent_size == total_size ? 0 : -1);
2522 	  if (BYTES_BIG_ENDIAN)
2524 	      /* Cancel the big-endian correction done in assign_stack_local.
2525 		 Get the address of the beginning of the slot.
2526 		 This is so we can do a big-endian correction unconditionally
2528 	      adjust = GET_MODE_SIZE (mode) - total_size;
/* Record the (possibly widened) slot for reuse by later pseudos spilled
   from the same hard reg.  */
2530 	  stack_slot = gen_rtx (MEM, mode_for_size (total_size
2533 			        plus_constant (XEXP (x, 0), adjust));
2535 	  spill_stack_slot[from_reg] = stack_slot;
2536 	  spill_stack_slot_width[from_reg] = total_size;
2539       /* On a big endian machine, the "address" of the slot
2540 	 is the address of the low part that fits its inherent mode.  */
2541       if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2542 	adjust += (total_size - inherent_size);
2544       /* If we have any adjustment to make, or if the stack slot is the
2545 	 wrong mode, make a new stack slot.  */
2546       if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2548 	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2549 		       plus_constant (XEXP (x, 0), adjust));
2550 	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2553       /* Save the stack slot for later.   */
2554       reg_equiv_memory_loc[i] = x;
2558 /* Mark the slots in regs_ever_live for the hard regs
2559    used by pseudo-reg number REGNO.  */
2562 mark_home_live (regno)
2565   register int i, lim;
/* I is the first hard reg assigned to the pseudo (negative if none;
   the guard for that case is elided in this view).  */
2566   i = reg_renumber[regno];
/* Mark every hard reg the pseudo occupies, given its mode.  */
2569   lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2571     regs_ever_live[i++] = 1;
2574 /* Mark the registers used in SCRATCH as being live.  */
2577 mark_scratch_live (scratch)
/* SCRATCH is a hard REG rtx; mark each hard reg it spans, using its
   mode to find how many consecutive regs it occupies.  */
2581   int regno = REGNO (scratch);
2582   int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2584   for (i = regno; i < lim; i++)
2585     regs_ever_live[i] = 1;
2588 /* This function handles the tracking of elimination offsets around branches.
2590    X is a piece of RTL being scanned.
2592    INSN is the insn that it came from, if any.
2594    INITIAL_P is non-zero if we are to set the offset to be the initial
2595    offset and zero if we are setting the offset of the label to be the
/* NOTE(review): this is a switch on GET_CODE (x); several case labels
   and break statements are elided in this view.  */
2599 set_label_offsets (x, insn, initial_p)
2604   enum rtx_code code = GET_CODE (x);
2607   struct elim_table *p;
/* (LABEL_REF case) Non-local labels are handled specially; code elided.  */
2612       if (LABEL_REF_NONLOCAL_P (x))
2617       /* ... fall through ...  */
2620       /* If we know nothing about this label, set the desired offsets.  Note
2621 	 that this sets the offset at a label to be the offset before a label
2622 	 if we don't know anything about the label.  This is not correct for
2623 	 the label after a BARRIER, but is the best guess we can make.  If
2624 	 we guessed wrong, we will suppress an elimination that might have
2625 	 been possible had we been able to guess correctly.  */
2627       if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2629 	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2630 	    offsets_at[CODE_LABEL_NUMBER (x)][i]
2631 	      = (initial_p ? reg_eliminate[i].initial_offset
2632 		 : reg_eliminate[i].offset);
2633 	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2636       /* Otherwise, if this is the definition of a label and it is
2637 	 preceded by a BARRIER, set our offsets to the known offset of
2641 	       && (tem = prev_nonnote_insn (insn)) != 0
2642 	       && GET_CODE (tem) == BARRIER)
/* After a barrier, control reaches the label only by jumping, so adopt
   the offsets recorded for the label and recount how many eliminations
   are away from their initial offsets.  */
2644 	  num_not_at_initial_offset = 0;
2645 	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2647 	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2648 		= offsets_at[CODE_LABEL_NUMBER (x)][i];
2649 	      if (reg_eliminate[i].can_eliminate
2650 		  && (reg_eliminate[i].offset
2651 		      != reg_eliminate[i].initial_offset))
2652 		num_not_at_initial_offset++;
2657 	/* If neither of the above cases is true, compare each offset
2658 	   with those previously recorded and suppress any eliminations
2659 	   where the offsets disagree.  */
2661 	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2662 	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2663 	      != (initial_p ? reg_eliminate[i].initial_offset
2664 		  : reg_eliminate[i].offset))
2665 	    reg_eliminate[i].can_eliminate = 0;
/* (JUMP_INSN case, presumably) recurse on the jump's pattern.  */
2670       set_label_offsets (PATTERN (insn), insn, initial_p);
2672       /* ... fall through ...  */
2676       /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2677 	 and hence must have all eliminations at their initial offsets.  */
2678       for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2679 	if (REG_NOTE_KIND (tem) == REG_LABEL)
2680 	  set_label_offsets (XEXP (tem, 0), insn, 1);
2685       /* Each of the labels in the address vector must be at their initial
2686 	 offsets.  We want the first first for ADDR_VEC and the second
2687 	 field for ADDR_DIFF_VEC.  */
2689       for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2690 	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2695       /* We only care about setting PC.  If the source is not RETURN,
2696 	 IF_THEN_ELSE, or a label, disable any eliminations not at
2697 	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
2698 	 isn't one of those possibilities.  For branches to a label,
2699 	 call ourselves recursively.
2701 	 Note that this can disable elimination unnecessarily when we have
2702 	 a non-local goto since it will look like a non-constant jump to
2703 	 someplace in the current function.  This isn't a significant
2704 	 problem since such jumps will normally be when all elimination
2705 	 pairs are back to their initial offsets.  */
2707       if (SET_DEST (x) != pc_rtx)
2710       switch (GET_CODE (SET_SRC (x)))
/* (LABEL_REF source) direct branch to a label: recurse on the label.  */
2717 	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
/* (IF_THEN_ELSE source) check each arm; labels recurse, PC/RETURN are
   fine, anything else falls to the "variable address" default below.  */
2721 	  tem = XEXP (SET_SRC (x), 1);
2722 	  if (GET_CODE (tem) == LABEL_REF)
2723 	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2724 	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2727 	  tem = XEXP (SET_SRC (x), 2);
2728 	  if (GET_CODE (tem) == LABEL_REF)
2729 	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
2730 	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2735 	  /* If we reach here, all eliminations must be at their initial
2736 	     offset because we are doing a jump to a variable address.  */
2737 	  for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2738 	    if (p->offset != p->initial_offset)
2739 	      p->can_eliminate = 0;
2743 /* Used for communication between the next two function to properly share
2744    the vector for an ASM_OPERANDS.  */
/* File-scope scratch pointers; "the next two function" refers to the
   eliminate_regs machinery that follows.  */
2746 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2748 /* Scan X and replace any eliminable registers (such as fp) with a
2749 replacement (such as sp), plus an offset.
2751 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2752 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2753 MEM, we are allowed to replace a sum of a register and the constant zero
2754 with the register, which we cannot do outside a MEM. In addition, we need
2755 to record the fact that a register is referenced outside a MEM.
2757 If INSN is an insn, it is the insn containing X. If we replace a REG
2758 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2759 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2760 that the REG is being modified.
2762 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2763 That's used when we eliminate in expressions stored in notes.
2764 This means, do not set ref_outside_mem even if the reference
2767 If we see a modification to a register we know about, take the
2768 appropriate action (see case SET, below).
2770 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2771 replacements done assuming all offsets are at their initial values. If
2772 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2773 encounter, return the actual location so that find_reloads will do
2774 the proper thing. */
2777 eliminate_regs (x, mem_mode, insn, storing)
2779 enum machine_mode mem_mode;
2783 enum rtx_code code = GET_CODE (x);
2784 struct elim_table *ep;
2809 /* First handle the case where we encounter a bare register that
2810 is eliminable. Replace it with a PLUS. */
2811 if (regno < FIRST_PSEUDO_REGISTER)
2813 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2815 if (ep->from_rtx == x && ep->can_eliminate)
2818 /* Refs inside notes don't count for this purpose. */
2819 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2820 || GET_CODE (insn) == INSN_LIST)))
2821 ep->ref_outside_mem = 1;
2822 return plus_constant (ep->to_rtx, ep->previous_offset);
2826 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2827 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2829 /* In this case, find_reloads would attempt to either use an
2830 incorrect address (if something is not at its initial offset)
2831 or substitute an replaced address into an insn (which loses
2832 if the offset is changed by some later action). So we simply
2833 return the replaced stack slot (assuming it is changed by
2834 elimination) and ignore the fact that this is actually a
2835 reference to the pseudo. Ensure we make a copy of the
2836 address in case it is shared. */
2837 new = eliminate_regs (reg_equiv_memory_loc[regno],
2839 if (new != reg_equiv_memory_loc[regno])
2841 cannot_omit_stores[regno] = 1;
2842 return copy_rtx (new);
2848 /* If this is the sum of an eliminable register and a constant, rework
2850 if (GET_CODE (XEXP (x, 0)) == REG
2851 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2852 && CONSTANT_P (XEXP (x, 1)))
2854 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2856 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2859 /* Refs inside notes don't count for this purpose. */
2860 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2861 || GET_CODE (insn) == INSN_LIST)))
2862 ep->ref_outside_mem = 1;
2864 /* The only time we want to replace a PLUS with a REG (this
2865 occurs when the constant operand of the PLUS is the negative
2866 of the offset) is when we are inside a MEM. We won't want
2867 to do so at other times because that would change the
2868 structure of the insn in a way that reload can't handle.
2869 We special-case the commonest situation in
2870 eliminate_regs_in_insn, so just replace a PLUS with a
2871 PLUS here, unless inside a MEM. */
2872 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2873 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2876 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2877 plus_constant (XEXP (x, 1),
2878 ep->previous_offset));
2881 /* If the register is not eliminable, we are done since the other
2882 operand is a constant. */
2886 /* If this is part of an address, we want to bring any constant to the
2887 outermost PLUS. We will do this by doing register replacement in
2888 our operands and seeing if a constant shows up in one of them.
2890 We assume here this is part of an address (or a "load address" insn)
2891 since an eliminable register is not likely to appear in any other
2894 If we have (plus (eliminable) (reg)), we want to produce
2895 (plus (plus (replacement) (reg) (const))). If this was part of a
2896 normal add insn, (plus (replacement) (reg)) will be pushed as a
2897 reload. This is the desired action. */
2900 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2901 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
2903 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2905 /* If one side is a PLUS and the other side is a pseudo that
2906 didn't get a hard register but has a reg_equiv_constant,
2907 we must replace the constant here since it may no longer
2908 be in the position of any operand. */
2909 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2910 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2911 && reg_renumber[REGNO (new1)] < 0
2912 && reg_equiv_constant != 0
2913 && reg_equiv_constant[REGNO (new1)] != 0)
2914 new1 = reg_equiv_constant[REGNO (new1)];
2915 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2916 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2917 && reg_renumber[REGNO (new0)] < 0
2918 && reg_equiv_constant[REGNO (new0)] != 0)
2919 new0 = reg_equiv_constant[REGNO (new0)];
2921 new = form_sum (new0, new1);
2923 /* As above, if we are not inside a MEM we do not want to
2924 turn a PLUS into something else. We might try to do so here
2925 for an addition of 0 if we aren't optimizing. */
2926 if (! mem_mode && GET_CODE (new) != PLUS)
2927 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2935 /* If this is the product of an eliminable register and a
2936 constant, apply the distribute law and move the constant out
2937 so that we have (plus (mult ..) ..). This is needed in order
2938 to keep load-address insns valid. This case is pathological.
2939 We ignore the possibility of overflow here. */
2940 if (GET_CODE (XEXP (x, 0)) == REG
2941 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2942 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2943 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2945 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2948 /* Refs inside notes don't count for this purpose. */
2949 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2950 || GET_CODE (insn) == INSN_LIST)))
2951 ep->ref_outside_mem = 1;
2954 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2955 ep->previous_offset * INTVAL (XEXP (x, 1)));
2958 /* ... fall through ... */
2963 case DIV: case UDIV:
2964 case MOD: case UMOD:
2965 case AND: case IOR: case XOR:
2966 case ROTATERT: case ROTATE:
2967 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2969 case GE: case GT: case GEU: case GTU:
2970 case LE: case LT: case LEU: case LTU:
2972 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2974 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn, 0) : 0;
2976 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2977 return gen_rtx (code, GET_MODE (x), new0, new1);
2982 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2985 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2986 if (new != XEXP (x, 0))
2987 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2990 /* ... fall through ... */
2993 /* Now do eliminations in the rest of the chain. If this was
2994 an EXPR_LIST, this might result in allocating more memory than is
2995 strictly needed, but it simplifies the code. */
2998 new = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
2999 if (new != XEXP (x, 1))
3000 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3008 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3009 if (ep->to_rtx == XEXP (x, 0))
3011 int size = GET_MODE_SIZE (mem_mode);
3013 /* If more bytes than MEM_MODE are pushed, account for them. */
3014 #ifdef PUSH_ROUNDING
3015 if (ep->to_rtx == stack_pointer_rtx)
3016 size = PUSH_ROUNDING (size);
3018 if (code == PRE_DEC || code == POST_DEC)
3024 /* Fall through to generic unary operation case. */
3025 case STRICT_LOW_PART:
3027 case SIGN_EXTEND: case ZERO_EXTEND:
3028 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3029 case FLOAT: case FIX:
3030 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3034 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3035 if (new != XEXP (x, 0))
3036 return gen_rtx (code, GET_MODE (x), new);
3040 /* Similar to above processing, but preserve SUBREG_WORD.
3041 Convert (subreg (mem)) to (mem) if not paradoxical.
3042 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3043 pseudo didn't get a hard reg, we must replace this with the
3044 eliminated version of the memory location because push_reloads
3045 may do the replacement in certain circumstances. */
3046 if (GET_CODE (SUBREG_REG (x)) == REG
3047 && (GET_MODE_SIZE (GET_MODE (x))
3048 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3049 && reg_equiv_memory_loc != 0
3050 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3052 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3055 /* If we didn't change anything, we must retain the pseudo. */
3056 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3057 new = SUBREG_REG (x);
3060 /* Otherwise, ensure NEW isn't shared in case we have to reload
3062 new = copy_rtx (new);
3064 /* In this case, we must show that the pseudo is used in this
3065 insn so that delete_output_reload will do the right thing. */
3066 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3067 && GET_CODE (insn) != INSN_LIST)
3068 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
3073 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn, 0);
3075 if (new != XEXP (x, 0))
3077 int x_size = GET_MODE_SIZE (GET_MODE (x));
3078 int new_size = GET_MODE_SIZE (GET_MODE (new));
3080 /* When asked to spill a partial word subreg, we need to go
3081 ahead and spill the whole thing against the possibility
3082 that we reload the whole reg and find garbage at the top. */
3084 && GET_CODE (new) == MEM
3085 && x_size < new_size
3086 && ((x_size + UNITS_PER_WORD-1) / UNITS_PER_WORD
3087 == (new_size + UNITS_PER_WORD-1) / UNITS_PER_WORD))
3089 else if (GET_CODE (new) == MEM
3090 && x_size <= new_size
3091 #ifdef LOAD_EXTEND_OP
3092 /* On these machines we will be reloading what is
3093 inside the SUBREG if it originally was a pseudo and
3094 the inner and outer modes are both a word or
3095 smaller. So leave the SUBREG then. */
3096 && ! (GET_CODE (SUBREG_REG (x)) == REG
3097 && x_size <= UNITS_PER_WORD
3098 && new_size <= UNITS_PER_WORD
3099 && x_size > new_size
3100 && INTEGRAL_MODE_P (GET_MODE (new))
3101 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
3105 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3106 enum machine_mode mode = GET_MODE (x);
3108 if (BYTES_BIG_ENDIAN)
3109 offset += (MIN (UNITS_PER_WORD,
3110 GET_MODE_SIZE (GET_MODE (new)))
3111 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3113 PUT_MODE (new, mode);
3114 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3118 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3124 /* If using a register that is the source of an eliminate we still
3125 think can be performed, note it cannot be performed since we don't
3126 know how this register is used. */
3127 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3128 if (ep->from_rtx == XEXP (x, 0))
3129 ep->can_eliminate = 0;
3131 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3132 if (new != XEXP (x, 0))
3133 return gen_rtx (code, GET_MODE (x), new);
3137 /* If clobbering a register that is the replacement register for an
3138 elimination we still think can be performed, note that it cannot
3139 be performed. Otherwise, we need not be concerned about it. */
3140 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3141 if (ep->to_rtx == XEXP (x, 0))
3142 ep->can_eliminate = 0;
3144 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3145 if (new != XEXP (x, 0))
3146 return gen_rtx (code, GET_MODE (x), new);
3152 /* Properly handle sharing input and constraint vectors. */
3153 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3155 /* When we come to a new vector not seen before,
3156 scan all its elements; keep the old vector if none
3157 of them changes; otherwise, make a copy. */
3158 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3159 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3160 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3161 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3164 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3165 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3168 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3169 new_asm_operands_vec = old_asm_operands_vec;
3171 new_asm_operands_vec
3172 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3175 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3176 if (new_asm_operands_vec == old_asm_operands_vec)
3179 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3180 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3181 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3182 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3183 ASM_OPERANDS_SOURCE_FILE (x),
3184 ASM_OPERANDS_SOURCE_LINE (x));
3185 new->volatil = x->volatil;
3190 /* Check for setting a register that we know about. */
3191 if (GET_CODE (SET_DEST (x)) == REG)
3193 /* See if this is setting the replacement register for an
3196 If DEST is the hard frame pointer, we do nothing because we
3197 assume that all assignments to the frame pointer are for
3198 non-local gotos and are being done at a time when they are valid
3199 and do not disturb anything else. Some machines want to
3200 eliminate a fake argument pointer (or even a fake frame pointer)
3201 with either the real frame or the stack pointer. Assignments to
3202 the hard frame pointer must not prevent this elimination. */
3204 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3206 if (ep->to_rtx == SET_DEST (x)
3207 && SET_DEST (x) != hard_frame_pointer_rtx)
3209 /* If it is being incremented, adjust the offset. Otherwise,
3210 this elimination can't be done. */
3211 rtx src = SET_SRC (x);
3213 if (GET_CODE (src) == PLUS
3214 && XEXP (src, 0) == SET_DEST (x)
3215 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3216 ep->offset -= INTVAL (XEXP (src, 1));
3218 ep->can_eliminate = 0;
3221 /* Now check to see we are assigning to a register that can be
3222 eliminated. If so, it must be as part of a PARALLEL, since we
3223 will not have been called if this is a single SET. So indicate
3224 that we can no longer eliminate this reg. */
3225 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3227 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3228 ep->can_eliminate = 0;
3231 /* Now avoid the loop below in this common case. */
3233 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn, 1);
3234 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn, 0);
3236 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3237 write a CLOBBER insn. */
3238 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3239 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3240 && GET_CODE (insn) != INSN_LIST)
3241 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3243 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3244 return gen_rtx (SET, VOIDmode, new0, new1);
3250 /* Our only special processing is to pass the mode of the MEM to our
3251 recursive call and copy the flags. While we are here, handle this
3252 case more efficiently. */
3253 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn, 0);
3254 if (new != XEXP (x, 0))
3256 new = gen_rtx (MEM, GET_MODE (x), new);
3257 new->volatil = x->volatil;
3258 new->unchanging = x->unchanging;
3259 new->in_struct = x->in_struct;
3266 /* Process each of our operands recursively. If any have changed, make a
3268 fmt = GET_RTX_FORMAT (code);
3269 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3273 new = eliminate_regs (XEXP (x, i), mem_mode, insn, 0);
3274 if (new != XEXP (x, i) && ! copied)
3276 rtx new_x = rtx_alloc (code);
3277 bcopy ((char *) x, (char *) new_x,
3278 (sizeof (*new_x) - sizeof (new_x->fld)
3279 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3285 else if (*fmt == 'E')
3288 for (j = 0; j < XVECLEN (x, i); j++)
3290 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn, 0);
3291 if (new != XVECEXP (x, i, j) && ! copied_vec)
3293 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3297 rtx new_x = rtx_alloc (code);
3298 bcopy ((char *) x, (char *) new_x,
3299 (sizeof (*new_x) - sizeof (new_x->fld)
3300 + (sizeof (new_x->fld[0])
3301 * GET_RTX_LENGTH (code))));
3305 XVEC (x, i) = new_v;
3308 XVECEXP (x, i, j) = new;
3316 /* Scan INSN and eliminate all eliminable registers in it.
3318 If REPLACE is nonzero, do the replacement destructively. Also
3319 delete the insn as dead if it is setting an eliminable register.
3321 If REPLACE is zero, do all our allocations in reload_obstack.
3323 If no eliminations were done and this insn doesn't require any elimination
3324 processing (these are not identical conditions: it might be updating sp,
3325 but not referencing fp; this needs to be seen during reload_as_needed so
3326 that the offset between fp and sp can be taken into consideration), zero
3327 is returned. Otherwise, 1 is returned. */
/* Scan INSN and replace eliminable registers (per the reg_eliminate table,
   e.g. frame pointer with stack pointer) throughout its body.  REPLACE
   nonzero means substitute destructively and delete INSN if it only sets
   an eliminable register; REPLACE zero means allocate replacement RTL in
   reload_obstack.  Returns nonzero if anything changed or elimination
   bookkeeping was updated.  */
3330 eliminate_regs_in_insn (insn, replace)
3334 rtx old_body = PATTERN (insn);
3335 rtx old_set = single_set (insn);
3338 struct elim_table *ep;
3341 push_obstacks (&reload_obstack, &reload_obstack);
3343 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3344 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3346 /* Check for setting an eliminable register. */
3347 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3348 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3350 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3351 /* If this is setting the frame pointer register to the
3352 hardware frame pointer register and this is an elimination
3353 that will be done (tested above), this insn is really
3354 adjusting the frame pointer downward to compensate for
3355 the adjustment done before a nonlocal goto. */
3356 if (ep->from == FRAME_POINTER_REGNUM
3357 && ep->to == HARD_FRAME_POINTER_REGNUM)
3359 rtx src = SET_SRC (old_set);
3361 rtx prev_insn, prev_set;
3363 if (src == ep->to_rtx)
3365 else if (GET_CODE (src) == PLUS
3366 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3367 offset = INTVAL (XEXP (src, 0)), ok = 1;
/* The offset may have been computed by the previous insn;
   look there for TO plus a constant.  */
3368 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3369 && (prev_set = single_set (prev_insn)) != 0
3370 && rtx_equal_p (SET_DEST (prev_set), src))
3372 src = SET_SRC (prev_set);
3373 if (src == ep->to_rtx)
3375 else if (GET_CODE (src) == PLUS
3376 && GET_CODE (XEXP (src, 0)) == CONST_INT
3377 && XEXP (src, 1) == ep->to_rtx)
3378 offset = INTVAL (XEXP (src, 0)), ok = 1;
3379 else if (GET_CODE (src) == PLUS
3380 && GET_CODE (XEXP (src, 1)) == CONST_INT
3381 && XEXP (src, 0) == ep->to_rtx)
3382 offset = INTVAL (XEXP (src, 1)), ok = 1;
3390 = plus_constant (ep->to_rtx, offset - ep->offset);
3392 /* First see if this insn remains valid when we
3393 make the change. If not, keep the INSN_CODE
3394 the same and let reload fit it up. */
3395 validate_change (insn, &SET_SRC (old_set), src, 1);
3396 validate_change (insn, &SET_DEST (old_set),
3398 if (! apply_change_group ())
3400 SET_SRC (old_set) = src;
3401 SET_DEST (old_set) = ep->to_rtx;
3411 /* In this case this insn isn't serving a useful purpose. We
3412 will delete it in reload_as_needed once we know that this
3413 elimination is, in fact, being done.
3415 If REPLACE isn't set, we can't delete this insn, but needn't
3416 process it since it won't be used unless something changes. */
3418 delete_dead_insn (insn);
3423 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3424 in the insn is the negative of the offset in FROM. Substitute
3425 (set (reg) (reg to)) for the insn and change its code.
3427 We have to do this here, rather than in eliminate_regs, so that we can
3428 change the insn code. */
3430 if (GET_CODE (SET_SRC (old_set)) == PLUS
3431 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3432 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3433 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3435 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3436 && ep->can_eliminate)
3438 /* We must stop at the first elimination that will be used.
3439 If this one would replace the PLUS with a REG, do it
3440 now. Otherwise, quit the loop and let eliminate_regs
3441 do its normal replacement. */
3442 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3444 /* We assume here that we don't need a PARALLEL of
3445 any CLOBBERs for this assignment. There's not
3446 much we can do if we do need it. */
3447 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3448 SET_DEST (old_set), ep->to_rtx);
3449 INSN_CODE (insn) = -1;
3458 old_asm_operands_vec = 0;
3460 /* Replace the body of this insn with a substituted form. If we changed
3461 something, return non-zero.
3463 If we are replacing a body that was a (set X (plus Y Z)), try to
3464 re-recognize the insn. We do this in case we had a simple addition
3465 but now can do this as a load-address. This saves an insn in this
3468 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX, 0);
3469 if (new_body != old_body)
3471 /* If we aren't replacing things permanently and we changed something,
3472 make another copy to ensure that all the RTL is new. Otherwise
3473 things can go wrong if find_reload swaps commutative operands
3474 and one is inside RTL that has been copied while the other is not. */
3476 /* Don't copy an asm_operands because (1) there's no need and (2)
3477 copy_rtx can't do it properly when there are multiple outputs. */
3478 if (! replace && asm_noperands (old_body) < 0)
3479 new_body = copy_rtx (new_body);
3481 /* If we had a move insn but now we don't, rerecognize it. This will
3482 cause spurious re-recognition if the old move had a PARALLEL since
3483 the new one still will, but we can't call single_set without
3484 having put NEW_BODY into the insn and the re-recognition won't
3485 hurt in this rare case. */
3487 && ((GET_CODE (SET_SRC (old_set)) == REG
3488 && (GET_CODE (new_body) != SET
3489 || GET_CODE (SET_SRC (new_body)) != REG))
3490 /* If this was a load from or store to memory, compare
3491 the MEM in recog_operand to the one in the insn. If they
3492 are not equal, then rerecognize the insn. */
3494 && ((GET_CODE (SET_SRC (old_set)) == MEM
3495 && SET_SRC (old_set) != recog_operand[1])
3496 || (GET_CODE (SET_DEST (old_set)) == MEM
3497 && SET_DEST (old_set) != recog_operand[0])))
3498 /* If this was an add insn before, rerecognize. */
3499 || GET_CODE (SET_SRC (old_set)) == PLUS))
3501 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3502 /* If recognition fails, store the new body anyway.
3503 It's normal to have recognition failures here
3504 due to bizarre memory addresses; reloading will fix them. */
3505 PATTERN (insn) = new_body;
3508 PATTERN (insn) = new_body;
3513 /* Loop through all elimination pairs. See if any have changed and
3514 recalculate the number not at initial offset.
3516 Compute the maximum offset (minimum offset if the stack does not
3517 grow downward) for each elimination pair.
3519 We also detect cases where register elimination cannot be done,
3520 namely, if a register would be both changed and referenced outside a MEM
3521 in the resulting insn since such an insn is often undefined and, even if
3522 not, we cannot know what meaning will be given to it. Note that it is
3523 valid to have a register used in an address in an insn that changes it
3524 (presumably with a pre- or post-increment or decrement).
3526 If anything changes, return nonzero. */
3528 num_not_at_initial_offset = 0;
3529 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3531 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3532 ep->can_eliminate = 0;
3534 ep->ref_outside_mem = 0;
3536 if (ep->previous_offset != ep->offset)
3539 ep->previous_offset = ep->offset;
3540 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3541 num_not_at_initial_offset++;
3543 #ifdef STACK_GROWS_DOWNWARD
3544 ep->max_offset = MAX (ep->max_offset, ep->offset);
3546 ep->max_offset = MIN (ep->max_offset, ep->offset);
3551 /* If we changed something, perform elimination in REG_NOTES. This is
3552 needed even when REPLACE is zero because a REG_DEAD note might refer
3553 to a register that we eliminate and could cause a different number
3554 of spill registers to be needed in the final reload pass than in
3556 if (val && REG_NOTES (insn) != 0)
3557 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn), 0);
3565 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3566 replacement we currently believe is valid, mark it as not eliminable if X
3567 modifies DEST in any way other than by adding a constant integer to it.
3569 If DEST is the frame pointer, we do nothing because we assume that
3570 all assignments to the hard frame pointer are nonlocal gotos and are being
3571 done at a time when they are valid and do not disturb anything else.
3572 Some machines want to eliminate a fake argument pointer with either the
3573 frame or stack pointer. Assignments to the hard frame pointer must not
3574 prevent this elimination.
3576 Called via note_stores from reload before starting its passes to scan
3577 the insns of the function. */
/* note_stores callback: X is a SET or CLOBBER whose destination is DEST.
   If DEST is the replacement target (to_rtx) of an elimination we still
   believe valid, and X modifies it other than by adding a constant
   integer to itself, mark that elimination (current and previous state)
   as not possible.  The hard frame pointer is exempt: assignments to it
   are assumed to be nonlocal-goto fixups that must not block
   elimination.  */
3580 mark_not_eliminable (dest, x)
3586 /* A SUBREG of a hard register here is just changing its mode. We should
3587 not see a SUBREG of an eliminable hard register, but check just in
case.  */
3589 if (GET_CODE (dest) == SUBREG)
3590 dest = SUBREG_REG (dest);
/* Stores to the hard frame pointer never disable an elimination.  */
3592 if (dest == hard_frame_pointer_rtx)
3595 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3596 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
/* Any store other than TO = TO + CONST_INT invalidates the pair.  */
3597 && (GET_CODE (x) != SET
3598 || GET_CODE (SET_SRC (x)) != PLUS
3599 || XEXP (SET_SRC (x), 0) != dest
3600 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3602 reg_eliminate[i].can_eliminate_previous
3603 = reg_eliminate[i].can_eliminate = 0;
3608 /* Kick all pseudos out of hard register REGNO.
3609 If GLOBAL is nonzero, try to find someplace else to put them.
3610 If DUMPFILE is nonzero, log actions taken on that file.
3612 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3613 because we found we can't eliminate some register. In that case, no pseudos
3614 are allowed to be in the register, even if they are only in a block that
3615 doesn't require spill registers, unlike the case when we are spilling this
3616 hard reg to produce another spill register.
3618 Return nonzero if any pseudos needed to be kicked out. */
/* Kick every pseudo (and SCRATCH) out of hard register REGNO so it can be
   used as a reload register.  GLOBAL nonzero means try to re-allocate the
   displaced pseudos elsewhere; DUMPFILE, if nonzero, receives a log of each
   move; CANT_ELIMINATE nonzero forces the spill even for pseudos confined
   to basic blocks that needed no spills of this class.
   Returns nonzero if any pseudo had to be displaced (caller must rescan).  */
3621 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3627 enum reg_class class = REGNO_REG_CLASS (regno);
3628 int something_changed = 0;
/* REGNO may no longer be used for pseudos.  */
3631 SET_HARD_REG_BIT (forbidden_regs, regno);
3634 regs_ever_live[regno] = 1;
3636 /* Spill every pseudo reg that was allocated to this reg
3637 or to something that overlaps this reg. */
3639 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3640 if (reg_renumber[i] >= 0
3641 && reg_renumber[i] <= regno
/* Overlap test: the pseudo's hard-reg span includes REGNO.  */
3643 + HARD_REGNO_NREGS (reg_renumber[i],
3644 PSEUDO_REGNO_MODE (i))
3647 /* If this register belongs solely to a basic block which needed no
3648 spilling of any class that this register is contained in,
3649 leave it be, unless we are spilling this register because
3650 it was a hard register that can't be eliminated. */
3652 if (! cant_eliminate
3653 && basic_block_needs[0]
3654 && reg_basic_block[i] >= 0
3655 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
/* Also check every superclass of CLASS for needs in this block.  */
3659 for (p = reg_class_superclasses[(int) class];
3660 *p != LIM_REG_CLASSES; p++)
3661 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3664 if (*p == LIM_REG_CLASSES)
3668 /* Mark it as no longer having a hard register home. */
3669 reg_renumber[i] = -1;
3670 /* We will need to scan everything again. */
3671 something_changed = 1;
3673 retry_global_alloc (i, forbidden_regs);
3675 alter_reg (i, regno);
3678 if (reg_renumber[i] == -1)
3679 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3681 fprintf (dumpfile, " Register %d now in %d.\n\n",
3682 i, reg_renumber[i]);
/* Also displace any SCRATCH rtxs that local-alloc put in REGNO.  */
3685 for (i = 0; i < scratch_list_length; i++)
3687 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3689 if (! cant_eliminate && basic_block_needs[0]
3690 && ! basic_block_needs[(int) class][scratch_block[i]])
3694 for (p = reg_class_superclasses[(int) class];
3695 *p != LIM_REG_CLASSES; p++)
3696 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3699 if (*p == LIM_REG_CLASSES)
/* Turn the rtx back into an unallocated SCRATCH.  */
3702 PUT_CODE (scratch_list[i], SCRATCH);
3703 scratch_list[i] = 0;
3704 something_changed = 1;
3709 return something_changed;
3712 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3713 Also mark any hard registers used to store user variables as
3714 forbidden from being used for spill registers. */
/* Recursively walk X, recording in reg_max_ref_width the widest mode in
   which each register is referenced via a paradoxical SUBREG (outer mode
   wider than the inner register's mode).  Also forbids hard registers
   holding user variables from use as spill registers when
   SMALL_REGISTER_CLASSES is in effect.  */
3717 scan_paradoxical_subregs (x)
3722 register enum rtx_code code = GET_CODE (x);
3727 #ifdef SMALL_REGISTER_CLASSES
/* Keep user-variable hard regs out of the spill-register pool.  */
3728 if (SMALL_REGISTER_CLASSES
3729 && REGNO (x) < FIRST_PSEUDO_REGISTER
3730 && REG_USERVAR_P (x))
3731 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
/* Paradoxical subreg: outer mode wider than the inner REG's mode.  */
3747 if (GET_CODE (SUBREG_REG (x)) == REG
3748 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3749 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3750 = GET_MODE_SIZE (GET_MODE (x));
/* Recurse into every operand and every vector element.  */
3754 fmt = GET_RTX_FORMAT (code);
3755 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3758 scan_paradoxical_subregs (XEXP (x, i));
3759 else if (fmt[i] == 'E')
3762 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3763 scan_paradoxical_subregs (XVECEXP (x, i, j));
/* qsort comparison function for struct hard_reg_n_uses: order primarily by
   ascending use count, so the least-used hard regs sort first (most
   desirable as reload registers).  */
3769 hard_reg_use_compare (p1p, p2p)
3770 const GENERIC_PTR p1p;
3771 const GENERIC_PTR p2p;
3773 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3774 *p2 = (struct hard_reg_n_uses *)p2p;
3775 int tem = p1->uses - p2->uses;
3776 if (tem != 0) return tem;
3777 /* If regs are equally good, sort by regno,
3778 so that the results of qsort leave nothing to chance. */
3779 return p1->regno - p2->regno;
3782 /* Choose the order to consider regs for use as reload registers
3783 based on how much trouble would be caused by spilling one.
3784 Store them in order of decreasing preference in potential_reload_regs. */
/* Fill potential_reload_regs with all hard register numbers in order of
   decreasing desirability for use as reload registers: unused registers
   first (per REG_ALLOC_ORDER if defined, else call-used before call-saved),
   then used registers by increasing use count; fixed, explicitly-used, and
   eliminable registers sort last and are also recorded in bad_spill_regs.
   GLOBAL nonzero means global-alloc ran, so locally-allocated pseudos are
   weighted more heavily (they cannot be re-allocated).  */
3787 order_regs_for_reload (global)
3794 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3796 CLEAR_HARD_REG_SET (bad_spill_regs);
3798 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3799 potential_reload_regs[i] = -1;
3801 /* Count number of uses of each hard reg by pseudo regs allocated to it
3802 and then order them by decreasing use. */
3804 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3806 hard_reg_n_uses[i].uses = 0;
3807 hard_reg_n_uses[i].regno = i;
3810 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3812 int regno = reg_renumber[i];
3815 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3818 /* If allocated by local-alloc, show more uses since
3819 we're not going to be able to reallocate it, but
3820 we might if allocated by global alloc. */
3821 if (global && reg_allocno[i] < 0)
3822 hard_reg_n_uses[regno].uses += (reg_n_refs[i] + 1) / 2;
3824 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
/* LARGE accumulates total refs; used below as a "worse than any
   real count" penalty base.  */
3827 large += reg_n_refs[i];
3830 /* Now fixed registers (which cannot safely be used for reloading)
3831 get a very high use count so they will be considered least desirable.
3832 Registers used explicitly in the rtl code are almost as bad. */
3834 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3838 hard_reg_n_uses[i].uses += 2 * large + 2;
3839 SET_HARD_REG_BIT (bad_spill_regs, i);
3841 else if (regs_explicitly_used[i])
3843 hard_reg_n_uses[i].uses += large + 1;
3844 /* ??? We are doing this here because of the potential that
3845 bad code may be generated if a register explicitly used in
3846 an insn was used as a spill register for that insn. But
3847 not using these as spill registers may lose on some machine.
3848 We'll have to see how this works out. */
3849 #ifdef SMALL_REGISTER_CLASSES
3850 if (! SMALL_REGISTER_CLASSES)
3852 SET_HARD_REG_BIT (bad_spill_regs, i);
3855 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3856 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3858 #ifdef ELIMINABLE_REGS
3859 /* If registers other than the frame pointer are eliminable, mark them as
poor spill-register choices too.  */
3861 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3863 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3864 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3868 /* Prefer registers not so far used, for use in temporary loading.
3869 Among them, if REG_ALLOC_ORDER is defined, use that order.
3870 Otherwise, prefer registers not preserved by calls. */
3872 #ifdef REG_ALLOC_ORDER
3873 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3875 int regno = reg_alloc_order[i];
3877 if (hard_reg_n_uses[regno].uses == 0)
3878 potential_reload_regs[o++] = regno;
3881 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3883 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3884 potential_reload_regs[o++] = i;
3886 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3888 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3889 potential_reload_regs[o++] = i;
/* Sort used registers by increasing use count (ties broken by regno;
   see hard_reg_use_compare).  */
3893 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3894 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3896 /* Now add the regs that are already used,
3897 preferring those used less often. The fixed and otherwise forbidden
3898 registers will be at the end of this list. */
3900 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3901 if (hard_reg_n_uses[i].uses != 0)
3902 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3905 /* Used in reload_as_needed to sort the spilled regs. */
/* qsort comparison function for the spill_regs array (elements are
   shorts holding hard register numbers); orders them by register number.
   NOTE(review): the return statement appears to be missing from this
   extract -- confirm against the full source.  */
3908 compare_spill_regs (r1p, r2p)
3909 const GENERIC_PTR r1p;
3910 const GENERIC_PTR r2p;
3912 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3916 /* Reload pseudo-registers into hard regs around each insn as needed.
3917 Additional register load insns are output before the insn that needs it
3918 and perhaps store insns after insns that modify the reloaded pseudo reg.
3920 reg_last_reload_reg and reg_reloaded_contents keep track of
3921 which registers are already available in reload registers.
3922 We update these for the reloads that we perform,
3923 as the insns are scanned. */
3926 reload_as_needed (first, live_known)
3936 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3937 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3938 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3939 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3940 reg_has_output_reload = (char *) alloca (max_regno);
3941 for (i = 0; i < n_spills; i++)
3943 reg_reloaded_contents[i] = -1;
3944 reg_reloaded_insn[i] = 0;
3947 /* Reset all offsets on eliminable registers to their initial values. */
3948 #ifdef ELIMINABLE_REGS
3949 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3951 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3952 reg_eliminate[i].initial_offset);
3953 reg_eliminate[i].previous_offset
3954 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3957 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3958 reg_eliminate[0].previous_offset
3959 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3962 num_not_at_initial_offset = 0;
3964 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3965 pack registers with group needs. */
3968 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3969 for (i = 0; i < n_spills; i++)
3970 spill_reg_order[spill_regs[i]] = i;
3973 for (insn = first; insn;)
3975 register rtx next = NEXT_INSN (insn);
3977 /* Notice when we move to a new basic block. */
3978 if (live_known && this_block + 1 < n_basic_blocks
3979 && insn == basic_block_head[this_block+1])
3982 /* If we pass a label, copy the offsets from the label information
3983 into the current offsets of each elimination. */
3984 if (GET_CODE (insn) == CODE_LABEL)
3986 num_not_at_initial_offset = 0;
3987 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3989 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3990 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3991 if (reg_eliminate[i].can_eliminate
3992 && (reg_eliminate[i].offset
3993 != reg_eliminate[i].initial_offset))
3994 num_not_at_initial_offset++;
3998 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4000 rtx avoid_return_reg = 0;
4001 rtx oldpat = PATTERN (insn);
4003 #ifdef SMALL_REGISTER_CLASSES
4004 /* Set avoid_return_reg if this is an insn
4005 that might use the value of a function call. */
4006 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
4008 if (GET_CODE (PATTERN (insn)) == SET)
4009 after_call = SET_DEST (PATTERN (insn));
4010 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4011 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4012 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4016 else if (SMALL_REGISTER_CLASSES
4018 && !(GET_CODE (PATTERN (insn)) == SET
4019 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
4021 if (reg_referenced_p (after_call, PATTERN (insn)))
4022 avoid_return_reg = after_call;
4025 #endif /* SMALL_REGISTER_CLASSES */
4027 /* If this is a USE and CLOBBER of a MEM, ensure that any
4028 references to eliminable registers have been removed. */
4030 if ((GET_CODE (PATTERN (insn)) == USE
4031 || GET_CODE (PATTERN (insn)) == CLOBBER)
4032 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4033 XEXP (XEXP (PATTERN (insn), 0), 0)
4034 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4035 GET_MODE (XEXP (PATTERN (insn), 0)),
4038 /* If we need to do register elimination processing, do so.
4039 This might delete the insn, in which case we are done. */
4040 if (num_eliminable && GET_MODE (insn) == QImode)
4042 eliminate_regs_in_insn (insn, 1);
4043 if (GET_CODE (insn) == NOTE)
4050 if (GET_MODE (insn) == VOIDmode)
4052 /* First find the pseudo regs that must be reloaded for this insn.
4053 This info is returned in the tables reload_... (see reload.h).
4054 Also modify the body of INSN by substituting RELOAD
4055 rtx's for those pseudo regs. */
4058 bzero (reg_has_output_reload, max_regno);
4059 CLEAR_HARD_REG_SET (reg_is_output_reload);
4061 find_reloads (insn, 1, spill_indirect_levels, live_known,
4067 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4071 /* If this block has not had spilling done for a
4072 particular class and we have any non-optionals that need a
4073 spill reg in that class, abort. */
4075 for (class = 0; class < N_REG_CLASSES; class++)
4076 if (basic_block_needs[class] != 0
4077 && basic_block_needs[class][this_block] == 0)
4078 for (i = 0; i < n_reloads; i++)
4079 if (class == (int) reload_reg_class[i]
4080 && reload_reg_rtx[i] == 0
4081 && ! reload_optional[i]
4082 && (reload_in[i] != 0 || reload_out[i] != 0
4083 || reload_secondary_p[i] != 0))
4084 fatal_insn ("Non-optional registers need a spill register", insn);
4086 /* Now compute which reload regs to reload them into. Perhaps
4087 reusing reload regs from previous insns, or else output
4088 load insns to reload them. Maybe output store insns too.
4089 Record the choices of reload reg in reload_reg_rtx. */
4090 choose_reload_regs (insn, avoid_return_reg);
4092 #ifdef SMALL_REGISTER_CLASSES
4093 /* Merge any reloads that we didn't combine for fear of
4094 increasing the number of spill registers needed but now
4095 discover can be safely merged. */
4096 if (SMALL_REGISTER_CLASSES)
4097 merge_assigned_reloads (insn);
4100 /* Generate the insns to reload operands into or out of
4101 their reload regs. */
4102 emit_reload_insns (insn);
4104 /* Substitute the chosen reload regs from reload_reg_rtx
4105 into the insn's body (or perhaps into the bodies of other
4106 load and store insn that we just made for reloading
4107 and that we moved the structure into). */
4110 /* If this was an ASM, make sure that all the reload insns
4111 we have generated are valid. If not, give an error
4114 if (asm_noperands (PATTERN (insn)) >= 0)
4115 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4116 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4117 && (recog_memoized (p) < 0
4118 || (insn_extract (p),
4119 ! constrain_operands (INSN_CODE (p), 1))))
4121 error_for_asm (insn,
4122 "`asm' operand requires impossible reload");
4124 NOTE_SOURCE_FILE (p) = 0;
4125 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4128 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4129 is no longer validly lying around to save a future reload.
4130 Note that this does not detect pseudos that were reloaded
4131 for this insn in order to be stored in
4132 (obeying register constraints). That is correct; such reload
4133 registers ARE still valid. */
4134 note_stores (oldpat, forget_old_reloads_1);
4136 /* There may have been CLOBBER insns placed after INSN. So scan
4137 between INSN and NEXT and use them to forget old reloads. */
4138 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4139 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4140 note_stores (PATTERN (x), forget_old_reloads_1);
4143 /* Likewise for regs altered by auto-increment in this insn.
4144 But note that the reg-notes are not changed by reloading:
4145 they still contain the pseudo-regs, not the spill regs. */
4146 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4147 if (REG_NOTE_KIND (x) == REG_INC)
4149 /* See if this pseudo reg was reloaded in this insn.
4150 If so, its last-reload info is still valid
4151 because it is based on this insn's reload. */
4152 for (i = 0; i < n_reloads; i++)
4153 if (reload_out[i] == XEXP (x, 0))
4157 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4161 /* A reload reg's contents are unknown after a label. */
4162 if (GET_CODE (insn) == CODE_LABEL)
4163 for (i = 0; i < n_spills; i++)
4165 reg_reloaded_contents[i] = -1;
4166 reg_reloaded_insn[i] = 0;
4169 /* Don't assume a reload reg is still good after a call insn
4170 if it is a call-used reg. */
4171 else if (GET_CODE (insn) == CALL_INSN)
4172 for (i = 0; i < n_spills; i++)
4173 if (call_used_regs[spill_regs[i]])
4175 reg_reloaded_contents[i] = -1;
4176 reg_reloaded_insn[i] = 0;
4179 /* In case registers overlap, allow certain insns to invalidate
4180 particular hard registers. */
4182 #ifdef INSN_CLOBBERS_REGNO_P
4183 for (i = 0 ; i < n_spills ; i++)
4184 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4186 reg_reloaded_contents[i] = -1;
4187 reg_reloaded_insn[i] = 0;
4199 /* Discard all record of any value reloaded from X,
4200 or reloaded in X from someplace else;
4201 unless X is an output reload reg of the current insn.
4203 X may be a hard reg (the reload reg)
4204 or it may be a pseudo reg that was reloaded from. */
/* This is the callback handed to note_stores in reload_as_needed above;
   IGNORED is the callback's second argument and is not used here.
   NOTE(review): this extraction elides several original lines of this
   function (parameter decls, locals, and the pseudo-reg branch); the
   comments below describe only the visible code. */
4207 forget_old_reloads_1 (x, ignored)
4215 /* note_stores does give us subregs of hard regs. */
4216 while (GET_CODE (x) == SUBREG)
4218 offset += SUBREG_WORD (x);
4222 if (GET_CODE (x) != REG)
/* X is now a REG; compute the first register number it covers,
   including any SUBREG word offset accumulated above. */
4225 regno = REGNO (x) + offset;
/* The code from here on indexes spill_reg_order and calls
   HARD_REGNO_NREGS, so it only applies to hard registers; the
   pseudo-reg case is handled on elided lines. */
4227 if (regno >= FIRST_PSEUDO_REGISTER)
4232 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4233 /* Storing into a spilled-reg invalidates its contents.
4234 This can happen if a block-local pseudo is allocated to that reg
4235 and it wasn't spilled because this block's total need is 0.
4236 Then some insn might have an optional reload and use this reg. */
4237 for (i = 0; i < nr; i++)
4238 if (spill_reg_order[regno + i] >= 0
4239 /* But don't do this if the reg actually serves as an output
4240 reload reg in the current instruction. */
4242 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4244 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4245 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4249 /* Since value of X has changed,
4250 forget any value previously copied from it. */
4253 /* But don't forget a copy if this is the output reload
4254 that establishes the copy's validity. */
4255 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4256 reg_last_reload_reg[regno + nr] = 0;
4259 /* For each reload, the mode of the reload register. */
4260 static enum machine_mode reload_mode[MAX_RELOADS];
4262 /* For each reload, the largest number of registers it will require. */
/* (Used below to sort multi-reg "group" reloads ahead of single-reg
   ones in reload_reg_class_lower, and by allocate_reload_reg to decide
   when a consecutive group of spill regs must be found.) */
4263 static int reload_nregs[MAX_RELOADS];
4265 /* Comparison function for qsort to decide which of two reloads
4266 should be handled first. *P1 and *P2 are the reload numbers. */
/* Returns negative when *R1P sorts first, positive when *R2P does,
   applying the priority rules commented at each step below.
   NOTE(review): the "if (t != 0) return t;" line after each step and
   the final return are on elided lines of this extraction -- confirm
   against the full source. */
4269 reload_reg_class_lower (r1p, r2p)
4270 const GENERIC_PTR r1p;
4271 const GENERIC_PTR r2p;
4273 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4276 /* Consider required reloads before optional ones. */
4277 t = reload_optional[r1] - reload_optional[r2];
4281 /* Count all solitary classes before non-solitary ones. */
4282 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4283 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4287 /* Aside from solitaires, consider all multi-reg groups first. */
4288 t = reload_nregs[r2] - reload_nregs[r1];
4292 /* Consider reloads in order of increasing reg-class number. */
4293 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4297 /* If reloads are equally urgent, sort by reload number,
4298 so that the results of qsort leave nothing to chance. */
4302 /* The following HARD_REG_SETs indicate when each hard register is
4303 used for a reload of various parts of the current insn. */
/* They are set by mark_reload_reg_in_use, cleared by
   clear_reload_reg_in_use, and queried by the reload_reg_free_p family
   of predicates below. The per-operand arrays are indexed by operand
   number. */
4305 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4306 static HARD_REG_SET reload_reg_used;
4307 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4308 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4309 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4310 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4311 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4312 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4313 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4314 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4315 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4316 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4317 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4318 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4319 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4320 static HARD_REG_SET reload_reg_used_in_op_addr;
4321 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4322 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4323 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4324 static HARD_REG_SET reload_reg_used_in_insn;
4325 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4326 static HARD_REG_SET reload_reg_used_in_other_addr;
4328 /* If reg is in use as a reload reg for any sort of reload. */
4329 static HARD_REG_SET reload_reg_used_at_all;
4331 /* If reg is used as an inherited reload. We just mark the first register
(the rest of this sentence is on an elided line; presumably "of the
group" -- confirm against the full source). */
4333 static HARD_REG_SET reload_reg_used_for_inherit;
4335 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4336 TYPE. MODE is used to indicate how many consecutive regs are
(sentence continues on an elided line; the count is computed as
HARD_REGNO_NREGS (REGNO, MODE) below). */
4340 mark_reload_reg_in_use (regno, opnum, type, mode)
4343 enum reload_type type;
4344 enum machine_mode mode;
4346 int nregs = HARD_REGNO_NREGS (regno, mode);
/* Mark every hard reg of the consecutive group [regno, regno+nregs). */
4349 for (i = regno; i < nregs + regno; i++)
/* Dispatch on TYPE into the matching usage set; the "switch (type)"
   header, the RELOAD_OTHER case label, and the break statements are on
   elided lines. */
4354 SET_HARD_REG_BIT (reload_reg_used, i);
4357 case RELOAD_FOR_INPUT_ADDRESS:
4358 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4361 case RELOAD_FOR_INPADDR_ADDRESS:
4362 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4365 case RELOAD_FOR_OUTPUT_ADDRESS:
4366 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4369 case RELOAD_FOR_OUTADDR_ADDRESS:
4370 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4373 case RELOAD_FOR_OPERAND_ADDRESS:
4374 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4377 case RELOAD_FOR_OPADDR_ADDR:
4378 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4381 case RELOAD_FOR_OTHER_ADDRESS:
4382 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4385 case RELOAD_FOR_INPUT:
4386 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4389 case RELOAD_FOR_OUTPUT:
4390 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4393 case RELOAD_FOR_INSN:
4394 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
/* NOTE(review): from its original line number this statement appears
   to follow the switch, i.e. it runs for every TYPE -- confirm. */
4398 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4402 /* Similarly, but show REGNO is no longer in use for a reload. */
/* Exact inverse of mark_reload_reg_in_use: clears the bit for each reg
   of the group in the usage set selected by TYPE. As above, the
   "switch (type)" header, RELOAD_OTHER case label, and break statements
   are on elided lines. */
4405 clear_reload_reg_in_use (regno, opnum, type, mode)
4408 enum reload_type type;
4409 enum machine_mode mode;
4411 int nregs = HARD_REGNO_NREGS (regno, mode);
4414 for (i = regno; i < nregs + regno; i++)
4419 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4422 case RELOAD_FOR_INPUT_ADDRESS:
4423 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4426 case RELOAD_FOR_INPADDR_ADDRESS:
4427 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4430 case RELOAD_FOR_OUTPUT_ADDRESS:
4431 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4434 case RELOAD_FOR_OUTADDR_ADDRESS:
4435 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4438 case RELOAD_FOR_OPERAND_ADDRESS:
4439 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4442 case RELOAD_FOR_OPADDR_ADDR:
4443 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4446 case RELOAD_FOR_OTHER_ADDRESS:
4447 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4450 case RELOAD_FOR_INPUT:
4451 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4454 case RELOAD_FOR_OUTPUT:
4455 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4458 case RELOAD_FOR_INSN:
4459 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4465 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4466 specified by OPNUM and TYPE. */
/* The ordering model: RELOAD_FOR_OTHER_ADDRESS reloads happen first,
   then per-operand input-address and input reloads in operand order,
   then operand-address/insn reloads, then outputs and their addresses.
   Two reloads may share REGNO only if their live ranges in that
   ordering do not overlap. NOTE(review): the "switch (type)" header,
   the RELOAD_OTHER case label, and the "return 0;"/"return 1;" lines
   after each failing/passing test are on elided lines. */
4469 reload_reg_free_p (regno, opnum, type)
4472 enum reload_type type;
4476 /* In use for a RELOAD_OTHER means it's not available for anything. */
4477 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
/* The block below is the RELOAD_OTHER case: such a reload spans the
   whole insn, so any recorded use at all disqualifies REGNO. */
4483 /* In use for anything means we can't use it for RELOAD_OTHER. */
4484 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4485 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4486 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4489 for (i = 0; i < reload_n_operands; i++)
4490 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4491 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4492 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4493 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4494 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4495 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4500 case RELOAD_FOR_INPUT:
4501 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4502 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4505 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4508 /* If it is used for some other input, can't use it. */
4509 for (i = 0; i < reload_n_operands; i++)
4510 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4513 /* If it is used in a later operand's address, can't use it. */
4514 for (i = opnum + 1; i < reload_n_operands; i++)
4515 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4516 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4521 case RELOAD_FOR_INPUT_ADDRESS:
4522 /* Can't use a register if it is used for an input address for this
4523 operand or used as an input in an earlier one. */
4524 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4525 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4528 for (i = 0; i < opnum; i++)
4529 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4534 case RELOAD_FOR_INPADDR_ADDRESS:
4535 /* Can't use a register if it is used for an input address
4536 address for this operand or used as an input in an earlier
4538 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4541 for (i = 0; i < opnum; i++)
4542 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4547 case RELOAD_FOR_OUTPUT_ADDRESS:
4548 /* Can't use a register if it is used for an output address for this
4549 operand or used as an output in this or a later operand. */
4550 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4553 for (i = opnum; i < reload_n_operands; i++)
4554 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4559 case RELOAD_FOR_OUTADDR_ADDRESS:
4560 /* Can't use a register if it is used for an output address
4561 address for this operand or used as an output in this or a
4563 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4566 for (i = opnum; i < reload_n_operands; i++)
4567 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4572 case RELOAD_FOR_OPERAND_ADDRESS:
/* Conflicts with any input, the insn itself, or another operand
   address; inputs are consumed before operand addresses are needed. */
4573 for (i = 0; i < reload_n_operands; i++)
4574 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4577 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4578 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4580 case RELOAD_FOR_OPADDR_ADDR:
4581 for (i = 0; i < reload_n_operands; i++)
4582 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4585 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4587 case RELOAD_FOR_OUTPUT:
4588 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4589 outputs, or an operand address for this or an earlier output. */
4590 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4593 for (i = 0; i < reload_n_operands; i++)
4594 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4597 for (i = 0; i <= opnum; i++)
4598 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4599 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4604 case RELOAD_FOR_INSN:
4605 for (i = 0; i < reload_n_operands; i++)
4606 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4607 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4610 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4611 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4613 case RELOAD_FOR_OTHER_ADDRESS:
/* Only another RELOAD_FOR_OTHER_ADDRESS can conflict, since these
   come before everything else in the insn. */
4614 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4619 /* Return 1 if the value in reload reg REGNO, as used by a reload
4620 needed for the part of the insn specified by OPNUM and TYPE,
4621 is not in use for a reload in any prior part of the insn.
4623 We can assume that the reload reg was already tested for availability
4624 at the time it is needed, and we should not check this again,
4625 in case the reg has already been marked in use. */
/* NOTE(review): the "switch (type)" header, the RELOAD_OTHER case
   label, and the "return 0;" lines after each failing test are on
   elided lines of this extraction. */
4628 reload_reg_free_before_p (regno, opnum, type)
4631 enum reload_type type;
4637 case RELOAD_FOR_OTHER_ADDRESS:
4638 /* These always come first. */
4642 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4644 /* If this use is for part of the insn,
4645 check the reg is not in use for any prior part. It is tempting
4646 to try to do this by falling through from objects that occur
4647 later in the insn to ones that occur earlier, but that will not
4648 correctly take into account the fact that here we MUST ignore
4649 things that would prevent the register from being allocated in
4650 the first place, since we know that it was allocated. */
4652 case RELOAD_FOR_OUTPUT_ADDRESS:
4653 case RELOAD_FOR_OUTADDR_ADDRESS:
4654 /* Earlier reloads are for earlier outputs or their addresses,
4655 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4656 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4658 for (i = 0; i < opnum; i++)
4659 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4660 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4661 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4664 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4667 for (i = 0; i < reload_n_operands; i++)
4668 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4669 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4670 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4673 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4674 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4675 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4677 case RELOAD_FOR_OUTPUT:
4678 /* This can't be used in the output address for this operand and
4679 anything that can't be used for it, except that we've already
4680 tested for RELOAD_FOR_INSN objects. */
4682 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4683 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4686 for (i = 0; i < opnum; i++)
4687 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4688 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4689 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4692 for (i = 0; i < reload_n_operands; i++)
4693 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4694 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4695 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4696 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4699 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4701 case RELOAD_FOR_OPERAND_ADDRESS:
4702 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4703 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4706 /* ... fall through ... */
4708 case RELOAD_FOR_OPADDR_ADDR:
4709 case RELOAD_FOR_INSN:
4710 /* These can't conflict with inputs, or each other, so all we have to
4711 test is input addresses and the addresses of OTHER items. */
4713 for (i = 0; i < reload_n_operands; i++)
4714 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4715 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4718 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4720 case RELOAD_FOR_INPUT:
4721 /* The only things earlier are the address for this and
4722 earlier inputs, other inputs (which we know we don't conflict
4723 with), and addresses of RELOAD_OTHER objects. */
4725 for (i = 0; i <= opnum; i++)
4726 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4727 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4730 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4732 case RELOAD_FOR_INPUT_ADDRESS:
4733 case RELOAD_FOR_INPADDR_ADDRESS:
4734 /* Similarly, all we have to check is for use in earlier inputs'
4736 for (i = 0; i < opnum; i++)
4737 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4738 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4741 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4746 /* Return 1 if the value in reload reg REGNO, as used by a reload
4747 needed for the part of the insn specified by OPNUM and TYPE,
4748 is still available in REGNO at the end of the insn.
4750 We can assume that the reload reg was already tested for availability
4751 at the time it is needed, and we should not check this again,
4752 in case the reg has already been marked in use. */
/* Mirror image of reload_reg_free_before_p: here we ask whether any
   LATER part of the insn reuses REGNO. NOTE(review): the
   "switch (type)" header, the RELOAD_OTHER case, and the
   "return 0;"/"return 1;" lines after each test are on elided lines. */
4755 reload_reg_reaches_end_p (regno, opnum, type)
4758 enum reload_type type;
4765 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4766 its value must reach the end. */
4769 /* If this use is for part of the insn,
4770 its value reaches if no subsequent part uses the same register.
4771 Just like the above function, don't try to do this with lots
4774 case RELOAD_FOR_OTHER_ADDRESS:
4775 /* Here we check for everything else, since these don't conflict
4776 with anything else and everything comes later. */
4778 for (i = 0; i < reload_n_operands; i++)
4779 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4780 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4781 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4782 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4783 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4784 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4787 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4788 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4789 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4791 case RELOAD_FOR_INPUT_ADDRESS:
4792 case RELOAD_FOR_INPADDR_ADDRESS:
4793 /* Similar, except that we check only for this and subsequent inputs
4794 and the address of only subsequent inputs and we do not need
4795 to check for RELOAD_OTHER objects since they are known not to
4798 for (i = opnum; i < reload_n_operands; i++)
4799 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4802 for (i = opnum + 1; i < reload_n_operands; i++)
4803 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4804 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4807 for (i = 0; i < reload_n_operands; i++)
4808 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4809 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4810 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4813 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4816 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4817 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4819 case RELOAD_FOR_INPUT:
4820 /* Similar to input address, except we start at the next operand for
4821 both input and input address and we do not check for
4822 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4825 for (i = opnum + 1; i < reload_n_operands; i++)
4826 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4827 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4828 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4831 /* ... fall through ... */
4833 case RELOAD_FOR_OPERAND_ADDRESS:
4834 /* Check outputs and their addresses. */
4836 for (i = 0; i < reload_n_operands; i++)
4837 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4838 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4839 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4844 case RELOAD_FOR_OPADDR_ADDR:
4845 for (i = 0; i < reload_n_operands; i++)
4846 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4847 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4848 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4851 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4852 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4854 case RELOAD_FOR_INSN:
4855 /* These conflict with other outputs with RELOAD_OTHER. So
4856 we need only check for output addresses. */
4860 /* ... fall through ... */
4862 case RELOAD_FOR_OUTPUT:
4863 case RELOAD_FOR_OUTPUT_ADDRESS:
4864 case RELOAD_FOR_OUTADDR_ADDRESS:
4865 /* We already know these can't conflict with a later output. So the
4866 only thing to check are later output addresses. */
4867 for (i = opnum + 1; i < reload_n_operands; i++)
4868 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4869 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4878 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4881 This function uses the same algorithm as reload_reg_free_p above. */
/* Unlike reload_reg_free_p, this compares two reloads symbolically
   (by type and operand number) rather than consulting the usage sets.
   NOTE(review): the "switch (r1_type)" header, the "case RELOAD_OTHER:"
   arm, and the default arm are on elided lines of this extraction. */
4884 reloads_conflict (r1, r2)
4887 enum reload_type r1_type = reload_when_needed[r1];
4888 enum reload_type r2_type = reload_when_needed[r2];
4889 int r1_opnum = reload_opnum[r1];
4890 int r2_opnum = reload_opnum[r2];
4892 /* RELOAD_OTHER conflicts with everything. */
4893 if (r2_type == RELOAD_OTHER)
4896 /* Otherwise, check conflicts differently for each type. */
4900 case RELOAD_FOR_INPUT:
4901 return (r2_type == RELOAD_FOR_INSN
4902 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4903 || r2_type == RELOAD_FOR_OPADDR_ADDR
4904 || r2_type == RELOAD_FOR_INPUT
4905 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4906 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4907 && r2_opnum > r1_opnum));
4909 case RELOAD_FOR_INPUT_ADDRESS:
4910 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4911 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4913 case RELOAD_FOR_INPADDR_ADDRESS:
4914 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4915 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4917 case RELOAD_FOR_OUTPUT_ADDRESS:
4918 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4919 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4921 case RELOAD_FOR_OUTADDR_ADDRESS:
4922 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4923 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4925 case RELOAD_FOR_OPERAND_ADDRESS:
4926 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4927 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4929 case RELOAD_FOR_OPADDR_ADDR:
4930 return (r2_type == RELOAD_FOR_INPUT
4931 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4933 case RELOAD_FOR_OUTPUT:
4934 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4935 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4936 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4937 && r2_opnum >= r1_opnum));
4939 case RELOAD_FOR_INSN:
4940 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4941 || r2_type == RELOAD_FOR_INSN
4942 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4944 case RELOAD_FOR_OTHER_ADDRESS:
4945 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4955 /* Vector of reload-numbers showing the order in which the reloads should
(sentence continues on an elided line; presumably "be processed" --
this vector is what reload_reg_class_lower sorts). */
4957 short reload_order[MAX_RELOADS];
4959 /* Indexed by reload number, 1 if incoming value
4960 inherited from previous insns. */
4961 char reload_inherited[MAX_RELOADS];
4963 /* For an inherited reload, this is the insn the reload was inherited from,
4964 if we know it. Otherwise, this is 0. */
4965 rtx reload_inheritance_insn[MAX_RELOADS];
4967 /* If non-zero, this is a place to get the value of the reload,
4968 rather than using reload_in. */
4969 rtx reload_override_in[MAX_RELOADS];
4971 /* For each reload, the index in spill_regs of the spill register used,
4972 or -1 if we did not need one of the spill registers for this reload. */
4973 int reload_spill_index[MAX_RELOADS];
4975 /* Find a spill register to use as a reload register for reload R.
4976 LAST_RELOAD is non-zero if this is the last reload for the insn being
4979 Set reload_reg_rtx[R] to the register allocated.
4981 If NOERROR is nonzero, we return 1 if successful,
4982 or 0 if we couldn't find a spill reg and we didn't change anything. */
4985 allocate_reload_reg (r, insn, last_reload, noerror)
4997 /* If we put this reload ahead, thinking it is a group,
4998 then insist on finding a group. Otherwise we can grab a
4999 reg that some other reload needs.
5000 (That can happen when we have a 68000 DATA_OR_FP_REG
5001 which is a group of data regs or one fp reg.)
5002 We need not be so restrictive if there are no more reloads
5005 ??? Really it would be nicer to have smarter handling
5006 for that kind of reg class, where a problem like this is normal.
5007 Perhaps those classes should be avoided for reloading
5008 by use of more alternatives. */
5010 int force_group = reload_nregs[r] > 1 && ! last_reload;
5012 /* If we want a single register and haven't yet found one,
5013 take any reg in the right class and not in use.
5014 If we want a consecutive group, here is where we look for it.
5016 We use two passes so we can first look for reload regs to
5017 reuse, which are already in use for other reloads in this insn,
5018 and only then use additional registers.
5019 I think that maximizing reuse is needed to make sure we don't
5020 run out of reload regs. Suppose we have three reloads, and
5021 reloads A and B can share regs. These need two regs.
5022 Suppose A and B are given different regs.
5023 That leaves none for C. */
5024 for (pass = 0; pass < 2; pass++)
5026 /* I is the index in spill_regs.
5027 We advance it round-robin between insns to use all spill regs
5028 equally, so that inherited reloads have a chance
5029 of leapfrogging each other. Don't do this, however, when we have
5030 group needs and failure would be fatal; if we only have a relatively
5031 small number of spill registers, and more than one of them has
5032 group needs, then by starting in the middle, we may end up
5033 allocating the first one in such a way that we are not left with
5034 sufficient groups to handle the rest. */
5036 if (noerror || ! force_group)
5041 for (count = 0; count < n_spills; count++)
5043 int class = (int) reload_reg_class[r];
5045 i = (i + 1) % n_spills;
5047 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
5048 reload_when_needed[r])
5049 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5050 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5051 /* Look first for regs to share, then for unshared. But
5052 don't share regs used for inherited reloads; they are
5053 the ones we want to preserve. */
5055 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5057 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5060 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5061 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5062 (on 68000) got us two FP regs. If NR is 1,
5063 we would reject both of them. */
5065 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5066 /* If we need only one reg, we have already won. */
5069 /* But reject a single reg if we demand a group. */
5074 /* Otherwise check that as many consecutive regs as we need
5076 Also, don't use for a group registers that are
5077 needed for nongroups. */
5078 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5081 regno = spill_regs[i] + nr - 1;
5082 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5083 && spill_reg_order[regno] >= 0
5084 && reload_reg_free_p (regno, reload_opnum[r],
5085 reload_when_needed[r])
5086 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5096 /* If we found something on pass 1, omit pass 2. */
5097 if (count < n_spills)
5101 /* We should have found a spill register by now. */
5102 if (count == n_spills)
5109 /* I is the index in SPILL_REG_RTX of the reload register we are to
5110 allocate. Get an rtx for it and find its register number. */
5112 new = spill_reg_rtx[i];
5114 if (new == 0 || GET_MODE (new) != reload_mode[r])
5115 spill_reg_rtx[i] = new
5116 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
5118 regno = true_regnum (new);
5120 /* Detect when the reload reg can't hold the reload mode.
5121 This used to be one `if', but Sequent compiler can't handle that. */
5122 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5124 enum machine_mode test_mode = VOIDmode;
5126 test_mode = GET_MODE (reload_in[r]);
5127 /* If reload_in[r] has VOIDmode, it means we will load it
5128 in whatever mode the reload reg has: to wit, reload_mode[r].
5129 We have already tested that for validity. */
5130 /* Aside from that, we need to test that the expressions
5131 to reload from or into have modes which are valid for this
5132 reload register. Otherwise the reload insns would be invalid. */
5133 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5134 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5135 if (! (reload_out[r] != 0
5136 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5138 /* The reg is OK. */
5141 /* Mark as in use for this insn the reload regs we use
5143 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5144 reload_when_needed[r], reload_mode[r]);
5146 reload_reg_rtx[r] = new;
5147 reload_spill_index[r] = i;
5152 /* The reg is not OK. */
5157 if (asm_noperands (PATTERN (insn)) < 0)
5158 /* It's the compiler's fault. */
5159 fatal_insn ("Could not find a spill register", insn);
5161 /* It's the user's fault; the operand's mode and constraint
5162 don't match. Disable this reload so we don't crash in final. */
5163 error_for_asm (insn,
5164 "`asm' operand constraint incompatible with operand size");
5167 reload_reg_rtx[r] = 0;
5168 reload_optional[r] = 1;
5169 reload_secondary_p[r] = 1;
5174 /* Assign hard reg targets for the pseudo-registers we must reload
5175 into hard regs for this insn.
5176 Also output the instructions to copy them in and out of the hard regs.
5178 For machines with register classes, we are responsible for
5179 finding a reload reg in the proper class. */
/* NOTE(review): this chunk is a line-numbered listing in which many source
   lines have been elided -- the embedded line numbers jump (5182, 5184,
   5187, ...), so braces, declarations, and parts of conditions are missing
   from view.  The code below is kept byte-identical to the listing; only
   comments have been added or corrected.  Confirm structure against a
   complete copy of the file before making code changes.  */
/* Choose hard registers for the reloads of INSN, per the header comment
   above: snapshot state, try allocation with inheritance, and if that
   fails roll back and retry without inheritance.  AVOID_RETURN_REG, when
   set, is a register we should try not to use as a reload register.  */
5182 choose_reload_regs (insn, avoid_return_reg)
5184 rtx avoid_return_reg;
5187 int max_group_size = 1;
5188 enum reg_class group_class = NO_REGS;
/* Saved copies of all the reload-allocation state, used to undo a failed
   allocation attempt with inheritance (restored in the rollback below).  */
5191 rtx save_reload_reg_rtx[MAX_RELOADS];
5192 char save_reload_inherited[MAX_RELOADS];
5193 rtx save_reload_inheritance_insn[MAX_RELOADS];
5194 rtx save_reload_override_in[MAX_RELOADS];
5195 int save_reload_spill_index[MAX_RELOADS];
5196 HARD_REG_SET save_reload_reg_used;
5197 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5198 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5199 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5200 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5201 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5202 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5203 HARD_REG_SET save_reload_reg_used_in_op_addr;
5204 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5205 HARD_REG_SET save_reload_reg_used_in_insn;
5206 HARD_REG_SET save_reload_reg_used_in_other_addr;
5207 HARD_REG_SET save_reload_reg_used_at_all;
/* Reset all per-insn reload bookkeeping before choosing registers.  */
5209 bzero (reload_inherited, MAX_RELOADS);
5210 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5211 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5213 CLEAR_HARD_REG_SET (reload_reg_used);
5214 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5215 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5216 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5217 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5218 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5220 for (i = 0; i < reload_n_operands; i++)
5222 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5223 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5224 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5225 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5226 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5227 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5230 #ifdef SMALL_REGISTER_CLASSES
5231 /* Don't bother with avoiding the return reg
5232 if we have no mandatory reload that could use it. */
5233 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5236 int regno = REGNO (avoid_return_reg);
5238 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5241 for (r = regno; r < regno + nregs; r++)
5242 if (spill_reg_order[r] >= 0)
5243 for (j = 0; j < n_reloads; j++)
5244 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5245 && (reload_in[j] != 0 || reload_out[j] != 0
5246 || reload_secondary_p[j])
5248 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5251 avoid_return_reg = 0;
5253 #endif /* SMALL_REGISTER_CLASSES */
5255 #if 0 /* Not needed, now that we can always retry without inheritance. */
5256 /* See if we have more mandatory reloads than spill regs.
5257 If so, then we cannot risk optimizations that could prevent
5258 reloads from sharing one spill register.
5260 Since we will try finding a better register than reload_reg_rtx
5261 unless it is equal to reload_in or reload_out, count such reloads. */
5265 #ifdef SMALL_REGISTER_CLASSES
5266 if (SMALL_REGISTER_CLASSES)
5267 tem = (avoid_return_reg != 0);
5269 for (j = 0; j < n_reloads; j++)
5270 if (! reload_optional[j]
5271 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5272 && (reload_reg_rtx[j] == 0
5273 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5274 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5281 #ifdef SMALL_REGISTER_CLASSES
5282 /* Don't use the subroutine call return reg for a reload
5283 if we are supposed to avoid it. */
5284 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5286 int regno = REGNO (avoid_return_reg);
5288 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5291 for (r = regno; r < regno + nregs; r++)
5292 if (spill_reg_order[r] >= 0)
5293 SET_HARD_REG_BIT (reload_reg_used, r);
5295 #endif /* SMALL_REGISTER_CLASSES */
5297 /* In order to be certain of getting the registers we need,
5298 we must sort the reloads into order of increasing register class.
5299 Then our grabbing of reload registers will parallel the process
5300 that provided the reload registers.
5302 Also note whether any of the reloads wants a consecutive group of regs.
5303 If so, record the maximum size of the group desired and what
5304 register class contains all the groups needed by this insn. */
5306 for (j = 0; j < n_reloads; j++)
5308 reload_order[j] = j;
5309 reload_spill_index[j] = -1;
/* NOTE(review): the left-hand side of the assignment below (presumably
   reload_mode[j]) is on a line elided from this listing -- confirm.  */
5312 = (reload_inmode[j] == VOIDmode
5313 || (GET_MODE_SIZE (reload_outmode[j])
5314 > GET_MODE_SIZE (reload_inmode[j])))
5315 ? reload_outmode[j] : reload_inmode[j];
5317 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5319 if (reload_nregs[j] > 1)
5321 max_group_size = MAX (reload_nregs[j], max_group_size);
5322 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5325 /* If we have already decided to use a certain register,
5326 don't use it in another way. */
5327 if (reload_reg_rtx[j])
5328 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5329 reload_when_needed[j], reload_mode[j]);
5333 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
/* Snapshot the reload-allocation state so a failed attempt with
   inheritance can be rolled back (see the restore sequence below).  */
5335 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5336 sizeof reload_reg_rtx);
5337 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5338 bcopy ((char *) reload_inheritance_insn,
5339 (char *) save_reload_inheritance_insn,
5340 sizeof reload_inheritance_insn);
5341 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5342 sizeof reload_override_in);
5343 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5344 sizeof reload_spill_index);
5345 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5346 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5347 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5348 reload_reg_used_in_op_addr);
5350 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5351 reload_reg_used_in_op_addr_reload);
5353 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5354 reload_reg_used_in_insn);
5355 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5356 reload_reg_used_in_other_addr);
5358 for (i = 0; i < reload_n_operands; i++)
5360 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5361 reload_reg_used_in_output[i]);
5362 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5363 reload_reg_used_in_input[i]);
5364 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5365 reload_reg_used_in_input_addr[i]);
5366 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5367 reload_reg_used_in_inpaddr_addr[i]);
5368 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5369 reload_reg_used_in_output_addr[i]);
5370 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5371 reload_reg_used_in_outaddr_addr[i]);
5374 /* If -O, try first with inheritance, then turning it off.
5375 If not -O, don't do inheritance.
5376 Using inheritance when not optimizing leads to paradoxes
5377 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5378 because one side of the comparison might be inherited. */
5380 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5382 /* Process the reloads in order of preference just found.
5383 Beyond this point, subregs can be found in reload_reg_rtx.
5385 This used to look for an existing reloaded home for all
5386 of the reloads, and only then perform any new reloads.
5387 But that could lose if the reloads were done out of reg-class order
5388 because a later reload with a looser constraint might have an old
5389 home in a register needed by an earlier reload with a tighter constraint.
5391 To solve this, we make two passes over the reloads, in the order
5392 described above. In the first pass we try to inherit a reload
5393 from a previous insn. If there is a later reload that needs a
5394 class that is a proper subset of the class being processed, we must
5395 also allocate a spill register during the first pass.
5397 Then make a second pass over the reloads to allocate any reloads
5398 that haven't been given registers yet. */
5400 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5402 for (j = 0; j < n_reloads; j++)
5404 register int r = reload_order[j];
5406 /* Ignore reloads that got marked inoperative. */
5407 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5410 /* If find_reloads chose to use reload_in or reload_out as a reload
5411 register, we don't need to choose one. Otherwise, try even if it found
5412 one since we might save an insn if we find the value lying around. */
5413 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5414 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5415 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5418 #if 0 /* No longer needed for correct operation.
5419 It might give better code, or might not; worth an experiment? */
5420 /* If this is an optional reload, we can't inherit from earlier insns
5421 until we are sure that any non-optional reloads have been allocated.
5422 The following code takes advantage of the fact that optional reloads
5423 are at the end of reload_order. */
5424 if (reload_optional[r] != 0)
5425 for (i = 0; i < j; i++)
5426 if ((reload_out[reload_order[i]] != 0
5427 || reload_in[reload_order[i]] != 0
5428 || reload_secondary_p[reload_order[i]])
5429 && ! reload_optional[reload_order[i]]
5430 && reload_reg_rtx[reload_order[i]] == 0)
5431 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5434 /* First see if this pseudo is already available as reloaded
5435 for a previous insn. We cannot try to inherit for reloads
5436 that are smaller than the maximum number of registers needed
5437 for groups unless the register we would allocate cannot be used
5440 We could check here to see if this is a secondary reload for
5441 an object that is already in a register of the desired class.
5442 This would avoid the need for the secondary reload register.
5443 But this is complex because we can't easily determine what
5444 objects might want to be loaded via this reload. So let a register
5445 be allocated here. In `emit_reload_insns' we suppress one of the
5446 loads in the case described above. */
5450 register int regno = -1;
5451 enum machine_mode mode;
5453 if (reload_in[r] == 0)
5455 else if (GET_CODE (reload_in[r]) == REG)
5457 regno = REGNO (reload_in[r]);
5458 mode = GET_MODE (reload_in[r]);
5460 else if (GET_CODE (reload_in_reg[r]) == REG)
5462 regno = REGNO (reload_in_reg[r]);
5463 mode = GET_MODE (reload_in_reg[r]);
5466 /* This won't work, since REGNO can be a pseudo reg number.
5467 Also, it takes much more hair to keep track of all the things
5468 that can invalidate an inherited reload of part of a pseudoreg. */
5469 else if (GET_CODE (reload_in[r]) == SUBREG
5470 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5471 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5474 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5476 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5478 if (reg_reloaded_contents[i] == regno
5479 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5480 >= GET_MODE_SIZE (mode))
5481 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5482 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5484 && (reload_nregs[r] == max_group_size
5485 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5487 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5488 reload_when_needed[r])
5489 && reload_reg_free_before_p (spill_regs[i],
5491 reload_when_needed[r]))
5493 /* If a group is needed, verify that all the subsequent
5494 registers still have their values intact. */
5496 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5499 for (k = 1; k < nr; k++)
5500 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5508 /* We found a register that contains the
5509 value we need. If this register is the
5510 same as an `earlyclobber' operand of the
5511 current insn, just mark it as a place to
5512 reload from since we can't use it as the
5513 reload register itself. */
5515 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5516 if (reg_overlap_mentioned_for_reload_p
5517 (reg_last_reload_reg[regno],
5518 reload_earlyclobbers[i1]))
5521 if (i1 != n_earlyclobbers
5522 /* Don't really use the inherited spill reg
5523 if we need it wider than we've got it. */
5524 || (GET_MODE_SIZE (reload_mode[r])
5525 > GET_MODE_SIZE (mode)))
5526 reload_override_in[r] = reg_last_reload_reg[regno];
5530 /* We can use this as a reload reg. */
5531 /* Mark the register as in use for this part of
5533 mark_reload_reg_in_use (spill_regs[i],
5535 reload_when_needed[r],
5537 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5538 reload_inherited[r] = 1;
5539 reload_inheritance_insn[r]
5540 = reg_reloaded_insn[i];
5541 reload_spill_index[r] = i;
5542 for (k = 0; k < nr; k++)
5543 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5551 /* Here's another way to see if the value is already lying around. */
5553 && reload_in[r] != 0
5554 && ! reload_inherited[r]
5555 && reload_out[r] == 0
5556 && (CONSTANT_P (reload_in[r])
5557 || GET_CODE (reload_in[r]) == PLUS
5558 || GET_CODE (reload_in[r]) == REG
5559 || GET_CODE (reload_in[r]) == MEM)
5560 && (reload_nregs[r] == max_group_size
5561 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5564 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5565 -1, NULL_PTR, 0, reload_mode[r]);
5570 if (GET_CODE (equiv) == REG)
5571 regno = REGNO (equiv);
5572 else if (GET_CODE (equiv) == SUBREG)
5574 /* This must be a SUBREG of a hard register.
5575 Make a new REG since this might be used in an
5576 address and not all machines support SUBREGs
5578 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5579 equiv = gen_rtx (REG, reload_mode[r], regno);
5585 /* If we found a spill reg, reject it unless it is free
5586 and of the desired class. */
5588 && ((spill_reg_order[regno] >= 0
5589 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5590 reload_when_needed[r]))
5591 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5595 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5598 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5601 /* We found a register that contains the value we need.
5602 If this register is the same as an `earlyclobber' operand
5603 of the current insn, just mark it as a place to reload from
5604 since we can't use it as the reload register itself. */
5607 for (i = 0; i < n_earlyclobbers; i++)
5608 if (reg_overlap_mentioned_for_reload_p (equiv,
5609 reload_earlyclobbers[i]))
5611 reload_override_in[r] = equiv;
5616 /* JRV: If the equiv register we have found is
5617 explicitly clobbered in the current insn, mark but
5618 don't use, as above. */
5620 if (equiv != 0 && regno_clobbered_p (regno, insn))
5622 reload_override_in[r] = equiv;
5626 /* If we found an equivalent reg, say no code need be generated
5627 to load it, and use it as our reload reg. */
5628 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5630 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5632 reload_reg_rtx[r] = equiv;
5633 reload_inherited[r] = 1;
5635 /* If any of the hard registers in EQUIV are spill
5636 registers, mark them as in use for this insn. */
5637 for (k = 0; k < nr; k++)
5639 i = spill_reg_order[regno + k];
5642 mark_reload_reg_in_use (regno, reload_opnum[r],
5643 reload_when_needed[r],
5645 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5652 /* If we found a register to use already, or if this is an optional
5653 reload, we are done. */
5654 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5657 #if 0 /* No longer needed for correct operation. Might or might not
5658 give better code on the average. Want to experiment? */
5660 /* See if there is a later reload that has a class different from our
5661 class that intersects our class or that requires less register
5662 than our reload. If so, we must allocate a register to this
5663 reload now, since that reload might inherit a previous reload
5664 and take the only available register in our class. Don't do this
5665 for optional reloads since they will force all previous reloads
5666 to be allocated. Also don't do this for reloads that have been
5669 for (i = j + 1; i < n_reloads; i++)
5671 int s = reload_order[i];
5673 if ((reload_in[s] == 0 && reload_out[s] == 0
5674 && ! reload_secondary_p[s])
5675 || reload_optional[s])
5678 if ((reload_reg_class[s] != reload_reg_class[r]
5679 && reg_classes_intersect_p (reload_reg_class[r],
5680 reload_reg_class[s]))
5681 || reload_nregs[s] < reload_nregs[r])
5688 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5692 /* Now allocate reload registers for anything non-optional that
5693 didn't get one yet. */
5694 for (j = 0; j < n_reloads; j++)
5696 register int r = reload_order[j];
5698 /* Ignore reloads that got marked inoperative. */
5699 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5702 /* Skip reloads that already have a register allocated or are
5704 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5707 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5711 /* If that loop got all the way, we have won. */
5716 /* Loop around and try without any inheritance. */
5717 /* First undo everything done by the failed attempt
5718 to allocate with inheritance. */
5719 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5720 sizeof reload_reg_rtx);
5721 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5722 sizeof reload_inherited);
5723 bcopy ((char *) save_reload_inheritance_insn,
5724 (char *) reload_inheritance_insn,
5725 sizeof reload_inheritance_insn);
5726 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5727 sizeof reload_override_in);
5728 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5729 sizeof reload_spill_index);
5730 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5731 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5732 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5733 save_reload_reg_used_in_op_addr);
5734 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5735 save_reload_reg_used_in_op_addr_reload);
5736 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5737 save_reload_reg_used_in_insn);
5738 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5739 save_reload_reg_used_in_other_addr);
5741 for (i = 0; i < reload_n_operands; i++)
5743 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5744 save_reload_reg_used_in_input[i]);
5745 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5746 save_reload_reg_used_in_output[i]);
5747 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5748 save_reload_reg_used_in_input_addr[i]);
5749 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5750 save_reload_reg_used_in_inpaddr_addr[i]);
5751 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5752 save_reload_reg_used_in_output_addr[i]);
5753 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5754 save_reload_reg_used_in_outaddr_addr[i]);
5758 /* If we thought we could inherit a reload, because it seemed that
5759 nothing else wanted the same reload register earlier in the insn,
5760 verify that assumption, now that all reloads have been assigned. */
5762 for (j = 0; j < n_reloads; j++)
5764 register int r = reload_order[j];
5766 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5767 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5769 reload_when_needed[r]))
5770 reload_inherited[r] = 0;
5772 /* If we found a better place to reload from,
5773 validate it in the same fashion, if it is a reload reg. */
5774 if (reload_override_in[r]
5775 && (GET_CODE (reload_override_in[r]) == REG
5776 || GET_CODE (reload_override_in[r]) == SUBREG))
5778 int regno = true_regnum (reload_override_in[r]);
5779 if (spill_reg_order[regno] >= 0
5780 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5781 reload_when_needed[r]))
5782 reload_override_in[r] = 0;
5786 /* Now that reload_override_in is known valid,
5787 actually override reload_in. */
5788 for (j = 0; j < n_reloads; j++)
5789 if (reload_override_in[j])
5790 reload_in[j] = reload_override_in[j];
5792 /* If this reload won't be done because it has been cancelled or is
5793 optional and not inherited, clear reload_reg_rtx so other
5794 routines (such as subst_reloads) don't get confused. */
5795 for (j = 0; j < n_reloads; j++)
5796 if (reload_reg_rtx[j] != 0
5797 && ((reload_optional[j] && ! reload_inherited[j])
5798 || (reload_in[j] == 0 && reload_out[j] == 0
5799 && ! reload_secondary_p[j])))
5801 int regno = true_regnum (reload_reg_rtx[j]);
5803 if (spill_reg_order[regno] >= 0)
5804 clear_reload_reg_in_use (regno, reload_opnum[j],
5805 reload_when_needed[j], reload_mode[j]);
5806 reload_reg_rtx[j] = 0;
5809 /* Record which pseudos and which spill regs have output reloads. */
5810 for (j = 0; j < n_reloads; j++)
5812 register int r = reload_order[j];
5814 i = reload_spill_index[r];
5816 /* I is nonneg if this reload used one of the spill regs.
5817 If reload_reg_rtx[r] is 0, this is an optional reload
5818 that we opted to ignore. */
5819 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5820 && reload_reg_rtx[r] != 0)
5822 register int nregno = REGNO (reload_out[r]);
5825 if (nregno < FIRST_PSEUDO_REGISTER)
5826 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5829 reg_has_output_reload[nregno + nr] = 1;
5833 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5835 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5838 if (reload_when_needed[r] != RELOAD_OTHER
5839 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5840 && reload_when_needed[r] != RELOAD_FOR_INSN)
5846 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5847 reloads of the same item for fear that we might not have enough reload
5848 registers. However, normally they will get the same reload register
5849 and hence actually need not be loaded twice.
5851 Here we check for the most common case of this phenomenon: when we have
5852 a number of reloads for the same object, each of which were allocated
5853 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5854 reload, and is not modified in the insn itself. If we find such,
5855 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5856 This will not increase the number of spill registers needed and will
5857 prevent redundant code. */
/* NOTE(review): as elsewhere in this chunk, the listing has elided lines
   (the embedded numbers jump: 5862, 5867, 5871, ...), so the function's
   declarations and braces are not fully visible.  Code is kept
   byte-identical; comments only have been added or corrected.  */
/* Merge reloads of INSN that were assigned the same reload register and
   load the same value, promoting the survivor to RELOAD_OTHER (see the
   explanatory comment above this function).  */
5859 #ifdef SMALL_REGISTER_CLASSES
5862 merge_assigned_reloads (insn)
5867 /* Scan all the reloads looking for ones that only load values and
5868 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5869 assigned and not modified by INSN. */
5871 for (i = 0; i < n_reloads; i++)
5873 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5874 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5875 || reg_set_p (reload_reg_rtx[i], insn))
5878 /* Look at all other reloads. Ensure that the only use of this
5879 reload_reg_rtx is in a reload that just loads the same value
5880 as we do. Note that any secondary reloads must be of the identical
5881 class since the values, modes, and result registers are the
5882 same, so we need not do anything with any secondary reloads. */
5884 for (j = 0; j < n_reloads; j++)
5886 if (i == j || reload_reg_rtx[j] == 0
5887 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5891 /* If the reload regs aren't exactly the same (e.g, different modes)
5892 or if the values are different, we can't merge anything with this
5895 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5896 || reload_out[j] != 0 || reload_in[j] == 0
5897 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5901 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5902 we, in fact, found any matching reloads. */
5906 for (j = 0; j < n_reloads; j++)
5907 if (i != j && reload_reg_rtx[j] != 0
5908 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5910 reload_when_needed[i] = RELOAD_OTHER;
5912 transfer_replacements (i, j);
5915 /* If this is now RELOAD_OTHER, look for any reloads that load
5916 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5917 if they were for inputs, RELOAD_OTHER for outputs. Note that
5918 this test is equivalent to looking for reloads for this operand
5921 if (reload_when_needed[i] == RELOAD_OTHER)
5922 for (j = 0; j < n_reloads; j++)
5923 if (reload_in[j] != 0
/* NOTE(review): the test on the next line can never be true here -- the
   enclosing `if' just required reload_when_needed[i] == RELOAD_OTHER.
   Presumably the index should be [j], matching the inner loop; confirm
   against a complete copy of this file before changing.  */
5924 && reload_when_needed[i] != RELOAD_OTHER
5925 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5927 reload_when_needed[j]
5928 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5929 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
5930 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
5934 #endif /* SMALL_REGISTER_CLASSES */
5936 /* Output insns to reload values in and out of the chosen reload regs. */
5939 emit_reload_insns (insn)
5943 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5944 rtx other_input_address_reload_insns = 0;
5945 rtx other_input_reload_insns = 0;
5946 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5947 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5948 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5949 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5950 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5951 rtx operand_reload_insns = 0;
5952 rtx other_operand_reload_insns = 0;
5953 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5954 rtx following_insn = NEXT_INSN (insn);
5955 rtx before_insn = insn;
5957 /* Values to be put in spill_reg_store are put here first. */
5958 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5960 for (j = 0; j < reload_n_operands; j++)
5961 input_reload_insns[j] = input_address_reload_insns[j]
5962 = inpaddr_address_reload_insns[j]
5963 = output_reload_insns[j] = output_address_reload_insns[j]
5964 = outaddr_address_reload_insns[j]
5965 = other_output_reload_insns[j] = 0;
5967 /* Now output the instructions to copy the data into and out of the
5968 reload registers. Do these in the order that the reloads were reported,
5969 since reloads of base and index registers precede reloads of operands
5970 and the operands may need the base and index registers reloaded. */
5972 for (j = 0; j < n_reloads; j++)
5975 rtx oldequiv_reg = 0;
5976 rtx this_reload_insn = 0;
5978 if (reload_spill_index[j] >= 0)
5979 new_spill_reg_store[reload_spill_index[j]] = 0;
5982 if (old != 0 && ! reload_inherited[j]
5983 && ! rtx_equal_p (reload_reg_rtx[j], old)
5984 && reload_reg_rtx[j] != 0)
5986 register rtx reloadreg = reload_reg_rtx[j];
5988 enum machine_mode mode;
5991 /* Determine the mode to reload in.
5992 This is very tricky because we have three to choose from.
5993 There is the mode the insn operand wants (reload_inmode[J]).
5994 There is the mode of the reload register RELOADREG.
5995 There is the intrinsic mode of the operand, which we could find
5996 by stripping some SUBREGs.
5997 It turns out that RELOADREG's mode is irrelevant:
5998 we can change that arbitrarily.
6000 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6001 then the reload reg may not support QImode moves, so use SImode.
6002 If foo is in memory due to spilling a pseudo reg, this is safe,
6003 because the QImode value is in the least significant part of a
6004 slot big enough for a SImode. If foo is some other sort of
6005 memory reference, then it is impossible to reload this case,
6006 so previous passes had better make sure this never happens.
6008 Then consider a one-word union which has SImode and one of its
6009 members is a float, being fetched as (SUBREG:SF union:SI).
6010 We must fetch that as SFmode because we could be loading into
6011 a float-only register. In this case OLD's mode is correct.
6013 Consider an immediate integer: it has VOIDmode. Here we need
6014 to get a mode from something else.
6016 In some cases, there is a fourth mode, the operand's
6017 containing mode. If the insn specifies a containing mode for
6018 this operand, it overrides all others.
6020 I am not sure whether the algorithm here is always right,
6021 but it does the right things in those cases. */
6023 mode = GET_MODE (old);
6024 if (mode == VOIDmode)
6025 mode = reload_inmode[j];
6027 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6028 /* If we need a secondary register for this operation, see if
6029 the value is already in a register in that class. Don't
6030 do this if the secondary register will be used as a scratch
6033 if (reload_secondary_in_reload[j] >= 0
6034 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6037 = find_equiv_reg (old, insn,
6038 reload_reg_class[reload_secondary_in_reload[j]],
6039 -1, NULL_PTR, 0, mode);
6042 /* If reloading from memory, see if there is a register
6043 that already holds the same value. If so, reload from there.
6044 We can pass 0 as the reload_reg_p argument because
6045 any other reload has either already been emitted,
6046 in which case find_equiv_reg will see the reload-insn,
6047 or has yet to be emitted, in which case it doesn't matter
6048 because we will use this equiv reg right away. */
6050 if (oldequiv == 0 && optimize
6051 && (GET_CODE (old) == MEM
6052 || (GET_CODE (old) == REG
6053 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6054 && reg_renumber[REGNO (old)] < 0)))
6055 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6056 -1, NULL_PTR, 0, mode);
6060 int regno = true_regnum (oldequiv);
6062 /* If OLDEQUIV is a spill register, don't use it for this
6063 if any other reload needs it at an earlier stage of this insn
6064 or at this stage. */
6065 if (spill_reg_order[regno] >= 0
6066 && (! reload_reg_free_p (regno, reload_opnum[j],
6067 reload_when_needed[j])
6068 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6069 reload_when_needed[j])))
6072 /* If OLDEQUIV is not a spill register,
6073 don't use it if any other reload wants it. */
6074 if (spill_reg_order[regno] < 0)
6077 for (k = 0; k < n_reloads; k++)
6078 if (reload_reg_rtx[k] != 0 && k != j
6079 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6087 /* If it is no cheaper to copy from OLDEQUIV into the
6088 reload register than it would be to move from memory,
6089 don't use it. Likewise, if we need a secondary register
6093 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6094 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6095 reload_reg_class[j])
6096 >= MEMORY_MOVE_COST (mode)))
6097 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6098 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6102 #ifdef SECONDARY_MEMORY_NEEDED
6103 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6104 REGNO_REG_CLASS (regno),
6113 else if (GET_CODE (oldequiv) == REG)
6114 oldequiv_reg = oldequiv;
6115 else if (GET_CODE (oldequiv) == SUBREG)
6116 oldequiv_reg = SUBREG_REG (oldequiv);
6118 /* If we are reloading from a register that was recently stored in
6119 with an output-reload, see if we can prove there was
6120 actually no need to store the old value in it. */
6122 if (optimize && GET_CODE (oldequiv) == REG
6123 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6124 && spill_reg_order[REGNO (oldequiv)] >= 0
6125 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
6126 && find_reg_note (insn, REG_DEAD, reload_in[j])
6127 /* This is unsafe if operand occurs more than once in current
6128 insn. Perhaps some occurrences weren't reloaded. */
6129 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6130 delete_output_reload
6131 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
6133 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6134 then load RELOADREG from OLDEQUIV. Note that we cannot use
6135 gen_lowpart_common since it can do the wrong thing when
6136 RELOADREG has a multi-word mode. Note that RELOADREG
6137 must always be a REG here. */
6139 if (GET_MODE (reloadreg) != mode)
6140 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6141 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6142 oldequiv = SUBREG_REG (oldequiv);
6143 if (GET_MODE (oldequiv) != VOIDmode
6144 && mode != GET_MODE (oldequiv))
6145 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
6147 /* Switch to the right place to emit the reload insns. */
6148 switch (reload_when_needed[j])
6151 where = &other_input_reload_insns;
6153 case RELOAD_FOR_INPUT:
6154 where = &input_reload_insns[reload_opnum[j]];
6156 case RELOAD_FOR_INPUT_ADDRESS:
6157 where = &input_address_reload_insns[reload_opnum[j]];
6159 case RELOAD_FOR_INPADDR_ADDRESS:
6160 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6162 case RELOAD_FOR_OUTPUT_ADDRESS:
6163 where = &output_address_reload_insns[reload_opnum[j]];
6165 case RELOAD_FOR_OUTADDR_ADDRESS:
6166 where = &outaddr_address_reload_insns[reload_opnum[j]];
6168 case RELOAD_FOR_OPERAND_ADDRESS:
6169 where = &operand_reload_insns;
6171 case RELOAD_FOR_OPADDR_ADDR:
6172 where = &other_operand_reload_insns;
6174 case RELOAD_FOR_OTHER_ADDRESS:
6175 where = &other_input_address_reload_insns;
6181 push_to_sequence (*where);
6184 /* Auto-increment addresses must be reloaded in a special way. */
6185 if (GET_CODE (oldequiv) == POST_INC
6186 || GET_CODE (oldequiv) == POST_DEC
6187 || GET_CODE (oldequiv) == PRE_INC
6188 || GET_CODE (oldequiv) == PRE_DEC)
6190 /* We are not going to bother supporting the case where a
6191 incremented register can't be copied directly from
6192 OLDEQUIV since this seems highly unlikely. */
6193 if (reload_secondary_in_reload[j] >= 0)
6195 /* Prevent normal processing of this reload. */
6197 /* Output a special code sequence for this case. */
6198 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6201 /* If we are reloading a pseudo-register that was set by the previous
6202 insn, see if we can get rid of that pseudo-register entirely
6203 by redirecting the previous insn into our reload register. */
6205 else if (optimize && GET_CODE (old) == REG
6206 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6207 && dead_or_set_p (insn, old)
6208 /* This is unsafe if some other reload
6209 uses the same reg first. */
6210 && reload_reg_free_before_p (REGNO (reloadreg),
6212 reload_when_needed[j]))
6214 rtx temp = PREV_INSN (insn);
6215 while (temp && GET_CODE (temp) == NOTE)
6216 temp = PREV_INSN (temp);
6218 && GET_CODE (temp) == INSN
6219 && GET_CODE (PATTERN (temp)) == SET
6220 && SET_DEST (PATTERN (temp)) == old
6221 /* Make sure we can access insn_operand_constraint. */
6222 && asm_noperands (PATTERN (temp)) < 0
6223 /* This is unsafe if prev insn rejects our reload reg. */
6224 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6226 /* This is unsafe if operand occurs more than once in current
6227 insn. Perhaps some occurrences aren't reloaded. */
6228 && count_occurrences (PATTERN (insn), old) == 1
6229 /* Don't risk splitting a matching pair of operands. */
6230 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6232 /* Store into the reload register instead of the pseudo. */
6233 SET_DEST (PATTERN (temp)) = reloadreg;
6234 /* If these are the only uses of the pseudo reg,
6235 pretend for GDB it lives in the reload reg we used. */
6236 if (reg_n_deaths[REGNO (old)] == 1
6237 && reg_n_sets[REGNO (old)] == 1)
6239 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6240 alter_reg (REGNO (old), -1);
6246 /* We can't do that, so output an insn to load RELOADREG. */
6250 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6251 rtx second_reload_reg = 0;
6252 enum insn_code icode;
6254 /* If we have a secondary reload, pick up the secondary register
6255 and icode, if any. If OLDEQUIV and OLD are different or
6256 if this is an in-out reload, recompute whether or not we
6257 still need a secondary register and what the icode should
6258 be. If we still need a secondary register and the class or
6259 icode is different, go back to reloading from OLD if using
6260 OLDEQUIV means that we got the wrong type of register. We
6261 cannot have different class or icode due to an in-out reload
6262 because we don't make such reloads when both the input and
6263 output need secondary reload registers. */
6265 if (reload_secondary_in_reload[j] >= 0)
6267 int secondary_reload = reload_secondary_in_reload[j];
6268 rtx real_oldequiv = oldequiv;
6271 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6272 and similarly for OLD.
6273 See comments in get_secondary_reload in reload.c. */
6274 if (GET_CODE (oldequiv) == REG
6275 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6276 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6277 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6279 if (GET_CODE (old) == REG
6280 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6281 && reg_equiv_mem[REGNO (old)] != 0)
6282 real_old = reg_equiv_mem[REGNO (old)];
6284 second_reload_reg = reload_reg_rtx[secondary_reload];
6285 icode = reload_secondary_in_icode[j];
6287 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6288 || (reload_in[j] != 0 && reload_out[j] != 0))
6290 enum reg_class new_class
6291 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6292 mode, real_oldequiv);
6294 if (new_class == NO_REGS)
6295 second_reload_reg = 0;
6298 enum insn_code new_icode;
6299 enum machine_mode new_mode;
6301 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6302 REGNO (second_reload_reg)))
6303 oldequiv = old, real_oldequiv = real_old;
6306 new_icode = reload_in_optab[(int) mode];
6307 if (new_icode != CODE_FOR_nothing
6308 && ((insn_operand_predicate[(int) new_icode][0]
6309 && ! ((*insn_operand_predicate[(int) new_icode][0])
6311 || (insn_operand_predicate[(int) new_icode][1]
6312 && ! ((*insn_operand_predicate[(int) new_icode][1])
6313 (real_oldequiv, mode)))))
6314 new_icode = CODE_FOR_nothing;
6316 if (new_icode == CODE_FOR_nothing)
6319 new_mode = insn_operand_mode[(int) new_icode][2];
6321 if (GET_MODE (second_reload_reg) != new_mode)
6323 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6325 oldequiv = old, real_oldequiv = real_old;
6328 = gen_rtx (REG, new_mode,
6329 REGNO (second_reload_reg));
6335 /* If we still need a secondary reload register, check
6336 to see if it is being used as a scratch or intermediate
6337 register and generate code appropriately. If we need
6338 a scratch register, use REAL_OLDEQUIV since the form of
6339 the insn may depend on the actual address if it is
6342 if (second_reload_reg)
6344 if (icode != CODE_FOR_nothing)
6346 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6347 second_reload_reg));
6352 /* See if we need a scratch register to load the
6353 intermediate register (a tertiary reload). */
6354 enum insn_code tertiary_icode
6355 = reload_secondary_in_icode[secondary_reload];
6357 if (tertiary_icode != CODE_FOR_nothing)
6359 rtx third_reload_reg
6360 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6362 emit_insn ((GEN_FCN (tertiary_icode)
6363 (second_reload_reg, real_oldequiv,
6364 third_reload_reg)));
6367 gen_reload (second_reload_reg, oldequiv,
6369 reload_when_needed[j]);
6371 oldequiv = second_reload_reg;
6377 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6378 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6379 reload_when_needed[j]);
6381 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6382 /* We may have to make a REG_DEAD note for the secondary reload
6383 register in the insns we just made. Find the last insn that
6384 mentioned the register. */
6385 if (! special && second_reload_reg
6386 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6390 for (prev = get_last_insn (); prev;
6391 prev = PREV_INSN (prev))
6392 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6393 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6396 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6405 this_reload_insn = get_last_insn ();
6406 /* End this sequence. */
6407 *where = get_insns ();
6411 /* Add a note saying the input reload reg
6412 dies in this insn, if anyone cares. */
6413 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6415 && reload_reg_rtx[j] != old
6416 && reload_reg_rtx[j] != 0
6417 && reload_out[j] == 0
6418 && ! reload_inherited[j]
6419 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6421 register rtx reloadreg = reload_reg_rtx[j];
6424 /* We can't abort here because we need to support this for sched.c.
6425 It's not terrible to miss a REG_DEAD note, but we should try
6426 to figure out how to do this correctly. */
6427 /* The code below is incorrect for address-only reloads. */
6428 if (reload_when_needed[j] != RELOAD_OTHER
6429 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6433 /* Add a death note to this insn, for an input reload. */
6435 if ((reload_when_needed[j] == RELOAD_OTHER
6436 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6437 && ! dead_or_set_p (insn, reloadreg))
6439 = gen_rtx (EXPR_LIST, REG_DEAD,
6440 reloadreg, REG_NOTES (insn));
6443 /* When we inherit a reload, the last marked death of the reload reg
6444 may no longer really be a death. */
6445 if (reload_reg_rtx[j] != 0
6446 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6447 && reload_inherited[j])
6449 /* Handle inheriting an output reload.
6450 Remove the death note from the output reload insn. */
6451 if (reload_spill_index[j] >= 0
6452 && GET_CODE (reload_in[j]) == REG
6453 && spill_reg_store[reload_spill_index[j]] != 0
6454 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6455 REG_DEAD, REGNO (reload_reg_rtx[j])))
6456 remove_death (REGNO (reload_reg_rtx[j]),
6457 spill_reg_store[reload_spill_index[j]]);
6458 /* Likewise for input reloads that were inherited. */
6459 else if (reload_spill_index[j] >= 0
6460 && GET_CODE (reload_in[j]) == REG
6461 && spill_reg_store[reload_spill_index[j]] == 0
6462 && reload_inheritance_insn[j] != 0
6463 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6464 REGNO (reload_reg_rtx[j])))
6465 remove_death (REGNO (reload_reg_rtx[j]),
6466 reload_inheritance_insn[j]);
6471 /* We got this register from find_equiv_reg.
6472 Search back for its last death note and get rid of it.
6473 But don't search back too far.
6474 Don't go past a place where this reg is set,
6475 since a death note before that remains valid. */
6476 for (prev = PREV_INSN (insn);
6477 prev && GET_CODE (prev) != CODE_LABEL;
6478 prev = PREV_INSN (prev))
6479 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6480 && dead_or_set_p (prev, reload_reg_rtx[j]))
6482 if (find_regno_note (prev, REG_DEAD,
6483 REGNO (reload_reg_rtx[j])))
6484 remove_death (REGNO (reload_reg_rtx[j]), prev);
6490 /* We might have used find_equiv_reg above to choose an alternate
6491 place from which to reload. If so, and it died, we need to remove
6492 that death and move it to one of the insns we just made. */
6494 if (oldequiv_reg != 0
6495 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6499 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6500 prev = PREV_INSN (prev))
6501 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6502 && dead_or_set_p (prev, oldequiv_reg))
6504 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6506 for (prev1 = this_reload_insn;
6507 prev1; prev1 = PREV_INSN (prev1))
6508 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6509 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6512 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6517 remove_death (REGNO (oldequiv_reg), prev);
6524 /* If we are reloading a register that was recently stored in with an
6525 output-reload, see if we can prove there was
6526 actually no need to store the old value in it. */
6528 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6529 && reload_in[j] != 0
6530 && GET_CODE (reload_in[j]) == REG
6532 /* There doesn't seem to be any reason to restrict this to pseudos
6533 and doing so loses in the case where we are copying from a
6534 register of the wrong class. */
6535 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6537 && spill_reg_store[reload_spill_index[j]] != 0
6538 /* This is unsafe if some other reload uses the same reg first. */
6539 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6540 reload_opnum[j], reload_when_needed[j])
6541 && dead_or_set_p (insn, reload_in[j])
6542 /* This is unsafe if operand occurs more than once in current
6543 insn. Perhaps some occurrences weren't reloaded. */
6544 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6545 delete_output_reload (insn, j,
6546 spill_reg_store[reload_spill_index[j]]);
6548 /* Input-reloading is done. Now do output-reloading,
6549 storing the value from the reload-register after the main insn
6550 if reload_out[j] is nonzero.
6552 ??? At some point we need to support handling output reloads of
6553 JUMP_INSNs or insns that set cc0. */
6554 old = reload_out[j];
6556 && reload_reg_rtx[j] != old
6557 && reload_reg_rtx[j] != 0)
6559 register rtx reloadreg = reload_reg_rtx[j];
6560 register rtx second_reloadreg = 0;
6562 enum machine_mode mode;
6565 /* An output operand that dies right away does need a reload,
6566 but need not be copied from it. Show the new location in the
6568 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6569 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6571 XEXP (note, 0) = reload_reg_rtx[j];
6574 /* Likewise for a SUBREG of an operand that dies. */
6575 else if (GET_CODE (old) == SUBREG
6576 && GET_CODE (SUBREG_REG (old)) == REG
6577 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6580 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6584 else if (GET_CODE (old) == SCRATCH)
6585 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6586 but we don't want to make an output reload. */
6590 /* Strip off of OLD any size-increasing SUBREGs such as
6591 (SUBREG:SI foo:QI 0). */
6593 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6594 && (GET_MODE_SIZE (GET_MODE (old))
6595 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6596 old = SUBREG_REG (old);
6599 /* If is a JUMP_INSN, we can't support output reloads yet. */
6600 if (GET_CODE (insn) == JUMP_INSN)
6603 if (reload_when_needed[j] == RELOAD_OTHER)
6606 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6608 /* Determine the mode to reload in.
6609 See comments above (for input reloading). */
6611 mode = GET_MODE (old);
6612 if (mode == VOIDmode)
6614 /* VOIDmode should never happen for an output. */
6615 if (asm_noperands (PATTERN (insn)) < 0)
6616 /* It's the compiler's fault. */
6617 fatal_insn ("VOIDmode on an output", insn);
6618 error_for_asm (insn, "output operand is constant in `asm'");
6619 /* Prevent crash--use something we know is valid. */
6621 old = gen_rtx (REG, mode, REGNO (reloadreg));
6624 if (GET_MODE (reloadreg) != mode)
6625 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6627 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6629 /* If we need two reload regs, set RELOADREG to the intermediate
6630 one, since it will be stored into OLD. We might need a secondary
6631 register only for an input reload, so check again here. */
6633 if (reload_secondary_out_reload[j] >= 0)
6637 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6638 && reg_equiv_mem[REGNO (old)] != 0)
6639 real_old = reg_equiv_mem[REGNO (old)];
6641 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6645 second_reloadreg = reloadreg;
6646 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6648 /* See if RELOADREG is to be used as a scratch register
6649 or as an intermediate register. */
6650 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6652 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6653 (real_old, second_reloadreg, reloadreg)));
6658 /* See if we need both a scratch and intermediate reload
6661 int secondary_reload = reload_secondary_out_reload[j];
6662 enum insn_code tertiary_icode
6663 = reload_secondary_out_icode[secondary_reload];
6665 if (GET_MODE (reloadreg) != mode)
6666 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6668 if (tertiary_icode != CODE_FOR_nothing)
6671 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6674 /* Copy primary reload reg to secondary reload reg.
6675 (Note that these have been swapped above, then
6676 secondary reload reg to OLD using our insn. */
6678 /* If REAL_OLD is a paradoxical SUBREG, remove it
6679 and try to put the opposite SUBREG on
6681 if (GET_CODE (real_old) == SUBREG
6682 && (GET_MODE_SIZE (GET_MODE (real_old))
6683 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6684 && 0 != (tem = gen_lowpart_common
6685 (GET_MODE (SUBREG_REG (real_old)),
6687 real_old = SUBREG_REG (real_old), reloadreg = tem;
6689 gen_reload (reloadreg, second_reloadreg,
6690 reload_opnum[j], reload_when_needed[j]);
6691 emit_insn ((GEN_FCN (tertiary_icode)
6692 (real_old, reloadreg, third_reloadreg)));
6697 /* Copy between the reload regs here and then to
6700 gen_reload (reloadreg, second_reloadreg,
6701 reload_opnum[j], reload_when_needed[j]);
6707 /* Output the last reload insn. */
6709 gen_reload (old, reloadreg, reload_opnum[j],
6710 reload_when_needed[j]);
6712 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6713 /* If final will look at death notes for this reg,
6714 put one on the last output-reload insn to use it. Similarly
6715 for any secondary register. */
6716 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6717 for (p = get_last_insn (); p; p = PREV_INSN (p))
6718 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6719 && reg_overlap_mentioned_for_reload_p (reloadreg,
6721 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6722 reloadreg, REG_NOTES (p));
6724 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6725 if (! special && second_reloadreg
6726 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6727 for (p = get_last_insn (); p; p = PREV_INSN (p))
6728 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6729 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6731 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6732 second_reloadreg, REG_NOTES (p));
6735 /* Look at all insns we emitted, just to be safe. */
6736 for (p = get_insns (); p; p = NEXT_INSN (p))
6737 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6739 /* If this output reload doesn't come from a spill reg,
6740 clear any memory of reloaded copies of the pseudo reg.
6741 If this output reload comes from a spill reg,
6742 reg_has_output_reload will make this do nothing. */
6743 note_stores (PATTERN (p), forget_old_reloads_1);
6745 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6746 && reload_spill_index[j] >= 0)
6747 new_spill_reg_store[reload_spill_index[j]] = p;
6750 if (reload_when_needed[j] == RELOAD_OTHER)
6752 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6753 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6756 output_reload_insns[reload_opnum[j]] = get_insns ();
6762 /* Now write all the insns we made for reloads in the order expected by
6763 the allocation functions. Prior to the insn being reloaded, we write
6764 the following reloads:
6766 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6768 RELOAD_OTHER reloads.
6770 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6771 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6772 RELOAD_FOR_INPUT reload for the operand.
6774 RELOAD_FOR_OPADDR_ADDRS reloads.
6776 RELOAD_FOR_OPERAND_ADDRESS reloads.
6778 After the insn being reloaded, we write the following:
6780 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
6781 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
6782 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6783 reloads for the operand. The RELOAD_OTHER output reloads are
6784 output in descending order by reload number. */
6786 emit_insns_before (other_input_address_reload_insns, before_insn);
6787 emit_insns_before (other_input_reload_insns, before_insn);
6789 for (j = 0; j < reload_n_operands; j++)
6791 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
6792 emit_insns_before (input_address_reload_insns[j], before_insn);
6793 emit_insns_before (input_reload_insns[j], before_insn);
6796 emit_insns_before (other_operand_reload_insns, before_insn);
6797 emit_insns_before (operand_reload_insns, before_insn);
6799 for (j = 0; j < reload_n_operands; j++)
6801 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
6802 emit_insns_before (output_address_reload_insns[j], following_insn);
6803 emit_insns_before (output_reload_insns[j], following_insn);
6804 emit_insns_before (other_output_reload_insns[j], following_insn);
6807 /* Move death notes from INSN
6808 to output-operand-address and output reload insns. */
6809 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6812 /* Loop over those insns, last ones first. */
6813 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6814 insn1 = PREV_INSN (insn1))
6815 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6817 rtx source = SET_SRC (PATTERN (insn1));
6818 rtx dest = SET_DEST (PATTERN (insn1));
6820 /* The note we will examine next. */
6821 rtx reg_notes = REG_NOTES (insn);
6822 /* The place that pointed to this note. */
6823			      rtx *prev_reg_note = &REG_NOTES (insn);
6825 /* If the note is for something used in the source of this
6826 reload insn, or in the output address, move the note. */
6829 rtx next_reg_notes = XEXP (reg_notes, 1);
6830 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6831 && GET_CODE (XEXP (reg_notes, 0)) == REG
6832 && ((GET_CODE (dest) != REG
6833 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6835 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6838 *prev_reg_note = next_reg_notes;
6839 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6840 REG_NOTES (insn1) = reg_notes;
6843 prev_reg_note = &XEXP (reg_notes, 1);
6845 reg_notes = next_reg_notes;
6851 /* For all the spill regs newly reloaded in this instruction,
6852 record what they were reloaded from, so subsequent instructions
6853 can inherit the reloads.
6855 Update spill_reg_store for the reloads of this insn.
6856 Copy the elements that were updated in the loop above. */
6858 for (j = 0; j < n_reloads; j++)
6860 register int r = reload_order[j];
6861 register int i = reload_spill_index[r];
6863 /* I is nonneg if this reload used one of the spill regs.
6864 If reload_reg_rtx[r] is 0, this is an optional reload
6865 that we opted to ignore. */
6867 if (i >= 0 && reload_reg_rtx[r] != 0)
6870 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6872 int part_reaches_end = 0;
6873 int all_reaches_end = 1;
6875 /* For a multi register reload, we need to check if all or part
6876 of the value lives to the end. */
6877 for (k = 0; k < nr; k++)
6879 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6880 reload_when_needed[r]))
6881 part_reaches_end = 1;
6883 all_reaches_end = 0;
6886 /* Ignore reloads that don't reach the end of the insn in
6888 if (all_reaches_end)
6890 /* First, clear out memory of what used to be in this spill reg.
6891 If consecutive registers are used, clear them all. */
6893 for (k = 0; k < nr; k++)
6895 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6896 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6899 /* Maybe the spill reg contains a copy of reload_out. */
6900 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6902 register int nregno = REGNO (reload_out[r]);
6903 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6904 : HARD_REGNO_NREGS (nregno,
6905 GET_MODE (reload_reg_rtx[r])));
6907 spill_reg_store[i] = new_spill_reg_store[i];
6908 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6910 /* If NREGNO is a hard register, it may occupy more than
6911 one register. If it does, say what is in the
6912 rest of the registers assuming that both registers
6913 agree on how many words the object takes. If not,
6914 invalidate the subsequent registers. */
6916 if (nregno < FIRST_PSEUDO_REGISTER)
6917 for (k = 1; k < nnr; k++)
6918 reg_last_reload_reg[nregno + k]
6921 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6922 REGNO (reload_reg_rtx[r]) + k)
6925 /* Now do the inverse operation. */
6926 for (k = 0; k < nr; k++)
6928 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6929 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6932 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6936 /* Maybe the spill reg contains a copy of reload_in. Only do
6937 something if there will not be an output reload for
6938 the register being reloaded. */
6939 else if (reload_out[r] == 0
6940 && reload_in[r] != 0
6941 && ((GET_CODE (reload_in[r]) == REG
6942 && ! reg_has_output_reload[REGNO (reload_in[r])])
6943 || (GET_CODE (reload_in_reg[r]) == REG
6944 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6946 register int nregno;
6949 if (GET_CODE (reload_in[r]) == REG)
6950 nregno = REGNO (reload_in[r]);
6952 nregno = REGNO (reload_in_reg[r]);
6954 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6955 : HARD_REGNO_NREGS (nregno,
6956 GET_MODE (reload_reg_rtx[r])));
6958 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6960 if (nregno < FIRST_PSEUDO_REGISTER)
6961 for (k = 1; k < nnr; k++)
6962 reg_last_reload_reg[nregno + k]
6965 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6966 REGNO (reload_reg_rtx[r]) + k)
6969 /* Unless we inherited this reload, show we haven't
6970 recently done a store. */
6971 if (! reload_inherited[r])
6972 spill_reg_store[i] = 0;
6974 for (k = 0; k < nr; k++)
6976 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6977 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6980 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6986 /* However, if part of the reload reaches the end, then we must
6987 invalidate the old info for the part that survives to the end. */
6988 else if (part_reaches_end)
6990 for (k = 0; k < nr; k++)
6991 if (reload_reg_reaches_end_p (spill_regs[i] + k,
6993 reload_when_needed[r]))
6995 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6996 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
7001 /* The following if-statement was #if 0'd in 1.34 (or before...).
7002 It's reenabled in 1.35 because supposedly nothing else
7003 deals with this problem. */
7005 /* If a register gets output-reloaded from a non-spill register,
7006 that invalidates any previous reloaded copy of it.
7007 But forget_old_reloads_1 won't get to see it, because
7008 it thinks only about the original insn. So invalidate it here. */
7009 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7011 register int nregno = REGNO (reload_out[r]);
7012 if (nregno >= FIRST_PSEUDO_REGISTER)
7013 reg_last_reload_reg[nregno] = 0;
7016 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
7018 while (num_regs-- > 0)
7019 reg_last_reload_reg[nregno + num_regs] = 0;
7025 /* Emit code to perform a reload from IN (which may be a reload register) to
7026 OUT (which may also be a reload register). IN or OUT is from operand
7027 OPNUM with reload type TYPE.
7029 Returns first insn emitted. */
7032 gen_reload (out, in, opnum, type)
7036 enum reload_type type;
7038 rtx last = get_last_insn ();
7041 /* If IN is a paradoxical SUBREG, remove it and try to put the
7042 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7043 if (GET_CODE (in) == SUBREG
7044 && (GET_MODE_SIZE (GET_MODE (in))
7045 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7046 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7047 in = SUBREG_REG (in), out = tem;
7048 else if (GET_CODE (out) == SUBREG
7049 && (GET_MODE_SIZE (GET_MODE (out))
7050 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7051 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7052 out = SUBREG_REG (out), in = tem;
7054 /* How to do this reload can get quite tricky. Normally, we are being
7055 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7056 register that didn't get a hard register. In that case we can just
7057 call emit_move_insn.
7059 We can also be asked to reload a PLUS that adds a register or a MEM to
7060 another register, constant or MEM. This can occur during frame pointer
7061 elimination and while reloading addresses. This case is handled by
7062 trying to emit a single insn to perform the add. If it is not valid,
7063 we use a two insn sequence.
7065 Finally, we could be called to handle an 'o' constraint by putting
7066 an address into a register. In that case, we first try to do this
7067 with a named pattern of "reload_load_address". If no such pattern
7068 exists, we just emit a SET insn and hope for the best (it will normally
7069 be valid on machines that use 'o').
7071 This entire process is made complex because reload will never
7072 process the insns we generate here and so we must ensure that
7073 they will fit their constraints and also by the fact that parts of
7074 IN might be being reloaded separately and replaced with spill registers.
7075 Because of this, we are, in some sense, just guessing the right approach
7076 here. The one listed above seems to work.
7078 ??? At some point, this whole thing needs to be rethought. */
7080 if (GET_CODE (in) == PLUS
7081 && (GET_CODE (XEXP (in, 0)) == REG
7082 || GET_CODE (XEXP (in, 0)) == SUBREG
7083 || GET_CODE (XEXP (in, 0)) == MEM)
7084 && (GET_CODE (XEXP (in, 1)) == REG
7085 || GET_CODE (XEXP (in, 1)) == SUBREG
7086 || CONSTANT_P (XEXP (in, 1))
7087 || GET_CODE (XEXP (in, 1)) == MEM))
7089 /* We need to compute the sum of a register or a MEM and another
7090 register, constant, or MEM, and put it into the reload
7091 register. The best possible way of doing this is if the machine
7092 has a three-operand ADD insn that accepts the required operands.
7094 The simplest approach is to try to generate such an insn and see if it
7095 is recognized and matches its constraints. If so, it can be used.
7097 It might be better not to actually emit the insn unless it is valid,
7098 but we need to pass the insn as an operand to `recog' and
7099 `insn_extract' and it is simpler to emit and then delete the insn if
7100 not valid than to dummy things up. */
7102 rtx op0, op1, tem, insn;
7105 op0 = find_replacement (&XEXP (in, 0));
7106 op1 = find_replacement (&XEXP (in, 1));
7108 /* Since constraint checking is strict, commutativity won't be
7109 checked, so we need to do that here to avoid spurious failure
7110 if the add instruction is two-address and the second operand
7111 of the add is the same as the reload reg, which is frequently
7112 the case. If the insn would be A = B + A, rearrange it so
7113 it will be A = A + B as constrain_operands expects. */
7115 if (GET_CODE (XEXP (in, 1)) == REG
7116 && REGNO (out) == REGNO (XEXP (in, 1)))
7117 tem = op0, op0 = op1, op1 = tem;
7119 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7120 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
7122 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
7123 code = recog_memoized (insn);
7127 insn_extract (insn);
7128 /* We want constrain operands to treat this insn strictly in
7129 its validity determination, i.e., the way it would after reload
7131 if (constrain_operands (code, 1))
7135 delete_insns_since (last);
7137 /* If that failed, we must use a conservative two-insn sequence.
7138 use move to copy constant, MEM, or pseudo register to the reload
7139 register since "move" will be able to handle an arbitrary operand,
7140 unlike add which can't, in general. Then add the registers.
7142 If there is another way to do this for a specific machine, a
7143 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7146 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7147 || (GET_CODE (op1) == REG
7148 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7149 tem = op0, op0 = op1, op1 = tem;
7151 gen_reload (out, op0, opnum, type);
7153 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7154 This fixes a problem on the 32K where the stack pointer cannot
7155 be used as an operand of an add insn. */
7157 if (rtx_equal_p (op0, op1))
7160 insn = emit_insn (gen_add2_insn (out, op1));
7162 /* If that failed, copy the address register to the reload register.
7163 Then add the constant to the reload register. */
7165 code = recog_memoized (insn);
7169 insn_extract (insn);
7170 /* We want constrain operands to treat this insn strictly in
7171 its validity determination, i.e., the way it would after reload
7173 if (constrain_operands (code, 1))
7177 delete_insns_since (last);
7179 gen_reload (out, op1, opnum, type);
7180 emit_insn (gen_add2_insn (out, op0));
7183 #ifdef SECONDARY_MEMORY_NEEDED
7184 /* If we need a memory location to do the move, do it that way. */
7185 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7186 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7187 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7188 REGNO_REG_CLASS (REGNO (out)),
7191 /* Get the memory to use and rewrite both registers to its mode. */
7192 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7194 if (GET_MODE (loc) != GET_MODE (out))
7195 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
7197 if (GET_MODE (loc) != GET_MODE (in))
7198 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
7200 gen_reload (loc, in, opnum, type);
7201 gen_reload (out, loc, opnum, type);
7205 /* If IN is a simple operand, use gen_move_insn. */
7206 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7207 emit_insn (gen_move_insn (out, in));
7209 #ifdef HAVE_reload_load_address
7210 else if (HAVE_reload_load_address)
7211 emit_insn (gen_reload_load_address (out, in));
7214 /* Otherwise, just write (set OUT IN) and hope for the best. */
7216 emit_insn (gen_rtx (SET, VOIDmode, out, in));
7218 /* Return the first insn emitted.
7219 We can not just return get_last_insn, because there may have
7220 been multiple instructions emitted. Also note that gen_move_insn may
7221 emit more than one insn itself, so we can not assume that there is one
7222 insn emitted per emit_insn_before call. */
7224 return last ? NEXT_INSN (last) : get_insns ();
7227 /* Delete a previously made output-reload
7228 whose result we now believe is not needed.
7229 First we double-check.
7231 INSN is the insn now being processed.
7232 OUTPUT_RELOAD_INSN is the insn of the output reload.
7233 J is the reload-number for this insn. */
7236 delete_output_reload (insn, j, output_reload_insn)
7239 rtx output_reload_insn;
7243 /* Get the raw pseudo-register referred to. */
/* Strip SUBREGs so we look at the underlying pseudo itself. */
7245 rtx reg = reload_in[j];
7246 while (GET_CODE (reg) == SUBREG)
7247 reg = SUBREG_REG (reg);
7249 /* If the pseudo-reg we are reloading is no longer referenced
7250 anywhere between the store into it and here,
7251 and no jumps or labels intervene, then the value can get
7252 here through the reload reg alone.
7253 Otherwise, give up--return. */
7254 for (i1 = NEXT_INSN (output_reload_insn);
7255 i1 != insn; i1 = NEXT_INSN (i1))
7257 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7259 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7260 && reg_mentioned_p (reg, PATTERN (i1)))
/* Stores into this pseudo may never be omitted (per cannot_omit_stores);
   in that case the output reload must be kept. */
7264 if (cannot_omit_stores[REGNO (reg)])
7267 /* If this insn will store in the pseudo again,
7268 the previous store can be removed. */
7269 if (reload_out[j] == reload_in[j])
7270 delete_insn (output_reload_insn);
7272 /* See if the pseudo reg has been completely replaced
7273 with reload regs. If so, delete the store insn
7274 and forget we had a stack slot for the pseudo. */
7275 else if (reg_n_deaths[REGNO (reg)] == 1
7276 && reg_basic_block[REGNO (reg)] >= 0
7277 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7281 /* We know that it was used only between here
7282 and the beginning of the current basic block.
7283 (We also know that the last use before INSN was
7284 the output reload we are thinking of deleting, but never mind that.)
7285 Search that range; see if any ref remains. */
7286 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7288 rtx set = single_set (i2);
7290 /* Uses which just store in the pseudo don't count,
7291 since if they are the only uses, they are dead. */
7292 if (set != 0 && SET_DEST (set) == reg)
7294 if (GET_CODE (i2) == CODE_LABEL
7295 || GET_CODE (i2) == JUMP_INSN)
7297 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7298 && reg_mentioned_p (reg, PATTERN (i2)))
7299 /* Some other ref remains;
7300 we can't do anything. */
7304 /* Delete the now-dead stores into this pseudo. */
/* Second backward walk over the same range, this time turning each
   dead store into a NOTE_INSN_DELETED note. */
7305 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7307 rtx set = single_set (i2);
7309 if (set != 0 && SET_DEST (set) == reg)
7311 /* This might be a basic block head,
7312 thus don't use delete_insn. */
7313 PUT_CODE (i2, NOTE);
7314 NOTE_SOURCE_FILE (i2) = 0;
7315 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7317 if (GET_CODE (i2) == CODE_LABEL
7318 || GET_CODE (i2) == JUMP_INSN)
7322 /* For the debugging info,
7323 say the pseudo lives in this reload reg. */
7324 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7325 alter_reg (REGNO (reg), -1);
7329 /* Output reload-insns to reload VALUE into RELOADREG.
7330 VALUE is an autoincrement or autodecrement RTX whose operand
7331 is a register or memory location;
7332 so reloading involves incrementing that location.
7334 INC_AMOUNT is the number to increment or decrement by (always positive).
7335 This cannot be deduced from VALUE. */
7338 inc_for_reload (reloadreg, value, inc_amount)
7343 /* REG or MEM to be copied and incremented. */
7344 rtx incloc = XEXP (value, 0);
7345 /* Nonzero if increment after copying. */
7346 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7352 /* No hard register is equivalent to this register after
7353 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7354 we could inc/dec that register as well (maybe even using it for
7355 the source), but I'm not sure it's worth worrying about. */
7356 if (GET_CODE (incloc) == REG)
7357 reg_last_reload_reg[REGNO (incloc)] = 0;
/* For decrements, negate the (always-positive) amount once up front. */
7359 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7360 inc_amount = - inc_amount;
7362 inc = GEN_INT (inc_amount);
7364 /* If this is post-increment, first copy the location to the reload reg. */
7366 emit_insn (gen_move_insn (reloadreg, incloc));
7368 /* See if we can directly increment INCLOC. Use a method similar to that
/* Emit a tentative (set incloc (plus incloc inc)), then check with
   recog/constrain_operands whether the target really accepts it;
   if not, the insns since LAST are deleted below. */
7371 last = get_last_insn ();
7372 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7373 gen_rtx (PLUS, GET_MODE (incloc),
7376 code = recog_memoized (add_insn);
7379 insn_extract (add_insn);
7380 if (constrain_operands (code, 1))
7382 /* If this is a pre-increment and we have incremented the value
7383 where it lives, copy the incremented value to RELOADREG to
7384 be used as an address. */
7387 emit_insn (gen_move_insn (reloadreg, incloc));
7393 delete_insns_since (last);
7395 /* If couldn't do the increment directly, must increment in RELOADREG.
7396 The way we do this depends on whether this is pre- or post-increment.
7397 For pre-increment, copy INCLOC to the reload register, increment it
7398 there, then save back. */
7402 emit_insn (gen_move_insn (reloadreg, incloc));
7403 emit_insn (gen_add2_insn (reloadreg, inc));
7404 emit_insn (gen_move_insn (incloc, reloadreg));
7409 Because this might be a jump insn or a compare, and because RELOADREG
7410 may not be available after the insn in an input reload, we must do
7411 the incrementation before the insn being reloaded for.
7413 We have already copied INCLOC to RELOADREG. Increment the copy in
7414 RELOADREG, save that back, then decrement RELOADREG so it has
7415 the original value. */
7417 emit_insn (gen_add2_insn (reloadreg, inc));
7418 emit_insn (gen_move_insn (incloc, reloadreg));
7419 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7425 /* Return 1 if we are certain that the constraint-string STRING allows
7426 the hard register REG. Return 0 if we can't be sure of this. */
/* Conservative: any letter we don't understand makes us answer 0. */
7429 constraint_accepts_reg_p (string, reg)
/* Resolve REG to its hard register number (handles renumbered pseudos). */
7434 int regno = true_regnum (reg);
7437 /* Initialize for first alternative. */
7439 /* Check that each alternative contains `g' or `r'. */
7441 switch (c = *string++)
7444 /* If an alternative lacks `g' or `r', we lose. */
7447 /* If an alternative lacks `g' or `r', we lose. */
7450 /* Initialize for next alternative. */
7455 /* Any general reg wins for this alternative. */
7456 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7460 /* Any reg in specified class wins for this alternative. */
7462 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7464 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7470 /* Return the number of places FIND appears within X, but don't count
7471 an occurrence if some SET_DEST is FIND. */
/* Recursive walk over the rtx X, following 'e' (expression) and
   'E' (vector) format slots. */
7474 count_occurrences (x, find)
7475 register rtx x, find;
7478 register enum rtx_code code;
7479 register char *format_ptr;
7487 code = GET_CODE (x);
/* A SET whose destination is FIND: only count occurrences in the source. */
7502 if (SET_DEST (x) == find)
7503 return count_occurrences (SET_SRC (x), find);
7507 format_ptr = GET_RTX_FORMAT (code);
7510 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7512 switch (*format_ptr++)
7515 count += count_occurrences (XEXP (x, i), find);
7519 if (XVEC (x, i) != NULL)
7521 for (j = 0; j < XVECLEN (x, i); j++)
7522 count += count_occurrences (XVECEXP (x, i, j), find);
7530 /* This array holds values which are equivalent to a hard register
7531 during reload_cse_regs. Each array element is an EXPR_LIST of
7532 values. Each time a hard register is set, we set the corresponding
7533 array element to the value. Each time a hard register is copied
7534 into memory, we add the memory location to the corresponding array
7535 element. We don't store values or memory addresses with side
7536 effects in this array.
7538 If the value is a CONST_INT, then the mode of the containing
7539 EXPR_LIST is the mode in which that CONST_INT was referenced.
7541 We sometimes clobber a specific entry in a list. In that case, we
7542 just set XEXP (list-entry, 0) to 0. */
/* Indexed by hard register number; FIRST_PSEUDO_REGISTER entries are
   alloca'd (and the lists obstack-allocated) in reload_cse_regs. */
7544 static rtx *reg_values;
7546 /* Invalidate any entries in reg_values which depend on REGNO,
7547 including those for REGNO itself. This is called if REGNO is
7548 changing. If CLOBBER is true, then always forget anything we
7549 currently know about REGNO. MODE is the mode of the assignment to
7550 REGNO, which is used to determine how many hard registers are being
7551 changed. If MODE is VOIDmode, then only REGNO is being changed;
7552 this is used when invalidating call clobbered registers across a
7556 reload_cse_invalidate_regno (regno, mode, clobber)
7558 enum machine_mode mode;
7564 /* Our callers don't always go through true_regnum; we may see a
7565 pseudo-register here from a CLOBBER or the like. We probably
7566 won't ever see a pseudo-register that has a real register number,
7567 for we check anyhow for safety. */
7568 if (regno >= FIRST_PSEUDO_REGISTER)
7569 regno = reg_renumber[regno];
/* Compute the exclusive end of the hard-register range being changed. */
7573 if (mode == VOIDmode)
7574 endregno = regno + 1;
7576 endregno = regno + HARD_REGNO_NREGS (regno, mode);
/* First forget everything known about the registers in the range... */
7579 for (i = regno; i < endregno; i++)
/* ...then walk every hard register's value list and drop any entry
   that refers to a register in [regno, endregno). */
7582 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7586 for (x = reg_values[i]; x; x = XEXP (x, 1))
7588 if (XEXP (x, 0) != 0
7589 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_RTX))
7591 /* If this is the only entry on the list, clear
7592 reg_values[i]. Otherwise, just clear this entry on
7594 if (XEXP (x, 1) == 0 && x == reg_values[i])
7605 /* The memory at address (plus MEM_BASE MEM_OFFSET), where MEM_OFFSET
7606 is a CONST_INT, is being changed. MEM_MODE is the mode of the
7607 memory reference. Return whether this change will invalidate VAL. */
/* Recursive: VAL may be an arbitrary rtx; any MEM inside it that may
   overlap the store makes the whole value stale. */
7610 reload_cse_mem_conflict_p (mem_base, mem_offset, mem_mode, val)
7613 enum machine_mode mem_mode;
7620 code = GET_CODE (val);
7623 /* Get rid of a few simple cases quickly. */
7638 rtx val_base, val_offset;
/* BLKmode size is unknown, so we must assume a conflict. */
7640 if (mem_mode == BLKmode || GET_MODE (val) == BLKmode)
/* Split VAL's address into base + constant offset, like the caller
   did for the stored MEM. */
7643 val_offset = const0_rtx;
7644 val_base = eliminate_constant_term (XEXP (val, 0), &val_offset);
7646 /* If MEM_BASE and VAL_BASE are the same, but the offsets do
7647 not overlap, then we do not have a conflict on this MEM.
7648 For complete safety, we still need to check that VAL_BASE
7649 itself does not contain an overlapping MEM.
7651 We can't simplify the check to just OFFSET + SIZE <=
7652 OTHER_OFFSET, because SIZE might cause OFFSET to wrap from
7653 positive to negative. If we used unsigned arithmetic, we
7654 would have the same problem wrapping around zero. */
7656 if (rtx_equal_p (mem_base, val_base)
7657 && ((INTVAL (mem_offset) < INTVAL (val_offset)
7658 && (INTVAL (mem_offset) + GET_MODE_SIZE (mem_mode)
7659 <= INTVAL (val_offset)))
7660 || (INTVAL (val_offset) < INTVAL (mem_offset)
7661 && (INTVAL (val_offset) + GET_MODE_SIZE (GET_MODE (val))
7662 <= INTVAL (mem_offset)))))
7663 return reload_cse_mem_conflict_p (mem_base, mem_offset, mem_mode,
/* Generic case: recurse into every subexpression of VAL. */
7673 fmt = GET_RTX_FORMAT (code);
7675 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7679 if (reload_cse_mem_conflict_p (mem_base, mem_offset, mem_mode,
7683 else if (fmt[i] == 'E')
7687 for (j = 0; j < XVECLEN (val, i); j++)
7688 if (reload_cse_mem_conflict_p (mem_base, mem_offset, mem_mode,
7689 XVECEXP (val, i, j)))
7697 /* Invalidate any entries in reg_values which are changed because of a
7698 store to MEM_RTX. If this is called because of a non-const call
7699 instruction, MEM_RTX is (mem:BLK const0_rtx). */
7702 reload_cse_invalidate_mem (mem_rtx)
7706 rtx mem_base, mem_offset;
7707 enum machine_mode mem_mode;
7709 /* We detect certain cases where memory addresses can not conflict:
7710 if they use the same register, and the offsets do not overlap. */
/* Split the stored address into base + constant offset once, then let
   reload_cse_mem_conflict_p do the per-value overlap test. */
7712 mem_offset = const0_rtx;
7713 mem_base = eliminate_constant_term (XEXP (mem_rtx, 0), &mem_offset);
7714 mem_mode = GET_MODE (mem_rtx);
7716 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7720 for (x = reg_values[i]; x; x = XEXP (x, 1))
7722 if (XEXP (x, 0) != 0
7723 && reload_cse_mem_conflict_p (mem_base, mem_offset, mem_mode,
7726 /* If this is the only entry on the list, clear
7727 reg_values[i]. Otherwise, just clear this entry on
7729 if (XEXP (x, 1) == 0 && x == reg_values[i])
7740 /* Invalidate DEST, which is being assigned to or clobbered. The
7741 second parameter exists so that this function can be passed to
7742 note_stores; it is ignored. */
7745 reload_cse_invalidate_rtx (dest, ignore)
/* Peel wrappers to reach the actual REG or MEM being modified. */
7749 while (GET_CODE (dest) == STRICT_LOW_PART
7750 || GET_CODE (dest) == SIGN_EXTRACT
7751 || GET_CODE (dest) == ZERO_EXTRACT
7752 || GET_CODE (dest) == SUBREG)
7753 dest = XEXP (dest, 0);
/* Dispatch to the register or memory invalidator; other destinations
   carry no tracked state and are ignored. */
7755 if (GET_CODE (dest) == REG)
7756 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
7757 else if (GET_CODE (dest) == MEM)
7758 reload_cse_invalidate_mem (dest);
7761 /* Do a very simple CSE pass over the hard registers.
7763 This function detects no-op moves where we happened to assign two
7764 different pseudo-registers to the same hard register, and then
7765 copied one to the other. Reload will generate a useless
7766 instruction copying a register to itself.
7768 This function also detects cases where we load a value from memory
7769 into two different registers, and (if memory is more expensive than
7770 registers) changes it to simply copy the first register into the
7774 reload_cse_regs (first)
/* reg_values lives on the stack for the duration of this pass. */
7782 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7783 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7786 /* Create our EXPR_LIST structures on reload_obstack, so that we can
7787 free them when we are done. */
7788 push_obstacks (&reload_obstack, &reload_obstack);
7789 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
7791 /* We pass this to reload_cse_invalidate_mem to invalidate all of
7792 memory for a non-const call instruction. */
7793 callmem = gen_rtx (MEM, BLKmode, const0_rtx);
/* Single forward scan over the insn stream. */
7795 for (insn = first; insn; insn = NEXT_INSN (insn))
7799 if (GET_CODE (insn) == CODE_LABEL)
7801 /* Forget all the register values at a code label. We don't
7802 try to do anything clever around jumps. */
7803 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7809 #ifdef NON_SAVING_SETJMP
7810 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
7811 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
7813 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* Skip anything that is not a real instruction. */
7820 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7823 /* If this is a call instruction, forget anything stored in a
7824 call clobbered register, or, if this is not a const call, in
7826 if (GET_CODE (insn) == CALL_INSN)
7828 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7829 if (call_used_regs[i])
7830 reload_cse_invalidate_regno (i, VOIDmode, 1);
7832 if (! CONST_CALL_P (insn))
7833 reload_cse_invalidate_mem (callmem);
7836 body = PATTERN (insn);
7837 if (GET_CODE (body) == SET)
7839 if (reload_cse_noop_set_p (body))
7841 /* If we were preserving death notes, then we would want
7842 to remove any existing death note for the register
/* Turn the useless insn into a deleted-insn note rather than
   unlinking it (cf. the basic-block-head caveat elsewhere). */
7844 PUT_CODE (insn, NOTE);
7845 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7846 NOTE_SOURCE_FILE (insn) = 0;
7848 /* We're done with this insn. */
7852 reload_cse_simplify_set (body, insn);
7853 reload_cse_record_set (body, body);
7855 else if (GET_CODE (body) == PARALLEL)
7859 /* If every action in a PARALLEL is a noop, we can delete
7860 the entire PARALLEL. */
7861 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7862 if (GET_CODE (XVECEXP (body, 0, i)) != SET
7863 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i)))
7867 /* If we were preserving death notes, then we would want
7868 to remove any existing death notes for the registers
7870 PUT_CODE (insn, NOTE);
7871 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7872 NOTE_SOURCE_FILE (insn) = 0;
7874 /* We're done with this insn. */
7878 /* Look through the PARALLEL and record the values being
7879 set, if possible. Also handle any CLOBBERs. */
7880 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7882 rtx x = XVECEXP (body, 0, i);
7884 if (GET_CODE (x) == SET)
7885 reload_cse_record_set (x, body);
7887 note_stores (x, reload_cse_invalidate_rtx);
7891 note_stores (body, reload_cse_invalidate_rtx);
7894 /* Clobber any registers which appear in REG_INC notes. We
7895 could keep track of the changes to their values, but it is
7896 unlikely to help. */
7900 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
7901 if (REG_NOTE_KIND (x) == REG_INC)
7902 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
7906 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
7907 after we have processed the insn. */
7908 if (GET_CODE (insn) == CALL_INSN)
7912 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
7913 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
7914 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
7918 /* Free all the temporary structures we created, and go back to the
7919 regular obstacks. */
7920 obstack_free (&reload_obstack, firstobj);
7924 /* Return whether the values known for REGNO are equal to VAL. MODE
7925 is the mode of the object that VAL is being copied to; this matters
7926 if VAL is a CONST_INT. */
/* For CONST_INT the EXPR_LIST mode records the mode the constant was
   referenced in; a narrower MODE is acceptable only when truncation
   from the recorded mode is a no-op on this target. */
7929 reload_cse_regno_equal_p (regno, val, mode)
7932 enum machine_mode mode;
7939 for (x = reg_values[regno]; x; x = XEXP (x, 1))
7940 if (XEXP (x, 0) != 0
7941 && rtx_equal_p (XEXP (x, 0), val)
7942 && (GET_CODE (val) != CONST_INT
7943 || mode == GET_MODE (x)
7944 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
7945 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
7946 GET_MODE_BITSIZE (GET_MODE (x))))))
7952 /* See whether a single SET instruction is a no-op. */
7955 reload_cse_noop_set_p (set)
7959 enum machine_mode dest_mode;
7962 src = SET_SRC (set);
7963 dest = SET_DEST (set);
7964 dest_mode = GET_MODE (dest);
/* A source with side effects can never be a no-op copy. */
7966 if (side_effects_p (src))
7969 dreg = true_regnum (dest);
7970 sreg = true_regnum (src);
7974 /* Check for setting a register to itself. */
7978 /* Check for setting a register to a value which we already know
7979 is in the register. */
7980 if (reload_cse_regno_equal_p (dreg, src, dest_mode))
7983 /* Check for setting a register DREG to another register SREG
7984 where SREG is equal to a value which is already in DREG. */
7989 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
7990 if (XEXP (x, 0) != 0
7991 && reload_cse_regno_equal_p (dreg, XEXP (x, 0), dest_mode))
7995 else if (GET_CODE (dest) == MEM)
7997 /* Check for storing a register to memory when we know that the
7998 register is equivalent to the memory location. */
8000 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8001 && ! side_effects_p (dest))
8008 /* Try to simplify a single SET instruction. SET is the set pattern.
8009 INSN is the instruction it came from. */
8012 reload_cse_simplify_set (set, insn)
8018 enum machine_mode dest_mode;
8019 enum reg_class dclass;
8022 /* We only handle one case: if we set a register to a value which is
8023 not a register, we try to find that value in some other register
8024 and change the set into a register copy. */
8026 dreg = true_regnum (SET_DEST (set));
8030 src = SET_SRC (set);
/* Bail out if the source has side effects or is already a register. */
8031 if (side_effects_p (src) || true_regnum (src) >= 0)
8034 /* If memory loads are cheaper than register copies, don't change
8036 if (GET_CODE (src) == MEM && MEMORY_MOVE_COST (GET_MODE (src)) < 2)
8039 dest_mode = GET_MODE (SET_DEST (set));
8040 dclass = REGNO_REG_CLASS (dreg);
/* Scan the hard registers for one known to hold SRC whose copy into
   DCLASS is as cheap as a plain register move (cost 2). */
8041 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8044 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8045 && reload_cse_regno_equal_p (i, src, dest_mode))
8049 /* Pop back to the real obstacks while changing the insn. */
/* validate_change re-recognizes the insn; the replacement is only
   kept if the register-copy form is valid for the target. */
8052 validated = validate_change (insn, &SET_SRC (set),
8053 gen_rtx (REG, dest_mode, i), 0);
8055 /* Go back to the obstack we are using for temporary
8057 push_obstacks (&reload_obstack, &reload_obstack);
8065 /* These two variables are used to pass information from
8066 reload_cse_record_set to reload_cse_check_clobber. */
/* Flag set nonzero when a store overlapping reload_cse_check_src is seen. */
8068 static int reload_cse_check_clobbered;
/* The SET_SRC being watched for clobbers during a note_stores walk. */
8069 static rtx reload_cse_check_src;
8071 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
8072 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
8073 second argument, which is passed by note_stores, is ignored. */
8076 reload_cse_check_clobber (dest, ignore)
8080 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8081 reload_cse_check_clobbered = 1;
8084 /* Record the result of a SET instruction. SET is the set pattern.
8085 BODY is the pattern of the insn that it came from. */
8088 reload_cse_record_set (set, body)
8094 enum machine_mode dest_mode;
8096 dest = SET_DEST (set);
8097 src = SET_SRC (set);
8098 dreg = true_regnum (dest);
8099 sreg = true_regnum (src);
8100 dest_mode = GET_MODE (dest);
8102 /* We can only handle an assignment to a register, or a store of a
8103 register to a memory location. For other cases, we just clobber
8104 the destination. We also have to just clobber if there are side
8105 effects in SRC or DEST. */
8106 if ((dreg < 0 && GET_CODE (dest) != MEM)
8107 || side_effects_p (src)
8108 || side_effects_p (dest))
8110 reload_cse_invalidate_rtx (dest, NULL_RTX);
8115 /* We don't try to handle values involving CC, because it's a pain
8116 to keep track of when they have to be invalidated. */
8117 if (reg_mentioned_p (cc0_rtx, src)
8118 || reg_mentioned_p (cc0_rtx, dest))
8120 reload_cse_invalidate_rtx (dest, NULL_RTX);
8125 /* If BODY is a PARALLEL, then we need to see whether the source of
8126 SET is clobbered by some other instruction in the PARALLEL. */
8127 if (GET_CODE (body) == PARALLEL)
8131 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8135 x = XVECEXP (body, 0, i);
8139 reload_cse_check_clobbered = 0;
8140 reload_cse_check_src = src;
8141 note_stores (x, reload_cse_check_clobber);
8142 if (reload_cse_check_clobbered)
8144 reload_cse_invalidate_rtx (dest, NULL_RTX);
8154 /* This is an assignment to a register. Update the value we
8155 have stored for the register. */
8157 reg_values[dreg] = reg_values[sreg];
8159 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, src, NULL_RTX);
8161 /* We've changed DREG, so invalidate any values held by other
8162 registers that depend upon it. */
8163 reload_cse_invalidate_regno (dreg, dest_mode, 0);
8165 /* If this assignment changes more than one hard register,
8166 forget anything we know about the others. */
8167 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
8168 reg_values[dreg + i] = 0;
8170 else if (GET_CODE (dest) == MEM)
8172 /* Invalidate conflicting memory locations. */
8173 reload_cse_invalidate_mem (dest);
8175 /* If we're storing a register to memory, add DEST to the list
8177 if (sreg >= 0 && ! side_effects_p (dest))
8178 reg_values[sreg] = gen_rtx (EXPR_LIST, dest_mode, dest,
8183 /* We should have bailed out earlier. */