1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
32 #include "hard-reg-set.h"
35 #include "basic-block.h"
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
164 (spill_reg_order prevents these registers from being used to start a
166 static HARD_REG_SET bad_spill_regs;
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
173 /* This reg set indicates those registers that have been used as spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
178 HARD_REG_SET used_spill_regs;
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
183 static int last_spill_reg;
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
207 /* Indexed by pseudo reg number N,
208 says may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
220 static char spill_indirect_levels;
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
226 char indirect_symref_ok;
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
230 char double_reg_address_ok;
232 /* Record the stack slot for each spilled hard register. */
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
236 /* Width allocated so far for that stack slot. */
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
245 char *basic_block_needs[N_REG_CLASSES];
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
254 int caller_save_needed;
256 /* The register class to use for a base register when reloading an
257 address. This is normally BASE_REG_CLASS, but it may be different
258 when using SMALL_REGISTER_CLASSES and passing parameters in
260 enum reg_class reload_address_base_reg_class;
262 /* The register class to use for an index register when reloading an
263 address. This is normally INDEX_REG_CLASS, but it may be different
264 when using SMALL_REGISTER_CLASSES and passing parameters in
266 enum reg_class reload_address_index_reg_class;
268 /* Set to 1 while reload_as_needed is operating.
269 Required by some machines to handle any generated moves differently. */
271 int reload_in_progress = 0;
273 /* These arrays record the insn_code of insns that may be needed to
274 perform input and output reloads of special objects. They provide a
275 place to pass a scratch register. */
277 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
280 /* This obstack is used for allocation of rtl during register elimination.
281 The allocated storage can be freed once find_reloads has processed the
284 struct obstack reload_obstack;
285 char *reload_firstobj;
287 #define obstack_chunk_alloc xmalloc
288 #define obstack_chunk_free free
290 /* List of labels that must never be deleted. */
291 extern rtx forced_labels;
293 /* Allocation number table from global register allocation. */
294 extern int *reg_allocno;
296 /* This structure is used to record information about register eliminations.
297 Each array entry describes one possible way of eliminating a register
298 in favor of another. If there is more than one way of eliminating a
299 particular register, the most preferred should be specified first. */
301 static struct elim_table
303 int from; /* Register number to be eliminated. */
304 int to; /* Register number used as replacement. */
305 int initial_offset; /* Initial difference between values. */
306 int can_eliminate; /* Non-zero if this elimination can be done. */
307 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
308 insns made by reload. */
309 int offset; /* Current offset between the two regs. */
310 int max_offset; /* Maximum offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
321 /* If a set of eliminable registers was specified, define the table from it.
322 Otherwise, default to the normal case of the frame pointer being
323 replaced by the stack pointer. */
325 #ifdef ELIMINABLE_REGS
328 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
331 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
333 /* Record the number of pending eliminations that have an offset not equal
334 to their initial offset. If non-zero, we use a new copy of each
335 replacement result in any insns encountered. */
336 static int num_not_at_initial_offset;
338 /* Count the number of registers that we may be able to eliminate. */
339 static int num_eliminable;
341 /* For each label, we record the offset of each elimination. If we reach
342 a label by more than one path and an offset differs, we cannot do the
343 elimination. This information is indexed by the number of the label.
344 The first table is an array of flags that records whether we have yet
345 encountered a label and the second table is an array of arrays, one
346 entry in the latter array for each elimination. */
348 static char *offsets_known_at;
349 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
351 /* Number of labels in the current function. */
353 static int num_labels;
355 struct hard_reg_n_uses { int regno; int uses; }; /* Hard reg number paired with its use count — presumably the qsort element type for hard_reg_use_compare when ordering potential reload regs; TODO confirm against order_regs_for_reload. */
357 static int possible_group_p PROTO((int, int *));
358 static void count_possible_groups PROTO((int *, enum machine_mode *,
360 static int modes_equiv_for_class_p PROTO((enum machine_mode,
363 static void spill_failure PROTO((rtx));
364 static int new_spill_reg PROTO((int, int, int *, int *, int,
366 static void delete_dead_insn PROTO((rtx));
367 static void alter_reg PROTO((int, int));
368 static void mark_scratch_live PROTO((rtx));
369 static void set_label_offsets PROTO((rtx, rtx, int));
370 static int eliminate_regs_in_insn PROTO((rtx, int));
371 static void mark_not_eliminable PROTO((rtx, rtx));
372 static int spill_hard_reg PROTO((int, int, FILE *, int));
373 static void scan_paradoxical_subregs PROTO((rtx));
374 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void order_regs_for_reload PROTO((int));
376 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
377 static void reload_as_needed PROTO((rtx, int));
378 static void forget_old_reloads_1 PROTO((rtx, rtx));
379 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
380 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
382 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
384 static int reload_reg_free_p PROTO((int, int, enum reload_type));
385 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
386 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
387 static int reloads_conflict PROTO((int, int));
388 static int allocate_reload_reg PROTO((int, rtx, int, int));
389 static void choose_reload_regs PROTO((rtx, rtx));
390 static void merge_assigned_reloads PROTO((rtx));
391 static void emit_reload_insns PROTO((rtx));
392 static void delete_output_reload PROTO((rtx, int, rtx));
393 static void inc_for_reload PROTO((rtx, rtx, int));
394 static int constraint_accepts_reg_p PROTO((char *, rtx));
395 static int count_occurrences PROTO((rtx, rtx));
396 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
397 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
398 static void reload_cse_invalidate_mem PROTO((rtx));
399 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
400 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
401 static int reload_cse_noop_set_p PROTO((rtx, rtx));
402 static void reload_cse_simplify_set PROTO((rtx, rtx));
403 static void reload_cse_check_clobber PROTO((rtx, rtx));
404 static void reload_cse_record_set PROTO((rtx, rtx));
406 /* Initialize the reload pass once per compilation. */
413 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
414 Set spill_indirect_levels to the number of levels such addressing is
415 permitted, zero if it is not permitted at all. */
418 = gen_rtx (MEM, Pmode,
419 gen_rtx (PLUS, Pmode,
420 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
422 spill_indirect_levels = 0;
424 while (memory_address_p (QImode, tem))
426 spill_indirect_levels++;
427 tem = gen_rtx (MEM, Pmode, tem);
430 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
432 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
433 indirect_symref_ok = memory_address_p (QImode, tem);
435 /* See if reg+reg is a valid (and offsettable) address. */
437 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
439 tem = gen_rtx (PLUS, Pmode,
440 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
441 gen_rtx (REG, Pmode, i));
442 /* This way, we make sure that reg+reg is an offsettable address. */
443 tem = plus_constant (tem, 4);
445 if (memory_address_p (QImode, tem))
447 double_reg_address_ok = 1;
452 /* Initialize obstack for our rtl allocation. */
453 gcc_obstack_init (&reload_obstack);
454 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
456 /* Decide which register class should be used when reloading
457 addresses. If we are using SMALL_REGISTER_CLASSES, and any
458 parameters are passed in registers, then we do not want to use
459 those registers when reloading an address. Otherwise, if a
460 function argument needs a reload, we may wind up clobbering
461 another argument to the function which was already computed. If
462 we find a subset class which simply avoids those registers, we
463 use it instead. ??? It would be better to only use the
464 restricted class when we actually are loading function arguments,
465 but that is hard to determine. */
466 reload_address_base_reg_class = BASE_REG_CLASS;
467 reload_address_index_reg_class = INDEX_REG_CLASS;
468 #ifdef SMALL_REGISTER_CLASSES
469 if (SMALL_REGISTER_CLASSES)
472 HARD_REG_SET base, index;
475 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
476 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
477 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
479 if (FUNCTION_ARG_REGNO_P (regno))
481 CLEAR_HARD_REG_BIT (base, regno);
482 CLEAR_HARD_REG_BIT (index, regno);
486 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
488 for (p = reg_class_subclasses[BASE_REG_CLASS];
489 *p != LIM_REG_CLASSES;
492 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
495 reload_address_base_reg_class = *p;
500 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
502 for (p = reg_class_subclasses[INDEX_REG_CLASS];
503 *p != LIM_REG_CLASSES;
506 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
509 reload_address_index_reg_class = *p;
514 #endif /* SMALL_REGISTER_CLASSES */
517 /* Main entry point for the reload pass.
519 FIRST is the first insn of the function being compiled.
521 GLOBAL nonzero means we were called from global_alloc
522 and should attempt to reallocate any pseudoregs that we
523 displace from hard regs we will use for reloads.
524 If GLOBAL is zero, we do not have enough information to do that,
525 so any pseudo reg that is spilled must go to the stack.
527 DUMPFILE is the global-reg debugging dump file stream, or 0.
528 If it is nonzero, messages are written to it to describe
529 which registers are seized as reload regs, which pseudo regs
530 are spilled from them, and where the pseudo regs are reallocated to.
532 Return value is nonzero if reload failed
533 and we must not do any more for this function. */
536 reload (first, global, dumpfile)
542 register int i, j, k;
544 register struct elim_table *ep;
546 /* The two pointers used to track the true location of the memory used
547 for label offsets. */
548 char *real_known_ptr = NULL_PTR;
549 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
551 int something_changed;
552 int something_needs_reloads;
553 int something_needs_elimination;
554 int new_basic_block_needs;
555 enum reg_class caller_save_spill_class = NO_REGS;
556 int caller_save_group_size = 1;
558 /* Nonzero means we couldn't get enough spill regs. */
561 /* The basic block number currently being processed for INSN. */
564 /* Make sure even insns with volatile mem refs are recognizable. */
567 /* Enable find_equiv_reg to distinguish insns made by reload. */
568 reload_first_uid = get_max_uid ();
570 for (i = 0; i < N_REG_CLASSES; i++)
571 basic_block_needs[i] = 0;
573 #ifdef SECONDARY_MEMORY_NEEDED
574 /* Initialize the secondary memory table. */
575 clear_secondary_mem ();
578 /* Remember which hard regs appear explicitly
579 before we merge into `regs_ever_live' the ones in which
580 pseudo regs have been allocated. */
581 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
583 /* We don't have a stack slot for any spill reg yet. */
584 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
585 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
587 /* Initialize the save area information for caller-save, in case some
591 /* Compute which hard registers are now in use
592 as homes for pseudo registers.
593 This is done here rather than (eg) in global_alloc
594 because this point is reached even if not optimizing. */
595 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
598 /* A function that receives a nonlocal goto must save all call-saved
600 if (current_function_has_nonlocal_label)
601 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
603 if (! call_used_regs[i] && ! fixed_regs[i])
604 regs_ever_live[i] = 1;
607 for (i = 0; i < scratch_list_length; i++)
609 mark_scratch_live (scratch_list[i]);
611 /* Make sure that the last insn in the chain
612 is not something that needs reloading. */
613 emit_note (NULL_PTR, NOTE_INSN_DELETED);
615 /* Find all the pseudo registers that didn't get hard regs
616 but do have known equivalent constants or memory slots.
617 These include parameters (known equivalent to parameter slots)
618 and cse'd or loop-moved constant memory addresses.
620 Record constant equivalents in reg_equiv_constant
621 so they will be substituted by find_reloads.
622 Record memory equivalents in reg_mem_equiv so they can
623 be substituted eventually by altering the REG-rtx's. */
625 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
626 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
627 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
628 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
629 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
630 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
631 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
632 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
633 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
634 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
635 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
636 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
637 cannot_omit_stores = (char *) alloca (max_regno);
638 bzero (cannot_omit_stores, max_regno);
640 #ifdef SMALL_REGISTER_CLASSES
641 if (SMALL_REGISTER_CLASSES)
642 CLEAR_HARD_REG_SET (forbidden_regs);
645 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
646 Also find all paradoxical subregs and find largest such for each pseudo.
647 On machines with small register classes, record hard registers that
648 are used for user variables. These can never be used for spills.
649 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
650 caller-saved registers must be marked live. */
652 for (insn = first; insn; insn = NEXT_INSN (insn))
654 rtx set = single_set (insn);
656 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
657 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
658 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
659 if (! call_used_regs[i])
660 regs_ever_live[i] = 1;
662 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
664 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
666 #ifdef LEGITIMATE_PIC_OPERAND_P
667 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
668 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
672 rtx x = XEXP (note, 0);
673 i = REGNO (SET_DEST (set));
674 if (i > LAST_VIRTUAL_REGISTER)
676 if (GET_CODE (x) == MEM)
677 reg_equiv_memory_loc[i] = x;
678 else if (CONSTANT_P (x))
680 if (LEGITIMATE_CONSTANT_P (x))
681 reg_equiv_constant[i] = x;
683 reg_equiv_memory_loc[i]
684 = force_const_mem (GET_MODE (SET_DEST (set)), x);
689 /* If this register is being made equivalent to a MEM
690 and the MEM is not SET_SRC, the equivalencing insn
691 is one with the MEM as a SET_DEST and it occurs later.
692 So don't mark this insn now. */
693 if (GET_CODE (x) != MEM
694 || rtx_equal_p (SET_SRC (set), x))
695 reg_equiv_init[i] = insn;
700 /* If this insn is setting a MEM from a register equivalent to it,
701 this is the equivalencing insn. */
702 else if (set && GET_CODE (SET_DEST (set)) == MEM
703 && GET_CODE (SET_SRC (set)) == REG
704 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
705 && rtx_equal_p (SET_DEST (set),
706 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
707 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
709 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
710 scan_paradoxical_subregs (PATTERN (insn));
713 /* Does this function require a frame pointer? */
715 frame_pointer_needed = (! flag_omit_frame_pointer
716 #ifdef EXIT_IGNORE_STACK
717 /* ?? If EXIT_IGNORE_STACK is set, we will not save
718 and restore sp for alloca. So we can't eliminate
719 the frame pointer in that case. At some point,
720 we should improve this by emitting the
721 sp-adjusting insns for this case. */
722 || (current_function_calls_alloca
723 && EXIT_IGNORE_STACK)
725 || FRAME_POINTER_REQUIRED);
729 /* Initialize the table of registers to eliminate. The way we do this
730 depends on how the eliminable registers were defined. */
731 #ifdef ELIMINABLE_REGS
732 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
734 ep->can_eliminate = ep->can_eliminate_previous
735 = (CAN_ELIMINATE (ep->from, ep->to)
736 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
739 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
740 = ! frame_pointer_needed;
743 /* Count the number of eliminable registers and build the FROM and TO
744 REG rtx's. Note that code in gen_rtx will cause, e.g.,
745 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
746 We depend on this. */
747 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
749 num_eliminable += ep->can_eliminate;
750 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
751 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
754 num_labels = max_label_num () - get_first_label_num ();
756 /* Allocate the tables used to store offset information at labels. */
757 /* We used to use alloca here, but the size of what it would try to
758 allocate would occasionally cause it to exceed the stack limit and
759 cause a core dump. */
760 real_known_ptr = xmalloc (num_labels);
762 = (int (*)[NUM_ELIMINABLE_REGS])
763 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
765 offsets_known_at = real_known_ptr - get_first_label_num ();
767 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
769 /* Alter each pseudo-reg rtx to contain its hard reg number.
770 Assign stack slots to the pseudos that lack hard regs or equivalents.
771 Do not touch virtual registers. */
773 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
776 /* If we have some registers we think can be eliminated, scan all insns to
777 see if there is an insn that sets one of these registers to something
778 other than itself plus a constant. If so, the register cannot be
779 eliminated. Doing this scan here eliminates an extra pass through the
780 main reload loop in the most common case where register elimination
782 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
783 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
784 || GET_CODE (insn) == CALL_INSN)
785 note_stores (PATTERN (insn), mark_not_eliminable);
787 #ifndef REGISTER_CONSTRAINTS
788 /* If all the pseudo regs have hard regs,
789 except for those that are never referenced,
790 we know that no reloads are needed. */
791 /* But that is not true if there are register constraints, since
792 in that case some pseudos might be in the wrong kind of hard reg. */
794 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
795 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
798 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
800 free (real_known_ptr);
806 /* Compute the order of preference for hard registers to spill.
807 Store them by decreasing preference in potential_reload_regs. */
809 order_regs_for_reload (global);
811 /* So far, no hard regs have been spilled. */
813 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
814 spill_reg_order[i] = -1;
816 /* Initialize to -1, which means take the first spill register. */
819 /* On most machines, we can't use any register explicitly used in the
820 rtl as a spill register. But on some, we have to. Those will have
821 taken care to keep the life of hard regs as short as possible. */
823 #ifdef SMALL_REGISTER_CLASSES
824 if (! SMALL_REGISTER_CLASSES)
826 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
828 /* Spill any hard regs that we know we can't eliminate. */
829 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
830 if (! ep->can_eliminate)
831 spill_hard_reg (ep->from, global, dumpfile, 1);
833 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
834 if (frame_pointer_needed)
835 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
839 for (i = 0; i < N_REG_CLASSES; i++)
841 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
842 bzero (basic_block_needs[i], n_basic_blocks);
845 /* From now on, we need to emit any moves without making new pseudos. */
846 reload_in_progress = 1;
848 /* This loop scans the entire function each go-round
849 and repeats until one repetition spills no additional hard regs. */
851 /* This flag is set when a pseudo reg is spilled,
852 to require another pass. Note that getting an additional reload
853 reg does not necessarily imply any pseudo reg was spilled;
854 sometimes we find a reload reg that no pseudo reg was allocated in. */
855 something_changed = 1;
856 /* This flag is set if there are any insns that require reloading. */
857 something_needs_reloads = 0;
858 /* This flag is set if there are any insns that require register
860 something_needs_elimination = 0;
861 while (something_changed)
865 /* For each class, number of reload regs needed in that class.
866 This is the maximum over all insns of the needs in that class
867 of the individual insn. */
868 int max_needs[N_REG_CLASSES];
869 /* For each class, size of group of consecutive regs
870 that is needed for the reloads of this class. */
871 int group_size[N_REG_CLASSES];
872 /* For each class, max number of consecutive groups needed.
873 (Each group contains group_size[CLASS] consecutive registers.) */
874 int max_groups[N_REG_CLASSES];
875 /* For each class, max number needed of regs that don't belong
876 to any of the groups. */
877 int max_nongroups[N_REG_CLASSES];
878 /* For each class, the machine mode which requires consecutive
879 groups of regs of that class.
880 If two different modes ever require groups of one class,
881 they must be the same size and equally restrictive for that class,
882 otherwise we can't handle the complexity. */
883 enum machine_mode group_mode[N_REG_CLASSES];
884 /* Record the insn where each maximum need is first found. */
885 rtx max_needs_insn[N_REG_CLASSES];
886 rtx max_groups_insn[N_REG_CLASSES];
887 rtx max_nongroups_insn[N_REG_CLASSES];
889 HOST_WIDE_INT starting_frame_size;
890 int previous_frame_pointer_needed = frame_pointer_needed;
891 static char *reg_class_names[] = REG_CLASS_NAMES;
893 something_changed = 0;
894 bzero ((char *) max_needs, sizeof max_needs);
895 bzero ((char *) max_groups, sizeof max_groups);
896 bzero ((char *) max_nongroups, sizeof max_nongroups);
897 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
898 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
899 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
900 bzero ((char *) group_size, sizeof group_size);
901 for (i = 0; i < N_REG_CLASSES; i++)
902 group_mode[i] = VOIDmode;
904 /* Keep track of which basic blocks are needing the reloads. */
907 /* Remember whether any element of basic_block_needs
908 changes from 0 to 1 in this pass. */
909 new_basic_block_needs = 0;
911 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
912 here because the stack size may be a part of the offset computation
913 for register elimination, and there might have been new stack slots
914 created in the last iteration of this loop. */
915 assign_stack_local (BLKmode, 0, 0);
917 starting_frame_size = get_frame_size ();
919 /* Reset all offsets on eliminable registers to their initial values. */
920 #ifdef ELIMINABLE_REGS
921 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
923 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
924 ep->previous_offset = ep->offset
925 = ep->max_offset = ep->initial_offset;
928 #ifdef INITIAL_FRAME_POINTER_OFFSET
929 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
931 if (!FRAME_POINTER_REQUIRED)
933 reg_eliminate[0].initial_offset = 0;
935 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
936 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
939 num_not_at_initial_offset = 0;
941 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
943 /* Set a known offset for each forced label to be at the initial offset
944 of each elimination. We do this because we assume that all
945 computed jumps occur from a location where each elimination is
946 at its initial offset. */
948 for (x = forced_labels; x; x = XEXP (x, 1))
950 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
952 /* For each pseudo register that has an equivalent location defined,
953 try to eliminate any eliminable registers (such as the frame pointer)
954 assuming initial offsets for the replacement register, which
957 If the resulting location is directly addressable, substitute
958 the MEM we just got directly for the old REG.
960 If it is not addressable but is a constant or the sum of a hard reg
961 and constant, it is probably not addressable because the constant is
962 out of range, in that case record the address; we will generate
963 hairy code to compute the address in a register each time it is
964 needed. Similarly if it is a hard register, but one that is not
965 valid as an address register.
967 If the location is not addressable, but does not have one of the
968 above forms, assign a stack slot. We have to do this to avoid the
969 potential of producing lots of reloads if, e.g., a location involves
970 a pseudo that didn't get a hard register and has an equivalent memory
971 location that also involves a pseudo that didn't get a hard register.
973 Perhaps at some point we will improve reload_when_needed handling
974 so this problem goes away. But that's very hairy. */
976 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
977 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
979 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX, 0);
981 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
983 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
984 else if (CONSTANT_P (XEXP (x, 0))
985 || (GET_CODE (XEXP (x, 0)) == REG
986 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
987 || (GET_CODE (XEXP (x, 0)) == PLUS
988 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
989 && (REGNO (XEXP (XEXP (x, 0), 0))
990 < FIRST_PSEUDO_REGISTER)
991 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
992 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
995 /* Make a new stack slot. Then indicate that something
996 changed so we go back and recompute offsets for
997 eliminable registers because the allocation of memory
998 below might change some offset. reg_equiv_{mem,address}
999 will be set up for this pseudo on the next pass around
1001 reg_equiv_memory_loc[i] = 0;
1002 reg_equiv_init[i] = 0;
1004 something_changed = 1;
1008 /* If we allocated another pseudo to the stack, redo elimination
1010 if (something_changed)
1013 /* If caller-saves needs a group, initialize the group to include
1014 the size and mode required for caller-saves. */
1016 if (caller_save_group_size > 1)
1018 group_mode[(int) caller_save_spill_class] = Pmode;
1019 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1022 /* Compute the most additional registers needed by any instruction.
1023 Collect information separately for each class of regs. */
1025 for (insn = first; insn; insn = NEXT_INSN (insn))
1027 if (global && this_block + 1 < n_basic_blocks
1028 && insn == basic_block_head[this_block+1])
1031 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1032 might include REG_LABEL), we need to see what effects this
1033 has on the known offsets at labels. */
1035 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1036 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1037 && REG_NOTES (insn) != 0))
1038 set_label_offsets (insn, insn, 0);
1040 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1042 /* Nonzero means don't use a reload reg that overlaps
1043 the place where a function value can be returned. */
1044 rtx avoid_return_reg = 0;
1046 rtx old_body = PATTERN (insn);
1047 int old_code = INSN_CODE (insn);
1048 rtx old_notes = REG_NOTES (insn);
1049 int did_elimination = 0;
1051 /* To compute the number of reload registers of each class
1052 needed for an insn, we must simulate what choose_reload_regs
1053 can do. We do this by splitting an insn into an "input" and
1054 an "output" part. RELOAD_OTHER reloads are used in both.
1055 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1056 which must be live over the entire input section of reloads,
1057 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1058 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1061 The registers needed for output are RELOAD_OTHER and
1062 RELOAD_FOR_OUTPUT, which are live for the entire output
1063 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1064 reloads for each operand.
1066 The total number of registers needed is the maximum of the
1067 inputs and outputs. */
1071 /* [0] is normal, [1] is nongroup. */
1072 int regs[2][N_REG_CLASSES];
1073 int groups[N_REG_CLASSES];
1076 /* Each `struct needs' corresponds to one RELOAD_... type. */
1080 struct needs output;
1082 struct needs other_addr;
1083 struct needs op_addr;
1084 struct needs op_addr_reload;
1085 struct needs in_addr[MAX_RECOG_OPERANDS];
1086 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1087 struct needs out_addr[MAX_RECOG_OPERANDS];
1088 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1091 /* If needed, eliminate any eliminable registers. */
1093 did_elimination = eliminate_regs_in_insn (insn, 0);
1095 #ifdef SMALL_REGISTER_CLASSES
1096 /* Set avoid_return_reg if this is an insn
1097 that might use the value of a function call. */
1098 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1100 if (GET_CODE (PATTERN (insn)) == SET)
1101 after_call = SET_DEST (PATTERN (insn));
1102 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1103 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1104 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1108 else if (SMALL_REGISTER_CLASSES
1110 && !(GET_CODE (PATTERN (insn)) == SET
1111 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1113 if (reg_referenced_p (after_call, PATTERN (insn)))
1114 avoid_return_reg = after_call;
1117 #endif /* SMALL_REGISTER_CLASSES */
1119 /* Analyze the instruction. */
1120 find_reloads (insn, 0, spill_indirect_levels, global,
1123 /* Remember for later shortcuts which insns had any reloads or
1124 register eliminations.
1126 One might think that it would be worthwhile to mark insns
1127 that need register replacements but not reloads, but this is
1128 not safe because find_reloads may do some manipulation of
1129 the insn (such as swapping commutative operands), which would
1130 be lost when we restore the old pattern after register
1131 replacement. So the actions of find_reloads must be redone in
1132 subsequent passes or in reload_as_needed.
1134 However, it is safe to mark insns that need reloads
1135 but not register replacement. */
1137 PUT_MODE (insn, (did_elimination ? QImode
1138 : n_reloads ? HImode
1139 : GET_MODE (insn) == DImode ? DImode
1142 /* Discard any register replacements done. */
1143 if (did_elimination)
1145 obstack_free (&reload_obstack, reload_firstobj);
1146 PATTERN (insn) = old_body;
1147 INSN_CODE (insn) = old_code;
1148 REG_NOTES (insn) = old_notes;
1149 something_needs_elimination = 1;
1152 /* If this insn has no reloads, we need not do anything except
1153 in the case of a CALL_INSN when we have caller-saves and
1154 caller-save needs reloads. */
1157 && ! (GET_CODE (insn) == CALL_INSN
1158 && caller_save_spill_class != NO_REGS))
1161 something_needs_reloads = 1;
1162 bzero ((char *) &insn_needs, sizeof insn_needs);
1164 /* Count each reload once in every class
1165 containing the reload's own class. */
1167 for (i = 0; i < n_reloads; i++)
1169 register enum reg_class *p;
1170 enum reg_class class = reload_reg_class[i];
1172 enum machine_mode mode;
1174 struct needs *this_needs;
1176 /* Don't count the dummy reloads, for which one of the
1177 regs mentioned in the insn can be used for reloading.
1178 Don't count optional reloads.
1179 Don't count reloads that got combined with others. */
1180 if (reload_reg_rtx[i] != 0
1181 || reload_optional[i] != 0
1182 || (reload_out[i] == 0 && reload_in[i] == 0
1183 && ! reload_secondary_p[i]))
1186 /* Show that a reload register of this class is needed
1187 in this basic block. We do not use insn_needs and
1188 insn_groups because they are overly conservative for
1190 if (global && ! basic_block_needs[(int) class][this_block])
1192 basic_block_needs[(int) class][this_block] = 1;
1193 new_basic_block_needs = 1;
1196 mode = reload_inmode[i];
1197 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1198 mode = reload_outmode[i];
1199 size = CLASS_MAX_NREGS (class, mode);
1201 /* If this class doesn't want a group, determine if we have
1202 a nongroup need or a regular need. We have a nongroup
1203 need if this reload conflicts with a group reload whose
1204 class intersects with this reload's class. */
1208 for (j = 0; j < n_reloads; j++)
1209 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1210 (GET_MODE_SIZE (reload_outmode[j])
1211 > GET_MODE_SIZE (reload_inmode[j]))
1215 && (!reload_optional[j])
1216 && (reload_in[j] != 0 || reload_out[j] != 0
1217 || reload_secondary_p[j])
1218 && reloads_conflict (i, j)
1219 && reg_classes_intersect_p (class,
1220 reload_reg_class[j]))
1226 /* Decide which time-of-use to count this reload for. */
1227 switch (reload_when_needed[i])
1230 this_needs = &insn_needs.other;
1232 case RELOAD_FOR_INPUT:
1233 this_needs = &insn_needs.input;
1235 case RELOAD_FOR_OUTPUT:
1236 this_needs = &insn_needs.output;
1238 case RELOAD_FOR_INSN:
1239 this_needs = &insn_needs.insn;
1241 case RELOAD_FOR_OTHER_ADDRESS:
1242 this_needs = &insn_needs.other_addr;
1244 case RELOAD_FOR_INPUT_ADDRESS:
1245 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1247 case RELOAD_FOR_INPADDR_ADDRESS:
1248 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1250 case RELOAD_FOR_OUTPUT_ADDRESS:
1251 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1253 case RELOAD_FOR_OUTADDR_ADDRESS:
1254 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1256 case RELOAD_FOR_OPERAND_ADDRESS:
1257 this_needs = &insn_needs.op_addr;
1259 case RELOAD_FOR_OPADDR_ADDR:
1260 this_needs = &insn_needs.op_addr_reload;
1266 enum machine_mode other_mode, allocate_mode;
1268 /* Count number of groups needed separately from
1269 number of individual regs needed. */
1270 this_needs->groups[(int) class]++;
1271 p = reg_class_superclasses[(int) class];
1272 while (*p != LIM_REG_CLASSES)
1273 this_needs->groups[(int) *p++]++;
1275 /* Record size and mode of a group of this class. */
1276 /* If more than one size group is needed,
1277 make all groups the largest needed size. */
1278 if (group_size[(int) class] < size)
1280 other_mode = group_mode[(int) class];
1281 allocate_mode = mode;
1283 group_size[(int) class] = size;
1284 group_mode[(int) class] = mode;
1289 allocate_mode = group_mode[(int) class];
1292 /* Crash if two dissimilar machine modes both need
1293 groups of consecutive regs of the same class. */
1295 if (other_mode != VOIDmode && other_mode != allocate_mode
1296 && ! modes_equiv_for_class_p (allocate_mode,
1298 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1303 this_needs->regs[nongroup_need][(int) class] += 1;
1304 p = reg_class_superclasses[(int) class];
1305 while (*p != LIM_REG_CLASSES)
1306 this_needs->regs[nongroup_need][(int) *p++] += 1;
1312 /* All reloads have been counted for this insn;
1313 now merge the various times of use.
1314 This sets insn_needs, etc., to the maximum total number
1315 of registers needed at any point in this insn. */
1317 for (i = 0; i < N_REG_CLASSES; i++)
1319 int in_max, out_max;
1321 /* Compute normal and nongroup needs. */
1322 for (j = 0; j <= 1; j++)
1324 for (in_max = 0, out_max = 0, k = 0;
1325 k < reload_n_operands; k++)
1329 (insn_needs.in_addr[k].regs[j][i]
1330 + insn_needs.in_addr_addr[k].regs[j][i]));
1332 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1335 insn_needs.out_addr_addr[k].regs[j][i]);
1338 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1339 and operand addresses but not things used to reload
1340 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1341 don't conflict with things needed to reload inputs or
1344 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1345 insn_needs.op_addr_reload.regs[j][i]),
1348 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1350 insn_needs.input.regs[j][i]
1351 = MAX (insn_needs.input.regs[j][i]
1352 + insn_needs.op_addr.regs[j][i]
1353 + insn_needs.insn.regs[j][i],
1354 in_max + insn_needs.input.regs[j][i]);
1356 insn_needs.output.regs[j][i] += out_max;
1357 insn_needs.other.regs[j][i]
1358 += MAX (MAX (insn_needs.input.regs[j][i],
1359 insn_needs.output.regs[j][i]),
1360 insn_needs.other_addr.regs[j][i]);
1364 /* Now compute group needs. */
1365 for (in_max = 0, out_max = 0, j = 0;
1366 j < reload_n_operands; j++)
1368 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1369 in_max = MAX (in_max,
1370 insn_needs.in_addr_addr[j].groups[i]);
1372 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1374 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1377 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1378 insn_needs.op_addr_reload.groups[i]),
1380 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1382 insn_needs.input.groups[i]
1383 = MAX (insn_needs.input.groups[i]
1384 + insn_needs.op_addr.groups[i]
1385 + insn_needs.insn.groups[i],
1386 in_max + insn_needs.input.groups[i]);
1388 insn_needs.output.groups[i] += out_max;
1389 insn_needs.other.groups[i]
1390 += MAX (MAX (insn_needs.input.groups[i],
1391 insn_needs.output.groups[i]),
1392 insn_needs.other_addr.groups[i]);
1395 /* If this is a CALL_INSN and caller-saves will need
1396 a spill register, act as if the spill register is
1397 needed for this insn. However, the spill register
1398 can be used by any reload of this insn, so we only
1399 need do something if no need for that class has
1402 The assumption that every CALL_INSN will trigger a
1403 caller-save is highly conservative, however, the number
1404 of cases where caller-saves will need a spill register but
1405 a block containing a CALL_INSN won't need a spill register
1406 of that class should be quite rare.
1408 If a group is needed, the size and mode of the group will
1409 have been set up at the beginning of this loop. */
1411 if (GET_CODE (insn) == CALL_INSN
1412 && caller_save_spill_class != NO_REGS)
1414 /* See if this register would conflict with any reload
1415 that needs a group. */
1416 int nongroup_need = 0;
1417 int *caller_save_needs;
1419 for (j = 0; j < n_reloads; j++)
1420 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1421 (GET_MODE_SIZE (reload_outmode[j])
1422 > GET_MODE_SIZE (reload_inmode[j]))
1426 && reg_classes_intersect_p (caller_save_spill_class,
1427 reload_reg_class[j]))
1434 = (caller_save_group_size > 1
1435 ? insn_needs.other.groups
1436 : insn_needs.other.regs[nongroup_need]);
1438 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1440 register enum reg_class *p
1441 = reg_class_superclasses[(int) caller_save_spill_class];
1443 caller_save_needs[(int) caller_save_spill_class]++;
1445 while (*p != LIM_REG_CLASSES)
1446 caller_save_needs[(int) *p++] += 1;
1449 /* Show that this basic block will need a register of
1453 && ! (basic_block_needs[(int) caller_save_spill_class]
1456 basic_block_needs[(int) caller_save_spill_class]
1458 new_basic_block_needs = 1;
1462 #ifdef SMALL_REGISTER_CLASSES
1463 /* If this insn stores the value of a function call,
1464 and that value is in a register that has been spilled,
1465 and if the insn needs a reload in a class
1466 that might use that register as the reload register,
1467 then add add an extra need in that class.
1468 This makes sure we have a register available that does
1469 not overlap the return value. */
1471 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1473 int regno = REGNO (avoid_return_reg);
1475 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1477 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1479 /* First compute the "basic needs", which counts a
1480 need only in the smallest class in which it
1483 bcopy ((char *) insn_needs.other.regs[0],
1484 (char *) basic_needs, sizeof basic_needs);
1485 bcopy ((char *) insn_needs.other.groups,
1486 (char *) basic_groups, sizeof basic_groups);
1488 for (i = 0; i < N_REG_CLASSES; i++)
1492 if (basic_needs[i] >= 0)
1493 for (p = reg_class_superclasses[i];
1494 *p != LIM_REG_CLASSES; p++)
1495 basic_needs[(int) *p] -= basic_needs[i];
1497 if (basic_groups[i] >= 0)
1498 for (p = reg_class_superclasses[i];
1499 *p != LIM_REG_CLASSES; p++)
1500 basic_groups[(int) *p] -= basic_groups[i];
1503 /* Now count extra regs if there might be a conflict with
1504 the return value register. */
1506 for (r = regno; r < regno + nregs; r++)
1507 if (spill_reg_order[r] >= 0)
1508 for (i = 0; i < N_REG_CLASSES; i++)
1509 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1511 if (basic_needs[i] > 0)
1515 insn_needs.other.regs[0][i]++;
1516 p = reg_class_superclasses[i];
1517 while (*p != LIM_REG_CLASSES)
1518 insn_needs.other.regs[0][(int) *p++]++;
1520 if (basic_groups[i] > 0)
1524 insn_needs.other.groups[i]++;
1525 p = reg_class_superclasses[i];
1526 while (*p != LIM_REG_CLASSES)
1527 insn_needs.other.groups[(int) *p++]++;
1531 #endif /* SMALL_REGISTER_CLASSES */
1533 /* For each class, collect maximum need of any insn. */
1535 for (i = 0; i < N_REG_CLASSES; i++)
1537 if (max_needs[i] < insn_needs.other.regs[0][i])
1539 max_needs[i] = insn_needs.other.regs[0][i];
1540 max_needs_insn[i] = insn;
1542 if (max_groups[i] < insn_needs.other.groups[i])
1544 max_groups[i] = insn_needs.other.groups[i];
1545 max_groups_insn[i] = insn;
1547 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1549 max_nongroups[i] = insn_needs.other.regs[1][i];
1550 max_nongroups_insn[i] = insn;
1554 /* Note that there is a continue statement above. */
1557 /* If we allocated any new memory locations, make another pass
1558 since it might have changed elimination offsets. */
1559 if (starting_frame_size != get_frame_size ())
1560 something_changed = 1;
1563 for (i = 0; i < N_REG_CLASSES; i++)
1565 if (max_needs[i] > 0)
1567 ";; Need %d reg%s of class %s (for insn %d).\n",
1568 max_needs[i], max_needs[i] == 1 ? "" : "s",
1569 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1570 if (max_nongroups[i] > 0)
1572 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1573 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1574 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1575 if (max_groups[i] > 0)
1577 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1578 max_groups[i], max_groups[i] == 1 ? "" : "s",
1579 mode_name[(int) group_mode[i]],
1580 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1583 /* If we have caller-saves, set up the save areas and see if caller-save
1584 will need a spill register. */
1586 if (caller_save_needed)
1588 /* Set the offsets for setup_save_areas. */
1589 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
1591 ep->previous_offset = ep->max_offset;
1593 if ( ! setup_save_areas (&something_changed)
1594 && caller_save_spill_class == NO_REGS)
1596 /* The class we will need depends on whether the machine
1597 supports the sum of two registers for an address; see
1598 find_address_reloads for details. */
1600 caller_save_spill_class
1601 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1602 caller_save_group_size
1603 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1604 something_changed = 1;
1608 /* See if anything that happened changes which eliminations are valid.
1609 For example, on the Sparc, whether or not the frame pointer can
1610 be eliminated can depend on what registers have been used. We need
1611 not check some conditions again (such as flag_omit_frame_pointer)
1612 since they can't have changed. */
1614 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1615 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1616 #ifdef ELIMINABLE_REGS
1617 || ! CAN_ELIMINATE (ep->from, ep->to)
1620 ep->can_eliminate = 0;
1622 /* Look for the case where we have discovered that we can't replace
1623 register A with register B and that means that we will now be
1624 trying to replace register A with register C. This means we can
1625 no longer replace register C with register B and we need to disable
1626 such an elimination, if it exists. This occurs often with A == ap,
1627 B == sp, and C == fp. */
1629 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1631 struct elim_table *op;
1632 register int new_to = -1;
1634 if (! ep->can_eliminate && ep->can_eliminate_previous)
1636 /* Find the current elimination for ep->from, if there is a
1638 for (op = reg_eliminate;
1639 op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
1640 if (op->from == ep->from && op->can_eliminate)
1646 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1648 for (op = reg_eliminate;
1649 op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
1650 if (op->from == new_to && op->to == ep->to)
1651 op->can_eliminate = 0;
1655 /* See if any registers that we thought we could eliminate the previous
1656 time are no longer eliminable. If so, something has changed and we
1657 must spill the register. Also, recompute the number of eliminable
1658 registers and see if the frame pointer is needed; it is if there is
1659 no elimination of the frame pointer that we can perform. */
1661 frame_pointer_needed = 1;
1662 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1664 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1665 && ep->to != HARD_FRAME_POINTER_REGNUM)
1666 frame_pointer_needed = 0;
1668 if (! ep->can_eliminate && ep->can_eliminate_previous)
1670 ep->can_eliminate_previous = 0;
1671 spill_hard_reg (ep->from, global, dumpfile, 1);
1672 something_changed = 1;
1677 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1678 /* If we didn't need a frame pointer last time, but we do now, spill
1679 the hard frame pointer. */
1680 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1682 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1683 something_changed = 1;
1687 /* If all needs are met, we win. */
1689 for (i = 0; i < N_REG_CLASSES; i++)
1690 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1692 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1695 /* Not all needs are met; must spill some hard regs. */
1697 /* Put all registers spilled so far back in potential_reload_regs, but
1698 put them at the front, since we've already spilled most of the
1699 pseudos in them (we might have left some pseudos unspilled if they
1700 were in a block that didn't need any spill registers of a conflicting
1701 class. We used to try to mark off the need for those registers,
1702 but doing so properly is very complex and reallocating them is the
1703 simpler approach. First, "pack" potential_reload_regs by pushing
1704 any nonnegative entries towards the end. That will leave room
1705 for the registers we already spilled.
1707 Also, undo the marking of the spill registers from the last time
1708 around in FORBIDDEN_REGS since we will be probably be allocating
1711 ??? It is theoretically possible that we might end up not using one
1712 of our previously-spilled registers in this allocation, even though
1713 they are at the head of the list. It's not clear what to do about
1714 this, but it was no better before, when we marked off the needs met
1715 by the previously-spilled registers. With the current code, globals
1716 can be allocated into these registers, but locals cannot. */
1720 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1721 if (potential_reload_regs[i] != -1)
1722 potential_reload_regs[j--] = potential_reload_regs[i];
1724 for (i = 0; i < n_spills; i++)
1726 potential_reload_regs[i] = spill_regs[i];
1727 spill_reg_order[spill_regs[i]] = -1;
1728 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1734 /* Now find more reload regs to satisfy the remaining need
1735 Do it by ascending class number, since otherwise a reg
1736 might be spilled for a big class and might fail to count
1737 for a smaller class even though it belongs to that class.
1739 Count spilled regs in `spills', and add entries to
1740 `spill_regs' and `spill_reg_order'.
1742 ??? Note there is a problem here.
1743 When there is a need for a group in a high-numbered class,
1744 and also need for non-group regs that come from a lower class,
1745 the non-group regs are chosen first. If there aren't many regs,
1746 they might leave no room for a group.
1748 This was happening on the 386. To fix it, we added the code
1749 that calls possible_group_p, so that the lower class won't
1750 break up the last possible group.
1752 Really fixing the problem would require changes above
1753 in counting the regs already spilled, and in choose_reload_regs.
1754 It might be hard to avoid introducing bugs there. */
1756 CLEAR_HARD_REG_SET (counted_for_groups);
1757 CLEAR_HARD_REG_SET (counted_for_nongroups);
1759 for (class = 0; class < N_REG_CLASSES; class++)
1761 /* First get the groups of registers.
1762 If we got single registers first, we might fragment
1764 while (max_groups[class] > 0)
1766 /* If any single spilled regs happen to form groups,
1767 count them now. Maybe we don't really need
1768 to spill another group. */
1769 count_possible_groups (group_size, group_mode, max_groups,
1772 if (max_groups[class] <= 0)
1775 /* Groups of size 2 (the only groups used on most machines)
1776 are treated specially. */
1777 if (group_size[class] == 2)
1779 /* First, look for a register that will complete a group. */
1780 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1784 j = potential_reload_regs[i];
1785 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1787 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1788 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1789 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1790 && HARD_REGNO_MODE_OK (other, group_mode[class])
1791 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1793 /* We don't want one part of another group.
1794 We could get "two groups" that overlap! */
1795 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1797 (j < FIRST_PSEUDO_REGISTER - 1
1798 && (other = j + 1, spill_reg_order[other] >= 0)
1799 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1800 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1801 && HARD_REGNO_MODE_OK (j, group_mode[class])
1802 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1804 && ! TEST_HARD_REG_BIT (counted_for_groups,
1807 register enum reg_class *p;
1809 /* We have found one that will complete a group,
1810 so count off one group as provided. */
1811 max_groups[class]--;
1812 p = reg_class_superclasses[class];
1813 while (*p != LIM_REG_CLASSES)
1815 if (group_size [(int) *p] <= group_size [class])
1816 max_groups[(int) *p]--;
1820 /* Indicate both these regs are part of a group. */
1821 SET_HARD_REG_BIT (counted_for_groups, j);
1822 SET_HARD_REG_BIT (counted_for_groups, other);
1826 /* We can't complete a group, so start one. */
1827 #ifdef SMALL_REGISTER_CLASSES
1828 /* Look for a pair neither of which is explicitly used. */
1829 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1830 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1833 j = potential_reload_regs[i];
1834 /* Verify that J+1 is a potential reload reg. */
1835 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1836 if (potential_reload_regs[k] == j + 1)
1838 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1839 && k < FIRST_PSEUDO_REGISTER
1840 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1841 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1842 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1843 && HARD_REGNO_MODE_OK (j, group_mode[class])
1844 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1846 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1847 /* Reject J at this stage
1848 if J+1 was explicitly used. */
1849 && ! regs_explicitly_used[j + 1])
1853 /* Now try any group at all
1854 whose registers are not in bad_spill_regs. */
1855 if (i == FIRST_PSEUDO_REGISTER)
1856 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1859 j = potential_reload_regs[i];
1860 /* Verify that J+1 is a potential reload reg. */
1861 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1862 if (potential_reload_regs[k] == j + 1)
1864 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1865 && k < FIRST_PSEUDO_REGISTER
1866 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1867 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1868 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1869 && HARD_REGNO_MODE_OK (j, group_mode[class])
1870 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1872 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1876 /* I should be the index in potential_reload_regs
1877 of the new reload reg we have found. */
1879 if (i >= FIRST_PSEUDO_REGISTER)
1881 /* There are no groups left to spill. */
1882 spill_failure (max_groups_insn[class]);
1888 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1893 /* For groups of more than 2 registers,
1894 look for a sufficient sequence of unspilled registers,
1895 and spill them all at once. */
1896 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1900 j = potential_reload_regs[i];
1902 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1903 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1905 /* Check each reg in the sequence. */
1906 for (k = 0; k < group_size[class]; k++)
1907 if (! (spill_reg_order[j + k] < 0
1908 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1909 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1911 /* We got a full sequence, so spill them all. */
1912 if (k == group_size[class])
1914 register enum reg_class *p;
1915 for (k = 0; k < group_size[class]; k++)
1918 SET_HARD_REG_BIT (counted_for_groups, j + k);
1919 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1920 if (potential_reload_regs[idx] == j + k)
1923 |= new_spill_reg (idx, class,
1924 max_needs, NULL_PTR,
1928 /* We have found one that will complete a group,
1929 so count off one group as provided. */
1930 max_groups[class]--;
1931 p = reg_class_superclasses[class];
1932 while (*p != LIM_REG_CLASSES)
1934 if (group_size [(int) *p]
1935 <= group_size [class])
1936 max_groups[(int) *p]--;
1943 /* We couldn't find any registers for this reload.
1944 Avoid going into an infinite loop. */
1945 if (i >= FIRST_PSEUDO_REGISTER)
1947 /* There are no groups left. */
1948 spill_failure (max_groups_insn[class]);
1955 /* Now similarly satisfy all need for single registers. */
1957 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1959 /* If we spilled enough regs, but they weren't counted
1960 against the non-group need, see if we can count them now.
1961 If so, we can avoid some actual spilling. */
1962 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1963 for (i = 0; i < n_spills; i++)
1964 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1966 && !TEST_HARD_REG_BIT (counted_for_groups,
1968 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1970 && max_nongroups[class] > 0)
1972 register enum reg_class *p;
1974 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1975 max_nongroups[class]--;
1976 p = reg_class_superclasses[class];
1977 while (*p != LIM_REG_CLASSES)
1978 max_nongroups[(int) *p++]--;
1980 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1983 /* Consider the potential reload regs that aren't
1984 yet in use as reload regs, in order of preference.
1985 Find the most preferred one that's in this class. */
1987 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1988 if (potential_reload_regs[i] >= 0
1989 && TEST_HARD_REG_BIT (reg_class_contents[class],
1990 potential_reload_regs[i])
1991 /* If this reg will not be available for groups,
1992 pick one that does not foreclose possible groups.
1993 This is a kludge, and not very general,
1994 but it should be sufficient to make the 386 work,
1995 and the problem should not occur on machines with
1997 && (max_nongroups[class] == 0
1998 || possible_group_p (potential_reload_regs[i], max_groups)))
2001 /* If we couldn't get a register, try to get one even if we
2002 might foreclose possible groups. This may cause problems
2003 later, but that's better than aborting now, since it is
2004 possible that we will, in fact, be able to form the needed
2005 group even with this allocation. */
2007 if (i >= FIRST_PSEUDO_REGISTER
2008 && (asm_noperands (max_needs[class] > 0
2009 ? max_needs_insn[class]
2010 : max_nongroups_insn[class])
2012 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2013 if (potential_reload_regs[i] >= 0
2014 && TEST_HARD_REG_BIT (reg_class_contents[class],
2015 potential_reload_regs[i]))
2018 /* I should be the index in potential_reload_regs
2019 of the new reload reg we have found. */
2021 if (i >= FIRST_PSEUDO_REGISTER)
2023 /* There are no possible registers left to spill. */
2024 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
2025 : max_nongroups_insn[class]);
2031 |= new_spill_reg (i, class, max_needs, max_nongroups,
2037 /* If global-alloc was run, notify it of any register eliminations we have
2040 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2041 if (ep->can_eliminate)
2042 mark_elimination (ep->from, ep->to);
2044 /* Insert code to save and restore call-clobbered hard regs
2045 around calls. Tell if what mode to use so that we will process
2046 those insns in reload_as_needed if we have to. */
2048 if (caller_save_needed)
2049 save_call_clobbered_regs (num_eliminable ? QImode
2050 : caller_save_spill_class != NO_REGS ? HImode
2053 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2054 If that insn didn't set the register (i.e., it copied the register to
2055 memory), just delete that insn instead of the equivalencing insn plus
2056 anything now dead. If we call delete_dead_insn on that insn, we may
2057 delete the insn that actually sets the register if the register die
2058 there and that is incorrect. */
2060 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2061 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2062 && GET_CODE (reg_equiv_init[i]) != NOTE)
2064 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2065 delete_dead_insn (reg_equiv_init[i]);
2068 PUT_CODE (reg_equiv_init[i], NOTE);
2069 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2070 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2074 /* Use the reload registers where necessary
2075 by generating move instructions to move the must-be-register
2076 values into or out of the reload registers. */
2078 if (something_needs_reloads || something_needs_elimination
2079 || (caller_save_needed && num_eliminable)
2080 || caller_save_spill_class != NO_REGS)
2081 reload_as_needed (first, global);
2083 /* If we were able to eliminate the frame pointer, show that it is no
2084 longer live at the start of any basic block. If it ls live by
2085 virtue of being in a pseudo, that pseudo will be marked live
2086 and hence the frame pointer will be known to be live via that
2089 if (! frame_pointer_needed)
2090 for (i = 0; i < n_basic_blocks; i++)
2091 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
2092 HARD_FRAME_POINTER_REGNUM);
2094 /* Come here (with failure set nonzero) if we can't get enough spill regs
2095 and we decide not to abort about it. */
2098 reload_in_progress = 0;
2100 /* Now eliminate all pseudo regs by modifying them into
2101 their equivalent memory references.
2102 The REG-rtx's for the pseudos are modified in place,
2103 so all insns that used to refer to them now refer to memory.
2105 For a reg that has a reg_equiv_address, all those insns
2106 were changed by reloading so that no insns refer to it any longer;
2107 but the DECL_RTL of a variable decl may refer to it,
2108 and if so this causes the debugging info to mention the variable. */
2110 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2114 if (reg_equiv_mem[i])
2116 addr = XEXP (reg_equiv_mem[i], 0);
2117 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2119 if (reg_equiv_address[i])
2120 addr = reg_equiv_address[i];
2123 if (reg_renumber[i] < 0)
2125 rtx reg = regno_reg_rtx[i];
2126 XEXP (reg, 0) = addr;
2127 REG_USERVAR_P (reg) = 0;
2128 MEM_IN_STRUCT_P (reg) = in_struct;
2129 PUT_CODE (reg, MEM);
2131 else if (reg_equiv_mem[i])
2132 XEXP (reg_equiv_mem[i], 0) = addr;
2136 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2137 /* Make a pass over all the insns and remove death notes for things that
2138 are no longer registers or no longer die in the insn (e.g., an input
2139 and output pseudo being tied). */
2141 for (insn = first; insn; insn = NEXT_INSN (insn))
2142 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2146 for (note = REG_NOTES (insn); note; note = next)
2148 next = XEXP (note, 1);
2149 if (REG_NOTE_KIND (note) == REG_DEAD
2150 && (GET_CODE (XEXP (note, 0)) != REG
2151 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2152 remove_note (insn, note);
2157 /* If we are doing stack checking, give a warning if this function's
2158 frame size is larger than we expect. */
2159 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
2161 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
2163 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2164 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
2165 size += UNITS_PER_WORD;
2167 if (size > STACK_CHECK_MAX_FRAME_SIZE)
2168 warning ("frame size too large for reliable stack checking");
2171 /* Indicate that we no longer have known memory locations or constants. */
2172 reg_equiv_constant = 0;
2173 reg_equiv_memory_loc = 0;
2176 free (real_known_ptr);
2181 free (scratch_list);
2184 free (scratch_block);
2187 CLEAR_HARD_REG_SET (used_spill_regs);
2188 for (i = 0; i < n_spills; i++)
2189 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2194 /* Nonzero if, after spilling reg REGNO for non-groups,
2195 it will still be possible to find a group if we still need one. */
/* Return nonzero iff, after reserving hard reg REGNO for non-group use,
   some consecutive register pair usable as a reload group would remain
   for any class whose MAX_GROUPS entry is still positive.
   NOTE(review): this listing has gaps -- the parameter declarations and
   several statements (originals 2199-2202, 2207-2211, 2213-2214, 2220-2221,
   2228-2229, 2234-2235, 2243, 2249-2254) are missing, so bodies of several
   ifs and the final returns are not visible here.  */
2198 possible_group_p (regno, max_groups)
2203 int class = (int) NO_REGS;
/* Find a class that still needs groups, if any; remember it in CLASS.  */
2205 for (i = 0; i < (int) N_REG_CLASSES; i++)
2206 if (max_groups[i] > 0)
/* No class needs a group, so spilling REGNO cannot foreclose one.  */
2212 if (class == (int) NO_REGS)
2215 /* Consider each pair of consecutive registers. */
2216 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2218 /* Ignore pairs that include reg REGNO. */
2219 if (i == regno || i + 1 == regno)
2222 /* Ignore pairs that are outside the class that needs the group.
2223 ??? Here we fail to handle the case where two different classes
2224 independently need groups. But this never happens with our
2225 current machine descriptions. */
2226 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2227 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2230 /* A pair of consecutive regs we can still spill does the trick. */
2231 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2232 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2233 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2236 /* A pair of one already spilled and one we can spill does it
2237 provided the one already spilled is not otherwise reserved. */
2238 if (spill_reg_order[i] < 0
2239 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2240 && spill_reg_order[i + 1] >= 0
2241 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2242 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
/* Symmetric case: reg I+1 still spillable, reg I already a spill reg
   and not reserved for another group or non-group need.  */
2244 if (spill_reg_order[i + 1] < 0
2245 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2246 && spill_reg_order[i] >= 0
2247 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2248 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2255 /* Count any groups of CLASS that can be formed from the registers recently
/* Scan the spill regs already chosen and count off, against MAX_GROUPS,
   any consecutive runs of them that can serve as groups of size
   GROUP_SIZE[CLASS] in mode GROUP_MODE[CLASS].  Registers so counted
   are marked in counted_for_groups so they are not reused.
   NOTE(review): listing gaps -- declarations (originals 2260, 2262-2267),
   some braces/statements (2271, 2273-2276, 2283, 2289, 2292-2293, 2295,
   2299, 2303, 2306-2308, 2312-2313, 2316+) are missing.  */
2259 count_possible_groups (group_size, group_mode, max_groups, class)
2261 enum machine_mode *group_mode;
2268 /* Now find all consecutive groups of spilled registers
2269 and mark each group off against the need for such groups.
2270 But don't count them against ordinary need, yet. */
/* Nothing to do if this class needs no groups at all.  */
2272 if (group_size[class] == 0)
2275 CLEAR_HARD_REG_SET (new);
2277 /* Make a mask of all the regs that are spill regs in class I. */
2278 for (i = 0; i < n_spills; i++)
2279 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2280 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2281 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2282 SET_HARD_REG_BIT (new, spill_regs[i]);
2284 /* Find each consecutive group of them. */
2285 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2286 if (TEST_HARD_REG_BIT (new, i)
2287 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2288 && HARD_REGNO_MODE_OK (i, group_mode[class]))
/* Check that every reg in the candidate run is in the mask;
   J ends equal to group_size[class] iff the run is complete.  */
2290 for (j = 1; j < group_size[class]; j++)
2291 if (! TEST_HARD_REG_BIT (new, i + j))
2294 if (j == group_size[class])
2296 /* We found a group. Mark it off against this class's need for
2297 groups, and against each superclass too. */
2298 register enum reg_class *p;
2300 max_groups[class]--;
2301 p = reg_class_superclasses[class];
2302 while (*p != LIM_REG_CLASSES)
/* A superclass with a smaller-or-equal group size is also satisfied.  */
2304 if (group_size [(int) *p] <= group_size [class])
2305 max_groups[(int) *p]--;
2309 /* Don't count these registers again. */
2310 for (j = 0; j < group_size[class]; j++)
2311 SET_HARD_REG_BIT (counted_for_groups, i + j);
2314 /* Skip to the last reg in this group. When i is incremented above,
2315 it will then point to the first reg of the next possible group. */
2320 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2321 another mode that needs to be reloaded for the same register class CLASS.
2322 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2323 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2325 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2326 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2327 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2328 causes unnecessary failures on machines requiring alignment of register
2329 groups when the two modes are different sizes, because the larger mode has
2330 more strict alignment rules than the smaller mode. */
/* Return nonzero if every reg in CLASS that allows ALLOCATE_MODE also
   allows OTHER_MODE; fail (return 0) on the first reg that allows
   ALLOCATE_MODE but not OTHER_MODE.  See the block comment above for why
   the converse direction is deliberately not tested.
   NOTE(review): the return statements (originals in the 2343-2347 range)
   are missing from this listing.  */
2333 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2334 enum machine_mode allocate_mode, other_mode;
2335 enum reg_class class;
2338 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2340 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2341 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2342 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2348 /* Handle the failure to find a register to spill.
2349 INSN should be one of the insns which needed this particular spill reg. */
/* Report failure to find a spill register.  For an asm insn this is a
   user error (too many reload-requiring operands); otherwise it is a
   fatal internal error.  INSN is one of the insns that needed the reg.  */
2352 spill_failure (insn)
2355 if (asm_noperands (PATTERN (insn)) >= 0)
2356 error_for_asm (insn, "`asm' needs too many reloads");
/* presumably reached only in the non-asm case -- the `else' line is
   missing from this listing; TODO confirm against upstream.  */
2358 fatal_insn ("Unable to find a register to spill.", insn);
2361 /* Add a new register to the tables of available spill-registers
2362 (as well as spilling all pseudos allocated to the register).
2363 I is the index of this register in potential_reload_regs.
2364 CLASS is the regclass whose need is being satisfied.
2365 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2366 so that this register can count off against them.
2367 MAX_NONGROUPS is 0 if this register is part of a group.
2368 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
/* Add potential_reload_regs[I] to the spill-reg tables, count it off
   against MAX_NEEDS (and MAX_NONGROUPS when non-null) for CLASS and its
   superclasses, and spill every pseudo allocated to that hard reg.
   Returns a value derived from spill_hard_reg (the return statement is
   missing from this listing -- NOTE(review): originals 2372-2378, 2380,
   2382, 2385, 2390, 2392, 2396, 2398, 2400-2401, 2405, 2407, 2413-2414,
   2417, 2419, 2427, 2434-2435, 2437+ are absent).  */
2371 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2379 register enum reg_class *p;
2381 int regno = potential_reload_regs[i];
2383 if (i >= FIRST_PSEUDO_REGISTER)
2384 abort (); /* Caller failed to find any register. */
/* Spilling a fixed or forbidden reg would generate wrong code.  */
2386 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2387 fatal ("fixed or forbidden register was spilled.\n\
2388 This may be due to a compiler bug or to impossible asm\n\
2389 statements or clauses.");
2391 /* Make reg REGNO an additional reload reg. */
2393 potential_reload_regs[i] = -1;
2394 spill_regs[n_spills] = regno;
2395 spill_reg_order[regno] = n_spills;
/* presumably guarded by `if (dumpfile)' on a missing line -- confirm.  */
2397 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2399 /* Clear off the needs we just satisfied. */
2402 p = reg_class_superclasses[class];
2403 while (*p != LIM_REG_CLASSES)
2404 max_needs[(int) *p++]--;
/* If this reg also satisfies a non-group need, count that off too,
   for CLASS and each superclass.  */
2406 if (max_nongroups && max_nongroups[class] > 0)
2408 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2409 max_nongroups[class]--;
2410 p = reg_class_superclasses[class];
2411 while (*p != LIM_REG_CLASSES)
2412 max_nongroups[(int) *p++]--;
2415 /* Spill every pseudo reg that was allocated to this reg
2416 or to something that overlaps this reg. */
2418 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2420 /* If there are some registers still to eliminate and this register
2421 wasn't ever used before, additional stack space may have to be
2422 allocated to store this register. Thus, we may have changed the offset
2423 between the stack and frame pointers, so mark that something has changed.
2424 (If new pseudos were spilled, thus requiring more space, VAL would have
2425 been set non-zero by the call to spill_hard_reg above since additional
2426 reloads may be needed in that case.
2428 One might think that we need only set VAL to 1 if this is a call-used
2429 register. However, the set of registers that must be saved by the
2430 prologue is not identical to the call-used set. For example, the
2431 register used by the call insn for the return PC is a call-used register,
2432 but must be saved by the prologue. */
2433 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
/* Mark the reg live so the prologue/epilogue will save/restore it.  */
2436 regs_ever_live[spill_regs[n_spills]] = 1;
2442 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2443 data that is dead in INSN. */
/* Delete INSN (by turning it into a NOTE_INSN_DELETED note) and,
   recursively, any previous insn whose sole purpose was to load a
   register that dies in INSN.  */
2446 delete_dead_insn (insn)
2449 rtx prev = prev_real_insn (insn);
2452 /* If the previous insn sets a register that dies in our insn, delete it
/* (comma operator: bind PREV_DEST, then test that it is a REG)  */
2454 if (prev && GET_CODE (PATTERN (prev)) == SET
2455 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2456 && reg_mentioned_p (prev_dest, PATTERN (insn))
2457 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2458 delete_dead_insn (prev);
/* Neuter INSN in place rather than unlinking it from the chain.  */
2460 PUT_CODE (insn, NOTE);
2461 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2462 NOTE_SOURCE_FILE (insn) = 0;
2465 /* Modify the home of pseudo-reg I.
2466 The new home is present in reg_renumber[I].
2468 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2469 or it may be -1, meaning there is none or it is not relevant.
2470 This is used so that all pseudos spilled from a given hard reg
2471 can share one stack slot. */
/* Modify the home of pseudo-reg I per reg_renumber[I]; if the pseudo got
   no hard reg and no equivalent, give it a stack slot (recorded in
   reg_equiv_memory_loc[i]).  FROM_REG, when >= 0, is the hard reg the
   pseudo is being spilled from, so that all pseudos spilled from one
   hard reg can share one slot (spill_stack_slot[from_reg]).
   NOTE(review): listing gaps -- declarations (originals 2475-2477),
   early returns (2481-2482, 2486-2487), and several braces/else lines
   (2495, 2500-2501, 2504-2505, 2509, 2514-2515, 2523, 2525, 2527, 2532,
   2535-2536, 2540, 2542, 2544, 2548, 2552, 2554, 2558, 2560, 2562-2563,
   2565, 2568-2569, 2574, 2578, 2582-2583, 2586+) are absent.  */
2474 alter_reg (i, from_reg)
2478 /* When outputting an inline function, this can happen
2479 for a reg that isn't actually used. */
2480 if (regno_reg_rtx[i] == 0)
2483 /* If the reg got changed to a MEM at rtl-generation time,
2485 if (GET_CODE (regno_reg_rtx[i]) != REG)
2488 /* Modify the reg-rtx to contain the new hard reg
2489 number or else to contain its pseudo reg number. */
2490 REGNO (regno_reg_rtx[i])
2491 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2493 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2494 allocate a stack slot for it. */
2496 if (reg_renumber[i] < 0
2497 && REG_N_REFS (i) > 0
2498 && reg_equiv_constant[i] == 0
2499 && reg_equiv_memory_loc[i] == 0)
2502 int inherent_size = PSEUDO_REGNO_BYTES (i);
2503 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2506 /* Each pseudo reg has an inherent size which comes from its own mode,
2507 and a total size which provides room for paradoxical subregs
2508 which refer to the pseudo reg in wider modes.
2510 We can use a slot already allocated if it provides both
2511 enough inherent space and enough total space.
2512 Otherwise, we allocate a new slot, making sure that it has no less
2513 inherent space, and no less total space, then the previous slot. */
2516 /* No known place to spill from => no slot to reuse. */
2517 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2518 inherent_size == total_size ? 0 : -1);
2519 if (BYTES_BIG_ENDIAN)
2520 /* Cancel the big-endian correction done in assign_stack_local.
2521 Get the address of the beginning of the slot.
2522 This is so we can do a big-endian correction unconditionally
2524 adjust = inherent_size - total_size;
2526 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2528 /* Reuse a stack slot if possible. */
2529 else if (spill_stack_slot[from_reg] != 0
2530 && spill_stack_slot_width[from_reg] >= total_size
2531 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2533 x = spill_stack_slot[from_reg];
2534 /* Allocate a bigger slot. */
2537 /* Compute maximum size needed, both for inherent size
2538 and for total size. */
2539 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
/* Widen MODE/TOTAL_SIZE to cover the old slot as well, so every
   pseudo previously sharing it still fits.  */
2541 if (spill_stack_slot[from_reg])
2543 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2545 mode = GET_MODE (spill_stack_slot[from_reg]);
2546 if (spill_stack_slot_width[from_reg] > total_size)
2547 total_size = spill_stack_slot_width[from_reg];
2549 /* Make a slot with that size. */
2550 x = assign_stack_local (mode, total_size,
2551 inherent_size == total_size ? 0 : -1);
2553 if (BYTES_BIG_ENDIAN)
2555 /* Cancel the big-endian correction done in assign_stack_local.
2556 Get the address of the beginning of the slot.
2557 This is so we can do a big-endian correction unconditionally
2559 adjust = GET_MODE_SIZE (mode) - total_size;
2561 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2564 plus_constant (XEXP (x, 0), adjust));
/* Record the (possibly widened) shared slot for later spills.  */
2566 spill_stack_slot[from_reg] = stack_slot;
2567 spill_stack_slot_width[from_reg] = total_size;
2570 /* On a big endian machine, the "address" of the slot
2571 is the address of the low part that fits its inherent mode. */
2572 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2573 adjust += (total_size - inherent_size);
2575 /* If we have any adjustment to make, or if the stack slot is the
2576 wrong mode, make a new stack slot. */
2577 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2579 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2580 plus_constant (XEXP (x, 0), adjust))
2581 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2584 /* Save the stack slot for later. */
2585 reg_equiv_memory_loc[i] = x;
2589 /* Mark the slots in regs_ever_live for the hard regs
2590 used by pseudo-reg number REGNO. */
/* Mark as live (in regs_ever_live) every hard reg occupied by pseudo
   REGNO, i.e. the range [reg_renumber[regno], + nregs for its mode).
   NOTE(review): the early return for an unallocated pseudo and the
   `while' line (originals 2598-2599, 2601) are missing here.  */
2593 mark_home_live (regno)
2596 register int i, lim;
2597 i = reg_renumber[regno];
2600 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2602 regs_ever_live[i++] = 1;
2605 /* Mark the registers used in SCRATCH as being live. */
/* Mark every hard reg covered by the hard-reg rtx SCRATCH as live in
   regs_ever_live (a multi-word SCRATCH covers several regs).  */
2608 mark_scratch_live (scratch)
2612 int regno = REGNO (scratch);
2613 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2615 for (i = regno; i < lim; i++)
2616 regs_ever_live[i] = 1;
2619 /* This function handles the tracking of elimination offsets around branches.
2621 X is a piece of RTL being scanned.
2623 INSN is the insn that it came from, if any.
2625 INITIAL_P is non-zero if we are to set the offset to be the initial
2626 offset and zero if we are setting the offset of the label to be the
/* Track elimination offsets across labels and jumps.  X is the rtl being
   scanned; INSN is the insn it came from (may be 0); INITIAL_P nonzero
   means record/compare the initial offsets rather than current ones.
   For a CODE_LABEL, record or check offsets_at[]/offsets_known_at[];
   for jumps, recurse into possible targets; for computed jumps, disable
   any elimination not at its initial offset.
   NOTE(review): this listing has gaps -- the `switch (code)' and its
   `case'/`break' lines, declarations (2631-2634, 2636-2637, 2639-2642),
   and many braces are missing, so the case structure must be inferred
   from the surviving comments.  */
2630 set_label_offsets (x, insn, initial_p)
2635 enum rtx_code code = GET_CODE (x);
2638 struct elim_table *p;
/* Non-local labels are targets of jumps from other functions, so all
   eliminations must be treated as at their initial offsets.  */
2643 if (LABEL_REF_NONLOCAL_P (x))
2648 /* ... fall through ... */
2651 /* If we know nothing about this label, set the desired offsets. Note
2652 that this sets the offset at a label to be the offset before a label
2653 if we don't know anything about the label. This is not correct for
2654 the label after a BARRIER, but is the best guess we can make. If
2655 we guessed wrong, we will suppress an elimination that might have
2656 been possible had we been able to guess correctly. */
2658 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2660 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2661 offsets_at[CODE_LABEL_NUMBER (x)][i]
2662 = (initial_p ? reg_eliminate[i].initial_offset
2663 : reg_eliminate[i].offset);
2664 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2667 /* Otherwise, if this is the definition of a label and it is
2668 preceded by a BARRIER, set our offsets to the known offset of
/* (condition start is missing from the listing)  */
2672 && (tem = prev_nonnote_insn (insn)) != 0
2673 && GET_CODE (tem) == BARRIER)
2675 num_not_at_initial_offset = 0;
2676 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2678 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2679 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2680 if (reg_eliminate[i].can_eliminate
2681 && (reg_eliminate[i].offset
2682 != reg_eliminate[i].initial_offset))
2683 num_not_at_initial_offset++;
2688 /* If neither of the above cases is true, compare each offset
2689 with those previously recorded and suppress any eliminations
2690 where the offsets disagree. */
2692 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2693 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2694 != (initial_p ? reg_eliminate[i].initial_offset
2695 : reg_eliminate[i].offset))
2696 reg_eliminate[i].can_eliminate = 0;
/* presumably the JUMP_INSN case: scan the pattern, then fall through
   to handle REG_LABEL notes -- confirm against upstream.  */
2701 set_label_offsets (PATTERN (insn), insn, initial_p);
2703 /* ... fall through ... */
2707 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2708 and hence must have all eliminations at their initial offsets. */
2709 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2710 if (REG_NOTE_KIND (tem) == REG_LABEL)
2711 set_label_offsets (XEXP (tem, 0), insn, 1);
2716 /* Each of the labels in the address vector must be at their initial
2717 offsets. We want the first first for ADDR_VEC and the second
2718 field for ADDR_DIFF_VEC. */
2720 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2721 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2726 /* We only care about setting PC. If the source is not RETURN,
2727 IF_THEN_ELSE, or a label, disable any eliminations not at
2728 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2729 isn't one of those possibilities. For branches to a label,
2730 call ourselves recursively.
2732 Note that this can disable elimination unnecessarily when we have
2733 a non-local goto since it will look like a non-constant jump to
2734 someplace in the current function. This isn't a significant
2735 problem since such jumps will normally be when all elimination
2736 pairs are back to their initial offsets. */
2738 if (SET_DEST (x) != pc_rtx)
2741 switch (GET_CODE (SET_SRC (x)))
/* presumably case LABEL_REF: recurse into the label -- confirm.  */
2748 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
/* presumably case IF_THEN_ELSE: check both arms -- confirm.  */
2752 tem = XEXP (SET_SRC (x), 1);
2753 if (GET_CODE (tem) == LABEL_REF)
2754 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2755 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2758 tem = XEXP (SET_SRC (x), 2);
2759 if (GET_CODE (tem) == LABEL_REF)
2760 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2761 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2766 /* If we reach here, all eliminations must be at their initial
2767 offset because we are doing a jump to a variable address. */
/* NOTE(review): "®_eliminate" below looks like a mis-encoding of
   "&reg_eliminate" (an '&' fused with 'reg' into the (R) sign) --
   confirm against the upstream source and repair.  */
2768 for (p = reg_eliminate; p < ®_eliminate[NUM_ELIMINABLE_REGS]; p++)
2769 if (p->offset != p->initial_offset)
2770 p->can_eliminate = 0;
2774 /* Used for communication between the next two function to properly share
2775 the vector for an ASM_OPERANDS. */
2777 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2779 /* Scan X and replace any eliminable registers (such as fp) with a
2780 replacement (such as sp), plus an offset.
2782 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2783 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2784 MEM, we are allowed to replace a sum of a register and the constant zero
2785 with the register, which we cannot do outside a MEM. In addition, we need
2786 to record the fact that a register is referenced outside a MEM.
2788 If INSN is an insn, it is the insn containing X. If we replace a REG
2789 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2790 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2791 that the REG is being modified.
2793 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2794 That's used when we eliminate in expressions stored in notes.
2795 This means, do not set ref_outside_mem even if the reference
2798 If we see a modification to a register we know about, take the
2799 appropriate action (see case SET, below).
2801 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2802 replacements done assuming all offsets are at their initial values. If
2803 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2804 encounter, return the actual location so that find_reloads will do
2805 the proper thing. */
2808 eliminate_regs (x, mem_mode, insn, storing)
2810 enum machine_mode mem_mode;
2814 enum rtx_code code = GET_CODE (x);
2815 struct elim_table *ep;
2840 /* First handle the case where we encounter a bare register that
2841 is eliminable. Replace it with a PLUS. */
2842 if (regno < FIRST_PSEUDO_REGISTER)
2844 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2846 if (ep->from_rtx == x && ep->can_eliminate)
2849 /* Refs inside notes don't count for this purpose. */
2850 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2851 || GET_CODE (insn) == INSN_LIST)))
2852 ep->ref_outside_mem = 1;
2853 return plus_constant (ep->to_rtx, ep->previous_offset);
2857 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2858 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2860 /* In this case, find_reloads would attempt to either use an
2861 incorrect address (if something is not at its initial offset)
2862 or substitute an replaced address into an insn (which loses
2863 if the offset is changed by some later action). So we simply
2864 return the replaced stack slot (assuming it is changed by
2865 elimination) and ignore the fact that this is actually a
2866 reference to the pseudo. Ensure we make a copy of the
2867 address in case it is shared. */
2868 new = eliminate_regs (reg_equiv_memory_loc[regno],
2870 if (new != reg_equiv_memory_loc[regno])
2872 cannot_omit_stores[regno] = 1;
2873 return copy_rtx (new);
2879 /* If this is the sum of an eliminable register and a constant, rework
2881 if (GET_CODE (XEXP (x, 0)) == REG
2882 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2883 && CONSTANT_P (XEXP (x, 1)))
2885 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2887 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2890 /* Refs inside notes don't count for this purpose. */
2891 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2892 || GET_CODE (insn) == INSN_LIST)))
2893 ep->ref_outside_mem = 1;
2895 /* The only time we want to replace a PLUS with a REG (this
2896 occurs when the constant operand of the PLUS is the negative
2897 of the offset) is when we are inside a MEM. We won't want
2898 to do so at other times because that would change the
2899 structure of the insn in a way that reload can't handle.
2900 We special-case the commonest situation in
2901 eliminate_regs_in_insn, so just replace a PLUS with a
2902 PLUS here, unless inside a MEM. */
2903 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2904 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2907 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2908 plus_constant (XEXP (x, 1),
2909 ep->previous_offset));
2912 /* If the register is not eliminable, we are done since the other
2913 operand is a constant. */
2917 /* If this is part of an address, we want to bring any constant to the
2918 outermost PLUS. We will do this by doing register replacement in
2919 our operands and seeing if a constant shows up in one of them.
2921 We assume here this is part of an address (or a "load address" insn)
2922 since an eliminable register is not likely to appear in any other
2925 If we have (plus (eliminable) (reg)), we want to produce
2926 (plus (plus (replacement) (reg) (const))). If this was part of a
2927 normal add insn, (plus (replacement) (reg)) will be pushed as a
2928 reload. This is the desired action. */
2931 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2932 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
2934 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2936 /* If one side is a PLUS and the other side is a pseudo that
2937 didn't get a hard register but has a reg_equiv_constant,
2938 we must replace the constant here since it may no longer
2939 be in the position of any operand. */
2940 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2941 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2942 && reg_renumber[REGNO (new1)] < 0
2943 && reg_equiv_constant != 0
2944 && reg_equiv_constant[REGNO (new1)] != 0)
2945 new1 = reg_equiv_constant[REGNO (new1)];
2946 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2947 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2948 && reg_renumber[REGNO (new0)] < 0
2949 && reg_equiv_constant[REGNO (new0)] != 0)
2950 new0 = reg_equiv_constant[REGNO (new0)];
2952 new = form_sum (new0, new1);
2954 /* As above, if we are not inside a MEM we do not want to
2955 turn a PLUS into something else. We might try to do so here
2956 for an addition of 0 if we aren't optimizing. */
2957 if (! mem_mode && GET_CODE (new) != PLUS)
2958 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2966 /* If this is the product of an eliminable register and a
2967 constant, apply the distribute law and move the constant out
2968 so that we have (plus (mult ..) ..). This is needed in order
2969 to keep load-address insns valid. This case is pathological.
2970 We ignore the possibility of overflow here. */
2971 if (GET_CODE (XEXP (x, 0)) == REG
2972 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2973 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2974 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2976 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2979 /* Refs inside notes don't count for this purpose. */
2980 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2981 || GET_CODE (insn) == INSN_LIST)))
2982 ep->ref_outside_mem = 1;
2985 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2986 ep->previous_offset * INTVAL (XEXP (x, 1)));
2989 /* ... fall through ... */
2994 case DIV: case UDIV:
2995 case MOD: case UMOD:
2996 case AND: case IOR: case XOR:
2997 case ROTATERT: case ROTATE:
2998 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3000 case GE: case GT: case GEU: case GTU:
3001 case LE: case LT: case LEU: case LTU:
3003 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3005 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn, 0) : 0;
3007 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
3008 return gen_rtx (code, GET_MODE (x), new0, new1);
3013 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3016 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3017 if (new != XEXP (x, 0))
3018 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
3021 /* ... fall through ... */
3024 /* Now do eliminations in the rest of the chain. If this was
3025 an EXPR_LIST, this might result in allocating more memory than is
3026 strictly needed, but it simplifies the code. */
3029 new = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
3030 if (new != XEXP (x, 1))
3031 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3039 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3040 if (ep->to_rtx == XEXP (x, 0))
3042 int size = GET_MODE_SIZE (mem_mode);
3044 /* If more bytes than MEM_MODE are pushed, account for them. */
3045 #ifdef PUSH_ROUNDING
3046 if (ep->to_rtx == stack_pointer_rtx)
3047 size = PUSH_ROUNDING (size);
3049 if (code == PRE_DEC || code == POST_DEC)
3055 /* Fall through to generic unary operation case. */
3056 case STRICT_LOW_PART:
3058 case SIGN_EXTEND: case ZERO_EXTEND:
3059 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3060 case FLOAT: case FIX:
3061 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3065 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3066 if (new != XEXP (x, 0))
3067 return gen_rtx (code, GET_MODE (x), new);
3071 /* Similar to above processing, but preserve SUBREG_WORD.
3072 Convert (subreg (mem)) to (mem) if not paradoxical.
3073 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3074 pseudo didn't get a hard reg, we must replace this with the
3075 eliminated version of the memory location because push_reloads
3076 may do the replacement in certain circumstances. */
3077 if (GET_CODE (SUBREG_REG (x)) == REG
3078 && (GET_MODE_SIZE (GET_MODE (x))
3079 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3080 && reg_equiv_memory_loc != 0
3081 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3083 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3086 /* If we didn't change anything, we must retain the pseudo. */
3087 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3088 new = SUBREG_REG (x);
3091 /* Otherwise, ensure NEW isn't shared in case we have to reload
3093 new = copy_rtx (new);
3095 /* In this case, we must show that the pseudo is used in this
3096 insn so that delete_output_reload will do the right thing. */
3097 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3098 && GET_CODE (insn) != INSN_LIST)
3099 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
3104 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn, 0);
3106 if (new != XEXP (x, 0))
3108 int x_size = GET_MODE_SIZE (GET_MODE (x));
3109 int new_size = GET_MODE_SIZE (GET_MODE (new));
3111 /* When asked to spill a partial word subreg, we need to go
3112 ahead and spill the whole thing against the possibility
3113 that we reload the whole reg and find garbage at the top. */
3115 && GET_CODE (new) == MEM
3116 && x_size < new_size
3117 && ((x_size + UNITS_PER_WORD-1) / UNITS_PER_WORD
3118 == (new_size + UNITS_PER_WORD-1) / UNITS_PER_WORD))
3120 else if (GET_CODE (new) == MEM
3121 && x_size <= new_size
3122 #ifdef LOAD_EXTEND_OP
3123 /* On these machines we will be reloading what is
3124 inside the SUBREG if it originally was a pseudo and
3125 the inner and outer modes are both a word or
3126 smaller. So leave the SUBREG then. */
3127 && ! (GET_CODE (SUBREG_REG (x)) == REG
3128 && x_size <= UNITS_PER_WORD
3129 && new_size <= UNITS_PER_WORD
3130 && x_size > new_size
3131 && INTEGRAL_MODE_P (GET_MODE (new))
3132 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
3136 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3137 enum machine_mode mode = GET_MODE (x);
3139 if (BYTES_BIG_ENDIAN)
3140 offset += (MIN (UNITS_PER_WORD,
3141 GET_MODE_SIZE (GET_MODE (new)))
3142 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3144 PUT_MODE (new, mode);
3145 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3149 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3155 /* If using a register that is the source of an eliminate we still
3156 think can be performed, note it cannot be performed since we don't
3157 know how this register is used. */
3158 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3159 if (ep->from_rtx == XEXP (x, 0))
3160 ep->can_eliminate = 0;
3162 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3163 if (new != XEXP (x, 0))
3164 return gen_rtx (code, GET_MODE (x), new);
3168 /* If clobbering a register that is the replacement register for an
3169 elimination we still think can be performed, note that it cannot
3170 be performed. Otherwise, we need not be concerned about it. */
3171 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3172 if (ep->to_rtx == XEXP (x, 0))
3173 ep->can_eliminate = 0;
3175 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3176 if (new != XEXP (x, 0))
3177 return gen_rtx (code, GET_MODE (x), new);
3183 /* Properly handle sharing input and constraint vectors. */
3184 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3186 /* When we come to a new vector not seen before,
3187 scan all its elements; keep the old vector if none
3188 of them changes; otherwise, make a copy. */
3189 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3190 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3191 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3192 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3195 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3196 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3199 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3200 new_asm_operands_vec = old_asm_operands_vec;
3202 new_asm_operands_vec
3203 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3206 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3207 if (new_asm_operands_vec == old_asm_operands_vec)
3210 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3211 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3212 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3213 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3214 ASM_OPERANDS_SOURCE_FILE (x),
3215 ASM_OPERANDS_SOURCE_LINE (x));
3216 new->volatil = x->volatil;
3221 /* Check for setting a register that we know about. */
3222 if (GET_CODE (SET_DEST (x)) == REG)
3224 /* See if this is setting the replacement register for an
3227 If DEST is the hard frame pointer, we do nothing because we
3228 assume that all assignments to the frame pointer are for
3229 non-local gotos and are being done at a time when they are valid
3230 and do not disturb anything else. Some machines want to
3231 eliminate a fake argument pointer (or even a fake frame pointer)
3232 with either the real frame or the stack pointer. Assignments to
3233 the hard frame pointer must not prevent this elimination. */
3235 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3237 if (ep->to_rtx == SET_DEST (x)
3238 && SET_DEST (x) != hard_frame_pointer_rtx)
3240 /* If it is being incremented, adjust the offset. Otherwise,
3241 this elimination can't be done. */
3242 rtx src = SET_SRC (x);
3244 if (GET_CODE (src) == PLUS
3245 && XEXP (src, 0) == SET_DEST (x)
3246 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3247 ep->offset -= INTVAL (XEXP (src, 1));
3249 ep->can_eliminate = 0;
3252 /* Now check to see we are assigning to a register that can be
3253 eliminated. If so, it must be as part of a PARALLEL, since we
3254 will not have been called if this is a single SET. So indicate
3255 that we can no longer eliminate this reg. */
3256 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3258 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3259 ep->can_eliminate = 0;
3262 /* Now avoid the loop below in this common case. */
3264 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn, 1);
3265 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn, 0);
3267 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3268 write a CLOBBER insn. */
3269 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3270 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3271 && GET_CODE (insn) != INSN_LIST)
3272 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3274 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3275 return gen_rtx (SET, VOIDmode, new0, new1);
3281 /* Our only special processing is to pass the mode of the MEM to our
3282 recursive call and copy the flags. While we are here, handle this
3283 case more efficiently. */
3284 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn, 0);
3285 if (new != XEXP (x, 0))
3287 new = gen_rtx (MEM, GET_MODE (x), new);
3288 new->volatil = x->volatil;
3289 new->unchanging = x->unchanging;
3290 new->in_struct = x->in_struct;
3297 /* Process each of our operands recursively. If any have changed, make a
3299 fmt = GET_RTX_FORMAT (code);
3300 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3304 new = eliminate_regs (XEXP (x, i), mem_mode, insn, 0);
3305 if (new != XEXP (x, i) && ! copied)
3307 rtx new_x = rtx_alloc (code);
3308 bcopy ((char *) x, (char *) new_x,
3309 (sizeof (*new_x) - sizeof (new_x->fld)
3310 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3316 else if (*fmt == 'E')
3319 for (j = 0; j < XVECLEN (x, i); j++)
3321 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn, 0);
3322 if (new != XVECEXP (x, i, j) && ! copied_vec)
3324 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3328 rtx new_x = rtx_alloc (code);
3329 bcopy ((char *) x, (char *) new_x,
3330 (sizeof (*new_x) - sizeof (new_x->fld)
3331 + (sizeof (new_x->fld[0])
3332 * GET_RTX_LENGTH (code))));
3336 XVEC (x, i) = new_v;
3339 XVECEXP (x, i, j) = new;
3347 /* Scan INSN and eliminate all eliminable registers in it.
3349    If REPLACE is nonzero, do the replacement destructively.  Also
3350    delete the insn as dead if it is setting an eliminable register.
3352    If REPLACE is zero, do all our allocations in reload_obstack.
3354    If no eliminations were done and this insn doesn't require any elimination
3355    processing (these are not identical conditions: it might be updating sp,
3356    but not referencing fp; this needs to be seen during reload_as_needed so
3357    that the offset between fp and sp can be taken into consideration), zero
3358    is returned.  Otherwise, 1 is returned.  */
3361 eliminate_regs_in_insn (insn, replace)
/* NOTE(review): the K&R parameter declarations and several local
   declarations (val, new_body, offset, ok, ...) are elided in this
   listing.  */
3365   rtx old_body = PATTERN (insn);
3366   rtx old_set = single_set (insn);
3369   struct elim_table *ep;
3372     push_obstacks (&reload_obstack, &reload_obstack);
3374   if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3375       && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3377       /* Check for setting an eliminable register.  */
3378       for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3379 	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3381 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3382 	    /* If this is setting the frame pointer register to the
3383 	       hardware frame pointer register and this is an elimination
3384 	       that will be done (tested above), this insn is really
3385 	       adjusting the frame pointer downward to compensate for
3386 	       the adjustment done before a nonlocal goto.  */
3387 	    if (ep->from == FRAME_POINTER_REGNUM
3388 		&& ep->to == HARD_FRAME_POINTER_REGNUM)
3390 		rtx src = SET_SRC (old_set);
3392 		rtx prev_insn, prev_set;
		/* Recognize the three shapes such an adjustment can take:
		   a plain copy of the hard FP, hard FP plus a constant, or
		   a copy of a register set to one of those by the previous
		   insn.  */
3394 		if (src == ep->to_rtx)
3396 		else if (GET_CODE (src) == PLUS
3397 			 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3398 		  offset = INTVAL (XEXP (src, 0)), ok = 1;
3399 		else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3400 			 && (prev_set = single_set (prev_insn)) != 0
3401 			 && rtx_equal_p (SET_DEST (prev_set), src))
3403 		    src = SET_SRC (prev_set);
3404 		    if (src == ep->to_rtx)
3406 		    else if (GET_CODE (src) == PLUS
3407 			     && GET_CODE (XEXP (src, 0)) == CONST_INT
3408 			     && XEXP (src, 1) == ep->to_rtx)
3409 		      offset = INTVAL (XEXP (src, 0)), ok = 1;
3410 		    else if (GET_CODE (src) == PLUS
3411 			     && GET_CODE (XEXP (src, 1)) == CONST_INT
3412 			     && XEXP (src, 0) == ep->to_rtx)
3413 		      offset = INTVAL (XEXP (src, 1)), ok = 1;
3421 		      = plus_constant (ep->to_rtx, offset - ep->offset);
3423 		    /* First see if this insn remains valid when we
3424 		       make the change.  If not, keep the INSN_CODE
3425 		       the same and let reload fit it up.  */
3426 		    validate_change (insn, &SET_SRC (old_set), src, 1);
3427 		    validate_change (insn, &SET_DEST (old_set),
3429 		    if (! apply_change_group ())
3431 			SET_SRC (old_set) = src;
3432 			SET_DEST (old_set) = ep->to_rtx;
3442 	    /* In this case this insn isn't serving a useful purpose.  We
3443 	       will delete it in reload_as_needed once we know that this
3444 	       elimination is, in fact, being done.
3446 	       If REPLACE isn't set, we can't delete this insn, but needn't
3447 	       process it since it won't be used unless something changes.  */
3449 	      delete_dead_insn (insn);
3454   /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3455      in the insn is the negative of the offset in FROM.  Substitute
3456      (set (reg) (reg to)) for the insn and change its code.
3458      We have to do this here, rather than in eliminate_regs, so that we can
3459      change the insn code.  */
3461   if (GET_CODE (SET_SRC (old_set)) == PLUS
3462       && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3463       && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3464     for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3466       if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3467 	  && ep->can_eliminate)
3469 	  /* We must stop at the first elimination that will be used.
3470 	     If this one would replace the PLUS with a REG, do it
3471 	     now.  Otherwise, quit the loop and let eliminate_regs
3472 	     do its normal replacement.  */
3473 	  if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3475 	      /* We assume here that we don't need a PARALLEL of
3476 		 any CLOBBERs for this assignment.  There's not
3477 		 much we can do if we do need it.  */
3478 	      PATTERN (insn) = gen_rtx (SET, VOIDmode,
3479 					SET_DEST (old_set), ep->to_rtx);
3480 	      INSN_CODE (insn) = -1;
3489   old_asm_operands_vec = 0;
3491   /* Replace the body of this insn with a substituted form.  If we changed
3492      something, return non-zero.
3494      If we are replacing a body that was a (set X (plus Y Z)), try to
3495      re-recognize the insn.  We do this in case we had a simple addition
3496      but now can do this as a load-address.  This saves an insn in this
3499   new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX, 0);
3500   if (new_body != old_body)
3502       /* If we aren't replacing things permanently and we changed something,
3503 	 make another copy to ensure that all the RTL is new.  Otherwise
3504 	 things can go wrong if find_reload swaps commutative operands
3505 	 and one is inside RTL that has been copied while the other is not.  */
3507       /* Don't copy an asm_operands because (1) there's no need and (2)
3508 	 copy_rtx can't do it properly when there are multiple outputs.  */
3509       if (! replace && asm_noperands (old_body) < 0)
3510 	new_body = copy_rtx (new_body);
3512       /* If we had a move insn but now we don't, rerecognize it.  This will
3513 	 cause spurious re-recognition if the old move had a PARALLEL since
3514 	 the new one still will, but we can't call single_set without
3515 	 having put NEW_BODY into the insn and the re-recognition won't
3516 	 hurt in this rare case.  */
3518 	  && ((GET_CODE (SET_SRC (old_set)) == REG
3519 	       && (GET_CODE (new_body) != SET
3520 		   || GET_CODE (SET_SRC (new_body)) != REG))
3521 	      /* If this was a load from or store to memory, compare
3522 		 the MEM in recog_operand to the one in the insn.  If they
3523 		 are not equal, then rerecognize the insn.  */
3525 	      && ((GET_CODE (SET_SRC (old_set)) == MEM
3526 		   && SET_SRC (old_set) != recog_operand[1])
3527 		  || (GET_CODE (SET_DEST (old_set)) == MEM
3528 		      && SET_DEST (old_set) != recog_operand[0])))
3529 	      /* If this was an add insn before, rerecognize.  */
3530 	      || GET_CODE (SET_SRC (old_set)) == PLUS))
3532 	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3533 	    /* If recognition fails, store the new body anyway.
3534 	       It's normal to have recognition failures here
3535 	       due to bizarre memory addresses; reloading will fix them.  */
3536 	    PATTERN (insn) = new_body;
3539 	PATTERN (insn) = new_body;
3544   /* Loop through all elimination pairs.  See if any have changed and
3545      recalculate the number not at initial offset.
3547      Compute the maximum offset (minimum offset if the stack does not
3548      grow downward) for each elimination pair.
3550      We also detect cases where register elimination cannot be done,
3551      namely, if a register would be both changed and referenced outside a MEM
3552      in the resulting insn since such an insn is often undefined and, even if
3553      not, we cannot know what meaning will be given to it.  Note that it is
3554      valid to have a register used in an address in an insn that changes it
3555      (presumably with a pre- or post-increment or decrement).
3557      If anything changes, return nonzero.  */
3559   num_not_at_initial_offset = 0;
3560   for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3562       if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3563 	ep->can_eliminate = 0;
3565       ep->ref_outside_mem = 0;
3567       if (ep->previous_offset != ep->offset)
3570       ep->previous_offset = ep->offset;
3571       if (ep->can_eliminate && ep->offset != ep->initial_offset)
3572 	num_not_at_initial_offset++;
3574 #ifdef STACK_GROWS_DOWNWARD
3575       ep->max_offset = MAX (ep->max_offset, ep->offset);
3577       ep->max_offset = MIN (ep->max_offset, ep->offset);
3582   /* If we changed something, perform elimination in REG_NOTES.  This is
3583      needed even when REPLACE is zero because a REG_DEAD note might refer
3584      to a register that we eliminate and could cause a different number
3585      of spill registers to be needed in the final reload pass than in
3587   if (val && REG_NOTES (insn) != 0)
3588     REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn), 0);
3596 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3597    replacement we currently believe is valid, mark it as not eliminable if X
3598    modifies DEST in any way other than by adding a constant integer to it.
3600    If DEST is the frame pointer, we do nothing because we assume that
3601    all assignments to the hard frame pointer are nonlocal gotos and are being
3602    done at a time when they are valid and do not disturb anything else.
3603    Some machines want to eliminate a fake argument pointer with either the
3604    frame or stack pointer.  Assignments to the hard frame pointer must not
3605    prevent this elimination.
3607    Called via note_stores from reload before starting its passes to scan
3608    the insns of the function.  */
3611 mark_not_eliminable (dest, x)
/* NOTE(review): the K&R parameter declarations and the declaration of
   the loop index I are elided in this listing.  */
3617   /* A SUBREG of a hard register here is just changing its mode.  We should
3618      not see a SUBREG of an eliminable hard register, but check just in
3620   if (GET_CODE (dest) == SUBREG)
3621     dest = SUBREG_REG (dest);
  /* Stores to the hard frame pointer are always allowed; see the
     function comment above.  */
3623   if (dest == hard_frame_pointer_rtx)
3626   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
  /* Disable any elimination whose replacement register is DEST, unless
     the store is the harmless form DEST = DEST + const_int.  */
3627     if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3628 	&& (GET_CODE (x) != SET
3629 	    || GET_CODE (SET_SRC (x)) != PLUS
3630 	    || XEXP (SET_SRC (x), 0) != dest
3631 	    || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3633 	reg_eliminate[i].can_eliminate_previous
3634 	  = reg_eliminate[i].can_eliminate = 0;
3639 /* Kick all pseudos out of hard register REGNO.
3640    If GLOBAL is nonzero, try to find someplace else to put them.
3641    If DUMPFILE is nonzero, log actions taken on that file.
3643    If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3644    because we found we can't eliminate some register.  In the case, no pseudos
3645    are allowed to be in the register, even if they are only in a block that
3646    doesn't require spill registers, unlike the case when we are spilling this
3647    hard reg to produce another spill register.
3649    Return nonzero if any pseudos needed to be kicked out.  */
3652 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
/* NOTE(review): the K&R parameter declarations and some local
   declarations (i, p, ...) are elided in this listing.  */
3658   enum reg_class class = REGNO_REG_CLASS (regno);
3659   int something_changed = 0;
  /* Never consider this register for reloads again.  */
3662   SET_HARD_REG_BIT (forbidden_regs, regno);
3665   regs_ever_live[regno] = 1;
3667   /* Spill every pseudo reg that was allocated to this reg
3668      or to something that overlaps this reg.  */
3670   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3671     if (reg_renumber[i] >= 0
3672 	&& reg_renumber[i] <= regno
3674 	    + HARD_REGNO_NREGS (reg_renumber[i],
3675 				PSEUDO_REGNO_MODE (i))
3678 	/* If this register belongs solely to a basic block which needed no
3679 	   spilling of any class that this register is contained in,
3680 	   leave it be, unless we are spilling this register because
3681 	   it was a hard register that can't be eliminated.   */
3683 	if (! cant_eliminate
3684 	    && basic_block_needs[0]
3685 	    && REG_BASIC_BLOCK (i) >= 0
3686 	    && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
  /* Also check every superclass of CLASS for needs in this block.  */
3690 	    for (p = reg_class_superclasses[(int) class];
3691 		 *p != LIM_REG_CLASSES; p++)
3692 	      if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
3695 	    if (*p == LIM_REG_CLASSES)
3699 	/* Mark it as no longer having a hard register home.  */
3700 	reg_renumber[i] = -1;
3701 	/* We will need to scan everything again.  */
3702 	something_changed = 1;
3704 	  retry_global_alloc (i, forbidden_regs);
3706 	alter_reg (i, regno);
3709 	    if (reg_renumber[i] == -1)
3710 	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3712 	      fprintf (dumpfile, " Register %d now in %d.\n\n",
3713 		       i, reg_renumber[i]);
  /* Likewise discard any SCRATCH that was assigned this hard reg, unless
     its block needed no spilling of this class (or a superclass).  */
3716   for (i = 0; i < scratch_list_length; i++)
3718       if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3720 	  if (! cant_eliminate && basic_block_needs[0]
3721 	      && ! basic_block_needs[(int) class][scratch_block[i]])
3725 	      for (p = reg_class_superclasses[(int) class];
3726 		   *p != LIM_REG_CLASSES; p++)
3727 		if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3730 	      if (*p == LIM_REG_CLASSES)
3733 	  PUT_CODE (scratch_list[i], SCRATCH);
3734 	  scratch_list[i] = 0;
3735 	  something_changed = 1;
3740   return something_changed;
3743 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3744    Also mark any hard registers used to store user variables as
3745    forbidden from being used for spill registers.  */
3748 scan_paradoxical_subregs (x)
/* NOTE(review): the K&R parameter declaration of X and several local
   declarations (i, j, fmt) are elided in this listing.  */
3753   register enum rtx_code code = GET_CODE (x);
3758 #ifdef SMALL_REGISTER_CLASSES
  /* A hard register holding a user variable must not become a spill reg.  */
3759       if (SMALL_REGISTER_CLASSES
3760 	  && REGNO (x) < FIRST_PSEUDO_REGISTER
3761 	  && REG_USERVAR_P (x))
3762 	SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
  /* A paradoxical SUBREG references more bytes than the inner register
     has; remember the widest such reference for each pseudo.  */
3778       if (GET_CODE (SUBREG_REG (x)) == REG
3779 	  && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3780 	reg_max_ref_width[REGNO (SUBREG_REG (x))]
3781 	  = GET_MODE_SIZE (GET_MODE (x));
  /* Recurse over every sub-expression and vector element of X.  */
3785   fmt = GET_RTX_FORMAT (code);
3786   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3789 	scan_paradoxical_subregs (XEXP (x, i));
3790       else if (fmt[i] == 'E')
3793 	  for (j = XVECLEN (x, i) - 1; j >=0; j--)
3794 	    scan_paradoxical_subregs (XVECEXP (x, i, j));
/* qsort comparison function for the hard_reg_n_uses table: sorts hard
   registers by increasing use count; ties are broken by register number
   so the sort result is deterministic.  */
3800 hard_reg_use_compare (p1p, p2p)
3801      const GENERIC_PTR p1p;
3802      const GENERIC_PTR p2p;
3804   struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3805 			 *p2 = (struct hard_reg_n_uses *)p2p;
3806   int tem = p1->uses - p2->uses;
3807   if (tem != 0) return tem;
3808   /* If regs are equally good, sort by regno,
3809      so that the results of qsort leave nothing to chance.  */
3810   return p1->regno - p2->regno;
3813 /* Choose the order to consider regs for use as reload registers
3814    based on how much trouble would be caused by spilling one.
3815    Store them in order of decreasing preference in potential_reload_regs.  */
3818 order_regs_for_reload (global)
/* NOTE(review): the K&R parameter declaration of GLOBAL and locals
   (i, o, large, ...) are elided in this listing.  */
3825   struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3827   CLEAR_HARD_REG_SET (bad_spill_regs);
3829   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3830     potential_reload_regs[i] = -1;
3832   /* Count number of uses of each hard reg by pseudo regs allocated to it
3833      and then order them by decreasing use.  */
3835   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3837       hard_reg_n_uses[i].uses = 0;
3838       hard_reg_n_uses[i].regno = i;
3841   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3843       int regno = reg_renumber[i];
3846 	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3849 	  /* If allocated by local-alloc, show more uses since
3850 	     we're not going to be able to reallocate it, but
3851 	     we might if allocated by global alloc.  */
3852 	  if (global && reg_allocno[i] < 0)
3853 	    hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;
3855 	    hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
  /* LARGE accumulates the total reference count; it is used below as a
     penalty larger than any single register's use count.  */
3858       large += REG_N_REFS (i);
3861   /* Now fixed registers (which cannot safely be used for reloading)
3862      get a very high use count so they will be considered least desirable.
3863      Registers used explicitly in the rtl code are almost as bad.  */
3865   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3869 	hard_reg_n_uses[i].uses += 2 * large + 2;
3870 	SET_HARD_REG_BIT (bad_spill_regs, i);
3872     else if (regs_explicitly_used[i])
3874 	hard_reg_n_uses[i].uses += large + 1;
3875 	/* ??? We are doing this here because of the potential that
3876 	   bad code may be generated if a register explicitly used in
3877 	   an insn was used as a spill register for that insn.  But
3878 	   not using these are spill registers may lose on some machine.
3879 	   We'll have to see how this works out.  */
3880 #ifdef SMALL_REGISTER_CLASSES
3881 	if (! SMALL_REGISTER_CLASSES)
3883 	  SET_HARD_REG_BIT (bad_spill_regs, i);
  /* The hard frame pointer is never a good spill register.  */
3886   hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3887   SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3889 #ifdef ELIMINABLE_REGS
3890   /* If registers other than the frame pointer are eliminable, mark them as
3892   for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3894       hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3895       SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3899   /* Prefer registers not so far used, for use in temporary loading.
3900      Among them, if REG_ALLOC_ORDER is defined, use that order.
3901      Otherwise, prefer registers not preserved by calls.  */
3903 #ifdef REG_ALLOC_ORDER
3904   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3906       int regno = reg_alloc_order[i];
3908       if (hard_reg_n_uses[regno].uses == 0)
3909 	potential_reload_regs[o++] = regno;
3912   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3914       if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3915 	potential_reload_regs[o++] = i;
3917   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3919       if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3920 	potential_reload_regs[o++] = i;
  /* Sort the table by ascending use count (ties by regno).  */
3924   qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3925 	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3927   /* Now add the regs that are already used,
3928      preferring those used less often.  The fixed and otherwise forbidden
3929      registers will be at the end of this list.  */
3931   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3932     if (hard_reg_n_uses[i].uses != 0)
3933       potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3936 /* Used in reload_as_needed to sort the spilled regs.  */
/* qsort comparison of two spill register numbers (stored as shorts).
   NOTE(review): the tail of the body, presumably the return of the
   comparison result, is elided in this listing.  */
3939 compare_spill_regs (r1p, r2p)
3940      const GENERIC_PTR r1p;
3941      const GENERIC_PTR r2p;
3943   short r1 = *(short *)r1p, r2 = *(short *)r2p;
3947 /* Reload pseudo-registers into hard regs around each insn as needed.
3948 Additional register load insns are output before the insn that needs it
3949 and perhaps store insns after insns that modify the reloaded pseudo reg.
3951 reg_last_reload_reg and reg_reloaded_contents keep track of
3952 which registers are already available in reload registers.
3953 We update these for the reloads that we perform,
3954 as the insns are scanned. */
3957 reload_as_needed (first, live_known)
3967 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3968 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3969 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3970 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3971 reg_has_output_reload = (char *) alloca (max_regno);
3972 for (i = 0; i < n_spills; i++)
3974 reg_reloaded_contents[i] = -1;
3975 reg_reloaded_insn[i] = 0;
3978 /* Reset all offsets on eliminable registers to their initial values. */
3979 #ifdef ELIMINABLE_REGS
3980 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3982 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3983 reg_eliminate[i].initial_offset);
3984 reg_eliminate[i].previous_offset
3985 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3988 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3989 reg_eliminate[0].previous_offset
3990 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3993 num_not_at_initial_offset = 0;
3995 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3996 pack registers with group needs. */
3999 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
4000 for (i = 0; i < n_spills; i++)
4001 spill_reg_order[spill_regs[i]] = i;
4004 for (insn = first; insn;)
4006 register rtx next = NEXT_INSN (insn);
4008 /* Notice when we move to a new basic block. */
4009 if (live_known && this_block + 1 < n_basic_blocks
4010 && insn == basic_block_head[this_block+1])
4013 /* If we pass a label, copy the offsets from the label information
4014 into the current offsets of each elimination. */
4015 if (GET_CODE (insn) == CODE_LABEL)
4017 num_not_at_initial_offset = 0;
4018 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4020 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
4021 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
4022 if (reg_eliminate[i].can_eliminate
4023 && (reg_eliminate[i].offset
4024 != reg_eliminate[i].initial_offset))
4025 num_not_at_initial_offset++;
4029 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4031 rtx avoid_return_reg = 0;
4032 rtx oldpat = PATTERN (insn);
4034 #ifdef SMALL_REGISTER_CLASSES
4035 /* Set avoid_return_reg if this is an insn
4036 that might use the value of a function call. */
4037 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
4039 if (GET_CODE (PATTERN (insn)) == SET)
4040 after_call = SET_DEST (PATTERN (insn));
4041 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4042 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4043 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4047 else if (SMALL_REGISTER_CLASSES
4049 && !(GET_CODE (PATTERN (insn)) == SET
4050 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
4052 if (reg_referenced_p (after_call, PATTERN (insn)))
4053 avoid_return_reg = after_call;
4056 #endif /* SMALL_REGISTER_CLASSES */
4058 /* If this is a USE and CLOBBER of a MEM, ensure that any
4059 references to eliminable registers have been removed. */
4061 if ((GET_CODE (PATTERN (insn)) == USE
4062 || GET_CODE (PATTERN (insn)) == CLOBBER)
4063 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4064 XEXP (XEXP (PATTERN (insn), 0), 0)
4065 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4066 GET_MODE (XEXP (PATTERN (insn), 0)),
4069 /* If we need to do register elimination processing, do so.
4070 This might delete the insn, in which case we are done. */
4071 if (num_eliminable && GET_MODE (insn) == QImode)
4073 eliminate_regs_in_insn (insn, 1);
4074 if (GET_CODE (insn) == NOTE)
4081 if (GET_MODE (insn) == VOIDmode)
4083 /* First find the pseudo regs that must be reloaded for this insn.
4084 This info is returned in the tables reload_... (see reload.h).
4085 Also modify the body of INSN by substituting RELOAD
4086 rtx's for those pseudo regs. */
4089 bzero (reg_has_output_reload, max_regno);
4090 CLEAR_HARD_REG_SET (reg_is_output_reload);
4092 find_reloads (insn, 1, spill_indirect_levels, live_known,
4098 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4102 /* If this block has not had spilling done for a
4103 particular class and we have any non-optionals that need a
4104 spill reg in that class, abort. */
4106 for (class = 0; class < N_REG_CLASSES; class++)
4107 if (basic_block_needs[class] != 0
4108 && basic_block_needs[class][this_block] == 0)
4109 for (i = 0; i < n_reloads; i++)
4110 if (class == (int) reload_reg_class[i]
4111 && reload_reg_rtx[i] == 0
4112 && ! reload_optional[i]
4113 && (reload_in[i] != 0 || reload_out[i] != 0
4114 || reload_secondary_p[i] != 0))
4115 fatal_insn ("Non-optional registers need a spill register", insn);
4117 /* Now compute which reload regs to reload them into. Perhaps
4118 reusing reload regs from previous insns, or else output
4119 load insns to reload them. Maybe output store insns too.
4120 Record the choices of reload reg in reload_reg_rtx. */
4121 choose_reload_regs (insn, avoid_return_reg);
4123 #ifdef SMALL_REGISTER_CLASSES
4124 /* Merge any reloads that we didn't combine for fear of
4125 increasing the number of spill registers needed but now
4126 discover can be safely merged. */
4127 if (SMALL_REGISTER_CLASSES)
4128 merge_assigned_reloads (insn);
4131 /* Generate the insns to reload operands into or out of
4132 their reload regs. */
4133 emit_reload_insns (insn);
4135 /* Substitute the chosen reload regs from reload_reg_rtx
4136 into the insn's body (or perhaps into the bodies of other
4137 load and store insn that we just made for reloading
4138 and that we moved the structure into). */
4141 /* If this was an ASM, make sure that all the reload insns
4142 we have generated are valid. If not, give an error
4145 if (asm_noperands (PATTERN (insn)) >= 0)
4146 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4147 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4148 && (recog_memoized (p) < 0
4149 || (insn_extract (p),
4150 ! constrain_operands (INSN_CODE (p), 1))))
4152 error_for_asm (insn,
4153 "`asm' operand requires impossible reload");
4155 NOTE_SOURCE_FILE (p) = 0;
4156 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4159 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4160 is no longer validly lying around to save a future reload.
4161 Note that this does not detect pseudos that were reloaded
4162 for this insn in order to be stored in
4163 (obeying register constraints). That is correct; such reload
4164 registers ARE still valid. */
4165 note_stores (oldpat, forget_old_reloads_1);
4167 /* There may have been CLOBBER insns placed after INSN. So scan
4168 between INSN and NEXT and use them to forget old reloads. */
4169 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4170 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4171 note_stores (PATTERN (x), forget_old_reloads_1);
4174 /* Likewise for regs altered by auto-increment in this insn.
4175 But note that the reg-notes are not changed by reloading:
4176 they still contain the pseudo-regs, not the spill regs. */
4177 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4178 if (REG_NOTE_KIND (x) == REG_INC)
4180 /* See if this pseudo reg was reloaded in this insn.
4181 If so, its last-reload info is still valid
4182 because it is based on this insn's reload. */
4183 for (i = 0; i < n_reloads; i++)
4184 if (reload_out[i] == XEXP (x, 0))
4188 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4192 /* A reload reg's contents are unknown after a label. */
4193 if (GET_CODE (insn) == CODE_LABEL)
4194 for (i = 0; i < n_spills; i++)
4196 reg_reloaded_contents[i] = -1;
4197 reg_reloaded_insn[i] = 0;
4200 /* Don't assume a reload reg is still good after a call insn
4201 if it is a call-used reg. */
4202 else if (GET_CODE (insn) == CALL_INSN)
4203 for (i = 0; i < n_spills; i++)
4204 if (call_used_regs[spill_regs[i]])
4206 reg_reloaded_contents[i] = -1;
4207 reg_reloaded_insn[i] = 0;
4210 /* In case registers overlap, allow certain insns to invalidate
4211 particular hard registers. */
4213 #ifdef INSN_CLOBBERS_REGNO_P
4214 for (i = 0 ; i < n_spills ; i++)
4215 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4217 reg_reloaded_contents[i] = -1;
4218 reg_reloaded_insn[i] = 0;
4230 /* Discard all record of any value reloaded from X,
4231 or reloaded in X from someplace else;
4232 unless X is an output reload reg of the current insn.
4234 X may be a hard reg (the reload reg)
4235 or it may be a pseudo reg that was reloaded from.
   Invoked through note_stores for every store in the current insn,
   and also directly for REG_INC notes; IGNORED receives note_stores'
   second argument and is not used here.  */
4238 forget_old_reloads_1 (x, ignored)
   /* Strip SUBREGs so X is the underlying REG, accumulating the word
      offset so the proper hard registers are invalidated below.  */
4246 /* note_stores does give us subregs of hard regs. */
4247 while (GET_CODE (x) == SUBREG)
4249 offset += SUBREG_WORD (x);
   /* Stores into anything other than a register are of no interest.  */
4253 if (GET_CODE (x) != REG)
4256 regno = REGNO (x) + offset;
4258 if (regno >= FIRST_PSEUDO_REGISTER)
   /* Hard register: every spill reg it overlaps loses its recorded
      reload contents.  */
4263 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4264 /* Storing into a spilled-reg invalidates its contents.
4265 This can happen if a block-local pseudo is allocated to that reg
4266 and it wasn't spilled because this block's total need is 0.
4267 Then some insn might have an optional reload and use this reg. */
4268 for (i = 0; i < nr; i++)
4269 if (spill_reg_order[regno + i] >= 0
4270 /* But don't do this if the reg actually serves as an output
4271 reload reg in the current instruction. */
4273 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4275 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4276 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4280 /* Since value of X has changed,
4281 forget any value previously copied from it. */
4284 /* But don't forget a copy if this is the output reload
4285 that establishes the copy's validity. */
4286 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4287 reg_last_reload_reg[regno + nr] = 0;
4290 /* For each reload, the mode of the reload register. */
4291 static enum machine_mode reload_mode[MAX_RELOADS];
4293 /* For each reload, the largest number of registers it will require. */
4294 static int reload_nregs[MAX_RELOADS];
4296 /* Comparison function for qsort to decide which of two reloads
4297 should be handled first. *P1 and *P2 are the reload numbers.
   Returns negative if reload *R1P should be allocated before *R2P and
   positive for the reverse, per qsort's comparison contract.  */
4300 reload_reg_class_lower (r1p, r2p)
4301 const GENERIC_PTR r1p;
4302 const GENERIC_PTR r2p;
4304 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
   /* Each criterion below computes a signed difference T; the first
      nonzero T decides the ordering.  */
4307 /* Consider required reloads before optional ones. */
4308 t = reload_optional[r1] - reload_optional[r2];
4312 /* Count all solitary classes before non-solitary ones. */
4313 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4314 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4318 /* Aside from solitaires, consider all multi-reg groups first. */
4319 t = reload_nregs[r2] - reload_nregs[r1];
4323 /* Consider reloads in order of increasing reg-class number. */
4324 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4328 /* If reloads are equally urgent, sort by reload number,
4329 so that the results of qsort leave nothing to chance. */
4333 /* The following HARD_REG_SETs indicate when each hard register is
4334 used for a reload of various parts of the current insn. */
4336 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4337 static HARD_REG_SET reload_reg_used;
4338 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4339 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4340 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4341 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4342 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4343 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4344 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4345 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4346 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4347 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4348 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4349 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4350 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4351 static HARD_REG_SET reload_reg_used_in_op_addr;
4352 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4353 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4354 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4355 static HARD_REG_SET reload_reg_used_in_insn;
4356 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4357 static HARD_REG_SET reload_reg_used_in_other_addr;
4359 /* If reg is in use as a reload reg for any sort of reload. */
4360 static HARD_REG_SET reload_reg_used_at_all;
4362 /* If reg is in use as an inherited reload. We just mark the first register
4364 static HARD_REG_SET reload_reg_used_for_inherit;
4366 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4367 TYPE. MODE is used to indicate how many consecutive regs are
   actually marked: all hard regs spanned by REGNO in MODE.  */
4371 mark_reload_reg_in_use (regno, opnum, type, mode)
4374 enum reload_type type;
4375 enum machine_mode mode;
4377 int nregs = HARD_REGNO_NREGS (regno, mode);
   /* For each hard reg in the span, set the bit in the HARD_REG_SET
      corresponding to TYPE; OPNUM selects the per-operand set where
      one exists.  */
4380 for (i = regno; i < nregs + regno; i++)
4385 SET_HARD_REG_BIT (reload_reg_used, i);
4388 case RELOAD_FOR_INPUT_ADDRESS:
4389 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4392 case RELOAD_FOR_INPADDR_ADDRESS:
4393 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4396 case RELOAD_FOR_OUTPUT_ADDRESS:
4397 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4400 case RELOAD_FOR_OUTADDR_ADDRESS:
4401 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4404 case RELOAD_FOR_OPERAND_ADDRESS:
4405 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4408 case RELOAD_FOR_OPADDR_ADDR:
4409 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4412 case RELOAD_FOR_OTHER_ADDRESS:
4413 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4416 case RELOAD_FOR_INPUT:
4417 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4420 case RELOAD_FOR_OUTPUT:
4421 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4424 case RELOAD_FOR_INSN:
4425 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
   /* Regardless of TYPE, record that the reg is in use for some
      reload of this insn.  */
4429 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4433 /* Similarly, but show REGNO is no longer in use for a reload.
   Mirrors mark_reload_reg_in_use: clears the TYPE-specific bit for
   every hard reg spanned by REGNO in MODE.  Note it does not clear
   reload_reg_used_at_all here -- presumably that set is only reset
   wholesale elsewhere; TODO confirm against the callers.  */
4436 clear_reload_reg_in_use (regno, opnum, type, mode)
4439 enum reload_type type;
4440 enum machine_mode mode;
4442 int nregs = HARD_REGNO_NREGS (regno, mode);
4445 for (i = regno; i < nregs + regno; i++)
4450 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4453 case RELOAD_FOR_INPUT_ADDRESS:
4454 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4457 case RELOAD_FOR_INPADDR_ADDRESS:
4458 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4461 case RELOAD_FOR_OUTPUT_ADDRESS:
4462 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4465 case RELOAD_FOR_OUTADDR_ADDRESS:
4466 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4469 case RELOAD_FOR_OPERAND_ADDRESS:
4470 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4473 case RELOAD_FOR_OPADDR_ADDR:
4474 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4477 case RELOAD_FOR_OTHER_ADDRESS:
4478 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4481 case RELOAD_FOR_INPUT:
4482 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4485 case RELOAD_FOR_OUTPUT:
4486 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4489 case RELOAD_FOR_INSN:
4490 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4496 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4497 specified by OPNUM and TYPE.
   Each TYPE is checked only against the reload uses that can overlap
   its lifetime within the insn; uses known to occur strictly before
   or after it are deliberately ignored.  */
4500 reload_reg_free_p (regno, opnum, type)
4503 enum reload_type type;
4507 /* In use for a RELOAD_OTHER means it's not available for anything. */
4508 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
   /* RELOAD_OTHER spans the whole insn, so it conflicts with every
      other recorded use of the reg.  */
4514 /* In use for anything means we can't use it for RELOAD_OTHER. */
4515 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4516 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4517 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4520 for (i = 0; i < reload_n_operands; i++)
4521 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4522 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4523 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4524 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4525 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4526 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4531 case RELOAD_FOR_INPUT:
4532 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4533 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4536 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4539 /* If it is used for some other input, can't use it. */
4540 for (i = 0; i < reload_n_operands; i++)
4541 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4544 /* If it is used in a later operand's address, can't use it. */
4545 for (i = opnum + 1; i < reload_n_operands; i++)
4546 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4547 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4552 case RELOAD_FOR_INPUT_ADDRESS:
4553 /* Can't use a register if it is used for an input address for this
4554 operand or used as an input in an earlier one. */
4555 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4556 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4559 for (i = 0; i < opnum; i++)
4560 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4565 case RELOAD_FOR_INPADDR_ADDRESS:
4566 /* Can't use a register if it is used for an input address
4567 address for this operand or used as an input in an earlier
4569 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4572 for (i = 0; i < opnum; i++)
4573 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4578 case RELOAD_FOR_OUTPUT_ADDRESS:
4579 /* Can't use a register if it is used for an output address for this
4580 operand or used as an output in this or a later operand. */
4581 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4584 for (i = opnum; i < reload_n_operands; i++)
4585 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4590 case RELOAD_FOR_OUTADDR_ADDRESS:
4591 /* Can't use a register if it is used for an output address
4592 address for this operand or used as an output in this or a
4594 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4597 for (i = opnum; i < reload_n_operands; i++)
4598 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4603 case RELOAD_FOR_OPERAND_ADDRESS:
   /* Conflicts with any input, the insn body, or another operand
      address reload.  */
4604 for (i = 0; i < reload_n_operands; i++)
4605 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4608 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4609 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4611 case RELOAD_FOR_OPADDR_ADDR:
4612 for (i = 0; i < reload_n_operands; i++)
4613 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4616 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4618 case RELOAD_FOR_OUTPUT:
4619 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4620 outputs, or an operand address for this or an earlier output. */
4621 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4624 for (i = 0; i < reload_n_operands; i++)
4625 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4628 for (i = 0; i <= opnum; i++)
4629 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4630 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4635 case RELOAD_FOR_INSN:
4636 for (i = 0; i < reload_n_operands; i++)
4637 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4638 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4641 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4642 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4644 case RELOAD_FOR_OTHER_ADDRESS:
   /* Only conflicts with another RELOAD_FOR_OTHER_ADDRESS use.  */
4645 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4650 /* Return 1 if the value in reload reg REGNO, as used by a reload
4651 needed for the part of the insn specified by OPNUM and TYPE,
4652 is not in use for a reload in any prior part of the insn.
4654 We can assume that the reload reg was already tested for availability
4655 at the time it is needed, and we should not check this again,
4656 in case the reg has already been marked in use. */
4659 reload_reg_free_before_p (regno, opnum, type)
4662 enum reload_type type;
4668 case RELOAD_FOR_OTHER_ADDRESS:
4669 /* These always come first. */
4673 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4675 /* If this use is for part of the insn,
4676 check the reg is not in use for any prior part. It is tempting
4677 to try to do this by falling through from objects that occur
4678 later in the insn to ones that occur earlier, but that will not
4679 correctly take into account the fact that here we MUST ignore
4680 things that would prevent the register from being allocated in
4681 the first place, since we know that it was allocated. */
4683 case RELOAD_FOR_OUTPUT_ADDRESS:
4684 case RELOAD_FOR_OUTADDR_ADDRESS:
4685 /* Earlier reloads are for earlier outputs or their addresses,
4686 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4687 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4689 for (i = 0; i < opnum; i++)
4690 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4691 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4692 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4695 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4698 for (i = 0; i < reload_n_operands; i++)
4699 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4700 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4701 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4704 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4705 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4706 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4708 case RELOAD_FOR_OUTPUT:
4709 /* This can't be used in the output address for this operand and
4710 anything that can't be used for it, except that we've already
4711 tested for RELOAD_FOR_INSN objects. */
4713 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4714 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4717 for (i = 0; i < opnum; i++)
4718 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4719 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4720 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4723 for (i = 0; i < reload_n_operands; i++)
4724 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4725 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4726 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4727 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4730 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4732 case RELOAD_FOR_OPERAND_ADDRESS:
4733 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4734 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4737 /* ... fall through ... */
4739 case RELOAD_FOR_OPADDR_ADDR:
4740 case RELOAD_FOR_INSN:
4741 /* These can't conflict with inputs, or each other, so all we have to
4742 test is input addresses and the addresses of OTHER items. */
4744 for (i = 0; i < reload_n_operands; i++)
4745 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4746 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4749 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4751 case RELOAD_FOR_INPUT:
4752 /* The only things earlier are the address for this and
4753 earlier inputs, other inputs (which we know we don't conflict
4754 with), and addresses of RELOAD_OTHER objects. */
4756 for (i = 0; i <= opnum; i++)
4757 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4758 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4761 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4763 case RELOAD_FOR_INPUT_ADDRESS:
4764 case RELOAD_FOR_INPADDR_ADDRESS:
4765 /* Similarly, all we have to check is for use in earlier inputs'
4767 for (i = 0; i < opnum; i++)
4768 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4769 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4772 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4777 /* Return 1 if the value in reload reg REGNO, as used by a reload
4778 needed for the part of the insn specified by OPNUM and TYPE,
4779 is still available in REGNO at the end of the insn.
4781 We can assume that the reload reg was already tested for availability
4782 at the time it is needed, and we should not check this again,
4783 in case the reg has already been marked in use.
   This is the mirror image of reload_reg_free_before_p: each TYPE
   checks only the reload uses that come LATER in the insn, since any
   of those would clobber the value before the insn ends.  */
4786 reload_reg_reaches_end_p (regno, opnum, type)
4789 enum reload_type type;
4796 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4797 its value must reach the end. */
4800 /* If this use is for part of the insn,
4801 its value reaches if no subsequent part uses the same register.
4802 Just like the above function, don't try to do this with lots
4805 case RELOAD_FOR_OTHER_ADDRESS:
4806 /* Here we check for everything else, since these don't conflict
4807 with anything else and everything comes later. */
4809 for (i = 0; i < reload_n_operands; i++)
4810 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4811 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4812 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4813 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4814 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4815 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4818 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4819 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4820 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4822 case RELOAD_FOR_INPUT_ADDRESS:
4823 case RELOAD_FOR_INPADDR_ADDRESS:
4824 /* Similar, except that we check only for this and subsequent inputs
4825 and the address of only subsequent inputs and we do not need
4826 to check for RELOAD_OTHER objects since they are known not to
4829 for (i = opnum; i < reload_n_operands; i++)
4830 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4833 for (i = opnum + 1; i < reload_n_operands; i++)
4834 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4835 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4838 for (i = 0; i < reload_n_operands; i++)
4839 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4840 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4841 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4844 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4847 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4848 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4850 case RELOAD_FOR_INPUT:
4851 /* Similar to input address, except we start at the next operand for
4852 both input and input address and we do not check for
4853 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4856 for (i = opnum + 1; i < reload_n_operands; i++)
4857 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4858 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4859 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4862 /* ... fall through ... */
4864 case RELOAD_FOR_OPERAND_ADDRESS:
4865 /* Check outputs and their addresses. */
4867 for (i = 0; i < reload_n_operands; i++)
4868 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4869 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4870 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4875 case RELOAD_FOR_OPADDR_ADDR:
4876 for (i = 0; i < reload_n_operands; i++)
4877 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4878 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4879 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4882 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4883 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4885 case RELOAD_FOR_INSN:
4886 /* These conflict with other outputs with RELOAD_OTHER. So
4887 we need only check for output addresses. */
4891 /* ... fall through ... */
4893 case RELOAD_FOR_OUTPUT:
4894 case RELOAD_FOR_OUTPUT_ADDRESS:
4895 case RELOAD_FOR_OUTADDR_ADDRESS:
4896 /* We already know these can't conflict with a later output. So the
4897 only thing to check are later output addresses. */
4898 for (i = opnum + 1; i < reload_n_operands; i++)
4899 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4900 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4909 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4912 This function uses the same algorithm as reload_reg_free_p above.
   The pairwise rules below must be kept in sync with that function's
   per-TYPE conflict checks.  */
4915 reloads_conflict (r1, r2)
4918 enum reload_type r1_type = reload_when_needed[r1];
4919 enum reload_type r2_type = reload_when_needed[r2];
4920 int r1_opnum = reload_opnum[r1];
4921 int r2_opnum = reload_opnum[r2];
4923 /* RELOAD_OTHER conflicts with everything. */
4924 if (r2_type == RELOAD_OTHER)
4927 /* Otherwise, check conflicts differently for each type. */
4931 case RELOAD_FOR_INPUT:
4932 return (r2_type == RELOAD_FOR_INSN
4933 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4934 || r2_type == RELOAD_FOR_OPADDR_ADDR
4935 || r2_type == RELOAD_FOR_INPUT
4936 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4937 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4938 && r2_opnum > r1_opnum));
4940 case RELOAD_FOR_INPUT_ADDRESS:
4941 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4942 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4944 case RELOAD_FOR_INPADDR_ADDRESS:
4945 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4946 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4948 case RELOAD_FOR_OUTPUT_ADDRESS:
4949 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4950 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4952 case RELOAD_FOR_OUTADDR_ADDRESS:
4953 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4954 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4956 case RELOAD_FOR_OPERAND_ADDRESS:
4957 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4958 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4960 case RELOAD_FOR_OPADDR_ADDR:
4961 return (r2_type == RELOAD_FOR_INPUT
4962 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4964 case RELOAD_FOR_OUTPUT:
4965 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4966 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4967 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4968 && r2_opnum >= r1_opnum));
4970 case RELOAD_FOR_INSN:
4971 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4972 || r2_type == RELOAD_FOR_INSN
4973 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4975 case RELOAD_FOR_OTHER_ADDRESS:
4976 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4986 /* Vector of reload-numbers showing the order in which the reloads should
4988 short reload_order[MAX_RELOADS];
4990 /* Indexed by reload number, 1 if incoming value
4991 inherited from previous insns. */
4992 char reload_inherited[MAX_RELOADS];
4994 /* For an inherited reload, this is the insn the reload was inherited from,
4995 if we know it. Otherwise, this is 0. */
4996 rtx reload_inheritance_insn[MAX_RELOADS];
4998 /* If non-zero, this is a place to get the value of the reload,
4999 rather than using reload_in. */
5000 rtx reload_override_in[MAX_RELOADS];
5002 /* For each reload, the index in spill_regs of the spill register used,
5003 or -1 if we did not need one of the spill registers for this reload. */
5004 int reload_spill_index[MAX_RELOADS];
5006 /* Find a spill register to use as a reload register for reload R.
5007 LAST_RELOAD is non-zero if this is the last reload for the insn being
5010 Set reload_reg_rtx[R] to the register allocated.
5012 If NOERROR is nonzero, we return 1 if successful,
5013 or 0 if we couldn't find a spill reg and we didn't change anything. */
5016 allocate_reload_reg (r, insn, last_reload, noerror)
5028 /* If we put this reload ahead, thinking it is a group,
5029 then insist on finding a group. Otherwise we can grab a
5030 reg that some other reload needs.
5031 (That can happen when we have a 68000 DATA_OR_FP_REG
5032 which is a group of data regs or one fp reg.)
5033 We need not be so restrictive if there are no more reloads
5036 ??? Really it would be nicer to have smarter handling
5037 for that kind of reg class, where a problem like this is normal.
5038 Perhaps those classes should be avoided for reloading
5039 by use of more alternatives. */
5041 int force_group = reload_nregs[r] > 1 && ! last_reload;
5043 /* If we want a single register and haven't yet found one,
5044 take any reg in the right class and not in use.
5045 If we want a consecutive group, here is where we look for it.
5047 We use two passes so we can first look for reload regs to
5048 reuse, which are already in use for other reloads in this insn,
5049 and only then use additional registers.
5050 I think that maximizing reuse is needed to make sure we don't
5051 run out of reload regs. Suppose we have three reloads, and
5052 reloads A and B can share regs. These need two regs.
5053 Suppose A and B are given different regs.
5054 That leaves none for C. */
5055 for (pass = 0; pass < 2; pass++)
5057 /* I is the index in spill_regs.
5058 We advance it round-robin between insns to use all spill regs
5059 equally, so that inherited reloads have a chance
5060 of leapfrogging each other. Don't do this, however, when we have
5061 group needs and failure would be fatal; if we only have a relatively
5062 small number of spill registers, and more than one of them has
5063 group needs, then by starting in the middle, we may end up
5064 allocating the first one in such a way that we are not left with
5065 sufficient groups to handle the rest. */
5067 if (noerror || ! force_group)
5072 for (count = 0; count < n_spills; count++)
5074 int class = (int) reload_reg_class[r];
5076 i = (i + 1) % n_spills;
5078 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
5079 reload_when_needed[r])
5080 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5081 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5082 /* Look first for regs to share, then for unshared. But
5083 don't share regs used for inherited reloads; they are
5084 the ones we want to preserve. */
5086 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5088 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5091 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5092 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5093 (on 68000) got us two FP regs. If NR is 1,
5094 we would reject both of them. */
5096 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5097 /* If we need only one reg, we have already won. */
5100 /* But reject a single reg if we demand a group. */
5105 /* Otherwise check that as many consecutive regs as we need
5107 Also, don't use for a group registers that are
5108 needed for nongroups. */
5109 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5112 regno = spill_regs[i] + nr - 1;
5113 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5114 && spill_reg_order[regno] >= 0
5115 && reload_reg_free_p (regno, reload_opnum[r],
5116 reload_when_needed[r])
5117 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5127 /* If we found something on pass 1, omit pass 2. */
5128 if (count < n_spills)
5132 /* We should have found a spill register by now. */
5133 if (count == n_spills)
5140 /* I is the index in SPILL_REG_RTX of the reload register we are to
5141 allocate. Get an rtx for it and find its register number. */
5143 new = spill_reg_rtx[i];
5145 if (new == 0 || GET_MODE (new) != reload_mode[r])
5146 spill_reg_rtx[i] = new
5147 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
5149 regno = true_regnum (new);
5151 /* Detect when the reload reg can't hold the reload mode.
5152 This used to be one `if', but Sequent compiler can't handle that. */
5153 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5155 enum machine_mode test_mode = VOIDmode;
5157 test_mode = GET_MODE (reload_in[r]);
5158 /* If reload_in[r] has VOIDmode, it means we will load it
5159 in whatever mode the reload reg has: to wit, reload_mode[r].
5160 We have already tested that for validity. */
5161 /* Aside from that, we need to test that the expressions
5162 to reload from or into have modes which are valid for this
5163 reload register. Otherwise the reload insns would be invalid. */
5164 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5165 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5166 if (! (reload_out[r] != 0
5167 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5169 /* The reg is OK. */
5172 /* Mark as in use for this insn the reload regs we use
5174 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5175 reload_when_needed[r], reload_mode[r]);
5177 reload_reg_rtx[r] = new;
5178 reload_spill_index[r] = i;
5183 /* The reg is not OK. */
5188 if (asm_noperands (PATTERN (insn)) < 0)
5189 /* It's the compiler's fault. */
5190 fatal_insn ("Could not find a spill register", insn);
5192 /* It's the user's fault; the operand's mode and constraint
5193 don't match. Disable this reload so we don't crash in final. */
5194 error_for_asm (insn,
5195 "`asm' operand constraint incompatible with operand size");
5198 reload_reg_rtx[r] = 0;
5199 reload_optional[r] = 1;
5200 reload_secondary_p[r] = 1;
5205 /* Assign hard reg targets for the pseudo-registers we must reload
5206 into hard regs for this insn.
5207 Also output the instructions to copy them in and out of the hard regs.
5209 For machines with register classes, we are responsible for
5210 finding a reload reg in the proper class. */
5213 choose_reload_regs (insn, avoid_return_reg)
5215 rtx avoid_return_reg;
5218 int max_group_size = 1;
5219 enum reg_class group_class = NO_REGS;
5222 rtx save_reload_reg_rtx[MAX_RELOADS];
5223 char save_reload_inherited[MAX_RELOADS];
5224 rtx save_reload_inheritance_insn[MAX_RELOADS];
5225 rtx save_reload_override_in[MAX_RELOADS];
5226 int save_reload_spill_index[MAX_RELOADS];
5227 HARD_REG_SET save_reload_reg_used;
5228 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5229 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5230 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5231 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5232 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5233 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5234 HARD_REG_SET save_reload_reg_used_in_op_addr;
5235 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5236 HARD_REG_SET save_reload_reg_used_in_insn;
5237 HARD_REG_SET save_reload_reg_used_in_other_addr;
5238 HARD_REG_SET save_reload_reg_used_at_all;
/* Reset all per-insn records of which hard regs are in use as reload regs
   before allocating for this insn.  */
5240 bzero (reload_inherited, MAX_RELOADS);
5241 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5242 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5244 CLEAR_HARD_REG_SET (reload_reg_used);
5245 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5246 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5247 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5248 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5249 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5251 for (i = 0; i < reload_n_operands; i++)
5253 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5254 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5255 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5256 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5257 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5258 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5261 #ifdef SMALL_REGISTER_CLASSES
5262 /* Don't bother with avoiding the return reg
5263 if we have no mandatory reload that could use it. */
5264 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5267 int regno = REGNO (avoid_return_reg);
5269 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5272 for (r = regno; r < regno + nregs; r++)
5273 if (spill_reg_order[r] >= 0)
5274 for (j = 0; j < n_reloads; j++)
5275 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5276 && (reload_in[j] != 0 || reload_out[j] != 0
5277 || reload_secondary_p[j])
5279 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5282 avoid_return_reg = 0;
5284 #endif /* SMALL_REGISTER_CLASSES */
5286 #if 0 /* Not needed, now that we can always retry without inheritance. */
5287 /* See if we have more mandatory reloads than spill regs.
5288 If so, then we cannot risk optimizations that could prevent
5289 reloads from sharing one spill register.
5291 Since we will try finding a better register than reload_reg_rtx
5292 unless it is equal to reload_in or reload_out, count such reloads. */
5296 #ifdef SMALL_REGISTER_CLASSES
5297 if (SMALL_REGISTER_CLASSES)
5298 tem = (avoid_return_reg != 0);
5300 for (j = 0; j < n_reloads; j++)
5301 if (! reload_optional[j]
5302 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5303 && (reload_reg_rtx[j] == 0
5304 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5305 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5312 #ifdef SMALL_REGISTER_CLASSES
5313 /* Don't use the subroutine call return reg for a reload
5314 if we are supposed to avoid it. */
5315 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5317 int regno = REGNO (avoid_return_reg);
5319 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5322 for (r = regno; r < regno + nregs; r++)
5323 if (spill_reg_order[r] >= 0)
5324 SET_HARD_REG_BIT (reload_reg_used, r);
5326 #endif /* SMALL_REGISTER_CLASSES */
5328 /* In order to be certain of getting the registers we need,
5329 we must sort the reloads into order of increasing register class.
5330 Then our grabbing of reload registers will parallel the process
5331 that provided the reload registers.
5333 Also note whether any of the reloads wants a consecutive group of regs.
5334 If so, record the maximum size of the group desired and what
5335 register class contains all the groups needed by this insn. */
5337 for (j = 0; j < n_reloads; j++)
5339 reload_order[j] = j;
5340 reload_spill_index[j] = -1;
/* reload_mode[j] is the wider of the input and output modes for reload J.  */
5343 = (reload_inmode[j] == VOIDmode
5344 || (GET_MODE_SIZE (reload_outmode[j])
5345 > GET_MODE_SIZE (reload_inmode[j])))
5346 ? reload_outmode[j] : reload_inmode[j];
5348 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5350 if (reload_nregs[j] > 1)
5352 max_group_size = MAX (reload_nregs[j], max_group_size);
5353 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5356 /* If we have already decided to use a certain register,
5357 don't use it in another way. */
5358 if (reload_reg_rtx[j])
5359 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5360 reload_when_needed[j], reload_mode[j]);
/* Allocate in order of increasing register class (comparator
   reload_reg_class_lower), so the most constrained reloads grab
   registers first.  */
5364 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
/* Snapshot the allocation state so a failed attempt with inheritance
   can be rolled back (restored below) before retrying without it.  */
5366 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5367 sizeof reload_reg_rtx);
5368 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5369 bcopy ((char *) reload_inheritance_insn,
5370 (char *) save_reload_inheritance_insn,
5371 sizeof reload_inheritance_insn);
5372 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5373 sizeof reload_override_in);
5374 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5375 sizeof reload_spill_index);
5376 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5377 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5378 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5379 reload_reg_used_in_op_addr);
5381 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5382 reload_reg_used_in_op_addr_reload);
5384 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5385 reload_reg_used_in_insn);
5386 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5387 reload_reg_used_in_other_addr);
5389 for (i = 0; i < reload_n_operands; i++)
5391 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5392 reload_reg_used_in_output[i]);
5393 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5394 reload_reg_used_in_input[i]);
5395 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5396 reload_reg_used_in_input_addr[i]);
5397 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5398 reload_reg_used_in_inpaddr_addr[i]);
5399 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5400 reload_reg_used_in_output_addr[i]);
5401 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5402 reload_reg_used_in_outaddr_addr[i]);
5405 /* If -O, try first with inheritance, then turning it off.
5406 If not -O, don't do inheritance.
5407 Using inheritance when not optimizing leads to paradoxes
5408 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5409 because one side of the comparison might be inherited. */
5411 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5413 /* Process the reloads in order of preference just found.
5414 Beyond this point, subregs can be found in reload_reg_rtx.
5416 This used to look for an existing reloaded home for all
5417 of the reloads, and only then perform any new reloads.
5418 But that could lose if the reloads were done out of reg-class order
5419 because a later reload with a looser constraint might have an old
5420 home in a register needed by an earlier reload with a tighter constraint.
5422 To solve this, we make two passes over the reloads, in the order
5423 described above. In the first pass we try to inherit a reload
5424 from a previous insn. If there is a later reload that needs a
5425 class that is a proper subset of the class being processed, we must
5426 also allocate a spill register during the first pass.
5428 Then make a second pass over the reloads to allocate any reloads
5429 that haven't been given registers yet. */
5431 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5433 for (j = 0; j < n_reloads; j++)
5435 register int r = reload_order[j];
5437 /* Ignore reloads that got marked inoperative. */
5438 if (reload_out[r] == 0 && reload_in[r] == 0
5439 && ! reload_secondary_p[r])
5442 /* If find_reloads chose to use reload_in or reload_out as a reload
5443 register, we don't need to choose one. Otherwise, try even if it
5444 found one since we might save an insn if we find the value lying
5446 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5447 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5448 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5451 #if 0 /* No longer needed for correct operation.
5452 It might give better code, or might not; worth an experiment? */
5453 /* If this is an optional reload, we can't inherit from earlier insns
5454 until we are sure that any non-optional reloads have been allocated.
5455 The following code takes advantage of the fact that optional reloads
5456 are at the end of reload_order. */
5457 if (reload_optional[r] != 0)
5458 for (i = 0; i < j; i++)
5459 if ((reload_out[reload_order[i]] != 0
5460 || reload_in[reload_order[i]] != 0
5461 || reload_secondary_p[reload_order[i]])
5462 && ! reload_optional[reload_order[i]]
5463 && reload_reg_rtx[reload_order[i]] == 0)
5464 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5467 /* First see if this pseudo is already available as reloaded
5468 for a previous insn. We cannot try to inherit for reloads
5469 that are smaller than the maximum number of registers needed
5470 for groups unless the register we would allocate cannot be used
5473 We could check here to see if this is a secondary reload for
5474 an object that is already in a register of the desired class.
5475 This would avoid the need for the secondary reload register.
5476 But this is complex because we can't easily determine what
5477 objects might want to be loaded via this reload. So let a
5478 register be allocated here. In `emit_reload_insns' we suppress
5479 one of the loads in the case described above. */
5483 register int regno = -1;
5484 enum machine_mode mode;
5486 if (reload_in[r] == 0)
5488 else if (GET_CODE (reload_in[r]) == REG)
5490 regno = REGNO (reload_in[r]);
5491 mode = GET_MODE (reload_in[r]);
5493 else if (GET_CODE (reload_in_reg[r]) == REG)
5495 regno = REGNO (reload_in_reg[r]);
5496 mode = GET_MODE (reload_in_reg[r]);
5499 /* This won't work, since REGNO can be a pseudo reg number.
5500 Also, it takes much more hair to keep track of all the things
5501 that can invalidate an inherited reload of part of a pseudoreg. */
5502 else if (GET_CODE (reload_in[r]) == SUBREG
5503 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5504 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5507 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5509 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5511 if (reg_reloaded_contents[i] == regno
5512 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5513 >= GET_MODE_SIZE (mode))
5514 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5515 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5517 && (reload_nregs[r] == max_group_size
5518 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5520 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5521 reload_when_needed[r])
5522 && reload_reg_free_before_p (spill_regs[i],
5524 reload_when_needed[r]))
5526 /* If a group is needed, verify that all the subsequent
5527 registers still have their values intact. */
5529 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5532 for (k = 1; k < nr; k++)
5533 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5541 /* We found a register that contains the
5542 value we need. If this register is the
5543 same as an `earlyclobber' operand of the
5544 current insn, just mark it as a place to
5545 reload from since we can't use it as the
5546 reload register itself. */
5548 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5549 if (reg_overlap_mentioned_for_reload_p
5550 (reg_last_reload_reg[regno],
5551 reload_earlyclobbers[i1]))
5554 if (i1 != n_earlyclobbers
5555 /* Don't really use the inherited spill reg
5556 if we need it wider than we've got it. */
5557 || (GET_MODE_SIZE (reload_mode[r])
5558 > GET_MODE_SIZE (mode)))
5559 reload_override_in[r] = reg_last_reload_reg[regno];
5563 /* We can use this as a reload reg. */
5564 /* Mark the register as in use for this part of
5566 mark_reload_reg_in_use (spill_regs[i],
5568 reload_when_needed[r],
5570 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5571 reload_inherited[r] = 1;
5572 reload_inheritance_insn[r]
5573 = reg_reloaded_insn[i];
5574 reload_spill_index[r] = i;
5575 for (k = 0; k < nr; k++)
5576 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5584 /* Here's another way to see if the value is already lying around. */
5586 && reload_in[r] != 0
5587 && ! reload_inherited[r]
5588 && reload_out[r] == 0
5589 && (CONSTANT_P (reload_in[r])
5590 || GET_CODE (reload_in[r]) == PLUS
5591 || GET_CODE (reload_in[r]) == REG
5592 || GET_CODE (reload_in[r]) == MEM)
5593 && (reload_nregs[r] == max_group_size
5594 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5597 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5598 -1, NULL_PTR, 0, reload_mode[r]);
5603 if (GET_CODE (equiv) == REG)
5604 regno = REGNO (equiv);
5605 else if (GET_CODE (equiv) == SUBREG)
5607 /* This must be a SUBREG of a hard register.
5608 Make a new REG since this might be used in an
5609 address and not all machines support SUBREGs
5611 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5612 equiv = gen_rtx (REG, reload_mode[r], regno);
5618 /* If we found a spill reg, reject it unless it is free
5619 and of the desired class. */
5621 && ((spill_reg_order[regno] >= 0
5622 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5623 reload_when_needed[r]))
5624 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5628 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5631 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5634 /* We found a register that contains the value we need.
5635 If this register is the same as an `earlyclobber' operand
5636 of the current insn, just mark it as a place to reload from
5637 since we can't use it as the reload register itself. */
5640 for (i = 0; i < n_earlyclobbers; i++)
5641 if (reg_overlap_mentioned_for_reload_p (equiv,
5642 reload_earlyclobbers[i]))
5644 reload_override_in[r] = equiv;
5649 /* JRV: If the equiv register we have found is
5650 explicitly clobbered in the current insn, mark but
5651 don't use, as above. */
5653 if (equiv != 0 && regno_clobbered_p (regno, insn))
5655 reload_override_in[r] = equiv;
5659 /* If we found an equivalent reg, say no code need be generated
5660 to load it, and use it as our reload reg. */
5661 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5663 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5665 reload_reg_rtx[r] = equiv;
5666 reload_inherited[r] = 1;
5668 /* If any of the hard registers in EQUIV are spill
5669 registers, mark them as in use for this insn. */
5670 for (k = 0; k < nr; k++)
5672 i = spill_reg_order[regno + k];
5675 mark_reload_reg_in_use (regno, reload_opnum[r],
5676 reload_when_needed[r],
5678 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5685 /* If we found a register to use already, or if this is an optional
5686 reload, we are done. */
5687 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5690 #if 0 /* No longer needed for correct operation. Might or might not
5691 give better code on the average. Want to experiment? */
5693 /* See if there is a later reload that has a class different from our
5694 class that intersects our class or that requires less register
5695 than our reload. If so, we must allocate a register to this
5696 reload now, since that reload might inherit a previous reload
5697 and take the only available register in our class. Don't do this
5698 for optional reloads since they will force all previous reloads
5699 to be allocated. Also don't do this for reloads that have been
5702 for (i = j + 1; i < n_reloads; i++)
5704 int s = reload_order[i];
5706 if ((reload_in[s] == 0 && reload_out[s] == 0
5707 && ! reload_secondary_p[s])
5708 || reload_optional[s])
5711 if ((reload_reg_class[s] != reload_reg_class[r]
5712 && reg_classes_intersect_p (reload_reg_class[r],
5713 reload_reg_class[s]))
5714 || reload_nregs[s] < reload_nregs[r])
5721 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5725 /* Now allocate reload registers for anything non-optional that
5726 didn't get one yet. */
5727 for (j = 0; j < n_reloads; j++)
5729 register int r = reload_order[j];
5731 /* Ignore reloads that got marked inoperative. */
5732 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5735 /* Skip reloads that already have a register allocated or are
5737 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5740 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5744 /* If that loop got all the way, we have won. */
5749 /* Loop around and try without any inheritance. */
5750 /* First undo everything done by the failed attempt
5751 to allocate with inheritance. */
5752 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5753 sizeof reload_reg_rtx);
5754 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5755 sizeof reload_inherited);
5756 bcopy ((char *) save_reload_inheritance_insn,
5757 (char *) reload_inheritance_insn,
5758 sizeof reload_inheritance_insn);
5759 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5760 sizeof reload_override_in);
5761 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5762 sizeof reload_spill_index);
5763 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5764 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5765 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5766 save_reload_reg_used_in_op_addr);
5767 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5768 save_reload_reg_used_in_op_addr_reload);
5769 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5770 save_reload_reg_used_in_insn);
5771 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5772 save_reload_reg_used_in_other_addr);
5774 for (i = 0; i < reload_n_operands; i++)
5776 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5777 save_reload_reg_used_in_input[i]);
5778 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5779 save_reload_reg_used_in_output[i]);
5780 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5781 save_reload_reg_used_in_input_addr[i]);
5782 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5783 save_reload_reg_used_in_inpaddr_addr[i]);
5784 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5785 save_reload_reg_used_in_output_addr[i]);
5786 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5787 save_reload_reg_used_in_outaddr_addr[i]);
5791 /* If we thought we could inherit a reload, because it seemed that
5792 nothing else wanted the same reload register earlier in the insn,
5793 verify that assumption, now that all reloads have been assigned. */
5795 for (j = 0; j < n_reloads; j++)
5797 register int r = reload_order[j];
5799 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5800 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5802 reload_when_needed[r]))
5803 reload_inherited[r] = 0;
5805 /* If we found a better place to reload from,
5806 validate it in the same fashion, if it is a reload reg. */
5807 if (reload_override_in[r]
5808 && (GET_CODE (reload_override_in[r]) == REG
5809 || GET_CODE (reload_override_in[r]) == SUBREG))
5811 int regno = true_regnum (reload_override_in[r]);
5812 if (spill_reg_order[regno] >= 0
5813 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5814 reload_when_needed[r]))
5815 reload_override_in[r] = 0;
5819 /* Now that reload_override_in is known valid,
5820 actually override reload_in. */
5821 for (j = 0; j < n_reloads; j++)
5822 if (reload_override_in[j])
5823 reload_in[j] = reload_override_in[j];
5825 /* If this reload won't be done because it has been cancelled or is
5826 optional and not inherited, clear reload_reg_rtx so other
5827 routines (such as subst_reloads) don't get confused. */
5828 for (j = 0; j < n_reloads; j++)
5829 if (reload_reg_rtx[j] != 0
5830 && ((reload_optional[j] && ! reload_inherited[j])
5831 || (reload_in[j] == 0 && reload_out[j] == 0
5832 && ! reload_secondary_p[j])))
5834 int regno = true_regnum (reload_reg_rtx[j]);
5836 if (spill_reg_order[regno] >= 0)
5837 clear_reload_reg_in_use (regno, reload_opnum[j],
5838 reload_when_needed[j], reload_mode[j]);
5839 reload_reg_rtx[j] = 0;
5842 /* Record which pseudos and which spill regs have output reloads. */
5843 for (j = 0; j < n_reloads; j++)
5845 register int r = reload_order[j];
5847 i = reload_spill_index[r];
5849 /* I is nonneg if this reload used one of the spill regs.
5850 If reload_reg_rtx[r] is 0, this is an optional reload
5851 that we opted to ignore. */
5852 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5853 && reload_reg_rtx[r] != 0)
5855 register int nregno = REGNO (reload_out[r]);
5858 if (nregno < FIRST_PSEUDO_REGISTER)
5859 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5862 reg_has_output_reload[nregno + nr] = 1;
5866 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5868 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5871 if (reload_when_needed[r] != RELOAD_OTHER
5872 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5873 && reload_when_needed[r] != RELOAD_FOR_INSN)
5879 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5880 reloads of the same item for fear that we might not have enough reload
5881 registers. However, normally they will get the same reload register
5882 and hence actually need not be loaded twice.
5884 Here we check for the most common case of this phenomenon: when we have
5885 a number of reloads for the same object, each of which were allocated
5886 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5887 reload, and is not modified in the insn itself. If we find such,
5888 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5889 This will not increase the number of spill registers needed and will
5890 prevent redundant code. */
5892 #ifdef SMALL_REGISTER_CLASSES
5895 merge_assigned_reloads (insn)
5900 /* Scan all the reloads looking for ones that only load values and
5901 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5902 assigned and not modified by INSN. */
5904 for (i = 0; i < n_reloads; i++)
5906 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5907 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5908 || reg_set_p (reload_reg_rtx[i], insn))
5911 /* Look at all other reloads. Ensure that the only use of this
5912 reload_reg_rtx is in a reload that just loads the same value
5913 as we do. Note that any secondary reloads must be of the identical
5914 class since the values, modes, and result registers are the
5915 same, so we need not do anything with any secondary reloads. */
5917 for (j = 0; j < n_reloads; j++)
5919 if (i == j || reload_reg_rtx[j] == 0
5920 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5924 /* If the reload regs aren't exactly the same (e.g, different modes)
5925 or if the values are different, we can't merge anything with this
5928 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5929 || reload_out[j] != 0 || reload_in[j] == 0
5930 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5934 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5935 we, in fact, found any matching reloads. */
5939 for (j = 0; j < n_reloads; j++)
5940 if (i != j && reload_reg_rtx[j] != 0
5941 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5943 reload_when_needed[i] = RELOAD_OTHER;
5945 transfer_replacements (i, j);
5948 /* If this is now RELOAD_OTHER, look for any reloads that load
5949 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5950 if they were for inputs, RELOAD_OTHER for outputs. Note that
5951 this test is equivalent to looking for reloads for this operand
5954 if (reload_when_needed[i] == RELOAD_OTHER)
5955 for (j = 0; j < n_reloads; j++)
5956 if (reload_in[j] != 0
5957 && reload_when_needed[i] != RELOAD_OTHER
5958 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5960 reload_when_needed[j]
5961 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5962 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
5963 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
5967 #endif /* SMALL_REGISTER_CLASSES */
5969 /* Output insns to reload values in and out of the chosen reload regs. */
5972 emit_reload_insns (insn)
5976 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5977 rtx other_input_address_reload_insns = 0;
5978 rtx other_input_reload_insns = 0;
5979 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5980 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5981 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5982 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5983 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5984 rtx operand_reload_insns = 0;
5985 rtx other_operand_reload_insns = 0;
5986 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5987 rtx following_insn = NEXT_INSN (insn);
5988 rtx before_insn = insn;
5990 /* Values to be put in spill_reg_store are put here first. */
5991 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5993 for (j = 0; j < reload_n_operands; j++)
5994 input_reload_insns[j] = input_address_reload_insns[j]
5995 = inpaddr_address_reload_insns[j]
5996 = output_reload_insns[j] = output_address_reload_insns[j]
5997 = outaddr_address_reload_insns[j]
5998 = other_output_reload_insns[j] = 0;
6000 /* Now output the instructions to copy the data into and out of the
6001 reload registers. Do these in the order that the reloads were reported,
6002 since reloads of base and index registers precede reloads of operands
6003 and the operands may need the base and index registers reloaded. */
6005 for (j = 0; j < n_reloads; j++)
6008 rtx oldequiv_reg = 0;
6009 rtx this_reload_insn = 0;
6011 if (reload_spill_index[j] >= 0)
6012 new_spill_reg_store[reload_spill_index[j]] = 0;
6015 if (old != 0 && ! reload_inherited[j]
6016 && ! rtx_equal_p (reload_reg_rtx[j], old)
6017 && reload_reg_rtx[j] != 0)
6019 register rtx reloadreg = reload_reg_rtx[j];
6021 enum machine_mode mode;
6024 /* Determine the mode to reload in.
6025 This is very tricky because we have three to choose from.
6026 There is the mode the insn operand wants (reload_inmode[J]).
6027 There is the mode of the reload register RELOADREG.
6028 There is the intrinsic mode of the operand, which we could find
6029 by stripping some SUBREGs.
6030 It turns out that RELOADREG's mode is irrelevant:
6031 we can change that arbitrarily.
6033 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6034 then the reload reg may not support QImode moves, so use SImode.
6035 If foo is in memory due to spilling a pseudo reg, this is safe,
6036 because the QImode value is in the least significant part of a
6037 slot big enough for a SImode. If foo is some other sort of
6038 memory reference, then it is impossible to reload this case,
6039 so previous passes had better make sure this never happens.
6041 Then consider a one-word union which has SImode and one of its
6042 members is a float, being fetched as (SUBREG:SF union:SI).
6043 We must fetch that as SFmode because we could be loading into
6044 a float-only register. In this case OLD's mode is correct.
6046 Consider an immediate integer: it has VOIDmode. Here we need
6047 to get a mode from something else.
6049 In some cases, there is a fourth mode, the operand's
6050 containing mode. If the insn specifies a containing mode for
6051 this operand, it overrides all others.
6053 I am not sure whether the algorithm here is always right,
6054 but it does the right things in those cases. */
6056 mode = GET_MODE (old);
6057 if (mode == VOIDmode)
6058 mode = reload_inmode[j];
6060 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6061 /* If we need a secondary register for this operation, see if
6062 the value is already in a register in that class. Don't
6063 do this if the secondary register will be used as a scratch
6066 if (reload_secondary_in_reload[j] >= 0
6067 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6070 = find_equiv_reg (old, insn,
6071 reload_reg_class[reload_secondary_in_reload[j]],
6072 -1, NULL_PTR, 0, mode);
6075 /* If reloading from memory, see if there is a register
6076 that already holds the same value. If so, reload from there.
6077 We can pass 0 as the reload_reg_p argument because
6078 any other reload has either already been emitted,
6079 in which case find_equiv_reg will see the reload-insn,
6080 or has yet to be emitted, in which case it doesn't matter
6081 because we will use this equiv reg right away. */
6083 if (oldequiv == 0 && optimize
6084 && (GET_CODE (old) == MEM
6085 || (GET_CODE (old) == REG
6086 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6087 && reg_renumber[REGNO (old)] < 0)))
6088 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6089 -1, NULL_PTR, 0, mode);
6093 int regno = true_regnum (oldequiv);
6095 /* If OLDEQUIV is a spill register, don't use it for this
6096 if any other reload needs it at an earlier stage of this insn
6097 or at this stage. */
6098 if (spill_reg_order[regno] >= 0
6099 && (! reload_reg_free_p (regno, reload_opnum[j],
6100 reload_when_needed[j])
6101 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6102 reload_when_needed[j])))
6105 /* If OLDEQUIV is not a spill register,
6106 don't use it if any other reload wants it. */
6107 if (spill_reg_order[regno] < 0)
6110 for (k = 0; k < n_reloads; k++)
6111 if (reload_reg_rtx[k] != 0 && k != j
6112 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6120 /* If it is no cheaper to copy from OLDEQUIV into the
6121 reload register than it would be to move from memory,
6122 don't use it. Likewise, if we need a secondary register
6126 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6127 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6128 reload_reg_class[j])
6129 >= MEMORY_MOVE_COST (mode)))
6130 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6131 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6135 #ifdef SECONDARY_MEMORY_NEEDED
6136 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6137 REGNO_REG_CLASS (regno),
6146 else if (GET_CODE (oldequiv) == REG)
6147 oldequiv_reg = oldequiv;
6148 else if (GET_CODE (oldequiv) == SUBREG)
6149 oldequiv_reg = SUBREG_REG (oldequiv);
6151 /* If we are reloading from a register that was recently stored in
6152 with an output-reload, see if we can prove there was
6153 actually no need to store the old value in it. */
6155 if (optimize && GET_CODE (oldequiv) == REG
6156 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6157 && spill_reg_order[REGNO (oldequiv)] >= 0
6158 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
6159 && find_reg_note (insn, REG_DEAD, reload_in[j])
6160 /* This is unsafe if operand occurs more than once in current
6161 insn. Perhaps some occurrences weren't reloaded. */
6162 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6163 delete_output_reload
6164 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
6166 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6167 then load RELOADREG from OLDEQUIV. Note that we cannot use
6168 gen_lowpart_common since it can do the wrong thing when
6169 RELOADREG has a multi-word mode. Note that RELOADREG
6170 must always be a REG here. */
6172 if (GET_MODE (reloadreg) != mode)
6173 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6174 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6175 oldequiv = SUBREG_REG (oldequiv);
6176 if (GET_MODE (oldequiv) != VOIDmode
6177 && mode != GET_MODE (oldequiv))
6178 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
6180 /* Switch to the right place to emit the reload insns. */
6181 switch (reload_when_needed[j])
6184 where = &other_input_reload_insns;
6186 case RELOAD_FOR_INPUT:
6187 where = &input_reload_insns[reload_opnum[j]];
6189 case RELOAD_FOR_INPUT_ADDRESS:
6190 where = &input_address_reload_insns[reload_opnum[j]];
6192 case RELOAD_FOR_INPADDR_ADDRESS:
6193 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6195 case RELOAD_FOR_OUTPUT_ADDRESS:
6196 where = &output_address_reload_insns[reload_opnum[j]];
6198 case RELOAD_FOR_OUTADDR_ADDRESS:
6199 where = &outaddr_address_reload_insns[reload_opnum[j]];
6201 case RELOAD_FOR_OPERAND_ADDRESS:
6202 where = &operand_reload_insns;
6204 case RELOAD_FOR_OPADDR_ADDR:
6205 where = &other_operand_reload_insns;
6207 case RELOAD_FOR_OTHER_ADDRESS:
6208 where = &other_input_address_reload_insns;
6214 push_to_sequence (*where);
6217 /* Auto-increment addresses must be reloaded in a special way. */
6218 if (GET_CODE (oldequiv) == POST_INC
6219 || GET_CODE (oldequiv) == POST_DEC
6220 || GET_CODE (oldequiv) == PRE_INC
6221 || GET_CODE (oldequiv) == PRE_DEC)
6223 /* We are not going to bother supporting the case where an
6224 incremented register can't be copied directly from
6225 OLDEQUIV since this seems highly unlikely. */
6226 if (reload_secondary_in_reload[j] >= 0)
6228 /* Prevent normal processing of this reload. */
6230 /* Output a special code sequence for this case. */
6231 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6234 /* If we are reloading a pseudo-register that was set by the previous
6235 insn, see if we can get rid of that pseudo-register entirely
6236 by redirecting the previous insn into our reload register. */
6238 else if (optimize && GET_CODE (old) == REG
6239 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6240 && dead_or_set_p (insn, old)
6241 /* This is unsafe if some other reload
6242 uses the same reg first. */
6243 && reload_reg_free_before_p (REGNO (reloadreg),
6245 reload_when_needed[j]))
6247 rtx temp = PREV_INSN (insn);
6248 while (temp && GET_CODE (temp) == NOTE)
6249 temp = PREV_INSN (temp);
6251 && GET_CODE (temp) == INSN
6252 && GET_CODE (PATTERN (temp)) == SET
6253 && SET_DEST (PATTERN (temp)) == old
6254 /* Make sure we can access insn_operand_constraint. */
6255 && asm_noperands (PATTERN (temp)) < 0
6256 /* This is unsafe if prev insn rejects our reload reg. */
6257 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6259 /* This is unsafe if operand occurs more than once in current
6260 insn. Perhaps some occurrences aren't reloaded. */
6261 && count_occurrences (PATTERN (insn), old) == 1
6262 /* Don't risk splitting a matching pair of operands. */
6263 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6265 /* Store into the reload register instead of the pseudo. */
6266 SET_DEST (PATTERN (temp)) = reloadreg;
6267 /* If these are the only uses of the pseudo reg,
6268 pretend for GDB it lives in the reload reg we used. */
6269 if (REG_N_DEATHS (REGNO (old)) == 1
6270 && REG_N_SETS (REGNO (old)) == 1)
6272 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6273 alter_reg (REGNO (old), -1);
6279 /* We can't do that, so output an insn to load RELOADREG. */
6283 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6284 rtx second_reload_reg = 0;
6285 enum insn_code icode;
6287 /* If we have a secondary reload, pick up the secondary register
6288 and icode, if any. If OLDEQUIV and OLD are different or
6289 if this is an in-out reload, recompute whether or not we
6290 still need a secondary register and what the icode should
6291 be. If we still need a secondary register and the class or
6292 icode is different, go back to reloading from OLD if using
6293 OLDEQUIV means that we got the wrong type of register. We
6294 cannot have different class or icode due to an in-out reload
6295 because we don't make such reloads when both the input and
6296 output need secondary reload registers. */
6298 if (reload_secondary_in_reload[j] >= 0)
6300 int secondary_reload = reload_secondary_in_reload[j];
6301 rtx real_oldequiv = oldequiv;
6304 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6305 and similarly for OLD.
6306 See comments in get_secondary_reload in reload.c. */
6307 if (GET_CODE (oldequiv) == REG
6308 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6309 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6310 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6312 if (GET_CODE (old) == REG
6313 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6314 && reg_equiv_mem[REGNO (old)] != 0)
6315 real_old = reg_equiv_mem[REGNO (old)];
6317 second_reload_reg = reload_reg_rtx[secondary_reload];
6318 icode = reload_secondary_in_icode[j];
6320 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6321 || (reload_in[j] != 0 && reload_out[j] != 0))
6323 enum reg_class new_class
6324 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6325 mode, real_oldequiv);
6327 if (new_class == NO_REGS)
6328 second_reload_reg = 0;
6331 enum insn_code new_icode;
6332 enum machine_mode new_mode;
6334 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6335 REGNO (second_reload_reg)))
6336 oldequiv = old, real_oldequiv = real_old;
6339 new_icode = reload_in_optab[(int) mode];
6340 if (new_icode != CODE_FOR_nothing
6341 && ((insn_operand_predicate[(int) new_icode][0]
6342 && ! ((*insn_operand_predicate[(int) new_icode][0])
6344 || (insn_operand_predicate[(int) new_icode][1]
6345 && ! ((*insn_operand_predicate[(int) new_icode][1])
6346 (real_oldequiv, mode)))))
6347 new_icode = CODE_FOR_nothing;
6349 if (new_icode == CODE_FOR_nothing)
6352 new_mode = insn_operand_mode[(int) new_icode][2];
6354 if (GET_MODE (second_reload_reg) != new_mode)
6356 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6358 oldequiv = old, real_oldequiv = real_old;
6361 = gen_rtx (REG, new_mode,
6362 REGNO (second_reload_reg));
6368 /* If we still need a secondary reload register, check
6369 to see if it is being used as a scratch or intermediate
6370 register and generate code appropriately. If we need
6371 a scratch register, use REAL_OLDEQUIV since the form of
6372 the insn may depend on the actual address if it is
6375 if (second_reload_reg)
6377 if (icode != CODE_FOR_nothing)
6379 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6380 second_reload_reg));
6385 /* See if we need a scratch register to load the
6386 intermediate register (a tertiary reload). */
6387 enum insn_code tertiary_icode
6388 = reload_secondary_in_icode[secondary_reload];
6390 if (tertiary_icode != CODE_FOR_nothing)
6392 rtx third_reload_reg
6393 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6395 emit_insn ((GEN_FCN (tertiary_icode)
6396 (second_reload_reg, real_oldequiv,
6397 third_reload_reg)));
6400 gen_reload (second_reload_reg, oldequiv,
6402 reload_when_needed[j]);
6404 oldequiv = second_reload_reg;
6410 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6411 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6412 reload_when_needed[j]);
6414 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6415 /* We may have to make a REG_DEAD note for the secondary reload
6416 register in the insns we just made. Find the last insn that
6417 mentioned the register. */
6418 if (! special && second_reload_reg
6419 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6423 for (prev = get_last_insn (); prev;
6424 prev = PREV_INSN (prev))
6425 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6426 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6429 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6438 this_reload_insn = get_last_insn ();
6439 /* End this sequence. */
6440 *where = get_insns ();
6444 /* Add a note saying the input reload reg
6445 dies in this insn, if anyone cares. */
6446 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6448 && reload_reg_rtx[j] != old
6449 && reload_reg_rtx[j] != 0
6450 && reload_out[j] == 0
6451 && ! reload_inherited[j]
6452 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6454 register rtx reloadreg = reload_reg_rtx[j];
6457 /* We can't abort here because we need to support this for sched.c.
6458 It's not terrible to miss a REG_DEAD note, but we should try
6459 to figure out how to do this correctly. */
6460 /* The code below is incorrect for address-only reloads. */
6461 if (reload_when_needed[j] != RELOAD_OTHER
6462 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6466 /* Add a death note to this insn, for an input reload. */
6468 if ((reload_when_needed[j] == RELOAD_OTHER
6469 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6470 && ! dead_or_set_p (insn, reloadreg))
6472 = gen_rtx (EXPR_LIST, REG_DEAD,
6473 reloadreg, REG_NOTES (insn));
6476 /* When we inherit a reload, the last marked death of the reload reg
6477 may no longer really be a death. */
6478 if (reload_reg_rtx[j] != 0
6479 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6480 && reload_inherited[j])
6482 /* Handle inheriting an output reload.
6483 Remove the death note from the output reload insn. */
6484 if (reload_spill_index[j] >= 0
6485 && GET_CODE (reload_in[j]) == REG
6486 && spill_reg_store[reload_spill_index[j]] != 0
6487 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6488 REG_DEAD, REGNO (reload_reg_rtx[j])))
6489 remove_death (REGNO (reload_reg_rtx[j]),
6490 spill_reg_store[reload_spill_index[j]]);
6491 /* Likewise for input reloads that were inherited. */
6492 else if (reload_spill_index[j] >= 0
6493 && GET_CODE (reload_in[j]) == REG
6494 && spill_reg_store[reload_spill_index[j]] == 0
6495 && reload_inheritance_insn[j] != 0
6496 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6497 REGNO (reload_reg_rtx[j])))
6498 remove_death (REGNO (reload_reg_rtx[j]),
6499 reload_inheritance_insn[j]);
6504 /* We got this register from find_equiv_reg.
6505 Search back for its last death note and get rid of it.
6506 But don't search back too far.
6507 Don't go past a place where this reg is set,
6508 since a death note before that remains valid. */
6509 for (prev = PREV_INSN (insn);
6510 prev && GET_CODE (prev) != CODE_LABEL;
6511 prev = PREV_INSN (prev))
6512 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6513 && dead_or_set_p (prev, reload_reg_rtx[j]))
6515 if (find_regno_note (prev, REG_DEAD,
6516 REGNO (reload_reg_rtx[j])))
6517 remove_death (REGNO (reload_reg_rtx[j]), prev);
6523 /* We might have used find_equiv_reg above to choose an alternate
6524 place from which to reload. If so, and it died, we need to remove
6525 that death and move it to one of the insns we just made. */
6527 if (oldequiv_reg != 0
6528 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6532 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6533 prev = PREV_INSN (prev))
6534 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6535 && dead_or_set_p (prev, oldequiv_reg))
6537 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6539 for (prev1 = this_reload_insn;
6540 prev1; prev1 = PREV_INSN (prev1))
6541 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6542 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6545 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6550 remove_death (REGNO (oldequiv_reg), prev);
6557 /* If we are reloading a register that was recently stored in with an
6558 output-reload, see if we can prove there was
6559 actually no need to store the old value in it. */
6561 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6562 && reload_in[j] != 0
6563 && GET_CODE (reload_in[j]) == REG
6565 /* There doesn't seem to be any reason to restrict this to pseudos
6566 and doing so loses in the case where we are copying from a
6567 register of the wrong class. */
6568 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6570 && spill_reg_store[reload_spill_index[j]] != 0
6571 /* This is unsafe if some other reload uses the same reg first. */
6572 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6573 reload_opnum[j], reload_when_needed[j])
6574 && dead_or_set_p (insn, reload_in[j])
6575 /* This is unsafe if operand occurs more than once in current
6576 insn. Perhaps some occurrences weren't reloaded. */
6577 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6578 delete_output_reload (insn, j,
6579 spill_reg_store[reload_spill_index[j]]);
6581 /* Input-reloading is done. Now do output-reloading,
6582 storing the value from the reload-register after the main insn
6583 if reload_out[j] is nonzero.
6585 ??? At some point we need to support handling output reloads of
6586 JUMP_INSNs or insns that set cc0. */
6587 old = reload_out[j];
6589 && reload_reg_rtx[j] != old
6590 && reload_reg_rtx[j] != 0)
6592 register rtx reloadreg = reload_reg_rtx[j];
6593 register rtx second_reloadreg = 0;
6595 enum machine_mode mode;
6598 /* An output operand that dies right away does need a reload,
6599 but need not be copied from it. Show the new location in the
6601 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6602 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6604 XEXP (note, 0) = reload_reg_rtx[j];
6607 /* Likewise for a SUBREG of an operand that dies. */
6608 else if (GET_CODE (old) == SUBREG
6609 && GET_CODE (SUBREG_REG (old)) == REG
6610 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6613 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6617 else if (GET_CODE (old) == SCRATCH)
6618 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6619 but we don't want to make an output reload. */
6623 /* Strip off of OLD any size-increasing SUBREGs such as
6624 (SUBREG:SI foo:QI 0). */
6626 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6627 && (GET_MODE_SIZE (GET_MODE (old))
6628 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6629 old = SUBREG_REG (old);
6632 /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
6633 if (GET_CODE (insn) == JUMP_INSN)
6636 if (reload_when_needed[j] == RELOAD_OTHER)
6639 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6641 /* Determine the mode to reload in.
6642 See comments above (for input reloading). */
6644 mode = GET_MODE (old);
6645 if (mode == VOIDmode)
6647 /* VOIDmode should never happen for an output. */
6648 if (asm_noperands (PATTERN (insn)) < 0)
6649 /* It's the compiler's fault. */
6650 fatal_insn ("VOIDmode on an output", insn);
6651 error_for_asm (insn, "output operand is constant in `asm'");
6652 /* Prevent crash--use something we know is valid. */
6654 old = gen_rtx (REG, mode, REGNO (reloadreg));
6657 if (GET_MODE (reloadreg) != mode)
6658 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6660 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6662 /* If we need two reload regs, set RELOADREG to the intermediate
6663 one, since it will be stored into OLD. We might need a secondary
6664 register only for an input reload, so check again here. */
6666 if (reload_secondary_out_reload[j] >= 0)
6670 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6671 && reg_equiv_mem[REGNO (old)] != 0)
6672 real_old = reg_equiv_mem[REGNO (old)];
6674 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6678 second_reloadreg = reloadreg;
6679 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6681 /* See if RELOADREG is to be used as a scratch register
6682 or as an intermediate register. */
6683 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6685 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6686 (real_old, second_reloadreg, reloadreg)));
6691 /* See if we need both a scratch and intermediate reload
6694 int secondary_reload = reload_secondary_out_reload[j];
6695 enum insn_code tertiary_icode
6696 = reload_secondary_out_icode[secondary_reload];
6698 if (GET_MODE (reloadreg) != mode)
6699 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6701 if (tertiary_icode != CODE_FOR_nothing)
6704 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6707 /* Copy primary reload reg to secondary reload reg.
6708 (Note that these have been swapped above, then
6709 secondary reload reg to OLD using our insn. */
6711 /* If REAL_OLD is a paradoxical SUBREG, remove it
6712 and try to put the opposite SUBREG on
6714 if (GET_CODE (real_old) == SUBREG
6715 && (GET_MODE_SIZE (GET_MODE (real_old))
6716 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6717 && 0 != (tem = gen_lowpart_common
6718 (GET_MODE (SUBREG_REG (real_old)),
6720 real_old = SUBREG_REG (real_old), reloadreg = tem;
6722 gen_reload (reloadreg, second_reloadreg,
6723 reload_opnum[j], reload_when_needed[j]);
6724 emit_insn ((GEN_FCN (tertiary_icode)
6725 (real_old, reloadreg, third_reloadreg)));
6730 /* Copy between the reload regs here and then to
6733 gen_reload (reloadreg, second_reloadreg,
6734 reload_opnum[j], reload_when_needed[j]);
6740 /* Output the last reload insn. */
6742 gen_reload (old, reloadreg, reload_opnum[j],
6743 reload_when_needed[j]);
6745 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6746 /* If final will look at death notes for this reg,
6747 put one on the last output-reload insn to use it. Similarly
6748 for any secondary register. */
6749 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6750 for (p = get_last_insn (); p; p = PREV_INSN (p))
6751 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6752 && reg_overlap_mentioned_for_reload_p (reloadreg,
6754 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6755 reloadreg, REG_NOTES (p));
6757 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6758 if (! special && second_reloadreg
6759 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6760 for (p = get_last_insn (); p; p = PREV_INSN (p))
6761 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6762 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6764 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6765 second_reloadreg, REG_NOTES (p));
6768 /* Look at all insns we emitted, just to be safe. */
6769 for (p = get_insns (); p; p = NEXT_INSN (p))
6770 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6772 /* If this output reload doesn't come from a spill reg,
6773 clear any memory of reloaded copies of the pseudo reg.
6774 If this output reload comes from a spill reg,
6775 reg_has_output_reload will make this do nothing. */
6776 note_stores (PATTERN (p), forget_old_reloads_1);
6778 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6779 && reload_spill_index[j] >= 0)
6780 new_spill_reg_store[reload_spill_index[j]] = p;
6783 if (reload_when_needed[j] == RELOAD_OTHER)
6785 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6786 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6789 output_reload_insns[reload_opnum[j]] = get_insns ();
6795 /* Now write all the insns we made for reloads in the order expected by
6796 the allocation functions. Prior to the insn being reloaded, we write
6797 the following reloads:
6799 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6801 RELOAD_OTHER reloads.
6803 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6804 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6805 RELOAD_FOR_INPUT reload for the operand.
6807 RELOAD_FOR_OPADDR_ADDRS reloads.
6809 RELOAD_FOR_OPERAND_ADDRESS reloads.
6811 After the insn being reloaded, we write the following:
6813 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
6814 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
6815 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6816 reloads for the operand. The RELOAD_OTHER output reloads are
6817 output in descending order by reload number. */
6819 emit_insns_before (other_input_address_reload_insns, before_insn);
6820 emit_insns_before (other_input_reload_insns, before_insn);
6822 for (j = 0; j < reload_n_operands; j++)
6824 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
6825 emit_insns_before (input_address_reload_insns[j], before_insn);
6826 emit_insns_before (input_reload_insns[j], before_insn);
6829 emit_insns_before (other_operand_reload_insns, before_insn);
6830 emit_insns_before (operand_reload_insns, before_insn);
6832 for (j = 0; j < reload_n_operands; j++)
6834 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
6835 emit_insns_before (output_address_reload_insns[j], following_insn);
6836 emit_insns_before (output_reload_insns[j], following_insn);
6837 emit_insns_before (other_output_reload_insns[j], following_insn);
6840 /* Move death notes from INSN
6841 to output-operand-address and output reload insns. */
6842 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6845 /* Loop over those insns, last ones first. */
6846 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6847 insn1 = PREV_INSN (insn1))
6848 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6850 rtx source = SET_SRC (PATTERN (insn1));
6851 rtx dest = SET_DEST (PATTERN (insn1));
6853 /* The note we will examine next. */
6854 rtx reg_notes = REG_NOTES (insn);
6855 /* The place that pointed to this note. */
6856 rtx *prev_reg_note = &REG_NOTES (insn);
6858 /* If the note is for something used in the source of this
6859 reload insn, or in the output address, move the note. */
6862 rtx next_reg_notes = XEXP (reg_notes, 1);
6863 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6864 && GET_CODE (XEXP (reg_notes, 0)) == REG
6865 && ((GET_CODE (dest) != REG
6866 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6868 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6871 *prev_reg_note = next_reg_notes;
6872 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6873 REG_NOTES (insn1) = reg_notes;
6876 prev_reg_note = &XEXP (reg_notes, 1);
6878 reg_notes = next_reg_notes;
6884 /* For all the spill regs newly reloaded in this instruction,
6885 record what they were reloaded from, so subsequent instructions
6886 can inherit the reloads.
6888 Update spill_reg_store for the reloads of this insn.
6889 Copy the elements that were updated in the loop above. */
6891 for (j = 0; j < n_reloads; j++)
6893 register int r = reload_order[j];
6894 register int i = reload_spill_index[r];
6896 /* I is nonneg if this reload used one of the spill regs.
6897 If reload_reg_rtx[r] is 0, this is an optional reload
6898 that we opted to ignore. */
6900 if (i >= 0 && reload_reg_rtx[r] != 0)
6903 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6905 int part_reaches_end = 0;
6906 int all_reaches_end = 1;
6908 /* For a multi register reload, we need to check if all or part
6909 of the value lives to the end. */
6910 for (k = 0; k < nr; k++)
6912 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6913 reload_when_needed[r]))
6914 part_reaches_end = 1;
6916 all_reaches_end = 0;
6919 /* Ignore reloads that don't reach the end of the insn in
6921 if (all_reaches_end)
6923 /* First, clear out memory of what used to be in this spill reg.
6924 If consecutive registers are used, clear them all. */
6926 for (k = 0; k < nr; k++)
6928 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6929 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6932 /* Maybe the spill reg contains a copy of reload_out. */
6933 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6935 register int nregno = REGNO (reload_out[r]);
6936 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6937 : HARD_REGNO_NREGS (nregno,
6938 GET_MODE (reload_reg_rtx[r])));
6940 spill_reg_store[i] = new_spill_reg_store[i];
6941 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6943 /* If NREGNO is a hard register, it may occupy more than
6944 one register. If it does, say what is in the
6945 rest of the registers assuming that both registers
6946 agree on how many words the object takes. If not,
6947 invalidate the subsequent registers. */
6949 if (nregno < FIRST_PSEUDO_REGISTER)
6950 for (k = 1; k < nnr; k++)
6951 reg_last_reload_reg[nregno + k]
6954 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6955 REGNO (reload_reg_rtx[r]) + k)
6958 /* Now do the inverse operation. */
6959 for (k = 0; k < nr; k++)
6961 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6962 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6965 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6969 /* Maybe the spill reg contains a copy of reload_in. Only do
6970 something if there will not be an output reload for
6971 the register being reloaded. */
6972 else if (reload_out[r] == 0
6973 && reload_in[r] != 0
6974 && ((GET_CODE (reload_in[r]) == REG
6975 && ! reg_has_output_reload[REGNO (reload_in[r])])
6976 || (GET_CODE (reload_in_reg[r]) == REG
6977 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6979 register int nregno;
6982 if (GET_CODE (reload_in[r]) == REG)
6983 nregno = REGNO (reload_in[r]);
6985 nregno = REGNO (reload_in_reg[r]);
6987 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6988 : HARD_REGNO_NREGS (nregno,
6989 GET_MODE (reload_reg_rtx[r])));
6991 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6993 if (nregno < FIRST_PSEUDO_REGISTER)
6994 for (k = 1; k < nnr; k++)
6995 reg_last_reload_reg[nregno + k]
6998 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6999 REGNO (reload_reg_rtx[r]) + k)
7002 /* Unless we inherited this reload, show we haven't
7003 recently done a store. */
7004 if (! reload_inherited[r])
7005 spill_reg_store[i] = 0;
7007 for (k = 0; k < nr; k++)
7009 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
7010 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7013 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
7019 /* However, if part of the reload reaches the end, then we must
7020 invalidate the old info for the part that survives to the end. */
7021 else if (part_reaches_end)
7023 for (k = 0; k < nr; k++)
7024 if (reload_reg_reaches_end_p (spill_regs[i] + k,
7026 reload_when_needed[r]))
7028 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
7029 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
7034 /* The following if-statement was #if 0'd in 1.34 (or before...).
7035 It's reenabled in 1.35 because supposedly nothing else
7036 deals with this problem. */
7038 /* If a register gets output-reloaded from a non-spill register,
7039 that invalidates any previous reloaded copy of it.
7040 But forget_old_reloads_1 won't get to see it, because
7041 it thinks only about the original insn. So invalidate it here. */
7042 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7044 register int nregno = REGNO (reload_out[r]);
7045 if (nregno >= FIRST_PSEUDO_REGISTER)
7046 reg_last_reload_reg[nregno] = 0;
7049 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
7051 while (num_regs-- > 0)
7052 reg_last_reload_reg[nregno + num_regs] = 0;
7058 /* Emit code to perform a reload from IN (which may be a reload register) to
7059 OUT (which may also be a reload register). IN or OUT is from operand
7060 OPNUM with reload type TYPE.
7062 Returns first insn emitted. */
/* NOTE(review): the inner line numbering of this listing is non-contiguous,
   so some source lines appear to be elided here (e.g. the declarations of
   OUT, IN and OPNUM, the braces, and the failure branches after the
   recog_memoized calls).  The comments below describe only the visible
   code; annotations on elided behavior are marked as assumptions.  */
7065 gen_reload (out, in, opnum, type)
7069 enum reload_type type;
7071 rtx last = get_last_insn ();
7074 /* If IN is a paradoxical SUBREG, remove it and try to put the
7075 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7076 if (GET_CODE (in) == SUBREG
7077 && (GET_MODE_SIZE (GET_MODE (in))
7078 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7079 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7080 in = SUBREG_REG (in), out = tem;
7081 else if (GET_CODE (out) == SUBREG
7082 && (GET_MODE_SIZE (GET_MODE (out))
7083 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7084 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7085 out = SUBREG_REG (out), in = tem;
7087 /* How to do this reload can get quite tricky. Normally, we are being
7088 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7089 register that didn't get a hard register. In that case we can just
7090 call emit_move_insn.
7092 We can also be asked to reload a PLUS that adds a register or a MEM to
7093 another register, constant or MEM. This can occur during frame pointer
7094 elimination and while reloading addresses. This case is handled by
7095 trying to emit a single insn to perform the add. If it is not valid,
7096 we use a two insn sequence.
7098 Finally, we could be called to handle an 'o' constraint by putting
7099 an address into a register. In that case, we first try to do this
7100 with a named pattern of "reload_load_address". If no such pattern
7101 exists, we just emit a SET insn and hope for the best (it will normally
7102 be valid on machines that use 'o').
7104 This entire process is made complex because reload will never
7105 process the insns we generate here and so we must ensure that
7106 they will fit their constraints and also by the fact that parts of
7107 IN might be being reloaded separately and replaced with spill registers.
7108 Because of this, we are, in some sense, just guessing the right approach
7109 here. The one listed above seems to work.
7111 ??? At some point, this whole thing needs to be rethought. */
7113 if (GET_CODE (in) == PLUS
7114 && (GET_CODE (XEXP (in, 0)) == REG
7115 || GET_CODE (XEXP (in, 0)) == SUBREG
7116 || GET_CODE (XEXP (in, 0)) == MEM)
7117 && (GET_CODE (XEXP (in, 1)) == REG
7118 || GET_CODE (XEXP (in, 1)) == SUBREG
7119 || CONSTANT_P (XEXP (in, 1))
7120 || GET_CODE (XEXP (in, 1)) == MEM))
7122 /* We need to compute the sum of a register or a MEM and another
7123 register, constant, or MEM, and put it into the reload
7124 register. The best possible way of doing this is if the machine
7125 has a three-operand ADD insn that accepts the required operands.
7127 The simplest approach is to try to generate such an insn and see if it
7128 is recognized and matches its constraints. If so, it can be used.
7130 It might be better not to actually emit the insn unless it is valid,
7131 but we need to pass the insn as an operand to `recog' and
7132 `insn_extract' and it is simpler to emit and then delete the insn if
7133 not valid than to dummy things up. */
7135 rtx op0, op1, tem, insn;
/* find_replacement substitutes any pending reload replacements for the
   two addends, so the insn we build reflects post-reload operands.  */
7138 op0 = find_replacement (&XEXP (in, 0));
7139 op1 = find_replacement (&XEXP (in, 1));
7141 /* Since constraint checking is strict, commutativity won't be
7142 checked, so we need to do that here to avoid spurious failure
7143 if the add instruction is two-address and the second operand
7144 of the add is the same as the reload reg, which is frequently
7145 the case. If the insn would be A = B + A, rearrange it so
7146 it will be A = A + B as constrain_operands expects. */
7148 if (GET_CODE (XEXP (in, 1)) == REG
7149 && REGNO (out) == REGNO (XEXP (in, 1)))
7150 tem = op0, op0 = op1, op1 = tem;
7152 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7153 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
7155 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
/* recog_memoized yields the insn code; presumably the test for a
   negative (unrecognized) result is among the elided lines -- confirm
   against the full source.  */
7156 code = recog_memoized (insn);
7160 insn_extract (insn);
7161 /* We want constrain operands to treat this insn strictly in
7162 its validity determination, i.e., the way it would after reload
7164 if (constrain_operands (code, 1))
/* The three-operand add was rejected: remove everything emitted since
   LAST and fall back to the two-insn sequence below.  */
7168 delete_insns_since (last);
7170 /* If that failed, we must use a conservative two-insn sequence.
7171 use move to copy constant, MEM, or pseudo register to the reload
7172 register since "move" will be able to handle an arbitrary operand,
7173 unlike add which can't, in general. Then add the registers.
7175 If there is another way to do this for a specific machine, a
7176 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7179 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7180 || (GET_CODE (op1) == REG
7181 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7182 tem = op0, op0 = op1, op1 = tem;
7184 gen_reload (out, op0, opnum, type);
7186 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7187 This fixes a problem on the 32K where the stack pointer cannot
7188 be used as an operand of an add insn. */
7190 if (rtx_equal_p (op0, op1))
7193 insn = emit_insn (gen_add2_insn (out, op1));
7195 /* If that failed, copy the address register to the reload register.
7196 Then add the constant to the reload register. */
7198 code = recog_memoized (insn)
7202 insn_extract (insn);
7203 /* We want constrain operands to treat this insn strictly in
7204 its validity determination, i.e., the way it would after reload
7206 if (constrain_operands (code, 1))
7210 delete_insns_since (last);
7212 gen_reload (out, op1, opnum, type);
7213 emit_insn (gen_add2_insn (out, op0));
7216 #ifdef SECONDARY_MEMORY_NEEDED
7217 /* If we need a memory location to do the move, do it that way. */
7218 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7219 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7220 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7221 REGNO_REG_CLASS (REGNO (out)),
7224 /* Get the memory to use and rewrite both registers to its mode. */
7225 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7227 if (GET_MODE (loc) != GET_MODE (out))
7228 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
7230 if (GET_MODE (loc) != GET_MODE (in))
7231 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
/* Route the copy through the secondary-memory location: IN -> LOC -> OUT.  */
7233 gen_reload (loc, in, opnum, type);
7234 gen_reload (out, loc, opnum, type);
7238 /* If IN is a simple operand, use gen_move_insn. */
7239 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7240 emit_insn (gen_move_insn (out, in));
7242 #ifdef HAVE_reload_load_address
7243 else if (HAVE_reload_load_address)
7244 emit_insn (gen_reload_load_address (out, in));
7247 /* Otherwise, just write (set OUT IN) and hope for the best. */
7249 emit_insn (gen_rtx (SET, VOIDmode, out, in));
7251 /* Return the first insn emitted.
7252 We can not just return get_last_insn, because there may have
7253 been multiple instructions emitted. Also note that gen_move_insn may
7254 emit more than one insn itself, so we can not assume that there is one
7255 insn emitted per emit_insn_before call. */
7257 return last ? NEXT_INSN (last) : get_insns ();
7260 /* Delete a previously made output-reload
7261 whose result we now believe is not needed.
7262 First we double-check.
7264 INSN is the insn now being processed.
7265 OUTPUT_RELOAD_INSN is the insn of the output reload.
7266 J is the reload-number for this insn. */
/* NOTE(review): lines are elided from this listing (inner numbering has
   gaps): the declarations of I1/I2, several `return' statements that cut
   the scans short, and some braces are not visible.  Comments describe
   the visible code only.  */
7269 delete_output_reload (insn, j, output_reload_insn)
7272 rtx output_reload_insn;
7276 /* Get the raw pseudo-register referred to. */
7278 rtx reg = reload_in[j];
7279 while (GET_CODE (reg) == SUBREG)
7280 reg = SUBREG_REG (reg);
7282 /* If the pseudo-reg we are reloading is no longer referenced
7283 anywhere between the store into it and here,
7284 and no jumps or labels intervene, then the value can get
7285 here through the reload reg alone.
7286 Otherwise, give up--return. */
7287 for (i1 = NEXT_INSN (output_reload_insn);
7288 i1 != insn; i1 = NEXT_INSN (i1))
7290 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7292 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7293 && reg_mentioned_p (reg, PATTERN (i1)))
/* Pseudos listed in cannot_omit_stores must keep their stores.  */
7297 if (cannot_omit_stores[REGNO (reg)])
7300 /* If this insn will store in the pseudo again,
7301 the previous store can be removed. */
7302 if (reload_out[j] == reload_in[j])
7303 delete_insn (output_reload_insn);
7305 /* See if the pseudo reg has been completely replaced
7306 with reload regs. If so, delete the store insn
7307 and forget we had a stack slot for the pseudo. */
7308 else if (REG_N_DEATHS (REGNO (reg)) == 1
7309 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7310 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7314 /* We know that it was used only between here
7315 and the beginning of the current basic block.
7316 (We also know that the last use before INSN was
7317 the output reload we are thinking of deleting, but never mind that.)
7318 Search that range; see if any ref remains. */
7319 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7321 rtx set = single_set (i2);
7323 /* Uses which just store in the pseudo don't count,
7324 since if they are the only uses, they are dead. */
7325 if (set != 0 && SET_DEST (set) == reg)
7327 if (GET_CODE (i2) == CODE_LABEL
7328 || GET_CODE (i2) == JUMP_INSN)
7330 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7331 && reg_mentioned_p (reg, PATTERN (i2)))
7332 /* Some other ref remains;
7333 we can't do anything. */
7337 /* Delete the now-dead stores into this pseudo. */
7338 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7340 rtx set = single_set (i2);
7342 if (set != 0 && SET_DEST (set) == reg)
7344 /* This might be a basic block head,
7345 thus don't use delete_insn. */
7346 PUT_CODE (i2, NOTE);
7347 NOTE_SOURCE_FILE (i2) = 0;
7348 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7350 if (GET_CODE (i2) == CODE_LABEL
7351 || GET_CODE (i2) == JUMP_INSN)
7355 /* For the debugging info,
7356 say the pseudo lives in this reload reg. */
7357 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
/* alter_reg with -1 discards the pseudo's stack slot equivalence.  */
7358 alter_reg (REGNO (reg), -1);
7362 /* Output reload-insns to reload VALUE into RELOADREG.
7363 VALUE is an autoincrement or autodecrement RTX whose operand
7364 is a register or memory location;
7365 so reloading involves incrementing that location.
7367 INC_AMOUNT is the number to increment or decrement by (always positive).
7368 This cannot be deduced from VALUE. */
/* NOTE(review): this listing elides lines (parameter declarations for
   RELOADREG/VALUE/INC_AMOUNT, the declarations of INC, LAST, ADD_INSN and
   CODE, the guard around the initial post-increment copy, and some braces
   and returns).  Comments below cover only the visible code.  */
7371 inc_for_reload (reloadreg, value, inc_amount)
7376 /* REG or MEM to be copied and incremented. */
7377 rtx incloc = XEXP (value, 0);
7378 /* Nonzero if increment after copying. */
7379 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7385 /* No hard register is equivalent to this register after
7386 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7387 we could inc/dec that register as well (maybe even using it for
7388 the source), but I'm not sure it's worth worrying about. */
7389 if (GET_CODE (incloc) == REG)
7390 reg_last_reload_reg[REGNO (incloc)] = 0;
/* Decrements are expressed as adding a negated amount.  */
7392 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7393 inc_amount = - inc_amount;
7395 inc = GEN_INT (inc_amount);
7397 /* If this is post-increment, first copy the location to the reload reg. */
7399 emit_insn (gen_move_insn (reloadreg, incloc));
7401 /* See if we can directly increment INCLOC. Use a method similar to that
7404 last = get_last_insn ();
7405 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7406 gen_rtx (PLUS, GET_MODE (incloc),
7409 code = recog_memoized (add_insn);
7412 insn_extract (add_insn);
7413 if (constrain_operands (code, 1))
7415 /* If this is a pre-increment and we have incremented the value
7416 where it lives, copy the incremented value to RELOADREG to
7417 be used as an address. */
7420 emit_insn (gen_move_insn (reloadreg, incloc));
/* The direct in-place increment was not recognized; discard it.  */
7426 delete_insns_since (last);
7428 /* If couldn't do the increment directly, must increment in RELOADREG.
7429 The way we do this depends on whether this is pre- or post-increment.
7430 For pre-increment, copy INCLOC to the reload register, increment it
7431 there, then save back. */
7435 emit_insn (gen_move_insn (reloadreg, incloc));
7436 emit_insn (gen_add2_insn (reloadreg, inc));
7437 emit_insn (gen_move_insn (incloc, reloadreg));
7442 Because this might be a jump insn or a compare, and because RELOADREG
7443 may not be available after the insn in an input reload, we must do
7444 the incrementation before the insn being reloaded for.
7446 We have already copied INCLOC to RELOADREG. Increment the copy in
7447 RELOADREG, save that back, then decrement RELOADREG so it has
7448 the original value. */
7450 emit_insn (gen_add2_insn (reloadreg, inc));
7451 emit_insn (gen_move_insn (incloc, reloadreg));
7452 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7458 /* Return 1 if we are certain that the constraint-string STRING allows
7459 the hard register REG. Return 0 if we can't be sure of this. */
/* NOTE(review): most of this function's body (the loop structure, the
   switch cases, the `value' accumulator and the returns) is elided from
   this listing; only fragments are visible.  Consult the full source
   before relying on any behavior description.  */
7462 constraint_accepts_reg_p (string, reg)
7467 int regno = true_regnum (reg);
7470 /* Initialize for first alternative. */
7472 /* Check that each alternative contains `g' or `r'. */
7474 switch (c = *string++)
7477 /* If an alternative lacks `g' or `r', we lose. */
7480 /* If an alternative lacks `g' or `r', we lose. */
7483 /* Initialize for next alternative. */
7488 /* Any general reg wins for this alternative. */
7489 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7493 /* Any reg in specified class wins for this alternative. */
7495 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7497 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7503 /* Return the number of places FIND appears within X, but don't count
7504 an occurrence if some SET_DEST is FIND. */
/* NOTE(review): elided here (inner numbering gaps): the declarations of
   I, J and COUNT, the early-exit cases after GET_CODE, the SET case
   label, and the final return of COUNT.  The visible code is a standard
   recursive walk over the rtx format string.  */
7507 count_occurrences (x, find)
7508 register rtx x, find;
7511 register enum rtx_code code;
7512 register char *format_ptr;
7520 code = GET_CODE (x);
/* For a SET whose destination is FIND, count only within the source.  */
7535 if (SET_DEST (x) == find)
7536 return count_occurrences (SET_SRC (x), find);
7540 format_ptr = GET_RTX_FORMAT (code);
7543 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7545 switch (*format_ptr++)
7548 count += count_occurrences (XEXP (x, i), find);
7552 if (XVEC (x, i) != NULL)
7554 for (j = 0; j < XVECLEN (x, i); j++)
7555 count += count_occurrences (XVECEXP (x, i, j), find);
7563 /* This array holds values which are equivalent to a hard register
7564 during reload_cse_regs. Each array element is an EXPR_LIST of
7565 values. Each time a hard register is set, we set the corresponding
7566 array element to the value. Each time a hard register is copied
7567 into memory, we add the memory location to the corresponding array
7568 element. We don't store values or memory addresses with side
7569 effects in this array.
7571 If the value is a CONST_INT, then the mode of the containing
7572 EXPR_LIST is the mode in which that CONST_INT was referenced.
7574 We sometimes clobber a specific entry in a list. In that case, we
7575 just set XEXP (list-entry, 0) to 0. */
/* Indexed by hard register number; storage is provided (via alloca) by
   reload_cse_regs for FIRST_PSEUDO_REGISTER entries.  */
7577 static rtx *reg_values;
7579 /* This is a preallocated REG rtx which we use as a temporary in
7580 reload_cse_invalidate_regno, so that we don't need to allocate a
7581 new one each time through a loop in that function. */
/* Created once in reload_cse_regs; its mode and REGNO are rewritten
   in place on each use.  */
7583 static rtx invalidate_regno_rtx;
7585 /* Invalidate any entries in reg_values which depend on REGNO,
7586 including those for REGNO itself. This is called if REGNO is
7587 changing. If CLOBBER is true, then always forget anything we
7588 currently know about REGNO. MODE is the mode of the assignment to
7589 REGNO, which is used to determine how many hard registers are being
7590 changed. If MODE is VOIDmode, then only REGNO is being changed;
7591 this is used when invalidating call clobbered registers across a
/* NOTE(review): this listing has gaps -- the declarations of I, X and
   ENDREGNO, the negative-regno early return after reg_renumber lookup,
   the clobber handling inside the first loop, and several braces are
   elided.  Comments describe only the visible code.  */
7595 reload_cse_invalidate_regno (regno, mode, clobber)
7597 enum machine_mode mode;
7603 /* Our callers don't always go through true_regnum; we may see a
7604 pseudo-register here from a CLOBBER or the like. We probably
7605 won't ever see a pseudo-register that has a real register number,
7606 for we check anyhow for safety. */
7607 if (regno >= FIRST_PSEUDO_REGISTER)
7608 regno = reg_renumber[regno];
7612 if (mode == VOIDmode)
7613 endregno = regno + 1;
7615 endregno = regno + HARD_REGNO_NREGS (regno, mode);
7618 for (i = regno; i < endregno; i++)
/* Scan every hard register's value list for entries mentioning any
   register in [regno, endregno).  */
7621 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7625 for (x = reg_values[i]; x; x = XEXP (x, 1))
7627 if (XEXP (x, 0) != 0
7628 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
7630 /* If this is the only entry on the list, clear
7631 reg_values[i]. Otherwise, just clear this entry on
7633 if (XEXP (x, 1) == 0 && x == reg_values[i])
7643 /* We must look at earlier registers, in case REGNO is part of a
7644 multi word value but is not the first register. If an earlier
7645 register has a value in a mode which overlaps REGNO, then we must
7646 invalidate that earlier register. Note that we do not need to
7647 check REGNO or later registers (we must not check REGNO itself,
7648 because we would incorrectly conclude that there was a conflict). */
7650 for (i = 0; i < regno; i++)
7654 for (x = reg_values[i]; x; x = XEXP (x, 1))
7656 if (XEXP (x, 0) != 0)
/* Reuse the preallocated REG rtx: give it register I and the mode of
   this list entry, then test for overlap with the changed range.  */
7658 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
7659 REGNO (invalidate_regno_rtx) = i;
7660 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
7663 reload_cse_invalidate_regno (i, VOIDmode, 1);
7671 /* The memory at address MEM_BASE is being changed.
7672 Return whether this change will invalidate VAL. */
/* NOTE(review): elided from this listing: parameter/local declarations,
   the quick-case switch arms after GET_CODE, the MEM case label before
   the BLKmode test, and the final returns.  */
7675 reload_cse_mem_conflict_p (mem_base, val)
7683 code = GET_CODE (val);
7686 /* Get rid of a few simple cases quickly. */
/* BLKmode on either side means we cannot reason about extents; defer
   to anti_dependence for the conflict test.  */
7699 if (GET_MODE (mem_base) == BLKmode
7700 || GET_MODE (val) == BLKmode)
7702 return anti_dependence (val, mem_base);
/* Otherwise recurse over VAL's subexpressions looking for a conflict.  */
7708 fmt = GET_RTX_FORMAT (code);
7710 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7714 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
7717 else if (fmt[i] == 'E')
7721 for (j = 0; j < XVECLEN (val, i); j++)
7722 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
7730 /* Invalidate any entries in reg_values which are changed because of a
7731 store to MEM_RTX. If this is called because of a non-const call
7732 instruction, MEM_RTX is (mem:BLK const0_rtx). */
/* NOTE(review): declarations of I and X, the body that clears the
   conflicting entries, and closing braces are elided from this listing.  */
7735 reload_cse_invalidate_mem (mem_rtx)
/* Walk every hard register's value list and drop entries that the
   store to MEM_RTX could change.  */
7740 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7744 for (x = reg_values[i]; x; x = XEXP (x, 1))
7746 if (XEXP (x, 0) != 0
7747 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
7749 /* If this is the only entry on the list, clear
7750 reg_values[i]. Otherwise, just clear this entry on
7752 if (XEXP (x, 1) == 0 && x == reg_values[i])
7763 /* Invalidate DEST, which is being assigned to or clobbered. The
7764 second parameter exists so that this function can be passed to
7765 note_stores; it is ignored. */
/* NOTE(review): the parameter declarations are elided from this
   listing; the visible body is otherwise complete in outline.  */
7768 reload_cse_invalidate_rtx (dest, ignore)
/* Strip wrappers so we see the real register or memory destination.  */
7772 while (GET_CODE (dest) == STRICT_LOW_PART
7773 || GET_CODE (dest) == SIGN_EXTRACT
7774 || GET_CODE (dest) == ZERO_EXTRACT
7775 || GET_CODE (dest) == SUBREG)
7776 dest = XEXP (dest, 0);
7778 if (GET_CODE (dest) == REG)
7779 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
7780 else if (GET_CODE (dest) == MEM)
7781 reload_cse_invalidate_mem (dest);
7784 /* Do a very simple CSE pass over the hard registers.
7786 This function detects no-op moves where we happened to assign two
7787 different pseudo-registers to the same hard register, and then
7788 copied one to the other. Reload will generate a useless
7789 instruction copying a register to itself.
7791 This function also detects cases where we load a value from memory
7792 into two different registers, and (if memory is more expensive than
7793 registers) changes it to simply copy the first register into the
/* NOTE(review): this listing elides lines throughout (the declarations
   of INSN, I, FIRSTOBJ, CALLMEM and BODY, loop bodies that clear
   reg_values, several `continue' statements, and braces).  Comments
   describe only the visible code.  */
7797 reload_cse_regs (first)
7805 init_alias_analysis ();
/* One value list per hard register, freed automatically with the frame.  */
7807 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7808 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7811 /* Create our EXPR_LIST structures on reload_obstack, so that we can
7812 free them when we are done. */
7813 push_obstacks (&reload_obstack, &reload_obstack);
7814 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
7816 /* We pass this to reload_cse_invalidate_mem to invalidate all of
7817 memory for a non-const call instruction. */
7818 callmem = gen_rtx (MEM, BLKmode, const0_rtx);
7820 /* This is used in reload_cse_invalidate_regno to avoid consing a
7821 new REG in a loop in that function. */
7822 invalidate_regno_rtx = gen_rtx (REG, VOIDmode, 0);
7824 for (insn = first; insn; insn = NEXT_INSN (insn))
7828 if (GET_CODE (insn) == CODE_LABEL)
7830 /* Forget all the register values at a code label. We don't
7831 try to do anything clever around jumps. */
7832 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7838 #ifdef NON_SAVING_SETJMP
7839 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
7840 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
7842 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* Skip anything that is not a real instruction.  */
7849 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7852 /* If this is a call instruction, forget anything stored in a
7853 call clobbered register, or, if this is not a const call, in
7855 if (GET_CODE (insn) == CALL_INSN)
7857 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7858 if (call_used_regs[i])
7859 reload_cse_invalidate_regno (i, VOIDmode, 1);
7861 if (! CONST_CALL_P (insn))
7862 reload_cse_invalidate_mem (callmem);
7865 body = PATTERN (insn);
7866 if (GET_CODE (body) == SET)
/* A single SET that is a no-op is turned into a deleted note.  */
7868 if (reload_cse_noop_set_p (body, insn))
7870 PUT_CODE (insn, NOTE);
7871 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7872 NOTE_SOURCE_FILE (insn) = 0;
7874 /* We're done with this insn. */
7878 reload_cse_simplify_set (body, insn);
7879 reload_cse_record_set (body, body);
7881 else if (GET_CODE (body) == PARALLEL)
7885 /* If every action in a PARALLEL is a noop, we can delete
7886 the entire PARALLEL. */
7887 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7888 if ((GET_CODE (XVECEXP (body, 0, i)) != SET
7889 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
7890 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
7894 PUT_CODE (insn, NOTE);
7895 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7896 NOTE_SOURCE_FILE (insn) = 0;
7898 /* We're done with this insn. */
7902 /* Look through the PARALLEL and record the values being
7903 set, if possible. Also handle any CLOBBERs. */
7904 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7906 rtx x = XVECEXP (body, 0, i);
7908 if (GET_CODE (x) == SET
7909 reload_cse_record_set (x, body);
7911 note_stores (x, reload_cse_invalidate_rtx);
7915 note_stores (body, reload_cse_invalidate_rtx);
7918 /* Clobber any registers which appear in REG_INC notes. We
7919 could keep track of the changes to their values, but it is
7920 unlikely to help. */
7924 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
7925 if (REG_NOTE_KIND (x) == REG_INC)
7926 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
7930 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
7931 after we have processed the insn. */
7932 if (GET_CODE (insn) == CALL_INSN)
7936 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
7937 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
7938 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
7942 /* Free all the temporary structures we created, and go back to the
7943 regular obstacks. */
7944 obstack_free (&reload_obstack, firstobj);
7948 /* Return whether the values known for REGNO are equal to VAL. MODE
7949 is the mode of the object that VAL is being copied to; this matters
7950 if VAL is a CONST_INT. */
/* NOTE(review): the parameter declarations for REGNO and VAL, the local
   declaration of X, the `return 1' inside the loop, and the final
   `return 0' appear to be elided from this listing.  */
7953 reload_cse_regno_equal_p (regno, val, mode)
7956 enum machine_mode mode;
/* Scan REGNO's value list for an entry equal to VAL.  For CONST_INTs
   the mode recorded on the EXPR_LIST must also be compatible.  */
7963 for (x = reg_values[regno]; x; x = XEXP (x, 1))
7964 if (XEXP (x, 0) != 0
7965 && rtx_equal_p (XEXP (x, 0), val)
7966 && (GET_CODE (val) != CONST_INT
7967 || mode == GET_MODE (x)
7968 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
7969 /* On a big endian machine if the value spans more than
7970 one register then this register holds the high part of
7971 it and we can't use it.
7973 ??? We should also compare with the high part of the
7975 && !(WORDS_BIG_ENDIAN
7976 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
7977 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
7978 GET_MODE_BITSIZE (GET_MODE (x))))))
7984 /* See whether a single set is a noop. SET is the set instruction we
7985 are should check, and INSN is the instruction from which it came. */
/* NOTE(review): this listing elides lines (declarations of SRC, DEST,
   DREG, SREG, RET and TRIAL; several returns; the conditions guarding
   some of the `else if' arms; braces).  Comments below describe only
   the visible code.  */
7988 reload_cse_noop_set_p (set, insn)
7993 enum machine_mode dest_mode;
7997 src = SET_SRC (set);
7998 dest = SET_DEST (set);
7999 dest_mode = GET_MODE (dest);
/* A source with side effects can never be removed as a no-op.  */
8001 if (side_effects_p (src))
8004 dreg = true_regnum (dest);
8005 sreg = true_regnum (src);
8007 /* Check for setting a register to itself. In this case, we don't
8008 have to worry about REG_DEAD notes. */
8009 if (dreg >= 0 && dreg == sreg)
8015 /* Check for setting a register to itself. */
8019 /* Check for setting a register to a value which we already know
8020 is in the register. */
8021 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8024 /* Check for setting a register DREG to another register SREG
8025 where SREG is equal to a value which is already in DREG. */
8030 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8034 if (XEXP (x, 0) == 0)
8037 if (dest_mode == GET_MODE (x))
8039 else if (GET_MODE_BITSIZE (dest_mode)
8040 < GET_MODE_BITSIZE (GET_MODE (x)))
8041 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8046 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8054 else if (GET_CODE (dest) == MEM)
8056 /* Check for storing a register to memory when we know that the
8057 register is equivalent to the memory location. */
8059 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8060 && ! side_effects_p (dest))
8064 /* If we can delete this SET, then we need to look for an earlier
8065 REG_DEAD note on DREG, and remove it if it exists. */
8068 if (! find_regno_note (insn, REG_UNUSED, dreg))
/* Walk backwards to the enclosing basic-block boundary looking for
   the REG_DEAD note to remove.  */
8072 for (trial = prev_nonnote_insn (insn);
8074 && GET_CODE (trial) != CODE_LABEL
8075 && GET_CODE (trial) != BARRIER);
8076 trial = prev_nonnote_insn (trial))
8078 if (find_regno_note (trial, REG_DEAD, dreg))
8080 remove_death (dreg, trial);
8090 /* Try to simplify a single SET instruction. SET is the set pattern.
8091 INSN is the instruction it came from. */
/* NOTE(review): lines are elided from this listing (declarations of
   DREG, SRC, I and VALIDATED; the early return when DREG < 0; obstack
   pops around validate_change; braces and returns).  */
8094 reload_cse_simplify_set (set, insn)
8100 enum machine_mode dest_mode;
8101 enum reg_class dclass;
8104 /* We only handle one case: if we set a register to a value which is
8105 not a register, we try to find that value in some other register
8106 and change the set into a register copy. */
8108 dreg = true_regnum (SET_DEST (set));
8112 src = SET_SRC (set);
8113 if (side_effects_p (src) || true_regnum (src) >= 0)
8116 /* If memory loads are cheaper than register copies, don't change
8118 if (GET_CODE (src) == MEM && MEMORY_MOVE_COST (GET_MODE (src)) < 2)
8121 dest_mode = GET_MODE (SET_DEST (set));
8122 dclass = REGNO_REG_CLASS (dreg);
/* Search every hard register for one already known to hold SRC and
   cheap to copy into DCLASS.  */
8123 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8126 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8127 && reload_cse_regno_equal_p (i, src, dest_mode))
8131 /* Pop back to the real obstacks while changing the insn. */
8134 validated = validate_change (insn, &SET_SRC (set),
8135 gen_rtx (REG, dest_mode, i), 0);
8137 /* Go back to the obstack we are using for temporary
8139 push_obstacks (&reload_obstack, &reload_obstack);
8143 /* We need to look for an earlier REG_DEAD note on I,
8144 and remove it if it exists. */
8145 if (! find_regno_note (insn, REG_UNUSED, i))
8149 for (trial = prev_nonnote_insn (insn);
8151 && GET_CODE (trial) != CODE_LABEL
8152 && GET_CODE (trial) != BARRIER);
8153 trial = prev_nonnote_insn (trial))
8155 if (find_regno_note (trial, REG_DEAD, i))
8157 remove_death (i, trial);
8169 /* These two variables are used to pass information from
8170 reload_cse_record_set to reload_cse_check_clobber. */
/* Set nonzero by reload_cse_check_clobber when the rtx in
   reload_cse_check_src is overwritten.  */
8172 static int reload_cse_check_clobbered;
/* The source rtx whose clobbering we are checking for.  */
8173 static rtx reload_cse_check_src;
8175 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
8176 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
8177 second argument, which is passed by note_stores, is ignored. */
/* NOTE(review): the parameter declarations are elided from this listing.  */
8180 reload_cse_check_clobber (dest, ignore)
8184 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8185 reload_cse_check_clobbered = 1;
8188 /* Record the result of a SET instruction. SET is the set pattern.
8189 BODY is the pattern of the insn that it came from. */
/* NOTE(review): this listing elides lines (declarations of DEST, SRC,
   DREG, SREG, X and I; the initialization of X before the push_operand
   scan; several returns and braces; and, apparently, the function's
   closing lines after the final comment).  Comments below describe only
   the visible code.  */
8192 reload_cse_record_set (set, body)
8198 enum machine_mode dest_mode;
8200 dest = SET_DEST (set);
8201 src = SET_SRC (set);
8202 dreg = true_regnum (dest);
8203 sreg = true_regnum (src);
8204 dest_mode = GET_MODE (dest);
8206 /* Some machines don't define AUTO_INC_DEC, but they still use push
8207 instructions. We need to catch that case here in order to
8208 invalidate the stack pointer correctly. Note that invalidating
8209 the stack pointer is different from invalidating DEST. */
8211 while (GET_CODE (x) == SUBREG
8212 || GET_CODE (x) == ZERO_EXTRACT
8213 || GET_CODE (x) == SIGN_EXTRACT
8214 || GET_CODE (x) == STRICT_LOW_PART)
8216 if (push_operand (x, GET_MODE (x)))
8218 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
8219 reload_cse_invalidate_rtx (dest, NULL_RTX);
8223 /* We can only handle an assignment to a register, or a store of a
8224 register to a memory location. For other cases, we just clobber
8225 the destination. We also have to just clobber if there are side
8226 effects in SRC or DEST. */
8227 if ((dreg < 0 && GET_CODE (dest) != MEM)
8228 || side_effects_p (src)
8229 || side_effects_p (dest))
8231 reload_cse_invalidate_rtx (dest, NULL_RTX);
8236 /* We don't try to handle values involving CC, because it's a pain
8237 to keep track of when they have to be invalidated. */
8238 if (reg_mentioned_p (cc0_rtx, src)
8239 || reg_mentioned_p (cc0_rtx, dest))
8241 reload_cse_invalidate_rtx (dest, NULL_RTX);
8246 /* If BODY is a PARALLEL, then we need to see whether the source of
8247 SET is clobbered by some other instruction in the PARALLEL. */
8248 if (GET_CODE (body) == PARALLEL)
8252 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8256 x = XVECEXP (body, 0, i);
/* Use the file-scope pair consumed by reload_cse_check_clobber to
   detect overlap between this PARALLEL element's stores and SRC.  */
8260 reload_cse_check_clobbered = 0;
8261 reload_cse_check_src = src;
8262 note_stores (x, reload_cse_check_clobber);
8263 if (reload_cse_check_clobbered)
8265 reload_cse_invalidate_rtx (dest, NULL_RTX);
8275 /* This is an assignment to a register. Update the value we
8276 have stored for the register. */
8281 /* This is a copy from one register to another. Any values
8282 which were valid for SREG are now valid for DREG. If the
8283 mode changes, we use gen_lowpart_common to extract only
8284 the part of the value that is copied. */
8285 reg_values[dreg] = 0;
8286 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8290 if (XEXP (x, 0) == 0)
8292 if (dest_mode == GET_MODE (XEXP (x, 0)))
8294 else if (GET_MODE_BITSIZE (dest_mode)
8295 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
8298 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8300 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, tmp,
8305 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, src, NULL_RTX);
8307 /* We've changed DREG, so invalidate any values held by other
8308 registers that depend upon it. */
8309 reload_cse_invalidate_regno (dreg, dest_mode, 0);
8311 /* If this assignment changes more than one hard register,
8312 forget anything we know about the others. */
8313 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
8314 reg_values[dreg + i] = 0;
8316 else if (GET_CODE (dest) == MEM)
8318 /* Invalidate conflicting memory locations. */
8319 reload_cse_invalidate_mem (dest);
8321 /* If we're storing a register to memory, add DEST to the list
8323 if (sreg >= 0 && ! side_effects_p (dest))
8324 reg_values[sreg] = gen_rtx (EXPR_LIST, dest_mode, dest,
8329 /* We should have bailed out earlier. */