1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-6, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
32 #include "hard-reg-set.h"
35 #include "basic-block.h"
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine). */
108 rtx *reg_equiv_address;
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.
164 (spill_reg_order prevents these registers from being used to start a
166 static HARD_REG_SET bad_spill_regs;
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
173 /* This reg set indicates those registers that have been used as spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
178 HARD_REG_SET used_spill_regs;
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
183 static int last_spill_reg;
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
207 /* Indexed by pseudo reg number N,
208 says may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
220 static char spill_indirect_levels;
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
226 char indirect_symref_ok;
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
230 char double_reg_address_ok;
232 /* Record the stack slot for each spilled hard register. */
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
236 /* Width allocated so far for that stack slot. */
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
245 char *basic_block_needs[N_REG_CLASSES];
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
254 int caller_save_needed;
256 /* The register class to use for a base register when reloading an
257 address. This is normally BASE_REG_CLASS, but it may be different
258 when using SMALL_REGISTER_CLASSES and passing parameters in
260 enum reg_class reload_address_base_reg_class;
262 /* The register class to use for an index register when reloading an
263 address. This is normally INDEX_REG_CLASS, but it may be different
264 when using SMALL_REGISTER_CLASSES and passing parameters in
266 enum reg_class reload_address_index_reg_class;
268 /* Set to 1 while reload_as_needed is operating.
269 Required by some machines to handle any generated moves differently. */
271 int reload_in_progress = 0;
273 /* These arrays record the insn_code of insns that may be needed to
274 perform input and output reloads of special objects. They provide a
275 place to pass a scratch register. */
277 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
278 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
280 /* This obstack is used for allocation of rtl during register elimination.
281 The allocated storage can be freed once find_reloads has processed the
284 struct obstack reload_obstack;
285 char *reload_firstobj;
287 #define obstack_chunk_alloc xmalloc
288 #define obstack_chunk_free free
290 /* List of labels that must never be deleted. */
291 extern rtx forced_labels;
293 /* Allocation number table from global register allocation. */
294 extern int *reg_allocno;
296 /* This structure is used to record information about register eliminations.
297 Each array entry describes one possible way of eliminating a register
298 in favor of another. If there is more than one way of eliminating a
299 particular register, the most preferred should be specified first. */
301 static struct elim_table
303 int from; /* Register number to be eliminated. */
304 int to; /* Register number used as replacement. */
305 int initial_offset; /* Initial difference between values. */
306 int can_eliminate; /* Non-zero if this elimination can be done. */
307 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
308 insns made by reload. */
309 int offset; /* Current offset between the two regs. */
310 int max_offset; /* Maximum offset between the two regs. */
311 int previous_offset; /* Offset at end of previous insn. */
312 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
313 rtx from_rtx; /* REG rtx for the register to be eliminated.
314 We cannot simply compare the number since
315 we might then spuriously replace a hard
316 register corresponding to a pseudo
317 assigned to the reg to be eliminated. */
318 rtx to_rtx; /* REG rtx for the replacement. */
321 /* If a set of eliminable registers was specified, define the table from it.
322 Otherwise, default to the normal case of the frame pointer being
323 replaced by the stack pointer. */
325 #ifdef ELIMINABLE_REGS
328 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
331 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
333 /* Record the number of pending eliminations that have an offset not equal
334 to their initial offset. If non-zero, we use a new copy of each
335 replacement result in any insns encountered. */
336 static int num_not_at_initial_offset;
338 /* Count the number of registers that we may be able to eliminate. */
339 static int num_eliminable;
341 /* For each label, we record the offset of each elimination. If we reach
342 a label by more than one path and an offset differs, we cannot do the
343 elimination. This information is indexed by the number of the label.
344 The first table is an array of flags that records whether we have yet
345 encountered a label and the second table is an array of arrays, one
346 entry in the latter array for each elimination. */
348 static char *offsets_known_at;
349 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
351 /* Number of labels in the current function. */
353 static int num_labels;
355 struct hard_reg_n_uses { int regno; int uses; };
357 static int possible_group_p PROTO((int, int *));
358 static void count_possible_groups PROTO((int *, enum machine_mode *,
360 static int modes_equiv_for_class_p PROTO((enum machine_mode,
363 static void spill_failure PROTO((rtx));
364 static int new_spill_reg PROTO((int, int, int *, int *, int,
366 static void delete_dead_insn PROTO((rtx));
367 static void alter_reg PROTO((int, int));
368 static void mark_scratch_live PROTO((rtx));
369 static void set_label_offsets PROTO((rtx, rtx, int));
370 static int eliminate_regs_in_insn PROTO((rtx, int));
371 static void mark_not_eliminable PROTO((rtx, rtx));
372 static int spill_hard_reg PROTO((int, int, FILE *, int));
373 static void scan_paradoxical_subregs PROTO((rtx));
374 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void order_regs_for_reload PROTO((int));
376 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
377 static void reload_as_needed PROTO((rtx, int));
378 static void forget_old_reloads_1 PROTO((rtx, rtx));
379 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
380 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
382 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
384 static int reload_reg_free_p PROTO((int, int, enum reload_type));
385 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
386 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
387 static int reloads_conflict PROTO((int, int));
388 static int allocate_reload_reg PROTO((int, rtx, int, int));
389 static void choose_reload_regs PROTO((rtx, rtx));
390 static void merge_assigned_reloads PROTO((rtx));
391 static void emit_reload_insns PROTO((rtx));
392 static void delete_output_reload PROTO((rtx, int, rtx));
393 static void inc_for_reload PROTO((rtx, rtx, int));
394 static int constraint_accepts_reg_p PROTO((char *, rtx));
395 static int count_occurrences PROTO((rtx, rtx));
396 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
397 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
398 static void reload_cse_invalidate_mem PROTO((rtx));
399 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
400 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
401 static int reload_cse_noop_set_p PROTO((rtx, rtx));
402 static int reload_cse_simplify_set PROTO((rtx, rtx));
403 static int reload_cse_simplify_operands PROTO((rtx));
404 static void reload_cse_check_clobber PROTO((rtx, rtx));
405 static void reload_cse_record_set PROTO((rtx, rtx));
406 static void reload_cse_delete_death_notes PROTO((rtx));
407 static void reload_cse_no_longer_dead PROTO((int, enum machine_mode));
409 /* Initialize the reload pass once per compilation. */
416 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
417 Set spill_indirect_levels to the number of levels such addressing is
418 permitted, zero if it is not permitted at all. */
421 = gen_rtx (MEM, Pmode,
422 gen_rtx (PLUS, Pmode,
423 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
425 spill_indirect_levels = 0;
427 while (memory_address_p (QImode, tem))
429 spill_indirect_levels++;
430 tem = gen_rtx (MEM, Pmode, tem);
433 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
435 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
436 indirect_symref_ok = memory_address_p (QImode, tem);
438 /* See if reg+reg is a valid (and offsettable) address. */
440 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
442 tem = gen_rtx (PLUS, Pmode,
443 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
444 gen_rtx (REG, Pmode, i));
445 /* This way, we make sure that reg+reg is an offsettable address. */
446 tem = plus_constant (tem, 4);
448 if (memory_address_p (QImode, tem))
450 double_reg_address_ok = 1;
455 /* Initialize obstack for our rtl allocation. */
456 gcc_obstack_init (&reload_obstack);
457 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
459 /* Decide which register class should be used when reloading
460 addresses. If we are using SMALL_REGISTER_CLASSES, and any
461 parameters are passed in registers, then we do not want to use
462 those registers when reloading an address. Otherwise, if a
463 function argument needs a reload, we may wind up clobbering
464 another argument to the function which was already computed. If
465 we find a subset class which simply avoids those registers, we
466 use it instead. ??? It would be better to only use the
467 restricted class when we actually are loading function arguments,
468 but that is hard to determine. */
469 reload_address_base_reg_class = BASE_REG_CLASS;
470 reload_address_index_reg_class = INDEX_REG_CLASS;
471 if (SMALL_REGISTER_CLASSES)
474 HARD_REG_SET base, index;
477 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
478 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
479 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
481 if (FUNCTION_ARG_REGNO_P (regno))
483 CLEAR_HARD_REG_BIT (base, regno);
484 CLEAR_HARD_REG_BIT (index, regno);
488 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
490 for (p = reg_class_subclasses[BASE_REG_CLASS];
491 *p != LIM_REG_CLASSES;
494 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
497 reload_address_base_reg_class = *p;
502 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
504 for (p = reg_class_subclasses[INDEX_REG_CLASS];
505 *p != LIM_REG_CLASSES;
508 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
511 reload_address_index_reg_class = *p;
518 /* Main entry point for the reload pass.
520 FIRST is the first insn of the function being compiled.
522 GLOBAL nonzero means we were called from global_alloc
523 and should attempt to reallocate any pseudoregs that we
524 displace from hard regs we will use for reloads.
525 If GLOBAL is zero, we do not have enough information to do that,
526 so any pseudo reg that is spilled must go to the stack.
528 DUMPFILE is the global-reg debugging dump file stream, or 0.
529 If it is nonzero, messages are written to it to describe
530 which registers are seized as reload regs, which pseudo regs
531 are spilled from them, and where the pseudo regs are reallocated to.
533 Return value is nonzero if reload failed
534 and we must not do any more for this function. */
537 reload (first, global, dumpfile)
543 register int i, j, k;
545 register struct elim_table *ep;
547 /* The two pointers used to track the true location of the memory used
548 for label offsets. */
549 char *real_known_ptr = NULL_PTR;
550 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
552 int something_changed;
553 int something_needs_reloads;
554 int something_needs_elimination;
555 int new_basic_block_needs;
556 enum reg_class caller_save_spill_class = NO_REGS;
557 int caller_save_group_size = 1;
559 /* Nonzero means we couldn't get enough spill regs. */
562 /* The basic block number currently being processed for INSN. */
565 /* Make sure even insns with volatile mem refs are recognizable. */
568 /* Enable find_equiv_reg to distinguish insns made by reload. */
569 reload_first_uid = get_max_uid ();
571 for (i = 0; i < N_REG_CLASSES; i++)
572 basic_block_needs[i] = 0;
574 #ifdef SECONDARY_MEMORY_NEEDED
575 /* Initialize the secondary memory table. */
576 clear_secondary_mem ();
579 /* Remember which hard regs appear explicitly
580 before we merge into `regs_ever_live' the ones in which
581 pseudo regs have been allocated. */
582 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
584 /* We don't have a stack slot for any spill reg yet. */
585 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
586 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
588 /* Initialize the save area information for caller-save, in case some
592 /* Compute which hard registers are now in use
593 as homes for pseudo registers.
594 This is done here rather than (eg) in global_alloc
595 because this point is reached even if not optimizing. */
596 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
599 /* A function that receives a nonlocal goto must save all call-saved
601 if (current_function_has_nonlocal_label)
602 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
604 if (! call_used_regs[i] && ! fixed_regs[i])
605 regs_ever_live[i] = 1;
608 for (i = 0; i < scratch_list_length; i++)
610 mark_scratch_live (scratch_list[i]);
612 /* Make sure that the last insn in the chain
613 is not something that needs reloading. */
614 emit_note (NULL_PTR, NOTE_INSN_DELETED);
616 /* Find all the pseudo registers that didn't get hard regs
617 but do have known equivalent constants or memory slots.
618 These include parameters (known equivalent to parameter slots)
619 and cse'd or loop-moved constant memory addresses.
621 Record constant equivalents in reg_equiv_constant
622 so they will be substituted by find_reloads.
623 Record memory equivalents in reg_mem_equiv so they can
624 be substituted eventually by altering the REG-rtx's. */
626 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
627 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
628 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
629 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
630 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
631 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
632 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
633 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
634 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
635 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
636 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
637 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
638 cannot_omit_stores = (char *) alloca (max_regno);
639 bzero (cannot_omit_stores, max_regno);
641 if (SMALL_REGISTER_CLASSES)
642 CLEAR_HARD_REG_SET (forbidden_regs);
644 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
645 Also find all paradoxical subregs and find largest such for each pseudo.
646 On machines with small register classes, record hard registers that
647 are used for user variables. These can never be used for spills.
648 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
649 caller-saved registers must be marked live. */
651 for (insn = first; insn; insn = NEXT_INSN (insn))
653 rtx set = single_set (insn);
655 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
656 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
657 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
658 if (! call_used_regs[i])
659 regs_ever_live[i] = 1;
661 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
663 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
665 #ifdef LEGITIMATE_PIC_OPERAND_P
666 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
667 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
671 rtx x = XEXP (note, 0);
672 i = REGNO (SET_DEST (set));
673 if (i > LAST_VIRTUAL_REGISTER)
675 if (GET_CODE (x) == MEM)
676 reg_equiv_memory_loc[i] = x;
677 else if (CONSTANT_P (x))
679 if (LEGITIMATE_CONSTANT_P (x))
680 reg_equiv_constant[i] = x;
682 reg_equiv_memory_loc[i]
683 = force_const_mem (GET_MODE (SET_DEST (set)), x);
688 /* If this register is being made equivalent to a MEM
689 and the MEM is not SET_SRC, the equivalencing insn
690 is one with the MEM as a SET_DEST and it occurs later.
691 So don't mark this insn now. */
692 if (GET_CODE (x) != MEM
693 || rtx_equal_p (SET_SRC (set), x))
694 reg_equiv_init[i] = insn;
699 /* If this insn is setting a MEM from a register equivalent to it,
700 this is the equivalencing insn. */
701 else if (set && GET_CODE (SET_DEST (set)) == MEM
702 && GET_CODE (SET_SRC (set)) == REG
703 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
704 && rtx_equal_p (SET_DEST (set),
705 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
706 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
708 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
709 scan_paradoxical_subregs (PATTERN (insn));
712 /* Does this function require a frame pointer? */
714 frame_pointer_needed = (! flag_omit_frame_pointer
715 #ifdef EXIT_IGNORE_STACK
716 /* ?? If EXIT_IGNORE_STACK is set, we will not save
717 and restore sp for alloca. So we can't eliminate
718 the frame pointer in that case. At some point,
719 we should improve this by emitting the
720 sp-adjusting insns for this case. */
721 || (current_function_calls_alloca
722 && EXIT_IGNORE_STACK)
724 || FRAME_POINTER_REQUIRED);
728 /* Initialize the table of registers to eliminate. The way we do this
729 depends on how the eliminable registers were defined. */
730 #ifdef ELIMINABLE_REGS
731 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
733 ep->can_eliminate = ep->can_eliminate_previous
734 = (CAN_ELIMINATE (ep->from, ep->to)
735 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
738 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
739 = ! frame_pointer_needed;
742 /* Count the number of eliminable registers and build the FROM and TO
743 REG rtx's. Note that code in gen_rtx will cause, e.g.,
744 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
745 We depend on this. */
746 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
748 num_eliminable += ep->can_eliminate;
749 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
750 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
753 num_labels = max_label_num () - get_first_label_num ();
755 /* Allocate the tables used to store offset information at labels. */
756 /* We used to use alloca here, but the size of what it would try to
757 allocate would occasionally cause it to exceed the stack limit and
758 cause a core dump. */
759 real_known_ptr = xmalloc (num_labels);
761 = (int (*)[NUM_ELIMINABLE_REGS])
762 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
764 offsets_known_at = real_known_ptr - get_first_label_num ();
766 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
768 /* Alter each pseudo-reg rtx to contain its hard reg number.
769 Assign stack slots to the pseudos that lack hard regs or equivalents.
770 Do not touch virtual registers. */
772 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
775 /* If we have some registers we think can be eliminated, scan all insns to
776 see if there is an insn that sets one of these registers to something
777 other than itself plus a constant. If so, the register cannot be
778 eliminated. Doing this scan here eliminates an extra pass through the
779 main reload loop in the most common case where register elimination
781 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
782 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
783 || GET_CODE (insn) == CALL_INSN)
784 note_stores (PATTERN (insn), mark_not_eliminable);
786 #ifndef REGISTER_CONSTRAINTS
787 /* If all the pseudo regs have hard regs,
788 except for those that are never referenced,
789 we know that no reloads are needed. */
790 /* But that is not true if there are register constraints, since
791 in that case some pseudos might be in the wrong kind of hard reg. */
793 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
794 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
797 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
799 free (real_known_ptr);
805 /* Compute the order of preference for hard registers to spill.
806 Store them by decreasing preference in potential_reload_regs. */
808 order_regs_for_reload (global);
810 /* So far, no hard regs have been spilled. */
812 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
813 spill_reg_order[i] = -1;
815 /* Initialize to -1, which means take the first spill register. */
818 /* On most machines, we can't use any register explicitly used in the
819 rtl as a spill register. But on some, we have to. Those will have
820 taken care to keep the life of hard regs as short as possible. */
822 if (! SMALL_REGISTER_CLASSES)
823 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
825 /* Spill any hard regs that we know we can't eliminate. */
826 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
827 if (! ep->can_eliminate)
828 spill_hard_reg (ep->from, global, dumpfile, 1);
830 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
831 if (frame_pointer_needed)
832 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
836 for (i = 0; i < N_REG_CLASSES; i++)
838 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
839 bzero (basic_block_needs[i], n_basic_blocks);
842 /* From now on, we need to emit any moves without making new pseudos. */
843 reload_in_progress = 1;
845 /* This loop scans the entire function each go-round
846 and repeats until one repetition spills no additional hard regs. */
848 /* This flag is set when a pseudo reg is spilled,
849 to require another pass. Note that getting an additional reload
850 reg does not necessarily imply any pseudo reg was spilled;
851 sometimes we find a reload reg that no pseudo reg was allocated in. */
852 something_changed = 1;
853 /* This flag is set if there are any insns that require reloading. */
854 something_needs_reloads = 0;
855 /* This flag is set if there are any insns that require register
857 something_needs_elimination = 0;
858 while (something_changed)
862 /* For each class, number of reload regs needed in that class.
863 This is the maximum over all insns of the needs in that class
864 of the individual insn. */
865 int max_needs[N_REG_CLASSES];
866 /* For each class, size of group of consecutive regs
867 that is needed for the reloads of this class. */
868 int group_size[N_REG_CLASSES];
869 /* For each class, max number of consecutive groups needed.
870 (Each group contains group_size[CLASS] consecutive registers.) */
871 int max_groups[N_REG_CLASSES];
872 /* For each class, max number needed of regs that don't belong
873 to any of the groups. */
874 int max_nongroups[N_REG_CLASSES];
875 /* For each class, the machine mode which requires consecutive
876 groups of regs of that class.
877 If two different modes ever require groups of one class,
878 they must be the same size and equally restrictive for that class,
879 otherwise we can't handle the complexity. */
880 enum machine_mode group_mode[N_REG_CLASSES];
881 /* Record the insn where each maximum need is first found. */
882 rtx max_needs_insn[N_REG_CLASSES];
883 rtx max_groups_insn[N_REG_CLASSES];
884 rtx max_nongroups_insn[N_REG_CLASSES];
886 HOST_WIDE_INT starting_frame_size;
887 int previous_frame_pointer_needed = frame_pointer_needed;
888 static char *reg_class_names[] = REG_CLASS_NAMES;
890 something_changed = 0;
891 bzero ((char *) max_needs, sizeof max_needs);
892 bzero ((char *) max_groups, sizeof max_groups);
893 bzero ((char *) max_nongroups, sizeof max_nongroups);
894 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
895 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
896 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
897 bzero ((char *) group_size, sizeof group_size);
898 for (i = 0; i < N_REG_CLASSES; i++)
899 group_mode[i] = VOIDmode;
901 /* Keep track of which basic blocks are needing the reloads. */
904 /* Remember whether any element of basic_block_needs
905 changes from 0 to 1 in this pass. */
906 new_basic_block_needs = 0;
908 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
909 here because the stack size may be a part of the offset computation
910 for register elimination, and there might have been new stack slots
911 created in the last iteration of this loop. */
912 assign_stack_local (BLKmode, 0, 0);
914 starting_frame_size = get_frame_size ();
916 /* Reset all offsets on eliminable registers to their initial values. */
917 #ifdef ELIMINABLE_REGS
918 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
920 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
921 ep->previous_offset = ep->offset
922 = ep->max_offset = ep->initial_offset;
925 #ifdef INITIAL_FRAME_POINTER_OFFSET
926 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
928 if (!FRAME_POINTER_REQUIRED)
930 reg_eliminate[0].initial_offset = 0;
932 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
933 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
936 num_not_at_initial_offset = 0;
938 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
940 /* Set a known offset for each forced label to be at the initial offset
941 of each elimination. We do this because we assume that all
942 computed jumps occur from a location where each elimination is
943 at its initial offset. */
945 for (x = forced_labels; x; x = XEXP (x, 1))
947 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
949 /* For each pseudo register that has an equivalent location defined,
950 try to eliminate any eliminable registers (such as the frame pointer)
951 assuming initial offsets for the replacement register, which
954 If the resulting location is directly addressable, substitute
955 the MEM we just got directly for the old REG.
957 If it is not addressable but is a constant or the sum of a hard reg
958 and constant, it is probably not addressable because the constant is
959 out of range, in that case record the address; we will generate
960 hairy code to compute the address in a register each time it is
961 needed. Similarly if it is a hard register, but one that is not
962 valid as an address register.
964 If the location is not addressable, but does not have one of the
965 above forms, assign a stack slot. We have to do this to avoid the
966 potential of producing lots of reloads if, e.g., a location involves
967 a pseudo that didn't get a hard register and has an equivalent memory
968 location that also involves a pseudo that didn't get a hard register.
970 Perhaps at some point we will improve reload_when_needed handling
971 so this problem goes away. But that's very hairy. */
973 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
974 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
976 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX, 0);
978 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
980 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
981 else if (CONSTANT_P (XEXP (x, 0))
982 || (GET_CODE (XEXP (x, 0)) == REG
983 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
984 || (GET_CODE (XEXP (x, 0)) == PLUS
985 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
986 && (REGNO (XEXP (XEXP (x, 0), 0))
987 < FIRST_PSEUDO_REGISTER)
988 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
989 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
992 /* Make a new stack slot. Then indicate that something
993 changed so we go back and recompute offsets for
994 eliminable registers because the allocation of memory
995 below might change some offset. reg_equiv_{mem,address}
996 will be set up for this pseudo on the next pass around
998 reg_equiv_memory_loc[i] = 0;
999 reg_equiv_init[i] = 0;
1001 something_changed = 1;
1005 /* If we allocated another pseudo to the stack, redo elimination
1007 if (something_changed)
1010 /* If caller-saves needs a group, initialize the group to include
1011 the size and mode required for caller-saves. */
1013 if (caller_save_group_size > 1)
1015 group_mode[(int) caller_save_spill_class] = Pmode;
1016 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1019 /* Compute the most additional registers needed by any instruction.
1020 Collect information separately for each class of regs. */
1022 for (insn = first; insn; insn = NEXT_INSN (insn))
1024 if (global && this_block + 1 < n_basic_blocks
1025 && insn == basic_block_head[this_block+1])
1028 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1029 might include REG_LABEL), we need to see what effects this
1030 has on the known offsets at labels. */
1032 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1033 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1034 && REG_NOTES (insn) != 0))
1035 set_label_offsets (insn, insn, 0);
1037 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1039 /* Nonzero means don't use a reload reg that overlaps
1040 the place where a function value can be returned. */
1041 rtx avoid_return_reg = 0;
1043 rtx old_body = PATTERN (insn);
1044 int old_code = INSN_CODE (insn);
1045 rtx old_notes = REG_NOTES (insn);
1046 int did_elimination = 0;
1048 /* To compute the number of reload registers of each class
1049 needed for an insn, we must simulate what choose_reload_regs
1050 can do. We do this by splitting an insn into an "input" and
1051 an "output" part. RELOAD_OTHER reloads are used in both.
1052 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1053 which must be live over the entire input section of reloads,
1054 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1055 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1058 The registers needed for output are RELOAD_OTHER and
1059 RELOAD_FOR_OUTPUT, which are live for the entire output
1060 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1061 reloads for each operand.
1063 The total number of registers needed is the maximum of the
1064 inputs and outputs. */
1068 /* [0] is normal, [1] is nongroup. */
1069 int regs[2][N_REG_CLASSES];
1070 int groups[N_REG_CLASSES];
1073 /* Each `struct needs' corresponds to one RELOAD_... type. */
1077 struct needs output;
1079 struct needs other_addr;
1080 struct needs op_addr;
1081 struct needs op_addr_reload;
1082 struct needs in_addr[MAX_RECOG_OPERANDS];
1083 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1084 struct needs out_addr[MAX_RECOG_OPERANDS];
1085 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1088 /* If needed, eliminate any eliminable registers. */
1090 did_elimination = eliminate_regs_in_insn (insn, 0);
1092 /* Set avoid_return_reg if this is an insn
1093 that might use the value of a function call. */
1094 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1096 if (GET_CODE (PATTERN (insn)) == SET)
1097 after_call = SET_DEST (PATTERN (insn));
1098 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1099 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1100 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1104 else if (SMALL_REGISTER_CLASSES && after_call != 0
1105 && !(GET_CODE (PATTERN (insn)) == SET
1106 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1108 if (reg_referenced_p (after_call, PATTERN (insn)))
1109 avoid_return_reg = after_call;
1113 /* Analyze the instruction. */
1114 find_reloads (insn, 0, spill_indirect_levels, global,
1117 /* Remember for later shortcuts which insns had any reloads or
1118 register eliminations.
1120 One might think that it would be worthwhile to mark insns
1121 that need register replacements but not reloads, but this is
1122 not safe because find_reloads may do some manipulation of
1123 the insn (such as swapping commutative operands), which would
1124 be lost when we restore the old pattern after register
1125 replacement. So the actions of find_reloads must be redone in
1126 subsequent passes or in reload_as_needed.
1128 However, it is safe to mark insns that need reloads
1129 but not register replacement. */
1131 PUT_MODE (insn, (did_elimination ? QImode
1132 : n_reloads ? HImode
1133 : GET_MODE (insn) == DImode ? DImode
1136 /* Discard any register replacements done. */
1137 if (did_elimination)
1139 obstack_free (&reload_obstack, reload_firstobj);
1140 PATTERN (insn) = old_body;
1141 INSN_CODE (insn) = old_code;
1142 REG_NOTES (insn) = old_notes;
1143 something_needs_elimination = 1;
1146 /* If this insn has no reloads, we need not do anything except
1147 in the case of a CALL_INSN when we have caller-saves and
1148 caller-save needs reloads. */
1151 && ! (GET_CODE (insn) == CALL_INSN
1152 && caller_save_spill_class != NO_REGS))
1155 something_needs_reloads = 1;
1156 bzero ((char *) &insn_needs, sizeof insn_needs);
1158 /* Count each reload once in every class
1159 containing the reload's own class. */
1161 for (i = 0; i < n_reloads; i++)
1163 register enum reg_class *p;
1164 enum reg_class class = reload_reg_class[i];
1166 enum machine_mode mode;
1168 struct needs *this_needs;
1170 /* Don't count the dummy reloads, for which one of the
1171 regs mentioned in the insn can be used for reloading.
1172 Don't count optional reloads.
1173 Don't count reloads that got combined with others. */
1174 if (reload_reg_rtx[i] != 0
1175 || reload_optional[i] != 0
1176 || (reload_out[i] == 0 && reload_in[i] == 0
1177 && ! reload_secondary_p[i]))
1180 /* Show that a reload register of this class is needed
1181 in this basic block. We do not use insn_needs and
1182 insn_groups because they are overly conservative for
1184 if (global && ! basic_block_needs[(int) class][this_block])
1186 basic_block_needs[(int) class][this_block] = 1;
1187 new_basic_block_needs = 1;
1190 mode = reload_inmode[i];
1191 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1192 mode = reload_outmode[i];
1193 size = CLASS_MAX_NREGS (class, mode);
1195 /* If this class doesn't want a group, determine if we have
1196 a nongroup need or a regular need. We have a nongroup
1197 need if this reload conflicts with a group reload whose
1198 class intersects with this reload's class. */
1202 for (j = 0; j < n_reloads; j++)
1203 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1204 (GET_MODE_SIZE (reload_outmode[j])
1205 > GET_MODE_SIZE (reload_inmode[j]))
1209 && (!reload_optional[j])
1210 && (reload_in[j] != 0 || reload_out[j] != 0
1211 || reload_secondary_p[j])
1212 && reloads_conflict (i, j)
1213 && reg_classes_intersect_p (class,
1214 reload_reg_class[j]))
1220 /* Decide which time-of-use to count this reload for. */
1221 switch (reload_when_needed[i])
1224 this_needs = &insn_needs.other;
1226 case RELOAD_FOR_INPUT:
1227 this_needs = &insn_needs.input;
1229 case RELOAD_FOR_OUTPUT:
1230 this_needs = &insn_needs.output;
1232 case RELOAD_FOR_INSN:
1233 this_needs = &insn_needs.insn;
1235 case RELOAD_FOR_OTHER_ADDRESS:
1236 this_needs = &insn_needs.other_addr;
1238 case RELOAD_FOR_INPUT_ADDRESS:
1239 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1241 case RELOAD_FOR_INPADDR_ADDRESS:
1242 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1244 case RELOAD_FOR_OUTPUT_ADDRESS:
1245 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1247 case RELOAD_FOR_OUTADDR_ADDRESS:
1248 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1250 case RELOAD_FOR_OPERAND_ADDRESS:
1251 this_needs = &insn_needs.op_addr;
1253 case RELOAD_FOR_OPADDR_ADDR:
1254 this_needs = &insn_needs.op_addr_reload;
1260 enum machine_mode other_mode, allocate_mode;
1262 /* Count number of groups needed separately from
1263 number of individual regs needed. */
1264 this_needs->groups[(int) class]++;
1265 p = reg_class_superclasses[(int) class];
1266 while (*p != LIM_REG_CLASSES)
1267 this_needs->groups[(int) *p++]++;
1269 /* Record size and mode of a group of this class. */
1270 /* If more than one size group is needed,
1271 make all groups the largest needed size. */
1272 if (group_size[(int) class] < size)
1274 other_mode = group_mode[(int) class];
1275 allocate_mode = mode;
1277 group_size[(int) class] = size;
1278 group_mode[(int) class] = mode;
1283 allocate_mode = group_mode[(int) class];
1286 /* Crash if two dissimilar machine modes both need
1287 groups of consecutive regs of the same class. */
1289 if (other_mode != VOIDmode && other_mode != allocate_mode
1290 && ! modes_equiv_for_class_p (allocate_mode,
1292 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1297 this_needs->regs[nongroup_need][(int) class] += 1;
1298 p = reg_class_superclasses[(int) class];
1299 while (*p != LIM_REG_CLASSES)
1300 this_needs->regs[nongroup_need][(int) *p++] += 1;
1306 /* All reloads have been counted for this insn;
1307 now merge the various times of use.
1308 This sets insn_needs, etc., to the maximum total number
1309 of registers needed at any point in this insn. */
1311 for (i = 0; i < N_REG_CLASSES; i++)
1313 int in_max, out_max;
1315 /* Compute normal and nongroup needs. */
1316 for (j = 0; j <= 1; j++)
1318 for (in_max = 0, out_max = 0, k = 0;
1319 k < reload_n_operands; k++)
1323 (insn_needs.in_addr[k].regs[j][i]
1324 + insn_needs.in_addr_addr[k].regs[j][i]));
1326 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1329 insn_needs.out_addr_addr[k].regs[j][i]);
1332 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1333 and operand addresses but not things used to reload
1334 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1335 don't conflict with things needed to reload inputs or
1338 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1339 insn_needs.op_addr_reload.regs[j][i]),
1342 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1344 insn_needs.input.regs[j][i]
1345 = MAX (insn_needs.input.regs[j][i]
1346 + insn_needs.op_addr.regs[j][i]
1347 + insn_needs.insn.regs[j][i],
1348 in_max + insn_needs.input.regs[j][i]);
1350 insn_needs.output.regs[j][i] += out_max;
1351 insn_needs.other.regs[j][i]
1352 += MAX (MAX (insn_needs.input.regs[j][i],
1353 insn_needs.output.regs[j][i]),
1354 insn_needs.other_addr.regs[j][i]);
1358 /* Now compute group needs. */
1359 for (in_max = 0, out_max = 0, j = 0;
1360 j < reload_n_operands; j++)
1362 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1363 in_max = MAX (in_max,
1364 insn_needs.in_addr_addr[j].groups[i]);
1366 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1368 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1371 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1372 insn_needs.op_addr_reload.groups[i]),
1374 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1376 insn_needs.input.groups[i]
1377 = MAX (insn_needs.input.groups[i]
1378 + insn_needs.op_addr.groups[i]
1379 + insn_needs.insn.groups[i],
1380 in_max + insn_needs.input.groups[i]);
1382 insn_needs.output.groups[i] += out_max;
1383 insn_needs.other.groups[i]
1384 += MAX (MAX (insn_needs.input.groups[i],
1385 insn_needs.output.groups[i]),
1386 insn_needs.other_addr.groups[i]);
1389 /* If this is a CALL_INSN and caller-saves will need
1390 a spill register, act as if the spill register is
1391 needed for this insn. However, the spill register
1392 can be used by any reload of this insn, so we only
1393 need do something if no need for that class has
1396 The assumption that every CALL_INSN will trigger a
1397 caller-save is highly conservative, however, the number
1398 of cases where caller-saves will need a spill register but
1399 a block containing a CALL_INSN won't need a spill register
1400 of that class should be quite rare.
1402 If a group is needed, the size and mode of the group will
1403 have been set up at the beginning of this loop. */
1405 if (GET_CODE (insn) == CALL_INSN
1406 && caller_save_spill_class != NO_REGS)
1408 /* See if this register would conflict with any reload
1409 that needs a group. */
1410 int nongroup_need = 0;
1411 int *caller_save_needs;
1413 for (j = 0; j < n_reloads; j++)
1414 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1415 (GET_MODE_SIZE (reload_outmode[j])
1416 > GET_MODE_SIZE (reload_inmode[j]))
1420 && reg_classes_intersect_p (caller_save_spill_class,
1421 reload_reg_class[j]))
1428 = (caller_save_group_size > 1
1429 ? insn_needs.other.groups
1430 : insn_needs.other.regs[nongroup_need]);
1432 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1434 register enum reg_class *p
1435 = reg_class_superclasses[(int) caller_save_spill_class];
1437 caller_save_needs[(int) caller_save_spill_class]++;
1439 while (*p != LIM_REG_CLASSES)
1440 caller_save_needs[(int) *p++] += 1;
1443 /* Show that this basic block will need a register of
1447 && ! (basic_block_needs[(int) caller_save_spill_class]
1450 basic_block_needs[(int) caller_save_spill_class]
1452 new_basic_block_needs = 1;
1456 /* If this insn stores the value of a function call,
1457 and that value is in a register that has been spilled,
1458 and if the insn needs a reload in a class
1459 that might use that register as the reload register,
1460 then add an extra need in that class.
1461 This makes sure we have a register available that does
1462 not overlap the return value. */
1464 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1466 int regno = REGNO (avoid_return_reg);
1468 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1470 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1472 /* First compute the "basic needs", which counts a
1473 need only in the smallest class in which it
1476 bcopy ((char *) insn_needs.other.regs[0],
1477 (char *) basic_needs, sizeof basic_needs);
1478 bcopy ((char *) insn_needs.other.groups,
1479 (char *) basic_groups, sizeof basic_groups);
1481 for (i = 0; i < N_REG_CLASSES; i++)
1485 if (basic_needs[i] >= 0)
1486 for (p = reg_class_superclasses[i];
1487 *p != LIM_REG_CLASSES; p++)
1488 basic_needs[(int) *p] -= basic_needs[i];
1490 if (basic_groups[i] >= 0)
1491 for (p = reg_class_superclasses[i];
1492 *p != LIM_REG_CLASSES; p++)
1493 basic_groups[(int) *p] -= basic_groups[i];
1496 /* Now count extra regs if there might be a conflict with
1497 the return value register. */
1499 for (r = regno; r < regno + nregs; r++)
1500 if (spill_reg_order[r] >= 0)
1501 for (i = 0; i < N_REG_CLASSES; i++)
1502 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1504 if (basic_needs[i] > 0)
1508 insn_needs.other.regs[0][i]++;
1509 p = reg_class_superclasses[i];
1510 while (*p != LIM_REG_CLASSES)
1511 insn_needs.other.regs[0][(int) *p++]++;
1513 if (basic_groups[i] > 0)
1517 insn_needs.other.groups[i]++;
1518 p = reg_class_superclasses[i];
1519 while (*p != LIM_REG_CLASSES)
1520 insn_needs.other.groups[(int) *p++]++;
1525 /* For each class, collect maximum need of any insn. */
1527 for (i = 0; i < N_REG_CLASSES; i++)
1529 if (max_needs[i] < insn_needs.other.regs[0][i])
1531 max_needs[i] = insn_needs.other.regs[0][i];
1532 max_needs_insn[i] = insn;
1534 if (max_groups[i] < insn_needs.other.groups[i])
1536 max_groups[i] = insn_needs.other.groups[i];
1537 max_groups_insn[i] = insn;
1539 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1541 max_nongroups[i] = insn_needs.other.regs[1][i];
1542 max_nongroups_insn[i] = insn;
1546 /* Note that there is a continue statement above. */
1549 /* If we allocated any new memory locations, make another pass
1550 since it might have changed elimination offsets. */
1551 if (starting_frame_size != get_frame_size ())
1552 something_changed = 1;
1555 for (i = 0; i < N_REG_CLASSES; i++)
1557 if (max_needs[i] > 0)
1559 ";; Need %d reg%s of class %s (for insn %d).\n",
1560 max_needs[i], max_needs[i] == 1 ? "" : "s",
1561 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1562 if (max_nongroups[i] > 0)
1564 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1565 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1566 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1567 if (max_groups[i] > 0)
1569 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1570 max_groups[i], max_groups[i] == 1 ? "" : "s",
1571 mode_name[(int) group_mode[i]],
1572 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1575 /* If we have caller-saves, set up the save areas and see if caller-save
1576 will need a spill register. */
1578 if (caller_save_needed)
1580 /* Set the offsets for setup_save_areas. */
1581 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1583 ep->previous_offset = ep->max_offset;
1585 if ( ! setup_save_areas (&something_changed)
1586 && caller_save_spill_class == NO_REGS)
1588 /* The class we will need depends on whether the machine
1589 supports the sum of two registers for an address; see
1590 find_address_reloads for details. */
1592 caller_save_spill_class
1593 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1594 caller_save_group_size
1595 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1596 something_changed = 1;
1600 /* See if anything that happened changes which eliminations are valid.
1601 For example, on the Sparc, whether or not the frame pointer can
1602 be eliminated can depend on what registers have been used. We need
1603 not check some conditions again (such as flag_omit_frame_pointer)
1604 since they can't have changed. */
1606 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1607 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1608 #ifdef ELIMINABLE_REGS
1609 || ! CAN_ELIMINATE (ep->from, ep->to)
1612 ep->can_eliminate = 0;
1614 /* Look for the case where we have discovered that we can't replace
1615 register A with register B and that means that we will now be
1616 trying to replace register A with register C. This means we can
1617 no longer replace register C with register B and we need to disable
1618 such an elimination, if it exists. This occurs often with A == ap,
1619 B == sp, and C == fp. */
1621 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1623 struct elim_table *op;
1624 register int new_to = -1;
1626 if (! ep->can_eliminate && ep->can_eliminate_previous)
1628 /* Find the current elimination for ep->from, if there is a
1630 for (op = reg_eliminate;
1631 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1632 if (op->from == ep->from && op->can_eliminate)
1638 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1640 for (op = reg_eliminate;
1641 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1642 if (op->from == new_to && op->to == ep->to)
1643 op->can_eliminate = 0;
1647 /* See if any registers that we thought we could eliminate the previous
1648 time are no longer eliminable. If so, something has changed and we
1649 must spill the register. Also, recompute the number of eliminable
1650 registers and see if the frame pointer is needed; it is if there is
1651 no elimination of the frame pointer that we can perform. */
1653 frame_pointer_needed = 1;
1654 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1656 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1657 && ep->to != HARD_FRAME_POINTER_REGNUM)
1658 frame_pointer_needed = 0;
1660 if (! ep->can_eliminate && ep->can_eliminate_previous)
1662 ep->can_eliminate_previous = 0;
1663 spill_hard_reg (ep->from, global, dumpfile, 1);
1664 something_changed = 1;
1669 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1670 /* If we didn't need a frame pointer last time, but we do now, spill
1671 the hard frame pointer. */
1672 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1674 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1675 something_changed = 1;
1679 /* If all needs are met, we win. */
1681 for (i = 0; i < N_REG_CLASSES; i++)
1682 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1684 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1687 /* Not all needs are met; must spill some hard regs. */
1689 /* Put all registers spilled so far back in potential_reload_regs, but
1690 put them at the front, since we've already spilled most of the
1691 pseudos in them (we might have left some pseudos unspilled if they
1692 were in a block that didn't need any spill registers of a conflicting
1693 class. We used to try to mark off the need for those registers,
1694 but doing so properly is very complex and reallocating them is the
1695 simpler approach. First, "pack" potential_reload_regs by pushing
1696 any nonnegative entries towards the end. That will leave room
1697 for the registers we already spilled.
1699 Also, undo the marking of the spill registers from the last time
1700 around in FORBIDDEN_REGS since we will probably be allocating
1703 ??? It is theoretically possible that we might end up not using one
1704 of our previously-spilled registers in this allocation, even though
1705 they are at the head of the list. It's not clear what to do about
1706 this, but it was no better before, when we marked off the needs met
1707 by the previously-spilled registers. With the current code, globals
1708 can be allocated into these registers, but locals cannot. */
1712 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1713 if (potential_reload_regs[i] != -1)
1714 potential_reload_regs[j--] = potential_reload_regs[i];
1716 for (i = 0; i < n_spills; i++)
1718 potential_reload_regs[i] = spill_regs[i];
1719 spill_reg_order[spill_regs[i]] = -1;
1720 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1726 /* Now find more reload regs to satisfy the remaining need
1727 Do it by ascending class number, since otherwise a reg
1728 might be spilled for a big class and might fail to count
1729 for a smaller class even though it belongs to that class.
1731 Count spilled regs in `spills', and add entries to
1732 `spill_regs' and `spill_reg_order'.
1734 ??? Note there is a problem here.
1735 When there is a need for a group in a high-numbered class,
1736 and also need for non-group regs that come from a lower class,
1737 the non-group regs are chosen first. If there aren't many regs,
1738 they might leave no room for a group.
1740 This was happening on the 386. To fix it, we added the code
1741 that calls possible_group_p, so that the lower class won't
1742 break up the last possible group.
1744 Really fixing the problem would require changes above
1745 in counting the regs already spilled, and in choose_reload_regs.
1746 It might be hard to avoid introducing bugs there. */
1748 CLEAR_HARD_REG_SET (counted_for_groups);
1749 CLEAR_HARD_REG_SET (counted_for_nongroups);
1751 for (class = 0; class < N_REG_CLASSES; class++)
1753 /* First get the groups of registers.
1754 If we got single registers first, we might fragment
1756 while (max_groups[class] > 0)
1758 /* If any single spilled regs happen to form groups,
1759 count them now. Maybe we don't really need
1760 to spill another group. */
1761 count_possible_groups (group_size, group_mode, max_groups,
1764 if (max_groups[class] <= 0)
1767 /* Groups of size 2 (the only groups used on most machines)
1768 are treated specially. */
1769 if (group_size[class] == 2)
1771 /* First, look for a register that will complete a group. */
1772 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1776 j = potential_reload_regs[i];
1777 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1779 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1780 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1781 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1782 && HARD_REGNO_MODE_OK (other, group_mode[class])
1783 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1785 /* We don't want one part of another group.
1786 We could get "two groups" that overlap! */
1787 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1789 (j < FIRST_PSEUDO_REGISTER - 1
1790 && (other = j + 1, spill_reg_order[other] >= 0)
1791 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1792 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1793 && HARD_REGNO_MODE_OK (j, group_mode[class])
1794 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1796 && ! TEST_HARD_REG_BIT (counted_for_groups,
1799 register enum reg_class *p;
1801 /* We have found one that will complete a group,
1802 so count off one group as provided. */
1803 max_groups[class]--;
1804 p = reg_class_superclasses[class];
1805 while (*p != LIM_REG_CLASSES)
1807 if (group_size [(int) *p] <= group_size [class])
1808 max_groups[(int) *p]--;
1812 /* Indicate both these regs are part of a group. */
1813 SET_HARD_REG_BIT (counted_for_groups, j);
1814 SET_HARD_REG_BIT (counted_for_groups, other);
1818 /* We can't complete a group, so start one. */
1819 /* Look for a pair neither of which is explicitly used. */
1820 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1821 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1824 j = potential_reload_regs[i];
1825 /* Verify that J+1 is a potential reload reg. */
1826 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1827 if (potential_reload_regs[k] == j + 1)
1829 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1830 && k < FIRST_PSEUDO_REGISTER
1831 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1832 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1833 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1834 && HARD_REGNO_MODE_OK (j, group_mode[class])
1835 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1837 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1838 /* Reject J at this stage
1839 if J+1 was explicitly used. */
1840 && ! regs_explicitly_used[j + 1])
1843 /* Now try any group at all
1844 whose registers are not in bad_spill_regs. */
1845 if (i == FIRST_PSEUDO_REGISTER)
1846 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1849 j = potential_reload_regs[i];
1850 /* Verify that J+1 is a potential reload reg. */
1851 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1852 if (potential_reload_regs[k] == j + 1)
1854 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1855 && k < FIRST_PSEUDO_REGISTER
1856 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1857 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1858 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1859 && HARD_REGNO_MODE_OK (j, group_mode[class])
1860 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1862 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1866 /* I should be the index in potential_reload_regs
1867 of the new reload reg we have found. */
1869 if (i >= FIRST_PSEUDO_REGISTER)
1871 /* There are no groups left to spill. */
1872 spill_failure (max_groups_insn[class]);
1878 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1883 /* For groups of more than 2 registers,
1884 look for a sufficient sequence of unspilled registers,
1885 and spill them all at once. */
1886 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1890 j = potential_reload_regs[i];
1892 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1893 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1895 /* Check each reg in the sequence. */
1896 for (k = 0; k < group_size[class]; k++)
1897 if (! (spill_reg_order[j + k] < 0
1898 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1899 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1901 /* We got a full sequence, so spill them all. */
1902 if (k == group_size[class])
1904 register enum reg_class *p;
1905 for (k = 0; k < group_size[class]; k++)
1908 SET_HARD_REG_BIT (counted_for_groups, j + k);
1909 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1910 if (potential_reload_regs[idx] == j + k)
1913 |= new_spill_reg (idx, class,
1914 max_needs, NULL_PTR,
1918 /* We have found one that will complete a group,
1919 so count off one group as provided. */
1920 max_groups[class]--;
1921 p = reg_class_superclasses[class];
1922 while (*p != LIM_REG_CLASSES)
1924 if (group_size [(int) *p]
1925 <= group_size [class])
1926 max_groups[(int) *p]--;
1933 /* We couldn't find any registers for this reload.
1934 Avoid going into an infinite loop. */
1935 if (i >= FIRST_PSEUDO_REGISTER)
1937 /* There are no groups left. */
1938 spill_failure (max_groups_insn[class]);
1945 /* Now similarly satisfy all need for single registers. */
1947 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1949 /* If we spilled enough regs, but they weren't counted
1950 against the non-group need, see if we can count them now.
1951 If so, we can avoid some actual spilling. */
1952 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1953 for (i = 0; i < n_spills; i++)
1954 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1956 && !TEST_HARD_REG_BIT (counted_for_groups,
1958 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1960 && max_nongroups[class] > 0)
1962 register enum reg_class *p;
1964 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1965 max_nongroups[class]--;
1966 p = reg_class_superclasses[class];
1967 while (*p != LIM_REG_CLASSES)
1968 max_nongroups[(int) *p++]--;
1970 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1973 /* Consider the potential reload regs that aren't
1974 yet in use as reload regs, in order of preference.
1975 Find the most preferred one that's in this class. */
1977 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1978 if (potential_reload_regs[i] >= 0
1979 && TEST_HARD_REG_BIT (reg_class_contents[class],
1980 potential_reload_regs[i])
1981 /* If this reg will not be available for groups,
1982 pick one that does not foreclose possible groups.
1983 This is a kludge, and not very general,
1984 but it should be sufficient to make the 386 work,
1985 and the problem should not occur on machines with
1987 && (max_nongroups[class] == 0
1988 || possible_group_p (potential_reload_regs[i], max_groups)))
1991 /* If we couldn't get a register, try to get one even if we
1992 might foreclose possible groups. This may cause problems
1993 later, but that's better than aborting now, since it is
1994 possible that we will, in fact, be able to form the needed
1995 group even with this allocation. */
1997 if (i >= FIRST_PSEUDO_REGISTER
1998 && (asm_noperands (max_needs[class] > 0
1999 ? max_needs_insn[class]
2000 : max_nongroups_insn[class])
2002 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2003 if (potential_reload_regs[i] >= 0
2004 && TEST_HARD_REG_BIT (reg_class_contents[class],
2005 potential_reload_regs[i]))
2008 /* I should be the index in potential_reload_regs
2009 of the new reload reg we have found. */
2011 if (i >= FIRST_PSEUDO_REGISTER)
2013 /* There are no possible registers left to spill. */
2014 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
2015 : max_nongroups_insn[class]);
2021 |= new_spill_reg (i, class, max_needs, max_nongroups,
2027 /* If global-alloc was run, notify it of any register eliminations we have
2030 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2031 if (ep->can_eliminate)
2032 mark_elimination (ep->from, ep->to);
2034 /* Insert code to save and restore call-clobbered hard regs
2035 around calls. Tell if what mode to use so that we will process
2036 those insns in reload_as_needed if we have to. */
2038 if (caller_save_needed)
2039 save_call_clobbered_regs (num_eliminable ? QImode
2040 : caller_save_spill_class != NO_REGS ? HImode
2043 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2044 If that insn didn't set the register (i.e., it copied the register to
2045 memory), just delete that insn instead of the equivalencing insn plus
2046 anything now dead. If we call delete_dead_insn on that insn, we may
2047 delete the insn that actually sets the register if the register die
2048 there and that is incorrect. */
2050 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2051 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2052 && GET_CODE (reg_equiv_init[i]) != NOTE)
2054 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2055 delete_dead_insn (reg_equiv_init[i]);
2058 PUT_CODE (reg_equiv_init[i], NOTE);
2059 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2060 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2064 /* Use the reload registers where necessary
2065 by generating move instructions to move the must-be-register
2066 values into or out of the reload registers. */
2068 if (something_needs_reloads || something_needs_elimination
2069 || (caller_save_needed && num_eliminable)
2070 || caller_save_spill_class != NO_REGS)
2071 reload_as_needed (first, global);
2073 /* If we were able to eliminate the frame pointer, show that it is no
2074 longer live at the start of any basic block. If it is live by
2075 virtue of being in a pseudo, that pseudo will be marked live
2076 and hence the frame pointer will be known to be live via that
2079 if (! frame_pointer_needed)
2080 for (i = 0; i < n_basic_blocks; i++)
2081 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
2082 HARD_FRAME_POINTER_REGNUM);
2084 /* Come here (with failure set nonzero) if we can't get enough spill regs
2085 and we decide not to abort about it. */
2088 reload_in_progress = 0;
2090 /* Now eliminate all pseudo regs by modifying them into
2091 their equivalent memory references.
2092 The REG-rtx's for the pseudos are modified in place,
2093 so all insns that used to refer to them now refer to memory.
2095 For a reg that has a reg_equiv_address, all those insns
2096 were changed by reloading so that no insns refer to it any longer;
2097 but the DECL_RTL of a variable decl may refer to it,
2098 and if so this causes the debugging info to mention the variable. */
2100 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2104 if (reg_equiv_mem[i])
2106 addr = XEXP (reg_equiv_mem[i], 0);
2107 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2109 if (reg_equiv_address[i])
2110 addr = reg_equiv_address[i];
2113 if (reg_renumber[i] < 0)
2115 rtx reg = regno_reg_rtx[i];
2116 XEXP (reg, 0) = addr;
2117 REG_USERVAR_P (reg) = 0;
2118 MEM_IN_STRUCT_P (reg) = in_struct;
2119 PUT_CODE (reg, MEM);
2121 else if (reg_equiv_mem[i])
2122 XEXP (reg_equiv_mem[i], 0) = addr;
2126 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2127 /* Make a pass over all the insns and remove death notes for things that
2128 are no longer registers or no longer die in the insn (e.g., an input
2129 and output pseudo being tied). */
2131 for (insn = first; insn; insn = NEXT_INSN (insn))
2132 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2136 for (note = REG_NOTES (insn); note; note = next)
2138 next = XEXP (note, 1);
2139 if (REG_NOTE_KIND (note) == REG_DEAD
2140 && (GET_CODE (XEXP (note, 0)) != REG
2141 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2142 remove_note (insn, note);
2147 /* If we are doing stack checking, give a warning if this function's
2148 frame size is larger than we expect. */
2149 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
2151 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
2153 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2154 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
2155 size += UNITS_PER_WORD;
2157 if (size > STACK_CHECK_MAX_FRAME_SIZE)
2158 warning ("frame size too large for reliable stack checking");
2161 /* Indicate that we no longer have known memory locations or constants. */
2162 reg_equiv_constant = 0;
2163 reg_equiv_memory_loc = 0;
2166 free (real_known_ptr);
2171 free (scratch_list);
2174 free (scratch_block);
2177 CLEAR_HARD_REG_SET (used_spill_regs);
2178 for (i = 0; i < n_spills; i++)
2179 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2184 /* Nonzero if, after spilling reg REGNO for non-groups,
2185 it will still be possible to find a group if we still need one. */
/* NOTE(review): this listing has gaps (see the jumps in the embedded
   original line numbers); the parameter declarations, braces, local
   declarations and some return statements are not visible here.
   Comments below describe only what the visible lines establish.  */
2188 possible_group_p (regno, max_groups)
/* Find which register class, if any, still needs groups.
   NO_REGS means no class does.  */
2193 int class = (int) NO_REGS;
2195 for (i = 0; i < (int) N_REG_CLASSES; i++)
2196 if (max_groups[i] > 0)
/* If no class needs a group, spilling REGNO cannot foreclose one.  */
2202 if (class == (int) NO_REGS)
2205 /* Consider each pair of consecutive registers. */
2206 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2208 /* Ignore pairs that include reg REGNO. */
2209 if (i == regno || i + 1 == regno)
2212 /* Ignore pairs that are outside the class that needs the group.
2213 ??? Here we fail to handle the case where two different classes
2214 independently need groups. But this never happens with our
2215 current machine descriptions. */
2216 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2217 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2220 /* A pair of consecutive regs we can still spill does the trick. */
2221 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2222 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2223 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2226 /* A pair of one already spilled and one we can spill does it
2227 provided the one already spilled is not otherwise reserved. */
2228 if (spill_reg_order[i] < 0
2229 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2230 && spill_reg_order[i + 1] >= 0
2231 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2232 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
/* The symmetric case: the higher reg can still be spilled and the
   lower one is already a spill reg that is not otherwise reserved.  */
2234 if (spill_reg_order[i + 1] < 0
2235 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2236 && spill_reg_order[i] >= 0
2237 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2238 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2245 /* Count any groups of CLASS that can be formed from the registers recently
/* NOTE(review): the listing drops lines here (embedded numbering jumps
   2245 -> 2249); the rest of this header comment, the return type, the
   remaining parameter declarations, braces and loop-exit statements are
   not visible.  */
2249 count_possible_groups (group_size, group_mode, max_groups, class)
2251 enum machine_mode *group_mode;
2258 /* Now find all consecutive groups of spilled registers
2259 and mark each group off against the need for such groups.
2260 But don't count them against ordinary need, yet. */
/* Nothing to do for a class that does not need multi-register groups.  */
2262 if (group_size[class] == 0)
2265 CLEAR_HARD_REG_SET (new);
2267 /* Make a mask of all the regs that are spill regs in class I. */
/* "new" collects spill regs of CLASS not yet reserved for any group
   or non-group need.  */
2268 for (i = 0; i < n_spills; i++)
2269 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2270 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2271 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2272 SET_HARD_REG_BIT (new, spill_regs[i]);
2274 /* Find each consecutive group of them. */
2275 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2276 if (TEST_HARD_REG_BIT (new, i)
2277 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2278 && HARD_REGNO_MODE_OK (i, group_mode[class]))
/* Verify that every reg of the candidate group is in the mask.
   On exit j == group_size[class] iff the whole run was available.  */
2280 for (j = 1; j < group_size[class]; j++)
2281 if (! TEST_HARD_REG_BIT (new, i + j))
2284 if (j == group_size[class])
2286 /* We found a group. Mark it off against this class's need for
2287 groups, and against each superclass too. */
2288 register enum reg_class *p;
2290 max_groups[class]--;
2291 p = reg_class_superclasses[class];
2292 while (*p != LIM_REG_CLASSES)
2294 if (group_size [(int) *p] <= group_size [class])
2295 max_groups[(int) *p]--;
2299 /* Don't count these registers again. */
2300 for (j = 0; j < group_size[class]; j++)
2301 SET_HARD_REG_BIT (counted_for_groups, i + j);
2304 /* Skip to the last reg in this group. When i is incremented above,
2305 it will then point to the first reg of the next possible group. */
2310 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2311 another mode that needs to be reloaded for the same register class CLASS.
2312 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2313 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2315 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2316 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2317 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2318 causes unnecessary failures on machines requiring alignment of register
2319 groups when the two modes are different sizes, because the larger mode has
2320 more strict alignment rules than the smaller mode. */
/* NOTE(review): the return type, opening brace, local declaration of
   "regno" and the return statements are among the lines missing from
   this listing (numbering gaps 2320->2323, 2325->2328, 2332->end).
   The visible loop scans every hard reg of CLASS; presumably it
   returns 0 on the first reg that accepts ALLOCATE_MODE but rejects
   OTHER_MODE, and 1 otherwise — confirm against the full source.  */
2323 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2324 enum machine_mode allocate_mode, other_mode;
2325 enum reg_class class;
2328 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2330 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2331 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2332 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2338 /* Handle the failure to find a register to spill.
2339 INSN should be one of the insns which needed this particular spill reg. */
/* NOTE(review): the parameter declaration, braces, and the `else'
   joining the two statements below are among the lines missing from
   this listing (numbering gaps).  */
2342 spill_failure (insn)
/* For an asm statement, report a user-level error: the asm simply
   demanded more reloads than can be satisfied.  */
2345 if (asm_noperands (PATTERN (insn)) >= 0)
2346 error_for_asm (insn, "`asm' needs too many reloads");
/* Otherwise this is an internal failure; abort with the offending insn.  */
2348 fatal_insn ("Unable to find a register to spill.", insn);
2351 /* Add a new register to the tables of available spill-registers
2352 (as well as spilling all pseudos allocated to the register).
2353 I is the index of this register in potential_reload_regs.
2354 CLASS is the regclass whose need is being satisfied.
2355 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2356 so that this register can count off against them.
2357 MAX_NONGROUPS is 0 if this register is part of a group.
2358 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
/* NOTE(review): parameter declarations, braces, the dumpfile NULL
   check guarding the fprintf, the max_needs[class] decrement, and the
   final return of VAL are among the lines missing from this listing
   (numbering gaps).  */
2361 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2369 register enum reg_class *p;
2371 int regno = potential_reload_regs[i];
/* A caller must have located a usable entry before calling us.  */
2373 if (i >= FIRST_PSEUDO_REGISTER)
2374 abort (); /* Caller failed to find any register. */
2376 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2377 fatal ("fixed or forbidden register was spilled.\n\
2378 This may be due to a compiler bug or to impossible asm\n\
2379 statements or clauses.");
2381 /* Make reg REGNO an additional reload reg. */
/* Remove it from the candidate pool and record its spill order.  */
2383 potential_reload_regs[i] = -1;
2384 spill_regs[n_spills] = regno;
2385 spill_reg_order[regno] = n_spills;
2387 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2389 /* Clear off the needs we just satisfied. */
/* One reg of CLASS also satisfies one need of every superclass.  */
2392 p = reg_class_superclasses[class];
2393 while (*p != LIM_REG_CLASSES)
2394 max_needs[(int) *p++]--;
/* If this reg is being used for a non-group need, reserve it so it
   is never counted toward a group, and count it off likewise.  */
2396 if (max_nongroups && max_nongroups[class] > 0)
2398 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2399 max_nongroups[class]--;
2400 p = reg_class_superclasses[class];
2401 while (*p != LIM_REG_CLASSES)
2402 max_nongroups[(int) *p++]--;
2405 /* Spill every pseudo reg that was allocated to this reg
2406 or to something that overlaps this reg. */
2408 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2410 /* If there are some registers still to eliminate and this register
2411 wasn't ever used before, additional stack space may have to be
2412 allocated to store this register. Thus, we may have changed the offset
2413 between the stack and frame pointers, so mark that something has changed.
2414 (If new pseudos were spilled, thus requiring more space, VAL would have
2415 been set non-zero by the call to spill_hard_reg above since additional
2416 reloads may be needed in that case.
2418 One might think that we need only set VAL to 1 if this is a call-used
2419 register. However, the set of registers that must be saved by the
2420 prologue is not identical to the call-used set. For example, the
2421 register used by the call insn for the return PC is a call-used register,
2422 but must be saved by the prologue. */
2423 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
/* The reg is live now that it is a spill reg.  */
2426 regs_ever_live[spill_regs[n_spills]] = 1;
2432 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2433 data that is dead in INSN. */
/* NOTE(review): the parameter declaration, braces and the local
   declaration of prev_dest are among the lines missing from this
   listing (numbering gaps).  */
2436 delete_dead_insn (insn)
2439 rtx prev = prev_real_insn (insn);
2442 /* If the previous insn sets a register that dies in our insn, delete it
/* Recurse: a single-SET feeder insn whose REG destination is only
   used by INSN (it appears in INSN and has a REG_DEAD note there)
   is itself dead once INSN goes away.  */
2444 if (prev && GET_CODE (PATTERN (prev)) == SET
2445 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2446 && reg_mentioned_p (prev_dest, PATTERN (insn))
2447 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2448 delete_dead_insn (prev);
/* Turn INSN into a deleted-insn note in place rather than unlinking
   it from the insn chain.  */
2450 PUT_CODE (insn, NOTE);
2451 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2452 NOTE_SOURCE_FILE (insn) = 0;
2455 /* Modify the home of pseudo-reg I.
2456 The new home is present in reg_renumber[I].
2458 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2459 or it may be -1, meaning there is none or it is not relevant.
2460 This is used so that all pseudos spilled from a given hard reg
2461 can share one stack slot. */
/* NOTE(review): parameter declarations, braces, early returns, local
   declarations (x, adjust, stack_slot) and the conditions choosing
   among the three slot-allocation branches are among the lines missing
   from this listing (numbering gaps).  */
2464 alter_reg (i, from_reg)
2468 /* When outputting an inline function, this can happen
2469 for a reg that isn't actually used. */
2470 if (regno_reg_rtx[i] == 0)
2473 /* If the reg got changed to a MEM at rtl-generation time,
2475 if (GET_CODE (regno_reg_rtx[i]) != REG)
2478 /* Modify the reg-rtx to contain the new hard reg
2479 number or else to contain its pseudo reg number. */
2480 REGNO (regno_reg_rtx[i])
2481 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2483 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2484 allocate a stack slot for it. */
2486 if (reg_renumber[i] < 0
2487 && REG_N_REFS (i) > 0
2488 && reg_equiv_constant[i] == 0
2489 && reg_equiv_memory_loc[i] == 0)
2492 int inherent_size = PSEUDO_REGNO_BYTES (i);
2493 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2496 /* Each pseudo reg has an inherent size which comes from its own mode,
2497 and a total size which provides room for paradoxical subregs
2498 which refer to the pseudo reg in wider modes.
2500 We can use a slot already allocated if it provides both
2501 enough inherent space and enough total space.
2502 Otherwise, we allocate a new slot, making sure that it has no less
2503 inherent space, and no less total space, than the previous slot. */
2506 /* No known place to spill from => no slot to reuse. */
2507 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2508 inherent_size == total_size ? 0 : -1);
2509 if (BYTES_BIG_ENDIAN)
2510 /* Cancel the big-endian correction done in assign_stack_local.
2511 Get the address of the beginning of the slot.
2512 This is so we can do a big-endian correction unconditionally
2514 adjust = inherent_size - total_size;
2516 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2518 /* Reuse a stack slot if possible. */
/* Reuse is only legal if the existing slot for FROM_REG is at least
   as wide as we need (both total and, per the truncated condition
   below, mode size — lines missing here).  */
2519 else if (spill_stack_slot[from_reg] != 0
2520 && spill_stack_slot_width[from_reg] >= total_size
2521 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2523 x = spill_stack_slot[from_reg];
2524 /* Allocate a bigger slot. */
2527 /* Compute maximum size needed, both for inherent size
2528 and for total size. */
2529 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
/* Grow the request so the new slot also covers everything the old
   shared slot covered.  */
2531 if (spill_stack_slot[from_reg])
2533 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2535 mode = GET_MODE (spill_stack_slot[from_reg]);
2536 if (spill_stack_slot_width[from_reg] > total_size)
2537 total_size = spill_stack_slot_width[from_reg];
2539 /* Make a slot with that size. */
2540 x = assign_stack_local (mode, total_size,
2541 inherent_size == total_size ? 0 : -1);
2543 if (BYTES_BIG_ENDIAN)
2545 /* Cancel the big-endian correction done in assign_stack_local.
2546 Get the address of the beginning of the slot.
2547 This is so we can do a big-endian correction unconditionally
2549 adjust = GET_MODE_SIZE (mode) - total_size;
/* Record the (possibly widened) slot so later pseudos spilled from
   FROM_REG can share it.  */
2551 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2554 plus_constant (XEXP (x, 0), adjust));
2556 spill_stack_slot[from_reg] = stack_slot;
2557 spill_stack_slot_width[from_reg] = total_size;
2560 /* On a big endian machine, the "address" of the slot
2561 is the address of the low part that fits its inherent mode. */
2562 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2563 adjust += (total_size - inherent_size);
2565 /* If we have any adjustment to make, or if the stack slot is the
2566 wrong mode, make a new stack slot. */
2567 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2569 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2570 plus_constant (XEXP (x, 0), adjust));
2571 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2574 /* Save the stack slot for later. */
2575 reg_equiv_memory_loc[i] = x;
2579 /* Mark the slots in regs_ever_live for the hard regs
2580 used by pseudo-reg number REGNO. */
/* NOTE(review): the parameter declaration, braces, the guard for an
   unallocated pseudo (reg_renumber < 0) and the loop header driving
   the final statement are among the lines missing from this listing
   (numbering gaps) — confirm against the full source.  */
2583 mark_home_live (regno)
2586 register int i, lim;
/* i is the first hard reg assigned to REGNO; lim is one past the
   last, given the width of REGNO's mode.  */
2587 i = reg_renumber[regno];
2590 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2592 regs_ever_live[i++] = 1;
2595 /* Mark the registers used in SCRATCH as being live. */
/* NOTE(review): the parameter declaration, braces and the declaration
   of i are among the lines missing from this listing (numbering gaps).  */
2598 mark_scratch_live (scratch)
/* Mark every hard reg covered by the scratch, given its mode width.  */
2602 int regno = REGNO (scratch);
2603 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2605 for (i = regno; i < lim; i++)
2606 regs_ever_live[i] = 1;
2609 /* This function handles the tracking of elimination offsets around branches.
2611 X is a piece of RTL being scanned.
2613 INSN is the insn that it came from, if any.
2615 INITIAL_P is non-zero if we are to set the offset to be the initial
2616 offset and zero if we are setting the offset of the label to be the
/* NOTE(review): this listing has gaps (embedded line numbers jump);
   the parameter declarations, braces, the `switch (code)' statement
   and its case labels (LABEL_REF, CODE_LABEL, JUMP_INSN, SET, etc.),
   and several break/return statements are not visible here.  */
2620 set_label_offsets (x, insn, initial_p)
2625 enum rtx_code code = GET_CODE (x);
2628 struct elim_table *p;
/* Non-local labels are not tracked; bail out (the branch target is
   outside this function).  */
2633 if (LABEL_REF_NONLOCAL_P (x))
2638 /* ... fall through ... */
2641 /* If we know nothing about this label, set the desired offsets. Note
2642 that this sets the offset at a label to be the offset before a label
2643 if we don't know anything about the label. This is not correct for
2644 the label after a BARRIER, but is the best guess we can make. If
2645 we guessed wrong, we will suppress an elimination that might have
2646 been possible had we been able to guess correctly. */
2648 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2650 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2651 offsets_at[CODE_LABEL_NUMBER (x)][i]
2652 = (initial_p ? reg_eliminate[i].initial_offset
2653 : reg_eliminate[i].offset);
2654 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2657 /* Otherwise, if this is the definition of a label and it is
2658 preceded by a BARRIER, set our offsets to the known offset of
/* Control cannot fall into a label after a BARRIER, so the current
   offsets are irrelevant; adopt the label's recorded offsets and
   recount how many eliminations are away from their initial offset.  */
2662 && (tem = prev_nonnote_insn (insn)) != 0
2663 && GET_CODE (tem) == BARRIER)
2665 num_not_at_initial_offset = 0;
2666 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2668 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2669 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2670 if (reg_eliminate[i].can_eliminate
2671 && (reg_eliminate[i].offset
2672 != reg_eliminate[i].initial_offset))
2673 num_not_at_initial_offset++;
2678 /* If neither of the above cases is true, compare each offset
2679 with those previously recorded and suppress any eliminations
2680 where the offsets disagree. */
2682 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2683 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2684 != (initial_p ? reg_eliminate[i].initial_offset
2685 : reg_eliminate[i].offset))
2686 reg_eliminate[i].can_eliminate = 0;
/* For a jump insn, first process its pattern...  */
2691 set_label_offsets (PATTERN (insn), insn, initial_p);
2693 /* ... fall through ... */
2697 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2698 and hence must have all eliminations at their initial offsets. */
2699 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2700 if (REG_NOTE_KIND (tem) == REG_LABEL)
2701 set_label_offsets (XEXP (tem, 0), insn, 1);
2706 /* Each of the labels in the address vector must be at their initial
2707 offsets. We want the first field for ADDR_VEC and the second
2708 field for ADDR_DIFF_VEC. */
2710 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2711 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2716 /* We only care about setting PC. If the source is not RETURN,
2717 IF_THEN_ELSE, or a label, disable any eliminations not at
2718 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2719 isn't one of those possibilities. For branches to a label,
2720 call ourselves recursively.
2722 Note that this can disable elimination unnecessarily when we have
2723 a non-local goto since it will look like a non-constant jump to
2724 someplace in the current function. This isn't a significant
2725 problem since such jumps will normally be when all elimination
2726 pairs are back to their initial offsets. */
2728 if (SET_DEST (x) != pc_rtx)
2731 switch (GET_CODE (SET_SRC (x)))
/* Direct branch: recurse on the label.  */
2738 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
/* Conditional branch: handle each arm of the IF_THEN_ELSE.  */
2742 tem = XEXP (SET_SRC (x), 1);
2743 if (GET_CODE (tem) == LABEL_REF)
2744 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2745 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2748 tem = XEXP (SET_SRC (x), 2);
2749 if (GET_CODE (tem) == LABEL_REF)
2750 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2751 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2759 /* If we reach here, all eliminations must be at their initial
2760 offset because we are doing a jump to a variable address. */
/* NOTE(review): "®_eliminate" on the next line is mojibake for
   "&reg_eliminate" (an HTML "&reg;" entity was substituted during
   conversion) — repair this in the source encoding.  */
2761 for (p = reg_eliminate; p < ®_eliminate[NUM_ELIMINABLE_REGS]; p++)
2762 if (p->offset != p->initial_offset)
2763 p->can_eliminate = 0;
2771 /* Used for communication between the next two functions to properly share
2772 the vector for an ASM_OPERANDS. */
2774 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2776 /* Scan X and replace any eliminable registers (such as fp) with a
2777 replacement (such as sp), plus an offset.
2779 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2780 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2781 MEM, we are allowed to replace a sum of a register and the constant zero
2782 with the register, which we cannot do outside a MEM. In addition, we need
2783 to record the fact that a register is referenced outside a MEM.
2785 If INSN is an insn, it is the insn containing X. If we replace a REG
2786 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2787 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2788 that the REG is being modified.
2790 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2791 That's used when we eliminate in expressions stored in notes.
2792 This means, do not set ref_outside_mem even if the reference
2795 If we see a modification to a register we know about, take the
2796 appropriate action (see case SET, below).
2798 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2799 replacements done assuming all offsets are at their initial values. If
2800 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2801 encounter, return the actual location so that find_reloads will do
2802 the proper thing. */
2805 eliminate_regs (x, mem_mode, insn, storing)
2807 enum machine_mode mem_mode;
2811 enum rtx_code code = GET_CODE (x);
2812 struct elim_table *ep;
2835 /* This is only for the benefit of the debugging backends, which call
2836 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2837 removed after CSE. */
2838 new = eliminate_regs (XEXP (x, 0), 0, insn, 0);
2839 if (GET_CODE (new) == MEM)
2840 return XEXP (new, 0);
2846 /* First handle the case where we encounter a bare register that
2847 is eliminable. Replace it with a PLUS. */
2848 if (regno < FIRST_PSEUDO_REGISTER)
2850 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2852 if (ep->from_rtx == x && ep->can_eliminate)
2855 /* Refs inside notes don't count for this purpose. */
2856 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2857 || GET_CODE (insn) == INSN_LIST)))
2858 ep->ref_outside_mem = 1;
2859 return plus_constant (ep->to_rtx, ep->previous_offset);
2863 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2864 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2866 /* In this case, find_reloads would attempt to either use an
2867 incorrect address (if something is not at its initial offset)
2868 or substitute an replaced address into an insn (which loses
2869 if the offset is changed by some later action). So we simply
2870 return the replaced stack slot (assuming it is changed by
2871 elimination) and ignore the fact that this is actually a
2872 reference to the pseudo. Ensure we make a copy of the
2873 address in case it is shared. */
2874 new = eliminate_regs (reg_equiv_memory_loc[regno],
2876 if (new != reg_equiv_memory_loc[regno])
2878 cannot_omit_stores[regno] = 1;
2879 return copy_rtx (new);
2885 /* If this is the sum of an eliminable register and a constant, rework
2887 if (GET_CODE (XEXP (x, 0)) == REG
2888 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2889 && CONSTANT_P (XEXP (x, 1)))
2891 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2893 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2896 /* Refs inside notes don't count for this purpose. */
2897 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2898 || GET_CODE (insn) == INSN_LIST)))
2899 ep->ref_outside_mem = 1;
2901 /* The only time we want to replace a PLUS with a REG (this
2902 occurs when the constant operand of the PLUS is the negative
2903 of the offset) is when we are inside a MEM. We won't want
2904 to do so at other times because that would change the
2905 structure of the insn in a way that reload can't handle.
2906 We special-case the commonest situation in
2907 eliminate_regs_in_insn, so just replace a PLUS with a
2908 PLUS here, unless inside a MEM. */
2909 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2910 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2913 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2914 plus_constant (XEXP (x, 1),
2915 ep->previous_offset));
2918 /* If the register is not eliminable, we are done since the other
2919 operand is a constant. */
2923 /* If this is part of an address, we want to bring any constant to the
2924 outermost PLUS. We will do this by doing register replacement in
2925 our operands and seeing if a constant shows up in one of them.
2927 We assume here this is part of an address (or a "load address" insn)
2928 since an eliminable register is not likely to appear in any other
2931 If we have (plus (eliminable) (reg)), we want to produce
2932 (plus (plus (replacement) (reg) (const))). If this was part of a
2933 normal add insn, (plus (replacement) (reg)) will be pushed as a
2934 reload. This is the desired action. */
2937 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
2938 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
2940 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2942 /* If one side is a PLUS and the other side is a pseudo that
2943 didn't get a hard register but has a reg_equiv_constant,
2944 we must replace the constant here since it may no longer
2945 be in the position of any operand. */
2946 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2947 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2948 && reg_renumber[REGNO (new1)] < 0
2949 && reg_equiv_constant != 0
2950 && reg_equiv_constant[REGNO (new1)] != 0)
2951 new1 = reg_equiv_constant[REGNO (new1)];
2952 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2953 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2954 && reg_renumber[REGNO (new0)] < 0
2955 && reg_equiv_constant[REGNO (new0)] != 0)
2956 new0 = reg_equiv_constant[REGNO (new0)];
2958 new = form_sum (new0, new1);
2960 /* As above, if we are not inside a MEM we do not want to
2961 turn a PLUS into something else. We might try to do so here
2962 for an addition of 0 if we aren't optimizing. */
2963 if (! mem_mode && GET_CODE (new) != PLUS)
2964 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2972 /* If this is the product of an eliminable register and a
2973 constant, apply the distribute law and move the constant out
2974 so that we have (plus (mult ..) ..). This is needed in order
2975 to keep load-address insns valid. This case is pathological.
2976 We ignore the possibility of overflow here. */
2977 if (GET_CODE (XEXP (x, 0)) == REG
2978 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2979 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2980 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2982 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2985 /* Refs inside notes don't count for this purpose. */
2986 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2987 || GET_CODE (insn) == INSN_LIST)))
2988 ep->ref_outside_mem = 1;
2991 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2992 ep->previous_offset * INTVAL (XEXP (x, 1)));
2995 /* ... fall through ... */
3000 case DIV: case UDIV:
3001 case MOD: case UMOD:
3002 case AND: case IOR: case XOR:
3003 case ROTATERT: case ROTATE:
3004 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3006 case GE: case GT: case GEU: case GTU:
3007 case LE: case LT: case LEU: case LTU:
3009 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3011 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn, 0) : 0;
3013 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
3014 return gen_rtx (code, GET_MODE (x), new0, new1);
3019 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3022 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3023 if (new != XEXP (x, 0))
3024 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
3027 /* ... fall through ... */
3030 /* Now do eliminations in the rest of the chain. If this was
3031 an EXPR_LIST, this might result in allocating more memory than is
3032 strictly needed, but it simplifies the code. */
3035 new = eliminate_regs (XEXP (x, 1), mem_mode, insn, 0);
3036 if (new != XEXP (x, 1))
3037 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3045 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3046 if (ep->to_rtx == XEXP (x, 0))
3048 int size = GET_MODE_SIZE (mem_mode);
3050 /* If more bytes than MEM_MODE are pushed, account for them. */
3051 #ifdef PUSH_ROUNDING
3052 if (ep->to_rtx == stack_pointer_rtx)
3053 size = PUSH_ROUNDING (size);
3055 if (code == PRE_DEC || code == POST_DEC)
3061 /* Fall through to generic unary operation case. */
3062 case STRICT_LOW_PART:
3064 case SIGN_EXTEND: case ZERO_EXTEND:
3065 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3066 case FLOAT: case FIX:
3067 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3071 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3072 if (new != XEXP (x, 0))
3073 return gen_rtx (code, GET_MODE (x), new);
3077 /* Similar to above processing, but preserve SUBREG_WORD.
3078 Convert (subreg (mem)) to (mem) if not paradoxical.
3079 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3080 pseudo didn't get a hard reg, we must replace this with the
3081 eliminated version of the memory location because push_reloads
3082 may do the replacement in certain circumstances. */
3083 if (GET_CODE (SUBREG_REG (x)) == REG
3084 && (GET_MODE_SIZE (GET_MODE (x))
3085 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3086 && reg_equiv_memory_loc != 0
3087 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3089 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3092 /* If we didn't change anything, we must retain the pseudo. */
3093 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3094 new = SUBREG_REG (x);
3097 /* Otherwise, ensure NEW isn't shared in case we have to reload
3099 new = copy_rtx (new);
3101 /* In this case, we must show that the pseudo is used in this
3102 insn so that delete_output_reload will do the right thing. */
3103 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3104 && GET_CODE (insn) != INSN_LIST)
3105 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
3110 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn, 0);
3112 if (new != XEXP (x, 0))
3114 int x_size = GET_MODE_SIZE (GET_MODE (x));
3115 int new_size = GET_MODE_SIZE (GET_MODE (new));
3117 /* When asked to spill a partial word subreg, we need to go
3118 ahead and spill the whole thing against the possibility
3119 that we reload the whole reg and find garbage at the top. */
3121 && GET_CODE (new) == MEM
3122 && x_size < new_size
3123 && ((x_size + UNITS_PER_WORD-1) / UNITS_PER_WORD
3124 == (new_size + UNITS_PER_WORD-1) / UNITS_PER_WORD))
3126 else if (GET_CODE (new) == MEM
3127 && x_size <= new_size
3128 #ifdef LOAD_EXTEND_OP
3129 /* On these machines we will be reloading what is
3130 inside the SUBREG if it originally was a pseudo and
3131 the inner and outer modes are both a word or
3132 smaller. So leave the SUBREG then. */
3133 && ! (GET_CODE (SUBREG_REG (x)) == REG
3134 && x_size <= UNITS_PER_WORD
3135 && new_size <= UNITS_PER_WORD
3136 && x_size > new_size
3137 && INTEGRAL_MODE_P (GET_MODE (new))
3138 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
3142 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3143 enum machine_mode mode = GET_MODE (x);
3145 if (BYTES_BIG_ENDIAN)
3146 offset += (MIN (UNITS_PER_WORD,
3147 GET_MODE_SIZE (GET_MODE (new)))
3148 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3150 PUT_MODE (new, mode);
3151 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3155 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
3161 /* If using a register that is the source of an eliminate we still
3162 think can be performed, note it cannot be performed since we don't
3163 know how this register is used. */
3164 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3165 if (ep->from_rtx == XEXP (x, 0))
3166 ep->can_eliminate = 0;
3168 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3169 if (new != XEXP (x, 0))
3170 return gen_rtx (code, GET_MODE (x), new);
3174 /* If clobbering a register that is the replacement register for an
3175 elimination we still think can be performed, note that it cannot
3176 be performed. Otherwise, we need not be concerned about it. */
3177 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3178 if (ep->to_rtx == XEXP (x, 0))
3179 ep->can_eliminate = 0;
3181 new = eliminate_regs (XEXP (x, 0), mem_mode, insn, 0);
3182 if (new != XEXP (x, 0))
3183 return gen_rtx (code, GET_MODE (x), new);
3189 /* Properly handle sharing input and constraint vectors. */
3190 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3192 /* When we come to a new vector not seen before,
3193 scan all its elements; keep the old vector if none
3194 of them changes; otherwise, make a copy. */
3195 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3196 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3197 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3198 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3201 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3202 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3205 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3206 new_asm_operands_vec = old_asm_operands_vec;
3208 new_asm_operands_vec
3209 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3212 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3213 if (new_asm_operands_vec == old_asm_operands_vec)
3216 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3217 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3218 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3219 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3220 ASM_OPERANDS_SOURCE_FILE (x),
3221 ASM_OPERANDS_SOURCE_LINE (x));
3222 new->volatil = x->volatil;
3227 /* Check for setting a register that we know about. */
3228 if (GET_CODE (SET_DEST (x)) == REG)
3230 /* See if this is setting the replacement register for an
3233 If DEST is the hard frame pointer, we do nothing because we
3234 assume that all assignments to the frame pointer are for
3235 non-local gotos and are being done at a time when they are valid
3236 and do not disturb anything else. Some machines want to
3237 eliminate a fake argument pointer (or even a fake frame pointer)
3238 with either the real frame or the stack pointer. Assignments to
3239 the hard frame pointer must not prevent this elimination. */
3241 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3243 if (ep->to_rtx == SET_DEST (x)
3244 && SET_DEST (x) != hard_frame_pointer_rtx)
3246 /* If it is being incremented, adjust the offset. Otherwise,
3247 this elimination can't be done. */
3248 rtx src = SET_SRC (x);
3250 if (GET_CODE (src) == PLUS
3251 && XEXP (src, 0) == SET_DEST (x)
3252 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3253 ep->offset -= INTVAL (XEXP (src, 1));
3255 ep->can_eliminate = 0;
3258 /* Now check to see we are assigning to a register that can be
3259 eliminated. If so, it must be as part of a PARALLEL, since we
3260 will not have been called if this is a single SET. So indicate
3261 that we can no longer eliminate this reg. */
3262 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3264 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3265 ep->can_eliminate = 0;
3268 /* Now avoid the loop below in this common case. */
3270 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn, 1);
3271 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn, 0);
3273 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3274 write a CLOBBER insn. */
3275 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3276 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3277 && GET_CODE (insn) != INSN_LIST)
3278 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3280 /* If SET_DEST was a partial-word subreg, NEW0 may have been widened
3281 to spill the entire register (see SUBREG case above). If the
3282 widths of SET_DEST and NEW0 no longer match, adjust NEW1. */
3283 if (GET_MODE (SET_DEST (x)) != GET_MODE (new0))
3284 new1 = gen_rtx (SUBREG, GET_MODE (new0), new1, 0);
3286 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3287 return gen_rtx (SET, VOIDmode, new0, new1);
3293 /* This is only for the benefit of the debugging backends, which call
3294 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3295 removed after CSE. */
3296 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3297 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn, 0);
3299 /* Our only special processing is to pass the mode of the MEM to our
3300 recursive call and copy the flags. While we are here, handle this
3301 case more efficiently. */
3302 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn, 0);
3303 if (new != XEXP (x, 0))
3305 new = gen_rtx (MEM, GET_MODE (x), new);
3306 new->volatil = x->volatil;
3307 new->unchanging = x->unchanging;
3308 new->in_struct = x->in_struct;
3318 /* Process each of our operands recursively. If any have changed, make a
3320 fmt = GET_RTX_FORMAT (code);
3321 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3325 new = eliminate_regs (XEXP (x, i), mem_mode, insn, 0);
3326 if (new != XEXP (x, i) && ! copied)
3328 rtx new_x = rtx_alloc (code);
3329 bcopy ((char *) x, (char *) new_x,
3330 (sizeof (*new_x) - sizeof (new_x->fld)
3331 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3337 else if (*fmt == 'E')
3340 for (j = 0; j < XVECLEN (x, i); j++)
3342 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn, 0);
3343 if (new != XVECEXP (x, i, j) && ! copied_vec)
3345 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3349 rtx new_x = rtx_alloc (code);
3350 bcopy ((char *) x, (char *) new_x,
3351 (sizeof (*new_x) - sizeof (new_x->fld)
3352 + (sizeof (new_x->fld[0])
3353 * GET_RTX_LENGTH (code))));
3357 XVEC (x, i) = new_v;
3360 XVECEXP (x, i, j) = new;
3368 /* Scan INSN and eliminate all eliminable registers in it.
3370 If REPLACE is nonzero, do the replacement destructively. Also
3371 delete the insn as dead if it is setting an eliminable register.
3373 If REPLACE is zero, do all our allocations in reload_obstack.
3375 If no eliminations were done and this insn doesn't require any elimination
3376 processing (these are not identical conditions: it might be updating sp,
3377 but not referencing fp; this needs to be seen during reload_as_needed so
3378 that the offset between fp and sp can be taken into consideration), zero
3379 is returned. Otherwise, 1 is returned. */
3382 eliminate_regs_in_insn (insn, replace)
3386 rtx old_body = PATTERN (insn);
3387 rtx old_set = single_set (insn);
3390 struct elim_table *ep;
3393 push_obstacks (&reload_obstack, &reload_obstack);
3395 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3396 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3398 /* Check for setting an eliminable register. */
3399 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3400 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3402 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3403 /* If this is setting the frame pointer register to the
3404 hardware frame pointer register and this is an elimination
3405 that will be done (tested above), this insn is really
3406 adjusting the frame pointer downward to compensate for
3407 the adjustment done before a nonlocal goto. */
3408 if (ep->from == FRAME_POINTER_REGNUM
3409 && ep->to == HARD_FRAME_POINTER_REGNUM)
3411 rtx src = SET_SRC (old_set);
3413 rtx prev_insn, prev_set;
3415 if (src == ep->to_rtx)
3417 else if (GET_CODE (src) == PLUS
3418 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3419 offset = INTVAL (XEXP (src, 0)), ok = 1;
3420 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3421 && (prev_set = single_set (prev_insn)) != 0
3422 && rtx_equal_p (SET_DEST (prev_set), src))
3424 src = SET_SRC (prev_set);
3425 if (src == ep->to_rtx)
3427 else if (GET_CODE (src) == PLUS
3428 && GET_CODE (XEXP (src, 0)) == CONST_INT
3429 && XEXP (src, 1) == ep->to_rtx)
3430 offset = INTVAL (XEXP (src, 0)), ok = 1;
3431 else if (GET_CODE (src) == PLUS
3432 && GET_CODE (XEXP (src, 1)) == CONST_INT
3433 && XEXP (src, 0) == ep->to_rtx)
3434 offset = INTVAL (XEXP (src, 1)), ok = 1;
3442 = plus_constant (ep->to_rtx, offset - ep->offset);
3444 /* First see if this insn remains valid when we
3445 make the change. If not, keep the INSN_CODE
3446 the same and let reload fit it up. */
3447 validate_change (insn, &SET_SRC (old_set), src, 1);
3448 validate_change (insn, &SET_DEST (old_set),
3450 if (! apply_change_group ())
3452 SET_SRC (old_set) = src;
3453 SET_DEST (old_set) = ep->to_rtx;
3463 /* In this case this insn isn't serving a useful purpose. We
3464 will delete it in reload_as_needed once we know that this
3465 elimination is, in fact, being done.
3467 If REPLACE isn't set, we can't delete this insn, but needn't
3468 process it since it won't be used unless something changes. */
3470 delete_dead_insn (insn);
3475 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3476 in the insn is the negative of the offset in FROM. Substitute
3477 (set (reg) (reg to)) for the insn and change its code.
3479 We have to do this here, rather than in eliminate_regs, do that we can
3480 change the insn code. */
3482 if (GET_CODE (SET_SRC (old_set)) == PLUS
3483 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3484 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3485 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3487 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3488 && ep->can_eliminate)
3490 /* We must stop at the first elimination that will be used.
3491 If this one would replace the PLUS with a REG, do it
3492 now. Otherwise, quit the loop and let eliminate_regs
3493 do its normal replacement. */
3494 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3496 /* We assume here that we don't need a PARALLEL of
3497 any CLOBBERs for this assignment. There's not
3498 much we can do if we do need it. */
3499 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3500 SET_DEST (old_set), ep->to_rtx);
3501 INSN_CODE (insn) = -1;
3510 old_asm_operands_vec = 0;
3512 /* Replace the body of this insn with a substituted form. If we changed
3513 something, return non-zero.
3515 If we are replacing a body that was a (set X (plus Y Z)), try to
3516 re-recognize the insn. We do this in case we had a simple addition
3517 but now can do this as a load-address. This saves an insn in this
3520 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX, 0);
3521 if (new_body != old_body)
3523 /* If we aren't replacing things permanently and we changed something,
3524 make another copy to ensure that all the RTL is new. Otherwise
3525 things can go wrong if find_reload swaps commutative operands
3526 and one is inside RTL that has been copied while the other is not. */
3528 /* Don't copy an asm_operands because (1) there's no need and (2)
3529 copy_rtx can't do it properly when there are multiple outputs. */
3530 if (! replace && asm_noperands (old_body) < 0)
3531 new_body = copy_rtx (new_body);
3533 /* If we had a move insn but now we don't, rerecognize it. This will
3534 cause spurious re-recognition if the old move had a PARALLEL since
3535 the new one still will, but we can't call single_set without
3536 having put NEW_BODY into the insn and the re-recognition won't
3537 hurt in this rare case. */
3539 && ((GET_CODE (SET_SRC (old_set)) == REG
3540 && (GET_CODE (new_body) != SET
3541 || GET_CODE (SET_SRC (new_body)) != REG))
3542 /* If this was a load from or store to memory, compare
3543 the MEM in recog_operand to the one in the insn. If they
3544 are not equal, then rerecognize the insn. */
3546 && ((GET_CODE (SET_SRC (old_set)) == MEM
3547 && SET_SRC (old_set) != recog_operand[1])
3548 || (GET_CODE (SET_DEST (old_set)) == MEM
3549 && SET_DEST (old_set) != recog_operand[0])))
3550 /* If this was an add insn before, rerecognize. */
3551 || GET_CODE (SET_SRC (old_set)) == PLUS))
3553 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3554 /* If recognition fails, store the new body anyway.
3555 It's normal to have recognition failures here
3556 due to bizarre memory addresses; reloading will fix them. */
3557 PATTERN (insn) = new_body;
3560 PATTERN (insn) = new_body;
3565 /* Loop through all elimination pairs. See if any have changed and
3566 recalculate the number not at initial offset.
3568 Compute the maximum offset (minimum offset if the stack does not
3569 grow downward) for each elimination pair.
3571 We also detect a cases where register elimination cannot be done,
3572 namely, if a register would be both changed and referenced outside a MEM
3573 in the resulting insn since such an insn is often undefined and, even if
3574 not, we cannot know what meaning will be given to it. Note that it is
3575 valid to have a register used in an address in an insn that changes it
3576 (presumably with a pre- or post-increment or decrement).
3578 If anything changes, return nonzero. */
3580 num_not_at_initial_offset = 0;
3581 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3583 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3584 ep->can_eliminate = 0;
3586 ep->ref_outside_mem = 0;
3588 if (ep->previous_offset != ep->offset)
3591 ep->previous_offset = ep->offset;
3592 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3593 num_not_at_initial_offset++;
3595 #ifdef STACK_GROWS_DOWNWARD
3596 ep->max_offset = MAX (ep->max_offset, ep->offset);
3598 ep->max_offset = MIN (ep->max_offset, ep->offset);
3603 /* If we changed something, perform elimination in REG_NOTES. This is
3604 needed even when REPLACE is zero because a REG_DEAD note might refer
3605 to a register that we eliminate and could cause a different number
3606 of spill registers to be needed in the final reload pass than in
3608 if (val && REG_NOTES (insn) != 0)
3609 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn), 0);
3617 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3618 replacement we currently believe is valid, mark it as not eliminable if X
3619 modifies DEST in any way other than by adding a constant integer to it.
3621 If DEST is the hard frame pointer, we do nothing because we assume that
3622 all assignments to the hard frame pointer are nonlocal gotos and are being
3623 done at a time when they are valid and do not disturb anything else.
3624 Some machines want to eliminate a fake argument pointer with either the
3625 frame or stack pointer. Assignments to the hard frame pointer must not
3626 prevent this elimination.
3628 Called via note_stores from reload before starting its passes to scan
3629 the insns of the function. */
3632 mark_not_eliminable (dest, x)
3638 /* A SUBREG of a hard register here is just changing its mode. We should
3639 not see a SUBREG of an eliminable hard register, but check just in
3641 if (GET_CODE (dest) == SUBREG)
3642 dest = SUBREG_REG (dest);
3644 if (dest == hard_frame_pointer_rtx)
3647 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3648 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3649 && (GET_CODE (x) != SET
3650 || GET_CODE (SET_SRC (x)) != PLUS
3651 || XEXP (SET_SRC (x), 0) != dest
3652 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3654 reg_eliminate[i].can_eliminate_previous
3655 = reg_eliminate[i].can_eliminate = 0;
3660 /* Kick all pseudos out of hard register REGNO.
3661 If GLOBAL is nonzero, try to find someplace else to put them.
3662 If DUMPFILE is nonzero, log actions taken on that file.
3664 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3665 because we found we can't eliminate some register. In that case, no pseudos
3666 are allowed to be in the register, even if they are only in a block that
3667 doesn't require spill registers, unlike the case when we are spilling this
3668 hard reg to produce another spill register.
3670 Return nonzero if any pseudos needed to be kicked out. */
3673 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3679 enum reg_class class = REGNO_REG_CLASS (regno);
3680 int something_changed = 0;
3683 SET_HARD_REG_BIT (forbidden_regs, regno);
3686 regs_ever_live[regno] = 1;
3688 /* Spill every pseudo reg that was allocated to this reg
3689 or to something that overlaps this reg. */
3691 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3692 if (reg_renumber[i] >= 0
3693 && reg_renumber[i] <= regno
3695 + HARD_REGNO_NREGS (reg_renumber[i],
3696 PSEUDO_REGNO_MODE (i))
3699 /* If this register belongs solely to a basic block which needed no
3700 spilling of any class that this register is contained in,
3701 leave it be, unless we are spilling this register because
3702 it was a hard register that can't be eliminated. */
3704 if (! cant_eliminate
3705 && basic_block_needs[0]
3706 && REG_BASIC_BLOCK (i) >= 0
3707 && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
3711 for (p = reg_class_superclasses[(int) class];
3712 *p != LIM_REG_CLASSES; p++)
3713 if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
3716 if (*p == LIM_REG_CLASSES)
3720 /* Mark it as no longer having a hard register home. */
3721 reg_renumber[i] = -1;
3722 /* We will need to scan everything again. */
3723 something_changed = 1;
3725 retry_global_alloc (i, forbidden_regs);
3727 alter_reg (i, regno);
3730 if (reg_renumber[i] == -1)
3731 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3733 fprintf (dumpfile, " Register %d now in %d.\n\n",
3734 i, reg_renumber[i]);
3737 for (i = 0; i < scratch_list_length; i++)
3739 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3741 if (! cant_eliminate && basic_block_needs[0]
3742 && ! basic_block_needs[(int) class][scratch_block[i]])
3746 for (p = reg_class_superclasses[(int) class];
3747 *p != LIM_REG_CLASSES; p++)
3748 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3751 if (*p == LIM_REG_CLASSES)
3754 PUT_CODE (scratch_list[i], SCRATCH);
3755 scratch_list[i] = 0;
3756 something_changed = 1;
3761 return something_changed;
3764 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3765 Also mark any hard registers used to store user variables as
3766 forbidden from being used for spill registers. */
3769 scan_paradoxical_subregs (x)
3774 register enum rtx_code code = GET_CODE (x);
3779 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3780 && REG_USERVAR_P (x))
3781 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3796 if (GET_CODE (SUBREG_REG (x)) == REG
3797 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3798 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3799 = GET_MODE_SIZE (GET_MODE (x));
3806 fmt = GET_RTX_FORMAT (code);
3807 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3810 scan_paradoxical_subregs (XEXP (x, i));
3811 else if (fmt[i] == 'E')
3814 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3815 scan_paradoxical_subregs (XVECEXP (x, i, j));
3821 hard_reg_use_compare (p1p, p2p)
3822 const GENERIC_PTR p1p;
3823 const GENERIC_PTR p2p;
3825 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3826 *p2 = (struct hard_reg_n_uses *)p2p;
3827 int tem = p1->uses - p2->uses;
3828 if (tem != 0) return tem;
3829 /* If regs are equally good, sort by regno,
3830 so that the results of qsort leave nothing to chance. */
3831 return p1->regno - p2->regno;
3834 /* Choose the order to consider regs for use as reload registers
3835 based on how much trouble would be caused by spilling one.
3836 Store them in order of decreasing preference in potential_reload_regs. */
3839 order_regs_for_reload (global)
3846 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3848 CLEAR_HARD_REG_SET (bad_spill_regs);
3850 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3851 potential_reload_regs[i] = -1;
3853 /* Count number of uses of each hard reg by pseudo regs allocated to it
3854 and then order them by decreasing use. */
3856 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3858 hard_reg_n_uses[i].uses = 0;
3859 hard_reg_n_uses[i].regno = i;
3862 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3864 int regno = reg_renumber[i];
3867 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3870 /* If allocated by local-alloc, show more uses since
3871 we're not going to be able to reallocate it, but
3872 we might if allocated by global alloc. */
3873 if (global && reg_allocno[i] < 0)
3874 hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;
3876 hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
3879 large += REG_N_REFS (i);
3882 /* Now fixed registers (which cannot safely be used for reloading)
3883 get a very high use count so they will be considered least desirable.
3884 Registers used explicitly in the rtl code are almost as bad. */
3886 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3890 hard_reg_n_uses[i].uses += 2 * large + 2;
3891 SET_HARD_REG_BIT (bad_spill_regs, i);
3893 else if (regs_explicitly_used[i])
3895 hard_reg_n_uses[i].uses += large + 1;
3896 if (! SMALL_REGISTER_CLASSES)
3897 /* ??? We are doing this here because of the potential
3898 that bad code may be generated if a register explicitly
3899 used in an insn was used as a spill register for that
3900 insn. But not using these are spill registers may lose
3901 on some machine. We'll have to see how this works out. */
3902 SET_HARD_REG_BIT (bad_spill_regs, i);
3905 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3906 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3908 #ifdef ELIMINABLE_REGS
3909 /* If registers other than the frame pointer are eliminable, mark them as
3911 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3913 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3914 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3918 /* Prefer registers not so far used, for use in temporary loading.
3919 Among them, if REG_ALLOC_ORDER is defined, use that order.
3920 Otherwise, prefer registers not preserved by calls. */
3922 #ifdef REG_ALLOC_ORDER
3923 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3925 int regno = reg_alloc_order[i];
3927 if (hard_reg_n_uses[regno].uses == 0)
3928 potential_reload_regs[o++] = regno;
3931 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3933 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3934 potential_reload_regs[o++] = i;
3936 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3938 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3939 potential_reload_regs[o++] = i;
3943 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3944 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3946 /* Now add the regs that are already used,
3947 preferring those used less often. The fixed and otherwise forbidden
3948 registers will be at the end of this list. */
3950 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3951 if (hard_reg_n_uses[i].uses != 0)
3952 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3955 /* Used in reload_as_needed to sort the spilled regs. */
3958 compare_spill_regs (r1p, r2p)
3959 const GENERIC_PTR r1p;
3960 const GENERIC_PTR r2p;
3962 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3966 /* Reload pseudo-registers into hard regs around each insn as needed.
3967 Additional register load insns are output before the insn that needs it
3968 and perhaps store insns after insns that modify the reloaded pseudo reg.
3970 reg_last_reload_reg and reg_reloaded_contents keep track of
3971 which registers are already available in reload registers.
3972 We update these for the reloads that we perform,
3973 as the insns are scanned. */
3976 reload_as_needed (first, live_known)
3986 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3987 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3988 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3989 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3990 reg_has_output_reload = (char *) alloca (max_regno);
3991 for (i = 0; i < n_spills; i++)
3993 reg_reloaded_contents[i] = -1;
3994 reg_reloaded_insn[i] = 0;
3997 /* Reset all offsets on eliminable registers to their initial values. */
3998 #ifdef ELIMINABLE_REGS
3999 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4001 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
4002 reg_eliminate[i].initial_offset);
4003 reg_eliminate[i].previous_offset
4004 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
4007 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
4008 reg_eliminate[0].previous_offset
4009 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
4012 num_not_at_initial_offset = 0;
4014 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
4015 pack registers with group needs. */
4018 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
4019 for (i = 0; i < n_spills; i++)
4020 spill_reg_order[spill_regs[i]] = i;
4023 for (insn = first; insn;)
4025 register rtx next = NEXT_INSN (insn);
4027 /* Notice when we move to a new basic block. */
4028 if (live_known && this_block + 1 < n_basic_blocks
4029 && insn == basic_block_head[this_block+1])
4032 /* If we pass a label, copy the offsets from the label information
4033 into the current offsets of each elimination. */
4034 if (GET_CODE (insn) == CODE_LABEL)
4036 num_not_at_initial_offset = 0;
4037 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4039 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
4040 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
4041 if (reg_eliminate[i].can_eliminate
4042 && (reg_eliminate[i].offset
4043 != reg_eliminate[i].initial_offset))
4044 num_not_at_initial_offset++;
4048 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4050 rtx avoid_return_reg = 0;
4051 rtx oldpat = PATTERN (insn);
4053 /* Set avoid_return_reg if this is an insn
4054 that might use the value of a function call. */
4055 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
4057 if (GET_CODE (PATTERN (insn)) == SET)
4058 after_call = SET_DEST (PATTERN (insn));
4059 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4060 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4061 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4065 else if (SMALL_REGISTER_CLASSES && after_call != 0
4066 && !(GET_CODE (PATTERN (insn)) == SET
4067 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
4069 if (reg_referenced_p (after_call, PATTERN (insn)))
4070 avoid_return_reg = after_call;
4074 /* If this is a USE and CLOBBER of a MEM, ensure that any
4075 references to eliminable registers have been removed. */
4077 if ((GET_CODE (PATTERN (insn)) == USE
4078 || GET_CODE (PATTERN (insn)) == CLOBBER)
4079 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4080 XEXP (XEXP (PATTERN (insn), 0), 0)
4081 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4082 GET_MODE (XEXP (PATTERN (insn), 0)),
4085 /* If we need to do register elimination processing, do so.
4086 This might delete the insn, in which case we are done. */
4087 if (num_eliminable && GET_MODE (insn) == QImode)
4089 eliminate_regs_in_insn (insn, 1);
4090 if (GET_CODE (insn) == NOTE)
4097 if (GET_MODE (insn) == VOIDmode)
4099 /* First find the pseudo regs that must be reloaded for this insn.
4100 This info is returned in the tables reload_... (see reload.h).
4101 Also modify the body of INSN by substituting RELOAD
4102 rtx's for those pseudo regs. */
4105 bzero (reg_has_output_reload, max_regno);
4106 CLEAR_HARD_REG_SET (reg_is_output_reload);
4108 find_reloads (insn, 1, spill_indirect_levels, live_known,
4114 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4118 /* If this block has not had spilling done for a
4119 particular clas and we have any non-optionals that need a
4120 spill reg in that class, abort. */
4122 for (class = 0; class < N_REG_CLASSES; class++)
4123 if (basic_block_needs[class] != 0
4124 && basic_block_needs[class][this_block] == 0)
4125 for (i = 0; i < n_reloads; i++)
4126 if (class == (int) reload_reg_class[i]
4127 && reload_reg_rtx[i] == 0
4128 && ! reload_optional[i]
4129 && (reload_in[i] != 0 || reload_out[i] != 0
4130 || reload_secondary_p[i] != 0))
4131 fatal_insn ("Non-optional registers need a spill register", insn);
4133 /* Now compute which reload regs to reload them into. Perhaps
4134 reusing reload regs from previous insns, or else output
4135 load insns to reload them. Maybe output store insns too.
4136 Record the choices of reload reg in reload_reg_rtx. */
4137 choose_reload_regs (insn, avoid_return_reg);
4139 /* Merge any reloads that we didn't combine for fear of
4140 increasing the number of spill registers needed but now
4141 discover can be safely merged. */
4142 if (SMALL_REGISTER_CLASSES)
4143 merge_assigned_reloads (insn);
4145 /* Generate the insns to reload operands into or out of
4146 their reload regs. */
4147 emit_reload_insns (insn);
4149 /* Substitute the chosen reload regs from reload_reg_rtx
4150 into the insn's body (or perhaps into the bodies of other
4151 load and store insn that we just made for reloading
4152 and that we moved the structure into). */
4155 /* If this was an ASM, make sure that all the reload insns
4156 we have generated are valid. If not, give an error
4159 if (asm_noperands (PATTERN (insn)) >= 0)
4160 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4161 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4162 && (recog_memoized (p) < 0
4163 || (insn_extract (p),
4164 ! constrain_operands (INSN_CODE (p), 1))))
4166 error_for_asm (insn,
4167 "`asm' operand requires impossible reload");
4169 NOTE_SOURCE_FILE (p) = 0;
4170 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4173 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4174 is no longer validly lying around to save a future reload.
4175 Note that this does not detect pseudos that were reloaded
4176 for this insn in order to be stored in
4177 (obeying register constraints). That is correct; such reload
4178 registers ARE still valid. */
4179 note_stores (oldpat, forget_old_reloads_1);
4181 /* There may have been CLOBBER insns placed after INSN. So scan
4182 between INSN and NEXT and use them to forget old reloads. */
4183 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4184 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4185 note_stores (PATTERN (x), forget_old_reloads_1);
4188 /* Likewise for regs altered by auto-increment in this insn.
4189 But note that the reg-notes are not changed by reloading:
4190 they still contain the pseudo-regs, not the spill regs. */
4191 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4192 if (REG_NOTE_KIND (x) == REG_INC)
4194 /* See if this pseudo reg was reloaded in this insn.
4195 If so, its last-reload info is still valid
4196 because it is based on this insn's reload. */
4197 for (i = 0; i < n_reloads; i++)
4198 if (reload_out[i] == XEXP (x, 0))
4202 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4206 /* A reload reg's contents are unknown after a label. */
4207 if (GET_CODE (insn) == CODE_LABEL)
4208 for (i = 0; i < n_spills; i++)
4210 reg_reloaded_contents[i] = -1;
4211 reg_reloaded_insn[i] = 0;
4214 /* Don't assume a reload reg is still good after a call insn
4215 if it is a call-used reg. */
4216 else if (GET_CODE (insn) == CALL_INSN)
4217 for (i = 0; i < n_spills; i++)
4218 if (call_used_regs[spill_regs[i]])
4220 reg_reloaded_contents[i] = -1;
4221 reg_reloaded_insn[i] = 0;
4224 /* In case registers overlap, allow certain insns to invalidate
4225 particular hard registers. */
4227 #ifdef INSN_CLOBBERS_REGNO_P
4228 for (i = 0 ; i < n_spills ; i++)
4229 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4231 reg_reloaded_contents[i] = -1;
4232 reg_reloaded_insn[i] = 0;
4244 /* Discard all record of any value reloaded from X,
4245 or reloaded in X from someplace else;
4246 unless X is an output reload reg of the current insn.
4248 X may be a hard reg (the reload reg)
4249 or it may be a pseudo reg that was reloaded from. */
4252 forget_old_reloads_1 (x, ignored)
4260 /* note_stores does give us subregs of hard regs. */
4261 while (GET_CODE (x) == SUBREG)
4263 offset += SUBREG_WORD (x);
4267 if (GET_CODE (x) != REG)
4270 regno = REGNO (x) + offset;
4272 if (regno >= FIRST_PSEUDO_REGISTER)
4277 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4278 /* Storing into a spilled-reg invalidates its contents.
4279 This can happen if a block-local pseudo is allocated to that reg
4280 and it wasn't spilled because this block's total need is 0.
4281 Then some insn might have an optional reload and use this reg. */
4282 for (i = 0; i < nr; i++)
4283 if (spill_reg_order[regno + i] >= 0
4284 /* But don't do this if the reg actually serves as an output
4285 reload reg in the current instruction. */
4287 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4289 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4290 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4294 /* Since value of X has changed,
4295 forget any value previously copied from it. */
4298 /* But don't forget a copy if this is the output reload
4299 that establishes the copy's validity. */
4300 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4301 reg_last_reload_reg[regno + nr] = 0;
4304 /* For each reload, the mode of the reload register. */
4305 static enum machine_mode reload_mode[MAX_RELOADS];
4307 /* For each reload, the largest number of registers it will require. */
4308 static int reload_nregs[MAX_RELOADS];
4310 /* Comparison function for qsort to decide which of two reloads
4311 should be handled first. *P1 and *P2 are the reload numbers. */
4314 reload_reg_class_lower (r1p, r2p)
4315 const GENERIC_PTR r1p;
4316 const GENERIC_PTR r2p;
4318 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4321 /* Consider required reloads before optional ones. */
4322 t = reload_optional[r1] - reload_optional[r2];
4326 /* Count all solitary classes before non-solitary ones. */
4327 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4328 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4332 /* Aside from solitaires, consider all multi-reg groups first. */
4333 t = reload_nregs[r2] - reload_nregs[r1];
4337 /* Consider reloads in order of increasing reg-class number. */
4338 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4342 /* If reloads are equally urgent, sort by reload number,
4343 so that the results of qsort leave nothing to chance. */
4347 /* The following HARD_REG_SETs indicate when each hard register is
4348 used for a reload of various parts of the current insn. */
4350 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4351 static HARD_REG_SET reload_reg_used;
4352 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4353 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4354 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4355 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4356 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4357 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4358 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4359 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4360 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4361 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4362 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4363 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4364 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4365 static HARD_REG_SET reload_reg_used_in_op_addr;
4366 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4367 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4368 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4369 static HARD_REG_SET reload_reg_used_in_insn;
4370 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4371 static HARD_REG_SET reload_reg_used_in_other_addr;
4373 /* If reg is in use as a reload reg for any sort of reload. */
4374 static HARD_REG_SET reload_reg_used_at_all;
4376 /* If reg is used as an inherited reload.  We just mark the first register
4378 static HARD_REG_SET reload_reg_used_for_inherit;
4380 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4381 TYPE. MODE is used to indicate how many consecutive regs are
4385 mark_reload_reg_in_use (regno, opnum, type, mode)
4388 enum reload_type type;
4389 enum machine_mode mode;
4391 int nregs = HARD_REGNO_NREGS (regno, mode);
4394 for (i = regno; i < nregs + regno; i++)
4399 SET_HARD_REG_BIT (reload_reg_used, i);
4402 case RELOAD_FOR_INPUT_ADDRESS:
4403 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4406 case RELOAD_FOR_INPADDR_ADDRESS:
4407 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4410 case RELOAD_FOR_OUTPUT_ADDRESS:
4411 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4414 case RELOAD_FOR_OUTADDR_ADDRESS:
4415 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4418 case RELOAD_FOR_OPERAND_ADDRESS:
4419 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4422 case RELOAD_FOR_OPADDR_ADDR:
4423 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4426 case RELOAD_FOR_OTHER_ADDRESS:
4427 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4430 case RELOAD_FOR_INPUT:
4431 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4434 case RELOAD_FOR_OUTPUT:
4435 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4438 case RELOAD_FOR_INSN:
4439 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4443 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4447 /* Similarly, but show REGNO is no longer in use for a reload. */
4450 clear_reload_reg_in_use (regno, opnum, type, mode)
4453 enum reload_type type;
4454 enum machine_mode mode;
4456 int nregs = HARD_REGNO_NREGS (regno, mode);
4459 for (i = regno; i < nregs + regno; i++)
4464 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4467 case RELOAD_FOR_INPUT_ADDRESS:
4468 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4471 case RELOAD_FOR_INPADDR_ADDRESS:
4472 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4475 case RELOAD_FOR_OUTPUT_ADDRESS:
4476 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4479 case RELOAD_FOR_OUTADDR_ADDRESS:
4480 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4483 case RELOAD_FOR_OPERAND_ADDRESS:
4484 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4487 case RELOAD_FOR_OPADDR_ADDR:
4488 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4491 case RELOAD_FOR_OTHER_ADDRESS:
4492 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4495 case RELOAD_FOR_INPUT:
4496 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4499 case RELOAD_FOR_OUTPUT:
4500 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4503 case RELOAD_FOR_INSN:
4504 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4510 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4511 specified by OPNUM and TYPE. */
4514 reload_reg_free_p (regno, opnum, type)
4517 enum reload_type type;
4521 /* In use for a RELOAD_OTHER means it's not available for anything. */
4522 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4528 /* In use for anything means we can't use it for RELOAD_OTHER. */
4529 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4530 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4531 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4534 for (i = 0; i < reload_n_operands; i++)
4535 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4536 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4537 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4538 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4539 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4540 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4545 case RELOAD_FOR_INPUT:
4546 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4547 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4550 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4553 /* If it is used for some other input, can't use it. */
4554 for (i = 0; i < reload_n_operands; i++)
4555 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4558 /* If it is used in a later operand's address, can't use it. */
4559 for (i = opnum + 1; i < reload_n_operands; i++)
4560 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4561 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4566 case RELOAD_FOR_INPUT_ADDRESS:
4567 /* Can't use a register if it is used for an input address for this
4568 operand or used as an input in an earlier one. */
4569 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4570 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4573 for (i = 0; i < opnum; i++)
4574 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4579 case RELOAD_FOR_INPADDR_ADDRESS:
4580 /* Can't use a register if it is used for an input address
4581 address for this operand or used as an input in an earlier
4583 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4586 for (i = 0; i < opnum; i++)
4587 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4592 case RELOAD_FOR_OUTPUT_ADDRESS:
4593 /* Can't use a register if it is used for an output address for this
4594 operand or used as an output in this or a later operand. */
4595 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4598 for (i = opnum; i < reload_n_operands; i++)
4599 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4604 case RELOAD_FOR_OUTADDR_ADDRESS:
4605 /* Can't use a register if it is used for an output address
4606 address for this operand or used as an output in this or a
4608 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4611 for (i = opnum; i < reload_n_operands; i++)
4612 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4617 case RELOAD_FOR_OPERAND_ADDRESS:
4618 for (i = 0; i < reload_n_operands; i++)
4619 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4622 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4623 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4625 case RELOAD_FOR_OPADDR_ADDR:
4626 for (i = 0; i < reload_n_operands; i++)
4627 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4630 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4632 case RELOAD_FOR_OUTPUT:
4633 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4634 outputs, or an operand address for this or an earlier output. */
4635 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4638 for (i = 0; i < reload_n_operands; i++)
4639 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4642 for (i = 0; i <= opnum; i++)
4643 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4644 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4649 case RELOAD_FOR_INSN:
4650 for (i = 0; i < reload_n_operands; i++)
4651 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4652 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4655 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4656 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4658 case RELOAD_FOR_OTHER_ADDRESS:
4659 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4664 /* Return 1 if the value in reload reg REGNO, as used by a reload
4665 needed for the part of the insn specified by OPNUM and TYPE,
4666 is not in use for a reload in any prior part of the insn.
4668 We can assume that the reload reg was already tested for availability
4669 at the time it is needed, and we should not check this again,
4670 in case the reg has already been marked in use. */
4673 reload_reg_free_before_p (regno, opnum, type)
4676 enum reload_type type;
4682 case RELOAD_FOR_OTHER_ADDRESS:
4683 /* These always come first. */
4687 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4689 /* If this use is for part of the insn,
4690 check the reg is not in use for any prior part. It is tempting
4691 to try to do this by falling through from objecs that occur
4692 later in the insn to ones that occur earlier, but that will not
4693 correctly take into account the fact that here we MUST ignore
4694 things that would prevent the register from being allocated in
4695 the first place, since we know that it was allocated. */
4697 case RELOAD_FOR_OUTPUT_ADDRESS:
4698 case RELOAD_FOR_OUTADDR_ADDRESS:
4699 /* Earlier reloads are for earlier outputs or their addresses,
4700 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4701 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4703 for (i = 0; i < opnum; i++)
4704 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4705 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4706 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4709 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4712 for (i = 0; i < reload_n_operands; i++)
4713 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4714 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4715 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4718 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4719 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4720 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4722 case RELOAD_FOR_OUTPUT:
4723 /* This can't be used in the output address for this operand and
4724 anything that can't be used for it, except that we've already
4725 tested for RELOAD_FOR_INSN objects. */
4727 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4728 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4731 for (i = 0; i < opnum; i++)
4732 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4733 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4734 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4737 for (i = 0; i < reload_n_operands; i++)
4738 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4739 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4740 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4741 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4744 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4746 case RELOAD_FOR_OPERAND_ADDRESS:
4747 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4748 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4751 /* ... fall through ... */
4753 case RELOAD_FOR_OPADDR_ADDR:
4754 case RELOAD_FOR_INSN:
4755 /* These can't conflict with inputs, or each other, so all we have to
4756 test is input addresses and the addresses of OTHER items. */
4758 for (i = 0; i < reload_n_operands; i++)
4759 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4760 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4763 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4765 case RELOAD_FOR_INPUT:
4766 /* The only things earlier are the address for this and
4767 earlier inputs, other inputs (which we know we don't conflict
4768 with), and addresses of RELOAD_OTHER objects. */
4770 for (i = 0; i <= opnum; i++)
4771 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4772 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4775 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4777 case RELOAD_FOR_INPUT_ADDRESS:
4778 case RELOAD_FOR_INPADDR_ADDRESS:
4779 /* Similarly, all we have to check is for use in earlier inputs'
4781 for (i = 0; i < opnum; i++)
4782 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4783 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4786 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4791 /* Return 1 if the value in reload reg REGNO, as used by a reload
4792 needed for the part of the insn specified by OPNUM and TYPE,
4793 is still available in REGNO at the end of the insn.
4795 We can assume that the reload reg was already tested for availability
4796 at the time it is needed, and we should not check this again,
4797 in case the reg has already been marked in use. */
4800 reload_reg_reaches_end_p (regno, opnum, type)
4803 enum reload_type type;
4810 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4811 its value must reach the end. */
4814 /* If this use is for part of the insn,
4815 its value reaches if no subsequent part uses the same register.
4816 Just like the above function, don't try to do this with lots
4819 case RELOAD_FOR_OTHER_ADDRESS:
4820 /* Here we check for everything else, since these don't conflict
4821 with anything else and everything comes later. */
4823 for (i = 0; i < reload_n_operands; i++)
4824 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4825 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4826 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4827 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4828 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4829 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4832 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4833 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4834 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4836 case RELOAD_FOR_INPUT_ADDRESS:
4837 case RELOAD_FOR_INPADDR_ADDRESS:
4838 /* Similar, except that we check only for this and subsequent inputs
4839 and the address of only subsequent inputs and we do not need
4840 to check for RELOAD_OTHER objects since they are known not to
4843 for (i = opnum; i < reload_n_operands; i++)
4844 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4847 for (i = opnum + 1; i < reload_n_operands; i++)
4848 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4849 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4852 for (i = 0; i < reload_n_operands; i++)
4853 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4854 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4855 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4858 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4861 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4862 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4864 case RELOAD_FOR_INPUT:
4865 /* Similar to input address, except we start at the next operand for
4866 both input and input address and we do not check for
4867 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4870 for (i = opnum + 1; i < reload_n_operands; i++)
4871 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4872 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4873 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4876 /* ... fall through ... */
4878 case RELOAD_FOR_OPERAND_ADDRESS:
4879 /* Check outputs and their addresses. */
4881 for (i = 0; i < reload_n_operands; i++)
4882 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4883 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4884 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4889 case RELOAD_FOR_OPADDR_ADDR:
4890 for (i = 0; i < reload_n_operands; i++)
4891 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4892 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4893 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4896 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4897 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4899 case RELOAD_FOR_INSN:
4900 /* These conflict with other outputs with RELOAD_OTHER. So
4901 we need only check for output addresses. */
4905 /* ... fall through ... */
4907 case RELOAD_FOR_OUTPUT:
4908 case RELOAD_FOR_OUTPUT_ADDRESS:
4909 case RELOAD_FOR_OUTADDR_ADDRESS:
4910 /* We already know these can't conflict with a later output. So the
4911 only thing to check are later output addresses. */
4912 for (i = opnum + 1; i < reload_n_operands; i++)
4913 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4914 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4923 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4926 This function uses the same algorithm as reload_reg_free_p above. */
4929 reloads_conflict (r1, r2)
4932 enum reload_type r1_type = reload_when_needed[r1];
4933 enum reload_type r2_type = reload_when_needed[r2];
4934 int r1_opnum = reload_opnum[r1];
4935 int r2_opnum = reload_opnum[r2];
4937 /* RELOAD_OTHER conflicts with everything. */
4938 if (r2_type == RELOAD_OTHER)
4941 /* Otherwise, check conflicts differently for each type. */
4945 case RELOAD_FOR_INPUT:
4946 return (r2_type == RELOAD_FOR_INSN
4947 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4948 || r2_type == RELOAD_FOR_OPADDR_ADDR
4949 || r2_type == RELOAD_FOR_INPUT
4950 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4951 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4952 && r2_opnum > r1_opnum));
4954 case RELOAD_FOR_INPUT_ADDRESS:
4955 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4956 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4958 case RELOAD_FOR_INPADDR_ADDRESS:
4959 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4960 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4962 case RELOAD_FOR_OUTPUT_ADDRESS:
4963 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4964 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4966 case RELOAD_FOR_OUTADDR_ADDRESS:
4967 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4968 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4970 case RELOAD_FOR_OPERAND_ADDRESS:
4971 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4972 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4974 case RELOAD_FOR_OPADDR_ADDR:
4975 return (r2_type == RELOAD_FOR_INPUT
4976 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4978 case RELOAD_FOR_OUTPUT:
4979 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4980 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4981 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4982 && r2_opnum >= r1_opnum));
4984 case RELOAD_FOR_INSN:
4985 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4986 || r2_type == RELOAD_FOR_INSN
4987 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4989 case RELOAD_FOR_OTHER_ADDRESS:
4990 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5000 /* Vector of reload-numbers showing the order in which the reloads should
5002 short reload_order[MAX_RELOADS];
5004 /* Indexed by reload number, 1 if incoming value
5005 inherited from previous insns. */
5006 char reload_inherited[MAX_RELOADS];
5008 /* For an inherited reload, this is the insn the reload was inherited from,
5009 if we know it. Otherwise, this is 0. */
5010 rtx reload_inheritance_insn[MAX_RELOADS];
5012 /* If non-zero, this is a place to get the value of the reload,
5013 rather than using reload_in. */
5014 rtx reload_override_in[MAX_RELOADS];
5016 /* For each reload, the index in spill_regs of the spill register used,
5017 or -1 if we did not need one of the spill registers for this reload. */
5018 int reload_spill_index[MAX_RELOADS];
5020 /* Find a spill register to use as a reload register for reload R.
5021 LAST_RELOAD is non-zero if this is the last reload for the insn being
5024 Set reload_reg_rtx[R] to the register allocated.
5026 If NOERROR is nonzero, we return 1 if successful,
5027 or 0 if we couldn't find a spill reg and we didn't change anything. */
/* NOTE(review): this listing is fragmentary -- many original lines are
   elided -- and the function header comment above is truncated.  From what
   is visible: R is the reload number for INSN; LAST_RELOAD appears to be
   nonzero when no further reloads follow (grouping is then relaxed, see
   force_group below) -- TODO confirm against the full source; NOERROR
   selects returning 1/0 status instead of a fatal error (per the comment
   above).  */
5030 allocate_reload_reg (r, insn, last_reload, noerror)
5042 /* If we put this reload ahead, thinking it is a group,
5043 then insist on finding a group. Otherwise we can grab a
5044 reg that some other reload needs.
5045 (That can happen when we have a 68000 DATA_OR_FP_REG
5046 which is a group of data regs or one fp reg.)
5047 We need not be so restrictive if there are no more reloads
5050 ??? Really it would be nicer to have smarter handling
5051 for that kind of reg class, where a problem like this is normal.
5052 Perhaps those classes should be avoided for reloading
5053 by use of more alternatives. */
5055 int force_group = reload_nregs[r] > 1 && ! last_reload;
5057 /* If we want a single register and haven't yet found one,
5058 take any reg in the right class and not in use.
5059 If we want a consecutive group, here is where we look for it.
5061 We use two passes so we can first look for reload regs to
5062 reuse, which are already in use for other reloads in this insn,
5063 and only then use additional registers.
5064 I think that maximizing reuse is needed to make sure we don't
5065 run out of reload regs. Suppose we have three reloads, and
5066 reloads A and B can share regs. These need two regs.
5067 Suppose A and B are given different regs.
5068 That leaves none for C. */
/* Pass 0 prefers spill regs already in use for other reloads of this insn
   (reuse); pass 1 admits unshared registers -- see the comment above.  */
5069 for (pass = 0; pass < 2; pass++)
5071 /* I is the index in spill_regs.
5072 We advance it round-robin between insns to use all spill regs
5073 equally, so that inherited reloads have a chance
5074 of leapfrogging each other. Don't do this, however, when we have
5075 group needs and failure would be fatal; if we only have a relatively
5076 small number of spill registers, and more than one of them has
5077 group needs, then by starting in the middle, we may end up
5078 allocating the first one in such a way that we are not left with
5079 sufficient groups to handle the rest. */
5081 if (noerror || ! force_group)
5086 for (count = 0; count < n_spills; count++)
5088 int class = (int) reload_reg_class[r];
5090 i = (i + 1) % n_spills;
5092 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
5093 reload_when_needed[r])
5094 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5095 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5096 /* Look first for regs to share, then for unshared. But
5097 don't share regs used for inherited reloads; they are
5098 the ones we want to preserve. */
5100 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5102 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5105 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5106 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5107 (on 68000) got us two FP regs. If NR is 1,
5108 we would reject both of them. */
5110 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5111 /* If we need only one reg, we have already won. */
5114 /* But reject a single reg if we demand a group. */
5119 /* Otherwise check that as many consecutive regs as we need
5121 Also, don't use for a group registers that are
5122 needed for nongroups. */
5123 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5126 regno = spill_regs[i] + nr - 1;
5127 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5128 && spill_reg_order[regno] >= 0
5129 && reload_reg_free_p (regno, reload_opnum[r],
5130 reload_when_needed[r])
5131 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5141 /* If we found something on pass 1, omit pass 2. */
5142 if (count < n_spills)
5146 /* We should have found a spill register by now. */
5147 if (count == n_spills)
5154 /* I is the index in SPILL_REG_RTX of the reload register we are to
5155 allocate. Get an rtx for it and find its register number. */
5157 new = spill_reg_rtx[i];
5159 if (new == 0 || GET_MODE (new) != reload_mode[r])
5160 spill_reg_rtx[i] = new
5161 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
5163 regno = true_regnum (new);
5165 /* Detect when the reload reg can't hold the reload mode.
5166 This used to be one `if', but Sequent compiler can't handle that. */
5167 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5169 enum machine_mode test_mode = VOIDmode;
5171 test_mode = GET_MODE (reload_in[r]);
5172 /* If reload_in[r] has VOIDmode, it means we will load it
5173 in whatever mode the reload reg has: to wit, reload_mode[r].
5174 We have already tested that for validity. */
5175 /* Aside from that, we need to test that the expressions
5176 to reload from or into have modes which are valid for this
5177 reload register. Otherwise the reload insns would be invalid. */
5178 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5179 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5180 if (! (reload_out[r] != 0
5181 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5183 /* The reg is OK. */
5186 /* Mark as in use for this insn the reload regs we use
5188 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5189 reload_when_needed[r], reload_mode[r]);
5191 reload_reg_rtx[r] = new;
5192 reload_spill_index[r] = i;
5197 /* The reg is not OK. */
5202 if (asm_noperands (PATTERN (insn)) < 0)
5203 /* It's the compiler's fault. */
5204 fatal_insn ("Could not find a spill register", insn);
5206 /* It's the user's fault; the operand's mode and constraint
5207 don't match. Disable this reload so we don't crash in final. */
5208 error_for_asm (insn,
5209 "`asm' operand constraint incompatible with operand size");
/* Neutralize the reload: clear its reg rtx and mark it optional/secondary
   so later passes skip it (see "Disable this reload" above).  */
5212 reload_reg_rtx[r] = 0;
5213 reload_optional[r] = 1;
5214 reload_secondary_p[r] = 1;
5219 /* Assign hard reg targets for the pseudo-registers we must reload
5220 into hard regs for this insn.
5221 Also output the instructions to copy them in and out of the hard regs.
5223 For machines with register classes, we are responsible for
5224 finding a reload reg in the proper class. */
/* NOTE(review): fragmentary listing -- many original lines are elided.
   INSN is the insn whose reloads are being assigned hard registers;
   AVOID_RETURN_REG, when nonzero (SMALL_REGISTER_CLASSES targets), is a
   register to keep out of the reload-reg pool if possible.  */
5227 choose_reload_regs (insn, avoid_return_reg)
5229 rtx avoid_return_reg;
5232 int max_group_size = 1;
5233 enum reg_class group_class = NO_REGS;
/* Saved copies of the whole reload-allocation state.  A first attempt is
   made with inheritance enabled; on failure, these snapshots are copied
   back (see the restore code near the end) and allocation is retried
   without inheritance.  */
5236 rtx save_reload_reg_rtx[MAX_RELOADS];
5237 char save_reload_inherited[MAX_RELOADS];
5238 rtx save_reload_inheritance_insn[MAX_RELOADS];
5239 rtx save_reload_override_in[MAX_RELOADS];
5240 int save_reload_spill_index[MAX_RELOADS];
5241 HARD_REG_SET save_reload_reg_used;
5242 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5243 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5244 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5245 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5246 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5247 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5248 HARD_REG_SET save_reload_reg_used_in_op_addr;
5249 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5250 HARD_REG_SET save_reload_reg_used_in_insn;
5251 HARD_REG_SET save_reload_reg_used_in_other_addr;
5252 HARD_REG_SET save_reload_reg_used_at_all;
/* Reset the per-insn inheritance/override records and every
   reload-register usage set before choosing registers.  */
5254 bzero (reload_inherited, MAX_RELOADS);
5255 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5256 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5258 CLEAR_HARD_REG_SET (reload_reg_used);
5259 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5260 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5261 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5262 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5263 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5265 for (i = 0; i < reload_n_operands; i++)
5267 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5268 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5269 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5270 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5271 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5272 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5275 /* Don't bother with avoiding the return reg
5276 if we have no mandatory reload that could use it. */
5277 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5280 int regno = REGNO (avoid_return_reg);
5282 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5285 for (r = regno; r < regno + nregs; r++)
5286 if (spill_reg_order[r] >= 0)
5287 for (j = 0; j < n_reloads; j++)
5288 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5289 && (reload_in[j] != 0 || reload_out[j] != 0
5290 || reload_secondary_p[j])
5292 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5295 avoid_return_reg = 0;
5298 #if 0 /* Not needed, now that we can always retry without inheritance. */
5299 /* See if we have more mandatory reloads than spill regs.
5300 If so, then we cannot risk optimizations that could prevent
5301 reloads from sharing one spill register.
5303 Since we will try finding a better register than reload_reg_rtx
5304 unless it is equal to reload_in or reload_out, count such reloads. */
5307 int tem = SMALL_REGISTER_CLASSES? (avoid_return_reg != 0): 0;
5308 for (j = 0; j < n_reloads; j++)
5309 if (! reload_optional[j]
5310 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5311 && (reload_reg_rtx[j] == 0
5312 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5313 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5320 /* Don't use the subroutine call return reg for a reload
5321 if we are supposed to avoid it. */
5322 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5324 int regno = REGNO (avoid_return_reg);
5326 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5329 for (r = regno; r < regno + nregs; r++)
5330 if (spill_reg_order[r] >= 0)
5331 SET_HARD_REG_BIT (reload_reg_used, r);
5334 /* In order to be certain of getting the registers we need,
5335 we must sort the reloads into order of increasing register class.
5336 Then our grabbing of reload registers will parallel the process
5337 that provided the reload registers.
5339 Also note whether any of the reloads wants a consecutive group of regs.
5340 If so, record the maximum size of the group desired and what
5341 register class contains all the groups needed by this insn. */
5343 for (j = 0; j < n_reloads; j++)
5345 reload_order[j] = j;
5346 reload_spill_index[j] = -1;
5349 = (reload_inmode[j] == VOIDmode
5350 || (GET_MODE_SIZE (reload_outmode[j])
5351 > GET_MODE_SIZE (reload_inmode[j])))
5352 ? reload_outmode[j] : reload_inmode[j];
5354 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5356 if (reload_nregs[j] > 1)
5358 max_group_size = MAX (reload_nregs[j], max_group_size);
5359 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5362 /* If we have already decided to use a certain register,
5363 don't use it in another way. */
5364 if (reload_reg_rtx[j])
5365 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5366 reload_when_needed[j], reload_mode[j]);
/* Order reloads so the tighter register classes are allocated first
   (see "sort the reloads into order of increasing register class"
   above).  */
5370 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
/* Snapshot the whole allocation state so a failed inheritance attempt
   can be rolled back below.  */
5372 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5373 sizeof reload_reg_rtx);
5374 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5375 bcopy ((char *) reload_inheritance_insn,
5376 (char *) save_reload_inheritance_insn,
5377 sizeof reload_inheritance_insn);
5378 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5379 sizeof reload_override_in);
5380 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5381 sizeof reload_spill_index);
5382 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5383 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5384 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5385 reload_reg_used_in_op_addr);
5387 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5388 reload_reg_used_in_op_addr_reload);
5390 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5391 reload_reg_used_in_insn);
5392 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5393 reload_reg_used_in_other_addr);
5395 for (i = 0; i < reload_n_operands; i++)
5397 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5398 reload_reg_used_in_output[i]);
5399 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5400 reload_reg_used_in_input[i]);
5401 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5402 reload_reg_used_in_input_addr[i]);
5403 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5404 reload_reg_used_in_inpaddr_addr[i]);
5405 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5406 reload_reg_used_in_output_addr[i]);
5407 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5408 reload_reg_used_in_outaddr_addr[i]);
5411 /* If -O, try first with inheritance, then turning it off.
5412 If not -O, don't do inheritance.
5413 Using inheritance when not optimizing leads to paradoxes
5414 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5415 because one side of the comparison might be inherited. */
/* Iteration 1 (only when optimizing) runs with inheritance enabled; if it
   fails, the saved state is restored below and iteration 2 runs with
   inheritance disabled.  */
5417 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5419 /* Process the reloads in order of preference just found.
5420 Beyond this point, subregs can be found in reload_reg_rtx.
5422 This used to look for an existing reloaded home for all
5423 of the reloads, and only then perform any new reloads.
5424 But that could lose if the reloads were done out of reg-class order
5425 because a later reload with a looser constraint might have an old
5426 home in a register needed by an earlier reload with a tighter constraint.
5428 To solve this, we make two passes over the reloads, in the order
5429 described above. In the first pass we try to inherit a reload
5430 from a previous insn. If there is a later reload that needs a
5431 class that is a proper subset of the class being processed, we must
5432 also allocate a spill register during the first pass.
5434 Then make a second pass over the reloads to allocate any reloads
5435 that haven't been given registers yet. */
5437 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
/* First pass over the reloads: try inheritance / existing equivalents.  */
5439 for (j = 0; j < n_reloads; j++)
5441 register int r = reload_order[j];
5443 /* Ignore reloads that got marked inoperative. */
5444 if (reload_out[r] == 0 && reload_in[r] == 0
5445 && ! reload_secondary_p[r])
5448 /* If find_reloads chose a to use reload_in or reload_out as a reload
5449 register, we don't need to chose one. Otherwise, try even if it
5450 found one since we might save an insn if we find the value lying
5452 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5453 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5454 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5457 #if 0 /* No longer needed for correct operation.
5458 It might give better code, or might not; worth an experiment? */
5459 /* If this is an optional reload, we can't inherit from earlier insns
5460 until we are sure that any non-optional reloads have been allocated.
5461 The following code takes advantage of the fact that optional reloads
5462 are at the end of reload_order. */
5463 if (reload_optional[r] != 0)
5464 for (i = 0; i < j; i++)
5465 if ((reload_out[reload_order[i]] != 0
5466 || reload_in[reload_order[i]] != 0
5467 || reload_secondary_p[reload_order[i]])
5468 && ! reload_optional[reload_order[i]]
5469 && reload_reg_rtx[reload_order[i]] == 0)
5470 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5473 /* First see if this pseudo is already available as reloaded
5474 for a previous insn. We cannot try to inherit for reloads
5475 that are smaller than the maximum number of registers needed
5476 for groups unless the register we would allocate cannot be used
5479 We could check here to see if this is a secondary reload for
5480 an object that is already in a register of the desired class.
5481 This would avoid the need for the secondary reload register.
5482 But this is complex because we can't easily determine what
5483 objects might want to be loaded via this reload. So let a
5484 register be allocated here. In `emit_reload_insns' we suppress
5485 one of the loads in the case described above. */
5489 register int regno = -1;
5490 enum machine_mode mode;
5492 if (reload_in[r] == 0)
5494 else if (GET_CODE (reload_in[r]) == REG)
5496 regno = REGNO (reload_in[r]);
5497 mode = GET_MODE (reload_in[r]);
5499 else if (GET_CODE (reload_in_reg[r]) == REG)
5501 regno = REGNO (reload_in_reg[r]);
5502 mode = GET_MODE (reload_in_reg[r]);
5505 /* This won't work, since REGNO can be a pseudo reg number.
5506 Also, it takes much more hair to keep track of all the things
5507 that can invalidate an inherited reload of part of a pseudoreg. */
5508 else if (GET_CODE (reload_in[r]) == SUBREG
5509 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5510 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
/* If REGNO was reloaded by an earlier insn into a spill reg that still
   holds its value in a suitable mode/class, reuse (inherit) that reg.  */
5513 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5515 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5517 if (reg_reloaded_contents[i] == regno
5518 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5519 >= GET_MODE_SIZE (mode))
5520 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5521 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5523 && (reload_nregs[r] == max_group_size
5524 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5526 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5527 reload_when_needed[r])
5528 && reload_reg_free_before_p (spill_regs[i],
5530 reload_when_needed[r]))
5532 /* If a group is needed, verify that all the subsequent
5533 registers still have their values intact. */
5535 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5538 for (k = 1; k < nr; k++)
5539 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5547 /* We found a register that contains the
5548 value we need. If this register is the
5549 same as an `earlyclobber' operand of the
5550 current insn, just mark it as a place to
5551 reload from since we can't use it as the
5552 reload register itself. */
5554 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5555 if (reg_overlap_mentioned_for_reload_p
5556 (reg_last_reload_reg[regno],
5557 reload_earlyclobbers[i1]))
5560 if (i1 != n_earlyclobbers
5561 /* Don't really use the inherited spill reg
5562 if we need it wider than we've got it. */
5563 || (GET_MODE_SIZE (reload_mode[r])
5564 > GET_MODE_SIZE (mode)))
5565 reload_override_in[r] = reg_last_reload_reg[regno];
5569 /* We can use this as a reload reg. */
5570 /* Mark the register as in use for this part of
5572 mark_reload_reg_in_use (spill_regs[i],
5574 reload_when_needed[r],
5576 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5577 reload_inherited[r] = 1;
5578 reload_inheritance_insn[r]
5579 = reg_reloaded_insn[i];
5580 reload_spill_index[r] = i;
5581 for (k = 0; k < nr; k++)
5582 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5590 /* Here's another way to see if the value is already lying around. */
5592 && reload_in[r] != 0
5593 && ! reload_inherited[r]
5594 && reload_out[r] == 0
5595 && (CONSTANT_P (reload_in[r])
5596 || GET_CODE (reload_in[r]) == PLUS
5597 || GET_CODE (reload_in[r]) == REG
5598 || GET_CODE (reload_in[r]) == MEM)
5599 && (reload_nregs[r] == max_group_size
5600 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5603 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5604 -1, NULL_PTR, 0, reload_mode[r]);
5609 if (GET_CODE (equiv) == REG)
5610 regno = REGNO (equiv);
5611 else if (GET_CODE (equiv) == SUBREG)
5613 /* This must be a SUBREG of a hard register.
5614 Make a new REG since this might be used in an
5615 address and not all machines support SUBREGs
5617 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5618 equiv = gen_rtx (REG, reload_mode[r], regno);
5624 /* If we found a spill reg, reject it unless it is free
5625 and of the desired class. */
5627 && ((spill_reg_order[regno] >= 0
5628 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5629 reload_when_needed[r]))
5630 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5634 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5637 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5640 /* We found a register that contains the value we need.
5641 If this register is the same as an `earlyclobber' operand
5642 of the current insn, just mark it as a place to reload from
5643 since we can't use it as the reload register itself. */
5646 for (i = 0; i < n_earlyclobbers; i++)
5647 if (reg_overlap_mentioned_for_reload_p (equiv,
5648 reload_earlyclobbers[i]))
5650 reload_override_in[r] = equiv;
5655 /* JRV: If the equiv register we have found is
5656 explicitly clobbered in the current insn, mark but
5657 don't use, as above. */
5659 if (equiv != 0 && regno_clobbered_p (regno, insn))
5661 reload_override_in[r] = equiv;
5665 /* If we found an equivalent reg, say no code need be generated
5666 to load it, and use it as our reload reg. */
5667 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5669 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5671 reload_reg_rtx[r] = equiv;
5672 reload_inherited[r] = 1;
5674 /* If any of the hard registers in EQUIV are spill
5675 registers, mark them as in use for this insn. */
5676 for (k = 0; k < nr; k++)
5678 i = spill_reg_order[regno + k];
5681 mark_reload_reg_in_use (regno, reload_opnum[r],
5682 reload_when_needed[r],
5684 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5691 /* If we found a register to use already, or if this is an optional
5692 reload, we are done. */
5693 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5696 #if 0 /* No longer needed for correct operation. Might or might not
5697 give better code on the average. Want to experiment? */
5699 /* See if there is a later reload that has a class different from our
5700 class that intersects our class or that requires less register
5701 than our reload. If so, we must allocate a register to this
5702 reload now, since that reload might inherit a previous reload
5703 and take the only available register in our class. Don't do this
5704 for optional reloads since they will force all previous reloads
5705 to be allocated. Also don't do this for reloads that have been
5708 for (i = j + 1; i < n_reloads; i++)
5710 int s = reload_order[i];
5712 if ((reload_in[s] == 0 && reload_out[s] == 0
5713 && ! reload_secondary_p[s])
5714 || reload_optional[s])
5717 if ((reload_reg_class[s] != reload_reg_class[r]
5718 && reg_classes_intersect_p (reload_reg_class[r],
5719 reload_reg_class[s]))
5720 || reload_nregs[s] < reload_nregs[r])
5727 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5731 /* Now allocate reload registers for anything non-optional that
5732 didn't get one yet. */
5733 for (j = 0; j < n_reloads; j++)
5735 register int r = reload_order[j];
5737 /* Ignore reloads that got marked inoperative. */
5738 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5741 /* Skip reloads that already have a register allocated or are
5743 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5746 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5750 /* If that loop got all the way, we have won. */
5755 /* Loop around and try without any inheritance. */
5756 /* First undo everything done by the failed attempt
5757 to allocate with inheritance. */
5758 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5759 sizeof reload_reg_rtx);
5760 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5761 sizeof reload_inherited);
5762 bcopy ((char *) save_reload_inheritance_insn,
5763 (char *) reload_inheritance_insn,
5764 sizeof reload_inheritance_insn);
5765 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5766 sizeof reload_override_in);
5767 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5768 sizeof reload_spill_index);
5769 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5770 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5771 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5772 save_reload_reg_used_in_op_addr);
5773 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5774 save_reload_reg_used_in_op_addr_reload);
5775 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5776 save_reload_reg_used_in_insn);
5777 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5778 save_reload_reg_used_in_other_addr);
5780 for (i = 0; i < reload_n_operands; i++)
5782 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5783 save_reload_reg_used_in_input[i]);
5784 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5785 save_reload_reg_used_in_output[i]);
5786 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5787 save_reload_reg_used_in_input_addr[i]);
5788 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5789 save_reload_reg_used_in_inpaddr_addr[i]);
5790 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5791 save_reload_reg_used_in_output_addr[i]);
5792 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5793 save_reload_reg_used_in_outaddr_addr[i]);
5797 /* If we thought we could inherit a reload, because it seemed that
5798 nothing else wanted the same reload register earlier in the insn,
5799 verify that assumption, now that all reloads have been assigned. */
5801 for (j = 0; j < n_reloads; j++)
5803 register int r = reload_order[j];
5805 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5806 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5808 reload_when_needed[r]))
5809 reload_inherited[r] = 0;
5811 /* If we found a better place to reload from,
5812 validate it in the same fashion, if it is a reload reg. */
5813 if (reload_override_in[r]
5814 && (GET_CODE (reload_override_in[r]) == REG
5815 || GET_CODE (reload_override_in[r]) == SUBREG))
5817 int regno = true_regnum (reload_override_in[r]);
5818 if (spill_reg_order[regno] >= 0
5819 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5820 reload_when_needed[r]))
5821 reload_override_in[r] = 0;
5825 /* Now that reload_override_in is known valid,
5826 actually override reload_in. */
5827 for (j = 0; j < n_reloads; j++)
5828 if (reload_override_in[j])
5829 reload_in[j] = reload_override_in[j];
5831 /* If this reload won't be done because it has been cancelled or is
5832 optional and not inherited, clear reload_reg_rtx so other
5833 routines (such as subst_reloads) don't get confused. */
5834 for (j = 0; j < n_reloads; j++)
5835 if (reload_reg_rtx[j] != 0
5836 && ((reload_optional[j] && ! reload_inherited[j])
5837 || (reload_in[j] == 0 && reload_out[j] == 0
5838 && ! reload_secondary_p[j])))
5840 int regno = true_regnum (reload_reg_rtx[j]);
5842 if (spill_reg_order[regno] >= 0)
5843 clear_reload_reg_in_use (regno, reload_opnum[j],
5844 reload_when_needed[j], reload_mode[j]);
5845 reload_reg_rtx[j] = 0;
5848 /* Record which pseudos and which spill regs have output reloads. */
5849 for (j = 0; j < n_reloads; j++)
5851 register int r = reload_order[j];
5853 i = reload_spill_index[r];
5855 /* I is nonneg if this reload used one of the spill regs.
5856 If reload_reg_rtx[r] is 0, this is an optional reload
5857 that we opted to ignore. */
5858 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5859 && reload_reg_rtx[r] != 0)
5861 register int nregno = REGNO (reload_out[r]);
5864 if (nregno < FIRST_PSEUDO_REGISTER)
5865 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5868 reg_has_output_reload[nregno + nr] = 1;
5872 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5874 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5877 if (reload_when_needed[r] != RELOAD_OTHER
5878 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5879 && reload_when_needed[r] != RELOAD_FOR_INSN)
5885 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
5886 reloads of the same item for fear that we might not have enough reload
5887 registers. However, normally they will get the same reload register
5888 and hence actually need not be loaded twice.
5890 Here we check for the most common case of this phenomenon: when we have
5891 a number of reloads for the same object, each of which were allocated
5892 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5893 reload, and is not modified in the insn itself. If we find such,
5894 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5895 This will not increase the number of spill registers needed and will
5896 prevent redundant code. */
/* NOTE(review): fragmentary listing.  INSN is the insn whose assigned
   reloads are examined; the merge strategy is described in the comment
   immediately above this function.  */
5899 merge_assigned_reloads (insn)
5904 /* Scan all the reloads looking for ones that only load values and
5905 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5906 assigned and not modified by INSN. */
5908 for (i = 0; i < n_reloads; i++)
5910 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5911 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5912 || reg_set_p (reload_reg_rtx[i], insn))
5915 /* Look at all other reloads. Ensure that the only use of this
5916 reload_reg_rtx is in a reload that just loads the same value
5917 as we do. Note that any secondary reloads must be of the identical
5918 class since the values, modes, and result registers are the
5919 same, so we need not do anything with any secondary reloads. */
5921 for (j = 0; j < n_reloads; j++)
5923 if (i == j || reload_reg_rtx[j] == 0
5924 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5928 /* If the reload regs aren't exactly the same (e.g, different modes)
5929 or if the values are different, we can't merge anything with this
5932 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5933 || reload_out[j] != 0 || reload_in[j] == 0
5934 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5938 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5939 we, in fact, found any matching reloads. */
5943 for (j = 0; j < n_reloads; j++)
5944 if (i != j && reload_reg_rtx[j] != 0
5945 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5947 reload_when_needed[i] = RELOAD_OTHER;
5949 transfer_replacements (i, j);
5952 /* If this is now RELOAD_OTHER, look for any reloads that load
5953 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5954 if they were for inputs, RELOAD_OTHER for outputs. Note that
5955 this test is equivalent to looking for reloads for this operand
5958 if (reload_when_needed[i] == RELOAD_OTHER)
5959 for (j = 0; j < n_reloads; j++)
5960 if (reload_in[j] != 0
5961 && reload_when_needed[i] != RELOAD_OTHER
5962 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5964 reload_when_needed[j]
5965 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5966 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
5967 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
5973 /* Output insns to reload values in and out of the chosen reload regs. */
5976 emit_reload_insns (insn)
5980 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5981 rtx other_input_address_reload_insns = 0;
5982 rtx other_input_reload_insns = 0;
5983 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5984 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5985 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5986 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5987 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
5988 rtx operand_reload_insns = 0;
5989 rtx other_operand_reload_insns = 0;
5990 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5991 rtx following_insn = NEXT_INSN (insn);
5992 rtx before_insn = insn;
5994 /* Values to be put in spill_reg_store are put here first. */
5995 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5997 for (j = 0; j < reload_n_operands; j++)
5998 input_reload_insns[j] = input_address_reload_insns[j]
5999 = inpaddr_address_reload_insns[j]
6000 = output_reload_insns[j] = output_address_reload_insns[j]
6001 = outaddr_address_reload_insns[j]
6002 = other_output_reload_insns[j] = 0;
6004 /* Now output the instructions to copy the data into and out of the
6005 reload registers. Do these in the order that the reloads were reported,
6006 since reloads of base and index registers precede reloads of operands
6007 and the operands may need the base and index registers reloaded. */
6009 for (j = 0; j < n_reloads; j++)
6012 rtx oldequiv_reg = 0;
6013 rtx this_reload_insn = 0;
6015 if (reload_spill_index[j] >= 0)
6016 new_spill_reg_store[reload_spill_index[j]] = 0;
6019 if (old != 0 && ! reload_inherited[j]
6020 && ! rtx_equal_p (reload_reg_rtx[j], old)
6021 && reload_reg_rtx[j] != 0)
6023 register rtx reloadreg = reload_reg_rtx[j];
6025 enum machine_mode mode;
6028 /* Determine the mode to reload in.
6029 This is very tricky because we have three to choose from.
6030 There is the mode the insn operand wants (reload_inmode[J]).
6031 There is the mode of the reload register RELOADREG.
6032 There is the intrinsic mode of the operand, which we could find
6033 by stripping some SUBREGs.
6034 It turns out that RELOADREG's mode is irrelevant:
6035 we can change that arbitrarily.
6037 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6038 then the reload reg may not support QImode moves, so use SImode.
6039 If foo is in memory due to spilling a pseudo reg, this is safe,
6040 because the QImode value is in the least significant part of a
6041 slot big enough for a SImode. If foo is some other sort of
6042 memory reference, then it is impossible to reload this case,
6043 so previous passes had better make sure this never happens.
6045 Then consider a one-word union which has SImode and one of its
6046 members is a float, being fetched as (SUBREG:SF union:SI).
6047 We must fetch that as SFmode because we could be loading into
6048 a float-only register. In this case OLD's mode is correct.
6050 Consider an immediate integer: it has VOIDmode. Here we need
6051 to get a mode from something else.
6053 In some cases, there is a fourth mode, the operand's
6054 containing mode. If the insn specifies a containing mode for
6055 this operand, it overrides all others.
6057 I am not sure whether the algorithm here is always right,
6058 but it does the right things in those cases. */
6060 mode = GET_MODE (old);
6061 if (mode == VOIDmode)
6062 mode = reload_inmode[j];
6064 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6065 /* If we need a secondary register for this operation, see if
6066 the value is already in a register in that class. Don't
6067 do this if the secondary register will be used as a scratch
6070 if (reload_secondary_in_reload[j] >= 0
6071 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6074 = find_equiv_reg (old, insn,
6075 reload_reg_class[reload_secondary_in_reload[j]],
6076 -1, NULL_PTR, 0, mode);
6079 /* If reloading from memory, see if there is a register
6080 that already holds the same value. If so, reload from there.
6081 We can pass 0 as the reload_reg_p argument because
6082 any other reload has either already been emitted,
6083 in which case find_equiv_reg will see the reload-insn,
6084 or has yet to be emitted, in which case it doesn't matter
6085 because we will use this equiv reg right away. */
6087 if (oldequiv == 0 && optimize
6088 && (GET_CODE (old) == MEM
6089 || (GET_CODE (old) == REG
6090 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6091 && reg_renumber[REGNO (old)] < 0)))
6092 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6093 -1, NULL_PTR, 0, mode);
6097 int regno = true_regnum (oldequiv);
6099 /* If OLDEQUIV is a spill register, don't use it for this
6100 if any other reload needs it at an earlier stage of this insn
6101 or at this stage. */
6102 if (spill_reg_order[regno] >= 0
6103 && (! reload_reg_free_p (regno, reload_opnum[j],
6104 reload_when_needed[j])
6105 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6106 reload_when_needed[j])))
6109 /* If OLDEQUIV is not a spill register,
6110 don't use it if any other reload wants it. */
6111 if (spill_reg_order[regno] < 0)
6114 for (k = 0; k < n_reloads; k++)
6115 if (reload_reg_rtx[k] != 0 && k != j
6116 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6124 /* If it is no cheaper to copy from OLDEQUIV into the
6125 reload register than it would be to move from memory,
6126 don't use it. Likewise, if we need a secondary register
6130 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6131 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6132 reload_reg_class[j])
6133 >= MEMORY_MOVE_COST (mode)))
6134 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6135 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6139 #ifdef SECONDARY_MEMORY_NEEDED
6140 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6141 REGNO_REG_CLASS (regno),
6150 else if (GET_CODE (oldequiv) == REG)
6151 oldequiv_reg = oldequiv;
6152 else if (GET_CODE (oldequiv) == SUBREG)
6153 oldequiv_reg = SUBREG_REG (oldequiv);
6155 /* If we are reloading from a register that was recently stored in
6156 with an output-reload, see if we can prove there was
6157 actually no need to store the old value in it. */
6159 if (optimize && GET_CODE (oldequiv) == REG
6160 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6161 && spill_reg_order[REGNO (oldequiv)] >= 0
6162 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
6163 && find_reg_note (insn, REG_DEAD, reload_in[j])
6164 /* This is unsafe if operand occurs more than once in current
6165 insn. Perhaps some occurrences weren't reloaded. */
6166 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6167 delete_output_reload
6168 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
6170 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6171 then load RELOADREG from OLDEQUIV. Note that we cannot use
6172 gen_lowpart_common since it can do the wrong thing when
6173 RELOADREG has a multi-word mode. Note that RELOADREG
6174 must always be a REG here. */
6176 if (GET_MODE (reloadreg) != mode)
6177 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6178 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6179 oldequiv = SUBREG_REG (oldequiv);
6180 if (GET_MODE (oldequiv) != VOIDmode
6181 && mode != GET_MODE (oldequiv))
6182 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
6184 /* Switch to the right place to emit the reload insns. */
6185 switch (reload_when_needed[j])
6188 where = &other_input_reload_insns;
6190 case RELOAD_FOR_INPUT:
6191 where = &input_reload_insns[reload_opnum[j]];
6193 case RELOAD_FOR_INPUT_ADDRESS:
6194 where = &input_address_reload_insns[reload_opnum[j]];
6196 case RELOAD_FOR_INPADDR_ADDRESS:
6197 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6199 case RELOAD_FOR_OUTPUT_ADDRESS:
6200 where = &output_address_reload_insns[reload_opnum[j]];
6202 case RELOAD_FOR_OUTADDR_ADDRESS:
6203 where = &outaddr_address_reload_insns[reload_opnum[j]];
6205 case RELOAD_FOR_OPERAND_ADDRESS:
6206 where = &operand_reload_insns;
6208 case RELOAD_FOR_OPADDR_ADDR:
6209 where = &other_operand_reload_insns;
6211 case RELOAD_FOR_OTHER_ADDRESS:
6212 where = &other_input_address_reload_insns;
6218 push_to_sequence (*where);
6221 /* Auto-increment addresses must be reloaded in a special way. */
6222 if (GET_CODE (oldequiv) == POST_INC
6223 || GET_CODE (oldequiv) == POST_DEC
6224 || GET_CODE (oldequiv) == PRE_INC
6225 || GET_CODE (oldequiv) == PRE_DEC)
6227 /* We are not going to bother supporting the case where an
6228 incremented register can't be copied directly from
6229 OLDEQUIV since this seems highly unlikely. */
6230 if (reload_secondary_in_reload[j] >= 0)
6232 /* Prevent normal processing of this reload. */
6234 /* Output a special code sequence for this case. */
6235 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6238 /* If we are reloading a pseudo-register that was set by the previous
6239 insn, see if we can get rid of that pseudo-register entirely
6240 by redirecting the previous insn into our reload register. */
6242 else if (optimize && GET_CODE (old) == REG
6243 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6244 && dead_or_set_p (insn, old)
6245 /* This is unsafe if some other reload
6246 uses the same reg first. */
6247 && reload_reg_free_before_p (REGNO (reloadreg),
6249 reload_when_needed[j]))
6251 rtx temp = PREV_INSN (insn);
6252 while (temp && GET_CODE (temp) == NOTE)
6253 temp = PREV_INSN (temp);
6255 && GET_CODE (temp) == INSN
6256 && GET_CODE (PATTERN (temp)) == SET
6257 && SET_DEST (PATTERN (temp)) == old
6258 /* Make sure we can access insn_operand_constraint. */
6259 && asm_noperands (PATTERN (temp)) < 0
6260 /* This is unsafe if prev insn rejects our reload reg. */
6261 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6263 /* This is unsafe if operand occurs more than once in current
6264 insn. Perhaps some occurrences aren't reloaded. */
6265 && count_occurrences (PATTERN (insn), old) == 1
6266 /* Don't risk splitting a matching pair of operands. */
6267 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6269 /* Store into the reload register instead of the pseudo. */
6270 SET_DEST (PATTERN (temp)) = reloadreg;
6271 /* If these are the only uses of the pseudo reg,
6272 pretend for GDB it lives in the reload reg we used. */
6273 if (REG_N_DEATHS (REGNO (old)) == 1
6274 && REG_N_SETS (REGNO (old)) == 1)
6276 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6277 alter_reg (REGNO (old), -1);
6283 /* We can't do that, so output an insn to load RELOADREG. */
6287 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6288 rtx second_reload_reg = 0;
6289 enum insn_code icode;
6291 /* If we have a secondary reload, pick up the secondary register
6292 and icode, if any. If OLDEQUIV and OLD are different or
6293 if this is an in-out reload, recompute whether or not we
6294 still need a secondary register and what the icode should
6295 be. If we still need a secondary register and the class or
6296 icode is different, go back to reloading from OLD if using
6297 OLDEQUIV means that we got the wrong type of register. We
6298 cannot have different class or icode due to an in-out reload
6299 because we don't make such reloads when both the input and
6300 output need secondary reload registers. */
6302 if (reload_secondary_in_reload[j] >= 0)
6304 int secondary_reload = reload_secondary_in_reload[j];
6305 rtx real_oldequiv = oldequiv;
6308 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6309 and similarly for OLD.
6310 See comments in get_secondary_reload in reload.c. */
6311 if (GET_CODE (oldequiv) == REG
6312 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6313 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6314 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6316 if (GET_CODE (old) == REG
6317 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6318 && reg_equiv_mem[REGNO (old)] != 0)
6319 real_old = reg_equiv_mem[REGNO (old)];
6321 second_reload_reg = reload_reg_rtx[secondary_reload];
6322 icode = reload_secondary_in_icode[j];
6324 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6325 || (reload_in[j] != 0 && reload_out[j] != 0))
6327 enum reg_class new_class
6328 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6329 mode, real_oldequiv);
6331 if (new_class == NO_REGS)
6332 second_reload_reg = 0;
6335 enum insn_code new_icode;
6336 enum machine_mode new_mode;
6338 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6339 REGNO (second_reload_reg)))
6340 oldequiv = old, real_oldequiv = real_old;
6343 new_icode = reload_in_optab[(int) mode];
6344 if (new_icode != CODE_FOR_nothing
6345 && ((insn_operand_predicate[(int) new_icode][0]
6346 && ! ((*insn_operand_predicate[(int) new_icode][0])
6348 || (insn_operand_predicate[(int) new_icode][1]
6349 && ! ((*insn_operand_predicate[(int) new_icode][1])
6350 (real_oldequiv, mode)))))
6351 new_icode = CODE_FOR_nothing;
6353 if (new_icode == CODE_FOR_nothing)
6356 new_mode = insn_operand_mode[(int) new_icode][2];
6358 if (GET_MODE (second_reload_reg) != new_mode)
6360 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6362 oldequiv = old, real_oldequiv = real_old;
6365 = gen_rtx (REG, new_mode,
6366 REGNO (second_reload_reg));
6372 /* If we still need a secondary reload register, check
6373 to see if it is being used as a scratch or intermediate
6374 register and generate code appropriately. If we need
6375 a scratch register, use REAL_OLDEQUIV since the form of
6376 the insn may depend on the actual address if it is
6379 if (second_reload_reg)
6381 if (icode != CODE_FOR_nothing)
6383 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6384 second_reload_reg));
6389 /* See if we need a scratch register to load the
6390 intermediate register (a tertiary reload). */
6391 enum insn_code tertiary_icode
6392 = reload_secondary_in_icode[secondary_reload];
6394 if (tertiary_icode != CODE_FOR_nothing)
6396 rtx third_reload_reg
6397 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6399 emit_insn ((GEN_FCN (tertiary_icode)
6400 (second_reload_reg, real_oldequiv,
6401 third_reload_reg)));
6404 gen_reload (second_reload_reg, oldequiv,
6406 reload_when_needed[j]);
6408 oldequiv = second_reload_reg;
6414 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6415 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6416 reload_when_needed[j]);
6418 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6419 /* We may have to make a REG_DEAD note for the secondary reload
6420 register in the insns we just made. Find the last insn that
6421 mentioned the register. */
6422 if (! special && second_reload_reg
6423 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6427 for (prev = get_last_insn (); prev;
6428 prev = PREV_INSN (prev))
6429 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6430 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6433 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6442 this_reload_insn = get_last_insn ();
6443 /* End this sequence. */
6444 *where = get_insns ();
6448 /* Add a note saying the input reload reg
6449 dies in this insn, if anyone cares. */
6450 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6452 && reload_reg_rtx[j] != old
6453 && reload_reg_rtx[j] != 0
6454 && reload_out[j] == 0
6455 && ! reload_inherited[j]
6456 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6458 register rtx reloadreg = reload_reg_rtx[j];
6461 /* We can't abort here because we need to support this for sched.c.
6462 It's not terrible to miss a REG_DEAD note, but we should try
6463 to figure out how to do this correctly. */
6464 /* The code below is incorrect for address-only reloads. */
6465 if (reload_when_needed[j] != RELOAD_OTHER
6466 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6470 /* Add a death note to this insn, for an input reload. */
6472 if ((reload_when_needed[j] == RELOAD_OTHER
6473 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6474 && ! dead_or_set_p (insn, reloadreg))
6476 = gen_rtx (EXPR_LIST, REG_DEAD,
6477 reloadreg, REG_NOTES (insn));
6480 /* When we inherit a reload, the last marked death of the reload reg
6481 may no longer really be a death. */
6482 if (reload_reg_rtx[j] != 0
6483 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6484 && reload_inherited[j])
6486 /* Handle inheriting an output reload.
6487 Remove the death note from the output reload insn. */
6488 if (reload_spill_index[j] >= 0
6489 && GET_CODE (reload_in[j]) == REG
6490 && spill_reg_store[reload_spill_index[j]] != 0
6491 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6492 REG_DEAD, REGNO (reload_reg_rtx[j])))
6493 remove_death (REGNO (reload_reg_rtx[j]),
6494 spill_reg_store[reload_spill_index[j]]);
6495 /* Likewise for input reloads that were inherited. */
6496 else if (reload_spill_index[j] >= 0
6497 && GET_CODE (reload_in[j]) == REG
6498 && spill_reg_store[reload_spill_index[j]] == 0
6499 && reload_inheritance_insn[j] != 0
6500 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6501 REGNO (reload_reg_rtx[j])))
6502 remove_death (REGNO (reload_reg_rtx[j]),
6503 reload_inheritance_insn[j]);
6508 /* We got this register from find_equiv_reg.
6509 Search back for its last death note and get rid of it.
6510 But don't search back too far.
6511 Don't go past a place where this reg is set,
6512 since a death note before that remains valid. */
6513 for (prev = PREV_INSN (insn);
6514 prev && GET_CODE (prev) != CODE_LABEL;
6515 prev = PREV_INSN (prev))
6516 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6517 && dead_or_set_p (prev, reload_reg_rtx[j]))
6519 if (find_regno_note (prev, REG_DEAD,
6520 REGNO (reload_reg_rtx[j])))
6521 remove_death (REGNO (reload_reg_rtx[j]), prev);
6527 /* We might have used find_equiv_reg above to choose an alternate
6528 place from which to reload. If so, and it died, we need to remove
6529 that death and move it to one of the insns we just made. */
6531 if (oldequiv_reg != 0
6532 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6536 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6537 prev = PREV_INSN (prev))
6538 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6539 && dead_or_set_p (prev, oldequiv_reg))
6541 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6543 for (prev1 = this_reload_insn;
6544 prev1; prev1 = PREV_INSN (prev1))
6545 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6546 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6549 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6554 remove_death (REGNO (oldequiv_reg), prev);
6561 /* If we are reloading a register that was recently stored in with an
6562 output-reload, see if we can prove there was
6563 actually no need to store the old value in it. */
6565 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6566 && reload_in[j] != 0
6567 && GET_CODE (reload_in[j]) == REG
6569 /* There doesn't seem to be any reason to restrict this to pseudos
6570 and doing so loses in the case where we are copying from a
6571 register of the wrong class. */
6572 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6574 && spill_reg_store[reload_spill_index[j]] != 0
6575 /* This is unsafe if some other reload uses the same reg first. */
6576 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6577 reload_opnum[j], reload_when_needed[j])
6578 && dead_or_set_p (insn, reload_in[j])
6579 /* This is unsafe if operand occurs more than once in current
6580 insn. Perhaps some occurrences weren't reloaded. */
6581 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6582 delete_output_reload (insn, j,
6583 spill_reg_store[reload_spill_index[j]]);
6585 /* Input-reloading is done. Now do output-reloading,
6586 storing the value from the reload-register after the main insn
6587 if reload_out[j] is nonzero.
6589 ??? At some point we need to support handling output reloads of
6590 JUMP_INSNs or insns that set cc0. */
6591 old = reload_out[j];
6593 && reload_reg_rtx[j] != old
6594 && reload_reg_rtx[j] != 0)
6596 register rtx reloadreg = reload_reg_rtx[j];
6597 register rtx second_reloadreg = 0;
6599 enum machine_mode mode;
6602 /* An output operand that dies right away does need a reload,
6603 but need not be copied from it. Show the new location in the
6605 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6606 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6608 XEXP (note, 0) = reload_reg_rtx[j];
6611 /* Likewise for a SUBREG of an operand that dies. */
6612 else if (GET_CODE (old) == SUBREG
6613 && GET_CODE (SUBREG_REG (old)) == REG
6614 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6617 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6621 else if (GET_CODE (old) == SCRATCH)
6622 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6623 but we don't want to make an output reload. */
6627 /* Strip off of OLD any size-increasing SUBREGs such as
6628 (SUBREG:SI foo:QI 0). */
6630 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6631 && (GET_MODE_SIZE (GET_MODE (old))
6632 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6633 old = SUBREG_REG (old);
6636 /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
6637 if (GET_CODE (insn) == JUMP_INSN)
6640 if (reload_when_needed[j] == RELOAD_OTHER)
6643 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6645 /* Determine the mode to reload in.
6646 See comments above (for input reloading). */
6648 mode = GET_MODE (old);
6649 if (mode == VOIDmode)
6651 /* VOIDmode should never happen for an output. */
6652 if (asm_noperands (PATTERN (insn)) < 0)
6653 /* It's the compiler's fault. */
6654 fatal_insn ("VOIDmode on an output", insn);
6655 error_for_asm (insn, "output operand is constant in `asm'");
6656 /* Prevent crash--use something we know is valid. */
6658 old = gen_rtx (REG, mode, REGNO (reloadreg));
6661 if (GET_MODE (reloadreg) != mode)
6662 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6664 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6666 /* If we need two reload regs, set RELOADREG to the intermediate
6667 one, since it will be stored into OLD. We might need a secondary
6668 register only for an input reload, so check again here. */
6670 if (reload_secondary_out_reload[j] >= 0)
6674 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6675 && reg_equiv_mem[REGNO (old)] != 0)
6676 real_old = reg_equiv_mem[REGNO (old)];
6678 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6682 second_reloadreg = reloadreg;
6683 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6685 /* See if RELOADREG is to be used as a scratch register
6686 or as an intermediate register. */
6687 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6689 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6690 (real_old, second_reloadreg, reloadreg)));
6695 /* See if we need both a scratch and intermediate reload
6698 int secondary_reload = reload_secondary_out_reload[j];
6699 enum insn_code tertiary_icode
6700 = reload_secondary_out_icode[secondary_reload];
6702 if (GET_MODE (reloadreg) != mode)
6703 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6705 if (tertiary_icode != CODE_FOR_nothing)
6708 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6711 /* Copy primary reload reg to secondary reload reg
6712 (note that these have been swapped above), then copy
6713 secondary reload reg to OLD using our insn. */
6715 /* If REAL_OLD is a paradoxical SUBREG, remove it
6716 and try to put the opposite SUBREG on
6718 if (GET_CODE (real_old) == SUBREG
6719 && (GET_MODE_SIZE (GET_MODE (real_old))
6720 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6721 && 0 != (tem = gen_lowpart_common
6722 (GET_MODE (SUBREG_REG (real_old)),
6724 real_old = SUBREG_REG (real_old), reloadreg = tem;
6726 gen_reload (reloadreg, second_reloadreg,
6727 reload_opnum[j], reload_when_needed[j]);
6728 emit_insn ((GEN_FCN (tertiary_icode)
6729 (real_old, reloadreg, third_reloadreg)));
6734 /* Copy between the reload regs here and then to
6737 gen_reload (reloadreg, second_reloadreg,
6738 reload_opnum[j], reload_when_needed[j]);
6744 /* Output the last reload insn. */
6746 gen_reload (old, reloadreg, reload_opnum[j],
6747 reload_when_needed[j]);
6749 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6750 /* If final will look at death notes for this reg,
6751 put one on the last output-reload insn to use it. Similarly
6752 for any secondary register. */
6753 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6754 for (p = get_last_insn (); p; p = PREV_INSN (p))
6755 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6756 && reg_overlap_mentioned_for_reload_p (reloadreg,
6758 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6759 reloadreg, REG_NOTES (p));
6761 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6762 if (! special && second_reloadreg
6763 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6764 for (p = get_last_insn (); p; p = PREV_INSN (p))
6765 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6766 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6768 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6769 second_reloadreg, REG_NOTES (p));
6772 /* Look at all insns we emitted, just to be safe. */
6773 for (p = get_insns (); p; p = NEXT_INSN (p))
6774 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6776 /* If this output reload doesn't come from a spill reg,
6777 clear any memory of reloaded copies of the pseudo reg.
6778 If this output reload comes from a spill reg,
6779 reg_has_output_reload will make this do nothing. */
6780 note_stores (PATTERN (p), forget_old_reloads_1);
6782 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6783 && reload_spill_index[j] >= 0)
6784 new_spill_reg_store[reload_spill_index[j]] = p;
6787 if (reload_when_needed[j] == RELOAD_OTHER)
6789 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6790 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6793 output_reload_insns[reload_opnum[j]] = get_insns ();
6799 /* Now write all the insns we made for reloads in the order expected by
6800 the allocation functions. Prior to the insn being reloaded, we write
6801 the following reloads:
6803 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6805 RELOAD_OTHER reloads.
6807 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6808 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6809 RELOAD_FOR_INPUT reload for the operand.
6811 RELOAD_FOR_OPADDR_ADDRS reloads.
6813 RELOAD_FOR_OPERAND_ADDRESS reloads.
6815 After the insn being reloaded, we write the following:
6817 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
6818 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
6819 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6820 reloads for the operand. The RELOAD_OTHER output reloads are
6821 output in descending order by reload number. */
6823 emit_insns_before (other_input_address_reload_insns, before_insn);
6824 emit_insns_before (other_input_reload_insns, before_insn);
6826 for (j = 0; j < reload_n_operands; j++)
6828 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
6829 emit_insns_before (input_address_reload_insns[j], before_insn);
6830 emit_insns_before (input_reload_insns[j], before_insn);
6833 emit_insns_before (other_operand_reload_insns, before_insn);
6834 emit_insns_before (operand_reload_insns, before_insn);
6836 for (j = 0; j < reload_n_operands; j++)
6838 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
6839 emit_insns_before (output_address_reload_insns[j], following_insn);
6840 emit_insns_before (output_reload_insns[j], following_insn);
6841 emit_insns_before (other_output_reload_insns[j], following_insn);
6844 /* Move death notes from INSN
6845 to output-operand-address and output reload insns. */
6846 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6849 /* Loop over those insns, last ones first. */
6850 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6851 insn1 = PREV_INSN (insn1))
6852 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6854 rtx source = SET_SRC (PATTERN (insn1));
6855 rtx dest = SET_DEST (PATTERN (insn1));
6857 /* The note we will examine next. */
6858 rtx reg_notes = REG_NOTES (insn);
6859 /* The place that pointed to this note. */
6860 rtx *prev_reg_note = &REG_NOTES (insn);
6862 /* If the note is for something used in the source of this
6863 reload insn, or in the output address, move the note. */
6866 rtx next_reg_notes = XEXP (reg_notes, 1);
6867 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6868 && GET_CODE (XEXP (reg_notes, 0)) == REG
6869 && ((GET_CODE (dest) != REG
6870 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6872 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6875 *prev_reg_note = next_reg_notes;
6876 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6877 REG_NOTES (insn1) = reg_notes;
6880 prev_reg_note = &XEXP (reg_notes, 1);
6882 reg_notes = next_reg_notes;
6888 /* For all the spill regs newly reloaded in this instruction,
6889 record what they were reloaded from, so subsequent instructions
6890 can inherit the reloads.
6892 Update spill_reg_store for the reloads of this insn.
6893 Copy the elements that were updated in the loop above. */
6895 for (j = 0; j < n_reloads; j++)
6897 register int r = reload_order[j];
6898 register int i = reload_spill_index[r];
6900 /* I is nonneg if this reload used one of the spill regs.
6901 If reload_reg_rtx[r] is 0, this is an optional reload
6902 that we opted to ignore. */
6904 if (i >= 0 && reload_reg_rtx[r] != 0)
6907 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6909 int part_reaches_end = 0;
6910 int all_reaches_end = 1;
6912 /* For a multi register reload, we need to check if all or part
6913 of the value lives to the end. */
6914 for (k = 0; k < nr; k++)
6916 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6917 reload_when_needed[r]))
6918 part_reaches_end = 1;
6920 all_reaches_end = 0;
6923 /* Ignore reloads that don't reach the end of the insn in
6925 if (all_reaches_end)
6927 /* First, clear out memory of what used to be in this spill reg.
6928 If consecutive registers are used, clear them all. */
6930 for (k = 0; k < nr; k++)
6932 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6933 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6936 /* Maybe the spill reg contains a copy of reload_out. */
6937 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6939 register int nregno = REGNO (reload_out[r]);
6940 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6941 : HARD_REGNO_NREGS (nregno,
6942 GET_MODE (reload_reg_rtx[r])));
6944 spill_reg_store[i] = new_spill_reg_store[i];
6945 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6947 /* If NREGNO is a hard register, it may occupy more than
6948 one register. If it does, say what is in the
6949 rest of the registers assuming that both registers
6950 agree on how many words the object takes. If not,
6951 invalidate the subsequent registers. */
6953 if (nregno < FIRST_PSEUDO_REGISTER)
6954 for (k = 1; k < nnr; k++)
6955 reg_last_reload_reg[nregno + k]
6958 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6959 REGNO (reload_reg_rtx[r]) + k)
6962 /* Now do the inverse operation. */
6963 for (k = 0; k < nr; k++)
6965 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6966 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6969 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6973 /* Maybe the spill reg contains a copy of reload_in. Only do
6974 something if there will not be an output reload for
6975 the register being reloaded. */
6976 else if (reload_out[r] == 0
6977 && reload_in[r] != 0
6978 && ((GET_CODE (reload_in[r]) == REG
6979 && ! reg_has_output_reload[REGNO (reload_in[r])])
6980 || (GET_CODE (reload_in_reg[r]) == REG
6981 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6983 register int nregno;
6986 if (GET_CODE (reload_in[r]) == REG)
6987 nregno = REGNO (reload_in[r]);
6989 nregno = REGNO (reload_in_reg[r]);
6991 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6992 : HARD_REGNO_NREGS (nregno,
6993 GET_MODE (reload_reg_rtx[r])));
6995 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6997 if (nregno < FIRST_PSEUDO_REGISTER)
6998 for (k = 1; k < nnr; k++)
6999 reg_last_reload_reg[nregno + k]
7002 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7003 REGNO (reload_reg_rtx[r]) + k)
7006 /* Unless we inherited this reload, show we haven't
7007 recently done a store. */
7008 if (! reload_inherited[r])
7009 spill_reg_store[i] = 0;
7011 for (k = 0; k < nr; k++)
7013 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
7014 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7017 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
7023 /* However, if part of the reload reaches the end, then we must
7024 invalidate the old info for the part that survives to the end. */
7025 else if (part_reaches_end)
7027 for (k = 0; k < nr; k++)
7028 if (reload_reg_reaches_end_p (spill_regs[i] + k,
7030 reload_when_needed[r]))
7032 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
7033 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
7038 /* The following if-statement was #if 0'd in 1.34 (or before...).
7039 It's reenabled in 1.35 because supposedly nothing else
7040 deals with this problem. */
7042 /* If a register gets output-reloaded from a non-spill register,
7043 that invalidates any previous reloaded copy of it.
7044 But forget_old_reloads_1 won't get to see it, because
7045 it thinks only about the original insn. So invalidate it here. */
7046 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7048 register int nregno = REGNO (reload_out[r]);
7049 if (nregno >= FIRST_PSEUDO_REGISTER)
7050 reg_last_reload_reg[nregno] = 0;
7053 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
7055 while (num_regs-- > 0)
7056 reg_last_reload_reg[nregno + num_regs] = 0;
7062 /* Emit code to perform a reload from IN (which may be a reload register) to
7063 OUT (which may also be a reload register). IN or OUT is from operand
7064 OPNUM with reload type TYPE.
7066 Returns first insn emitted. */
7069 gen_reload (out, in, opnum, type)
7073 enum reload_type type;
/* Remember the current end of the insn stream so that tentatively
   emitted insns can be removed with delete_insns_since if they fail
   to satisfy their constraints. */
7075 rtx last = get_last_insn ();
7078 /* If IN is a paradoxical SUBREG, remove it and try to put the
7079 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7080 if (GET_CODE (in) == SUBREG
7081 && (GET_MODE_SIZE (GET_MODE (in))
7082 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7083 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7084 in = SUBREG_REG (in), out = tem;
7085 else if (GET_CODE (out) == SUBREG
7086 && (GET_MODE_SIZE (GET_MODE (out))
7087 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7088 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7089 out = SUBREG_REG (out), in = tem;
7091 /* How to do this reload can get quite tricky. Normally, we are being
7092 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7093 register that didn't get a hard register. In that case we can just
7094 call emit_move_insn.
7096 We can also be asked to reload a PLUS that adds a register or a MEM to
7097 another register, constant or MEM. This can occur during frame pointer
7098 elimination and while reloading addresses. This case is handled by
7099 trying to emit a single insn to perform the add. If it is not valid,
7100 we use a two insn sequence.
7102 Finally, we could be called to handle an 'o' constraint by putting
7103 an address into a register. In that case, we first try to do this
7104 with a named pattern of "reload_load_address". If no such pattern
7105 exists, we just emit a SET insn and hope for the best (it will normally
7106 be valid on machines that use 'o').
7108 This entire process is made complex because reload will never
7109 process the insns we generate here and so we must ensure that
7110 they will fit their constraints and also by the fact that parts of
7111 IN might be being reloaded separately and replaced with spill registers.
7112 Because of this, we are, in some sense, just guessing the right approach
7113 here. The one listed above seems to work.
7115 ??? At some point, this whole thing needs to be rethought. */
7117 if (GET_CODE (in) == PLUS
7118 && (GET_CODE (XEXP (in, 0)) == REG
7119 || GET_CODE (XEXP (in, 0)) == SUBREG
7120 || GET_CODE (XEXP (in, 0)) == MEM)
7121 && (GET_CODE (XEXP (in, 1)) == REG
7122 || GET_CODE (XEXP (in, 1)) == SUBREG
7123 || CONSTANT_P (XEXP (in, 1))
7124 || GET_CODE (XEXP (in, 1)) == MEM))
7126 /* We need to compute the sum of a register or a MEM and another
7127 register, constant, or MEM, and put it into the reload
7128 register. The best possible way of doing this is if the machine
7129 has a three-operand ADD insn that accepts the required operands.
7131 The simplest approach is to try to generate such an insn and see if it
7132 is recognized and matches its constraints. If so, it can be used.
7134 It might be better not to actually emit the insn unless it is valid,
7135 but we need to pass the insn as an operand to `recog' and
7136 `insn_extract' and it is simpler to emit and then delete the insn if
7137 not valid than to dummy things up. */
7139 rtx op0, op1, tem, insn;
/* Parts of IN may themselves be scheduled for replacement with spill
   registers; find_replacement substitutes those now. */
7142 op0 = find_replacement (&XEXP (in, 0));
7143 op1 = find_replacement (&XEXP (in, 1));
7145 /* Since constraint checking is strict, commutativity won't be
7146 checked, so we need to do that here to avoid spurious failure
7147 if the add instruction is two-address and the second operand
7148 of the add is the same as the reload reg, which is frequently
7149 the case. If the insn would be A = B + A, rearrange it so
7150 it will be A = A + B as constrain_operands expects. */
7152 if (GET_CODE (XEXP (in, 1)) == REG
7153 && REGNO (out) == REGNO (XEXP (in, 1)))
7154 tem = op0, op0 = op1, op1 = tem;
7156 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7157 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
/* Emit the three-operand add as a plain SET and ask recog whether
   some insn pattern matches it. */
7159 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
7160 code = recog_memoized (insn);
7164 insn_extract (insn);
7165 /* We want constrain operands to treat this insn strictly in
7166 its validity determination, i.e., the way it would after reload
has completed. */
7168 if (constrain_operands (code, 1))
/* The tentative add insn did not match any pattern; remove it. */
7172 delete_insns_since (last);
7174 /* If that failed, we must use a conservative two-insn sequence.
7175 use move to copy constant, MEM, or pseudo register to the reload
7176 register since "move" will be able to handle an arbitrary operand,
7177 unlike add which can't, in general. Then add the registers.
7179 If there is another way to do this for a specific machine, a
7180 DEFINE_PEEPHOLE should be specified that recognizes the sequence
we emit below. */
7183 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7184 || (GET_CODE (op1) == REG
7185 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7186 tem = op0, op0 = op1, op1 = tem;
/* Copy the "hard" operand into OUT, then add the other one to it. */
7188 gen_reload (out, op0, opnum, type);
7190 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7191 This fixes a problem on the 32K where the stack pointer cannot
7192 be used as an operand of an add insn. */
7194 if (rtx_equal_p (op0, op1))
7197 insn = emit_insn (gen_add2_insn (out, op1));
7199 /* If that failed, copy the address register to the reload register.
7200 Then add the constant to the reload register. */
7202 code = recog_memoized (insn);
7206 insn_extract (insn);
7207 /* We want constrain operands to treat this insn strictly in
7208 its validity determination, i.e., the way it would after reload
has completed. */
7210 if (constrain_operands (code, 1))
/* The two-address add didn't match either; delete it and reload
   OP1 into OUT first, then add OP0. */
7214 delete_insns_since (last);
7216 gen_reload (out, op1, opnum, type);
7217 emit_insn (gen_add2_insn (out, op0));
7220 #ifdef SECONDARY_MEMORY_NEEDED
7221 /* If we need a memory location to do the move, do it that way. */
7222 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7223 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7224 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7225 REGNO_REG_CLASS (REGNO (out)),
7228 /* Get the memory to use and rewrite both registers to its mode. */
7229 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7231 if (GET_MODE (loc) != GET_MODE (out))
7232 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
7234 if (GET_MODE (loc) != GET_MODE (in))
7235 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
/* Move IN to the secondary memory, then from there to OUT. */
7237 gen_reload (loc, in, opnum, type);
7238 gen_reload (out, loc, opnum, type);
7242 /* If IN is a simple operand, use gen_move_insn. */
7243 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7244 emit_insn (gen_move_insn (out, in));
7246 #ifdef HAVE_reload_load_address
7247 else if (HAVE_reload_load_address)
7248 emit_insn (gen_reload_load_address (out, in));
7251 /* Otherwise, just write (set OUT IN) and hope for the best. */
7253 emit_insn (gen_rtx (SET, VOIDmode, out, in));
7255 /* Return the first insn emitted.
7256 We can not just return get_last_insn, because there may have
7257 been multiple instructions emitted. Also note that gen_move_insn may
7258 emit more than one insn itself, so we can not assume that there is one
7259 insn emitted per emit_insn_before call. */
7261 return last ? NEXT_INSN (last) : get_insns ();
7264 /* Delete a previously made output-reload
7265 whose result we now believe is not needed.
7266 First we double-check.
7268 INSN is the insn now being processed.
7269 OUTPUT_RELOAD_INSN is the insn of the output reload.
7270 J is the reload-number for this insn. */
7273 delete_output_reload (insn, j, output_reload_insn)
7276 rtx output_reload_insn;
7280 /* Get the raw pseudo-register referred to. */
7282 rtx reg = reload_in[j];
7283 while (GET_CODE (reg) == SUBREG)
7284 reg = SUBREG_REG (reg);
7286 /* If the pseudo-reg we are reloading is no longer referenced
7287 anywhere between the store into it and here,
7288 and no jumps or labels intervene, then the value can get
7289 here through the reload reg alone.
7290 Otherwise, give up--return. */
7291 for (i1 = NEXT_INSN (output_reload_insn);
7292 i1 != insn; i1 = NEXT_INSN (i1))
7294 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7296 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7297 && reg_mentioned_p (reg, PATTERN (i1)))
/* NOTE(review): cannot_omit_stores presumably marks pseudos whose
   stores must be preserved even if apparently dead -- confirm
   against the table's definition elsewhere in this file. */
7301 if (cannot_omit_stores[REGNO (reg)])
7304 /* If this insn will store in the pseudo again,
7305 the previous store can be removed. */
7306 if (reload_out[j] == reload_in[j])
7307 delete_insn (output_reload_insn);
7309 /* See if the pseudo reg has been completely replaced
7310 with reload regs. If so, delete the store insn
7311 and forget we had a stack slot for the pseudo. */
7312 else if (REG_N_DEATHS (REGNO (reg)) == 1
7313 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7314 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7318 /* We know that it was used only between here
7319 and the beginning of the current basic block.
7320 (We also know that the last use before INSN was
7321 the output reload we are thinking of deleting, but never mind that.)
7322 Search that range; see if any ref remains. */
7323 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7325 rtx set = single_set (i2);
7327 /* Uses which just store in the pseudo don't count,
7328 since if they are the only uses, they are dead. */
7329 if (set != 0 && SET_DEST (set) == reg)
7331 if (GET_CODE (i2) == CODE_LABEL
7332 || GET_CODE (i2) == JUMP_INSN)
7334 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7335 && reg_mentioned_p (reg, PATTERN (i2)))
7336 /* Some other ref remains;
7337 we can't do anything. */
7341 /* Delete the now-dead stores into this pseudo. */
7342 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7344 rtx set = single_set (i2);
7346 if (set != 0 && SET_DEST (set) == reg)
7348 /* This might be a basic block head,
7349 thus don't use delete_insn. */
7350 PUT_CODE (i2, NOTE);
7351 NOTE_SOURCE_FILE (i2) = 0;
7352 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7354 if (GET_CODE (i2) == CODE_LABEL
7355 || GET_CODE (i2) == JUMP_INSN)
7359 /* For the debugging info,
7360 say the pseudo lives in this reload reg. */
7361 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7362 alter_reg (REGNO (reg), -1);
7366 /* Output reload-insns to reload VALUE into RELOADREG.
7367 VALUE is an autoincrement or autodecrement RTX whose operand
7368 is a register or memory location;
7369 so reloading involves incrementing that location.
7371 INC_AMOUNT is the number to increment or decrement by (always positive).
7372 This cannot be deduced from VALUE. */
7375 inc_for_reload (reloadreg, value, inc_amount)
7380 /* REG or MEM to be copied and incremented. */
7381 rtx incloc = XEXP (value, 0);
7382 /* Nonzero if increment after copying. */
7383 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7389 /* No hard register is equivalent to this register after
7390 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7391 we could inc/dec that register as well (maybe even using it for
7392 the source), but I'm not sure it's worth worrying about. */
7393 if (GET_CODE (incloc) == REG)
7394 reg_last_reload_reg[REGNO (incloc)] = 0;
/* Decrements are expressed as addition of a negative amount. */
7396 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7397 inc_amount = - inc_amount;
7399 inc = GEN_INT (inc_amount);
7401 /* If this is post-increment, first copy the location to the reload reg. */
7403 emit_insn (gen_move_insn (reloadreg, incloc));
7405 /* See if we can directly increment INCLOC. Use a method similar to that
in gen_reload. */
7408 last = get_last_insn ();
7409 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7410 gen_rtx (PLUS, GET_MODE (incloc),
7413 code = recog_memoized (add_insn);
7416 insn_extract (add_insn);
7417 if (constrain_operands (code, 1))
7419 /* If this is a pre-increment and we have incremented the value
7420 where it lives, copy the incremented value to RELOADREG to
7421 be used as an address. */
7424 emit_insn (gen_move_insn (reloadreg, incloc));
/* The direct add did not match any insn pattern; remove it. */
7430 delete_insns_since (last);
7432 /* If couldn't do the increment directly, must increment in RELOADREG.
7433 The way we do this depends on whether this is pre- or post-increment.
7434 For pre-increment, copy INCLOC to the reload register, increment it
7435 there, then save back. */
7439 emit_insn (gen_move_insn (reloadreg, incloc));
7440 emit_insn (gen_add2_insn (reloadreg, inc));
7441 emit_insn (gen_move_insn (incloc, reloadreg));
/* Postincrement.
7446 Because this might be a jump insn or a compare, and because RELOADREG
7447 may not be available after the insn in an input reload, we must do
7448 the incrementation before the insn being reloaded for.
7450 We have already copied INCLOC to RELOADREG. Increment the copy in
7451 RELOADREG, save that back, then decrement RELOADREG so it has
7452 the original value. */
7454 emit_insn (gen_add2_insn (reloadreg, inc));
7455 emit_insn (gen_move_insn (incloc, reloadreg));
7456 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7462 /* Return 1 if we are certain that the constraint-string STRING allows
7463 the hard register REG. Return 0 if we can't be sure of this. */
7466 constraint_accepts_reg_p (string, reg)
/* Resolve REG (possibly a SUBREG of a hard reg) to a hard register
   number for the class-membership tests below. */
7471 int regno = true_regnum (reg);
7474 /* Initialize for first alternative. */
7476 /* Check that each alternative contains `g' or `r'. */
/* Scan the constraint string one letter at a time; alternatives are
   separated by commas. */
7478 switch (c = *string++)
7481 /* If an alternative lacks `g' or `r', we lose. */
7484 /* If an alternative lacks `g' or `r', we lose. */
7487 /* Initialize for next alternative. */
7492 /* Any general reg wins for this alternative. */
7493 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7497 /* Any reg in specified class wins for this alternative. */
7499 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7501 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7507 /* Return the number of places FIND appears within X, but don't count
7508 an occurrence if some SET_DEST is FIND. */
7511 count_occurrences (x, find)
7512 register rtx x, find;
7515 register enum rtx_code code;
7516 register char *format_ptr;
7524 code = GET_CODE (x);
/* For a SET whose destination is exactly FIND, count only the
   occurrences inside the source; the destination itself is excluded. */
7539 if (SET_DEST (x) == find)
7540 return count_occurrences (SET_SRC (x), find);
/* Otherwise recurse over X's operands as described by its rtx
   format string, summing occurrences found in sub-expressions and
   in expression vectors. */
7547 format_ptr = GET_RTX_FORMAT (code);
7550 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7552 switch (*format_ptr++)
7555 count += count_occurrences (XEXP (x, i), find);
7559 if (XVEC (x, i) != NULL)
7561 for (j = 0; j < XVECLEN (x, i); j++)
7562 count += count_occurrences (XVECEXP (x, i, j), find);
7570 /* This array holds values which are equivalent to a hard register
7571 during reload_cse_regs. Each array element is an EXPR_LIST of
7572 values. Each time a hard register is set, we set the corresponding
7573 array element to the value. Each time a hard register is copied
7574 into memory, we add the memory location to the corresponding array
7575 element. We don't store values or memory addresses with side
7576 effects in this array.
7578 If the value is a CONST_INT, then the mode of the containing
7579 EXPR_LIST is the mode in which that CONST_INT was referenced.
7581 We sometimes clobber a specific entry in a list. In that case, we
7582 just set XEXP (list-entry, 0) to 0. */
/* Indexed by hard register number; the array itself is allocated
   (with alloca) at the start of each reload_cse_regs call. */
7584 static rtx *reg_values;
7586 /* This is a preallocated REG rtx which we use as a temporary in
7587 reload_cse_invalidate_regno, so that we don't need to allocate a
7588 new one each time through a loop in that function. */
7590 static rtx invalidate_regno_rtx;
7592 /* This is a set of registers for which we must remove REG_DEAD notes in
7593 previous insns, because our modifications made them invalid. That can
7594 happen if we introduced the register into the current insn, or we deleted
7595 the current insn which used to set the register. */
7597 static HARD_REG_SET no_longer_dead_regs;
7599 /* Invalidate any entries in reg_values which depend on REGNO,
7600 including those for REGNO itself. This is called if REGNO is
7601 changing. If CLOBBER is true, then always forget anything we
7602 currently know about REGNO. MODE is the mode of the assignment to
7603 REGNO, which is used to determine how many hard registers are being
7604 changed. If MODE is VOIDmode, then only REGNO is being changed;
7605 this is used when invalidating call clobbered registers across a
call. */
7609 reload_cse_invalidate_regno (regno, mode, clobber)
7611 enum machine_mode mode;
7617 /* Our callers don't always go through true_regnum; we may see a
7618 pseudo-register here from a CLOBBER or the like. We probably
7619 won't ever see a pseudo-register that has a real register number,
7620 for we check anyhow for safety. */
7621 if (regno >= FIRST_PSEUDO_REGISTER)
7622 regno = reg_renumber[regno];
/* Compute the half-open range [regno, endregno) of hard registers
   affected by the assignment. */
7626 if (mode == VOIDmode)
7627 endregno = regno + 1;
7629 endregno = regno + HARD_REGNO_NREGS (regno, mode);
7632 for (i = regno; i < endregno; i++)
/* Drop any recorded value, for any hard register, that mentions one
   of the registers being changed. */
7635 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7639 for (x = reg_values[i]; x; x = XEXP (x, 1))
7641 if (XEXP (x, 0) != 0
7642 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
7644 /* If this is the only entry on the list, clear
7645 reg_values[i]. Otherwise, just clear this entry on
the list. */
7647 if (XEXP (x, 1) == 0 && x == reg_values[i])
7657 /* We must look at earlier registers, in case REGNO is part of a
7658 multi word value but is not the first register. If an earlier
7659 register has a value in a mode which overlaps REGNO, then we must
7660 invalidate that earlier register. Note that we do not need to
7661 check REGNO or later registers (we must not check REGNO itself,
7662 because we would incorrectly conclude that there was a conflict). */
7664 for (i = 0; i < regno; i++)
7668 for (x = reg_values[i]; x; x = XEXP (x, 1))
7670 if (XEXP (x, 0) != 0)
/* Reuse the preallocated REG rtx to test whether a value known
   for register I, in its recorded mode, overlaps the range. */
7672 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
7673 REGNO (invalidate_regno_rtx) = i;
7674 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
7677 reload_cse_invalidate_regno (i, VOIDmode, 1);
7685 /* The memory at address MEM_BASE is being changed.
7686 Return whether this change will invalidate VAL. */
7689 reload_cse_mem_conflict_p (mem_base, val)
7697 code = GET_CODE (val);
7700 /* Get rid of a few simple cases quickly. */
/* A BLKmode reference has unknown extent, so be conservative. */
7713 if (GET_MODE (mem_base) == BLKmode
7714 || GET_MODE (val) == BLKmode)
/* anti_dependence performs the actual overlap analysis between the
   MEM value VAL and the store to MEM_BASE. */
7716 if (anti_dependence (val, mem_base))
7718 /* The address may contain nested MEMs. */
/* Recurse over VAL's sub-expressions ('E' entries are rtx vectors)
   looking for any conflicting MEM. */
7725 fmt = GET_RTX_FORMAT (code);
7727 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7731 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
7734 else if (fmt[i] == 'E')
7738 for (j = 0; j < XVECLEN (val, i); j++)
7739 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
7747 /* Invalidate any entries in reg_values which are changed because of a
7748 store to MEM_RTX. If this is called because of a non-const call
7749 instruction, MEM_RTX is (mem:BLK const0_rtx). */
7752 reload_cse_invalidate_mem (mem_rtx)
/* Check every hard register's list of known values for a memory
   value that may overlap the store. */
7757 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7761 for (x = reg_values[i]; x; x = XEXP (x, 1))
7763 if (XEXP (x, 0) != 0
7764 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
7766 /* If this is the only entry on the list, clear
7767 reg_values[i]. Otherwise, just clear this entry on
the list. */
7769 if (XEXP (x, 1) == 0 && x == reg_values[i])
7780 /* Invalidate DEST, which is being assigned to or clobbered. The
7781 second parameter exists so that this function can be passed to
7782 note_stores; it is ignored. */
7785 reload_cse_invalidate_rtx (dest, ignore)
/* Strip wrappers so we see the REG or MEM actually being modified. */
7789 while (GET_CODE (dest) == STRICT_LOW_PART
7790 || GET_CODE (dest) == SIGN_EXTRACT
7791 || GET_CODE (dest) == ZERO_EXTRACT
7792 || GET_CODE (dest) == SUBREG)
7793 dest = XEXP (dest, 0);
/* Dispatch to the register or memory invalidation routine; anything
   else (e.g. a PC or CC0 destination) needs no action here. */
7795 if (GET_CODE (dest) == REG)
7796 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
7797 else if (GET_CODE (dest) == MEM)
7798 reload_cse_invalidate_mem (dest);
7801 /* Possibly delete death notes on the insns before INSN if modifying INSN
7802 extended the lifespan of the registers. */
7805 reload_cse_delete_death_notes (insn)
7810 for (dreg = 0; dreg < FIRST_PSEUDO_REGISTER; dreg++)
/* Only registers recorded by reload_cse_no_longer_dead need work. */
7814 if (! TEST_HARD_REG_BIT (no_longer_dead_regs, dreg))
/* Scan backwards through the preceding insns, stopping at a label
   or barrier, looking for a REG_DEAD note for DREG to remove. */
7817 for (trial = prev_nonnote_insn (insn);
7819 && GET_CODE (trial) != CODE_LABEL
7820 && GET_CODE (trial) != BARRIER);
7821 trial = prev_nonnote_insn (trial))
7823 if (find_regno_note (trial, REG_DEAD, dreg))
7825 remove_death (dreg, trial);
7832 /* Record that the current insn uses hard reg REGNO in mode MODE. This
7833 will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
7834 notes for this register. */
7837 reload_cse_no_longer_dead (regno, mode)
7839 enum machine_mode mode;
/* NREGS is how many consecutive hard registers a MODE value
   occupies starting at REGNO. */
7841 int nregs = HARD_REGNO_NREGS (regno, mode);
7844 SET_HARD_REG_BIT (no_longer_dead_regs, regno);
7850 /* Do a very simple CSE pass over the hard registers.
7852 This function detects no-op moves where we happened to assign two
7853 different pseudo-registers to the same hard register, and then
7854 copied one to the other. Reload will generate a useless
7855 instruction copying a register to itself.
7857 This function also detects cases where we load a value from memory
7858 into two different registers, and (if memory is more expensive than
7859 registers) changes it to simply copy the first register into the
7862 Another optimization is performed that scans the operands of each
7863 instruction to see whether the value is already available in a
7864 hard register. It then replaces the operand with the hard register
7865 if possible, much like an optional reload would. */
7868 reload_cse_regs (first)
/* Alias analysis is needed by reload_cse_mem_conflict_p's
   dependence checks. */
7876 init_alias_analysis ();
7878 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7879 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7882 /* Create our EXPR_LIST structures on reload_obstack, so that we can
7883 free them when we are done. */
7884 push_obstacks (&reload_obstack, &reload_obstack);
7885 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
7887 /* We pass this to reload_cse_invalidate_mem to invalidate all of
7888 memory for a non-const call instruction. */
7889 callmem = gen_rtx (MEM, BLKmode, const0_rtx);
7891 /* This is used in reload_cse_invalidate_regno to avoid consing a
7892 new REG in a loop in that function. */
7893 invalidate_regno_rtx = gen_rtx (REG, VOIDmode, 0);
7895 for (insn = first; insn; insn = NEXT_INSN (insn))
7899 if (GET_CODE (insn) == CODE_LABEL)
7901 /* Forget all the register values at a code label. We don't
7902 try to do anything clever around jumps. */
7903 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7909 #ifdef NON_SAVING_SETJMP
7910 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
7911 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
7913 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* Skip anything that is not a real instruction. */
7920 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
7923 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7925 /* If this is a call instruction, forget anything stored in a
7926 call clobbered register, or, if this is not a const call, in
memory. */
7928 if (GET_CODE (insn) == CALL_INSN)
7930 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7931 if (call_used_regs[i])
7932 reload_cse_invalidate_regno (i, VOIDmode, 1)
7934 if (! CONST_CALL_P (insn))
7935 reload_cse_invalidate_mem (callmem);
7938 body = PATTERN (insn);
7939 if (GET_CODE (body) == SET)
7942 if (reload_cse_noop_set_p (body, insn))
/* Delete a no-op move by turning the insn into a deleted-insn
   note, then clean up REG_DEAD notes it invalidated. */
7944 PUT_CODE (insn, NOTE);
7945 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7946 NOTE_SOURCE_FILE (insn) = 0;
7947 reload_cse_delete_death_notes (insn);
7949 /* We're done with this insn. */
7953 /* It's not a no-op, but we can try to simplify it. */
7954 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7955 count += reload_cse_simplify_set (body, insn);
7957 if (count > 0 && apply_change_group ())
7958 reload_cse_delete_death_notes (insn);
7959 else if (reload_cse_simplify_operands (insn))
7960 reload_cse_delete_death_notes (insn);
7962 reload_cse_record_set (body, body);
7964 else if (GET_CODE (body) == PARALLEL)
7968 /* If every action in a PARALLEL is a noop, we can delete
7969 the entire PARALLEL. */
7970 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7971 if ((GET_CODE (XVECEXP (body, 0, i)) != SET
7972 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
7973 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
7977 PUT_CODE (insn, NOTE);
7978 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
7979 NOTE_SOURCE_FILE (insn) = 0;
7980 reload_cse_delete_death_notes (insn);
7982 /* We're done with this insn. */
7986 /* It's not a no-op, but we can try to simplify it. */
7987 CLEAR_HARD_REG_SET (no_longer_dead_regs);
7988 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
7989 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
7990 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
7992 if (count > 0 && apply_change_group ())
7993 reload_cse_delete_death_notes (insn);
7994 else if (reload_cse_simplify_operands (insn))
7995 reload_cse_delete_death_notes (insn);
7997 /* Look through the PARALLEL and record the values being
7998 set, if possible. Also handle any CLOBBERs. */
7999 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8001 rtx x = XVECEXP (body, 0, i);
8003 if (GET_CODE (x) == SET)
8004 reload_cse_record_set (x, body);
8006 note_stores (x, reload_cse_invalidate_rtx);
8010 note_stores (body, reload_cse_invalidate_rtx);
8013 /* Clobber any registers which appear in REG_INC notes. We
8014 could keep track of the changes to their values, but it is
8015 unlikely to help. */
8019 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8020 if (REG_NOTE_KIND (x) == REG_INC)
8021 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8025 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8026 after we have processed the insn. */
8027 if (GET_CODE (insn) == CALL_INSN)
8031 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8032 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8033 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8037 /* Free all the temporary structures we created, and go back to the
8038 regular obstacks. */
8039 obstack_free (&reload_obstack, firstobj);
8043 /* Return whether the values known for REGNO are equal to VAL. MODE
8044 is the mode of the object that VAL is being copied to; this matters
8045 if VAL is a CONST_INT. */
8048 reload_cse_regno_equal_p (regno, val, mode)
8051 enum machine_mode mode;
/* Walk REGNO's list of known-equivalent values. A non-CONST_INT
   match is accepted directly; a CONST_INT must also have been
   recorded in MODE, or in a wider mode that truncates to MODE
   without changing the value. */
8058 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8059 if (XEXP (x, 0) != 0
8060 && rtx_equal_p (XEXP (x, 0), val)
8061 && (GET_CODE (val) != CONST_INT
8062 || mode == GET_MODE (x)
8063 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8064 /* On a big endian machine if the value spans more than
8065 one register then this register holds the high part of
8066 it and we can't use it.
8068 ??? We should also compare with the high part of the
value. */
8070 && !(WORDS_BIG_ENDIAN
8071 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8072 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8073 GET_MODE_BITSIZE (GET_MODE (x))))))
8079 /* See whether a single set is a noop. SET is the set instruction we
8080 should check, and INSN is the instruction from which it came. */
8083 reload_cse_noop_set_p (set, insn)
8088 enum machine_mode dest_mode;
8092 src = SET_SRC (set);
8093 dest = SET_DEST (set);
8094 dest_mode = GET_MODE (dest);
/* A source with side effects (e.g. autoincrement) is never a noop. */
8096 if (side_effects_p (src))
8099 dreg = true_regnum (dest);
8100 sreg = true_regnum (src);
8102 /* Check for setting a register to itself. In this case, we don't
8103 have to worry about REG_DEAD notes. */
8104 if (dreg >= 0 && dreg == sreg)
8110 /* Check for setting a register to itself. */
8114 /* Check for setting a register to a value which we already know
8115 is in the register. */
8116 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8119 /* Check for setting a register DREG to another register SREG
8120 where SREG is equal to a value which is already in DREG. */
8125 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
/* Skip clobbered (zeroed-out) entries on the value list. */
8129 if (XEXP (x, 0) == 0)
8132 if (dest_mode == GET_MODE (x))
/* If the known value is wider than DEST, compare only its
   low part in DEST's mode. */
8134 else if (GET_MODE_BITSIZE (dest_mode)
8135 < GET_MODE_BITSIZE (GET_MODE (x)))
8136 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8141 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8149 else if (GET_CODE (dest) == MEM)
8151 /* Check for storing a register to memory when we know that the
8152 register is equivalent to the memory location. */
8154 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8155 && ! side_effects_p (dest))
8159 /* If we can delete this SET, then we need to look for an earlier
8160 REG_DEAD note on DREG, and remove it if it exists. */
8161 if (ret && dreg >= 0)
8163 if (! find_regno_note (insn, REG_UNUSED, dreg))
8164 reload_cse_no_longer_dead (dreg, dest_mode);
8170 /* Try to simplify a single SET instruction. SET is the set pattern.
8171 INSN is the instruction it came from.
8172 This function only handles one case: if we set a register to a value
8173 which is not a register, we try to find that value in some other register
8174 and change the set into a register copy. */
8177 reload_cse_simplify_set (set, insn)
8183 enum machine_mode dest_mode;
8184 enum reg_class dclass;
8187 dreg = true_regnum (SET_DEST (set));
/* Only handle a non-register, side-effect-free source. */
8191 src = SET_SRC (set);
8192 if (side_effects_p (src) || true_regnum (src) >= 0)
8195 /* If memory loads are cheaper than register copies, don't change
them. */
8197 if (GET_CODE (src) == MEM && MEMORY_MOVE_COST (GET_MODE (src)) < 2)
8200 dest_mode = GET_MODE (SET_DEST (set));
8201 dclass = REGNO_REG_CLASS (dreg);
/* Search the hard registers for one already known to hold SRC and
   cheap to copy into DREG's class. */
8202 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8205 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8206 && reload_cse_regno_equal_p (i, src, dest_mode)
8210 /* Pop back to the real obstacks while changing the insn. */
/* The final argument 1 queues the change in a group; the caller
   commits it with apply_change_group. */
8213 validated = validate_change (insn, &SET_SRC (set),
8214 gen_rtx (REG, dest_mode, i), 1);
8216 /* Go back to the obstack we are using for temporary
storage. */
8218 push_obstacks (&reload_obstack, &reload_obstack);
8220 if (validated && ! find_regno_note (insn, REG_UNUSED, i))
8222 reload_cse_no_longer_dead (i, dest_mode);
8230 /* Try to replace operands in INSN with equivalent values that are already
8231 in registers. This can be viewed as optional reloading.
8233 For each non-register operand in the insn, see if any hard regs are
8234 known to be equivalent to that operand. Record the alternatives which
8235 can accept these hard registers. Among all alternatives, select the
8236 ones which are better or equal to the one currently matching, where
8237 "better" is in terms of '?' and '!' constraints. Among the remaining
8238 alternatives, select the one which replaces most operands with
8242 reload_cse_simplify_operands (insn)
8245 #ifdef REGISTER_CONSTRAINTS
8246 int insn_code_number, n_operands, n_alternatives;
8249 char *constraints[MAX_RECOG_OPERANDS];
8251 /* Vector recording how bad an alternative is. */
8252 int *alternative_reject;
8253 /* Vector recording how many registers can be introduced by choosing
8254 this alternative. */
8255 int *alternative_nregs;
8256 /* Array of vectors recording, for each operand and each alternative,
8257 which hard register to substitute, or -1 if the operand should be
8259 int *op_alt_regno[MAX_RECOG_OPERANDS];
8260 /* Array of alternatives, sorted in order of decreasing desirability. */
8261 int *alternative_order;
8263 /* Find out some information about this insn. */
8264 insn_code_number = recog_memoized (insn);
8265 /* We don't modify asm instructions. */
8266 if (insn_code_number < 0)
8269 n_operands = insn_n_operands[insn_code_number];
8270 n_alternatives = insn_n_alternatives[insn_code_number];
8272 if (n_alternatives == 0 || n_operands == 0)
8274 insn_extract (insn);
8276 /* Figure out which alternative currently matches. */
8277 if (! constrain_operands (insn_code_number, 1))
8280 alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
8281 alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
8282 alternative_order = (int *) alloca (n_alternatives * sizeof (int));
8283 bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
8284 bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));
8286 for (i = 0; i < n_operands; i++)
8288 enum machine_mode mode;
8292 op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
8293 for (j = 0; j < n_alternatives; j++)
8294 op_alt_regno[i][j] = -1;
8296 p = constraints[i] = insn_operand_constraint[insn_code_number][i];
8297 mode = insn_operand_mode[insn_code_number][i];
8299 /* Add the reject values for each alternative given by the constraints
8300 for this operand. */
8308 alternative_reject[j] += 3;
8310 alternative_reject[j] += 300;
8313 /* We won't change operands which are already registers. We
8314 also don't want to modify output operands. */
8315 regno = true_regnum (recog_operand[i]);
8317 || constraints[i][0] == '='
8318 || constraints[i][0] == '+')
8321 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8323 int class = (int) NO_REGS;
8325 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
8328 /* We found a register equal to this operand. Now look for all
8329 alternatives that can accept this register and have not been
8330 assigned a register they can use yet. */
8339 case '=': case '+': case '?':
8340 case '#': case '&': case '!':
8342 case '0': case '1': case '2': case '3': case '4':
8343 case 'm': case '<': case '>': case 'V': case 'o':
8344 case 'E': case 'F': case 'G': case 'H':
8345 case 's': case 'i': case 'n':
8346 case 'I': case 'J': case 'K': case 'L':
8347 case 'M': case 'N': case 'O': case 'P':
8348 #ifdef EXTRA_CONSTRAINT
8349 case 'Q': case 'R': case 'S': case 'T': case 'U':
8352 /* These don't say anything we care about. */
8356 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8361 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER (c)];
8364 case ',': case '\0':
8365 /* See if REGNO fits this alternative, and set it up as the
8366 replacement register if we don't have one for this
8368 if (op_alt_regno[i][j] == -1
8369 && reg_fits_class_p (gen_rtx (REG, mode, regno), class,
8372 alternative_nregs[j]++;
8373 op_alt_regno[i][j] = regno;
8385 /* Record all alternatives which are better or equal to the currently
8386 matching one in the alternative_order array. */
8387 for (i = j = 0; i < n_alternatives; i++)
8388 if (alternative_reject[i] <= alternative_reject[which_alternative])
8389 alternative_order[j++] = i;
8392 /* Sort it. Given a small number of alternatives, a dumb algorithm
8393 won't hurt too much. */
8394 for (i = 0; i < n_alternatives - 1; i++)
8397 int best_reject = alternative_reject[alternative_order[i]];
8398 int best_nregs = alternative_nregs[alternative_order[i]];
8401 for (j = i + 1; j < n_alternatives; j++)
8403 int this_reject = alternative_reject[alternative_order[j]];
8404 int this_nregs = alternative_nregs[alternative_order[j]];
8406 if (this_reject < best_reject
8407 || (this_reject == best_reject && this_nregs < best_nregs))
8410 best_reject = this_reject;
8411 best_nregs = this_nregs;
8415 tmp = alternative_order[best];
8416 alternative_order[best] = alternative_order[i];
8417 alternative_order[i] = tmp;
8420 /* Substitute the operands as determined by op_alt_regno for the best
8422 j = alternative_order[0];
8423 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8425 /* Pop back to the real obstacks while changing the insn. */
8428 for (i = 0; i < n_operands; i++)
8430 enum machine_mode mode = insn_operand_mode[insn_code_number][i];
8431 if (op_alt_regno[i][j] == -1)
8434 reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
8435 validate_change (insn, recog_operand_loc[i],
8436 gen_rtx (REG, mode, op_alt_regno[i][j]), 1);
8439 for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
8441 int op = recog_dup_num[i];
8442 enum machine_mode mode = insn_operand_mode[insn_code_number][op];
8444 if (op_alt_regno[op][j] == -1)
8447 reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
8448 validate_change (insn, recog_dup_loc[i],
8449 gen_rtx (REG, mode, op_alt_regno[op][j]), 1);
8452 /* Go back to the obstack we are using for temporary
8454 push_obstacks (&reload_obstack, &reload_obstack);
8456 return apply_change_group ();
8462 /* These two variables are used to pass information from
8463    reload_cse_record_set to reload_cse_check_clobber.  */
/* Nonzero once a store examined by reload_cse_check_clobber has been
   seen to overlap RELOAD_CSE_CHECK_SRC.  The caller clears this before
   each note_stores walk and tests it afterwards (see the PARALLEL
   handling in reload_cse_record_set).  */
8465 static int reload_cse_check_clobbered;
/* The SET_SRC value being tested for clobbers; the caller assigns this
   just before invoking note_stores.  */
8466 static rtx reload_cse_check_src;
8468 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC.  If it does, set
8469    RELOAD_CSE_CHECK_CLOBBERED.  This is called via note_stores.  The
8470    second argument, which is passed by note_stores, is ignored.  */
/* K&R-style definition: DEST is the location being stored into; IGNORE
   is the second note_stores callback argument, unused here.  */
8473 reload_cse_check_clobber (dest, ignore)
8477   if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8478     reload_cse_check_clobbered = 1;
8481 /* Record the result of a SET instruction. SET is the set pattern.
8482 BODY is the pattern of the insn that it came from. */
8485 reload_cse_record_set (set, body)
8491 enum machine_mode dest_mode;
8493 dest = SET_DEST (set);
8494 src = SET_SRC (set);
8495 dreg = true_regnum (dest);
8496 sreg = true_regnum (src);
8497 dest_mode = GET_MODE (dest);
8499 /* Some machines don't define AUTO_INC_DEC, but they still use push
8500 instructions. We need to catch that case here in order to
8501 invalidate the stack pointer correctly. Note that invalidating
8502 the stack pointer is different from invalidating DEST. */
8504 while (GET_CODE (x) == SUBREG
8505 || GET_CODE (x) == ZERO_EXTRACT
8506 || GET_CODE (x) == SIGN_EXTRACT
8507 || GET_CODE (x) == STRICT_LOW_PART)
8509 if (push_operand (x, GET_MODE (x)))
8511 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
8512 reload_cse_invalidate_rtx (dest, NULL_RTX);
8516 /* We can only handle an assignment to a register, or a store of a
8517 register to a memory location. For other cases, we just clobber
8518 the destination. We also have to just clobber if there are side
8519 effects in SRC or DEST. */
8520 if ((dreg < 0 && GET_CODE (dest) != MEM)
8521 || side_effects_p (src)
8522 || side_effects_p (dest))
8524 reload_cse_invalidate_rtx (dest, NULL_RTX);
8529 /* We don't try to handle values involving CC, because it's a pain
8530 to keep track of when they have to be invalidated. */
8531 if (reg_mentioned_p (cc0_rtx, src)
8532 || reg_mentioned_p (cc0_rtx, dest))
8534 reload_cse_invalidate_rtx (dest, NULL_RTX);
8539 /* If BODY is a PARALLEL, then we need to see whether the source of
8540 SET is clobbered by some other instruction in the PARALLEL. */
8541 if (GET_CODE (body) == PARALLEL)
8545 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8549 x = XVECEXP (body, 0, i);
8553 reload_cse_check_clobbered = 0;
8554 reload_cse_check_src = src;
8555 note_stores (x, reload_cse_check_clobber);
8556 if (reload_cse_check_clobbered)
8558 reload_cse_invalidate_rtx (dest, NULL_RTX);
8568 /* This is an assignment to a register. Update the value we
8569 have stored for the register. */
8574 /* This is a copy from one register to another. Any values
8575 which were valid for SREG are now valid for DREG. If the
8576 mode changes, we use gen_lowpart_common to extract only
8577 the part of the value that is copied. */
8578 reg_values[dreg] = 0;
8579 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8583 if (XEXP (x, 0) == 0)
8585 if (dest_mode == GET_MODE (XEXP (x, 0)))
8587 else if (GET_MODE_BITSIZE (dest_mode)
8588 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
8591 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8593 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, tmp,
8598 reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, src, NULL_RTX);
8600 /* We've changed DREG, so invalidate any values held by other
8601 registers that depend upon it. */
8602 reload_cse_invalidate_regno (dreg, dest_mode, 0);
8604 /* If this assignment changes more than one hard register,
8605 forget anything we know about the others. */
8606 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
8607 reg_values[dreg + i] = 0;
8609 else if (GET_CODE (dest) == MEM)
8611 /* Invalidate conflicting memory locations. */
8612 reload_cse_invalidate_mem (dest);
8614 /* If we're storing a register to memory, add DEST to the list
8616 if (sreg >= 0 && ! side_effects_p (dest))
8617 reg_values[sreg] = gen_rtx (EXPR_LIST, dest_mode, dest,
8622 /* We should have bailed out earlier. */