1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
32 #include "hard-reg-set.h"
35 #include "basic-block.h"
40 /* This file contains the reload pass of the compiler, which is
41 run after register allocation has been done. It checks that
42 each insn is valid (operands required to be in registers really
43 are in registers of the proper class) and fixes up invalid ones
44 by copying values temporarily into registers for the insns
47 The results of register allocation are described by the vector
48 reg_renumber; the insns still contain pseudo regs, but reg_renumber
49 can be used to find which hard reg, if any, a pseudo reg is in.
51 The technique we always use is to free up a few hard regs that are
52 called ``reload regs'', and for each place where a pseudo reg
53 must be in a hard reg, copy it temporarily into one of the reload regs.
55 All the pseudos that were formerly allocated to the hard regs that
56 are now in use as reload regs must be ``spilled''. This means
57 that they go to other hard regs, or to stack slots if no other
58 available hard regs can be found. Spilling can invalidate more
59 insns, requiring additional need for reloads, so we must keep checking
60 until the process stabilizes.
62 For machines with different classes of registers, we must keep track
63 of the register class needed for each reload, and make sure that
64 we allocate enough reload registers of each class.
66 The file reload.c contains the code that checks one insn for
67 validity and reports the reloads that it needs. This file
68 is in charge of scanning the entire rtl code, accumulating the
69 reload needs, spilling, assigning reload registers to use for
70 fixing up each insn, and generating the new insns to copy values
71 into the reload registers. */
/* If the target does not define a cost for moving a value between two
   registers, assume the default cost of 2 (one move insn).  The missing
   #endif terminating this guard is restored here.  */
#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif
78 /* During reload_as_needed, element N contains a REG rtx for the hard reg
79 into which reg N has been reloaded (perhaps for a previous insn). */
80 static rtx *reg_last_reload_reg;
82 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
83 for an output reload that stores into reg N. */
84 static char *reg_has_output_reload;
86 /* Indicates which hard regs are reload-registers for an output reload
87 in the current insn. */
88 static HARD_REG_SET reg_is_output_reload;
90 /* Element N is the constant value to which pseudo reg N is equivalent,
91 or zero if pseudo reg N is not equivalent to a constant.
92 find_reloads looks at this in order to replace pseudo reg N
93 with the constant it stands for. */
94 rtx *reg_equiv_constant;
96 /* Element N is a memory location to which pseudo reg N is equivalent,
97 prior to any register elimination (such as frame pointer to stack
98 pointer). Depending on whether or not it is a valid address, this value
99 is transferred to either reg_equiv_address or reg_equiv_mem. */
100 rtx *reg_equiv_memory_loc;
102 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
103 This is used when the address is not valid as a memory address
104 (because its displacement is too big for the machine.) */
105 rtx *reg_equiv_address;
/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.
   NOTE(review): the declaration `rtx *reg_equiv_mem;' this comment
   describes appears to be missing here -- reload () assigns
   reg_equiv_mem below; confirm against the original source.  */
111 /* Widest width in which each pseudo reg is referred to (via subreg). */
112 static int *reg_max_ref_width;
114 /* Element N is the insn that initialized reg N from its equivalent
115 constant or memory slot. */
116 static rtx *reg_equiv_init;
118 /* During reload_as_needed, element N contains the last pseudo regno reloaded
119 into hard register N. If that pseudo reg occupied more than one register,
120 reg_reloaded_contents points to that pseudo for each spill register in
121 use; all of these must remain set for an inheritance to occur. */
122 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
124 /* During reload_as_needed, element N contains the insn for which
125 hard register N was last used. Its contents are significant only
126 when reg_reloaded_valid is set for this register. */
127 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
129 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid */
130 static HARD_REG_SET reg_reloaded_valid;
131 /* Indicate if the register was dead at the end of the reload.
132 This is only valid if reg_reloaded_contents is set and valid. */
133 static HARD_REG_SET reg_reloaded_dead;
/* Number of spill-regs so far; number of valid elements of spill_regs.
   NOTE(review): the declaration (`static int n_spills;') this comment
   describes appears to be missing below it -- confirm against the
   original source.  */
/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
144 /* In parallel with spill_regs, contains nonzero for a spill reg
145 that was stored after the last time it was used.
146 The precise value is the insn generated to do the store. */
147 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
149 /* This table is the inverse mapping of spill_regs:
150 indexed by hard reg number,
151 it contains the position of that reg in spill_regs,
152 or -1 for something that is not in spill_regs. */
153 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
155 /* This reg set indicates registers that may not be used for retrying global
156 allocation. The registers that may not be used include all spill registers
157 and the frame pointer (if we are using one). */
158 HARD_REG_SET forbidden_regs;
/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;
169 /* Describes order of use of registers for reloading
170 of spilled pseudo-registers. `spills' is the number of
171 elements that are actually valid; new ones are added at the end. */
172 static short spill_regs[FIRST_PSEUDO_REGISTER];
174 /* This reg set indicates those registers that have been used a spill
175 registers. This information is used in reorg.c, to help figure out
176 what registers are live at any point. It is assumed that all spill_regs
177 are dead at every CODE_LABEL. */
179 HARD_REG_SET used_spill_regs;
181 /* Index of last register assigned as a spill register. We allocate in
182 a round-robin fashion. */
184 static int last_spill_reg;
186 /* Describes order of preference for putting regs into spill_regs.
187 Contains the numbers of all the hard regs, in order most preferred first.
188 This order is different for each function.
189 It is set up by order_regs_for_reload.
190 Empty elements at the end contain -1. */
191 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
193 /* 1 for a hard register that appears explicitly in the rtl
194 (for example, function value registers, special registers
195 used by insns, structure value pointer registers). */
196 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
198 /* Indicates if a register was counted against the need for
199 groups. 0 means it can count against max_nongroup instead. */
200 static HARD_REG_SET counted_for_groups;
202 /* Indicates if a register was counted against the need for
203 non-groups. 0 means it can become part of a new group.
204 During choose_reload_regs, 1 here means don't use this reg
205 as part of a group, even if it seems to be otherwise ok. */
206 static HARD_REG_SET counted_for_nongroups;
/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */
static char spill_indirect_levels;
217 /* Nonzero if indirect addressing is supported when the innermost MEM is
218 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
219 which these are valid is the same as spill_indirect_levels, above. */
221 char indirect_symref_ok;
223 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
225 char double_reg_address_ok;
227 /* Record the stack slot for each spilled hard register. */
229 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
231 /* Width allocated so far for that stack slot. */
233 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
235 /* Indexed by register class and basic block number, nonzero if there is
236 any need for a spill register of that class in that basic block.
237 The pointer is 0 if we did stupid allocation and don't know
238 the structure of basic blocks. */
240 char *basic_block_needs[N_REG_CLASSES];
242 /* First uid used by insns created by reload in this function.
243 Used in find_equiv_reg. */
244 int reload_first_uid;
246 /* Flag set by local-alloc or global-alloc if anything is live in
247 a call-clobbered reg across calls. */
249 int caller_save_needed;
/* The register class to use for a base register when reloading an
   address.  This is normally BASE_REG_CLASS, but it may be different
   when using SMALL_REGISTER_CLASSES and passing parameters in
   registers.  */
enum reg_class reload_address_base_reg_class;
/* The register class to use for an index register when reloading an
   address.  This is normally INDEX_REG_CLASS, but it may be different
   when using SMALL_REGISTER_CLASSES and passing parameters in
   registers.  */
enum reg_class reload_address_index_reg_class;
263 /* Set to 1 while reload_as_needed is operating.
264 Required by some machines to handle any generated moves differently. */
266 int reload_in_progress = 0;
268 /* These arrays record the insn_code of insns that may be needed to
269 perform input and output reloads of special objects. They provide a
270 place to pass a scratch register. */
272 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
273 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
struct obstack reload_obstack;
/* Marker for the start of the obstack's storage, so it can be freed.  */
char *reload_firstobj;
/* Allocation hooks used by the obstack machinery for reload_obstack.  */
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
285 /* List of labels that must never be deleted. */
286 extern rtx forced_labels;
288 /* Allocation number table from global register allocation. */
289 extern int *reg_allocno;
291 /* This structure is used to record information about register eliminations.
292 Each array entry describes one possible way of eliminating a register
293 in favor of another. If there is more than one way of eliminating a
294 particular register, the most preferred should be specified first. */
296 static struct elim_table
298 int from; /* Register number to be eliminated. */
299 int to; /* Register number used as replacement. */
300 int initial_offset; /* Initial difference between values. */
301 int can_eliminate; /* Non-zero if this elimination can be done. */
302 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
303 insns made by reload. */
304 int offset; /* Current offset between the two regs. */
305 int max_offset; /* Maximum offset between the two regs. */
306 int previous_offset; /* Offset at end of previous insn. */
307 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
308 rtx from_rtx; /* REG rtx for the register to be eliminated.
309 We cannot simply compare the number since
310 we might then spuriously replace a hard
311 register corresponding to a pseudo
312 assigned to the reg to be eliminated. */
313 rtx to_rtx; /* REG rtx for the replacement. */
316 /* If a set of eliminable registers was specified, define the table from it.
317 Otherwise, default to the normal case of the frame pointer being
318 replaced by the stack pointer. */
320 #ifdef ELIMINABLE_REGS
323 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
326 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
328 /* Record the number of pending eliminations that have an offset not equal
329 to their initial offset. If non-zero, we use a new copy of each
330 replacement result in any insns encountered. */
331 static int num_not_at_initial_offset;
333 /* Count the number of registers that we may be able to eliminate. */
334 static int num_eliminable;
336 /* For each label, we record the offset of each elimination. If we reach
337 a label by more than one path and an offset differs, we cannot do the
338 elimination. This information is indexed by the number of the label.
339 The first table is an array of flags that records whether we have yet
340 encountered a label and the second table is an array of arrays, one
341 entry in the latter array for each elimination. */
343 static char *offsets_known_at;
344 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
346 /* Number of labels in the current function. */
348 static int num_labels;
350 struct hard_reg_n_uses { int regno; int uses; };
352 static int possible_group_p PROTO((int, int *));
353 static void count_possible_groups PROTO((int *, enum machine_mode *,
355 static int modes_equiv_for_class_p PROTO((enum machine_mode,
358 static void spill_failure PROTO((rtx));
359 static int new_spill_reg PROTO((int, int, int *, int *, int,
361 static void delete_dead_insn PROTO((rtx));
362 static void alter_reg PROTO((int, int));
363 static void mark_scratch_live PROTO((rtx));
364 static void set_label_offsets PROTO((rtx, rtx, int));
365 static int eliminate_regs_in_insn PROTO((rtx, int));
366 static void mark_not_eliminable PROTO((rtx, rtx));
367 static int spill_hard_reg PROTO((int, int, FILE *, int));
368 static void scan_paradoxical_subregs PROTO((rtx));
369 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
370 static void order_regs_for_reload PROTO((int));
371 static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
372 static void reload_as_needed PROTO((rtx, int));
373 static void forget_old_reloads_1 PROTO((rtx, rtx));
374 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
375 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
377 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
379 static int reload_reg_free_p PROTO((int, int, enum reload_type));
380 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
381 static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx));
382 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
383 static int allocate_reload_reg PROTO((int, rtx, int, int));
384 static void choose_reload_regs PROTO((rtx, rtx));
385 static void merge_assigned_reloads PROTO((rtx));
386 static void emit_reload_insns PROTO((rtx));
387 static void delete_output_reload PROTO((rtx, int, rtx));
388 static void inc_for_reload PROTO((rtx, rtx, int));
389 static int constraint_accepts_reg_p PROTO((char *, rtx));
390 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
391 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
392 static void reload_cse_invalidate_mem PROTO((rtx));
393 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
394 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
395 static int reload_cse_noop_set_p PROTO((rtx, rtx));
396 static int reload_cse_simplify_set PROTO((rtx, rtx));
397 static int reload_cse_simplify_operands PROTO((rtx));
398 static void reload_cse_check_clobber PROTO((rtx, rtx));
399 static void reload_cse_record_set PROTO((rtx, rtx));
400 static void reload_cse_delete_death_notes PROTO((rtx));
401 static void reload_cse_no_longer_dead PROTO((int, enum machine_mode));
403 /* Initialize the reload pass once per compilation. */
410 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
411 Set spill_indirect_levels to the number of levels such addressing is
412 permitted, zero if it is not permitted at all. */
415 = gen_rtx_MEM (Pmode,
417 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
419 spill_indirect_levels = 0;
421 while (memory_address_p (QImode, tem))
423 spill_indirect_levels++;
424 tem = gen_rtx_MEM (Pmode, tem);
427 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
429 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
430 indirect_symref_ok = memory_address_p (QImode, tem);
432 /* See if reg+reg is a valid (and offsettable) address. */
434 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
436 tem = gen_rtx_PLUS (Pmode,
437 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
438 gen_rtx_REG (Pmode, i));
439 /* This way, we make sure that reg+reg is an offsettable address. */
440 tem = plus_constant (tem, 4);
442 if (memory_address_p (QImode, tem))
444 double_reg_address_ok = 1;
449 /* Initialize obstack for our rtl allocation. */
450 gcc_obstack_init (&reload_obstack);
451 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
453 /* Decide which register class should be used when reloading
454 addresses. If we are using SMALL_REGISTER_CLASSES, and any
455 parameters are passed in registers, then we do not want to use
456 those registers when reloading an address. Otherwise, if a
457 function argument needs a reload, we may wind up clobbering
458 another argument to the function which was already computed. If
459 we find a subset class which simply avoids those registers, we
460 use it instead. ??? It would be better to only use the
461 restricted class when we actually are loading function arguments,
462 but that is hard to determine. */
463 reload_address_base_reg_class = BASE_REG_CLASS;
464 reload_address_index_reg_class = INDEX_REG_CLASS;
465 if (SMALL_REGISTER_CLASSES)
468 HARD_REG_SET base, index;
471 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
472 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
473 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
475 if (FUNCTION_ARG_REGNO_P (regno))
477 CLEAR_HARD_REG_BIT (base, regno);
478 CLEAR_HARD_REG_BIT (index, regno);
482 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
484 for (p = reg_class_subclasses[BASE_REG_CLASS];
485 *p != LIM_REG_CLASSES;
488 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
491 reload_address_base_reg_class = *p;
496 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
498 for (p = reg_class_subclasses[INDEX_REG_CLASS];
499 *p != LIM_REG_CLASSES;
502 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
505 reload_address_index_reg_class = *p;
512 /* Main entry point for the reload pass.
514 FIRST is the first insn of the function being compiled.
516 GLOBAL nonzero means we were called from global_alloc
517 and should attempt to reallocate any pseudoregs that we
518 displace from hard regs we will use for reloads.
519 If GLOBAL is zero, we do not have enough information to do that,
520 so any pseudo reg that is spilled must go to the stack.
522 DUMPFILE is the global-reg debugging dump file stream, or 0.
523 If it is nonzero, messages are written to it to describe
524 which registers are seized as reload regs, which pseudo regs
525 are spilled from them, and where the pseudo regs are reallocated to.
527 Return value is nonzero if reload failed
528 and we must not do any more for this function. */
531 reload (first, global, dumpfile)
537 register int i, j, k;
539 register struct elim_table *ep;
541 /* The two pointers used to track the true location of the memory used
542 for label offsets. */
543 char *real_known_ptr = NULL_PTR;
544 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
546 int something_changed;
547 int something_needs_reloads;
548 int something_needs_elimination;
549 int new_basic_block_needs;
550 enum reg_class caller_save_spill_class = NO_REGS;
551 int caller_save_group_size = 1;
553 /* Nonzero means we couldn't get enough spill regs. */
556 /* The basic block number currently being processed for INSN. */
559 /* Make sure even insns with volatile mem refs are recognizable. */
562 /* Enable find_equiv_reg to distinguish insns made by reload. */
563 reload_first_uid = get_max_uid ();
565 for (i = 0; i < N_REG_CLASSES; i++)
566 basic_block_needs[i] = 0;
568 #ifdef SECONDARY_MEMORY_NEEDED
569 /* Initialize the secondary memory table. */
570 clear_secondary_mem ();
573 /* Remember which hard regs appear explicitly
574 before we merge into `regs_ever_live' the ones in which
575 pseudo regs have been allocated. */
576 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
578 /* We don't have a stack slot for any spill reg yet. */
579 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
580 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
582 /* Initialize the save area information for caller-save, in case some
586 /* Compute which hard registers are now in use
587 as homes for pseudo registers.
588 This is done here rather than (eg) in global_alloc
589 because this point is reached even if not optimizing. */
590 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
593 /* A function that receives a nonlocal goto must save all call-saved
595 if (current_function_has_nonlocal_label)
596 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
598 if (! call_used_regs[i] && ! fixed_regs[i])
599 regs_ever_live[i] = 1;
602 for (i = 0; i < scratch_list_length; i++)
604 mark_scratch_live (scratch_list[i]);
606 /* Make sure that the last insn in the chain
607 is not something that needs reloading. */
608 emit_note (NULL_PTR, NOTE_INSN_DELETED);
610 /* Find all the pseudo registers that didn't get hard regs
611 but do have known equivalent constants or memory slots.
612 These include parameters (known equivalent to parameter slots)
613 and cse'd or loop-moved constant memory addresses.
615 Record constant equivalents in reg_equiv_constant
616 so they will be substituted by find_reloads.
617 Record memory equivalents in reg_mem_equiv so they can
618 be substituted eventually by altering the REG-rtx's. */
620 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
621 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
622 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
623 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
624 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
625 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
626 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
627 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
628 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
629 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
630 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
631 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
633 if (SMALL_REGISTER_CLASSES)
634 CLEAR_HARD_REG_SET (forbidden_regs);
636 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
637 Also find all paradoxical subregs and find largest such for each pseudo.
638 On machines with small register classes, record hard registers that
639 are used for user variables. These can never be used for spills.
640 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
641 caller-saved registers must be marked live. */
643 for (insn = first; insn; insn = NEXT_INSN (insn))
645 rtx set = single_set (insn);
647 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
648 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
649 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
650 if (! call_used_regs[i])
651 regs_ever_live[i] = 1;
653 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
655 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
657 #ifdef LEGITIMATE_PIC_OPERAND_P
658 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
659 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
663 rtx x = XEXP (note, 0);
664 i = REGNO (SET_DEST (set));
665 if (i > LAST_VIRTUAL_REGISTER)
667 if (GET_CODE (x) == MEM)
669 /* If the operand is a PLUS, the MEM may be shared,
670 so make sure we have an unshared copy here. */
671 if (GET_CODE (XEXP (x, 0)) == PLUS)
674 reg_equiv_memory_loc[i] = x;
676 else if (CONSTANT_P (x))
678 if (LEGITIMATE_CONSTANT_P (x))
679 reg_equiv_constant[i] = x;
681 reg_equiv_memory_loc[i]
682 = force_const_mem (GET_MODE (SET_DEST (set)), x);
687 /* If this register is being made equivalent to a MEM
688 and the MEM is not SET_SRC, the equivalencing insn
689 is one with the MEM as a SET_DEST and it occurs later.
690 So don't mark this insn now. */
691 if (GET_CODE (x) != MEM
692 || rtx_equal_p (SET_SRC (set), x))
693 reg_equiv_init[i] = insn;
698 /* If this insn is setting a MEM from a register equivalent to it,
699 this is the equivalencing insn. */
700 else if (set && GET_CODE (SET_DEST (set)) == MEM
701 && GET_CODE (SET_SRC (set)) == REG
702 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
703 && rtx_equal_p (SET_DEST (set),
704 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
705 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
707 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
708 scan_paradoxical_subregs (PATTERN (insn));
711 /* Does this function require a frame pointer? */
713 frame_pointer_needed = (! flag_omit_frame_pointer
714 #ifdef EXIT_IGNORE_STACK
715 /* ?? If EXIT_IGNORE_STACK is set, we will not save
716 and restore sp for alloca. So we can't eliminate
717 the frame pointer in that case. At some point,
718 we should improve this by emitting the
719 sp-adjusting insns for this case. */
720 || (current_function_calls_alloca
721 && EXIT_IGNORE_STACK)
723 || FRAME_POINTER_REQUIRED);
727 /* Initialize the table of registers to eliminate. The way we do this
728 depends on how the eliminable registers were defined. */
729 #ifdef ELIMINABLE_REGS
730 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
732 ep->can_eliminate = ep->can_eliminate_previous
733 = (CAN_ELIMINATE (ep->from, ep->to)
734 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
737 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
738 = ! frame_pointer_needed;
741 /* Count the number of eliminable registers and build the FROM and TO
742 REG rtx's. Note that code in gen_rtx will cause, e.g.,
743 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
744 We depend on this. */
745 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
747 num_eliminable += ep->can_eliminate;
748 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
749 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
752 num_labels = max_label_num () - get_first_label_num ();
754 /* Allocate the tables used to store offset information at labels. */
755 /* We used to use alloca here, but the size of what it would try to
756 allocate would occasionally cause it to exceed the stack limit and
757 cause a core dump. */
758 real_known_ptr = xmalloc (num_labels);
760 = (int (*)[NUM_ELIMINABLE_REGS])
761 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
763 offsets_known_at = real_known_ptr - get_first_label_num ();
765 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
767 /* Alter each pseudo-reg rtx to contain its hard reg number.
768 Assign stack slots to the pseudos that lack hard regs or equivalents.
769 Do not touch virtual registers. */
771 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
774 /* If we have some registers we think can be eliminated, scan all insns to
775 see if there is an insn that sets one of these registers to something
776 other than itself plus a constant. If so, the register cannot be
777 eliminated. Doing this scan here eliminates an extra pass through the
778 main reload loop in the most common case where register elimination
780 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
781 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
782 || GET_CODE (insn) == CALL_INSN)
783 note_stores (PATTERN (insn), mark_not_eliminable);
785 #ifndef REGISTER_CONSTRAINTS
786 /* If all the pseudo regs have hard regs,
787 except for those that are never referenced,
788 we know that no reloads are needed. */
789 /* But that is not true if there are register constraints, since
790 in that case some pseudos might be in the wrong kind of hard reg. */
792 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
793 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
796 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
798 free (real_known_ptr);
804 /* Compute the order of preference for hard registers to spill.
805 Store them by decreasing preference in potential_reload_regs. */
807 order_regs_for_reload (global);
809 /* So far, no hard regs have been spilled. */
811 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
812 spill_reg_order[i] = -1;
814 /* Initialize to -1, which means take the first spill register. */
817 /* On most machines, we can't use any register explicitly used in the
818 rtl as a spill register. But on some, we have to. Those will have
819 taken care to keep the life of hard regs as short as possible. */
821 if (! SMALL_REGISTER_CLASSES)
822 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
824 /* Spill any hard regs that we know we can't eliminate. */
825 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
826 if (! ep->can_eliminate)
827 spill_hard_reg (ep->from, global, dumpfile, 1);
829 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
830 if (frame_pointer_needed)
831 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
835 for (i = 0; i < N_REG_CLASSES; i++)
837 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
838 bzero (basic_block_needs[i], n_basic_blocks);
841 /* From now on, we need to emit any moves without making new pseudos. */
842 reload_in_progress = 1;
844 /* This loop scans the entire function each go-round
845 and repeats until one repetition spills no additional hard regs. */
847 /* This flag is set when a pseudo reg is spilled,
848 to require another pass. Note that getting an additional reload
849 reg does not necessarily imply any pseudo reg was spilled;
850 sometimes we find a reload reg that no pseudo reg was allocated in. */
851 something_changed = 1;
852 /* This flag is set if there are any insns that require reloading. */
853 something_needs_reloads = 0;
854 /* This flag is set if there are any insns that require register
856 something_needs_elimination = 0;
857 while (something_changed)
861 /* For each class, number of reload regs needed in that class.
862 This is the maximum over all insns of the needs in that class
863 of the individual insn. */
864 int max_needs[N_REG_CLASSES];
865 /* For each class, size of group of consecutive regs
866 that is needed for the reloads of this class. */
867 int group_size[N_REG_CLASSES];
868 /* For each class, max number of consecutive groups needed.
869 (Each group contains group_size[CLASS] consecutive registers.) */
870 int max_groups[N_REG_CLASSES];
871 /* For each class, max number needed of regs that don't belong
872 to any of the groups. */
873 int max_nongroups[N_REG_CLASSES];
874 /* For each class, the machine mode which requires consecutive
875 groups of regs of that class.
876 If two different modes ever require groups of one class,
877 they must be the same size and equally restrictive for that class,
878 otherwise we can't handle the complexity. */
879 enum machine_mode group_mode[N_REG_CLASSES];
880 /* Record the insn where each maximum need is first found. */
881 rtx max_needs_insn[N_REG_CLASSES];
882 rtx max_groups_insn[N_REG_CLASSES];
883 rtx max_nongroups_insn[N_REG_CLASSES];
885 HOST_WIDE_INT starting_frame_size;
886 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
887 int previous_frame_pointer_needed = frame_pointer_needed;
889 static char *reg_class_names[] = REG_CLASS_NAMES;
891 something_changed = 0;
892 bzero ((char *) max_needs, sizeof max_needs);
893 bzero ((char *) max_groups, sizeof max_groups);
894 bzero ((char *) max_nongroups, sizeof max_nongroups);
895 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
896 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
897 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
898 bzero ((char *) group_size, sizeof group_size);
899 for (i = 0; i < N_REG_CLASSES; i++)
900 group_mode[i] = VOIDmode;
902 /* Keep track of which basic blocks are needing the reloads. */
905 /* Remember whether any element of basic_block_needs
906 changes from 0 to 1 in this pass. */
907 new_basic_block_needs = 0;
909 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
910 here because the stack size may be a part of the offset computation
911 for register elimination, and there might have been new stack slots
912 created in the last iteration of this loop. */
913 assign_stack_local (BLKmode, 0, 0);
915 starting_frame_size = get_frame_size ();
917 /* Reset all offsets on eliminable registers to their initial values. */
918 #ifdef ELIMINABLE_REGS
919 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
921 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
922 ep->previous_offset = ep->offset
923 = ep->max_offset = ep->initial_offset;
926 #ifdef INITIAL_FRAME_POINTER_OFFSET
927 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
929 if (!FRAME_POINTER_REQUIRED)
931 reg_eliminate[0].initial_offset = 0;
933 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
934 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
937 num_not_at_initial_offset = 0;
939 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
941 /* Set a known offset for each forced label to be at the initial offset
942 of each elimination. We do this because we assume that all
943 computed jumps occur from a location where each elimination is
944 at its initial offset. */
946 for (x = forced_labels; x; x = XEXP (x, 1))
948 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
950 /* For each pseudo register that has an equivalent location defined,
951 try to eliminate any eliminable registers (such as the frame pointer)
952 assuming initial offsets for the replacement register, which
955 If the resulting location is directly addressable, substitute
956 the MEM we just got directly for the old REG.
958 If it is not addressable but is a constant or the sum of a hard reg
959 and constant, it is probably not addressable because the constant is
960 out of range, in that case record the address; we will generate
961 hairy code to compute the address in a register each time it is
962 needed. Similarly if it is a hard register, but one that is not
963 valid as an address register.
965 If the location is not addressable, but does not have one of the
966 above forms, assign a stack slot. We have to do this to avoid the
967 potential of producing lots of reloads if, e.g., a location involves
968 a pseudo that didn't get a hard register and has an equivalent memory
969 location that also involves a pseudo that didn't get a hard register.
971 Perhaps at some point we will improve reload_when_needed handling
972 so this problem goes away. But that's very hairy. */
974 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
975 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
977 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
979 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
981 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
982 else if (CONSTANT_P (XEXP (x, 0))
983 || (GET_CODE (XEXP (x, 0)) == REG
984 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
985 || (GET_CODE (XEXP (x, 0)) == PLUS
986 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
987 && (REGNO (XEXP (XEXP (x, 0), 0))
988 < FIRST_PSEUDO_REGISTER)
989 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
990 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
993 /* Make a new stack slot. Then indicate that something
994 changed so we go back and recompute offsets for
995 eliminable registers because the allocation of memory
996 below might change some offset. reg_equiv_{mem,address}
997 will be set up for this pseudo on the next pass around
999 reg_equiv_memory_loc[i] = 0;
1000 reg_equiv_init[i] = 0;
1002 something_changed = 1;
1006 /* If we allocated another pseudo to the stack, redo elimination
1008 if (something_changed)
1011 /* If caller-saves needs a group, initialize the group to include
1012 the size and mode required for caller-saves. */
1014 if (caller_save_group_size > 1)
1016 group_mode[(int) caller_save_spill_class] = Pmode;
1017 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1020 /* Compute the most additional registers needed by any instruction.
1021 Collect information separately for each class of regs. */
1023 for (insn = first; insn; insn = NEXT_INSN (insn))
1025 if (global && this_block + 1 < n_basic_blocks
1026 && insn == basic_block_head[this_block+1])
1029 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1030 might include REG_LABEL), we need to see what effects this
1031 has on the known offsets at labels. */
1033 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1034 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1035 && REG_NOTES (insn) != 0))
1036 set_label_offsets (insn, insn, 0);
1038 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1040 /* Nonzero means don't use a reload reg that overlaps
1041 the place where a function value can be returned. */
1042 rtx avoid_return_reg = 0;
1044 rtx old_body = PATTERN (insn);
1045 int old_code = INSN_CODE (insn);
1046 rtx old_notes = REG_NOTES (insn);
1047 int did_elimination = 0;
1049 /* To compute the number of reload registers of each class
1050 needed for an insn, we must simulate what choose_reload_regs
1051 can do. We do this by splitting an insn into an "input" and
1052 an "output" part. RELOAD_OTHER reloads are used in both.
1053 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1054 which must be live over the entire input section of reloads,
1055 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1056 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1059 The registers needed for output are RELOAD_OTHER and
1060 RELOAD_FOR_OUTPUT, which are live for the entire output
1061 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1062 reloads for each operand.
1064 The total number of registers needed is the maximum of the
1065 inputs and outputs. */
1069 /* [0] is normal, [1] is nongroup. */
1070 int regs[2][N_REG_CLASSES];
1071 int groups[N_REG_CLASSES];
1074 /* Each `struct needs' corresponds to one RELOAD_... type. */
1078 struct needs output;
1080 struct needs other_addr;
1081 struct needs op_addr;
1082 struct needs op_addr_reload;
1083 struct needs in_addr[MAX_RECOG_OPERANDS];
1084 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1085 struct needs out_addr[MAX_RECOG_OPERANDS];
1086 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1089 /* If needed, eliminate any eliminable registers. */
1091 did_elimination = eliminate_regs_in_insn (insn, 0);
1093 /* Set avoid_return_reg if this is an insn
1094 that might use the value of a function call. */
1095 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
1097 if (GET_CODE (PATTERN (insn)) == SET)
1098 after_call = SET_DEST (PATTERN (insn));
1099 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1100 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1101 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1105 else if (SMALL_REGISTER_CLASSES && after_call != 0
1106 && !(GET_CODE (PATTERN (insn)) == SET
1107 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
1108 && GET_CODE (PATTERN (insn)) != USE)
1110 if (reg_referenced_p (after_call, PATTERN (insn)))
1111 avoid_return_reg = after_call;
1115 /* Analyze the instruction. */
1116 find_reloads (insn, 0, spill_indirect_levels, global,
1119 /* Remember for later shortcuts which insns had any reloads or
1120 register eliminations.
1122 One might think that it would be worthwhile to mark insns
1123 that need register replacements but not reloads, but this is
1124 not safe because find_reloads may do some manipulation of
1125 the insn (such as swapping commutative operands), which would
1126 be lost when we restore the old pattern after register
1127 replacement. So the actions of find_reloads must be redone in
1128 subsequent passes or in reload_as_needed.
1130 However, it is safe to mark insns that need reloads
1131 but not register replacement. */
1133 PUT_MODE (insn, (did_elimination ? QImode
1134 : n_reloads ? HImode
1135 : GET_MODE (insn) == DImode ? DImode
1138 /* Discard any register replacements done. */
1139 if (did_elimination)
1141 obstack_free (&reload_obstack, reload_firstobj);
1142 PATTERN (insn) = old_body;
1143 INSN_CODE (insn) = old_code;
1144 REG_NOTES (insn) = old_notes;
1145 something_needs_elimination = 1;
1148 /* If this insn has no reloads, we need not do anything except
1149 in the case of a CALL_INSN when we have caller-saves and
1150 caller-save needs reloads. */
1153 && ! (GET_CODE (insn) == CALL_INSN
1154 && caller_save_spill_class != NO_REGS))
1157 something_needs_reloads = 1;
1158 bzero ((char *) &insn_needs, sizeof insn_needs);
1160 /* Count each reload once in every class
1161 containing the reload's own class. */
1163 for (i = 0; i < n_reloads; i++)
1165 register enum reg_class *p;
1166 enum reg_class class = reload_reg_class[i];
1168 enum machine_mode mode;
1169 struct needs *this_needs;
1171 /* Don't count the dummy reloads, for which one of the
1172 regs mentioned in the insn can be used for reloading.
1173 Don't count optional reloads.
1174 Don't count reloads that got combined with others. */
1175 if (reload_reg_rtx[i] != 0
1176 || reload_optional[i] != 0
1177 || (reload_out[i] == 0 && reload_in[i] == 0
1178 && ! reload_secondary_p[i]))
1181 /* Show that a reload register of this class is needed
1182 in this basic block. We do not use insn_needs and
1183 insn_groups because they are overly conservative for
1185 if (global && ! basic_block_needs[(int) class][this_block])
1187 basic_block_needs[(int) class][this_block] = 1;
1188 new_basic_block_needs = 1;
1191 mode = reload_inmode[i];
1192 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1193 mode = reload_outmode[i];
1194 size = CLASS_MAX_NREGS (class, mode);
1196 /* Decide which time-of-use to count this reload for. */
1197 switch (reload_when_needed[i])
1200 this_needs = &insn_needs.other;
1202 case RELOAD_FOR_INPUT:
1203 this_needs = &insn_needs.input;
1205 case RELOAD_FOR_OUTPUT:
1206 this_needs = &insn_needs.output;
1208 case RELOAD_FOR_INSN:
1209 this_needs = &insn_needs.insn;
1211 case RELOAD_FOR_OTHER_ADDRESS:
1212 this_needs = &insn_needs.other_addr;
1214 case RELOAD_FOR_INPUT_ADDRESS:
1215 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1217 case RELOAD_FOR_INPADDR_ADDRESS:
1218 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1220 case RELOAD_FOR_OUTPUT_ADDRESS:
1221 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1223 case RELOAD_FOR_OUTADDR_ADDRESS:
1224 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1226 case RELOAD_FOR_OPERAND_ADDRESS:
1227 this_needs = &insn_needs.op_addr;
1229 case RELOAD_FOR_OPADDR_ADDR:
1230 this_needs = &insn_needs.op_addr_reload;
1236 enum machine_mode other_mode, allocate_mode;
1238 /* Count number of groups needed separately from
1239 number of individual regs needed. */
1240 this_needs->groups[(int) class]++;
1241 p = reg_class_superclasses[(int) class];
1242 while (*p != LIM_REG_CLASSES)
1243 this_needs->groups[(int) *p++]++;
1245 /* Record size and mode of a group of this class. */
1246 /* If more than one size group is needed,
1247 make all groups the largest needed size. */
1248 if (group_size[(int) class] < size)
1250 other_mode = group_mode[(int) class];
1251 allocate_mode = mode;
1253 group_size[(int) class] = size;
1254 group_mode[(int) class] = mode;
1259 allocate_mode = group_mode[(int) class];
1262 /* Crash if two dissimilar machine modes both need
1263 groups of consecutive regs of the same class. */
1265 if (other_mode != VOIDmode && other_mode != allocate_mode
1266 && ! modes_equiv_for_class_p (allocate_mode,
1268 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1273 this_needs->regs[reload_nongroup[i]][(int) class] += 1;
1274 p = reg_class_superclasses[(int) class];
1275 while (*p != LIM_REG_CLASSES)
1276 this_needs->regs[reload_nongroup[i]][(int) *p++] += 1;
1282 /* All reloads have been counted for this insn;
1283 now merge the various times of use.
1284 This sets insn_needs, etc., to the maximum total number
1285 of registers needed at any point in this insn. */
1287 for (i = 0; i < N_REG_CLASSES; i++)
1289 int in_max, out_max;
1291 /* Compute normal and nongroup needs. */
1292 for (j = 0; j <= 1; j++)
1294 for (in_max = 0, out_max = 0, k = 0;
1295 k < reload_n_operands; k++)
1299 (insn_needs.in_addr[k].regs[j][i]
1300 + insn_needs.in_addr_addr[k].regs[j][i]));
1302 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1305 insn_needs.out_addr_addr[k].regs[j][i]);
1308 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1309 and operand addresses but not things used to reload
1310 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1311 don't conflict with things needed to reload inputs or
1314 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1315 insn_needs.op_addr_reload.regs[j][i]),
1318 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1320 insn_needs.input.regs[j][i]
1321 = MAX (insn_needs.input.regs[j][i]
1322 + insn_needs.op_addr.regs[j][i]
1323 + insn_needs.insn.regs[j][i],
1324 in_max + insn_needs.input.regs[j][i]);
1326 insn_needs.output.regs[j][i] += out_max;
1327 insn_needs.other.regs[j][i]
1328 += MAX (MAX (insn_needs.input.regs[j][i],
1329 insn_needs.output.regs[j][i]),
1330 insn_needs.other_addr.regs[j][i]);
1334 /* Now compute group needs. */
1335 for (in_max = 0, out_max = 0, j = 0;
1336 j < reload_n_operands; j++)
1338 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1339 in_max = MAX (in_max,
1340 insn_needs.in_addr_addr[j].groups[i]);
1342 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1344 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1347 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1348 insn_needs.op_addr_reload.groups[i]),
1350 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1352 insn_needs.input.groups[i]
1353 = MAX (insn_needs.input.groups[i]
1354 + insn_needs.op_addr.groups[i]
1355 + insn_needs.insn.groups[i],
1356 in_max + insn_needs.input.groups[i]);
1358 insn_needs.output.groups[i] += out_max;
1359 insn_needs.other.groups[i]
1360 += MAX (MAX (insn_needs.input.groups[i],
1361 insn_needs.output.groups[i]),
1362 insn_needs.other_addr.groups[i]);
1365 /* If this is a CALL_INSN and caller-saves will need
1366 a spill register, act as if the spill register is
1367 needed for this insn. However, the spill register
1368 can be used by any reload of this insn, so we only
1369 need do something if no need for that class has
1372 The assumption that every CALL_INSN will trigger a
1373 caller-save is highly conservative, however, the number
1374 of cases where caller-saves will need a spill register but
1375 a block containing a CALL_INSN won't need a spill register
1376 of that class should be quite rare.
1378 If a group is needed, the size and mode of the group will
1379 have been set up at the beginning of this loop. */
1381 if (GET_CODE (insn) == CALL_INSN
1382 && caller_save_spill_class != NO_REGS)
1384 /* See if this register would conflict with any reload that
1385 needs a group or any reload that needs a nongroup. */
1386 int nongroup_need = 0;
1387 int *caller_save_needs;
1389 for (j = 0; j < n_reloads; j++)
1390 if (reg_classes_intersect_p (caller_save_spill_class,
1391 reload_reg_class[j])
1392 && ((CLASS_MAX_NREGS
1393 (reload_reg_class[j],
1394 (GET_MODE_SIZE (reload_outmode[j])
1395 > GET_MODE_SIZE (reload_inmode[j]))
1396 ? reload_outmode[j] : reload_inmode[j])
1398 || reload_nongroup[j]))
1405 = (caller_save_group_size > 1
1406 ? insn_needs.other.groups
1407 : insn_needs.other.regs[nongroup_need]);
1409 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1411 register enum reg_class *p
1412 = reg_class_superclasses[(int) caller_save_spill_class];
1414 caller_save_needs[(int) caller_save_spill_class]++;
1416 while (*p != LIM_REG_CLASSES)
1417 caller_save_needs[(int) *p++] += 1;
1420 /* Show that this basic block will need a register of
1424 && ! (basic_block_needs[(int) caller_save_spill_class]
1427 basic_block_needs[(int) caller_save_spill_class]
1429 new_basic_block_needs = 1;
1433 /* If this insn stores the value of a function call,
1434 and that value is in a register that has been spilled,
1435 and if the insn needs a reload in a class
1436 that might use that register as the reload register,
1437 then add an extra need in that class.
1438 This makes sure we have a register available that does
1439 not overlap the return value. */
1441 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
1443 int regno = REGNO (avoid_return_reg);
1445 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1447 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1449 /* First compute the "basic needs", which counts a
1450 need only in the smallest class in which it
1453 bcopy ((char *) insn_needs.other.regs[0],
1454 (char *) basic_needs, sizeof basic_needs);
1455 bcopy ((char *) insn_needs.other.groups,
1456 (char *) basic_groups, sizeof basic_groups);
1458 for (i = 0; i < N_REG_CLASSES; i++)
1462 if (basic_needs[i] >= 0)
1463 for (p = reg_class_superclasses[i];
1464 *p != LIM_REG_CLASSES; p++)
1465 basic_needs[(int) *p] -= basic_needs[i];
1467 if (basic_groups[i] >= 0)
1468 for (p = reg_class_superclasses[i];
1469 *p != LIM_REG_CLASSES; p++)
1470 basic_groups[(int) *p] -= basic_groups[i];
1473 /* Now count extra regs if there might be a conflict with
1474 the return value register. */
1476 for (r = regno; r < regno + nregs; r++)
1477 if (spill_reg_order[r] >= 0)
1478 for (i = 0; i < N_REG_CLASSES; i++)
1479 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1481 if (basic_needs[i] > 0)
1485 insn_needs.other.regs[0][i]++;
1486 p = reg_class_superclasses[i];
1487 while (*p != LIM_REG_CLASSES)
1488 insn_needs.other.regs[0][(int) *p++]++;
1490 if (basic_groups[i] > 0)
1494 insn_needs.other.groups[i]++;
1495 p = reg_class_superclasses[i];
1496 while (*p != LIM_REG_CLASSES)
1497 insn_needs.other.groups[(int) *p++]++;
1502 /* For each class, collect maximum need of any insn. */
1504 for (i = 0; i < N_REG_CLASSES; i++)
1506 if (max_needs[i] < insn_needs.other.regs[0][i])
1508 max_needs[i] = insn_needs.other.regs[0][i];
1509 max_needs_insn[i] = insn;
1511 if (max_groups[i] < insn_needs.other.groups[i])
1513 max_groups[i] = insn_needs.other.groups[i];
1514 max_groups_insn[i] = insn;
1516 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1518 max_nongroups[i] = insn_needs.other.regs[1][i];
1519 max_nongroups_insn[i] = insn;
1523 /* Note that there is a continue statement above. */
1526 /* If we allocated any new memory locations, make another pass
1527 since it might have changed elimination offsets. */
1528 if (starting_frame_size != get_frame_size ())
1529 something_changed = 1;
1532 for (i = 0; i < N_REG_CLASSES; i++)
1534 if (max_needs[i] > 0)
1536 ";; Need %d reg%s of class %s (for insn %d).\n",
1537 max_needs[i], max_needs[i] == 1 ? "" : "s",
1538 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1539 if (max_nongroups[i] > 0)
1541 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1542 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1543 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1544 if (max_groups[i] > 0)
1546 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1547 max_groups[i], max_groups[i] == 1 ? "" : "s",
1548 mode_name[(int) group_mode[i]],
1549 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1552 /* If we have caller-saves, set up the save areas and see if caller-save
1553 will need a spill register. */
1555 if (caller_save_needed)
1557 /* Set the offsets for setup_save_areas. */
1558 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1560 ep->previous_offset = ep->max_offset;
1562 if ( ! setup_save_areas (&something_changed)
1563 && caller_save_spill_class == NO_REGS)
1565 /* The class we will need depends on whether the machine
1566 supports the sum of two registers for an address; see
1567 find_address_reloads for details. */
1569 caller_save_spill_class
1570 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1571 caller_save_group_size
1572 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1573 something_changed = 1;
1577 /* See if anything that happened changes which eliminations are valid.
1578 For example, on the Sparc, whether or not the frame pointer can
1579 be eliminated can depend on what registers have been used. We need
1580 not check some conditions again (such as flag_omit_frame_pointer)
1581 since they can't have changed. */
1583 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1584 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1585 #ifdef ELIMINABLE_REGS
1586 || ! CAN_ELIMINATE (ep->from, ep->to)
1589 ep->can_eliminate = 0;
1591 /* Look for the case where we have discovered that we can't replace
1592 register A with register B and that means that we will now be
1593 trying to replace register A with register C. This means we can
1594 no longer replace register C with register B and we need to disable
1595 such an elimination, if it exists. This occurs often with A == ap,
1596 B == sp, and C == fp. */
1598 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1600 struct elim_table *op;
1601 register int new_to = -1;
1603 if (! ep->can_eliminate && ep->can_eliminate_previous)
1605 /* Find the current elimination for ep->from, if there is a
1607 for (op = reg_eliminate;
1608 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1609 if (op->from == ep->from && op->can_eliminate)
1615 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1617 for (op = reg_eliminate;
1618 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1619 if (op->from == new_to && op->to == ep->to)
1620 op->can_eliminate = 0;
1624 /* See if any registers that we thought we could eliminate the previous
1625 time are no longer eliminable. If so, something has changed and we
1626 must spill the register. Also, recompute the number of eliminable
1627 registers and see if the frame pointer is needed; it is if there is
1628 no elimination of the frame pointer that we can perform. */
1630 frame_pointer_needed = 1;
1631 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1633 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1634 && ep->to != HARD_FRAME_POINTER_REGNUM)
1635 frame_pointer_needed = 0;
1637 if (! ep->can_eliminate && ep->can_eliminate_previous)
1639 ep->can_eliminate_previous = 0;
1640 spill_hard_reg (ep->from, global, dumpfile, 1);
1641 something_changed = 1;
1646 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1647 /* If we didn't need a frame pointer last time, but we do now, spill
1648 the hard frame pointer. */
1649 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1651 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1652 something_changed = 1;
1656 /* If all needs are met, we win. */
1658 for (i = 0; i < N_REG_CLASSES; i++)
1659 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1661 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1664 /* Not all needs are met; must spill some hard regs. */
1666 /* Put all registers spilled so far back in potential_reload_regs, but
1667 put them at the front, since we've already spilled most of the
1668 pseudos in them (we might have left some pseudos unspilled if they
1669 were in a block that didn't need any spill registers of a conflicting
1670 class. We used to try to mark off the need for those registers,
1671 but doing so properly is very complex and reallocating them is the
1672 simpler approach. First, "pack" potential_reload_regs by pushing
1673 any nonnegative entries towards the end. That will leave room
1674 for the registers we already spilled.
1676 Also, undo the marking of the spill registers from the last time
1677 around in FORBIDDEN_REGS since we will be probably be allocating
1680 ??? It is theoretically possible that we might end up not using one
1681 of our previously-spilled registers in this allocation, even though
1682 they are at the head of the list. It's not clear what to do about
1683 this, but it was no better before, when we marked off the needs met
1684 by the previously-spilled registers. With the current code, globals
1685 can be allocated into these registers, but locals cannot. */
1689 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1690 if (potential_reload_regs[i] != -1)
1691 potential_reload_regs[j--] = potential_reload_regs[i];
1693 for (i = 0; i < n_spills; i++)
1695 potential_reload_regs[i] = spill_regs[i];
1696 spill_reg_order[spill_regs[i]] = -1;
1697 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1703 /* Now find more reload regs to satisfy the remaining need
1704 Do it by ascending class number, since otherwise a reg
1705 might be spilled for a big class and might fail to count
1706 for a smaller class even though it belongs to that class.
1708 Count spilled regs in `spills', and add entries to
1709 `spill_regs' and `spill_reg_order'.
1711 ??? Note there is a problem here.
1712 When there is a need for a group in a high-numbered class,
1713 and also need for non-group regs that come from a lower class,
1714 the non-group regs are chosen first. If there aren't many regs,
1715 they might leave no room for a group.
1717 This was happening on the 386. To fix it, we added the code
1718 that calls possible_group_p, so that the lower class won't
1719 break up the last possible group.
1721 Really fixing the problem would require changes above
1722 in counting the regs already spilled, and in choose_reload_regs.
1723 It might be hard to avoid introducing bugs there. */
1725 CLEAR_HARD_REG_SET (counted_for_groups);
1726 CLEAR_HARD_REG_SET (counted_for_nongroups);
1728 for (class = 0; class < N_REG_CLASSES; class++)
1730 /* First get the groups of registers.
1731 If we got single registers first, we might fragment
1733 while (max_groups[class] > 0)
1735 /* If any single spilled regs happen to form groups,
1736 count them now. Maybe we don't really need
1737 to spill another group. */
1738 count_possible_groups (group_size, group_mode, max_groups,
1741 if (max_groups[class] <= 0)
1744 /* Groups of size 2 (the only groups used on most machines)
1745 are treated specially. */
1746 if (group_size[class] == 2)
1748 /* First, look for a register that will complete a group. */
1749 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1753 j = potential_reload_regs[i];
1754 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1756 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1757 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1758 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1759 && HARD_REGNO_MODE_OK (other, group_mode[class])
1760 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1762 /* We don't want one part of another group.
1763 We could get "two groups" that overlap! */
1764 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1766 (j < FIRST_PSEUDO_REGISTER - 1
1767 && (other = j + 1, spill_reg_order[other] >= 0)
1768 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1769 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1770 && HARD_REGNO_MODE_OK (j, group_mode[class])
1771 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1773 && ! TEST_HARD_REG_BIT (counted_for_groups,
1776 register enum reg_class *p;
1778 /* We have found one that will complete a group,
1779 so count off one group as provided. */
1780 max_groups[class]--;
1781 p = reg_class_superclasses[class];
1782 while (*p != LIM_REG_CLASSES)
1784 if (group_size [(int) *p] <= group_size [class])
1785 max_groups[(int) *p]--;
1789 /* Indicate both these regs are part of a group. */
1790 SET_HARD_REG_BIT (counted_for_groups, j);
1791 SET_HARD_REG_BIT (counted_for_groups, other);
1795 /* We can't complete a group, so start one. */
1796 /* Look for a pair neither of which is explicitly used. */
1797 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
1798 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1801 j = potential_reload_regs[i];
1802 /* Verify that J+1 is a potential reload reg. */
1803 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1804 if (potential_reload_regs[k] == j + 1)
1806 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1807 && k < FIRST_PSEUDO_REGISTER
1808 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1809 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1810 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1811 && HARD_REGNO_MODE_OK (j, group_mode[class])
1812 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1814 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1815 /* Reject J at this stage
1816 if J+1 was explicitly used. */
1817 && ! regs_explicitly_used[j + 1])
1820 /* Now try any group at all
1821 whose registers are not in bad_spill_regs. */
1822 if (i == FIRST_PSEUDO_REGISTER)
1823 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1826 j = potential_reload_regs[i];
1827 /* Verify that J+1 is a potential reload reg. */
1828 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1829 if (potential_reload_regs[k] == j + 1)
1831 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1832 && k < FIRST_PSEUDO_REGISTER
1833 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1834 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1835 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1836 && HARD_REGNO_MODE_OK (j, group_mode[class])
1837 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1839 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1843 /* I should be the index in potential_reload_regs
1844 of the new reload reg we have found. */
1846 if (i >= FIRST_PSEUDO_REGISTER)
1848 /* There are no groups left to spill. */
1849 spill_failure (max_groups_insn[class]);
1855 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1860 /* For groups of more than 2 registers,
1861 look for a sufficient sequence of unspilled registers,
1862 and spill them all at once. */
1863 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1867 j = potential_reload_regs[i];
1869 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1870 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1872 /* Check each reg in the sequence. */
1873 for (k = 0; k < group_size[class]; k++)
1874 if (! (spill_reg_order[j + k] < 0
1875 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1876 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1878 /* We got a full sequence, so spill them all. */
1879 if (k == group_size[class])
1881 register enum reg_class *p;
1882 for (k = 0; k < group_size[class]; k++)
1885 SET_HARD_REG_BIT (counted_for_groups, j + k);
1886 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1887 if (potential_reload_regs[idx] == j + k)
1890 |= new_spill_reg (idx, class,
1891 max_needs, NULL_PTR,
1895 /* We have found one that will complete a group,
1896 so count off one group as provided. */
1897 max_groups[class]--;
1898 p = reg_class_superclasses[class];
1899 while (*p != LIM_REG_CLASSES)
1901 if (group_size [(int) *p]
1902 <= group_size [class])
1903 max_groups[(int) *p]--;
1910 /* We couldn't find any registers for this reload.
1911 Avoid going into an infinite loop. */
1912 if (i >= FIRST_PSEUDO_REGISTER)
1914 /* There are no groups left. */
1915 spill_failure (max_groups_insn[class]);
1922 /* Now similarly satisfy all need for single registers. */
1924 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1926 /* If we spilled enough regs, but they weren't counted
1927 against the non-group need, see if we can count them now.
1928 If so, we can avoid some actual spilling. */
1929 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1930 for (i = 0; i < n_spills; i++)
1931 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1933 && !TEST_HARD_REG_BIT (counted_for_groups,
1935 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1937 && max_nongroups[class] > 0)
1939 register enum reg_class *p;
1941 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1942 max_nongroups[class]--;
1943 p = reg_class_superclasses[class];
1944 while (*p != LIM_REG_CLASSES)
1945 max_nongroups[(int) *p++]--;
1947 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1950 /* Consider the potential reload regs that aren't
1951 yet in use as reload regs, in order of preference.
1952 Find the most preferred one that's in this class. */
1954 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1955 if (potential_reload_regs[i] >= 0
1956 && TEST_HARD_REG_BIT (reg_class_contents[class],
1957 potential_reload_regs[i])
1958 /* If this reg will not be available for groups,
1959 pick one that does not foreclose possible groups.
1960 This is a kludge, and not very general,
1961 but it should be sufficient to make the 386 work,
1962 and the problem should not occur on machines with
1964 && (max_nongroups[class] == 0
1965 || possible_group_p (potential_reload_regs[i], max_groups)))
1968 /* If we couldn't get a register, try to get one even if we
1969 might foreclose possible groups. This may cause problems
1970 later, but that's better than aborting now, since it is
1971 possible that we will, in fact, be able to form the needed
1972 group even with this allocation. */
1974 if (i >= FIRST_PSEUDO_REGISTER
1975 && (asm_noperands (max_needs[class] > 0
1976 ? max_needs_insn[class]
1977 : max_nongroups_insn[class])
1979 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1980 if (potential_reload_regs[i] >= 0
1981 && TEST_HARD_REG_BIT (reg_class_contents[class],
1982 potential_reload_regs[i]))
1985 /* I should be the index in potential_reload_regs
1986 of the new reload reg we have found. */
1988 if (i >= FIRST_PSEUDO_REGISTER)
1990 /* There are no possible registers left to spill. */
1991 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1992 : max_nongroups_insn[class]);
1998 |= new_spill_reg (i, class, max_needs, max_nongroups,
2004 /* If global-alloc was run, notify it of any register eliminations we have
2007 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2008 if (ep->can_eliminate)
2009 mark_elimination (ep->from, ep->to);
2011 /* Insert code to save and restore call-clobbered hard regs
2012 around calls. Tell if what mode to use so that we will process
2013 those insns in reload_as_needed if we have to. */
2015 if (caller_save_needed)
2016 save_call_clobbered_regs (num_eliminable ? QImode
2017 : caller_save_spill_class != NO_REGS ? HImode
2020 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2021 If that insn didn't set the register (i.e., it copied the register to
2022 memory), just delete that insn instead of the equivalencing insn plus
2023 anything now dead. If we call delete_dead_insn on that insn, we may
2024 delete the insn that actually sets the register if the register die
2025 there and that is incorrect. */
2027 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2028 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2029 && GET_CODE (reg_equiv_init[i]) != NOTE)
2031 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2032 delete_dead_insn (reg_equiv_init[i]);
2035 PUT_CODE (reg_equiv_init[i], NOTE);
2036 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2037 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2041 /* Use the reload registers where necessary
2042 by generating move instructions to move the must-be-register
2043 values into or out of the reload registers. */
2045 if (something_needs_reloads || something_needs_elimination
2046 || (caller_save_needed && num_eliminable)
2047 || caller_save_spill_class != NO_REGS)
2048 reload_as_needed (first, global);
2050 /* If we were able to eliminate the frame pointer, show that it is no
2051 longer live at the start of any basic block. If it ls live by
2052 virtue of being in a pseudo, that pseudo will be marked live
2053 and hence the frame pointer will be known to be live via that
2056 if (! frame_pointer_needed)
2057 for (i = 0; i < n_basic_blocks; i++)
2058 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
2059 HARD_FRAME_POINTER_REGNUM);
2061 /* Come here (with failure set nonzero) if we can't get enough spill regs
2062 and we decide not to abort about it. */
2065 reload_in_progress = 0;
2067 /* Now eliminate all pseudo regs by modifying them into
2068 their equivalent memory references.
2069 The REG-rtx's for the pseudos are modified in place,
2070 so all insns that used to refer to them now refer to memory.
2072 For a reg that has a reg_equiv_address, all those insns
2073 were changed by reloading so that no insns refer to it any longer;
2074 but the DECL_RTL of a variable decl may refer to it,
2075 and if so this causes the debugging info to mention the variable. */
2077 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2081 if (reg_equiv_mem[i])
2083 addr = XEXP (reg_equiv_mem[i], 0);
2084 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2086 if (reg_equiv_address[i])
2087 addr = reg_equiv_address[i];
2090 if (reg_renumber[i] < 0)
2092 rtx reg = regno_reg_rtx[i];
2093 XEXP (reg, 0) = addr;
2094 REG_USERVAR_P (reg) = 0;
2095 MEM_IN_STRUCT_P (reg) = in_struct;
2096 PUT_CODE (reg, MEM);
2098 else if (reg_equiv_mem[i])
2099 XEXP (reg_equiv_mem[i], 0) = addr;
2103 /* Make a pass over all the insns and delete all USEs which we inserted
2104 only to tag a REG_EQUAL note on them; if PRESERVE_DEATH_INFO_REGNO_P
2105 is defined, also remove death notes for things that are no longer
2106 registers or no longer die in the insn (e.g., an input and output
2107 pseudo being tied). */
2109 for (insn = first; insn; insn = NEXT_INSN (insn))
2110 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2112 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2116 if (GET_CODE (PATTERN (insn)) == USE
2117 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
2119 PUT_CODE (insn, NOTE);
2120 NOTE_SOURCE_FILE (insn) = 0;
2121 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2124 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2125 for (note = REG_NOTES (insn); note; note = next)
2127 next = XEXP (note, 1);
2128 if (REG_NOTE_KIND (note) == REG_DEAD
2129 && (GET_CODE (XEXP (note, 0)) != REG
2130 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2131 remove_note (insn, note);
2136 /* If we are doing stack checking, give a warning if this function's
2137 frame size is larger than we expect. */
2138 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
2140 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
2142 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2143 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
2144 size += UNITS_PER_WORD;
2146 if (size > STACK_CHECK_MAX_FRAME_SIZE)
2147 warning ("frame size too large for reliable stack checking");
2150 /* Indicate that we no longer have known memory locations or constants. */
2151 reg_equiv_constant = 0;
2152 reg_equiv_memory_loc = 0;
2155 free (real_known_ptr);
2160 free (scratch_list);
2163 free (scratch_block);
2166 CLEAR_HARD_REG_SET (used_spill_regs);
2167 for (i = 0; i < n_spills; i++)
2168 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2173 /* Nonzero if, after spilling reg REGNO for non-groups,
2174 it will still be possible to find a group if we still need one. */
2177 possible_group_p (regno, max_groups)
/* NOTE(review): this excerpt has lines elided between the embedded
   original line numbers (K&R parameter declarations, braces, and the
   return statements are missing).  Code tokens are left untouched.  */
2182 int class = (int) NO_REGS;
/* Find some register class that still needs a group; if none does,
   spilling REGNO cannot hurt.  */
2184 for (i = 0; i < (int) N_REG_CLASSES; i++)
2185 if (max_groups[i] > 0)
2191 if (class == (int) NO_REGS)
2194 /* Consider each pair of consecutive registers. */
2195 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2197 /* Ignore pairs that include reg REGNO. */
2198 if (i == regno || i + 1 == regno)
2201 /* Ignore pairs that are outside the class that needs the group.
2202 ??? Here we fail to handle the case where two different classes
2203 independently need groups. But this never happens with our
2204 current machine descriptions. */
2205 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2206 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2209 /* A pair of consecutive regs we can still spill does the trick. */
2210 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2211 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2212 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2215 /* A pair of one already spilled and one we can spill does it
2216 provided the one already spilled is not otherwise reserved. */
2217 if (spill_reg_order[i] < 0
2218 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2219 && spill_reg_order[i + 1] >= 0
2220 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2221 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
/* Symmetric case: I+1 still spillable, I already a spill reg that is
   not reserved against group or non-group needs.  */
2223 if (spill_reg_order[i + 1] < 0
2224 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2225 && spill_reg_order[i] >= 0
2226 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2227 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i)
2234 /* Count any groups of CLASS that can be formed from the registers recently
/* NOTE(review): the rest of this header comment and the function's
   opening lines (return type, remaining K&R declarations, local
   declarations of I, J and the hard-reg set NEW) are elided from this
   excerpt; code tokens below are left untouched.  */
2238 count_possible_groups (group_size, group_mode, max_groups, class)
2240 enum machine_mode *group_mode;
2247 /* Now find all consecutive groups of spilled registers
2248 and mark each group off against the need for such groups.
2249 But don't count them against ordinary need, yet. */
2251 if (group_size[class] == 0)
2254 CLEAR_HARD_REG_SET (new);
2256 /* Make a mask of all the regs that are spill regs in class I. */
2257 for (i = 0; i < n_spills; i++)
2258 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2259 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2260 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2261 SET_HARD_REG_BIT (new, spill_regs[i]);
2263 /* Find each consecutive group of them. */
2264 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2265 if (TEST_HARD_REG_BIT (new, i)
2266 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2267 && HARD_REGNO_MODE_OK (i, group_mode[class]))
/* Check that all GROUP_SIZE[CLASS] members starting at I are in the
   candidate mask; J ends equal to the group size iff they all are.  */
2269 for (j = 1; j < group_size[class]; j++)
2270 if (! TEST_HARD_REG_BIT (new, i + j))
2273 if (j == group_size[class])
2275 /* We found a group. Mark it off against this class's need for
2276 groups, and against each superclass too. */
2277 register enum reg_class *p;
2279 max_groups[class]--;
2280 p = reg_class_superclasses[class];
2281 while (*p != LIM_REG_CLASSES)
2283 if (group_size [(int) *p] <= group_size [class])
2284 max_groups[(int) *p]--;
2288 /* Don't count these registers again. */
2289 for (j = 0; j < group_size[class]; j++)
2290 SET_HARD_REG_BIT (counted_for_groups, i + j);
2293 /* Skip to the last reg in this group. When i is incremented above,
2294 it will then point to the first reg of the next possible group. */
2299 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2300 another mode that needs to be reloaded for the same register class CLASS.
2301 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2302 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2304 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2305 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2306 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2307 causes unnecessary failures on machines requiring alignment of register
2308 groups when the two modes are different sizes, because the larger mode has
2309 more strict alignment rules than the smaller mode. */
2312 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2313 enum machine_mode allocate_mode, other_mode;
2314 enum reg_class class;
/* Scan every hard register in CLASS; the predicate's return statements
   (presumably "return 0" when a mismatching register is found, and a
   final "return 1") are elided from this excerpt -- verify against the
   full source.  */
2317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2319 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2320 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2321 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2327 /* Handle the failure to find a register to spill.
2328 INSN should be one of the insns which needed this particular spill reg. */
2331 spill_failure (insn)
/* For an asm insn, report a user-level error; otherwise this is a
   compiler bug, so abort with a fatal insn dump.  (The `else' line
   between the two calls is elided from this excerpt.)  */
2334 if (asm_noperands (PATTERN (insn)) >= 0)
2335 error_for_asm (insn, "`asm' needs too many reloads");
2337 fatal_insn ("Unable to find a register to spill.", insn);
2340 /* Add a new register to the tables of available spill-registers
2341 (as well as spilling all pseudos allocated to the register).
2342 I is the index of this register in potential_reload_regs.
2343 CLASS is the regclass whose need is being satisfied.
2344 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2345 so that this register can count off against them.
2346 MAX_NONGROUPS is 0 if this register is part of a group.
2347 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2350 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
/* NOTE(review): the K&R parameter declarations and the declaration of
   VAL are elided from this excerpt.  */
2358 register enum reg_class *p;
2360 int regno = potential_reload_regs[i];
2362 if (i >= FIRST_PSEUDO_REGISTER)
2363 abort (); /* Caller failed to find any register. */
/* A fixed or forbidden register can never legitimately be chosen as a
   spill reg; dying here with a diagnostic beats silent miscompilation.  */
2365 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2367 static char *reg_class_names[] = REG_CLASS_NAMES;
2368 fatal ("fixed or forbidden register %d (%s) was spilled for class %s.\n\
2369 This may be due to a compiler bug or to impossible asm\n\
2370 statements or clauses.", regno, reg_names[regno], reg_class_names[class]);
2373 /* Make reg REGNO an additional reload reg. */
2375 potential_reload_regs[i] = -1;
2376 spill_regs[n_spills] = regno;
2377 spill_reg_order[regno] = n_spills;
2379 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2381 /* Clear off the needs we just satisfied.
   This register counts against CLASS and every superclass of CLASS.  */
2384 p = reg_class_superclasses[class];
2385 while (*p != LIM_REG_CLASSES)
2386 max_needs[(int) *p++]--;
/* If the caller passed a non-group need vector and this class still has
   non-group need, count this register against it too.  */
2388 if (max_nongroups && max_nongroups[class] > 0)
2390 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2391 max_nongroups[class]--;
2392 p = reg_class_superclasses[class];
2393 while (*p != LIM_REG_CLASSES)
2394 max_nongroups[(int) *p++]--;
2397 /* Spill every pseudo reg that was allocated to this reg
2398 or to something that overlaps this reg. */
2400 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2402 /* If there are some registers still to eliminate and this register
2403 wasn't ever used before, additional stack space may have to be
2404 allocated to store this register. Thus, we may have changed the offset
2405 between the stack and frame pointers, so mark that something has changed.
2406 (If new pseudos were spilled, thus requiring more space, VAL would have
2407 been set non-zero by the call to spill_hard_reg above since additional
2408 reloads may be needed in that case.
2410 One might think that we need only set VAL to 1 if this is a call-used
2411 register. However, the set of registers that must be saved by the
2412 prologue is not identical to the call-used set. For example, the
2413 register used by the call insn for the return PC is a call-used register,
2414 but must be saved by the prologue. */
2415 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2418 regs_ever_live[spill_regs[n_spills]] = 1;
2424 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2425 data that is dead in INSN. */
2428 delete_dead_insn (insn)
2431 rtx prev = prev_real_insn (insn);
/* NOTE(review): the declaration of PREV_DEST is elided from this
   excerpt.  */
2434 /* If the previous insn sets a register that dies in our insn, delete it
   too (recursively), since its only purpose was to feed this insn.  */
2436 if (prev && GET_CODE (PATTERN (prev)) == SET
2437 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2438 && reg_mentioned_p (prev_dest, PATTERN (insn))
2439 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2440 delete_dead_insn (prev);
/* Turn INSN into a deleted-insn note rather than unlinking it, so the
   insn chain stays intact.  */
2442 PUT_CODE (insn, NOTE);
2443 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2444 NOTE_SOURCE_FILE (insn) = 0;
2447 /* Modify the home of pseudo-reg I.
2448 The new home is present in reg_renumber[I].
2450 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2451 or it may be -1, meaning there is none or it is not relevant.
2452 This is used so that all pseudos spilled from a given hard reg
2453 can share one stack slot. */
2456 alter_reg (i, from_reg)
/* NOTE(review): the K&R parameter declarations are elided from this
   excerpt.  */
2460 /* When outputting an inline function, this can happen
2461 for a reg that isn't actually used. */
2462 if (regno_reg_rtx[i] == 0)
2465 /* If the reg got changed to a MEM at rtl-generation time,
   there is nothing left to alter here.  */
2467 if (GET_CODE (regno_reg_rtx[i]) != REG)
2470 /* Modify the reg-rtx to contain the new hard reg
2471 number or else to contain its pseudo reg number. */
2472 REGNO (regno_reg_rtx[i])
2473 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2475 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2476 allocate a stack slot for it. */
2478 if (reg_renumber[i] < 0
2479 && REG_N_REFS (i) > 0
2480 && reg_equiv_constant[i] == 0
2481 && reg_equiv_memory_loc[i] == 0)
2484 int inherent_size = PSEUDO_REGNO_BYTES (i);
2485 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2488 /* Each pseudo reg has an inherent size which comes from its own mode,
2489 and a total size which provides room for paradoxical subregs
2490 which refer to the pseudo reg in wider modes.
2492 We can use a slot already allocated if it provides both
2493 enough inherent space and enough total space.
2494 Otherwise, we allocate a new slot, making sure that it has no less
2495 inherent space, and no less total space, then the previous slot. */
2498 /* No known place to spill from => no slot to reuse.
   (The `if (from_reg == -1)' guard for this branch appears to be among
   the elided lines -- verify against the full source.)  */
2499 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2500 inherent_size == total_size ? 0 : -1);
2501 if (BYTES_BIG_ENDIAN)
2502 /* Cancel the big-endian correction done in assign_stack_local.
2503 Get the address of the beginning of the slot.
2504 This is so we can do a big-endian correction unconditionally
   below, once, for the slot regardless of how it was obtained.  */
2506 adjust = inherent_size - total_size;
2508 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2510 /* Reuse a stack slot if possible: the existing slot for FROM_REG
   must be at least as wide as we need, both in bytes and in mode size.  */
2511 else if (spill_stack_slot[from_reg] != 0
2512 && spill_stack_slot_width[from_reg] >= total_size
2513 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2515 x = spill_stack_slot[from_reg];
2516 /* Allocate a bigger slot. */
2519 /* Compute maximum size needed, both for inherent size
2520 and for total size. */
2521 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
/* Grow to at least the size of any previous slot for FROM_REG so all
   pseudos spilled from that hard reg can keep sharing one slot.  */
2523 if (spill_stack_slot[from_reg])
2525 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2527 mode = GET_MODE (spill_stack_slot[from_reg]);
2528 if (spill_stack_slot_width[from_reg] > total_size)
2529 total_size = spill_stack_slot_width[from_reg];
2531 /* Make a slot with that size. */
2532 x = assign_stack_local (mode, total_size,
2533 inherent_size == total_size ? 0 : -1);
2535 if (BYTES_BIG_ENDIAN)
2537 /* Cancel the big-endian correction done in assign_stack_local.
2538 Get the address of the beginning of the slot.
2539 This is so we can do a big-endian correction unconditionally
   below.  */
2541 adjust = GET_MODE_SIZE (mode) - total_size;
2543 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2546 plus_constant (XEXP (x, 0), adjust));
/* Record the (possibly enlarged) slot for reuse by later spills from
   the same hard reg.  */
2548 spill_stack_slot[from_reg] = stack_slot;
2549 spill_stack_slot_width[from_reg] = total_size;
2552 /* On a big endian machine, the "address" of the slot
2553 is the address of the low part that fits its inherent mode. */
2554 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2555 adjust += (total_size - inherent_size);
2557 /* If we have any adjustment to make, or if the stack slot is the
2558 wrong mode, make a new stack slot. */
2559 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2561 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
2562 plus_constant (XEXP (x, 0), adjust))
2563 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2566 /* Save the stack slot for later. */
2567 reg_equiv_memory_loc[i] = x;
2571 /* Mark the slots in regs_ever_live for the hard regs
2572 used by pseudo-reg number REGNO. */
2575 mark_home_live (regno)
2578 register int i, lim;
2579 i = reg_renumber[regno];
/* NOTE(review): the guard for an unrenumbered pseudo (reg_renumber < 0)
   and the loop header before the assignment below are elided from this
   excerpt.  LIM is one past the last hard reg the pseudo occupies.  */
2582 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2584 regs_ever_live[i++] = 1;
2587 /* Mark the registers used in SCRATCH as being live. */
2590 mark_scratch_live (scratch)
/* SCRATCH is a hard register rtx; mark every hard reg it occupies
   (REGNO (scratch) .. REGNO (scratch) + nregs - 1) in regs_ever_live.
   NOTE(review): the K&R declaration of SCRATCH and of I are elided
   from this excerpt.  */
2594 int regno = REGNO (scratch);
2595 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2597 for (i = regno; i < lim; i++)
2598 regs_ever_live[i] = 1;
2601 /* This function handles the tracking of elimination offsets around branches.
2603 X is a piece of RTL being scanned.
2605 INSN is the insn that it came from, if any.
2607 INITIAL_P is non-zero if we are to set the offset to be the initial
2608 offset and zero if we are setting the offset of the label to be the
   current offset.  */
2612 set_label_offsets (x, insn, initial_p)
/* NOTE(review): the K&R parameter declarations, the switch statement
   header and the case labels are among the lines elided from this
   excerpt; only the mis-encoded `&reg_eliminate' on line 2753 has been
   repaired (it appeared as the single character U+00AE, an HTML
   `&reg;' entity decoded by mistake).  */
2617 enum rtx_code code = GET_CODE (x);
2620 struct elim_table *p;
2625 if (LABEL_REF_NONLOCAL_P (x))
2630 /* ... fall through ... */
2633 /* If we know nothing about this label, set the desired offsets. Note
2634 that this sets the offset at a label to be the offset before a label
2635 if we don't know anything about the label. This is not correct for
2636 the label after a BARRIER, but is the best guess we can make. If
2637 we guessed wrong, we will suppress an elimination that might have
2638 been possible had we been able to guess correctly. */
2640 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2642 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2643 offsets_at[CODE_LABEL_NUMBER (x)][i]
2644 = (initial_p ? reg_eliminate[i].initial_offset
2645 : reg_eliminate[i].offset);
2646 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2649 /* Otherwise, if this is the definition of a label and it is
2650 preceded by a BARRIER, set our offsets to the known offset of
   that label (control can only reach it by jumping).  */
2654 && (tem = prev_nonnote_insn (insn)) != 0
2655 && GET_CODE (tem) == BARRIER)
2657 num_not_at_initial_offset = 0;
2658 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2660 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2661 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2662 if (reg_eliminate[i].can_eliminate
2663 && (reg_eliminate[i].offset
2664 != reg_eliminate[i].initial_offset))
2665 num_not_at_initial_offset++;
2670 /* If neither of the above cases is true, compare each offset
2671 with those previously recorded and suppress any eliminations
2672 where the offsets disagree. */
2674 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2675 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2676 != (initial_p ? reg_eliminate[i].initial_offset
2677 : reg_eliminate[i].offset))
2678 reg_eliminate[i].can_eliminate = 0;
/* For a jump insn, first scan its pattern, then fall through to
   process any REG_LABEL notes attached to it.  */
2683 set_label_offsets (PATTERN (insn), insn, initial_p);
2685 /* ... fall through ... */
2689 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2690 and hence must have all eliminations at their initial offsets. */
2691 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2692 if (REG_NOTE_KIND (tem) == REG_LABEL)
2693 set_label_offsets (XEXP (tem, 0), insn, 1)
2698 /* Each of the labels in the address vector must be at their initial
2699 offsets. We want the first field for ADDR_VEC and the second
2700 field for ADDR_DIFF_VEC. */
2702 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2703 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2708 /* We only care about setting PC. If the source is not RETURN,
2709 IF_THEN_ELSE, or a label, disable any eliminations not at
2710 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2711 isn't one of those possibilities. For branches to a label,
2712 call ourselves recursively.
2714 Note that this can disable elimination unnecessarily when we have
2715 a non-local goto since it will look like a non-constant jump to
2716 someplace in the current function. This isn't a significant
2717 problem since such jumps will normally be when all elimination
2718 pairs are back to their initial offsets. */
2720 if (SET_DEST (x) != pc_rtx)
2723 switch (GET_CODE (SET_SRC (x)))
2730 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
/* IF_THEN_ELSE: examine both arms; a LABEL_REF arm recurses, while any
   arm that is neither PC, RETURN, nor a label forces the conservative
   path below.  */
2734 tem = XEXP (SET_SRC (x), 1);
2735 if (GET_CODE (tem) == LABEL_REF)
2736 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2737 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2740 tem = XEXP (SET_SRC (x), 2);
2741 if (GET_CODE (tem) == LABEL_REF)
2742 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2743 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2751 /* If we reach here, all eliminations must be at their initial
2752 offset because we are doing a jump to a variable address. */
2753 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2754 if (p->offset != p->initial_offset)
2755 p->can_eliminate = 0;
2763 /* Used for communication between the next two functions to properly share
2764 the vector for an ASM_OPERANDS. */
2766 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2768 /* Scan X and replace any eliminable registers (such as fp) with a
2769 replacement (such as sp), plus an offset.
2771 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2772 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2773 MEM, we are allowed to replace a sum of a register and the constant zero
2774 with the register, which we cannot do outside a MEM. In addition, we need
2775 to record the fact that a register is referenced outside a MEM.
2777 If INSN is an insn, it is the insn containing X. If we replace a REG
2778 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2779 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2780 the REG is being modified.
2782 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2783 That's used when we eliminate in expressions stored in notes.
2784 This means, do not set ref_outside_mem even if the reference
2787 If we see a modification to a register we know about, take the
2788 appropriate action (see case SET, below).
2790 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2791 replacements done assuming all offsets are at their initial values. If
2792 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2793 encounter, return the actual location so that find_reloads will do
2794 the proper thing. */
2797 eliminate_regs (x, mem_mode, insn)
2799 enum machine_mode mem_mode;
2802 enum rtx_code code = GET_CODE (x);
2803 struct elim_table *ep;
2826 /* This is only for the benefit of the debugging backends, which call
2827 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2828 removed after CSE. */
2829 new = eliminate_regs (XEXP (x, 0), 0, insn);
2830 if (GET_CODE (new) == MEM)
2831 return XEXP (new, 0);
2837 /* First handle the case where we encounter a bare register that
2838 is eliminable. Replace it with a PLUS. */
2839 if (regno < FIRST_PSEUDO_REGISTER)
2841 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2843 if (ep->from_rtx == x && ep->can_eliminate)
2846 /* Refs inside notes don't count for this purpose. */
2847 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2848 || GET_CODE (insn) == INSN_LIST)))
2849 ep->ref_outside_mem = 1;
2850 return plus_constant (ep->to_rtx, ep->previous_offset);
2854 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2855 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2857 /* In this case, find_reloads would attempt to either use an
2858 incorrect address (if something is not at its initial offset)
2859 or substitute an replaced address into an insn (which loses
2860 if the offset is changed by some later action). So we simply
2861 return the replaced stack slot (assuming it is changed by
2862 elimination) and ignore the fact that this is actually a
2863 reference to the pseudo. Ensure we make a copy of the
2864 address in case it is shared. */
2865 new = eliminate_regs (reg_equiv_memory_loc[regno], mem_mode, insn);
2866 if (new != reg_equiv_memory_loc[regno])
2868 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2869 && GET_CODE (insn) != INSN_LIST)
2870 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn))
2871 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
2872 return copy_rtx (new);
2878 /* If this is the sum of an eliminable register and a constant, rework
2880 if (GET_CODE (XEXP (x, 0)) == REG
2881 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2882 && CONSTANT_P (XEXP (x, 1)))
2884 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2886 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2889 /* Refs inside notes don't count for this purpose. */
2890 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2891 || GET_CODE (insn) == INSN_LIST)))
2892 ep->ref_outside_mem = 1;
2894 /* The only time we want to replace a PLUS with a REG (this
2895 occurs when the constant operand of the PLUS is the negative
2896 of the offset) is when we are inside a MEM. We won't want
2897 to do so at other times because that would change the
2898 structure of the insn in a way that reload can't handle.
2899 We special-case the commonest situation in
2900 eliminate_regs_in_insn, so just replace a PLUS with a
2901 PLUS here, unless inside a MEM. */
2902 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2903 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2906 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2907 plus_constant (XEXP (x, 1),
2908 ep->previous_offset));
2911 /* If the register is not eliminable, we are done since the other
2912 operand is a constant. */
2916 /* If this is part of an address, we want to bring any constant to the
2917 outermost PLUS. We will do this by doing register replacement in
2918 our operands and seeing if a constant shows up in one of them.
2920 We assume here this is part of an address (or a "load address" insn)
2921 since an eliminable register is not likely to appear in any other
2924 If we have (plus (eliminable) (reg)), we want to produce
2925 (plus (plus (replacement) (reg) (const))). If this was part of a
2926 normal add insn, (plus (replacement) (reg)) will be pushed as a
2927 reload. This is the desired action. */
2930 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2931 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2933 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2935 /* If one side is a PLUS and the other side is a pseudo that
2936 didn't get a hard register but has a reg_equiv_constant,
2937 we must replace the constant here since it may no longer
2938 be in the position of any operand. */
2939 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2940 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2941 && reg_renumber[REGNO (new1)] < 0
2942 && reg_equiv_constant != 0
2943 && reg_equiv_constant[REGNO (new1)] != 0)
2944 new1 = reg_equiv_constant[REGNO (new1)];
2945 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2946 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2947 && reg_renumber[REGNO (new0)] < 0
2948 && reg_equiv_constant[REGNO (new0)] != 0)
2949 new0 = reg_equiv_constant[REGNO (new0)];
2951 new = form_sum (new0, new1);
2953 /* As above, if we are not inside a MEM we do not want to
2954 turn a PLUS into something else. We might try to do so here
2955 for an addition of 0 if we aren't optimizing. */
2956 if (! mem_mode && GET_CODE (new) != PLUS)
2957 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2965 /* If this is the product of an eliminable register and a
2966 constant, apply the distribute law and move the constant out
2967 so that we have (plus (mult ..) ..). This is needed in order
2968 to keep load-address insns valid. This case is pathological.
2969 We ignore the possibility of overflow here. */
2970 if (GET_CODE (XEXP (x, 0)) == REG
2971 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2972 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2972 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2975 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2978 /* Refs inside notes don't count for this purpose. */
2979 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2980 || GET_CODE (insn) == INSN_LIST)))
2981 ep->ref_outside_mem = 1;
2984 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2985 ep->previous_offset * INTVAL (XEXP (x, 1)));
2988 /* ... fall through ... */
2993 case DIV: case UDIV:
2994 case MOD: case UMOD:
2995 case AND: case IOR: case XOR:
2996 case ROTATERT: case ROTATE:
2997 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2999 case GE: case GT: case GEU: case GTU:
3000 case LE: case LT: case LEU: case LTU:
3002 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3004 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
3006 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
3007 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
3012 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3015 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3016 if (new != XEXP (x, 0))
3017 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
3020 /* ... fall through ... */
3023 /* Now do eliminations in the rest of the chain. If this was
3024 an EXPR_LIST, this might result in allocating more memory than is
3025 strictly needed, but it simplifies the code. */
3028 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
3029 if (new != XEXP (x, 1))
3030 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
3038 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3039 if (ep->to_rtx == XEXP (x, 0))
3041 int size = GET_MODE_SIZE (mem_mode);
3043 /* If more bytes than MEM_MODE are pushed, account for them. */
3044 #ifdef PUSH_ROUNDING
3045 if (ep->to_rtx == stack_pointer_rtx)
3046 size = PUSH_ROUNDING (size);
3048 if (code == PRE_DEC || code == POST_DEC)
3054 /* Fall through to generic unary operation case. */
3055 case STRICT_LOW_PART:
3057 case SIGN_EXTEND: case ZERO_EXTEND:
3058 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3059 case FLOAT: case FIX:
3060 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3064 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3065 if (new != XEXP (x, 0))
3066 return gen_rtx_fmt_e (code, GET_MODE (x), new);
3070 /* Similar to above processing, but preserve SUBREG_WORD.
3071 Convert (subreg (mem)) to (mem) if not paradoxical.
3072 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3073 pseudo didn't get a hard reg, we must replace this with the
3074 eliminated version of the memory location because push_reloads
3075 may do the replacement in certain circumstances. */
3076 if (GET_CODE (SUBREG_REG (x)) == REG
3077 && (GET_MODE_SIZE (GET_MODE (x))
3078 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3079 && reg_equiv_memory_loc != 0
3080 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3082 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
3085 /* If we didn't change anything, we must retain the pseudo. */
3086 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
3087 new = SUBREG_REG (x);
3090 /* In this case, we must show that the pseudo is used in this
3091 insn so that delete_output_reload will do the right thing. */
3092 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3093 && GET_CODE (insn) != INSN_LIST)
3094 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
3097 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
3099 /* Ensure NEW isn't shared in case we have to reload it. */
3100 new = copy_rtx (new);
3104 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
3106 if (new != XEXP (x, 0))
3108 int x_size = GET_MODE_SIZE (GET_MODE (x));
3109 int new_size = GET_MODE_SIZE (GET_MODE (new));
3111 if (GET_CODE (new) == MEM
3112 && ((x_size < new_size
3113 #ifdef WORD_REGISTER_OPERATIONS
3114 /* On these machines, combine can create rtl of the form
3115 (set (subreg:m1 (reg:m2 R) 0) ...)
3116 where m1 < m2, and expects something interesting to
3117 happen to the entire word. Moreover, it will use the
3118 (reg:m2 R) later, expecting all bits to be preserved.
3119 So if the number of words is the same, preserve the
3120 subreg so that push_reloads can see it. */
3121 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
3124 || (x_size == new_size))
3127 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3128 enum machine_mode mode = GET_MODE (x);
3130 if (BYTES_BIG_ENDIAN)
3131 offset += (MIN (UNITS_PER_WORD,
3132 GET_MODE_SIZE (GET_MODE (new)))
3133 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
3135 PUT_MODE (new, mode);
3136 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3140 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
3146 /* If using a register that is the source of an eliminate we still
3147 think can be performed, note it cannot be performed since we don't
3148 know how this register is used. */
3149 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3150 if (ep->from_rtx == XEXP (x, 0))
3151 ep->can_eliminate = 0;
3153 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3154 if (new != XEXP (x, 0))
3155 return gen_rtx_fmt_e (code, GET_MODE (x), new);
3159 /* If clobbering a register that is the replacement register for an
3160 elimination we still think can be performed, note that it cannot
3161 be performed. Otherwise, we need not be concerned about it. */
3162 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3163 if (ep->to_rtx == XEXP (x, 0))
3164 ep->can_eliminate = 0;
3166 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3167 if (new != XEXP (x, 0))
3168 return gen_rtx_fmt_e (code, GET_MODE (x), new);
3174 /* Properly handle sharing input and constraint vectors. */
3175 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3177 /* When we come to a new vector not seen before,
3178 scan all its elements; keep the old vector if none
3179 of them changes; otherwise, make a copy. */
3180 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3181 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3182 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3183 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
3186 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3187 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3190 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3191 new_asm_operands_vec = old_asm_operands_vec;
3193 new_asm_operands_vec
3194 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3197 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3198 if (new_asm_operands_vec == old_asm_operands_vec)
3201 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3202 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3203 ASM_OPERANDS_OUTPUT_IDX (x),
3204 new_asm_operands_vec,
3205 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3206 ASM_OPERANDS_SOURCE_FILE (x),
3207 ASM_OPERANDS_SOURCE_LINE (x));
3208 new->volatil = x->volatil;
3213 /* Check for setting a register that we know about. */
3214 if (GET_CODE (SET_DEST (x)) == REG)
3216 /* See if this is setting the replacement register for an
3219 If DEST is the hard frame pointer, we do nothing because we
3220 assume that all assignments to the frame pointer are for
3221 non-local gotos and are being done at a time when they are valid
3222 and do not disturb anything else. Some machines want to
3223 eliminate a fake argument pointer (or even a fake frame pointer)
3224 with either the real frame or the stack pointer. Assignments to
3225 the hard frame pointer must not prevent this elimination. */
3227 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3229 if (ep->to_rtx == SET_DEST (x)
3230 && SET_DEST (x) != hard_frame_pointer_rtx)
3232 /* If it is being incremented, adjust the offset. Otherwise,
3233 this elimination can't be done. */
3234 rtx src = SET_SRC (x);
3236 if (GET_CODE (src) == PLUS
3237 && XEXP (src, 0) == SET_DEST (x)
3238 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3239 ep->offset -= INTVAL (XEXP (src, 1));
3241 ep->can_eliminate = 0;
3244 /* Now check to see we are assigning to a register that can be
3245 eliminated. If so, it must be as part of a PARALLEL, since we
3246 will not have been called if this is a single SET. So indicate
3247 that we can no longer eliminate this reg. */
3248 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3250 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3251 ep->can_eliminate = 0;
3254 /* Now avoid the loop below in this common case. */
3256 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3257 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3259 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3260 write a CLOBBER insn. */
3261 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3262 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3263 && GET_CODE (insn) != INSN_LIST)
3264 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
3266 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3267 return gen_rtx_SET (VOIDmode, new0, new1);
3273 /* This is only for the benefit of the debugging backends, which call
3274 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3275 removed after CSE. */
3276 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3277 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
3279 /* Our only special processing is to pass the mode of the MEM to our
3280 recursive call and copy the flags. While we are here, handle this
3281 case more efficiently. */
3282 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3283 if (new != XEXP (x, 0))
3285 new = gen_rtx_MEM (GET_MODE (x), new);
3286 new->volatil = x->volatil;
3287 new->unchanging = x->unchanging;
3288 new->in_struct = x->in_struct;
3298 /* Process each of our operands recursively. If any have changed, make a
3300 fmt = GET_RTX_FORMAT (code);
3301 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3305 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3306 if (new != XEXP (x, i) && ! copied)
3308 rtx new_x = rtx_alloc (code);
3309 bcopy ((char *) x, (char *) new_x,
3310 (sizeof (*new_x) - sizeof (new_x->fld)
3311 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3317 else if (*fmt == 'E')
3320 for (j = 0; j < XVECLEN (x, i); j++)
3322 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3323 if (new != XVECEXP (x, i, j) && ! copied_vec)
3325 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3329 rtx new_x = rtx_alloc (code);
3330 bcopy ((char *) x, (char *) new_x,
3331 (sizeof (*new_x) - sizeof (new_x->fld)
3332 + (sizeof (new_x->fld[0])
3333 * GET_RTX_LENGTH (code))));
3337 XVEC (x, i) = new_v;
3340 XVECEXP (x, i, j) = new;
3348 /* Scan INSN and eliminate all eliminable registers in it.
3350 If REPLACE is nonzero, do the replacement destructively. Also
3351 delete the insn as dead if it is setting an eliminable register.
3353 If REPLACE is zero, do all our allocations in reload_obstack.
3355 If no eliminations were done and this insn doesn't require any elimination
3356 processing (these are not identical conditions: it might be updating sp,
3357 but not referencing fp; this needs to be seen during reload_as_needed so
3358 that the offset between fp and sp can be taken into consideration), zero
3359 is returned. Otherwise, 1 is returned. */
/* NOTE(review): the three elimination-table loop bounds below had been
   mangled from "&reg_eliminate" to "(R) _eliminate" by an HTML-entity
   encoding error; the original "&reg_eliminate" spelling is restored.  */
3362 eliminate_regs_in_insn (insn, replace)
3366 rtx old_body = PATTERN (insn);
3367 rtx old_set = single_set (insn);
3370 struct elim_table *ep;
3373 push_obstacks (&reload_obstack, &reload_obstack);
3375 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3376 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3378 /* Check for setting an eliminable register. */
3379 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3380 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3382 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3383 /* If this is setting the frame pointer register to the
3384 hardware frame pointer register and this is an elimination
3385 that will be done (tested above), this insn is really
3386 adjusting the frame pointer downward to compensate for
3387 the adjustment done before a nonlocal goto. */
3388 if (ep->from == FRAME_POINTER_REGNUM
3389 && ep->to == HARD_FRAME_POINTER_REGNUM)
3391 rtx src = SET_SRC (old_set);
3393 rtx prev_insn, prev_set;
/* Recognize SRC as TO, TO+const, or a copy of a previous insn's
   destination that was one of those forms; extract the offset.  */
3395 if (src == ep->to_rtx)
3397 else if (GET_CODE (src) == PLUS
3398 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3399 offset = INTVAL (XEXP (src, 0)), ok = 1;
3400 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3401 && (prev_set = single_set (prev_insn)) != 0
3402 && rtx_equal_p (SET_DEST (prev_set), src))
3404 src = SET_SRC (prev_set);
3405 if (src == ep->to_rtx)
3407 else if (GET_CODE (src) == PLUS
3408 && GET_CODE (XEXP (src, 0)) == CONST_INT
3409 && XEXP (src, 1) == ep->to_rtx)
3410 offset = INTVAL (XEXP (src, 0)), ok = 1;
3411 else if (GET_CODE (src) == PLUS
3412 && GET_CODE (XEXP (src, 1)) == CONST_INT
3413 && XEXP (src, 0) == ep->to_rtx)
3414 offset = INTVAL (XEXP (src, 1)), ok = 1;
3422 = plus_constant (ep->to_rtx, offset - ep->offset);
3424 /* First see if this insn remains valid when we
3425 make the change. If not, keep the INSN_CODE
3426 the same and let reload fix it up. */
3427 validate_change (insn, &SET_SRC (old_set), src, 1);
3428 validate_change (insn, &SET_DEST (old_set),
3430 if (! apply_change_group ())
3432 SET_SRC (old_set) = src;
3433 SET_DEST (old_set) = ep->to_rtx;
3443 /* In this case this insn isn't serving a useful purpose. We
3444 will delete it in reload_as_needed once we know that this
3445 elimination is, in fact, being done.
3447 If REPLACE isn't set, we can't delete this insn, but needn't
3448 process it since it won't be used unless something changes. */
3450 delete_dead_insn (insn);
3455 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3456 in the insn is the negative of the offset in FROM. Substitute
3457 (set (reg) (reg to)) for the insn and change its code.
3459 We have to do this here, rather than in eliminate_regs, so that we can
3460 change the insn code. */
3462 if (GET_CODE (SET_SRC (old_set)) == PLUS
3463 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3464 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3465 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3467 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3468 && ep->can_eliminate)
3470 /* We must stop at the first elimination that will be used.
3471 If this one would replace the PLUS with a REG, do it
3472 now. Otherwise, quit the loop and let eliminate_regs
3473 do its normal replacement. */
3474 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3476 /* We assume here that we don't need a PARALLEL of
3477 any CLOBBERs for this assignment. There's not
3478 much we can do if we do need it. */
3479 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3482 INSN_CODE (insn) = -1;
3491 old_asm_operands_vec = 0;
3493 /* Replace the body of this insn with a substituted form. If we changed
3494 something, return non-zero.
3496 If we are replacing a body that was a (set X (plus Y Z)), try to
3497 re-recognize the insn. We do this in case we had a simple addition
3498 but now can do this as a load-address. This saves an insn in this
3501 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3502 if (new_body != old_body)
3504 /* If we aren't replacing things permanently and we changed something,
3505 make another copy to ensure that all the RTL is new. Otherwise
3506 things can go wrong if find_reload swaps commutative operands
3507 and one is inside RTL that has been copied while the other is not. */
3509 /* Don't copy an asm_operands because (1) there's no need and (2)
3510 copy_rtx can't do it properly when there are multiple outputs. */
3511 if (! replace && asm_noperands (old_body) < 0)
3512 new_body = copy_rtx (new_body);
3514 /* If we had a move insn but now we don't, rerecognize it. This will
3515 cause spurious re-recognition if the old move had a PARALLEL since
3516 the new one still will, but we can't call single_set without
3517 having put NEW_BODY into the insn and the re-recognition won't
3518 hurt in this rare case. */
3520 && ((GET_CODE (SET_SRC (old_set)) == REG
3521 && (GET_CODE (new_body) != SET
3522 || GET_CODE (SET_SRC (new_body)) != REG))
3523 /* If this was a load from or store to memory, compare
3524 the MEM in recog_operand to the one in the insn. If they
3525 are not equal, then rerecognize the insn. */
3527 && ((GET_CODE (SET_SRC (old_set)) == MEM
3528 && SET_SRC (old_set) != recog_operand[1])
3529 || (GET_CODE (SET_DEST (old_set)) == MEM
3530 && SET_DEST (old_set) != recog_operand[0])))
3531 /* If this was an add insn before, rerecognize. */
3532 || GET_CODE (SET_SRC (old_set)) == PLUS))
3534 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3535 /* If recognition fails, store the new body anyway.
3536 It's normal to have recognition failures here
3537 due to bizarre memory addresses; reloading will fix them. */
3538 PATTERN (insn) = new_body;
3541 PATTERN (insn) = new_body;
3546 /* Loop through all elimination pairs. See if any have changed and
3547 recalculate the number not at initial offset.
3549 Compute the maximum offset (minimum offset if the stack does not
3550 grow downward) for each elimination pair.
3552 We also detect cases where register elimination cannot be done,
3553 namely, if a register would be both changed and referenced outside a MEM
3554 in the resulting insn since such an insn is often undefined and, even if
3555 not, we cannot know what meaning will be given to it. Note that it is
3556 valid to have a register used in an address in an insn that changes it
3557 (presumably with a pre- or post-increment or decrement).
3559 If anything changes, return nonzero. */
3561 num_not_at_initial_offset = 0;
3562 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3564 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3565 ep->can_eliminate = 0;
3567 ep->ref_outside_mem = 0;
3569 if (ep->previous_offset != ep->offset)
3572 ep->previous_offset = ep->offset;
3573 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3574 num_not_at_initial_offset++;
3576 #ifdef STACK_GROWS_DOWNWARD
3577 ep->max_offset = MAX (ep->max_offset, ep->offset);
3579 ep->max_offset = MIN (ep->max_offset, ep->offset);
3584 /* If we changed something, perform elimination in REG_NOTES. This is
3585 needed even when REPLACE is zero because a REG_DEAD note might refer
3586 to a register that we eliminate and could cause a different number
3587 of spill registers to be needed in the final reload pass than in
3589 if (val && REG_NOTES (insn) != 0)
3590 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3598 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3599 replacement we currently believe is valid, mark it as not eliminable if X
3600 modifies DEST in any way other than by adding a constant integer to it.
3602 If DEST is the frame pointer, we do nothing because we assume that
3603 all assignments to the hard frame pointer are nonlocal gotos and are being
3604 done at a time when they are valid and do not disturb anything else.
3605 Some machines want to eliminate a fake argument pointer with either the
3606 frame or stack pointer. Assignments to the hard frame pointer must not
3607 prevent this elimination.
3609 Called via note_stores from reload before starting its passes to scan
3610 the insns of the function. */
3613 mark_not_eliminable (dest, x)
3619 /* A SUBREG of a hard register here is just changing its mode. We should
3620 not see a SUBREG of an eliminable hard register, but check just in
3622 if (GET_CODE (dest) == SUBREG)
3623 dest = SUBREG_REG (dest);
/* Stores to the hard frame pointer are tolerated; see the header comment
   about nonlocal gotos and fake argument-pointer eliminations.  */
3625 if (dest == hard_frame_pointer_rtx)
/* If X alters an elimination's replacement register other than by
   (set DEST (plus DEST (const_int ...))), its offset can no longer be
   tracked, so permanently disable that elimination -- both the current
   flag and the saved can_eliminate_previous.  */
3628 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3629 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3630 && (GET_CODE (x) != SET
3631 || GET_CODE (SET_SRC (x)) != PLUS
3632 || XEXP (SET_SRC (x), 0) != dest
3633 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3635 reg_eliminate[i].can_eliminate_previous
3636 = reg_eliminate[i].can_eliminate = 0;
3641 /* Kick all pseudos out of hard register REGNO.
3642 If GLOBAL is nonzero, try to find someplace else to put them.
3643 If DUMPFILE is nonzero, log actions taken on that file.
3645 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3646 because we found we can't eliminate some register. In that case, no pseudos
3647 are allowed to be in the register, even if they are only in a block that
3648 doesn't require spill registers, unlike the case when we are spilling this
3649 hard reg to produce another spill register.
3651 Return nonzero if any pseudos needed to be kicked out. */
3654 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3660 enum reg_class class = REGNO_REG_CLASS (regno);
3661 int something_changed = 0;
/* REGNO may no longer be used to hold pseudos or scratches.  */
3664 SET_HARD_REG_BIT (forbidden_regs, regno);
3667 regs_ever_live[regno] = 1;
3669 /* Spill every pseudo reg that was allocated to this reg
3670 or to something that overlaps this reg. */
3672 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3673 if (reg_renumber[i] >= 0
3674 && reg_renumber[i] <= regno
3676 + HARD_REGNO_NREGS (reg_renumber[i],
3677 PSEUDO_REGNO_MODE (i))
3680 /* If this register belongs solely to a basic block which needed no
3681 spilling of any class that this register is contained in,
3682 leave it be, unless we are spilling this register because
3683 it was a hard register that can't be eliminated. */
3685 if (! cant_eliminate
3686 && basic_block_needs[0]
3687 && REG_BASIC_BLOCK (i) >= 0
3688 && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
/* CLASS itself needed no spill in this block, but a superclass
   containing REGNO might have; only skip the pseudo when no
   containing class needed one.  */
3692 for (p = reg_class_superclasses[(int) class];
3693 *p != LIM_REG_CLASSES; p++)
3694 if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
3697 if (*p == LIM_REG_CLASSES)
3701 /* Mark it as no longer having a hard register home. */
3702 reg_renumber[i] = -1;
3703 /* We will need to scan everything again. */
3704 something_changed = 1;
/* With global-alloc information, try to re-home pseudo I in some
   other hard reg before resorting to a stack slot (alter_reg).  */
3706 retry_global_alloc (i, forbidden_regs);
3708 alter_reg (i, regno);
3711 if (reg_renumber[i] == -1)
3712 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3714 fprintf (dumpfile, " Register %d now in %d.\n\n",
3715 i, reg_renumber[i]);
/* Likewise discard any SCRATCH rtxs assigned to REGNO, with the same
   per-basic-block exemption as for pseudos above.  */
3718 for (i = 0; i < scratch_list_length; i++)
3720 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3722 if (! cant_eliminate && basic_block_needs[0]
3723 && ! basic_block_needs[(int) class][scratch_block[i]])
3727 for (p = reg_class_superclasses[(int) class];
3728 *p != LIM_REG_CLASSES; p++)
3729 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3732 if (*p == LIM_REG_CLASSES)
3735 PUT_CODE (scratch_list[i], SCRATCH);
3736 scratch_list[i] = 0;
3737 something_changed = 1;
3742 return something_changed;
3745 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3746 Also mark any hard registers used to store user variables as
3747 forbidden from being used for spill registers. */
3750 scan_paradoxical_subregs (x)
3755 register enum rtx_code code = GET_CODE (x);
/* A hard reg holding a user variable must not be taken as a spill reg
   (only relevant on SMALL_REGISTER_CLASSES targets).  */
3760 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3761 && REG_USERVAR_P (x))
3762 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
/* Paradoxical SUBREG: the outer mode is wider than the inner reg's
   mode.  Record the widest such reference made to the inner reg.  */
3777 if (GET_CODE (SUBREG_REG (x)) == REG
3778 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3779 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3780 = GET_MODE_SIZE (GET_MODE (x));
/* Recurse over all sub-expressions and vectors of X.  */
3787 fmt = GET_RTX_FORMAT (code);
3788 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3791 scan_paradoxical_subregs (XEXP (x, i));
3792 else if (fmt[i] == 'E')
3795 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3796 scan_paradoxical_subregs (XVECEXP (x, i, j));
/* qsort comparison function for hard_reg_n_uses entries: sort by
   increasing use count so lightly-used hard regs come first.  */
3802 hard_reg_use_compare (p1p, p2p)
3803 const GENERIC_PTR p1p;
3804 const GENERIC_PTR p2p;
3806 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3807 *p2 = (struct hard_reg_n_uses *)p2p;
3808 int tem = p1->uses - p2->uses;
3809 if (tem != 0) return tem;
3810 /* If regs are equally good, sort by regno,
3811 so that the results of qsort leave nothing to chance. */
3812 return p1->regno - p2->regno;
3815 /* Choose the order to consider regs for use as reload registers
3816 based on how much trouble would be caused by spilling one.
3817 Store them in order of decreasing preference in potential_reload_regs. */
3820 order_regs_for_reload (global)
3827 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3829 CLEAR_HARD_REG_SET (bad_spill_regs);
3831 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3832 potential_reload_regs[i] = -1;
3834 /* Count number of uses of each hard reg by pseudo regs allocated to it
3835 and then order them by decreasing use. */
3837 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3839 hard_reg_n_uses[i].uses = 0;
3840 hard_reg_n_uses[i].regno = i;
3843 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3845 int regno = reg_renumber[i];
3848 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3851 /* If allocated by local-alloc, show more uses since
3852 we're not going to be able to reallocate it, but
3853 we might if allocated by global alloc. */
3854 if (global && reg_allocno[i] < 0)
3855 hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;
3857 hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
/* LARGE accumulates total reference counts; it is used below to
   build penalties (2*large+2, large+1) that outweigh any real use.  */
3860 large += REG_N_REFS (i);
3863 /* Now fixed registers (which cannot safely be used for reloading)
3864 get a very high use count so they will be considered least desirable.
3865 Registers used explicitly in the rtl code are almost as bad. */
3867 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3871 hard_reg_n_uses[i].uses += 2 * large + 2;
3872 SET_HARD_REG_BIT (bad_spill_regs, i);
3874 else if (regs_explicitly_used[i])
3876 hard_reg_n_uses[i].uses += large + 1;
3877 if (! SMALL_REGISTER_CLASSES)
3878 /* ??? We are doing this here because of the potential
3879 that bad code may be generated if a register explicitly
3880 used in an insn was used as a spill register for that
3881 insn. But not using these as spill registers may lose
3882 on some machine. We'll have to see how this works out. */
3883 SET_HARD_REG_BIT (bad_spill_regs, i);
/* The hard frame pointer is as undesirable as a fixed register.  */
3886 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3887 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3889 #ifdef ELIMINABLE_REGS
3890 /* If registers other than the frame pointer are eliminable, mark them as
3892 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3894 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3895 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3899 /* Prefer registers not so far used, for use in temporary loading.
3900 Among them, if REG_ALLOC_ORDER is defined, use that order.
3901 Otherwise, prefer registers not preserved by calls. */
3903 #ifdef REG_ALLOC_ORDER
3904 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3906 int regno = reg_alloc_order[i];
3908 if (hard_reg_n_uses[regno].uses == 0)
3909 potential_reload_regs[o++] = regno;
3912 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3914 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3915 potential_reload_regs[o++] = i;
3917 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3919 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3920 potential_reload_regs[o++] = i;
/* Sort the used registers by ascending use count (ties by regno).  */
3924 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3925 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3927 /* Now add the regs that are already used,
3928 preferring those used less often. The fixed and otherwise forbidden
3929 registers will be at the end of this list. */
3931 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3932 if (hard_reg_n_uses[i].uses != 0)
3933 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3936 /* Used in reload_as_needed to sort the spilled regs. */
/* qsort comparison function over an array of shorts (spill_regs).
   NOTE(review): the comparison/return statement lies in a gap of this
   excerpt; only the operand fetch is visible here.  */
3939 compare_spill_regs (r1p, r2p)
3940 const GENERIC_PTR r1p;
3941 const GENERIC_PTR r2p;
3943 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3947 /* Reload pseudo-registers into hard regs around each insn as needed.
3948 Additional register load insns are output before the insn that needs it
3949 and perhaps store insns after insns that modify the reloaded pseudo reg.
3951 reg_last_reload_reg and reg_reloaded_contents keep track of
3952 which registers are already available in reload registers.
3953 We update these for the reloads that we perform,
3954 as the insns are scanned. */
3957 reload_as_needed (first, live_known)
3967 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3968 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
3969 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3970 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
3971 reg_has_output_reload = (char *) alloca (max_regno);
3972 CLEAR_HARD_REG_SET (reg_reloaded_valid);
3974 /* Reset all offsets on eliminable registers to their initial values. */
3975 #ifdef ELIMINABLE_REGS
3976 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3978 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3979 reg_eliminate[i].initial_offset);
3980 reg_eliminate[i].previous_offset
3981 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3984 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3985 reg_eliminate[0].previous_offset
3986 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3989 num_not_at_initial_offset = 0;
3991 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3992 pack registers with group needs. */
3995 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3996 for (i = 0; i < n_spills; i++)
3997 spill_reg_order[spill_regs[i]] = i;
4000 for (insn = first; insn;)
4002 register rtx next = NEXT_INSN (insn);
4004 /* Notice when we move to a new basic block. */
4005 if (live_known && this_block + 1 < n_basic_blocks
4006 && insn == basic_block_head[this_block+1])
4009 /* If we pass a label, copy the offsets from the label information
4010 into the current offsets of each elimination. */
4011 if (GET_CODE (insn) == CODE_LABEL)
4013 num_not_at_initial_offset = 0;
4014 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4016 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
4017 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
4018 if (reg_eliminate[i].can_eliminate
4019 && (reg_eliminate[i].offset
4020 != reg_eliminate[i].initial_offset))
4021 num_not_at_initial_offset++;
4025 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4027 rtx avoid_return_reg = 0;
4028 rtx oldpat = PATTERN (insn);
4030 /* Set avoid_return_reg if this is an insn
4031 that might use the value of a function call. */
4032 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
4034 if (GET_CODE (PATTERN (insn)) == SET)
4035 after_call = SET_DEST (PATTERN (insn));
4036 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4037 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4038 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4042 else if (SMALL_REGISTER_CLASSES && after_call != 0
4043 && !(GET_CODE (PATTERN (insn)) == SET
4044 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
4045 && GET_CODE (PATTERN (insn)) != USE)
4047 if (reg_referenced_p (after_call, PATTERN (insn)))
4048 avoid_return_reg = after_call;
4052 /* If this is a USE and CLOBBER of a MEM, ensure that any
4053 references to eliminable registers have been removed. */
4055 if ((GET_CODE (PATTERN (insn)) == USE
4056 || GET_CODE (PATTERN (insn)) == CLOBBER)
4057 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4058 XEXP (XEXP (PATTERN (insn), 0), 0)
4059 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4060 GET_MODE (XEXP (PATTERN (insn), 0)),
4063 /* If we need to do register elimination processing, do so.
4064 This might delete the insn, in which case we are done. */
4065 if (num_eliminable && GET_MODE (insn) == QImode)
4067 eliminate_regs_in_insn (insn, 1);
4068 if (GET_CODE (insn) == NOTE)
4075 if (GET_MODE (insn) == VOIDmode)
4077 /* First find the pseudo regs that must be reloaded for this insn.
4078 This info is returned in the tables reload_... (see reload.h).
4079 Also modify the body of INSN by substituting RELOAD
4080 rtx's for those pseudo regs. */
4083 bzero (reg_has_output_reload, max_regno);
4084 CLEAR_HARD_REG_SET (reg_is_output_reload);
4086 find_reloads (insn, 1, spill_indirect_levels, live_known,
4092 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4096 /* If this block has not had spilling done for a
4097 particular clas and we have any non-optionals that need a
4098 spill reg in that class, abort. */
4100 for (class = 0; class < N_REG_CLASSES; class++)
4101 if (basic_block_needs[class] != 0
4102 && basic_block_needs[class][this_block] == 0)
4103 for (i = 0; i < n_reloads; i++)
4104 if (class == (int) reload_reg_class[i]
4105 && reload_reg_rtx[i] == 0
4106 && ! reload_optional[i]
4107 && (reload_in[i] != 0 || reload_out[i] != 0
4108 || reload_secondary_p[i] != 0))
4109 fatal_insn ("Non-optional registers need a spill register", insn);
4111 /* Now compute which reload regs to reload them into. Perhaps
4112 reusing reload regs from previous insns, or else output
4113 load insns to reload them. Maybe output store insns too.
4114 Record the choices of reload reg in reload_reg_rtx. */
4115 choose_reload_regs (insn, avoid_return_reg);
4117 /* Merge any reloads that we didn't combine for fear of
4118 increasing the number of spill registers needed but now
4119 discover can be safely merged. */
4120 if (SMALL_REGISTER_CLASSES)
4121 merge_assigned_reloads (insn);
4123 /* Generate the insns to reload operands into or out of
4124 their reload regs. */
4125 emit_reload_insns (insn);
4127 /* Substitute the chosen reload regs from reload_reg_rtx
4128 into the insn's body (or perhaps into the bodies of other
4129 load and store insn that we just made for reloading
4130 and that we moved the structure into). */
4133 /* If this was an ASM, make sure that all the reload insns
4134 we have generated are valid. If not, give an error
4137 if (asm_noperands (PATTERN (insn)) >= 0)
4138 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4139 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4140 && (recog_memoized (p) < 0
4141 || (insn_extract (p),
4142 ! constrain_operands (INSN_CODE (p), 1))))
4144 error_for_asm (insn,
4145 "`asm' operand requires impossible reload");
4147 NOTE_SOURCE_FILE (p) = 0;
4148 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4151 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4152 is no longer validly lying around to save a future reload.
4153 Note that this does not detect pseudos that were reloaded
4154 for this insn in order to be stored in
4155 (obeying register constraints). That is correct; such reload
4156 registers ARE still valid. */
4157 note_stores (oldpat, forget_old_reloads_1);
4159 /* There may have been CLOBBER insns placed after INSN. So scan
4160 between INSN and NEXT and use them to forget old reloads. */
4161 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4162 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4163 note_stores (PATTERN (x), forget_old_reloads_1);
4166 /* Likewise for regs altered by auto-increment in this insn.
4167 But note that the reg-notes are not changed by reloading:
4168 they still contain the pseudo-regs, not the spill regs. */
4169 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4170 if (REG_NOTE_KIND (x) == REG_INC)
4172 /* See if this pseudo reg was reloaded in this insn.
4173 If so, its last-reload info is still valid
4174 because it is based on this insn's reload. */
4175 for (i = 0; i < n_reloads; i++)
4176 if (reload_out[i] == XEXP (x, 0))
4180 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4184 /* A reload reg's contents are unknown after a label. */
4185 if (GET_CODE (insn) == CODE_LABEL)
4186 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4188 /* Don't assume a reload reg is still good after a call insn
4189 if it is a call-used reg. */
4190 else if (GET_CODE (insn) == CALL_INSN)
4191 AND_COMPL_HARD_REG_SET(reg_reloaded_valid, call_used_reg_set);
4193 /* In case registers overlap, allow certain insns to invalidate
4194 particular hard registers. */
4196 #ifdef INSN_CLOBBERS_REGNO_P
4197 for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
4198 if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
4199 && INSN_CLOBBERS_REGNO_P (insn, i))
4200 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
4211 /* Discard all record of any value reloaded from X,
4212    or reloaded in X from someplace else;
4213    unless X is an output reload reg of the current insn.
4215    X may be a hard reg (the reload reg)
4216    or it may be a pseudo reg that was reloaded from.  */
/* Invoked as a note_stores callback (see the call with PATTERN/oldpat
   above); IGNORED is the unused second callback argument.  */
4219 forget_old_reloads_1 (x, ignored)
4221      rtx ignored ATTRIBUTE_UNUSED;
4227   /* note_stores does give us subregs of hard regs.  */
4228   while (GET_CODE (x) == SUBREG)
4230       offset += SUBREG_WORD (x);
4234   if (GET_CODE (x) != REG)
4237   regno = REGNO (x) + offset;
4239   if (regno >= FIRST_PSEUDO_REGISTER)
4244       nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4245       /* Storing into a spilled-reg invalidates its contents.
4246 	 This can happen if a block-local pseudo is allocated to that reg
4247 	 and it wasn't spilled because this block's total need is 0.
4248 	 Then some insn might have an optional reload and use this reg.  */
4249       for (i = 0; i < nr; i++)
4250 	/* But don't do this if the reg actually serves as an output
4251 	   reload reg in the current instruction.  */
4253 	    || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4254 	  CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4257   /* Since value of X has changed,
4258      forget any value previously copied from it.  */
4261   /* But don't forget a copy if this is the output reload
4262      that establishes the copy's validity.  */
/* NOTE(review): NR here appears to step over each word of the stored reg
   (loop header not shown in this excerpt) — confirm against full source.  */
4263   if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4264     reg_last_reload_reg[regno + nr] = 0;
4267 /* For each reload, the mode of the reload register. */
4268 static enum machine_mode reload_mode[MAX_RELOADS];
4270 /* For each reload, the largest number of registers it will require. */
/* Consulted by reload_reg_class_lower below, so that multi-register
   ("group") reloads are ordered ahead of single-register ones.  */
4271 static int reload_nregs[MAX_RELOADS];
4273 /* Comparison function for qsort to decide which of two reloads
4274    should be handled first.  *P1 and *P2 are the reload numbers. */
/* NOTE(review): a chain of tie-breakers; presumably each nonzero T is
   returned immediately (the early-return lines are elided in this
   excerpt) — confirm against full source.  Returns negative/zero/positive
   per the standard qsort comparator contract.  */
4277 reload_reg_class_lower (r1p, r2p)
4278      const GENERIC_PTR r1p;
4279      const GENERIC_PTR r2p;
4281   register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4284   /* Consider required reloads before optional ones. */
4285   t = reload_optional[r1] - reload_optional[r2];
4289   /* Count all solitary classes before non-solitary ones. */
4290   t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4291        - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4295   /* Aside from solitaires, consider all multi-reg groups first. */
4296   t = reload_nregs[r2] - reload_nregs[r1];
4300   /* Consider reloads in order of increasing reg-class number. */
4301   t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4305   /* If reloads are equally urgent, sort by reload number,
4306      so that the results of qsort leave nothing to chance. */
4310 /* The following HARD_REG_SETs indicate when each hard register is
4311    used for a reload of various parts of the current insn.
   Each set is indexed by hard register number; the per-operand arrays
   are additionally indexed by operand number.  */
4313 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4314 static HARD_REG_SET reload_reg_used;
4315 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4316 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4317 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4318 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4319 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4320 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4321 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4322 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4323 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4324 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4325 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4326 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4327 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4328 static HARD_REG_SET reload_reg_used_in_op_addr;
4329 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4330 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4331 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4332 static HARD_REG_SET reload_reg_used_in_insn;
4333 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4334 static HARD_REG_SET reload_reg_used_in_other_addr;
4336 /* If reg is in use as a reload reg for any sort of reload. */
4337 static HARD_REG_SET reload_reg_used_at_all;
4339 /* If reg is in use as an inherited reload.  We just mark the first register
4341 static HARD_REG_SET reload_reg_used_for_inherit;
4343 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4344    TYPE.  MODE is used to indicate how many consecutive regs are
   marked: HARD_REGNO_NREGS (REGNO, MODE) registers starting at REGNO.  */
4348 mark_reload_reg_in_use (regno, opnum, type, mode)
4351      enum reload_type type;
4352      enum machine_mode mode;
4354   int nregs = HARD_REGNO_NREGS (regno, mode);
4357   for (i = regno; i < nregs + regno; i++)
4362 	  SET_HARD_REG_BIT (reload_reg_used, i);
4365 	case RELOAD_FOR_INPUT_ADDRESS:
4366 	  SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4369 	case RELOAD_FOR_INPADDR_ADDRESS:
4370 	  SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4373 	case RELOAD_FOR_OUTPUT_ADDRESS:
4374 	  SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4377 	case RELOAD_FOR_OUTADDR_ADDRESS:
4378 	  SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4381 	case RELOAD_FOR_OPERAND_ADDRESS:
4382 	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4385 	case RELOAD_FOR_OPADDR_ADDR:
4386 	  SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4389 	case RELOAD_FOR_OTHER_ADDRESS:
4390 	  SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4393 	case RELOAD_FOR_INPUT:
4394 	  SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4397 	case RELOAD_FOR_OUTPUT:
4398 	  SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4401 	case RELOAD_FOR_INSN:
4402 	  SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
	/* Regardless of TYPE, every reload reg is also recorded in the
	   catch-all set.  */
4406       SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4410 /* Similarly, but show REGNO is no longer in use for a reload.
   Exact inverse of mark_reload_reg_in_use for the per-type sets.
   NOTE(review): no corresponding clear of reload_reg_used_at_all is
   visible here — confirm that is intentional in the full source.  */
4413 clear_reload_reg_in_use (regno, opnum, type, mode)
4416      enum reload_type type;
4417      enum machine_mode mode;
4419   int nregs = HARD_REGNO_NREGS (regno, mode);
4422   for (i = regno; i < nregs + regno; i++)
4427 	  CLEAR_HARD_REG_BIT (reload_reg_used, i);
4430 	case RELOAD_FOR_INPUT_ADDRESS:
4431 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4434 	case RELOAD_FOR_INPADDR_ADDRESS:
4435 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4438 	case RELOAD_FOR_OUTPUT_ADDRESS:
4439 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4442 	case RELOAD_FOR_OUTADDR_ADDRESS:
4443 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4446 	case RELOAD_FOR_OPERAND_ADDRESS:
4447 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4450 	case RELOAD_FOR_OPADDR_ADDR:
4451 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4454 	case RELOAD_FOR_OTHER_ADDRESS:
4455 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4458 	case RELOAD_FOR_INPUT:
4459 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4462 	case RELOAD_FOR_OUTPUT:
4463 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4466 	case RELOAD_FOR_INSN:
4467 	  CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4473 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4474    specified by OPNUM and TYPE.
   Dispatches on TYPE; each case tests the reload_reg_used* sets that a
   reload of that type may conflict with.  */
4477 reload_reg_free_p (regno, opnum, type)
4480      enum reload_type type;
4484   /* In use for a RELOAD_OTHER means it's not available for anything. */
4485   if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4491       /* In use for anything means we can't use it for RELOAD_OTHER. */
4492       if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4493 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4494 	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4497       for (i = 0; i < reload_n_operands; i++)
4498 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4499 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4500 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4501 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4502 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4503 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4508     case RELOAD_FOR_INPUT:
4509       if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4510 	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4513       if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4516       /* If it is used for some other input, can't use it. */
4517       for (i = 0; i < reload_n_operands; i++)
4518 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4521       /* If it is used in a later operand's address, can't use it. */
4522       for (i = opnum + 1; i < reload_n_operands; i++)
4523 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4524 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4529     case RELOAD_FOR_INPUT_ADDRESS:
4530       /* Can't use a register if it is used for an input address for this
4531 	 operand or used as an input in an earlier one. */
4532       if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4533 	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4536       for (i = 0; i < opnum; i++)
4537 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4542     case RELOAD_FOR_INPADDR_ADDRESS:
4543       /* Can't use a register if it is used for an input address
4544 	 for this operand or used as an input in an earlier
4546       if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4549       for (i = 0; i < opnum; i++)
4550 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4555     case RELOAD_FOR_OUTPUT_ADDRESS:
4556       /* Can't use a register if it is used for an output address for this
4557 	 operand or used as an output in this or a later operand. */
4558       if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4561       for (i = opnum; i < reload_n_operands; i++)
4562 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4567     case RELOAD_FOR_OUTADDR_ADDRESS:
4568       /* Can't use a register if it is used for an output address
4569 	 for this operand or used as an output in this or a
4571       if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4574       for (i = opnum; i < reload_n_operands; i++)
4575 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4580     case RELOAD_FOR_OPERAND_ADDRESS:
4581       for (i = 0; i < reload_n_operands; i++)
4582 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4585       return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4586 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4588     case RELOAD_FOR_OPADDR_ADDR:
4589       for (i = 0; i < reload_n_operands; i++)
4590 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4593       return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4595     case RELOAD_FOR_OUTPUT:
4596       /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4597 	 outputs, or an operand address for this or an earlier output. */
4598       if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4601       for (i = 0; i < reload_n_operands; i++)
4602 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4605       for (i = 0; i <= opnum; i++)
4606 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4607 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4612     case RELOAD_FOR_INSN:
4613       for (i = 0; i < reload_n_operands; i++)
4614 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4615 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4618       return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4619 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4621     case RELOAD_FOR_OTHER_ADDRESS:
4622       return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4627 /* Return 1 if the value in reload reg REGNO, as used by a reload
4628    needed for the part of the insn specified by OPNUM and TYPE,
4629    is not in use for a reload in any prior part of the insn.
4631    We can assume that the reload reg was already tested for availability
4632    at the time it is needed, and we should not check this again,
4633    in case the reg has already been marked in use. */
4636 reload_reg_free_before_p (regno, opnum, type)
4639      enum reload_type type;
4645     case RELOAD_FOR_OTHER_ADDRESS:
4646       /* These always come first. */
4650       return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4652       /* If this use is for part of the insn,
4653 	 check the reg is not in use for any prior part.  It is tempting
4654 	 to try to do this by falling through from objects that occur
4655 	 later in the insn to ones that occur earlier, but that will not
4656 	 correctly take into account the fact that here we MUST ignore
4657 	 things that would prevent the register from being allocated in
4658 	 the first place, since we know that it was allocated. */
4660     case RELOAD_FOR_OUTPUT_ADDRESS:
4661     case RELOAD_FOR_OUTADDR_ADDRESS:
4662       /* Earlier reloads are for earlier outputs or their addresses,
4663 	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4664 	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4666       for (i = 0; i < opnum; i++)
4667 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4668 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4669 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4672       if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4675       for (i = 0; i < reload_n_operands; i++)
4676 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4677 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4678 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4681       return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4682 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4683 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4685     case RELOAD_FOR_OUTPUT:
4686       /* This can't be used in the output address for this operand and
4687 	 anything that can't be used for it, except that we've already
4688 	 tested for RELOAD_FOR_INSN objects. */
4690       if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4691 	  || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4694       for (i = 0; i < opnum; i++)
4695 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4696 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4697 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4700       for (i = 0; i < reload_n_operands; i++)
4701 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4702 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4703 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4704 	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4707       return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4709     case RELOAD_FOR_OPERAND_ADDRESS:
4710       /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4711       if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4714       /* ... fall through ... */
4716     case RELOAD_FOR_OPADDR_ADDR:
4717     case RELOAD_FOR_INSN:
4718       /* These can't conflict with inputs, or each other, so all we have to
4719 	 test is input addresses and the addresses of OTHER items. */
4721       for (i = 0; i < reload_n_operands; i++)
4722 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4723 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4726       return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4728     case RELOAD_FOR_INPUT:
4729       /* The only things earlier are the address for this and
4730 	 earlier inputs, other inputs (which we know we don't conflict
4731 	 with), and addresses of RELOAD_OTHER objects. */
4733       for (i = 0; i <= opnum; i++)
4734 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4735 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4738       return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4740     case RELOAD_FOR_INPUT_ADDRESS:
4741     case RELOAD_FOR_INPADDR_ADDRESS:
4742       /* Similarly, all we have to check is for use in earlier inputs'
4744       for (i = 0; i < opnum; i++)
4745 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4746 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4749       return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4754 /* Return 1 if the value in reload reg REGNO, as used by a reload
4755    needed for the part of the insn specified by OPNUM and TYPE,
4756    is still available in REGNO at the end of the insn.
4758    We can assume that the reload reg was already tested for availability
4759    at the time it is needed, and we should not check this again,
4760    in case the reg has already been marked in use.
   Mirror image of reload_reg_free_before_p: here each case tests the
   parts of the insn that come AFTER the given use.  */
4763 reload_reg_reaches_end_p (regno, opnum, type)
4766      enum reload_type type;
4773       /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4774 	 its value must reach the end. */
4777       /* If this use is for part of the insn,
4778 	 its value reaches if no subsequent part uses the same register.
4779 	 Just like the above function, don't try to do this with lots
4782     case RELOAD_FOR_OTHER_ADDRESS:
4783       /* Here we check for everything else, since these don't conflict
4784 	 with anything else and everything comes later. */
4786       for (i = 0; i < reload_n_operands; i++)
4787 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4788 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4789 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4790 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4791 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4792 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4795       return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4796 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4797 	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4799     case RELOAD_FOR_INPUT_ADDRESS:
4800     case RELOAD_FOR_INPADDR_ADDRESS:
4801       /* Similar, except that we check only for this and subsequent inputs
4802 	 and the address of only subsequent inputs and we do not need
4803 	 to check for RELOAD_OTHER objects since they are known not to
4806       for (i = opnum; i < reload_n_operands; i++)
4807 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4810       for (i = opnum + 1; i < reload_n_operands; i++)
4811 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4812 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4815       for (i = 0; i < reload_n_operands; i++)
4816 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4817 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4818 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4821       if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4824       return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4825 	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4827     case RELOAD_FOR_INPUT:
4828       /* Similar to input address, except we start at the next operand for
4829 	 both input and input address and we do not check for
4830 	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4833       for (i = opnum + 1; i < reload_n_operands; i++)
4834 	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4835 	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4836 	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4839       /* ... fall through ... */
4841     case RELOAD_FOR_OPERAND_ADDRESS:
4842       /* Check outputs and their addresses. */
4844       for (i = 0; i < reload_n_operands; i++)
4845 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4846 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4847 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4852     case RELOAD_FOR_OPADDR_ADDR:
4853       for (i = 0; i < reload_n_operands; i++)
4854 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4855 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4856 	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4859       return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4860 	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4862     case RELOAD_FOR_INSN:
4863       /* These conflict with other outputs with RELOAD_OTHER.  So
4864 	 we need only check for output addresses. */
4868       /* ... fall through ... */
4870     case RELOAD_FOR_OUTPUT:
4871     case RELOAD_FOR_OUTPUT_ADDRESS:
4872     case RELOAD_FOR_OUTADDR_ADDRESS:
4873       /* We already know these can't conflict with a later output.  So the
4874 	 only thing to check are later output addresses. */
4875       for (i = opnum + 1; i < reload_n_operands; i++)
4876 	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4877 	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4886 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4889    This function uses the same algorithm as reload_reg_free_p above.
   Each case lists, for R1's type, the R2 types (and operand-number
   relations) that would clash on the same hard register.  */
4892 reloads_conflict (r1, r2)
4895   enum reload_type r1_type = reload_when_needed[r1];
4896   enum reload_type r2_type = reload_when_needed[r2];
4897   int r1_opnum = reload_opnum[r1];
4898   int r2_opnum = reload_opnum[r2];
4900   /* RELOAD_OTHER conflicts with everything. */
4901   if (r2_type == RELOAD_OTHER)
4904   /* Otherwise, check conflicts differently for each type. */
4908     case RELOAD_FOR_INPUT:
4909       return (r2_type == RELOAD_FOR_INSN
4910 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4911 	      || r2_type == RELOAD_FOR_OPADDR_ADDR
4912 	      || r2_type == RELOAD_FOR_INPUT
4913 	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4914 		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4915 		  && r2_opnum > r1_opnum));
4917     case RELOAD_FOR_INPUT_ADDRESS:
4918       return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4919 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4921     case RELOAD_FOR_INPADDR_ADDRESS:
4922       return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4923 	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4925     case RELOAD_FOR_OUTPUT_ADDRESS:
4926       return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4927 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4929     case RELOAD_FOR_OUTADDR_ADDRESS:
4930       return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4931 	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4933     case RELOAD_FOR_OPERAND_ADDRESS:
4934       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4935 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4937     case RELOAD_FOR_OPADDR_ADDR:
4938       return (r2_type == RELOAD_FOR_INPUT
4939 	      || r2_type == RELOAD_FOR_OPADDR_ADDR);
4941     case RELOAD_FOR_OUTPUT:
4942       return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4943 	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4944 		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4945 		  && r2_opnum >= r1_opnum));
4947     case RELOAD_FOR_INSN:
4948       return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4949 	      || r2_type == RELOAD_FOR_INSN
4950 	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4952     case RELOAD_FOR_OTHER_ADDRESS:
4953       return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4963 /* Vector of reload-numbers showing the order in which the reloads should
4965 short reload_order[MAX_RELOADS];
4967 /* Indexed by reload number, 1 if incoming value
4968    inherited from previous insns. */
4969 char reload_inherited[MAX_RELOADS];
4971 /* For an inherited reload, this is the insn the reload was inherited from,
4972    if we know it.  Otherwise, this is 0. */
4973 rtx reload_inheritance_insn[MAX_RELOADS];
4975 /* If non-zero, this is a place to get the value of the reload,
4976    rather than using reload_in. */
4977 rtx reload_override_in[MAX_RELOADS];
4979 /* For each reload, the hard register number of the register used,
4980    or -1 if we did not need a register for this reload. */
/* NOTE(review): despite the name, this appears to hold a hard register
   number, not a spill_regs index — confirm against the full source.  */
4981 int reload_spill_index[MAX_RELOADS];
4983 /* Return 1 if the value in reload reg REGNO, as used by a reload
4984    needed for the part of the insn specified by OPNUM and TYPE,
4985    may be used to load VALUE into it.
4986    Other read-only reloads with the same value do not conflict.
4987    The caller has to make sure that there is no conflict with the return
4990 reload_reg_free_for_value_p (regno, opnum, type, value)
4993      enum reload_type type;
4999   /* We use some pseudo 'time' value to check if the lifetimes of the
5000      new register use would overlap with the one of a previous reload
5001      that is not read-only or uses a different value.
5002      The 'time' used doesn't have to be linear in any shape or form, just
5004      Some reload types use different 'buckets' for each operand.
5005      So there are MAX_RECOG_OPERANDS different time values for each
5007      We compute TIME1 as the time when the register for the prospective
5008      new reload ceases to be live, and TIME2 for each existing
5009      reload as the time when that the reload register of that reload
5011      Where there is little to be gained by exact lifetime calculations,
5012      we just make conservative assumptions, i.e. a longer lifetime;
5013      this is done in the 'default:' cases. */
5016     case RELOAD_FOR_OTHER_ADDRESS:
5019     /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5020        RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
5021        respectively, to the time values for these, we get distinct time
5022        values.  To get distinct time values for each operand, we have to
5023        multiply opnum by at least three.  We round that up to four because
5024        multiply by four is often cheaper. */
5025     case RELOAD_FOR_INPADDR_ADDRESS:
5026       time1 = opnum * 4 + 1;
5028     case RELOAD_FOR_INPUT_ADDRESS:
5029       time1 = opnum * 4 + 2;
5031     case RELOAD_FOR_INPUT:
5032       /* All RELOAD_FOR_INPUT reloads remain live till just before the
5033 	 instruction is executed. */
5034       time1 = (MAX_RECOG_OPERANDS - 1) * 4 + 3;
5036       /* opnum * 4 + 3 < opnum * 4 + 4
5037 	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5038     case RELOAD_FOR_OUTPUT_ADDRESS:
5039       time1 = MAX_RECOG_OPERANDS * 4 + opnum;
      /* Conservative default: treat the reg as live past everything.  */
5042       time1 = MAX_RECOG_OPERANDS * 5;
  /* Now compare against every other reload of this insn that shares any
     hard register with REGNO and does not read the same VALUE.  */
5045   for (i = 0; i < n_reloads; i++)
5047       rtx reg = reload_reg_rtx[i];
5048       if (reg && GET_CODE (reg) == REG
5049 	  && ((unsigned) regno - true_regnum (reg)
5050 	      <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - 1U)
5051 	  && (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
5055 	  switch (reload_when_needed[i])
5057 	    case RELOAD_FOR_OTHER_ADDRESS:
5060 	    case RELOAD_FOR_INPADDR_ADDRESS:
5061 	      time2 = reload_opnum[i] * 4 + 1;
5063 	    case RELOAD_FOR_INPUT_ADDRESS:
5064 	      time2 = reload_opnum[i] * 4 + 2;
5066 	    case RELOAD_FOR_INPUT:
5067 	      time2 = reload_opnum[i] * 4 + 3;
5069 	    case RELOAD_FOR_OUTPUT:
5070 	      /* All RELOAD_FOR_OUTPUT reloads become live just after the
5071 		 instruction is executed. */
5072 	      time2 = MAX_RECOG_OPERANDS * 4;
5074 	      /* The first RELOAD_FOR_OUTPUT_ADDRESS reload conflicts with the
5075 		 RELOAD_FOR_OUTPUT reloads, so assign it the same time value. */
5076 	    case RELOAD_FOR_OUTPUT_ADDRESS:
5077 	      time2 = MAX_RECOG_OPERANDS * 4 + reload_opnum[i];
5089 /* Find a spill register to use as a reload register for reload R.
5090 LAST_RELOAD is non-zero if this is the last reload for the insn being
5093 Set reload_reg_rtx[R] to the register allocated.
5095 If NOERROR is nonzero, we return 1 if successful,
5096 or 0 if we couldn't find a spill reg and we didn't change anything. */
/* NOTE(review): this listing elides many original source lines (the embedded
   line numbers skip), so local declarations, braces, and some statements of
   this function are missing here; read against the complete file. */
5099 allocate_reload_reg (r, insn, last_reload, noerror)
5111 /* If we put this reload ahead, thinking it is a group,
5112 then insist on finding a group. Otherwise we can grab a
5113 reg that some other reload needs.
5114 (That can happen when we have a 68000 DATA_OR_FP_REG
5115 which is a group of data regs or one fp reg.)
5116 We need not be so restrictive if there are no more reloads
5119 ??? Really it would be nicer to have smarter handling
5120 for that kind of reg class, where a problem like this is normal.
5121 Perhaps those classes should be avoided for reloading
5122 by use of more alternatives. */
5124 int force_group = reload_nregs[r] > 1 && ! last_reload;
5126 /* If we want a single register and haven't yet found one,
5127 take any reg in the right class and not in use.
5128 If we want a consecutive group, here is where we look for it.
5130 We use two passes so we can first look for reload regs to
5131 reuse, which are already in use for other reloads in this insn,
5132 and only then use additional registers.
5133 I think that maximizing reuse is needed to make sure we don't
5134 run out of reload regs. Suppose we have three reloads, and
5135 reloads A and B can share regs. These need two regs.
5136 Suppose A and B are given different regs.
5137 That leaves none for C. */
5138 for (pass = 0; pass < 2; pass++)
5140 /* I is the index in spill_regs.
5141 We advance it round-robin between insns to use all spill regs
5142 equally, so that inherited reloads have a chance
5143 of leapfrogging each other. Don't do this, however, when we have
5144 group needs and failure would be fatal; if we only have a relatively
5145 small number of spill registers, and more than one of them has
5146 group needs, then by starting in the middle, we may end up
5147 allocating the first one in such a way that we are not left with
5148 sufficient groups to handle the rest. */
5150 if (noerror || ! force_group)
/* Scan every spill register once per pass, round-robin from I. */
5155 for (count = 0; count < n_spills; count++)
5157 int class = (int) reload_reg_class[r];
5159 i = (i + 1) % n_spills;
/* Candidate test: the spill reg must be free for this reload's lifetime
   (or reusable for an input-only reload per reload_reg_free_for_value_p),
   lie in the required class, and accept the reload mode. */
5161 if ((reload_reg_free_p (spill_regs[i], reload_opnum[r],
5162 reload_when_needed[r])
5163 || (reload_in[r] && ! reload_out[r]
5164 /* We check reload_reg_used to make sure we
5165 don't clobber the return register. */
5166 && ! TEST_HARD_REG_BIT (reload_reg_used, spill_regs[i])
5167 && reload_reg_free_for_value_p (spill_regs[i],
5169 reload_when_needed[r],
5171 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5172 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5173 /* Look first for regs to share, then for unshared. But
5174 don't share regs used for inherited reloads; they are
5175 the ones we want to preserve. */
5177 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5179 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5182 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5183 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5184 (on 68000) got us two FP regs. If NR is 1,
5185 we would reject both of them. */
5187 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5188 /* If we need only one reg, we have already won. */
5191 /* But reject a single reg if we demand a group. */
5196 /* Otherwise check that as many consecutive regs as we need
5198 Also, don't use for a group registers that are
5199 needed for nongroups. */
5200 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5203 regno = spill_regs[i] + nr - 1;
5204 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5205 && spill_reg_order[regno] >= 0
5206 && reload_reg_free_p (regno, reload_opnum[r],
5207 reload_when_needed[r])
5208 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5218 /* If we found something on pass 1, omit pass 2. */
5219 if (count < n_spills)
5223 /* We should have found a spill register by now. */
5224 if (count == n_spills)
5231 /* I is the index in SPILL_REG_RTX of the reload register we are to
5232 allocate. Get an rtx for it and find its register number. */
/* Cache the REG rtx per spill reg; regenerate when the mode differs. */
5234 new = spill_reg_rtx[i];
5236 if (new == 0 || GET_MODE (new) != reload_mode[r])
5237 spill_reg_rtx[i] = new
5238 = gen_rtx_REG (reload_mode[r], spill_regs[i]);
5240 regno = true_regnum (new);
5242 /* Detect when the reload reg can't hold the reload mode.
5243 This used to be one `if', but Sequent compiler can't handle that. */
5244 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5246 enum machine_mode test_mode = VOIDmode;
5248 test_mode = GET_MODE (reload_in[r]);
5249 /* If reload_in[r] has VOIDmode, it means we will load it
5250 in whatever mode the reload reg has: to wit, reload_mode[r].
5251 We have already tested that for validity. */
5252 /* Aside from that, we need to test that the expressions
5253 to reload from or into have modes which are valid for this
5254 reload register. Otherwise the reload insns would be invalid. */
5255 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5256 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5257 if (! (reload_out[r] != 0
5258 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5260 /* The reg is OK. */
5263 /* Mark as in use for this insn the reload regs we use
5265 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5266 reload_when_needed[r], reload_mode[r]);
5268 reload_reg_rtx[r] = new;
5269 reload_spill_index[r] = spill_regs[i];
5274 /* The reg is not OK. */
/* Failure path: a compiler-generated insn that cannot be reloaded is a
   fatal internal error; an asm whose constraints cannot be satisfied is
   a user error, so disable the reload instead of crashing later. */
5279 if (asm_noperands (PATTERN (insn)) < 0)
5280 /* It's the compiler's fault. */
5281 fatal_insn ("Could not find a spill register", insn);
5283 /* It's the user's fault; the operand's mode and constraint
5284 don't match. Disable this reload so we don't crash in final. */
5285 error_for_asm (insn,
5286 "`asm' operand constraint incompatible with operand size");
5289 reload_reg_rtx[r] = 0;
5290 reload_optional[r] = 1;
5291 reload_secondary_p[r] = 1;
5296 /* Assign hard reg targets for the pseudo-registers we must reload
5297 into hard regs for this insn.
5298 Also output the instructions to copy them in and out of the hard regs.
5300 For machines with register classes, we are responsible for
5301 finding a reload reg in the proper class. */
/* NOTE(review): this listing elides many original source lines (the embedded
   line numbers skip); parameter declarations, braces, and some statements of
   this function are missing here — consult the complete file. */
5304 choose_reload_regs (insn, avoid_return_reg)
5306 rtx avoid_return_reg;
5309 int max_group_size = 1;
5310 enum reg_class group_class = NO_REGS;
/* Snapshot arrays: the first (with-inheritance) allocation attempt can be
   undone wholesale by restoring these saved copies. */
5313 rtx save_reload_reg_rtx[MAX_RELOADS];
5314 char save_reload_inherited[MAX_RELOADS];
5315 rtx save_reload_inheritance_insn[MAX_RELOADS];
5316 rtx save_reload_override_in[MAX_RELOADS];
5317 int save_reload_spill_index[MAX_RELOADS];
5318 HARD_REG_SET save_reload_reg_used;
5319 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5320 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5321 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5322 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5323 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5324 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5325 HARD_REG_SET save_reload_reg_used_in_op_addr;
5326 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5327 HARD_REG_SET save_reload_reg_used_in_insn;
5328 HARD_REG_SET save_reload_reg_used_in_other_addr;
5329 HARD_REG_SET save_reload_reg_used_at_all;
/* Reset per-insn reload bookkeeping before allocating anything. */
5331 bzero (reload_inherited, MAX_RELOADS);
5332 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5333 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5335 CLEAR_HARD_REG_SET (reload_reg_used);
5336 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5337 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5338 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5339 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5340 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5342 for (i = 0; i < reload_n_operands; i++)
5344 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5345 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5346 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5347 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5348 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5349 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5352 /* Don't bother with avoiding the return reg
5353 if we have no mandatory reload that could use it. */
5354 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5357 int regno = REGNO (avoid_return_reg);
5359 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5362 for (r = regno; r < regno + nregs; r++)
5363 if (spill_reg_order[r] >= 0)
5364 for (j = 0; j < n_reloads; j++)
5365 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5366 && (reload_in[j] != 0 || reload_out[j] != 0
5367 || reload_secondary_p[j])
5369 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5372 avoid_return_reg = 0;
5375 #if 0 /* Not needed, now that we can always retry without inheritance. */
5376 /* See if we have more mandatory reloads than spill regs.
5377 If so, then we cannot risk optimizations that could prevent
5378 reloads from sharing one spill register.
5380 Since we will try finding a better register than reload_reg_rtx
5381 unless it is equal to reload_in or reload_out, count such reloads. */
5384 int tem = SMALL_REGISTER_CLASSES? (avoid_return_reg != 0): 0;
5385 for (j = 0; j < n_reloads; j++)
5386 if (! reload_optional[j]
5387 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5388 && (reload_reg_rtx[j] == 0
5389 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5390 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5397 /* Don't use the subroutine call return reg for a reload
5398 if we are supposed to avoid it. */
5399 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
5401 int regno = REGNO (avoid_return_reg);
5403 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5406 for (r = regno; r < regno + nregs; r++)
5407 if (spill_reg_order[r] >= 0)
5408 SET_HARD_REG_BIT (reload_reg_used, r);
5411 /* In order to be certain of getting the registers we need,
5412 we must sort the reloads into order of increasing register class.
5413 Then our grabbing of reload registers will parallel the process
5414 that provided the reload registers.
5416 Also note whether any of the reloads wants a consecutive group of regs.
5417 If so, record the maximum size of the group desired and what
5418 register class contains all the groups needed by this insn. */
5420 for (j = 0; j < n_reloads; j++)
5422 reload_order[j] = j;
5423 reload_spill_index[j] = -1;
/* reload_mode[j] is the wider of the input and output modes. */
5426 = (reload_inmode[j] == VOIDmode
5427 || (GET_MODE_SIZE (reload_outmode[j])
5428 > GET_MODE_SIZE (reload_inmode[j])))
5429 ? reload_outmode[j] : reload_inmode[j];
5431 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5433 if (reload_nregs[j] > 1)
5435 max_group_size = MAX (reload_nregs[j], max_group_size);
5436 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5439 /* If we have already decided to use a certain register,
5440 don't use it in another way. */
5441 if (reload_reg_rtx[j])
5442 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5443 reload_when_needed[j], reload_mode[j]);
5447 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
/* Save the current allocation state so a failed with-inheritance attempt
   can be rolled back below. */
5449 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5450 sizeof reload_reg_rtx);
5451 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5452 bcopy ((char *) reload_inheritance_insn,
5453 (char *) save_reload_inheritance_insn,
5454 sizeof reload_inheritance_insn);
5455 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5456 sizeof reload_override_in);
5457 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5458 sizeof reload_spill_index);
5459 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5460 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5461 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5462 reload_reg_used_in_op_addr);
5464 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5465 reload_reg_used_in_op_addr_reload);
5467 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5468 reload_reg_used_in_insn);
5469 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5470 reload_reg_used_in_other_addr);
5472 for (i = 0; i < reload_n_operands; i++)
5474 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5475 reload_reg_used_in_output[i]);
5476 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5477 reload_reg_used_in_input[i]);
5478 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5479 reload_reg_used_in_input_addr[i]);
5480 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5481 reload_reg_used_in_inpaddr_addr[i]);
5482 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5483 reload_reg_used_in_output_addr[i]);
5484 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5485 reload_reg_used_in_outaddr_addr[i]);
5488 /* If -O, try first with inheritance, then turning it off.
5489 If not -O, don't do inheritance.
5490 Using inheritance when not optimizing leads to paradoxes
5491 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5492 because one side of the comparison might be inherited. */
5494 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5496 /* Process the reloads in order of preference just found.
5497 Beyond this point, subregs can be found in reload_reg_rtx.
5499 This used to look for an existing reloaded home for all
5500 of the reloads, and only then perform any new reloads.
5501 But that could lose if the reloads were done out of reg-class order
5502 because a later reload with a looser constraint might have an old
5503 home in a register needed by an earlier reload with a tighter constraint.
5505 To solve this, we make two passes over the reloads, in the order
5506 described above. In the first pass we try to inherit a reload
5507 from a previous insn. If there is a later reload that needs a
5508 class that is a proper subset of the class being processed, we must
5509 also allocate a spill register during the first pass.
5511 Then make a second pass over the reloads to allocate any reloads
5512 that haven't been given registers yet. */
5514 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5516 for (j = 0; j < n_reloads; j++)
5518 register int r = reload_order[j];
5520 /* Ignore reloads that got marked inoperative. */
5521 if (reload_out[r] == 0 && reload_in[r] == 0
5522 && ! reload_secondary_p[r])
5525 /* If find_reloads chose to use reload_in or reload_out as a reload
5526 register, we don't need to choose one. Otherwise, try even if it
5527 found one since we might save an insn if we find the value lying
5529 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5530 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5531 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5534 #if 0 /* No longer needed for correct operation.
5535 It might give better code, or might not; worth an experiment? */
5536 /* If this is an optional reload, we can't inherit from earlier insns
5537 until we are sure that any non-optional reloads have been allocated.
5538 The following code takes advantage of the fact that optional reloads
5539 are at the end of reload_order. */
5540 if (reload_optional[r] != 0)
5541 for (i = 0; i < j; i++)
5542 if ((reload_out[reload_order[i]] != 0
5543 || reload_in[reload_order[i]] != 0
5544 || reload_secondary_p[reload_order[i]])
5545 && ! reload_optional[reload_order[i]]
5546 && reload_reg_rtx[reload_order[i]] == 0)
5547 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5550 /* First see if this pseudo is already available as reloaded
5551 for a previous insn. We cannot try to inherit for reloads
5552 that are smaller than the maximum number of registers needed
5553 for groups unless the register we would allocate cannot be used
5556 We could check here to see if this is a secondary reload for
5557 an object that is already in a register of the desired class.
5558 This would avoid the need for the secondary reload register.
5559 But this is complex because we can't easily determine what
5560 objects might want to be loaded via this reload. So let a
5561 register be allocated here. In `emit_reload_insns' we suppress
5562 one of the loads in the case described above. */
/* Determine REGNO/MODE of the value being reloaded in, so we can look
   up the hard reg it was last reloaded into (reg_last_reload_reg). */
5566 register int regno = -1;
5567 enum machine_mode mode;
5569 if (reload_in[r] == 0)
5571 else if (GET_CODE (reload_in[r]) == REG)
5573 regno = REGNO (reload_in[r]);
5574 mode = GET_MODE (reload_in[r]);
5576 else if (GET_CODE (reload_in_reg[r]) == REG)
5578 regno = REGNO (reload_in_reg[r]);
5579 mode = GET_MODE (reload_in_reg[r]);
5581 else if (GET_CODE (reload_in[r]) == MEM)
/* MEM input: a preceding (use (reg)) with a matching REG_EQUAL MEM
   note identifies the pseudo whose stack slot this MEM is. */
5583 rtx prev = prev_nonnote_insn (insn), note;
5585 if (prev && GET_CODE (prev) == INSN
5586 && GET_CODE (PATTERN (prev)) == USE
5587 && GET_CODE (XEXP (PATTERN (prev), 0)) == REG
5588 && (REGNO (XEXP (PATTERN (prev), 0))
5589 >= FIRST_PSEUDO_REGISTER)
5590 && (note = find_reg_note (prev, REG_EQUAL, NULL_RTX))
5591 && GET_CODE (XEXP (note, 0)) == MEM)
5593 rtx addr = XEXP (XEXP (note, 0), 0);
5595 = (GET_MODE_SIZE (GET_MODE (addr))
5596 - GET_MODE_SIZE (GET_MODE (reload_in[r])));
5598 && rtx_equal_p ((BYTES_BIG_ENDIAN
5599 ? plus_constant (addr, size_diff)
5601 XEXP (reload_in[r], 0)))
5603 regno = REGNO (XEXP (PATTERN (prev), 0));
5604 mode = GET_MODE (reload_in[r]);
5609 /* This won't work, since REGNO can be a pseudo reg number.
5610 Also, it takes much more hair to keep track of all the things
5611 that can invalidate an inherited reload of part of a pseudoreg. */
5612 else if (GET_CODE (reload_in[r]) == SUBREG
5613 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5614 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5617 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5619 i = REGNO (reg_last_reload_reg[regno]);
/* Inheritance test: the previously used hard reg must still hold this
   value, be valid, wide enough, in the right class, and free here. */
5621 if (reg_reloaded_contents[i] == regno
5622 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5623 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5624 >= GET_MODE_SIZE (mode))
5625 && HARD_REGNO_MODE_OK (i, reload_mode[r])
5626 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5628 && (reload_nregs[r] == max_group_size
5629 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5631 && ((reload_reg_free_p (i, reload_opnum[r],
5632 reload_when_needed[r])
5633 && reload_reg_free_before_p (i, reload_opnum[r],
5634 reload_when_needed[r]))
5635 || reload_reg_free_for_value_p (i, reload_opnum[r],
5636 reload_when_needed[r],
5639 /* If a group is needed, verify that all the subsequent
5640 registers still have their values intact. */
5642 = HARD_REGNO_NREGS (i, reload_mode[r]);
5645 for (k = 1; k < nr; k++)
5646 if (reg_reloaded_contents[i + k] != regno
5647 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k)
5654 /* We found a register that contains the
5655 value we need. If this register is the
5656 same as an `earlyclobber' operand of the
5657 current insn, just mark it as a place to
5658 reload from since we can't use it as the
5659 reload register itself. */
5661 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5662 if (reg_overlap_mentioned_for_reload_p
5663 (reg_last_reload_reg[regno],
5664 reload_earlyclobbers[i1]))
5667 if (i1 != n_earlyclobbers
5668 /* Don't use it if we'd clobber a pseudo reg. */
5669 || (spill_reg_order[i] < 0
5671 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5672 /* Don't really use the inherited spill reg
5673 if we need it wider than we've got it. */
5674 || (GET_MODE_SIZE (reload_mode[r])
5675 > GET_MODE_SIZE (mode)))
5676 reload_override_in[r] = reg_last_reload_reg[regno];
5680 /* We can use this as a reload reg. */
5681 /* Mark the register as in use for this part of
5683 mark_reload_reg_in_use (i,
5685 reload_when_needed[r],
5687 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5688 reload_inherited[r] = 1;
5689 reload_inheritance_insn[r]
5690 = reg_reloaded_insn[i];
5691 reload_spill_index[r] = i;
5692 for (k = 0; k < nr; k++)
5693 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5701 /* Here's another way to see if the value is already lying around. */
5703 && reload_in[r] != 0
5704 && ! reload_inherited[r]
5705 && reload_out[r] == 0
5706 && (CONSTANT_P (reload_in[r])
5707 || GET_CODE (reload_in[r]) == PLUS
5708 || GET_CODE (reload_in[r]) == REG
5709 || GET_CODE (reload_in[r]) == MEM)
5710 && (reload_nregs[r] == max_group_size
5711 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5714 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5715 -1, NULL_PTR, 0, reload_mode[r]);
5720 if (GET_CODE (equiv) == REG)
5721 regno = REGNO (equiv);
5722 else if (GET_CODE (equiv) == SUBREG)
5724 /* This must be a SUBREG of a hard register.
5725 Make a new REG since this might be used in an
5726 address and not all machines support SUBREGs
5728 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5729 equiv = gen_rtx_REG (reload_mode[r], regno);
5735 /* If we found a spill reg, reject it unless it is free
5736 and of the desired class. */
5738 && ((spill_reg_order[regno] >= 0
5739 && ! (reload_reg_free_before_p (regno, reload_opnum[r],
5740 reload_when_needed[r])
5741 || reload_reg_free_for_value_p (regno,
5743 reload_when_needed[r],
5745 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5749 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5752 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5755 /* We found a register that contains the value we need.
5756 If this register is the same as an `earlyclobber' operand
5757 of the current insn, just mark it as a place to reload from
5758 since we can't use it as the reload register itself. */
5761 for (i = 0; i < n_earlyclobbers; i++)
5762 if (reg_overlap_mentioned_for_reload_p (equiv,
5763 reload_earlyclobbers[i]))
5765 reload_override_in[r] = equiv;
5770 /* JRV: If the equiv register we have found is
5771 explicitly clobbered in the current insn, mark but
5772 don't use, as above. */
5774 if (equiv != 0 && regno_clobbered_p (regno, insn))
5776 reload_override_in[r] = equiv;
5780 /* If we found an equivalent reg, say no code need be generated
5781 to load it, and use it as our reload reg. */
5782 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5784 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5786 reload_reg_rtx[r] = equiv;
5787 reload_inherited[r] = 1;
5789 /* If any of the hard registers in EQUIV are spill
5790 registers, mark them as in use for this insn. */
5791 for (k = 0; k < nr; k++)
5793 i = spill_reg_order[regno + k];
5796 mark_reload_reg_in_use (regno, reload_opnum[r],
5797 reload_when_needed[r],
5799 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5806 /* If we found a register to use already, or if this is an optional
5807 reload, we are done. */
5808 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5811 #if 0 /* No longer needed for correct operation. Might or might not
5812 give better code on the average. Want to experiment? */
5814 /* See if there is a later reload that has a class different from our
5815 class that intersects our class or that requires less register
5816 than our reload. If so, we must allocate a register to this
5817 reload now, since that reload might inherit a previous reload
5818 and take the only available register in our class. Don't do this
5819 for optional reloads since they will force all previous reloads
5820 to be allocated. Also don't do this for reloads that have been
5823 for (i = j + 1; i < n_reloads; i++)
5825 int s = reload_order[i];
5827 if ((reload_in[s] == 0 && reload_out[s] == 0
5828 && ! reload_secondary_p[s])
5829 || reload_optional[s])
5832 if ((reload_reg_class[s] != reload_reg_class[r]
5833 && reg_classes_intersect_p (reload_reg_class[r],
5834 reload_reg_class[s]))
5835 || reload_nregs[s] < reload_nregs[r])
5842 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5846 /* Now allocate reload registers for anything non-optional that
5847 didn't get one yet. */
5848 for (j = 0; j < n_reloads; j++)
5850 register int r = reload_order[j];
5852 /* Ignore reloads that got marked inoperative. */
5853 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5856 /* Skip reloads that already have a register allocated or are
5858 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5861 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5865 /* If that loop got all the way, we have won. */
5870 /* Loop around and try without any inheritance. */
5871 /* First undo everything done by the failed attempt
5872 to allocate with inheritance. */
5873 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5874 sizeof reload_reg_rtx);
5875 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5876 sizeof reload_inherited);
5877 bcopy ((char *) save_reload_inheritance_insn,
5878 (char *) reload_inheritance_insn,
5879 sizeof reload_inheritance_insn);
5880 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5881 sizeof reload_override_in);
5882 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5883 sizeof reload_spill_index);
5884 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5885 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5886 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5887 save_reload_reg_used_in_op_addr);
5888 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5889 save_reload_reg_used_in_op_addr_reload);
5890 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5891 save_reload_reg_used_in_insn);
5892 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5893 save_reload_reg_used_in_other_addr);
5895 for (i = 0; i < reload_n_operands; i++)
5897 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5898 save_reload_reg_used_in_input[i]);
5899 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5900 save_reload_reg_used_in_output[i]);
5901 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5902 save_reload_reg_used_in_input_addr[i]);
5903 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5904 save_reload_reg_used_in_inpaddr_addr[i]);
5905 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5906 save_reload_reg_used_in_output_addr[i]);
5907 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5908 save_reload_reg_used_in_outaddr_addr[i]);
5912 /* If we thought we could inherit a reload, because it seemed that
5913 nothing else wanted the same reload register earlier in the insn,
5914 verify that assumption, now that all reloads have been assigned. */
5916 for (j = 0; j < n_reloads; j++)
5918 register int r = reload_order[j];
5920 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5921 && ! (reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5923 reload_when_needed[r])
5924 || reload_reg_free_for_value_p (true_regnum (reload_reg_rtx[r]),
5926 reload_when_needed[r],
5928 reload_inherited[r] = 0;
5929 /* If we can inherit a RELOAD_FOR_INPUT, then we do not need its related
5930 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads.
5931 ??? This could be extended to other reload types, but these are
5932 more tricky to handle:
5933 RELOAD_FOR_OTHER_ADDRESS reloads might have been merged, so we
5934 can't eliminate them without a check that *all* references are
5935 now unused due to inheritance.
5936 While RELOAD_FOR_INPADDR_ADDRESS and RELOAD_FOR_OUTADDR_ADDRESS are
5937 not merged, we can't be sure that we have eliminated the use of
5938 that particular reload if we have seen just one
5939 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_OUTPUT_ADDRESS being inherited,
5940 since there might be multiple of the latter two reloads for a single
5942 RELOAD_FOR_OPADDR_ADDR reloads for different operands are not
5943 merged, but might share the same register by courtesy of
5944 reload_reg_free_for_value_p. reload_reg_used_in_op_addr_reload
5945 does not differentiate by opnum, thus calling clear_reload_reg_in_use
5946 for one of these reloads would mark the register as free even though
5947 another RELOAD_FOR_OPADDR_ADDR reload might still use it. */
5948 else if (reload_inherited[r] && reload_when_needed[r] == RELOAD_FOR_INPUT)
5950 for (i = 0; i < n_reloads; i++)
5952 if ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5953 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
5954 && reload_opnum[i] == reload_opnum[r]
5955 && reload_in[i] && reload_reg_rtx[i])
5957 int regno = true_regnum (reload_reg_rtx[i]);
5960 if (spill_reg_order[regno] >= 0)
5961 clear_reload_reg_in_use (regno, reload_opnum[i],
5962 reload_when_needed[i],
5964 reload_reg_rtx[i] = 0;
5965 reload_spill_index[i] = -1;
5966 remove_replacements (i);
5971 /* If we found a better place to reload from,
5972 validate it in the same fashion, if it is a reload reg. */
5973 if (reload_override_in[r]
5974 && (GET_CODE (reload_override_in[r]) == REG
5975 || GET_CODE (reload_override_in[r]) == SUBREG))
5977 int regno = true_regnum (reload_override_in[r]);
5978 if (spill_reg_order[regno] >= 0
5979 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5980 reload_when_needed[r]))
5981 reload_override_in[r] = 0;
5985 /* Now that reload_override_in is known valid,
5986 actually override reload_in. */
5987 for (j = 0; j < n_reloads; j++)
5988 if (reload_override_in[j])
5989 reload_in[j] = reload_override_in[j];
5991 /* If this reload won't be done because it has been cancelled or is
5992 optional and not inherited, clear reload_reg_rtx so other
5993 routines (such as subst_reloads) don't get confused. */
5994 for (j = 0; j < n_reloads; j++)
5995 if (reload_reg_rtx[j] != 0
5996 && ((reload_optional[j] && ! reload_inherited[j])
5997 || (reload_in[j] == 0 && reload_out[j] == 0
5998 && ! reload_secondary_p[j])))
6000 int regno = true_regnum (reload_reg_rtx[j]);
6002 if (spill_reg_order[regno] >= 0)
6003 clear_reload_reg_in_use (regno, reload_opnum[j],
6004 reload_when_needed[j], reload_mode[j]);
6005 reload_reg_rtx[j] = 0;
6008 /* Record which pseudos and which spill regs have output reloads. */
6009 for (j = 0; j < n_reloads; j++)
6011 register int r = reload_order[j];
6013 i = reload_spill_index[r];
6015 /* I is nonneg if this reload uses a register.
6016 If reload_reg_rtx[r] is 0, this is an optional reload
6017 that we opted to ignore. */
6018 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
6019 && reload_reg_rtx[r] != 0)
6021 register int nregno = REGNO (reload_out[r]);
6024 if (nregno < FIRST_PSEUDO_REGISTER)
6025 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
6028 reg_has_output_reload[nregno + nr] = 1;
6032 nr = HARD_REGNO_NREGS (i, reload_mode[r]);
6034 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6037 if (reload_when_needed[r] != RELOAD_OTHER
6038 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
6039 && reload_when_needed[r] != RELOAD_FOR_INSN)
6045 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
6046 reloads of the same item for fear that we might not have enough reload
6047 registers. However, normally they will get the same reload register
6048 and hence actually need not be loaded twice.
6050 Here we check for the most common case of this phenomenon: when we have
6051 a number of reloads for the same object, each of which were allocated
6052 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6053 reload, and is not modified in the insn itself. If we find such,
6054 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6055 This will not increase the number of spill registers needed and will
6056 prevent redundant code. */
/* Merge reloads of INSN that were assigned the same reload register and
   load the same value, turning the surviving reload into RELOAD_OTHER.
   Then retype any remaining reloads that feed parts of the merged operand.
   Fix: the final fixup loop must test reload_when_needed[j] (the reload
   being retyped), not [i] — [i] is known to be RELOAD_OTHER inside the
   guard, which made the old test dead and the ternary always pick
   RELOAD_OTHER, contrary to the comment at 6135-6138.  */
6059 merge_assigned_reloads (insn)
6064 /* Scan all the reloads looking for ones that only load values and
6065 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6066 assigned and not modified by INSN. */
6068 for (i = 0; i < n_reloads; i++)
6070 int conflicting_input = 0;
6071 int max_input_address_opnum = -1;
6072 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
     /* Skip reloads that store, are already RELOAD_OTHER, have no
        assigned reload register, or whose reload reg is clobbered by
        INSN itself — none of those can be merged safely.  */
6074 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
6075 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
6076 || reg_set_p (reload_reg_rtx[i], insn))
6079 /* Look at all other reloads. Ensure that the only use of this
6080 reload_reg_rtx is in a reload that just loads the same value
6081 as we do. Note that any secondary reloads must be of the identical
6082 class since the values, modes, and result registers are the
6083 same, so we need not do anything with any secondary reloads. */
6085 for (j = 0; j < n_reloads; j++)
6087 if (i == j || reload_reg_rtx[j] == 0
6088 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
     /* Track the highest operand number among input-address reloads
        sharing this register, for the merge-safety check below.  */
6092 if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6093 && reload_opnum[j] > max_input_address_opnum)
6094 max_input_address_opnum = reload_opnum[j];
6096 /* If the reload regs aren't exactly the same (e.g, different modes)
6097 or if the values are different, we can't merge this reload.
6098 But if it is an input reload, we might still merge
6099 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6101 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6102 || reload_out[j] != 0 || reload_in[j] == 0
6103 || ! rtx_equal_p (reload_in[i], reload_in[j]))
6105 if (reload_when_needed[j] != RELOAD_FOR_INPUT
6106 || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
6107 || reload_opnum[i] > reload_opnum[j])
6108 && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
6110 conflicting_input = 1;
6111 if (min_conflicting_input_opnum > reload_opnum[j])
6112 min_conflicting_input_opnum = reload_opnum[j];
6116 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6117 we, in fact, found any matching reloads. */
6120 && max_input_address_opnum <= min_conflicting_input_opnum)
6122 for (j = 0; j < n_reloads; j++)
6123 if (i != j && reload_reg_rtx[j] != 0
6124 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6125 && (! conflicting_input
6126 || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6127 || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
6129 reload_when_needed[i] = RELOAD_OTHER;
6131 reload_spill_index[j] = -1;
6132 transfer_replacements (i, j);
6135 /* If this is now RELOAD_OTHER, look for any reloads that load
6136 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6137 if they were for inputs, RELOAD_OTHER for outputs. Note that
6138 this test is equivalent to looking for reloads for this operand
6141 if (reload_when_needed[i] == RELOAD_OTHER)
6142 for (j = 0; j < n_reloads; j++)
6143 if (reload_in[j] != 0
     /* Was reload_when_needed[i]: always false under the guard above,
        leaving this whole fixup loop dead.  Test reload J instead.  */
6144 && reload_when_needed[j] != RELOAD_OTHER
6145 && reg_overlap_mentioned_for_reload_p (reload_in[j],
6147 reload_when_needed[j]
     /* Was reload_when_needed[i] in the ternary: [i] is RELOAD_OTHER
        here, so RELOAD_FOR_OTHER_ADDRESS was unreachable.  The reload
        being retyped (J) decides input-address vs. other, per the
        comment above.  */
6148 = ((reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6149 || reload_when_needed[j] == RELOAD_FOR_INPADDR_ADDRESS)
6150 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6156 /* Output insns to reload values in and out of the chosen reload regs. */
6159 emit_reload_insns (insn)
6163 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6164 rtx other_input_address_reload_insns = 0;
6165 rtx other_input_reload_insns = 0;
6166 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6167 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6168 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6169 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6170 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6171 rtx operand_reload_insns = 0;
6172 rtx other_operand_reload_insns = 0;
6173 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6174 rtx following_insn = NEXT_INSN (insn);
6175 rtx before_insn = insn;
6177 /* Values to be put in spill_reg_store are put here first. */
6178 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6179 HARD_REG_SET reg_reloaded_died;
6181 CLEAR_HARD_REG_SET (reg_reloaded_died);
6183 for (j = 0; j < reload_n_operands; j++)
6184 input_reload_insns[j] = input_address_reload_insns[j]
6185 = inpaddr_address_reload_insns[j]
6186 = output_reload_insns[j] = output_address_reload_insns[j]
6187 = outaddr_address_reload_insns[j]
6188 = other_output_reload_insns[j] = 0;
6190 /* Now output the instructions to copy the data into and out of the
6191 reload registers. Do these in the order that the reloads were reported,
6192 since reloads of base and index registers precede reloads of operands
6193 and the operands may need the base and index registers reloaded. */
6195 for (j = 0; j < n_reloads; j++)
6198 rtx oldequiv_reg = 0;
6199 rtx this_reload_insn = 0;
6200 int expect_occurrences = 1;
6202 if (reload_spill_index[j] >= 0)
6203 new_spill_reg_store[reload_spill_index[j]] = 0;
6206 if (old != 0 && ! reload_inherited[j]
6207 && ! rtx_equal_p (reload_reg_rtx[j], old)
6208 && reload_reg_rtx[j] != 0)
6210 register rtx reloadreg = reload_reg_rtx[j];
6212 enum machine_mode mode;
6215 /* Determine the mode to reload in.
6216 This is very tricky because we have three to choose from.
6217 There is the mode the insn operand wants (reload_inmode[J]).
6218 There is the mode of the reload register RELOADREG.
6219 There is the intrinsic mode of the operand, which we could find
6220 by stripping some SUBREGs.
6221 It turns out that RELOADREG's mode is irrelevant:
6222 we can change that arbitrarily.
6224 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6225 then the reload reg may not support QImode moves, so use SImode.
6226 If foo is in memory due to spilling a pseudo reg, this is safe,
6227 because the QImode value is in the least significant part of a
6228 slot big enough for a SImode. If foo is some other sort of
6229 memory reference, then it is impossible to reload this case,
6230 so previous passes had better make sure this never happens.
6232 Then consider a one-word union which has SImode and one of its
6233 members is a float, being fetched as (SUBREG:SF union:SI).
6234 We must fetch that as SFmode because we could be loading into
6235 a float-only register. In this case OLD's mode is correct.
6237 Consider an immediate integer: it has VOIDmode. Here we need
6238 to get a mode from something else.
6240 In some cases, there is a fourth mode, the operand's
6241 containing mode. If the insn specifies a containing mode for
6242 this operand, it overrides all others.
6244 I am not sure whether the algorithm here is always right,
6245 but it does the right things in those cases. */
6247 mode = GET_MODE (old);
6248 if (mode == VOIDmode)
6249 mode = reload_inmode[j];
6251 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6252 /* If we need a secondary register for this operation, see if
6253 the value is already in a register in that class. Don't
6254 do this if the secondary register will be used as a scratch
6257 if (reload_secondary_in_reload[j] >= 0
6258 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6261 = find_equiv_reg (old, insn,
6262 reload_reg_class[reload_secondary_in_reload[j]],
6263 -1, NULL_PTR, 0, mode);
6266 /* If reloading from memory, see if there is a register
6267 that already holds the same value. If so, reload from there.
6268 We can pass 0 as the reload_reg_p argument because
6269 any other reload has either already been emitted,
6270 in which case find_equiv_reg will see the reload-insn,
6271 or has yet to be emitted, in which case it doesn't matter
6272 because we will use this equiv reg right away. */
6274 if (oldequiv == 0 && optimize
6275 && (GET_CODE (old) == MEM
6276 || (GET_CODE (old) == REG
6277 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6278 && reg_renumber[REGNO (old)] < 0)))
6279 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6280 -1, NULL_PTR, 0, mode);
6284 int regno = true_regnum (oldequiv);
6286 /* If OLDEQUIV is a spill register, don't use it for this
6287 if any other reload needs it at an earlier stage of this insn
6288 or at this stage. */
6289 if (spill_reg_order[regno] >= 0
6290 && (! reload_reg_free_p (regno, reload_opnum[j],
6291 reload_when_needed[j])
6292 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6293 reload_when_needed[j])))
6296 /* If OLDEQUIV is not a spill register,
6297 don't use it if any other reload wants it. */
6298 if (spill_reg_order[regno] < 0)
6301 for (k = 0; k < n_reloads; k++)
6302 if (reload_reg_rtx[k] != 0 && k != j
6303 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6311 /* If it is no cheaper to copy from OLDEQUIV into the
6312 reload register than it would be to move from memory,
6313 don't use it. Likewise, if we need a secondary register
6317 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6318 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6319 reload_reg_class[j])
6320 >= MEMORY_MOVE_COST (mode, REGNO_REG_CLASS (regno),
6322 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6323 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6327 #ifdef SECONDARY_MEMORY_NEEDED
6328 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
6329 REGNO_REG_CLASS (regno),
6338 else if (GET_CODE (oldequiv) == REG)
6339 oldequiv_reg = oldequiv;
6340 else if (GET_CODE (oldequiv) == SUBREG)
6341 oldequiv_reg = SUBREG_REG (oldequiv);
6343 /* If we are reloading from a register that was recently stored in
6344 with an output-reload, see if we can prove there was
6345 actually no need to store the old value in it. */
6347 if (optimize && GET_CODE (oldequiv) == REG
6348 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6349 && spill_reg_store[REGNO (oldequiv)]
6350 && GET_CODE (old) == REG && dead_or_set_p (insn, old)
6351 /* This is unsafe if operand occurs more than once in current
6352 insn. Perhaps some occurrences weren't reloaded. */
6353 && count_occurrences (PATTERN (insn), old) == 1)
6354 delete_output_reload (insn, j, spill_reg_store[REGNO (oldequiv)]);
6356 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6357 then load RELOADREG from OLDEQUIV. Note that we cannot use
6358 gen_lowpart_common since it can do the wrong thing when
6359 RELOADREG has a multi-word mode. Note that RELOADREG
6360 must always be a REG here. */
6362 if (GET_MODE (reloadreg) != mode)
6363 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6364 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6365 oldequiv = SUBREG_REG (oldequiv);
6366 if (GET_MODE (oldequiv) != VOIDmode
6367 && mode != GET_MODE (oldequiv))
6368 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
6370 /* Switch to the right place to emit the reload insns. */
6371 switch (reload_when_needed[j])
6374 where = &other_input_reload_insns;
6376 case RELOAD_FOR_INPUT:
6377 where = &input_reload_insns[reload_opnum[j]];
6379 case RELOAD_FOR_INPUT_ADDRESS:
6380 where = &input_address_reload_insns[reload_opnum[j]];
6382 case RELOAD_FOR_INPADDR_ADDRESS:
6383 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6385 case RELOAD_FOR_OUTPUT_ADDRESS:
6386 where = &output_address_reload_insns[reload_opnum[j]];
6388 case RELOAD_FOR_OUTADDR_ADDRESS:
6389 where = &outaddr_address_reload_insns[reload_opnum[j]];
6391 case RELOAD_FOR_OPERAND_ADDRESS:
6392 where = &operand_reload_insns;
6394 case RELOAD_FOR_OPADDR_ADDR:
6395 where = &other_operand_reload_insns;
6397 case RELOAD_FOR_OTHER_ADDRESS:
6398 where = &other_input_address_reload_insns;
6404 push_to_sequence (*where);
6407 /* Auto-increment addresses must be reloaded in a special way. */
6408 if (GET_CODE (oldequiv) == POST_INC
6409 || GET_CODE (oldequiv) == POST_DEC
6410 || GET_CODE (oldequiv) == PRE_INC
6411 || GET_CODE (oldequiv) == PRE_DEC)
6413 /* We are not going to bother supporting the case where a
6414 incremented register can't be copied directly from
6415 OLDEQUIV since this seems highly unlikely. */
6416 if (reload_secondary_in_reload[j] >= 0)
6418 /* Prevent normal processing of this reload. */
6420 /* Output a special code sequence for this case. */
6421 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
6424 /* If we are reloading a pseudo-register that was set by the previous
6425 insn, see if we can get rid of that pseudo-register entirely
6426 by redirecting the previous insn into our reload register. */
6428 else if (optimize && GET_CODE (old) == REG
6429 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6430 && dead_or_set_p (insn, old)
6431 /* This is unsafe if some other reload
6432 uses the same reg first. */
6433 && reload_reg_free_before_p (REGNO (reloadreg),
6435 reload_when_needed[j]))
6437 rtx temp = PREV_INSN (insn);
6438 while (temp && GET_CODE (temp) == NOTE)
6439 temp = PREV_INSN (temp);
6441 && GET_CODE (temp) == INSN
6442 && GET_CODE (PATTERN (temp)) == SET
6443 && SET_DEST (PATTERN (temp)) == old
6444 /* Make sure we can access insn_operand_constraint. */
6445 && asm_noperands (PATTERN (temp)) < 0
6446 /* This is unsafe if prev insn rejects our reload reg. */
6447 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6449 /* This is unsafe if operand occurs more than once in current
6450 insn. Perhaps some occurrences aren't reloaded. */
6451 && count_occurrences (PATTERN (insn), old) == 1
6452 /* Don't risk splitting a matching pair of operands. */
6453 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6455 /* Store into the reload register instead of the pseudo. */
6456 SET_DEST (PATTERN (temp)) = reloadreg;
6457 /* If these are the only uses of the pseudo reg,
6458 pretend for GDB it lives in the reload reg we used. */
6459 if (REG_N_DEATHS (REGNO (old)) == 1
6460 && REG_N_SETS (REGNO (old)) == 1)
6462 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6463 alter_reg (REGNO (old), -1);
6469 /* We can't do that, so output an insn to load RELOADREG. */
6473 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6474 rtx second_reload_reg = 0;
6475 enum insn_code icode;
6477 /* If we have a secondary reload, pick up the secondary register
6478 and icode, if any. If OLDEQUIV and OLD are different or
6479 if this is an in-out reload, recompute whether or not we
6480 still need a secondary register and what the icode should
6481 be. If we still need a secondary register and the class or
6482 icode is different, go back to reloading from OLD if using
6483 OLDEQUIV means that we got the wrong type of register. We
6484 cannot have different class or icode due to an in-out reload
6485 because we don't make such reloads when both the input and
6486 output need secondary reload registers. */
6488 if (reload_secondary_in_reload[j] >= 0)
6490 int secondary_reload = reload_secondary_in_reload[j];
6491 rtx real_oldequiv = oldequiv;
6494 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6495 and similarly for OLD.
6496 See comments in get_secondary_reload in reload.c. */
6497 if (GET_CODE (oldequiv) == REG
6498 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6499 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6500 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6502 if (GET_CODE (old) == REG
6503 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6504 && reg_equiv_mem[REGNO (old)] != 0)
6505 real_old = reg_equiv_mem[REGNO (old)];
6507 second_reload_reg = reload_reg_rtx[secondary_reload];
6508 icode = reload_secondary_in_icode[j];
6510 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6511 || (reload_in[j] != 0 && reload_out[j] != 0))
6513 enum reg_class new_class
6514 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6515 mode, real_oldequiv);
6517 if (new_class == NO_REGS)
6518 second_reload_reg = 0;
6521 enum insn_code new_icode;
6522 enum machine_mode new_mode;
6524 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6525 REGNO (second_reload_reg)))
6526 oldequiv = old, real_oldequiv = real_old;
6529 new_icode = reload_in_optab[(int) mode];
6530 if (new_icode != CODE_FOR_nothing
6531 && ((insn_operand_predicate[(int) new_icode][0]
6532 && ! ((*insn_operand_predicate[(int) new_icode][0])
6534 || (insn_operand_predicate[(int) new_icode][1]
6535 && ! ((*insn_operand_predicate[(int) new_icode][1])
6536 (real_oldequiv, mode)))))
6537 new_icode = CODE_FOR_nothing;
6539 if (new_icode == CODE_FOR_nothing)
6542 new_mode = insn_operand_mode[(int) new_icode][2];
6544 if (GET_MODE (second_reload_reg) != new_mode)
6546 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6548 oldequiv = old, real_oldequiv = real_old;
6551 = gen_rtx_REG (new_mode,
6552 REGNO (second_reload_reg));
6558 /* If we still need a secondary reload register, check
6559 to see if it is being used as a scratch or intermediate
6560 register and generate code appropriately. If we need
6561 a scratch register, use REAL_OLDEQUIV since the form of
6562 the insn may depend on the actual address if it is
6565 if (second_reload_reg)
6567 if (icode != CODE_FOR_nothing)
6569 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6570 second_reload_reg));
6575 /* See if we need a scratch register to load the
6576 intermediate register (a tertiary reload). */
6577 enum insn_code tertiary_icode
6578 = reload_secondary_in_icode[secondary_reload];
6580 if (tertiary_icode != CODE_FOR_nothing)
6582 rtx third_reload_reg
6583 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6585 emit_insn ((GEN_FCN (tertiary_icode)
6586 (second_reload_reg, real_oldequiv,
6587 third_reload_reg)));
6590 gen_reload (second_reload_reg, oldequiv,
6592 reload_when_needed[j]);
6594 oldequiv = second_reload_reg;
6600 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6601 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6602 reload_when_needed[j]);
6604 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6605 /* We may have to make a REG_DEAD note for the secondary reload
6606 register in the insns we just made. Find the last insn that
6607 mentioned the register. */
6608 if (! special && second_reload_reg
6609 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6613 for (prev = get_last_insn (); prev;
6614 prev = PREV_INSN (prev))
6615 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6616 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6619 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_DEAD,
6628 this_reload_insn = get_last_insn ();
6629 /* End this sequence. */
6630 *where = get_insns ();
6634 /* When inheriting a wider reload, we have a MEM in reload_in[j],
6635 e.g. inheriting a SImode output reload for
6636 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6637 if (optimize && reload_inherited[j] && reload_in[j]
6638 && GET_CODE (reload_in[j]) == MEM
6639 && reload_spill_index[j] >= 0
6640 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6643 = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
6645 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6647 /* Add a note saying the input reload reg
6648 dies in this insn, if anyone cares. */
6649 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6651 && reload_reg_rtx[j] != old
6652 && reload_reg_rtx[j] != 0
6653 && reload_out[j] == 0
6654 && ! reload_inherited[j]
6655 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6657 register rtx reloadreg = reload_reg_rtx[j];
6660 /* We can't abort here because we need to support this for sched.c.
6661 It's not terrible to miss a REG_DEAD note, but we should try
6662 to figure out how to do this correctly. */
6663 /* The code below is incorrect for address-only reloads. */
6664 if (reload_when_needed[j] != RELOAD_OTHER
6665 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6669 /* Add a death note to this insn, for an input reload. */
6671 if ((reload_when_needed[j] == RELOAD_OTHER
6672 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6673 && ! dead_or_set_p (insn, reloadreg))
6675 = gen_rtx_EXPR_LIST (REG_DEAD,
6676 reloadreg, REG_NOTES (insn));
6679 /* When we inherit a reload, the last marked death of the reload reg
6680 may no longer really be a death. */
6681 if (reload_reg_rtx[j] != 0
6682 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6683 && reload_inherited[j])
6685 /* Handle inheriting an output reload.
6686 Remove the death note from the output reload insn. */
6687 if (reload_spill_index[j] >= 0
6688 && GET_CODE (reload_in[j]) == REG
6689 && spill_reg_store[reload_spill_index[j]] != 0
6690 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6691 REG_DEAD, REGNO (reload_reg_rtx[j])))
6692 remove_death (REGNO (reload_reg_rtx[j]),
6693 spill_reg_store[reload_spill_index[j]]);
6694 /* Likewise for input reloads that were inherited. */
6695 else if (reload_spill_index[j] >= 0
6696 && GET_CODE (reload_in[j]) == REG
6697 && spill_reg_store[reload_spill_index[j]] == 0
6698 && reload_inheritance_insn[j] != 0
6699 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6700 REGNO (reload_reg_rtx[j])))
6701 remove_death (REGNO (reload_reg_rtx[j]),
6702 reload_inheritance_insn[j]);
6707 /* We got this register from find_equiv_reg.
6708 Search back for its last death note and get rid of it.
6709 But don't search back too far.
6710 Don't go past a place where this reg is set,
6711 since a death note before that remains valid. */
6712 for (prev = PREV_INSN (insn);
6713 prev && GET_CODE (prev) != CODE_LABEL;
6714 prev = PREV_INSN (prev))
6715 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6716 && dead_or_set_p (prev, reload_reg_rtx[j]))
6718 if (find_regno_note (prev, REG_DEAD,
6719 REGNO (reload_reg_rtx[j])))
6720 remove_death (REGNO (reload_reg_rtx[j]), prev);
6726 /* We might have used find_equiv_reg above to choose an alternate
6727 place from which to reload. If so, and it died, we need to remove
6728 that death and move it to one of the insns we just made. */
6730 if (oldequiv_reg != 0
6731 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6735 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6736 prev = PREV_INSN (prev))
6737 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6738 && dead_or_set_p (prev, oldequiv_reg))
6740 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6742 for (prev1 = this_reload_insn;
6743 prev1; prev1 = PREV_INSN (prev1))
6744 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6745 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6748 REG_NOTES (prev1) = gen_rtx_EXPR_LIST (REG_DEAD,
6753 remove_death (REGNO (oldequiv_reg), prev);
6760 /* If we are reloading a register that was recently stored in with an
6761 output-reload, see if we can prove there was
6762 actually no need to store the old value in it. */
6764 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6765 && reload_in[j] != 0
6766 && GET_CODE (reload_in[j]) == REG
6768 /* There doesn't seem to be any reason to restrict this to pseudos
6769 and doing so loses in the case where we are copying from a
6770 register of the wrong class. */
6771 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6773 && spill_reg_store[reload_spill_index[j]] != 0
6774 /* This is unsafe if some other reload uses the same reg first. */
6775 && reload_reg_free_before_p (reload_spill_index[j],
6776 reload_opnum[j], reload_when_needed[j])
6777 && dead_or_set_p (insn, reload_in[j])
6778 /* This is unsafe if operand occurs more than once in current
6779 insn. Perhaps some occurrences weren't reloaded. */
6780 && (count_occurrences (PATTERN (insn), reload_in[j])
6781 == expect_occurrences))
6782 delete_output_reload (insn, j,
6783 spill_reg_store[reload_spill_index[j]]);
6785 /* Input-reloading is done. Now do output-reloading,
6786 storing the value from the reload-register after the main insn
6787 if reload_out[j] is nonzero.
6789 ??? At some point we need to support handling output reloads of
6790 JUMP_INSNs or insns that set cc0. */
6791 old = reload_out[j];
6793 && reload_reg_rtx[j] != old
6794 && reload_reg_rtx[j] != 0)
6796 register rtx reloadreg = reload_reg_rtx[j];
6797 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6798 register rtx second_reloadreg = 0;
6801 enum machine_mode mode;
6804 /* An output operand that dies right away does need a reload,
6805 but need not be copied from it. Show the new location in the
6807 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6808 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6810 XEXP (note, 0) = reload_reg_rtx[j];
6813 /* Likewise for a SUBREG of an operand that dies. */
6814 else if (GET_CODE (old) == SUBREG
6815 && GET_CODE (SUBREG_REG (old)) == REG
6816 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6819 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6823 else if (GET_CODE (old) == SCRATCH)
6824 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6825 but we don't want to make an output reload. */
6829 /* Strip off of OLD any size-increasing SUBREGs such as
6830 (SUBREG:SI foo:QI 0). */
6832 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6833 && (GET_MODE_SIZE (GET_MODE (old))
6834 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6835 old = SUBREG_REG (old);
6838 /* If is a JUMP_INSN, we can't support output reloads yet. */
6839 if (GET_CODE (insn) == JUMP_INSN)
6842 if (reload_when_needed[j] == RELOAD_OTHER)
6845 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6847 /* Determine the mode to reload in.
6848 See comments above (for input reloading). */
6850 mode = GET_MODE (old);
6851 if (mode == VOIDmode)
6853 /* VOIDmode should never happen for an output. */
6854 if (asm_noperands (PATTERN (insn)) < 0)
6855 /* It's the compiler's fault. */
6856 fatal_insn ("VOIDmode on an output", insn);
6857 error_for_asm (insn, "output operand is constant in `asm'");
6858 /* Prevent crash--use something we know is valid. */
6860 old = gen_rtx_REG (mode, REGNO (reloadreg));
6863 if (GET_MODE (reloadreg) != mode)
6864 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6866 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6868 /* If we need two reload regs, set RELOADREG to the intermediate
6869 one, since it will be stored into OLD. We might need a secondary
6870 register only for an input reload, so check again here. */
6872 if (reload_secondary_out_reload[j] >= 0)
6876 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6877 && reg_equiv_mem[REGNO (old)] != 0)
6878 real_old = reg_equiv_mem[REGNO (old)];
6880 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6884 second_reloadreg = reloadreg;
6885 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6887 /* See if RELOADREG is to be used as a scratch register
6888 or as an intermediate register. */
6889 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6891 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6892 (real_old, second_reloadreg, reloadreg)));
6897 /* See if we need both a scratch and intermediate reload
6900 int secondary_reload = reload_secondary_out_reload[j];
6901 enum insn_code tertiary_icode
6902 = reload_secondary_out_icode[secondary_reload];
6904 if (GET_MODE (reloadreg) != mode)
6905 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6907 if (tertiary_icode != CODE_FOR_nothing)
6910 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6913 /* Copy primary reload reg to secondary reload reg.
6914 (Note that these have been swapped above, then
6915 secondary reload reg to OLD using our insn. */
6917 /* If REAL_OLD is a paradoxical SUBREG, remove it
6918 and try to put the opposite SUBREG on
6920 if (GET_CODE (real_old) == SUBREG
6921 && (GET_MODE_SIZE (GET_MODE (real_old))
6922 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6923 && 0 != (tem = gen_lowpart_common
6924 (GET_MODE (SUBREG_REG (real_old)),
6926 real_old = SUBREG_REG (real_old), reloadreg = tem;
6928 gen_reload (reloadreg, second_reloadreg,
6929 reload_opnum[j], reload_when_needed[j]);
6930 emit_insn ((GEN_FCN (tertiary_icode)
6931 (real_old, reloadreg, third_reloadreg)));
6936 /* Copy between the reload regs here and then to
6939 gen_reload (reloadreg, second_reloadreg,
6940 reload_opnum[j], reload_when_needed[j]);
6946 /* Output the last reload insn. */
6951 /* Don't output the last reload if OLD is not the dest of
6952 INSN and is in the src and is clobbered by INSN. */
6953 if (! flag_expensive_optimizations
6954 || GET_CODE (old) != REG
6955 || !(set = single_set (insn))
6956 || rtx_equal_p (old, SET_DEST (set))
6957 || !reg_mentioned_p (old, SET_SRC (set))
6958 || !regno_clobbered_p (REGNO (old), insn))
6959 gen_reload (old, reloadreg, reload_opnum[j],
6960 reload_when_needed[j]);
6963 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6964 /* If final will look at death notes for this reg,
6965 put one on the last output-reload insn to use it. Similarly
6966 for any secondary register. */
6967 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6968 for (p = get_last_insn (); p; p = PREV_INSN (p))
6969 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6970 && reg_overlap_mentioned_for_reload_p (reloadreg,
6972 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
6973 reloadreg, REG_NOTES (p));
6975 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6976 if (! special && second_reloadreg
6977 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6978 for (p = get_last_insn (); p; p = PREV_INSN (p))
6979 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6980 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6982 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
6987 /* Look at all insns we emitted, just to be safe. */
6988 for (p = get_insns (); p; p = NEXT_INSN (p))
6989 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6991 rtx pat = PATTERN (p);
6993 /* If this output reload doesn't come from a spill reg,
6994 clear any memory of reloaded copies of the pseudo reg.
6995 If this output reload comes from a spill reg,
6996 reg_has_output_reload will make this do nothing. */
6997 note_stores (pat, forget_old_reloads_1);
6999 if (reg_mentioned_p (reload_reg_rtx[j], pat))
7001 if (reload_spill_index[j] < 0
7002 && GET_CODE (pat) == SET
7003 && SET_SRC (pat) == reload_reg_rtx[j])
7005 int src = REGNO (SET_SRC (pat));
7007 reload_spill_index[j] = src;
7008 SET_HARD_REG_BIT (reg_is_output_reload, src);
7009 if (find_regno_note (insn, REG_DEAD, src))
7010 SET_HARD_REG_BIT (reg_reloaded_died, src);
7012 if (reload_spill_index[j] >= 0)
7013 new_spill_reg_store[reload_spill_index[j]] = p;
7017 if (reload_when_needed[j] == RELOAD_OTHER)
7019 emit_insns (other_output_reload_insns[reload_opnum[j]]);
7020 other_output_reload_insns[reload_opnum[j]] = get_insns ();
7023 output_reload_insns[reload_opnum[j]] = get_insns ();
7029 /* Now write all the insns we made for reloads in the order expected by
7030 the allocation functions. Prior to the insn being reloaded, we write
7031 the following reloads:
7033 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7035 RELOAD_OTHER reloads.
7037 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7038 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7039 RELOAD_FOR_INPUT reload for the operand.
7041 RELOAD_FOR_OPADDR_ADDRS reloads.
7043 RELOAD_FOR_OPERAND_ADDRESS reloads.
7045 After the insn being reloaded, we write the following:
7047 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7048 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7049 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7050 reloads for the operand. The RELOAD_OTHER output reloads are
7051 output in descending order by reload number. */
7053 emit_insns_before (other_input_address_reload_insns, before_insn);
7054 emit_insns_before (other_input_reload_insns, before_insn);
7056 for (j = 0; j < reload_n_operands; j++)
7058 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
7059 emit_insns_before (input_address_reload_insns[j], before_insn);
7060 emit_insns_before (input_reload_insns[j], before_insn);
7063 emit_insns_before (other_operand_reload_insns, before_insn);
7064 emit_insns_before (operand_reload_insns, before_insn);
7066 for (j = 0; j < reload_n_operands; j++)
7068 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
7069 emit_insns_before (output_address_reload_insns[j], following_insn);
7070 emit_insns_before (output_reload_insns[j], following_insn);
7071 emit_insns_before (other_output_reload_insns[j], following_insn);
7074 /* Move death notes from INSN
7075 to output-operand-address and output reload insns. */
7076 #ifdef PRESERVE_DEATH_INFO_REGNO_P
7079 /* Loop over those insns, last ones first. */
7080 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
7081 insn1 = PREV_INSN (insn1))
7082 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
7084 rtx source = SET_SRC (PATTERN (insn1));
7085 rtx dest = SET_DEST (PATTERN (insn1));
7087 /* The note we will examine next. */
7088 rtx reg_notes = REG_NOTES (insn);
7089 /* The place that pointed to this note. */
7090 rtx *prev_reg_note = &REG_NOTES (insn);
7092 /* If the note is for something used in the source of this
7093 reload insn, or in the output address, move the note. */
7096 rtx next_reg_notes = XEXP (reg_notes, 1);
7097 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
7098 && GET_CODE (XEXP (reg_notes, 0)) == REG
7099 && ((GET_CODE (dest) != REG
7100 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
7102 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
7105 *prev_reg_note = next_reg_notes;
7106 XEXP (reg_notes, 1) = REG_NOTES (insn1);
7107 REG_NOTES (insn1) = reg_notes;
7110 prev_reg_note = &XEXP (reg_notes, 1);
7112 reg_notes = next_reg_notes;
7118 /* For all the spill regs newly reloaded in this instruction,
7119 record what they were reloaded from, so subsequent instructions
7120 can inherit the reloads.
7122 Update spill_reg_store for the reloads of this insn.
7123 Copy the elements that were updated in the loop above. */
7125 for (j = 0; j < n_reloads; j++)
7127 register int r = reload_order[j];
7128 register int i = reload_spill_index[r];
7130 /* I is nonneg if this reload used a register.
7131 If reload_reg_rtx[r] is 0, this is an optional reload
7132 that we opted to ignore. */
7134 if (i >= 0 && reload_reg_rtx[r] != 0)
7137 = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r]));
7139 int part_reaches_end = 0;
7140 int all_reaches_end = 1;
7142 /* For a multi register reload, we need to check if all or part
7143 of the value lives to the end. */
7144 for (k = 0; k < nr; k++)
7146 if (reload_reg_reaches_end_p (i + k, reload_opnum[r],
7147 reload_when_needed[r]))
7148 part_reaches_end = 1;
7150 all_reaches_end = 0;
7153 /* Ignore reloads that don't reach the end of the insn in
7155 if (all_reaches_end)
7157 /* First, clear out memory of what used to be in this spill reg.
7158 If consecutive registers are used, clear them all. */
7160 for (k = 0; k < nr; k++)
7161 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7163 /* Maybe the spill reg contains a copy of reload_out. */
7164 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7166 register int nregno = REGNO (reload_out[r]);
7167 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7168 : HARD_REGNO_NREGS (nregno,
7169 GET_MODE (reload_reg_rtx[r])));
7171 spill_reg_store[i] = new_spill_reg_store[i];
7172 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7174 /* If NREGNO is a hard register, it may occupy more than
7175 one register. If it does, say what is in the
7176 rest of the registers assuming that both registers
7177 agree on how many words the object takes. If not,
7178 invalidate the subsequent registers. */
7180 if (nregno < FIRST_PSEUDO_REGISTER)
7181 for (k = 1; k < nnr; k++)
7182 reg_last_reload_reg[nregno + k]
7184 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7185 REGNO (reload_reg_rtx[r]) + k)
7188 /* Now do the inverse operation. */
7189 for (k = 0; k < nr; k++)
7191 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7192 reg_reloaded_contents[i + k]
7193 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7196 reg_reloaded_insn[i + k] = insn;
7197 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7201 /* Maybe the spill reg contains a copy of reload_in. Only do
7202 something if there will not be an output reload for
7203 the register being reloaded. */
7204 else if (reload_out[r] == 0
7205 && reload_in[r] != 0
7206 && spill_reg_order[i] >= 0
7207 && ((GET_CODE (reload_in[r]) == REG
7208 && ! reg_has_output_reload[REGNO (reload_in[r])])
7209 || (GET_CODE (reload_in_reg[r]) == REG
7210 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
7212 register int nregno;
7215 if (GET_CODE (reload_in[r]) == REG)
7216 nregno = REGNO (reload_in[r]);
7218 nregno = REGNO (reload_in_reg[r]);
7220 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7221 : HARD_REGNO_NREGS (nregno,
7222 GET_MODE (reload_reg_rtx[r])));
7224 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7226 if (nregno < FIRST_PSEUDO_REGISTER)
7227 for (k = 1; k < nnr; k++)
7228 reg_last_reload_reg[nregno + k]
7230 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7231 REGNO (reload_reg_rtx[r]) + k)
7234 /* Unless we inherited this reload, show we haven't
7235 recently done a store. */
7236 if (! reload_inherited[r])
7237 spill_reg_store[i] = 0;
7239 for (k = 0; k < nr; k++)
7241 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7242 reg_reloaded_contents[i + k]
7243 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7246 reg_reloaded_insn[i + k] = insn;
7247 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7252 /* However, if part of the reload reaches the end, then we must
7253 invalidate the old info for the part that survives to the end. */
7254 else if (part_reaches_end)
7256 for (k = 0; k < nr; k++)
7257 if (reload_reg_reaches_end_p (i + k,
7259 reload_when_needed[r]))
7260 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7264 /* The following if-statement was #if 0'd in 1.34 (or before...).
7265 It's reenabled in 1.35 because supposedly nothing else
7266 deals with this problem. */
7268 /* If a register gets output-reloaded from a non-spill register,
7269 that invalidates any previous reloaded copy of it.
7270 But forget_old_reloads_1 won't get to see it, because
7271 it thinks only about the original insn. So invalidate it here. */
7272 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7274 register int nregno = REGNO (reload_out[r]);
7275 if (nregno >= FIRST_PSEUDO_REGISTER)
7276 reg_last_reload_reg[nregno] = 0;
7279 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
7281 while (num_regs-- > 0)
7282 reg_last_reload_reg[nregno + num_regs] = 0;
7286 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7289 /* Emit code to perform a reload from IN (which may be a reload register) to
7290 OUT (which may also be a reload register). IN or OUT is from operand
7291 OPNUM with reload type TYPE.
7293 Returns first insn emitted. */
7296 gen_reload (out, in, opnum, type)
7300 enum reload_type type;
7302 rtx last = get_last_insn ();
7305 /* If IN is a paradoxical SUBREG, remove it and try to put the
7306 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7307 if (GET_CODE (in) == SUBREG
7308 && (GET_MODE_SIZE (GET_MODE (in))
7309 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7310 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7311 in = SUBREG_REG (in), out = tem;
7312 else if (GET_CODE (out) == SUBREG
7313 && (GET_MODE_SIZE (GET_MODE (out))
7314 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7315 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7316 out = SUBREG_REG (out), in = tem;
7318 /* How to do this reload can get quite tricky. Normally, we are being
7319 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7320 register that didn't get a hard register. In that case we can just
7321 call emit_move_insn.
7323 We can also be asked to reload a PLUS that adds a register or a MEM to
7324 another register, constant or MEM. This can occur during frame pointer
7325 elimination and while reloading addresses. This case is handled by
7326 trying to emit a single insn to perform the add. If it is not valid,
7327 we use a two insn sequence.
7329 Finally, we could be called to handle an 'o' constraint by putting
7330 an address into a register. In that case, we first try to do this
7331 with a named pattern of "reload_load_address". If no such pattern
7332 exists, we just emit a SET insn and hope for the best (it will normally
7333 be valid on machines that use 'o').
7335 This entire process is made complex because reload will never
7336 process the insns we generate here and so we must ensure that
7337 they will fit their constraints and also by the fact that parts of
7338 IN might be being reloaded separately and replaced with spill registers.
7339 Because of this, we are, in some sense, just guessing the right approach
7340 here. The one listed above seems to work.
7342 ??? At some point, this whole thing needs to be rethought. */
7344 if (GET_CODE (in) == PLUS
7345 && (GET_CODE (XEXP (in, 0)) == REG
7346 || GET_CODE (XEXP (in, 0)) == SUBREG
7347 || GET_CODE (XEXP (in, 0)) == MEM)
7348 && (GET_CODE (XEXP (in, 1)) == REG
7349 || GET_CODE (XEXP (in, 1)) == SUBREG
7350 || CONSTANT_P (XEXP (in, 1))
7351 || GET_CODE (XEXP (in, 1)) == MEM))
7353 /* We need to compute the sum of a register or a MEM and another
7354 register, constant, or MEM, and put it into the reload
7355 register. The best possible way of doing this is if the machine
7356 has a three-operand ADD insn that accepts the required operands.
7358 The simplest approach is to try to generate such an insn and see if it
7359 is recognized and matches its constraints. If so, it can be used.
7361 It might be better not to actually emit the insn unless it is valid,
7362 but we need to pass the insn as an operand to `recog' and
7363 `insn_extract' and it is simpler to emit and then delete the insn if
7364 not valid than to dummy things up. */
7366 rtx op0, op1, tem, insn;
7369 op0 = find_replacement (&XEXP (in, 0));
7370 op1 = find_replacement (&XEXP (in, 1));
7372 /* Since constraint checking is strict, commutativity won't be
7373 checked, so we need to do that here to avoid spurious failure
7374 if the add instruction is two-address and the second operand
7375 of the add is the same as the reload reg, which is frequently
7376 the case. If the insn would be A = B + A, rearrange it so
7377 it will be A = A + B as constrain_operands expects. */
7379 if (GET_CODE (XEXP (in, 1)) == REG
7380 && REGNO (out) == REGNO (XEXP (in, 1)))
7381 tem = op0, op0 = op1, op1 = tem;
7383 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7384 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7386 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7387 code = recog_memoized (insn);
7391 insn_extract (insn);
7392 /* We want constrain operands to treat this insn strictly in
7393 its validity determination, i.e., the way it would after reload
7395 if (constrain_operands (code, 1))
7399 delete_insns_since (last);
7401 /* If that failed, we must use a conservative two-insn sequence.
7402 use move to copy constant, MEM, or pseudo register to the reload
7403 register since "move" will be able to handle an arbitrary operand,
7404 unlike add which can't, in general. Then add the registers.
7406 If there is another way to do this for a specific machine, a
7407 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7410 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7411 || (GET_CODE (op1) == REG
7412 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7413 tem = op0, op0 = op1, op1 = tem;
7415 gen_reload (out, op0, opnum, type);
7417 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7418 This fixes a problem on the 32K where the stack pointer cannot
7419 be used as an operand of an add insn. */
7421 if (rtx_equal_p (op0, op1))
7424 insn = emit_insn (gen_add2_insn (out, op1));
7426 /* If that failed, copy the address register to the reload register.
7427 Then add the constant to the reload register. */
7429 code = recog_memoized (insn);
7433 insn_extract (insn);
7434 /* We want constrain operands to treat this insn strictly in
7435 its validity determination, i.e., the way it would after reload
7437 if (constrain_operands (code, 1))
7439 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7441 = gen_rtx (EXPR_LIST, REG_EQUIV, in, REG_NOTES (insn));
7446 delete_insns_since (last);
7448 gen_reload (out, op1, opnum, type);
7449 insn = emit_insn (gen_add2_insn (out, op0));
7450 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUIV, in, REG_NOTES (insn));
7453 #ifdef SECONDARY_MEMORY_NEEDED
7454 /* If we need a memory location to do the move, do it that way. */
7455 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7456 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7457 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7458 REGNO_REG_CLASS (REGNO (out)),
7461 /* Get the memory to use and rewrite both registers to its mode. */
7462 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7464 if (GET_MODE (loc) != GET_MODE (out))
7465 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7467 if (GET_MODE (loc) != GET_MODE (in))
7468 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7470 gen_reload (loc, in, opnum, type);
7471 gen_reload (out, loc, opnum, type);
7475 /* If IN is a simple operand, use gen_move_insn. */
7476 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7477 emit_insn (gen_move_insn (out, in));
7479 #ifdef HAVE_reload_load_address
7480 else if (HAVE_reload_load_address)
7481 emit_insn (gen_reload_load_address (out, in));
7484 /* Otherwise, just write (set OUT IN) and hope for the best. */
7486 emit_insn (gen_rtx_SET (VOIDmode, out, in));
7488 /* Return the first insn emitted.
7489 We can not just return get_last_insn, because there may have
7490 been multiple instructions emitted. Also note that gen_move_insn may
7491 emit more than one insn itself, so we can not assume that there is one
7492 insn emitted per emit_insn_before call. */
7494 return last ? NEXT_INSN (last) : get_insns ();
7497 /* Delete a previously made output-reload
7498 whose result we now believe is not needed.
7499 First we double-check.
7501 INSN is the insn now being processed.
7502 OUTPUT_RELOAD_INSN is the insn of the output reload.
7503 J is the reload-number for this insn. */
7506 delete_output_reload (insn, j, output_reload_insn)
7509 rtx output_reload_insn;
7513 /* Get the raw pseudo-register referred to. */
7515 rtx reg = reload_in[j];
7516 while (GET_CODE (reg) == SUBREG)
7517 reg = SUBREG_REG (reg);
7519 /* If the pseudo-reg we are reloading is no longer referenced
7520 anywhere between the store into it and here,
7521 and no jumps or labels intervene, then the value can get
7522 here through the reload reg alone.
7523 Otherwise, give up--return. */
7524 for (i1 = NEXT_INSN (output_reload_insn);
7525 i1 != insn; i1 = NEXT_INSN (i1))
7527 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7529 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7530 && reg_mentioned_p (reg, PATTERN (i1)))
7532 /* If this is just a single USE with an REG_EQUAL note in front
7533 of INSN, this is no problem, because this mentions just the
7534 address that we are using here.
7535 But if there is more than one such USE, the insn might use
7536 the operand directly, or another reload might do that.
7537 This is analogous to the count_occurences check in the callers. */
7538 int num_occurences = 0;
7540 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE
7541 && find_reg_note (i1, REG_EQUAL, NULL_RTX))
7543 num_occurences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7544 i1 = NEXT_INSN (i1);
7546 if (num_occurences == 1 && i1 == insn)
7552 /* The caller has already checked that REG dies or is set in INSN.
7553 It has also checked that we are optimizing, and thus some inaccurancies
7554 in the debugging information are acceptable.
7555 So we could just delete output_reload_insn.
7556 But in some cases we can improve the debugging information without
7557 sacrificing optimization - maybe even improving the code:
7558 See if the pseudo reg has been completely replaced
7559 with reload regs. If so, delete the store insn
7560 and forget we had a stack slot for the pseudo. */
7561 if (reload_out[j] != reload_in[j]
7562 && REG_N_DEATHS (REGNO (reg)) == 1
7563 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7564 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7568 /* We know that it was used only between here
7569 and the beginning of the current basic block.
7570 (We also know that the last use before INSN was
7571 the output reload we are thinking of deleting, but never mind that.)
7572 Search that range; see if any ref remains. */
7573 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7575 rtx set = single_set (i2);
7577 /* Uses which just store in the pseudo don't count,
7578 since if they are the only uses, they are dead. */
7579 if (set != 0 && SET_DEST (set) == reg)
7581 if (GET_CODE (i2) == CODE_LABEL
7582 || GET_CODE (i2) == JUMP_INSN)
7584 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7585 && reg_mentioned_p (reg, PATTERN (i2)))
7587 /* Some other ref remains; just delete the output reload we
7589 delete_insn (output_reload_insn);
7594 /* Delete the now-dead stores into this pseudo. */
7595 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7597 rtx set = single_set (i2);
7599 if (set != 0 && SET_DEST (set) == reg)
7601 /* This might be a basic block head,
7602 thus don't use delete_insn. */
7603 PUT_CODE (i2, NOTE);
7604 NOTE_SOURCE_FILE (i2) = 0;
7605 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7607 if (GET_CODE (i2) == CODE_LABEL
7608 || GET_CODE (i2) == JUMP_INSN)
7612 /* For the debugging info,
7613 say the pseudo lives in this reload reg. */
7614 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7615 alter_reg (REGNO (reg), -1);
7617 delete_insn (output_reload_insn);
7621 /* Output reload-insns to reload VALUE into RELOADREG.
7622 VALUE is an autoincrement or autodecrement RTX whose operand
7623 is a register or memory location;
7624 so reloading involves incrementing that location.
7626 INC_AMOUNT is the number to increment or decrement by (always positive).
7627 This cannot be deduced from VALUE. */
7630 inc_for_reload (reloadreg, value, inc_amount)
7635 /* REG or MEM to be copied and incremented. */
7636 rtx incloc = XEXP (value, 0);
7637 /* Nonzero if increment after copying. */
7638 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7644 /* No hard register is equivalent to this register after
7645 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7646 we could inc/dec that register as well (maybe even using it for
7647 the source), but I'm not sure it's worth worrying about. */
7648 if (GET_CODE (incloc) == REG)
7649 reg_last_reload_reg[REGNO (incloc)] = 0;
7651 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7652 inc_amount = - inc_amount;
7654 inc = GEN_INT (inc_amount);
7656 /* If this is post-increment, first copy the location to the reload reg. */
7658 emit_insn (gen_move_insn (reloadreg, incloc));
7660 /* See if we can directly increment INCLOC. Use a method similar to that
7663 last = get_last_insn ();
7664 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
7665 gen_rtx_PLUS (GET_MODE (incloc),
7668 code = recog_memoized (add_insn);
7671 insn_extract (add_insn);
7672 if (constrain_operands (code, 1))
7674 /* If this is a pre-increment and we have incremented the value
7675 where it lives, copy the incremented value to RELOADREG to
7676 be used as an address. */
7679 emit_insn (gen_move_insn (reloadreg, incloc));
7685 delete_insns_since (last);
7687 /* If couldn't do the increment directly, must increment in RELOADREG.
7688 The way we do this depends on whether this is pre- or post-increment.
7689 For pre-increment, copy INCLOC to the reload register, increment it
7690 there, then save back. */
7694 emit_insn (gen_move_insn (reloadreg, incloc));
7695 emit_insn (gen_add2_insn (reloadreg, inc));
7696 emit_insn (gen_move_insn (incloc, reloadreg));
7701 Because this might be a jump insn or a compare, and because RELOADREG
7702 may not be available after the insn in an input reload, we must do
7703 the incrementation before the insn being reloaded for.
7705 We have already copied INCLOC to RELOADREG. Increment the copy in
7706 RELOADREG, save that back, then decrement RELOADREG so it has
7707 the original value. */
7709 emit_insn (gen_add2_insn (reloadreg, inc));
7710 emit_insn (gen_move_insn (incloc, reloadreg));
7711 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7717 /* Return 1 if we are certain that the constraint-string STRING allows
7718 the hard register REG. Return 0 if we can't be sure of this. */
7721 constraint_accepts_reg_p (string, reg)
7726 int regno = true_regnum (reg);
7729 /* Initialize for first alternative. */
7731 /* Check that each alternative contains `g' or `r'. */
7733 switch (c = *string++)
7736 /* If an alternative lacks `g' or `r', we lose. */
7739 /* If an alternative lacks `g' or `r', we lose. */
7742 /* Initialize for next alternative. */
7747 /* Any general reg wins for this alternative. */
7748 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7752 /* Any reg in specified class wins for this alternative. */
7754 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7756 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7762 /* Return the number of places FIND appears within X, but don't count
7763 an occurrence if some SET_DEST is FIND. */
7766 count_occurrences (x, find)
7767 register rtx x, find;
7770 register enum rtx_code code;
7771 register char *format_ptr;
7779 code = GET_CODE (x);
7794 if (SET_DEST (x) == find)
7795 return count_occurrences (SET_SRC (x), find);
7802 format_ptr = GET_RTX_FORMAT (code);
7805 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7807 switch (*format_ptr++)
7810 count += count_occurrences (XEXP (x, i), find);
7814 if (XVEC (x, i) != NULL)
7816 for (j = 0; j < XVECLEN (x, i); j++)
7817 count += count_occurrences (XVECEXP (x, i, j), find);
7825 /* This array holds values which are equivalent to a hard register
7826 during reload_cse_regs. Each array element is an EXPR_LIST of
7827 values. Each time a hard register is set, we set the corresponding
7828 array element to the value. Each time a hard register is copied
7829 into memory, we add the memory location to the corresponding array
7830 element. We don't store values or memory addresses with side
7831 effects in this array.
7833 If the value is a CONST_INT, then the mode of the containing
7834 EXPR_LIST is the mode in which that CONST_INT was referenced.
7836 We sometimes clobber a specific entry in a list. In that case, we
7837 just set XEXP (list-entry, 0) to 0. */
7839 static rtx *reg_values;
7841 /* This is a preallocated REG rtx which we use as a temporary in
7842 reload_cse_invalidate_regno, so that we don't need to allocate a
7843 new one each time through a loop in that function. */
7845 static rtx invalidate_regno_rtx;
7847 /* This is a set of registers for which we must remove REG_DEAD notes in
7848 previous insns, because our modifications made them invalid. That can
7849 happen if we introduced the register into the current insn, or we deleted
7850 the current insn which used to set the register. */
7852 static HARD_REG_SET no_longer_dead_regs;
7854 /* Invalidate any entries in reg_values which depend on REGNO,
7855 including those for REGNO itself. This is called if REGNO is
7856 changing. If CLOBBER is true, then always forget anything we
7857 currently know about REGNO. MODE is the mode of the assignment to
7858 REGNO, which is used to determine how many hard registers are being
7859 changed. If MODE is VOIDmode, then only REGNO is being changed;
7860 this is used when invalidating call clobbered registers across a
7864 reload_cse_invalidate_regno (regno, mode, clobber)
7866 enum machine_mode mode;
7872 /* Our callers don't always go through true_regnum; we may see a
7873 pseudo-register here from a CLOBBER or the like. We probably
7874 won't ever see a pseudo-register that has a real register number,
7875 for we check anyhow for safety. */
7876 if (regno >= FIRST_PSEUDO_REGISTER)
7877 regno = reg_renumber[regno];
7881 if (mode == VOIDmode)
7882 endregno = regno + 1;
7884 endregno = regno + HARD_REGNO_NREGS (regno, mode);
7887 for (i = regno; i < endregno; i++)
7890 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7894 for (x = reg_values[i]; x; x = XEXP (x, 1))
7896 if (XEXP (x, 0) != 0
7897 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
7899 /* If this is the only entry on the list, clear
7900 reg_values[i]. Otherwise, just clear this entry on
7902 if (XEXP (x, 1) == 0 && x == reg_values[i])
7912 /* We must look at earlier registers, in case REGNO is part of a
7913 multi word value but is not the first register. If an earlier
7914 register has a value in a mode which overlaps REGNO, then we must
7915 invalidate that earlier register. Note that we do not need to
7916 check REGNO or later registers (we must not check REGNO itself,
7917 because we would incorrectly conclude that there was a conflict). */
7919 for (i = 0; i < regno; i++)
7923 for (x = reg_values[i]; x; x = XEXP (x, 1))
7925 if (XEXP (x, 0) != 0)
7927 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
7928 REGNO (invalidate_regno_rtx) = i;
7929 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
7932 reload_cse_invalidate_regno (i, VOIDmode, 1);
7940 /* The memory at address MEM_BASE is being changed.
7941 Return whether this change will invalidate VAL. */
7944 reload_cse_mem_conflict_p (mem_base, val)
7952 code = GET_CODE (val);
7955 /* Get rid of a few simple cases quickly. */
7968 if (GET_MODE (mem_base) == BLKmode
7969 || GET_MODE (val) == BLKmode)
7971 if (anti_dependence (val, mem_base))
7973 /* The address may contain nested MEMs. */
7980 fmt = GET_RTX_FORMAT (code);
7982 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7986 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
7989 else if (fmt[i] == 'E')
7993 for (j = 0; j < XVECLEN (val, i); j++)
7994 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
8002 /* Invalidate any entries in reg_values which are changed because of a
8003 store to MEM_RTX. If this is called because of a non-const call
8004 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8007 reload_cse_invalidate_mem (mem_rtx)
8012 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8016 for (x = reg_values[i]; x; x = XEXP (x, 1))
8018 if (XEXP (x, 0) != 0
8019 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
8021 /* If this is the only entry on the list, clear
8022 reg_values[i]. Otherwise, just clear this entry on
8024 if (XEXP (x, 1) == 0 && x == reg_values[i])
8035 /* Invalidate DEST, which is being assigned to or clobbered. The
8036 second parameter exists so that this function can be passed to
8037 note_stores; it is ignored. */
8040 reload_cse_invalidate_rtx (dest, ignore)
8042 rtx ignore ATTRIBUTE_UNUSED;
8044 while (GET_CODE (dest) == STRICT_LOW_PART
8045 || GET_CODE (dest) == SIGN_EXTRACT
8046 || GET_CODE (dest) == ZERO_EXTRACT
8047 || GET_CODE (dest) == SUBREG)
8048 dest = XEXP (dest, 0);
8050 if (GET_CODE (dest) == REG)
8051 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8052 else if (GET_CODE (dest) == MEM)
8053 reload_cse_invalidate_mem (dest);
8056 /* Possibly delete death notes on the insns before INSN if modifying INSN
8057 extended the lifespan of the registers. */
8060 reload_cse_delete_death_notes (insn)
8065 for (dreg = 0; dreg < FIRST_PSEUDO_REGISTER; dreg++)
8069 if (! TEST_HARD_REG_BIT (no_longer_dead_regs, dreg))
8072 for (trial = prev_nonnote_insn (insn);
8074 && GET_CODE (trial) != CODE_LABEL
8075 && GET_CODE (trial) != BARRIER);
8076 trial = prev_nonnote_insn (trial))
8078 if (find_regno_note (trial, REG_DEAD, dreg))
8080 remove_death (dreg, trial);
8087 /* Record that the current insn uses hard reg REGNO in mode MODE. This
8088 will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
8089 notes for this register. */
8092 reload_cse_no_longer_dead (regno, mode)
8094 enum machine_mode mode;
8096 int nregs = HARD_REGNO_NREGS (regno, mode);
8099 SET_HARD_REG_BIT (no_longer_dead_regs, regno);
8105 /* Do a very simple CSE pass over the hard registers.
8107 This function detects no-op moves where we happened to assign two
8108 different pseudo-registers to the same hard register, and then
8109 copied one to the other. Reload will generate a useless
8110 instruction copying a register to itself.
8112 This function also detects cases where we load a value from memory
8113 into two different registers, and (if memory is more expensive than
8114 registers) changes it to simply copy the first register into the
8117 Another optimization is performed that scans the operands of each
8118 instruction to see whether the value is already available in a
8119 hard register. It then replaces the operand with the hard register
8120 if possible, much like an optional reload would. */
8123 reload_cse_regs (first)
8131 init_alias_analysis ();
8133 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8134 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8137 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8138 free them when we are done. */
8139 push_obstacks (&reload_obstack, &reload_obstack);
8140 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
8142 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8143 memory for a non-const call instruction. */
8144 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
8146 /* This is used in reload_cse_invalidate_regno to avoid consing a
8147 new REG in a loop in that function. */
8148 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
8150 for (insn = first; insn; insn = NEXT_INSN (insn))
8154 if (GET_CODE (insn) == CODE_LABEL)
8156 /* Forget all the register values at a code label. We don't
8157 try to do anything clever around jumps. */
8158 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8164 #ifdef NON_SAVING_SETJMP
8165 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
8166 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
8168 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8175 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8178 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8180 /* If this is a call instruction, forget anything stored in a
8181 call clobbered register, or, if this is not a const call, in
8183 if (GET_CODE (insn) == CALL_INSN)
8185 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8186 if (call_used_regs[i])
8187 reload_cse_invalidate_regno (i, VOIDmode, 1);
8189 if (! CONST_CALL_P (insn))
8190 reload_cse_invalidate_mem (callmem);
8193 body = PATTERN (insn);
8194 if (GET_CODE (body) == SET)
8197 if (reload_cse_noop_set_p (body, insn))
8199 PUT_CODE (insn, NOTE);
8200 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8201 NOTE_SOURCE_FILE (insn) = 0;
8202 reload_cse_delete_death_notes (insn);
8204 /* We're done with this insn. */
8208 /* It's not a no-op, but we can try to simplify it. */
8209 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8210 count += reload_cse_simplify_set (body, insn);
8212 if (count > 0 && apply_change_group ())
8213 reload_cse_delete_death_notes (insn);
8214 else if (reload_cse_simplify_operands (insn))
8215 reload_cse_delete_death_notes (insn);
8217 reload_cse_record_set (body, body);
8219 else if (GET_CODE (body) == PARALLEL)
8223 /* If every action in a PARALLEL is a noop, we can delete
8224 the entire PARALLEL. */
8225 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8226 if ((GET_CODE (XVECEXP (body, 0, i)) != SET
8227 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
8228 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
8232 PUT_CODE (insn, NOTE);
8233 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8234 NOTE_SOURCE_FILE (insn) = 0;
8235 reload_cse_delete_death_notes (insn);
8237 /* We're done with this insn. */
8241 /* It's not a no-op, but we can try to simplify it. */
8242 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8243 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8244 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8245 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8247 if (count > 0 && apply_change_group ())
8248 reload_cse_delete_death_notes (insn);
8249 else if (reload_cse_simplify_operands (insn))
8250 reload_cse_delete_death_notes (insn);
8252 /* Look through the PARALLEL and record the values being
8253 set, if possible. Also handle any CLOBBERs. */
8254 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8256 rtx x = XVECEXP (body, 0, i);
8258 if (GET_CODE (x) == SET)
8259 reload_cse_record_set (x, body);
8261 note_stores (x, reload_cse_invalidate_rtx);
8265 note_stores (body, reload_cse_invalidate_rtx);
8268 /* Clobber any registers which appear in REG_INC notes. We
8269 could keep track of the changes to their values, but it is
8270 unlikely to help. */
8274 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8275 if (REG_NOTE_KIND (x) == REG_INC)
8276 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8280 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8281 after we have processed the insn. */
8282 if (GET_CODE (insn) == CALL_INSN)
8286 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8287 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8288 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8292 /* Free all the temporary structures we created, and go back to the
8293 regular obstacks. */
8294 obstack_free (&reload_obstack, firstobj);
8298 /* Return whether the values known for REGNO are equal to VAL.  MODE
8299    is the mode of the object that VAL is being copied to; this matters
8300    if VAL is a CONST_INT. */
/* NOTE(review): this listing has elided lines -- the return type, opening
   brace, the other parameter declarations, and the return statements of
   this function are not visible here.  */
8303 reload_cse_regno_equal_p (regno, val, mode)
8306      enum machine_mode mode;
     /* Walk the chain of values recorded for hard reg REGNO (reg_values
	appears to be an EXPR_LIST chain, walked via XEXP (x, 1)) looking
	for an entry rtx_equal_p to VAL.  */
8313   for (x = reg_values[regno]; x; x = XEXP (x, 1))
8314     if (XEXP (x, 0) != 0
8315	&& rtx_equal_p (XEXP (x, 0), val)
	/* For a CONST_INT, the mode under which the value was recorded
	   must also be acceptable: either it matches MODE exactly, or
	   MODE is strictly narrower and truncating from the recorded
	   mode down to MODE is a no-op.  */
8316	&& (GET_CODE (val) != CONST_INT
8317	    || mode == GET_MODE (x)
8318	    || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8319		/* On a big endian machine if the value spans more than
8320		   one register then this register holds the high part of
8321		   it and we can't use it.
8323		   ??? We should also compare with the high part of the
8325		&& !(WORDS_BIG_ENDIAN
8326		     && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8327		&& TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8328					  GET_MODE_BITSIZE (GET_MODE (x))))))
8334 /* See whether a single set is a noop.  SET is the set instruction we
8335    should check, and INSN is the instruction from which it came. */
/* NOTE(review): lines are elided from this listing -- the return type,
   braces, most declarations, and several conditions and return paths of
   this function are not visible.  Comments below describe only what the
   visible lines establish.  */
8338 reload_cse_noop_set_p (set, insn)
8343   enum machine_mode dest_mode;
8347   src = SET_SRC (set);
8348   dest = SET_DEST (set);
8349   dest_mode = GET_MODE (dest);
  /* A source with side effects (e.g. auto-increment addressing) can
     never make the set a no-op.  */
8351   if (side_effects_p (src))
8354   dreg = true_regnum (dest);
8355   sreg = true_regnum (src);
8357   /* Check for setting a register to itself.  In this case, we don't
8358      have to worry about REG_DEAD notes. */
8359   if (dreg >= 0 && dreg == sreg)
8365   /* Check for setting a register to itself. */
8369   /* Check for setting a register to a value which we already know
8370      is in the register. */
8371   else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8374   /* Check for setting a register DREG to another register SREG
8375      where SREG is equal to a value which is already in DREG. */
8380       for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8384	  if (XEXP (x, 0) == 0)
8387	  if (dest_mode == GET_MODE (x))
	  /* If DEST is narrower than the recorded value, compare DREG's
	     contents against the low part of that value instead.  */
8389	  else if (GET_MODE_BITSIZE (dest_mode)
8390		   < GET_MODE_BITSIZE (GET_MODE (x)))
8391	    tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8396	      && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8404   else if (GET_CODE (dest) == MEM)
8406       /* Check for storing a register to memory when we know that the
8407	  register is equivalent to the memory location. */
8409 	  && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8410 	  && ! side_effects_p (dest))
8414   /* If we can delete this SET, then we need to look for an earlier
8415      REG_DEAD note on DREG, and remove it if it exists. */
8416   if (ret && dreg >= 0)
8418       if (! find_regno_note (insn, REG_UNUSED, dreg))
8419 	reload_cse_no_longer_dead (dreg, dest_mode);
8425 /* Try to simplify a single SET instruction.  SET is the set pattern.
8426    INSN is the instruction it came from.
8427    This function only handles one case: if we set a register to a value
8428    which is not a register, we try to find that value in some other register
8429    and change the set into a register copy. */
/* NOTE(review): elided lines hide the return type, braces, early-return
   paths, and part of the register-scan condition.  The caller accumulates
   this function's result into a change count (see the
   "count += reload_cse_simplify_set (...)" calls earlier in this file),
   so it presumably returns the number of changes queued -- confirm
   against the full source.  */
8432 reload_cse_simplify_set (set, insn)
8438   enum machine_mode dest_mode;
8439   enum reg_class dclass;
8442   dreg = true_regnum (SET_DEST (set));
8446   src = SET_SRC (set);
  /* Only handle a non-register source without side effects; a source
     that is already a register has nothing to gain from this rewrite.  */
8447   if (side_effects_p (src) || true_regnum (src) >= 0)
8450   dclass = REGNO_REG_CLASS (dreg);
8452   /* If memory loads are cheaper than register copies, don't change
8454   if (GET_CODE (src) == MEM
8455       && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
8458   dest_mode = GET_MODE (SET_DEST (set));
  /* Scan all hard registers for one that is cheap to copy from and is
     known to already hold the value SRC.  */
8459   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8462 	  && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8463 	  && reload_cse_regno_equal_p (i, src, dest_mode))
8467 	  /* Pop back to the real obstacks while changing the insn. */
	  /* Queue (in_group = 1) replacing the source with a copy from
	     hard reg I; the group is applied later by the caller.  */
8470 	  validated = validate_change (insn, &SET_SRC (set),
8471 				       gen_rtx_REG (dest_mode, i), 1);
8473 	  /* Go back to the obstack we are using for temporary
8475 	  push_obstacks (&reload_obstack, &reload_obstack);
	  /* Since the insn now uses reg I, an earlier REG_DEAD note for
	     it must be removed (unless the reg was merely REG_UNUSED).  */
8477 	  if (validated && ! find_regno_note (insn, REG_UNUSED, i))
8479 	      reload_cse_no_longer_dead (i, dest_mode);
8487 /* Try to replace operands in INSN with equivalent values that are already
8488    in registers.  This can be viewed as optional reloading.
8490    For each non-register operand in the insn, see if any hard regs are
8491    known to be equivalent to that operand.  Record the alternatives which
8492    can accept these hard registers.  Among all alternatives, select the
8493    ones which are better or equal to the one currently matching, where
8494    "better" is in terms of '?' and '!' constraints.  Among the remaining
8495    alternatives, select the one which replaces most operands with
/* NOTE(review): elided lines hide the return type, braces, several local
   declarations, some early returns, and parts of the constraint-scanning
   switch.  The whole body is compiled only #ifdef REGISTER_CONSTRAINTS.  */
8499 reload_cse_simplify_operands (insn)
8502 #ifdef REGISTER_CONSTRAINTS
8503   int insn_code_number, n_operands, n_alternatives;
8506   char *constraints[MAX_RECOG_OPERANDS];
8508   /* Vector recording how bad an alternative is. */
8509   int *alternative_reject;
8510   /* Vector recording how many registers can be introduced by choosing
8511      this alternative. */
8512   int *alternative_nregs;
8513   /* Array of vectors recording, for each operand and each alternative,
8514      which hard register to substitute, or -1 if the operand should be
8516   int *op_alt_regno[MAX_RECOG_OPERANDS];
8517   /* Array of alternatives, sorted in order of decreasing desirability. */
8518   int *alternative_order;
8520   /* Find out some information about this insn. */
8521   insn_code_number = recog_memoized (insn);
8522   /* We don't modify asm instructions. */
8523   if (insn_code_number < 0)
8526   n_operands = insn_n_operands[insn_code_number];
8527   n_alternatives = insn_n_alternatives[insn_code_number];
8529   if (n_alternatives == 0 || n_operands == 0)
8531   insn_extract (insn);
8533   /* Figure out which alternative currently matches. */
8534   if (! constrain_operands (insn_code_number, 1))
  /* Per-alternative scratch vectors live on the stack for the duration
     of this call; rejects and nregs start at zero.  */
8537   alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
8538   alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
8539   alternative_order = (int *) alloca (n_alternatives * sizeof (int));
8540   bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
8541   bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));
  /* Phase 1: for each operand, scan its constraint string, accumulate
     per-alternative reject weights, and record which hard registers
     could replace the operand in which alternatives.  */
8543   for (i = 0; i < n_operands; i++)
8545       enum machine_mode mode;
8549       op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
8550       for (j = 0; j < n_alternatives; j++)
8551 	op_alt_regno[i][j] = -1;
8553       p = constraints[i] = insn_operand_constraint[insn_code_number][i];
8554       mode = insn_operand_mode[insn_code_number][i];
8556       /* Add the reject values for each alternative given by the constraints
8557 	 for this operand. */
	 /* A '?' mildly disfavors an alternative; a '!' effectively
	    disqualifies it (weight 300 vs 3).  */
8565 	    alternative_reject[j] += 3;
8567 	    alternative_reject[j] += 300;
8570       /* We won't change operands which are already registers.  We
8571 	 also don't want to modify output operands. */
8572       regno = true_regnum (recog_operand[i]);
8574 	  || constraints[i][0] == '='
8575 	  || constraints[i][0] == '+')
8578       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8580 	  int class = (int) NO_REGS;
8582 	  if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
8585 	  /* We found a register equal to this operand.  Now look for all
8586 	     alternatives that can accept this register and have not been
8587 	     assigned a register they can use yet. */
8596 	      case '=': case '+': case '?':
8597 	      case '#': case '&': case '!':
8599 	      case '0': case '1': case '2': case '3': case '4':
8600 	      case 'm': case '<': case '>': case 'V': case 'o':
8601 	      case 'E': case 'F': case 'G': case 'H':
8602 	      case 's': case 'i': case 'n':
8603 	      case 'I': case 'J': case 'K': case 'L':
8604 	      case 'M': case 'N': case 'O': case 'P':
8605 #ifdef EXTRA_CONSTRAINT
8606 	      case 'Q': case 'R': case 'S': case 'T': case 'U':
8609 		/* These don't say anything we care about. */
		/* 'r'-style constraints widen CLASS toward GENERAL_REGS;
		   register-class letters widen it via
		   REG_CLASS_FROM_LETTER.  */
8613 		class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8618 		  = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER (c)];
8621 	      case ',': case '\0':
8622 		/* See if REGNO fits this alternative, and set it up as the
8623 		   replacement register if we don't have one for this
8625 		if (op_alt_regno[i][j] == -1
8626 		    && reg_fits_class_p (gen_rtx_REG (mode, regno), class,
8629 		    alternative_nregs[j]++;
8630 		    op_alt_regno[i][j] = regno;
  /* Phase 2: keep only alternatives no worse than the one that
     currently matches.  */
8642   /* Record all alternatives which are better or equal to the currently
8643      matching one in the alternative_order array. */
8644   for (i = j = 0; i < n_alternatives; i++)
8645     if (alternative_reject[i] <= alternative_reject[which_alternative])
8646       alternative_order[j++] = i;
8649   /* Sort it.  Given a small number of alternatives, a dumb algorithm
8650      won't hurt too much. */
  /* Selection sort: lowest reject first, ties broken by most registers
     introduced (note this_nregs < best_nregs -- fewer regs loses).  */
8651   for (i = 0; i < n_alternatives - 1; i++)
8654       int best_reject = alternative_reject[alternative_order[i]];
8655       int best_nregs = alternative_nregs[alternative_order[i]];
8658       for (j = i + 1; j < n_alternatives; j++)
8660 	  int this_reject = alternative_reject[alternative_order[j]];
8661 	  int this_nregs = alternative_nregs[alternative_order[j]];
8663 	  if (this_reject < best_reject
8664 	      || (this_reject == best_reject && this_nregs < best_nregs))
8667 	      best_reject = this_reject;
8668 	      best_nregs = this_nregs;
8672       tmp = alternative_order[best];
8673       alternative_order[best] = alternative_order[i];
8674       alternative_order[i] = tmp;
  /* Phase 3: queue the substitutions for the winning alternative.  */
8677   /* Substitute the operands as determined by op_alt_regno for the best
8679   j = alternative_order[0];
8680   CLEAR_HARD_REG_SET (no_longer_dead_regs);
8682   /* Pop back to the real obstacks while changing the insn. */
8685   for (i = 0; i < n_operands; i++)
8687       enum machine_mode mode = insn_operand_mode[insn_code_number][i];
8688       if (op_alt_regno[i][j] == -1)
8691       reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
8692       validate_change (insn, recog_operand_loc[i],
8693 		       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
  /* Duplicate operands (match_dup) must be replaced consistently with
     the operand they duplicate.  */
8696   for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
8698       int op = recog_dup_num[i];
8699       enum machine_mode mode = insn_operand_mode[insn_code_number][op];
8701       if (op_alt_regno[op][j] == -1)
8704       reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
8705       validate_change (insn, recog_dup_loc[i],
8706 		       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
8709   /* Go back to the obstack we are using for temporary
8711   push_obstacks (&reload_obstack, &reload_obstack);
  /* All changes were queued (in_group = 1); commit or roll back the
     whole group atomically and report whether it succeeded.  */
8713   return apply_change_group ();
8719 /* These two variables are used to pass information from
8720    reload_cse_record_set to reload_cse_check_clobber. */
/* Set by reload_cse_check_clobber when an overlap is found; the caller
   clears it before each note_stores scan (see reload_cse_record_set).  */
8722 static int reload_cse_check_clobbered;
/* The SET_SRC being tested for clobbering by other parts of a PARALLEL.  */
8723 static rtx reload_cse_check_src;
8725 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
8726    RELOAD_CSE_CHECK_CLOBBERED.  This is called via note_stores.  The
8727    second argument, which is passed by note_stores, is ignored. */
/* NOTE(review): the return type, opening brace, and first parameter
   declaration are elided from this listing.  */
8730 reload_cse_check_clobber (dest, ignore)
8732      rtx ignore ATTRIBUTE_UNUSED;
  /* Only ever sets the flag; it is never cleared here, so a single
     overlap anywhere in the scan is sticky.  */
8734   if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8735     reload_cse_check_clobbered = 1;
8738 /* Record the result of a SET instruction. SET is the set pattern.
8739 BODY is the pattern of the insn that it came from. */
8742 reload_cse_record_set (set, body)
8748 enum machine_mode dest_mode;
8750 dest = SET_DEST (set);
8751 src = SET_SRC (set);
8752 dreg = true_regnum (dest);
8753 sreg = true_regnum (src);
8754 dest_mode = GET_MODE (dest);
8756 /* Some machines don't define AUTO_INC_DEC, but they still use push
8757 instructions. We need to catch that case here in order to
8758 invalidate the stack pointer correctly. Note that invalidating
8759 the stack pointer is different from invalidating DEST. */
8761 while (GET_CODE (x) == SUBREG
8762 || GET_CODE (x) == ZERO_EXTRACT
8763 || GET_CODE (x) == SIGN_EXTRACT
8764 || GET_CODE (x) == STRICT_LOW_PART)
8766 if (push_operand (x, GET_MODE (x)))
8768 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
8769 reload_cse_invalidate_rtx (dest, NULL_RTX);
8773 /* We can only handle an assignment to a register, or a store of a
8774 register to a memory location. For other cases, we just clobber
8775 the destination. We also have to just clobber if there are side
8776 effects in SRC or DEST. */
8777 if ((dreg < 0 && GET_CODE (dest) != MEM)
8778 || side_effects_p (src)
8779 || side_effects_p (dest))
8781 reload_cse_invalidate_rtx (dest, NULL_RTX);
8786 /* We don't try to handle values involving CC, because it's a pain
8787 to keep track of when they have to be invalidated. */
8788 if (reg_mentioned_p (cc0_rtx, src)
8789 || reg_mentioned_p (cc0_rtx, dest))
8791 reload_cse_invalidate_rtx (dest, NULL_RTX);
8796 /* If BODY is a PARALLEL, then we need to see whether the source of
8797 SET is clobbered by some other instruction in the PARALLEL. */
8798 if (GET_CODE (body) == PARALLEL)
8802 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8806 x = XVECEXP (body, 0, i);
8810 reload_cse_check_clobbered = 0;
8811 reload_cse_check_src = src;
8812 note_stores (x, reload_cse_check_clobber);
8813 if (reload_cse_check_clobbered)
8815 reload_cse_invalidate_rtx (dest, NULL_RTX);
8825 /* This is an assignment to a register. Update the value we
8826 have stored for the register. */
8831 /* This is a copy from one register to another. Any values
8832 which were valid for SREG are now valid for DREG. If the
8833 mode changes, we use gen_lowpart_common to extract only
8834 the part of the value that is copied. */
8835 reg_values[dreg] = 0;
8836 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8840 if (XEXP (x, 0) == 0)
8842 if (dest_mode == GET_MODE (XEXP (x, 0)))
8844 else if (GET_MODE_BITSIZE (dest_mode)
8845 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
8848 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8850 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
8855 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
8857 /* We've changed DREG, so invalidate any values held by other
8858 registers that depend upon it. */
8859 reload_cse_invalidate_regno (dreg, dest_mode, 0);
8861 /* If this assignment changes more than one hard register,
8862 forget anything we know about the others. */
8863 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
8864 reg_values[dreg + i] = 0;
8866 else if (GET_CODE (dest) == MEM)
8868 /* Invalidate conflicting memory locations. */
8869 reload_cse_invalidate_mem (dest);
8871 /* If we're storing a register to memory, add DEST to the list
8873 if (sreg >= 0 && ! side_effects_p (dest))
8874 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
8879 /* We should have bailed out earlier. */