1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
32 #include "insn-config.h"
38 #include "basic-block.h"
48 /* This file contains the reload pass of the compiler, which is
49 run after register allocation has been done. It checks that
50 each insn is valid (operands required to be in registers really
51 are in registers of the proper class) and fixes up invalid ones
52 by copying values temporarily into registers for the insns that need them.
55 The results of register allocation are described by the vector
56 reg_renumber; the insns still contain pseudo regs, but reg_renumber
57 can be used to find which hard reg, if any, a pseudo reg is in.
59 The technique we always use is to free up a few hard regs that are
60 called ``reload regs'', and for each place where a pseudo reg
61 must be in a hard reg, copy it temporarily into one of the reload regs.
63 Reload regs are allocated locally for every instruction that needs
64 reloads. When there are pseudos which are allocated to a register that
65 has been chosen as a reload reg, such pseudos must be ``spilled''.
66 This means that they go to other hard regs, or to stack slots if no other
67 available hard regs can be found. Spilling can invalidate more
68 insns, requiring additional need for reloads, so we must keep checking
69 until the process stabilizes.
71 For machines with different classes of registers, we must keep track
72 of the register class needed for each reload, and make sure that
73 we allocate enough reload registers of each class.
75 The file reload.c contains the code that checks one insn for
76 validity and reports the reloads that it needs. This file
77 is in charge of scanning the entire rtl code, accumulating the
78 reload needs, spilling, assigning reload registers to use for
79 fixing up each insn, and generating the new insns to copy values
80 into the reload registers. */
82 #ifndef REGISTER_MOVE_COST
83 #define REGISTER_MOVE_COST(m, x, y) 2
87 #define LOCAL_REGNO(REGNO) 0
90 /* During reload_as_needed, element N contains a REG rtx for the hard reg
91 into which reg N has been reloaded (perhaps for a previous insn). */
92 static rtx *reg_last_reload_reg;
94 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
95 for an output reload that stores into reg N. */
96 static char *reg_has_output_reload;
98 /* Indicates which hard regs are reload-registers for an output reload
99 in the current insn. */
100 static HARD_REG_SET reg_is_output_reload;
102 /* Element N is the constant value to which pseudo reg N is equivalent,
103 or zero if pseudo reg N is not equivalent to a constant.
104 find_reloads looks at this in order to replace pseudo reg N
105 with the constant it stands for. */
106 rtx *reg_equiv_constant;
108 /* Element N is a memory location to which pseudo reg N is equivalent,
109 prior to any register elimination (such as frame pointer to stack
110 pointer). Depending on whether or not it is a valid address, this value
111 is transferred to either reg_equiv_address or reg_equiv_mem. */
112 rtx *reg_equiv_memory_loc;
114 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
115 This is used when the address is not valid as a memory address
116 (because its displacement is too big for the machine.) */
117 rtx *reg_equiv_address;
119 /* Element N is the memory slot to which pseudo reg N is equivalent,
120 or zero if pseudo reg N is not equivalent to a memory slot. */
123 /* Widest width in which each pseudo reg is referred to (via subreg). */
124 static unsigned int *reg_max_ref_width;
126 /* Element N is the list of insns that initialized reg N from its equivalent
127 constant or memory slot. */
128 static rtx *reg_equiv_init;
130 /* Vector to remember old contents of reg_renumber before spilling. */
131 static short *reg_old_renumber;
133 /* During reload_as_needed, element N contains the last pseudo regno reloaded
134 into hard register N. If that pseudo reg occupied more than one register,
135 reg_reloaded_contents points to that pseudo for each spill register in
136 use; all of these must remain set for an inheritance to occur. */
137 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
139 /* During reload_as_needed, element N contains the insn for which
140 hard register N was last used. Its contents are significant only
141 when reg_reloaded_valid is set for this register. */
142 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
144 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
145 static HARD_REG_SET reg_reloaded_valid;
146 /* Indicate if the register was dead at the end of the reload.
147 This is only valid if reg_reloaded_contents is set and valid. */
148 static HARD_REG_SET reg_reloaded_dead;
150 /* Number of spill-regs so far; number of valid elements of spill_regs. */
153 /* In parallel with spill_regs, contains REG rtx's for those regs.
154 Holds the last rtx used for any given reg, or 0 if it has never
155 been used for spilling yet. This rtx is reused, provided it has the proper mode.
157 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
159 /* In parallel with spill_regs, contains nonzero for a spill reg
160 that was stored after the last time it was used.
161 The precise value is the insn generated to do the store. */
162 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
164 /* This is the register that was stored with spill_reg_store. This is a
165 copy of reload_out / reload_out_reg when the value was stored; if
166 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
167 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
169 /* This table is the inverse mapping of spill_regs:
170 indexed by hard reg number,
171 it contains the position of that reg in spill_regs,
172 or -1 for something that is not in spill_regs.
174 ?!? This is no longer accurate. */
175 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
177 /* This reg set indicates registers that can't be used as spill registers for
178 the currently processed insn. These are the hard registers which are live
179 during the insn, but not allocated to pseudos, as well as fixed registers.
181 static HARD_REG_SET bad_spill_regs;
183 /* These are the hard registers that can't be used as spill register for any
184 insn. This includes registers used for user variables and registers that
185 we can't eliminate. A register that appears in this set also can't be used
186 to retry register allocation. */
187 static HARD_REG_SET bad_spill_regs_global;
189 /* Describes order of use of registers for reloading
190 of spilled pseudo-registers. `n_spills' is the number of
191 elements that are actually valid; new ones are added at the end.
193 Both spill_regs and spill_reg_order are used on two occasions:
194 once during find_reload_regs, where they keep track of the spill registers
195 for a single insn, but also during reload_as_needed where they show all
196 the registers ever used by reload. For the latter case, the information
197 is calculated during finish_spills. */
198 static short spill_regs[FIRST_PSEUDO_REGISTER];
200 /* This vector of reg sets indicates, for each pseudo, which hard registers
201 may not be used for retrying global allocation because the register was
202 formerly spilled from one of them. If we allowed reallocating a pseudo to
203 a register that it was already allocated to, reload might not terminate.
205 static HARD_REG_SET *pseudo_previous_regs;
207 /* This vector of reg sets indicates, for each pseudo, which hard
208 registers may not be used for retrying global allocation because they
209 are used as spill registers during one of the insns in which the register is live.
211 static HARD_REG_SET *pseudo_forbidden_regs;
213 /* All hard regs that have been used as spill registers for any insn are
214 marked in this set. */
215 static HARD_REG_SET used_spill_regs;
217 /* Index of last register assigned as a spill register. We allocate in
218 a round-robin fashion. */
219 static int last_spill_reg;
221 /* Nonzero if indirect addressing is supported on the machine; this means
222 that spilling (REG n) does not require reloading it into a register in
223 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
224 value indicates the level of indirect addressing supported, e.g., two
225 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get a hard register.
227 static char spill_indirect_levels;
229 /* Nonzero if indirect addressing is supported when the innermost MEM is
230 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
231 which these are valid is the same as spill_indirect_levels, above. */
232 char indirect_symref_ok;
234 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
235 char double_reg_address_ok;
237 /* Record the stack slot for each spilled hard register. */
238 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
240 /* Width allocated so far for that stack slot. */
241 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
243 /* Record which pseudos needed to be spilled. */
244 static regset_head spilled_pseudos;
246 /* Used for communication between order_regs_for_reload and count_pseudo.
247 Used to avoid counting one pseudo twice. */
248 static regset_head pseudos_counted;
250 /* First uid used by insns created by reload in this function.
251 Used in find_equiv_reg. */
252 int reload_first_uid;
254 /* Flag set by local-alloc or global-alloc if anything is live in
255 a call-clobbered reg across calls. */
256 int caller_save_needed;
258 /* Set to 1 while reload_as_needed is operating.
259 Required by some machines to handle any generated moves differently. */
260 int reload_in_progress = 0;
262 /* These arrays record the insn_code of insns that may be needed to
263 perform input and output reloads of special objects. They provide a
264 place to pass a scratch register. */
265 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
266 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
268 /* This obstack is used for allocation of rtl during register elimination.
269 The allocated storage can be freed once find_reloads has processed the insn.
271 struct obstack reload_obstack;
273 /* Points to the beginning of the reload_obstack. All insn_chain structures
274 are allocated first. */
275 char *reload_startobj;
277 /* The point after all insn_chain structures. Used to quickly deallocate
278 memory allocated in copy_reloads during calculate_needs_all_insns. */
279 char *reload_firstobj;
281 /* This points before all local rtl generated by register elimination.
282 Used to quickly free all memory after processing one insn. */
283 static char *reload_insn_firstobj;
285 List of insn_chain instructions, one for every insn that reload needs to examine.
287 struct insn_chain *reload_insn_chain;
290 extern tree current_function_decl;
292 extern union tree_node *current_function_decl;
295 /* List of all insns needing reloads. */
296 static struct insn_chain *insns_need_reload;
298 /* This structure is used to record information about register eliminations.
299 Each array entry describes one possible way of eliminating a register
300 in favor of another. If there is more than one way of eliminating a
301 particular register, the most preferred should be specified first. */
305 int from; /* Register number to be eliminated. */
306 int to; /* Register number used as replacement. */
307 int initial_offset; /* Initial difference between values. */
308 int can_eliminate; /* Nonzero if this elimination can be done. */
309 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
310 insns made by reload. */
311 int offset; /* Current offset between the two regs. */
312 int previous_offset; /* Offset at end of previous insn. */
313 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
314 rtx from_rtx; /* REG rtx for the register to be eliminated.
315 We cannot simply compare the number since
316 we might then spuriously replace a hard
317 register corresponding to a pseudo
318 assigned to the reg to be eliminated. */
319 rtx to_rtx; /* REG rtx for the replacement. */
322 static struct elim_table *reg_eliminate = 0;
324 /* This is an intermediate structure to initialize the table. It has
325 exactly the members provided by ELIMINABLE_REGS. */
326 static const struct elim_table_1
330 } reg_eliminate_1[] =
332 /* If a set of eliminable registers was specified, define the table from it.
333 Otherwise, default to the normal case of the frame pointer being
334 replaced by the stack pointer. */
336 #ifdef ELIMINABLE_REGS
339 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
342 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
344 /* Record the number of pending eliminations that have an offset not equal
345 to their initial offset. If nonzero, we use a new copy of each
346 replacement result in any insns encountered. */
347 int num_not_at_initial_offset;
349 /* Count the number of registers that we may be able to eliminate. */
350 static int num_eliminable;
351 /* And the number of registers that are equivalent to a constant that
352 can be eliminated to frame_pointer / arg_pointer + constant. */
353 static int num_eliminable_invariants;
355 /* For each label, we record the offset of each elimination. If we reach
356 a label by more than one path and an offset differs, we cannot do the
357 elimination. This information is indexed by the difference of the
358 number of the label and the first label number. We can't offset the
359 pointer itself as this can cause problems on machines with segmented
360 memory. The first table is an array of flags that records whether we
361 have yet encountered a label and the second table is an array of arrays,
362 one entry in the latter array for each elimination. */
364 static int first_label_num;
365 static char *offsets_known_at;
366 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
368 /* Number of labels in the current function. */
370 static int num_labels;
372 static void replace_pseudos_in_call_usage PARAMS ((rtx *,
375 static void maybe_fix_stack_asms PARAMS ((void));
376 static void copy_reloads PARAMS ((struct insn_chain *));
377 static void calculate_needs_all_insns PARAMS ((int));
378 static int find_reg PARAMS ((struct insn_chain *, int));
379 static void find_reload_regs PARAMS ((struct insn_chain *));
380 static void select_reload_regs PARAMS ((void));
381 static void delete_caller_save_insns PARAMS ((void));
383 static void spill_failure PARAMS ((rtx, enum reg_class));
384 static void count_spilled_pseudo PARAMS ((int, int, int));
385 static void delete_dead_insn PARAMS ((rtx));
386 static void alter_reg PARAMS ((int, int));
387 static void set_label_offsets PARAMS ((rtx, rtx, int));
388 static void check_eliminable_occurrences PARAMS ((rtx));
389 static void elimination_effects PARAMS ((rtx, enum machine_mode));
390 static int eliminate_regs_in_insn PARAMS ((rtx, int));
391 static void update_eliminable_offsets PARAMS ((void));
392 static void mark_not_eliminable PARAMS ((rtx, rtx, void *));
393 static void set_initial_elim_offsets PARAMS ((void));
394 static void verify_initial_elim_offsets PARAMS ((void));
395 static void set_initial_label_offsets PARAMS ((void));
396 static void set_offsets_for_label PARAMS ((rtx));
397 static void init_elim_table PARAMS ((void));
398 static void update_eliminables PARAMS ((HARD_REG_SET *));
399 static void spill_hard_reg PARAMS ((unsigned int, int));
400 static int finish_spills PARAMS ((int));
401 static void ior_hard_reg_set PARAMS ((HARD_REG_SET *, HARD_REG_SET *));
402 static void scan_paradoxical_subregs PARAMS ((rtx));
403 static void count_pseudo PARAMS ((int));
404 static void order_regs_for_reload PARAMS ((struct insn_chain *));
405 static void reload_as_needed PARAMS ((int));
406 static void forget_old_reloads_1 PARAMS ((rtx, rtx, void *));
407 static int reload_reg_class_lower PARAMS ((const PTR, const PTR));
408 static void mark_reload_reg_in_use PARAMS ((unsigned int, int,
411 static void clear_reload_reg_in_use PARAMS ((unsigned int, int,
414 static int reload_reg_free_p PARAMS ((unsigned int, int,
416 static int reload_reg_free_for_value_p PARAMS ((int, int, int,
418 rtx, rtx, int, int));
419 static int free_for_value_p PARAMS ((int, enum machine_mode, int,
420 enum reload_type, rtx, rtx,
422 static int reload_reg_reaches_end_p PARAMS ((unsigned int, int,
424 static int allocate_reload_reg PARAMS ((struct insn_chain *, int,
426 static int conflicts_with_override PARAMS ((rtx));
427 static void failed_reload PARAMS ((rtx, int));
428 static int set_reload_reg PARAMS ((int, int));
429 static void choose_reload_regs_init PARAMS ((struct insn_chain *, rtx *));
430 static void choose_reload_regs PARAMS ((struct insn_chain *));
431 static void merge_assigned_reloads PARAMS ((rtx));
432 static void emit_input_reload_insns PARAMS ((struct insn_chain *,
433 struct reload *, rtx, int));
434 static void emit_output_reload_insns PARAMS ((struct insn_chain *,
435 struct reload *, int));
436 static void do_input_reload PARAMS ((struct insn_chain *,
437 struct reload *, int));
438 static void do_output_reload PARAMS ((struct insn_chain *,
439 struct reload *, int));
440 static void emit_reload_insns PARAMS ((struct insn_chain *));
441 static void delete_output_reload PARAMS ((rtx, int, int));
442 static void delete_address_reloads PARAMS ((rtx, rtx));
443 static void delete_address_reloads_1 PARAMS ((rtx, rtx, rtx));
444 static rtx inc_for_reload PARAMS ((rtx, rtx, rtx, int));
445 static void reload_cse_regs_1 PARAMS ((rtx));
446 static int reload_cse_noop_set_p PARAMS ((rtx));
447 static int reload_cse_simplify_set PARAMS ((rtx, rtx));
448 static int reload_cse_simplify_operands PARAMS ((rtx, rtx));
449 static void reload_combine PARAMS ((void));
450 static void reload_combine_note_use PARAMS ((rtx *, rtx));
451 static void reload_combine_note_store PARAMS ((rtx, rtx, void *));
452 static void reload_cse_move2add PARAMS ((rtx));
453 static void move2add_note_store PARAMS ((rtx, rtx, void *));
455 static void add_auto_inc_notes PARAMS ((rtx, rtx));
457 static void copy_eh_notes PARAMS ((rtx, rtx));
458 static void failed_reload PARAMS ((rtx, int));
459 static int set_reload_reg PARAMS ((int, int));
460 static void reload_cse_simplify PARAMS ((rtx, rtx));
461 void fixup_abnormal_edges PARAMS ((void));
462 extern void dump_needs PARAMS ((struct insn_chain *));
464 /* Initialize the reload pass once per compilation. */
471 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
472 Set spill_indirect_levels to the number of levels such addressing is
473 permitted, zero if it is not permitted at all. */
476 = gen_rtx_MEM (Pmode,
479 LAST_VIRTUAL_REGISTER + 1),
481 spill_indirect_levels = 0;
483 while (memory_address_p (QImode, tem))
485 spill_indirect_levels++;
486 tem = gen_rtx_MEM (Pmode, tem);
489 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
491 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
492 indirect_symref_ok = memory_address_p (QImode, tem);
494 /* See if reg+reg is a valid (and offsettable) address. */
496 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
498 tem = gen_rtx_PLUS (Pmode,
499 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
500 gen_rtx_REG (Pmode, i));
502 /* This way, we make sure that reg+reg is an offsettable address. */
503 tem = plus_constant (tem, 4);
505 if (memory_address_p (QImode, tem))
507 double_reg_address_ok = 1;
512 /* Initialize obstack for our rtl allocation. */
513 gcc_obstack_init (&reload_obstack);
514 reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
516 INIT_REG_SET (&spilled_pseudos);
517 INIT_REG_SET (&pseudos_counted);
520 /* List of insn chains that are currently unused. */
521 static struct insn_chain *unused_insn_chains = 0;
523 /* Allocate an empty insn_chain structure. */
527 struct insn_chain *c;
529 if (unused_insn_chains == 0)
531 c = (struct insn_chain *)
532 obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
533 INIT_REG_SET (&c->live_throughout);
534 INIT_REG_SET (&c->dead_or_set);
538 c = unused_insn_chains;
539 unused_insn_chains = c->next;
541 c->is_caller_save_insn = 0;
542 c->need_operand_change = 0;
548 /* Small utility function to set all regs in hard reg set TO which are
549 allocated to pseudos in regset FROM. */
552 compute_use_by_pseudos (to, from)
558 EXECUTE_IF_SET_IN_REG_SET
559 (from, FIRST_PSEUDO_REGISTER, regno,
561 int r = reg_renumber[regno];
566 /* reload_combine uses the information from
567 BASIC_BLOCK->global_live_at_start, which might still
568 contain registers that have not actually been allocated
569 since they have an equivalence. */
570 if (! reload_completed)
575 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
577 SET_HARD_REG_BIT (*to, r + nregs);
582 /* Replace all pseudos found in LOC with their corresponding
586 replace_pseudos_in_call_usage (loc, mem_mode, usage)
588 enum machine_mode mem_mode;
602 unsigned int regno = REGNO (x);
604 if (regno < FIRST_PSEUDO_REGISTER)
607 x = eliminate_regs (x, mem_mode, usage);
611 replace_pseudos_in_call_usage (loc, mem_mode, usage);
615 if (reg_equiv_constant[regno])
616 *loc = reg_equiv_constant[regno];
617 else if (reg_equiv_mem[regno])
618 *loc = reg_equiv_mem[regno];
619 else if (reg_equiv_address[regno])
620 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
621 else if (GET_CODE (regno_reg_rtx[regno]) != REG
622 || REGNO (regno_reg_rtx[regno]) != regno)
623 *loc = regno_reg_rtx[regno];
629 else if (code == MEM)
631 replace_pseudos_in_call_usage (& XEXP (x, 0), GET_MODE (x), usage);
635 /* Process each of our operands recursively. */
636 fmt = GET_RTX_FORMAT (code);
637 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
639 replace_pseudos_in_call_usage (&XEXP (x, i), mem_mode, usage);
640 else if (*fmt == 'E')
641 for (j = 0; j < XVECLEN (x, i); j++)
642 replace_pseudos_in_call_usage (& XVECEXP (x, i, j), mem_mode, usage);
646 /* Global variables used by reload and its subroutines. */
648 /* Set during calculate_needs if an insn needs register elimination. */
649 static int something_needs_elimination;
650 /* Set during calculate_needs if an insn needs an operand changed. */
651 int something_needs_operands_changed;
653 /* Nonzero means we couldn't get enough spill regs. */
656 /* Main entry point for the reload pass.
658 FIRST is the first insn of the function being compiled.
660 GLOBAL nonzero means we were called from global_alloc
661 and should attempt to reallocate any pseudoregs that we
662 displace from hard regs we will use for reloads.
663 If GLOBAL is zero, we do not have enough information to do that,
664 so any pseudo reg that is spilled must go to the stack.
666 Return value is nonzero if reload failed
667 and we must not do any more for this function. */
670 reload (first, global)
676 struct elim_table *ep;
679 /* Make sure even insns with volatile mem refs are recognizable. */
684 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
686 /* Make sure that the last insn in the chain
687 is not something that needs reloading. */
688 emit_note (NULL, NOTE_INSN_DELETED);
690 /* Enable find_equiv_reg to distinguish insns made by reload. */
691 reload_first_uid = get_max_uid ();
693 #ifdef SECONDARY_MEMORY_NEEDED
694 /* Initialize the secondary memory table. */
695 clear_secondary_mem ();
698 /* We don't have a stack slot for any spill reg yet. */
699 memset ((char *) spill_stack_slot, 0, sizeof spill_stack_slot);
700 memset ((char *) spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
702 /* Initialize the save area information for caller-save, in case some
706 /* Compute which hard registers are now in use
707 as homes for pseudo registers.
708 This is done here rather than (eg) in global_alloc
709 because this point is reached even if not optimizing. */
710 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
713 /* A function that receives a nonlocal goto must save all call-saved
715 if (current_function_has_nonlocal_label)
716 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
717 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
718 regs_ever_live[i] = 1;
720 /* Find all the pseudo registers that didn't get hard regs
721 but do have known equivalent constants or memory slots.
722 These include parameters (known equivalent to parameter slots)
723 and cse'd or loop-moved constant memory addresses.
725 Record constant equivalents in reg_equiv_constant
726 so they will be substituted by find_reloads.
727 Record memory equivalents in reg_mem_equiv so they can
728 be substituted eventually by altering the REG-rtx's. */
730 reg_equiv_constant = (rtx *) xcalloc (max_regno, sizeof (rtx));
731 reg_equiv_mem = (rtx *) xcalloc (max_regno, sizeof (rtx));
732 reg_equiv_init = (rtx *) xcalloc (max_regno, sizeof (rtx));
733 reg_equiv_address = (rtx *) xcalloc (max_regno, sizeof (rtx));
734 reg_max_ref_width = (unsigned int *) xcalloc (max_regno, sizeof (int));
735 reg_old_renumber = (short *) xcalloc (max_regno, sizeof (short));
736 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
737 pseudo_forbidden_regs
738 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
740 = (HARD_REG_SET *) xcalloc (max_regno, sizeof (HARD_REG_SET));
742 CLEAR_HARD_REG_SET (bad_spill_regs_global);
744 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
745 Also find all paradoxical subregs and find largest such for each pseudo.
746 On machines with small register classes, record hard registers that
747 are used for user variables. These can never be used for spills.
748 Also look for a "constant" REG_SETJMP. This means that all
749 caller-saved registers must be marked live. */
751 num_eliminable_invariants = 0;
752 for (insn = first; insn; insn = NEXT_INSN (insn))
754 rtx set = single_set (insn);
756 /* We may introduce USEs that we want to remove at the end, so
757 we'll mark them with QImode. Make sure there are no
758 previously-marked insns left by say regmove. */
759 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
760 && GET_MODE (insn) != VOIDmode)
761 PUT_MODE (insn, VOIDmode);
763 if (GET_CODE (insn) == CALL_INSN
764 && find_reg_note (insn, REG_SETJMP, NULL))
765 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
766 if (! call_used_regs[i])
767 regs_ever_live[i] = 1;
769 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
771 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
773 #ifdef LEGITIMATE_PIC_OPERAND_P
774 && (! function_invariant_p (XEXP (note, 0))
776 /* A function invariant is often CONSTANT_P but may
777 include a register. We promise to only pass
778 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
779 || (CONSTANT_P (XEXP (note, 0))
780 && LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0))))
784 rtx x = XEXP (note, 0);
785 i = REGNO (SET_DEST (set));
786 if (i > LAST_VIRTUAL_REGISTER)
788 /* It can happen that a REG_EQUIV note contains a MEM
789 that is not a legitimate memory operand. As later
790 stages of reload assume that all addresses found
791 in the reg_equiv_* arrays were originally legitimate,
792 we ignore such REG_EQUIV notes. */
793 if (memory_operand (x, VOIDmode))
795 /* Always unshare the equivalence, so we can
796 substitute into this insn without touching the
798 reg_equiv_memory_loc[i] = copy_rtx (x);
800 else if (function_invariant_p (x))
802 if (GET_CODE (x) == PLUS)
804 /* This is PLUS of frame pointer and a constant,
805 and might be shared. Unshare it. */
806 reg_equiv_constant[i] = copy_rtx (x);
807 num_eliminable_invariants++;
809 else if (x == frame_pointer_rtx
810 || x == arg_pointer_rtx)
812 reg_equiv_constant[i] = x;
813 num_eliminable_invariants++;
815 else if (LEGITIMATE_CONSTANT_P (x))
816 reg_equiv_constant[i] = x;
819 reg_equiv_memory_loc[i]
820 = force_const_mem (GET_MODE (SET_DEST (set)), x);
821 if (!reg_equiv_memory_loc[i])
828 /* If this register is being made equivalent to a MEM
829 and the MEM is not SET_SRC, the equivalencing insn
830 is one with the MEM as a SET_DEST and it occurs later.
831 So don't mark this insn now. */
832 if (GET_CODE (x) != MEM
833 || rtx_equal_p (SET_SRC (set), x))
835 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
840 /* If this insn is setting a MEM from a register equivalent to it,
841 this is the equivalencing insn. */
842 else if (set && GET_CODE (SET_DEST (set)) == MEM
843 && GET_CODE (SET_SRC (set)) == REG
844 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
845 && rtx_equal_p (SET_DEST (set),
846 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
847 reg_equiv_init[REGNO (SET_SRC (set))]
848 = gen_rtx_INSN_LIST (VOIDmode, insn,
849 reg_equiv_init[REGNO (SET_SRC (set))]);
852 scan_paradoxical_subregs (PATTERN (insn));
857 first_label_num = get_first_label_num ();
858 num_labels = max_label_num () - first_label_num;
860 /* Allocate the tables used to store offset information at labels. */
861 /* We used to use alloca here, but the size of what it would try to
862 allocate would occasionally cause it to exceed the stack limit and
863 cause a core dump. */
864 offsets_known_at = xmalloc (num_labels);
866 = (int (*)[NUM_ELIMINABLE_REGS])
867 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
869 /* Alter each pseudo-reg rtx to contain its hard reg number.
870 Assign stack slots to the pseudos that lack hard regs or equivalents.
871 Do not touch virtual registers. */
873 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
876 /* If we have some registers we think can be eliminated, scan all insns to
877 see if there is an insn that sets one of these registers to something
878 other than itself plus a constant. If so, the register cannot be
879 eliminated. Doing this scan here eliminates an extra pass through the
880 main reload loop in the most common case where register elimination
882 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
883 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
884 || GET_CODE (insn) == CALL_INSN)
885 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
887 maybe_fix_stack_asms ();
889 insns_need_reload = 0;
890 something_needs_elimination = 0;
892 /* Initialize to -1, which means take the first spill register. */
895 /* Spill any hard regs that we know we can't eliminate. */
896 CLEAR_HARD_REG_SET (used_spill_regs);
897 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
898 if (! ep->can_eliminate)
899 spill_hard_reg (ep->from, 1);
901 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
902 if (frame_pointer_needed)
903 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
905 finish_spills (global);
907 /* From now on, we may need to generate moves differently. We may also
908 allow modifications of insns which cause them to not be recognized.
909 Any such modifications will be cleaned up during reload itself. */
910 reload_in_progress = 1;
912 /* This loop scans the entire function each go-round
913 and repeats until one repetition spills no additional hard regs. */
916 int something_changed;
919 HOST_WIDE_INT starting_frame_size;
921 /* Round size of stack frame to stack_alignment_needed. This must be done
922 here because the stack size may be a part of the offset computation
923 for register elimination, and there might have been new stack slots
924 created in the last iteration of this loop. */
925 if (cfun->stack_alignment_needed)
926 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
928 starting_frame_size = get_frame_size ();
930 set_initial_elim_offsets ();
931 set_initial_label_offsets ();
933 /* For each pseudo register that has an equivalent location defined,
934 try to eliminate any eliminable registers (such as the frame pointer)
935 assuming initial offsets for the replacement register, which
938 If the resulting location is directly addressable, substitute
939 the MEM we just got directly for the old REG.
941 If it is not addressable but is a constant or the sum of a hard reg
942 and constant, it is probably not addressable because the constant is
943 out of range, in that case record the address; we will generate
944 hairy code to compute the address in a register each time it is
945 needed. Similarly if it is a hard register, but one that is not
946 valid as an address register.
948 If the location is not addressable, but does not have one of the
949 above forms, assign a stack slot. We have to do this to avoid the
950 potential of producing lots of reloads if, e.g., a location involves
951 a pseudo that didn't get a hard register and has an equivalent memory
952 location that also involves a pseudo that didn't get a hard register.
954 Perhaps at some point we will improve reload_when_needed handling
955 so this problem goes away. But that's very hairy. */
957 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
958 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
960 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
962 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
964 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
965 else if (CONSTANT_P (XEXP (x, 0))
966 || (GET_CODE (XEXP (x, 0)) == REG
967 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
968 || (GET_CODE (XEXP (x, 0)) == PLUS
969 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
970 && (REGNO (XEXP (XEXP (x, 0), 0))
971 < FIRST_PSEUDO_REGISTER)
972 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
973 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
976 /* Make a new stack slot. Then indicate that something
977 changed so we go back and recompute offsets for
978 eliminable registers because the allocation of memory
979 below might change some offset. reg_equiv_{mem,address}
980 will be set up for this pseudo on the next pass around
982 reg_equiv_memory_loc[i] = 0;
983 reg_equiv_init[i] = 0;
988 if (caller_save_needed)
991 /* If we allocated another stack slot, redo elimination bookkeeping. */
992 if (starting_frame_size != get_frame_size ())
995 if (caller_save_needed)
997 save_call_clobbered_regs ();
998 /* That might have allocated new insn_chain structures. */
999 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1002 calculate_needs_all_insns (global);
1004 CLEAR_REG_SET (&spilled_pseudos);
1007 something_changed = 0;
1009 /* If we allocated any new memory locations, make another pass
1010 since it might have changed elimination offsets. */
1011 if (starting_frame_size != get_frame_size ())
1012 something_changed = 1;
1015 HARD_REG_SET to_spill;
1016 CLEAR_HARD_REG_SET (to_spill);
1017 update_eliminables (&to_spill);
1018 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1019 if (TEST_HARD_REG_BIT (to_spill, i))
1021 spill_hard_reg (i, 1);
1024 /* Regardless of the state of spills, if we previously had
1025 a register that we thought we could eliminate, but now can
1026 not eliminate, we must run another pass.
1028 Consider pseudos which have an entry in reg_equiv_* which
1029 reference an eliminable register. We must make another pass
1030 to update reg_equiv_* so that we do not substitute in the
1031 old value from when we thought the elimination could be
1033 something_changed = 1;
1037 select_reload_regs ();
1041 if (insns_need_reload != 0 || did_spill)
1042 something_changed |= finish_spills (global);
1044 if (! something_changed)
1047 if (caller_save_needed)
1048 delete_caller_save_insns ();
1050 obstack_free (&reload_obstack, reload_firstobj);
1053 /* If global-alloc was run, notify it of any register eliminations we have
1056 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1057 if (ep->can_eliminate)
1058 mark_elimination (ep->from, ep->to);
1060 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1061 If that insn didn't set the register (i.e., it copied the register to
1062 memory), just delete that insn instead of the equivalencing insn plus
1063 anything now dead. If we call delete_dead_insn on that insn, we may
1064 delete the insn that actually sets the register if the register dies
1065 there and that is incorrect. */
1067 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1069 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1072 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1074 rtx equiv_insn = XEXP (list, 0);
1076 /* If we already deleted the insn or if it may trap, we can't
1077 delete it. The latter case shouldn't happen, but can
1078 if an insn has a variable address, gets a REG_EH_REGION
1079 note added to it, and then gets converted into a load
1080 from a constant address. */
1081 if (GET_CODE (equiv_insn) == NOTE
1082 || can_throw_internal (equiv_insn))
1084 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1085 delete_dead_insn (equiv_insn);
1088 PUT_CODE (equiv_insn, NOTE);
1089 NOTE_SOURCE_FILE (equiv_insn) = 0;
1090 NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED;
1096 /* Use the reload registers where necessary
1097 by generating move instructions to move the must-be-register
1098 values into or out of the reload registers. */
1100 if (insns_need_reload != 0 || something_needs_elimination
1101 || something_needs_operands_changed)
1103 HOST_WIDE_INT old_frame_size = get_frame_size ();
1105 reload_as_needed (global);
1107 if (old_frame_size != get_frame_size ())
1111 verify_initial_elim_offsets ();
1114 /* If we were able to eliminate the frame pointer, show that it is no
1115 longer live at the start of any basic block. If it is live by
1116 virtue of being in a pseudo, that pseudo will be marked live
1117 and hence the frame pointer will be known to be live via that
1120 if (! frame_pointer_needed)
1122 CLEAR_REGNO_REG_SET (bb->global_live_at_start,
1123 HARD_FRAME_POINTER_REGNUM);
1125 /* Come here (with failure set nonzero) if we can't get enough spill regs
1126 and we decide not to abort about it. */
1129 CLEAR_REG_SET (&spilled_pseudos);
1130 reload_in_progress = 0;
1132 /* Now eliminate all pseudo regs by modifying them into
1133 their equivalent memory references.
1134 The REG-rtx's for the pseudos are modified in place,
1135 so all insns that used to refer to them now refer to memory.
1137 For a reg that has a reg_equiv_address, all those insns
1138 were changed by reloading so that no insns refer to it any longer;
1139 but the DECL_RTL of a variable decl may refer to it,
1140 and if so this causes the debugging info to mention the variable. */
1142 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1146 if (reg_equiv_mem[i])
1147 addr = XEXP (reg_equiv_mem[i], 0);
1149 if (reg_equiv_address[i])
1150 addr = reg_equiv_address[i];
1154 if (reg_renumber[i] < 0)
1156 rtx reg = regno_reg_rtx[i];
1158 REG_USERVAR_P (reg) = 0;
1159 PUT_CODE (reg, MEM);
1160 XEXP (reg, 0) = addr;
1161 if (reg_equiv_memory_loc[i])
1162 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1165 RTX_UNCHANGING_P (reg) = MEM_IN_STRUCT_P (reg)
1166 = MEM_SCALAR_P (reg) = 0;
1167 MEM_ATTRS (reg) = 0;
1170 else if (reg_equiv_mem[i])
1171 XEXP (reg_equiv_mem[i], 0) = addr;
1175 /* We must set reload_completed now since the cleanup_subreg_operands call
1176 below will re-recognize each insn and reload may have generated insns
1177 which are only valid during and after reload. */
1178 reload_completed = 1;
1180 /* Make a pass over all the insns and delete all USEs which we inserted
1181 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1182 notes. Delete all CLOBBER insns, except those that refer to the return
1183 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1184 from misarranging variable-array code, and simplify (subreg (reg))
1185 operands. Also remove all REG_RETVAL and REG_LIBCALL notes since they
1186 are no longer useful or accurate. Strip and regenerate REG_INC notes
1187 that may have been moved around. */
1189 for (insn = first; insn; insn = NEXT_INSN (insn))
1194 if (GET_CODE (insn) == CALL_INSN)
1195 replace_pseudos_in_call_usage (& CALL_INSN_FUNCTION_USAGE (insn),
1197 CALL_INSN_FUNCTION_USAGE (insn));
1199 if ((GET_CODE (PATTERN (insn)) == USE
1200 /* We mark with QImode USEs introduced by reload itself. */
1201 && (GET_MODE (insn) == QImode
1202 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1203 || (GET_CODE (PATTERN (insn)) == CLOBBER
1204 && (GET_CODE (XEXP (PATTERN (insn), 0)) != MEM
1205 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1206 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1207 && XEXP (XEXP (PATTERN (insn), 0), 0)
1208 != stack_pointer_rtx))
1209 && (GET_CODE (XEXP (PATTERN (insn), 0)) != REG
1210 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1216 pnote = &REG_NOTES (insn);
1219 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1220 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1221 || REG_NOTE_KIND (*pnote) == REG_INC
1222 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1223 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1224 *pnote = XEXP (*pnote, 1);
1226 pnote = &XEXP (*pnote, 1);
1230 add_auto_inc_notes (insn, PATTERN (insn));
1233 /* And simplify (subreg (reg)) if it appears as an operand. */
1234 cleanup_subreg_operands (insn);
1237 /* If we are doing stack checking, give a warning if this function's
1238 frame size is larger than we expect. */
1239 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1241 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1242 static int verbose_warned = 0;
1244 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1245 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1246 size += UNITS_PER_WORD;
1248 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1250 warning ("frame size too large for reliable stack checking");
1251 if (! verbose_warned)
1253 warning ("try reducing the number of local variables");
1259 /* Indicate that we no longer have known memory locations or constants. */
1260 if (reg_equiv_constant)
1261 free (reg_equiv_constant);
1262 reg_equiv_constant = 0;
1263 if (reg_equiv_memory_loc)
1264 free (reg_equiv_memory_loc);
1265 reg_equiv_memory_loc = 0;
1267 if (offsets_known_at)
1268 free (offsets_known_at);
1272 free (reg_equiv_mem);
1273 free (reg_equiv_init);
1274 free (reg_equiv_address);
1275 free (reg_max_ref_width);
1276 free (reg_old_renumber);
1277 free (pseudo_previous_regs);
1278 free (pseudo_forbidden_regs);
1280 CLEAR_HARD_REG_SET (used_spill_regs);
1281 for (i = 0; i < n_spills; i++)
1282 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1284 /* Free all the insn_chain structures at once. */
1285 obstack_free (&reload_obstack, reload_startobj);
1286 unused_insn_chains = 0;
1287 fixup_abnormal_edges ();
1289 /* Replacing pseudos with their memory equivalents might have
1290 created shared rtx. Subsequent passes would get confused
1291 by this, so unshare everything here. */
1292 unshare_all_rtl_again (first);
1297 /* Yet another special case. Unfortunately, reg-stack forces people to
1298 write incorrect clobbers in asm statements. These clobbers must not
1299 cause the register to appear in bad_spill_regs, otherwise we'll call
1300 fatal_insn later. We clear the corresponding regnos in the live
1301 register sets to avoid this.
1302 The whole thing is rather sick, I'm afraid. */
1305 maybe_fix_stack_asms ()
1308 const char *constraints[MAX_RECOG_OPERANDS];
1309 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1310 struct insn_chain *chain;
1312 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1315 HARD_REG_SET clobbered, allowed;
1318 if (! INSN_P (chain->insn)
1319 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1321 pat = PATTERN (chain->insn);
1322 if (GET_CODE (pat) != PARALLEL)
1325 CLEAR_HARD_REG_SET (clobbered);
1326 CLEAR_HARD_REG_SET (allowed);
1328 /* First, make a mask of all stack regs that are clobbered. */
1329 for (i = 0; i < XVECLEN (pat, 0); i++)
1331 rtx t = XVECEXP (pat, 0, i);
1332 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1333 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1336 /* Get the operand values and constraints out of the insn. */
1337 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1338 constraints, operand_mode);
1340 /* For every operand, see what registers are allowed. */
1341 for (i = 0; i < noperands; i++)
1343 const char *p = constraints[i];
1344 /* For every alternative, we compute the class of registers allowed
1345 for reloading in CLS, and merge its contents into the reg set
1347 int cls = (int) NO_REGS;
1353 if (c == '\0' || c == ',' || c == '#')
1355 /* End of one alternative - mark the regs in the current
1356 class, and reset the class. */
1357 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1363 } while (c != '\0' && c != ',');
1371 case '=': case '+': case '*': case '%': case '?': case '!':
1372 case '0': case '1': case '2': case '3': case '4': case 'm':
1373 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1374 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1375 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1380 cls = (int) reg_class_subunion[cls]
1381 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1386 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1390 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1391 cls = (int) reg_class_subunion[cls]
1392 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
1394 cls = (int) reg_class_subunion[cls]
1395 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1397 p += CONSTRAINT_LEN (c, p);
1400 /* Those of the registers which are clobbered, but allowed by the
1401 constraints, must be usable as reload registers. So clear them
1402 out of the life information. */
1403 AND_HARD_REG_SET (allowed, clobbered);
1404 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1405 if (TEST_HARD_REG_BIT (allowed, i))
1407 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1408 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1415 /* Copy the global variables n_reloads and rld into the corresponding elts
1418 copy_reloads (chain)
1419 struct insn_chain *chain;
1421 chain->n_reloads = n_reloads;
1423 = (struct reload *) obstack_alloc (&reload_obstack,
1424 n_reloads * sizeof (struct reload));
1425 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1426 reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1429 /* Walk the chain of insns, and determine for each whether it needs reloads
1430 and/or eliminations. Build the corresponding insns_need_reload list, and
1431 set something_needs_elimination as appropriate. */
1433 calculate_needs_all_insns (global)
1436 struct insn_chain **pprev_reload = &insns_need_reload;
1437 struct insn_chain *chain, *next = 0;
1439 something_needs_elimination = 0;
1441 reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1442 for (chain = reload_insn_chain; chain != 0; chain = next)
1444 rtx insn = chain->insn;
1448 /* Clear out the shortcuts. */
1449 chain->n_reloads = 0;
1450 chain->need_elim = 0;
1451 chain->need_reload = 0;
1452 chain->need_operand_change = 0;
1454 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1455 include REG_LABEL), we need to see what effects this has on the
1456 known offsets at labels. */
1458 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1459 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1460 set_label_offsets (insn, insn, 0);
1464 rtx old_body = PATTERN (insn);
1465 int old_code = INSN_CODE (insn);
1466 rtx old_notes = REG_NOTES (insn);
1467 int did_elimination = 0;
1468 int operands_changed = 0;
1469 rtx set = single_set (insn);
1471 /* Skip insns that only set an equivalence. */
1472 if (set && GET_CODE (SET_DEST (set)) == REG
1473 && reg_renumber[REGNO (SET_DEST (set))] < 0
1474 && reg_equiv_constant[REGNO (SET_DEST (set))])
1477 /* If needed, eliminate any eliminable registers. */
1478 if (num_eliminable || num_eliminable_invariants)
1479 did_elimination = eliminate_regs_in_insn (insn, 0);
1481 /* Analyze the instruction. */
1482 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1483 global, spill_reg_order);
1485 /* If a no-op set needs more than one reload, this is likely
1486 to be something that needs input address reloads. We
1487 can't get rid of this cleanly later, and it is of no use
1488 anyway, so discard it now.
1489 We only do this when expensive_optimizations is enabled,
1490 since this complements reload inheritance / output
1491 reload deletion, and it can make debugging harder. */
1492 if (flag_expensive_optimizations && n_reloads > 1)
1494 rtx set = single_set (insn);
1496 && SET_SRC (set) == SET_DEST (set)
1497 && GET_CODE (SET_SRC (set)) == REG
1498 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1501 /* Delete it from the reload chain. */
1503 chain->prev->next = next;
1505 reload_insn_chain = next;
1507 next->prev = chain->prev;
1508 chain->next = unused_insn_chains;
1509 unused_insn_chains = chain;
1514 update_eliminable_offsets ();
1516 /* Remember for later shortcuts which insns had any reloads or
1517 register eliminations. */
1518 chain->need_elim = did_elimination;
1519 chain->need_reload = n_reloads > 0;
1520 chain->need_operand_change = operands_changed;
1522 /* Discard any register replacements done. */
1523 if (did_elimination)
1525 obstack_free (&reload_obstack, reload_insn_firstobj);
1526 PATTERN (insn) = old_body;
1527 INSN_CODE (insn) = old_code;
1528 REG_NOTES (insn) = old_notes;
1529 something_needs_elimination = 1;
1532 something_needs_operands_changed |= operands_changed;
1536 copy_reloads (chain);
1537 *pprev_reload = chain;
1538 pprev_reload = &chain->next_need_reload;
1545 /* Comparison function for qsort to decide which of two reloads
1546 should be handled first. *P1 and *P2 are the reload numbers. */
1549 reload_reg_class_lower (r1p, r2p)
1553 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1556 /* Consider required reloads before optional ones. */
1557 t = rld[r1].optional - rld[r2].optional;
1561 /* Count all solitary classes before non-solitary ones. */
1562 t = ((reg_class_size[(int) rld[r2].class] == 1)
1563 - (reg_class_size[(int) rld[r1].class] == 1));
1567 /* Aside from solitaires, consider all multi-reg groups first. */
1568 t = rld[r2].nregs - rld[r1].nregs;
1572 /* Consider reloads in order of increasing reg-class number. */
1573 t = (int) rld[r1].class - (int) rld[r2].class;
1577 /* If reloads are equally urgent, sort by reload number,
1578 so that the results of qsort leave nothing to chance. */
1582 /* The cost of spilling each hard reg. */
1583 static int spill_cost[FIRST_PSEUDO_REGISTER];
1585 /* When spilling multiple hard registers, we use SPILL_COST for the first
1586 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1587 only the first hard reg for a multi-reg pseudo. */
1588 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1590 /* Update the spill cost arrays, considering that pseudo REG is live. */
1596 int freq = REG_FREQ (reg);
1597 int r = reg_renumber[reg];
1600 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1601 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1604 SET_REGNO_REG_SET (&pseudos_counted, reg);
1609 spill_add_cost[r] += freq;
1611 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1613 spill_cost[r + nregs] += freq;
1616 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1617 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1620 order_regs_for_reload (chain)
1621 struct insn_chain *chain;
1624 HARD_REG_SET used_by_pseudos;
1625 HARD_REG_SET used_by_pseudos2;
1627 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1629 memset (spill_cost, 0, sizeof spill_cost);
1630 memset (spill_add_cost, 0, sizeof spill_add_cost);
1632 /* Count number of uses of each hard reg by pseudo regs allocated to it
1633 and then order them by decreasing use. First exclude hard registers
1634 that are live in or across this insn. */
1636 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1637 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1638 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1639 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1641 /* Now find out which pseudos are allocated to it, and update
1643 CLEAR_REG_SET (&pseudos_counted);
1645 EXECUTE_IF_SET_IN_REG_SET
1646 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
1650 EXECUTE_IF_SET_IN_REG_SET
1651 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
1655 CLEAR_REG_SET (&pseudos_counted);
1658 /* Vector of reload-numbers showing the order in which the reloads should
1660 static short reload_order[MAX_RELOADS];
1662 /* This is used to keep track of the spill regs used in one insn. */
1663 static HARD_REG_SET used_spill_regs_local;
1665 /* We decided to spill hard register SPILLED, which has a size of
1666 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1667 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1668 update SPILL_COST/SPILL_ADD_COST. */
1671 count_spilled_pseudo (spilled, spilled_nregs, reg)
1672 int spilled, spilled_nregs, reg;
1674 int r = reg_renumber[reg];
1675 int nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1677 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1678 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1681 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1683 spill_add_cost[r] -= REG_FREQ (reg);
1685 spill_cost[r + nregs] -= REG_FREQ (reg);
1688 /* Find reload register to use for reload number ORDER. */
1691 find_reg (chain, order)
1692 struct insn_chain *chain;
1695 int rnum = reload_order[order];
1696 struct reload *rl = rld + rnum;
1697 int best_cost = INT_MAX;
1701 HARD_REG_SET not_usable;
1702 HARD_REG_SET used_by_other_reload;
1704 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1705 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1706 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1708 CLEAR_HARD_REG_SET (used_by_other_reload);
1709 for (k = 0; k < order; k++)
1711 int other = reload_order[k];
1713 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1714 for (j = 0; j < rld[other].nregs; j++)
1715 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1718 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1720 unsigned int regno = i;
1722 if (! TEST_HARD_REG_BIT (not_usable, regno)
1723 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1724 && HARD_REGNO_MODE_OK (regno, rl->mode))
1726 int this_cost = spill_cost[regno];
1728 unsigned int this_nregs = HARD_REGNO_NREGS (regno, rl->mode);
1730 for (j = 1; j < this_nregs; j++)
1732 this_cost += spill_add_cost[regno + j];
1733 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1734 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1739 if (rl->in && GET_CODE (rl->in) == REG && REGNO (rl->in) == regno)
1741 if (rl->out && GET_CODE (rl->out) == REG && REGNO (rl->out) == regno)
1743 if (this_cost < best_cost
1744 /* Among registers with equal cost, prefer caller-saved ones, or
1745 use REG_ALLOC_ORDER if it is defined. */
1746 || (this_cost == best_cost
1747 #ifdef REG_ALLOC_ORDER
1748 && (inv_reg_alloc_order[regno]
1749 < inv_reg_alloc_order[best_reg])
1751 && call_used_regs[regno]
1752 && ! call_used_regs[best_reg]
1757 best_cost = this_cost;
1765 fprintf (rtl_dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1767 rl->nregs = HARD_REGNO_NREGS (best_reg, rl->mode);
1768 rl->regno = best_reg;
1770 EXECUTE_IF_SET_IN_REG_SET
1771 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j,
1773 count_spilled_pseudo (best_reg, rl->nregs, j);
1776 EXECUTE_IF_SET_IN_REG_SET
1777 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j,
1779 count_spilled_pseudo (best_reg, rl->nregs, j);
1782 for (i = 0; i < rl->nregs; i++)
1784 if (spill_cost[best_reg + i] != 0
1785 || spill_add_cost[best_reg + i] != 0)
1787 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1792 /* Find more reload regs to satisfy the remaining need of an insn, which
1794 Do it by ascending class number, since otherwise a reg
1795 might be spilled for a big class and might fail to count
1796 for a smaller class even though it belongs to that class. */
1799 find_reload_regs (chain)
1800 struct insn_chain *chain;
1804 /* In order to be certain of getting the registers we need,
1805 we must sort the reloads into order of increasing register class.
1806 Then our grabbing of reload registers will parallel the process
1807 that provided the reload registers. */
1808 for (i = 0; i < chain->n_reloads; i++)
1810 /* Show whether this reload already has a hard reg. */
1811 if (chain->rld[i].reg_rtx)
1813 int regno = REGNO (chain->rld[i].reg_rtx);
1814 chain->rld[i].regno = regno;
1816 = HARD_REGNO_NREGS (regno, GET_MODE (chain->rld[i].reg_rtx));
1819 chain->rld[i].regno = -1;
1820 reload_order[i] = i;
1823 n_reloads = chain->n_reloads;
1824 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1826 CLEAR_HARD_REG_SET (used_spill_regs_local);
1829 fprintf (rtl_dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1831 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1833 /* Compute the order of preference for hard registers to spill. */
1835 order_regs_for_reload (chain);
1837 for (i = 0; i < n_reloads; i++)
1839 int r = reload_order[i];
1841 /* Ignore reloads that got marked inoperative. */
1842 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1843 && ! rld[r].optional
1844 && rld[r].regno == -1)
1845 if (! find_reg (chain, i))
1847 spill_failure (chain->insn, rld[r].class);
1853 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1854 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1856 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1860 select_reload_regs ()
1862 struct insn_chain *chain;
1864 /* Try to satisfy the needs for each insn. */
1865 for (chain = insns_need_reload; chain != 0;
1866 chain = chain->next_need_reload)
1867 find_reload_regs (chain);
1870 /* Delete all insns that were inserted by emit_caller_save_insns during
1873 delete_caller_save_insns ()
1875 struct insn_chain *c = reload_insn_chain;
1879 while (c != 0 && c->is_caller_save_insn)
1881 struct insn_chain *next = c->next;
1884 if (c == reload_insn_chain)
1885 reload_insn_chain = next;
1889 next->prev = c->prev;
1891 c->prev->next = next;
1892 c->next = unused_insn_chains;
1893 unused_insn_chains = c;
1901 /* Handle the failure to find a register to spill.
1902 INSN should be one of the insns which needed this particular spill reg. */
1905 spill_failure (insn, class)
1907 enum reg_class class;
1909 static const char *const reg_class_names[] = REG_CLASS_NAMES;
1910 if (asm_noperands (PATTERN (insn)) >= 0)
1911 error_for_asm (insn, "can't find a register in class `%s' while reloading `asm'",
1912 reg_class_names[class]);
1915 error ("unable to find a register to spill in class `%s'",
1916 reg_class_names[class]);
1917 fatal_insn ("this is the insn:", insn);
1921 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
1922 data that is dead in INSN. */
1925 delete_dead_insn (insn)
1928 rtx prev = prev_real_insn (insn);
1931 /* If the previous insn sets a register that dies in our insn, delete it
1933 if (prev && GET_CODE (PATTERN (prev)) == SET
1934 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
1935 && reg_mentioned_p (prev_dest, PATTERN (insn))
1936 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1937 && ! side_effects_p (SET_SRC (PATTERN (prev))))
1938 delete_dead_insn (prev);
1940 PUT_CODE (insn, NOTE);
1941 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1942 NOTE_SOURCE_FILE (insn) = 0;
1945 /* Modify the home of pseudo-reg I.
1946 The new home is present in reg_renumber[I].
1948 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1949 or it may be -1, meaning there is none or it is not relevant.
1950 This is used so that all pseudos spilled from a given hard reg
1951 can share one stack slot. */
1954 alter_reg (i, from_reg)
1958 /* When outputting an inline function, this can happen
1959 for a reg that isn't actually used. */
1960 if (regno_reg_rtx[i] == 0)
1963 /* If the reg got changed to a MEM at rtl-generation time,
1965 if (GET_CODE (regno_reg_rtx[i]) != REG)
1968 /* Modify the reg-rtx to contain the new hard reg
1969 number or else to contain its pseudo reg number. */
1970 REGNO (regno_reg_rtx[i])
1971 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1973 /* If we have a pseudo that is needed but has no hard reg or equivalent,
1974 allocate a stack slot for it. */
1976 if (reg_renumber[i] < 0
1977 && REG_N_REFS (i) > 0
1978 && reg_equiv_constant[i] == 0
1979 && reg_equiv_memory_loc[i] == 0)
1982 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
1983 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
1986 /* Each pseudo reg has an inherent size which comes from its own mode,
1987 and a total size which provides room for paradoxical subregs
1988 which refer to the pseudo reg in wider modes.
1990 We can use a slot already allocated if it provides both
1991 enough inherent space and enough total space.
1992 Otherwise, we allocate a new slot, making sure that it has no less
1993 inherent space, and no less total space, then the previous slot. */
1996 /* No known place to spill from => no slot to reuse. */
1997 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
1998 inherent_size == total_size ? 0 : -1);
1999 if (BYTES_BIG_ENDIAN)
2000 /* Cancel the big-endian correction done in assign_stack_local.
2001 Get the address of the beginning of the slot.
2002 This is so we can do a big-endian correction unconditionally
2004 adjust = inherent_size - total_size;
2006 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2008 /* Nothing can alias this slot except this pseudo. */
2009 set_mem_alias_set (x, new_alias_set ());
2012 /* Reuse a stack slot if possible. */
2013 else if (spill_stack_slot[from_reg] != 0
2014 && spill_stack_slot_width[from_reg] >= total_size
2015 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2017 x = spill_stack_slot[from_reg];
2019 /* Allocate a bigger slot. */
2022 /* Compute maximum size needed, both for inherent size
2023 and for total size. */
2024 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2027 if (spill_stack_slot[from_reg])
2029 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2031 mode = GET_MODE (spill_stack_slot[from_reg]);
2032 if (spill_stack_slot_width[from_reg] > total_size)
2033 total_size = spill_stack_slot_width[from_reg];
2036 /* Make a slot with that size. */
2037 x = assign_stack_local (mode, total_size,
2038 inherent_size == total_size ? 0 : -1);
2041 /* All pseudos mapped to this slot can alias each other. */
2042 if (spill_stack_slot[from_reg])
2043 set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
2045 set_mem_alias_set (x, new_alias_set ());
2047 if (BYTES_BIG_ENDIAN)
2049 /* Cancel the big-endian correction done in assign_stack_local.
2050 Get the address of the beginning of the slot.
2051 This is so we can do a big-endian correction unconditionally
2053 adjust = GET_MODE_SIZE (mode) - total_size;
2056 = adjust_address_nv (x, mode_for_size (total_size
2062 spill_stack_slot[from_reg] = stack_slot;
2063 spill_stack_slot_width[from_reg] = total_size;
2066 /* On a big endian machine, the "address" of the slot
2067 is the address of the low part that fits its inherent mode. */
2068 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2069 adjust += (total_size - inherent_size);
2071 /* If we have any adjustment to make, or if the stack slot is the
2072 wrong mode, make a new stack slot. */
2073 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2075 /* If we have a decl for the original register, set it for the
2076 memory. If this is a shared MEM, make a copy. */
2077 if (REG_EXPR (regno_reg_rtx[i])
2078 && TREE_CODE_CLASS (TREE_CODE (REG_EXPR (regno_reg_rtx[i]))) == 'd')
2080 rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
2082 /* We can do this only for the DECLs home pseudo, not for
2083 any copies of it, since otherwise when the stack slot
2084 is reused, nonoverlapping_memrefs_p might think they
2086 if (decl && GET_CODE (decl) == REG && REGNO (decl) == (unsigned) i)
2088 if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2091 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
2095 /* Save the stack slot for later. */
2096 reg_equiv_memory_loc[i] = x;
/* NOTE(review): this listing is a gapped extraction -- the leading numbers
   are original-file line numbers, and several lines of this function
   (parameter declaration, braces, the `while' loop header, early return)
   are missing from view.  Only comments are added here; visible code is
   untouched.  */
2100 /* Mark the slots in regs_ever_live for the hard regs
2101 used by pseudo-reg number REGNO. */
2104 mark_home_live (regno)
/* Look up the hard reg assigned to this pseudo by register allocation.  */
2109 i = reg_renumber[regno];
/* Presumably guarded by an (elided) `if (i < 0) return;' for pseudos with
   no hard reg -- TODO confirm against the original source.  */
2112 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2114 regs_ever_live[i++] = 1;
/* NOTE(review): gapped extraction -- leading numbers are original-file line
   numbers; many lines (case labels, braces, parameter declarations) are
   elided.  Comments only are added below; visible code is untouched.  */
2117 /* This function handles the tracking of elimination offsets around branches.
2119 X is a piece of RTL being scanned.
2121 INSN is the insn that it came from, if any.
2123 INITIAL_P is nonzero if we are to set the offset to be the initial
2124 offset and zero if we are setting the offset of the label to be the
2128 set_label_offsets (x, insn, initial_p)
2133 enum rtx_code code = GET_CODE (x);
2136 struct elim_table *p;
/* Elided switch on CODE follows; the first visible arm handles LABEL_REF.  */
2141 if (LABEL_REF_NONLOCAL_P (x))
2146 /* ... fall through ... */
2149 /* If we know nothing about this label, set the desired offsets. Note
2150 that this sets the offset at a label to be the offset before a label
2151 if we don't know anything about the label. This is not correct for
2152 the label after a BARRIER, but is the best guess we can make. If
2153 we guessed wrong, we will suppress an elimination that might have
2154 been possible had we been able to guess correctly. */
2156 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2158 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2159 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2160 = (initial_p ? reg_eliminate[i].initial_offset
2161 : reg_eliminate[i].offset)
2162 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2165 /* Otherwise, if this is the definition of a label and it is
2166 preceded by a BARRIER, set our offsets to the known offset of
2170 && (tem = prev_nonnote_insn (insn)) != 0
2171 && GET_CODE (tem) == BARRIER)
2172 set_offsets_for_label (insn);
2174 /* If neither of the above cases is true, compare each offset
2175 with those previously recorded and suppress any eliminations
2176 where the offsets disagree. */
2178 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2179 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2180 != (initial_p ? reg_eliminate[i].initial_offset
2181 : reg_eliminate[i].offset))
2182 reg_eliminate[i].can_eliminate = 0;
/* Recurse into the pattern of an insn (elided case label, likely INSN /
   JUMP_INSN -- TODO confirm).  */
2187 set_label_offsets (PATTERN (insn), insn, initial_p);
2189 /* ... fall through ... */
2193 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2194 and hence must have all eliminations at their initial offsets. */
2195 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2196 if (REG_NOTE_KIND (tem) == REG_LABEL)
2197 set_label_offsets (XEXP (tem, 0), insn, 1);
2203 /* Each of the labels in the parallel or address vector must be
2204 at their initial offsets. We want the first field for PARALLEL
2205 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2207 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2208 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2213 /* We only care about setting PC. If the source is not RETURN,
2214 IF_THEN_ELSE, or a label, disable any eliminations not at
2215 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2216 isn't one of those possibilities. For branches to a label,
2217 call ourselves recursively.
2219 Note that this can disable elimination unnecessarily when we have
2220 a non-local goto since it will look like a non-constant jump to
2221 someplace in the current function. This isn't a significant
2222 problem since such jumps will normally be when all elimination
2223 pairs are back to their initial offsets. */
2225 if (SET_DEST (x) != pc_rtx)
2228 switch (GET_CODE (SET_SRC (x)))
/* LABEL_REF arm (elided case label): recurse on the referenced label.  */
2235 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
/* IF_THEN_ELSE arm: check both the `then' (operand 1) and `else'
   (operand 2) arms for label refs.  */
2239 tem = XEXP (SET_SRC (x), 1);
2240 if (GET_CODE (tem) == LABEL_REF)
2241 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2242 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2245 tem = XEXP (SET_SRC (x), 2);
2246 if (GET_CODE (tem) == LABEL_REF)
2247 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2248 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2256 /* If we reach here, all eliminations must be at their initial
2257 offset because we are doing a jump to a variable address. */
/* NOTE(review): "®_eliminate" below looks like a mis-encoding of
   "&reg_eliminate" ("&re" garbled into U+00AE).  The same corruption
   recurs throughout this file -- confirm against the original source.  */
2258 for (p = reg_eliminate; p < ®_eliminate[NUM_ELIMINABLE_REGS]; p++)
2259 if (p->offset != p->initial_offset)
2260 p->can_eliminate = 0;
/* NOTE(review): gapped extraction -- leading numbers are original-file line
   numbers; many lines (case labels, braces, declarations, `return x;'
   statements) are elided.  Comments only are added; visible code is
   untouched.  Occurrences of "®_eliminate" appear to be a mis-encoding of
   "&reg_eliminate" -- confirm against the original source.  */
2268 /* Scan X and replace any eliminable registers (such as fp) with a
2269 replacement (such as sp), plus an offset.
2271 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2272 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2273 MEM, we are allowed to replace a sum of a register and the constant zero
2274 with the register, which we cannot do outside a MEM. In addition, we need
2275 to record the fact that a register is referenced outside a MEM.
2277 If INSN is an insn, it is the insn containing X. If we replace a REG
2278 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2279 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2280 the REG is being modified.
2282 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2283 That's used when we eliminate in expressions stored in notes.
2284 This means, do not set ref_outside_mem even if the reference
2287 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2288 replacements done assuming all offsets are at their initial values. If
2289 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2290 encounter, return the actual location so that find_reloads will do
2291 the proper thing. */
2294 eliminate_regs (x, mem_mode, insn)
2296 enum machine_mode mem_mode;
2299 enum rtx_code code = GET_CODE (x);
2300 struct elim_table *ep;
/* Bail out early when called outside a function context (e.g. from the
   debug back ends on global DECL_RTL).  */
2307 if (! current_function_decl)
/* ADDRESSOF arm (elided case label):  */
2327 /* This is only for the benefit of the debugging backends, which call
2328 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2329 removed after CSE. */
2330 new = eliminate_regs (XEXP (x, 0), 0, insn);
2331 if (GET_CODE (new) == MEM)
2332 return XEXP (new, 0);
/* REG arm (elided case label):  */
2338 /* First handle the case where we encounter a bare register that
2339 is eliminable. Replace it with a PLUS. */
2340 if (regno < FIRST_PSEUDO_REGISTER)
2342 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2344 if (ep->from_rtx == x && ep->can_eliminate)
2345 return plus_constant (ep->to_rtx, ep->previous_offset);
2348 else if (reg_renumber && reg_renumber[regno] < 0
2349 && reg_equiv_constant && reg_equiv_constant[regno]
2350 && ! CONSTANT_P (reg_equiv_constant[regno]))
2351 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2355 /* You might think handling MINUS in a manner similar to PLUS is a
2356 good idea. It is not. It has been tried multiple times and every
2357 time the change has had to have been reverted.
2359 Other parts of reload know a PLUS is special (gen_reload for example)
2360 and require special code to handle code a reloaded PLUS operand.
2362 Also consider backends where the flags register is clobbered by a
2363 MINUS, but we can emit a PLUS that does not clobber flags (ia32,
2364 lea instruction comes to mind). If we try to reload a MINUS, we
2365 may kill the flags register that was holding a useful value.
2367 So, please before trying to handle MINUS, consider reload as a
2368 whole instead of this little section as well as the backend issues. */
/* PLUS arm (elided case label):  */
2370 /* If this is the sum of an eliminable register and a constant, rework
2372 if (GET_CODE (XEXP (x, 0)) == REG
2373 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2374 && CONSTANT_P (XEXP (x, 1)))
2376 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2378 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2380 /* The only time we want to replace a PLUS with a REG (this
2381 occurs when the constant operand of the PLUS is the negative
2382 of the offset) is when we are inside a MEM. We won't want
2383 to do so at other times because that would change the
2384 structure of the insn in a way that reload can't handle.
2385 We special-case the commonest situation in
2386 eliminate_regs_in_insn, so just replace a PLUS with a
2387 PLUS here, unless inside a MEM. */
2388 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2389 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2392 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2393 plus_constant (XEXP (x, 1),
2394 ep->previous_offset));
2397 /* If the register is not eliminable, we are done since the other
2398 operand is a constant. */
2402 /* If this is part of an address, we want to bring any constant to the
2403 outermost PLUS. We will do this by doing register replacement in
2404 our operands and seeing if a constant shows up in one of them.
2406 Note that there is no risk of modifying the structure of the insn,
2407 since we only get called for its operands, thus we are either
2408 modifying the address inside a MEM, or something like an address
2409 operand of a load-address insn. */
2412 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2413 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2415 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2417 /* If one side is a PLUS and the other side is a pseudo that
2418 didn't get a hard register but has a reg_equiv_constant,
2419 we must replace the constant here since it may no longer
2420 be in the position of any operand. */
2421 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2422 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2423 && reg_renumber[REGNO (new1)] < 0
2424 && reg_equiv_constant != 0
2425 && reg_equiv_constant[REGNO (new1)] != 0)
2426 new1 = reg_equiv_constant[REGNO (new1)];
2427 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2428 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2429 && reg_renumber[REGNO (new0)] < 0
2430 && reg_equiv_constant[REGNO (new0)] != 0)
2431 new0 = reg_equiv_constant[REGNO (new0)];
2433 new = form_sum (new0, new1);
2435 /* As above, if we are not inside a MEM we do not want to
2436 turn a PLUS into something else. We might try to do so here
2437 for an addition of 0 if we aren't optimizing. */
2438 if (! mem_mode && GET_CODE (new) != PLUS)
2439 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
/* MULT arm (elided case label):  */
2447 /* If this is the product of an eliminable register and a
2448 constant, apply the distribute law and move the constant out
2449 so that we have (plus (mult ..) ..). This is needed in order
2450 to keep load-address insns valid. This case is pathological.
2451 We ignore the possibility of overflow here. */
2452 if (GET_CODE (XEXP (x, 0)) == REG
2453 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2454 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2455 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2457 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2460 /* Refs inside notes don't count for this purpose. */
2461 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2462 || GET_CODE (insn) == INSN_LIST)))
2463 ep->ref_outside_mem = 1;
2466 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2467 ep->previous_offset * INTVAL (XEXP (x, 1)));
2470 /* ... fall through ... */
2474 /* See comments before PLUS about handling MINUS. */
2476 case DIV: case UDIV:
2477 case MOD: case UMOD:
2478 case AND: case IOR: case XOR:
2479 case ROTATERT: case ROTATE:
2480 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2482 case GE: case GT: case GEU: case GTU:
2483 case LE: case LT: case LEU: case LTU:
/* Generic binary/comparison case: eliminate in both operands and rebuild
   only if something changed.  */
2485 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2487 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2489 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2490 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
/* EXPR_LIST arm (elided case label):  */
2495 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2498 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2499 if (new != XEXP (x, 0))
2501 /* If this is a REG_DEAD note, it is not valid anymore.
2502 Using the eliminated version could result in creating a
2503 REG_DEAD note for the stack or frame pointer. */
2504 if (GET_MODE (x) == REG_DEAD)
2506 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2509 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2513 /* ... fall through ... */
2516 /* Now do eliminations in the rest of the chain. If this was
2517 an EXPR_LIST, this might result in allocating more memory than is
2518 strictly needed, but it simplifies the code. */
2521 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2522 if (new != XEXP (x, 1))
2524 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2532 case STRICT_LOW_PART:
2534 case SIGN_EXTEND: case ZERO_EXTEND:
2535 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2536 case FLOAT: case FIX:
2537 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
/* Generic unary case: eliminate in the single operand.  */
2545 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2546 if (new != XEXP (x, 0))
2547 return gen_rtx_fmt_e (code, GET_MODE (x), new);
/* SUBREG arm (elided case label):  */
2551 /* Similar to above processing, but preserve SUBREG_BYTE.
2552 Convert (subreg (mem)) to (mem) if not paradoxical.
2553 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2554 pseudo didn't get a hard reg, we must replace this with the
2555 eliminated version of the memory location because push_reloads
2556 may do the replacement in certain circumstances. */
2557 if (GET_CODE (SUBREG_REG (x)) == REG
2558 && (GET_MODE_SIZE (GET_MODE (x))
2559 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2560 && reg_equiv_memory_loc != 0
2561 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2563 new = SUBREG_REG (x);
2566 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2568 if (new != SUBREG_REG (x))
2570 int x_size = GET_MODE_SIZE (GET_MODE (x));
2571 int new_size = GET_MODE_SIZE (GET_MODE (new));
2573 if (GET_CODE (new) == MEM
2574 && ((x_size < new_size
2575 #ifdef WORD_REGISTER_OPERATIONS
2576 /* On these machines, combine can create rtl of the form
2577 (set (subreg:m1 (reg:m2 R) 0) ...)
2578 where m1 < m2, and expects something interesting to
2579 happen to the entire word. Moreover, it will use the
2580 (reg:m2 R) later, expecting all bits to be preserved.
2581 So if the number of words is the same, preserve the
2582 subreg so that push_reloads can see it. */
2583 && ! ((x_size - 1) / UNITS_PER_WORD
2584 == (new_size -1 ) / UNITS_PER_WORD)
2587 || x_size == new_size)
2589 return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
2591 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
/* MEM arm (elided case label):  */
2597 /* This is only for the benefit of the debugging backends, which call
2598 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2599 removed after CSE. */
2600 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2601 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
2603 /* Our only special processing is to pass the mode of the MEM to our
2604 recursive call and copy the flags. While we are here, handle this
2605 case more efficiently. */
2607 replace_equiv_address_nv (x,
2608 eliminate_regs (XEXP (x, 0),
2609 GET_MODE (x), insn));
/* USE arm (elided case label):  */
2612 /* Handle insn_list USE that a call to a pure function may generate. */
2613 new = eliminate_regs (XEXP (x, 0), 0, insn);
2614 if (new != XEXP (x, 0))
2615 return gen_rtx_USE (GET_MODE (x), new);
/* Default: structural recursion over the rtx format string, copying X
   lazily the first time an operand changes.  */
2627 /* Process each of our operands recursively. If any have changed, make a
2629 fmt = GET_RTX_FORMAT (code);
2630 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2634 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
2635 if (new != XEXP (x, i) && ! copied)
2637 rtx new_x = rtx_alloc (code);
2639 (sizeof (*new_x) - sizeof (new_x->fld)
2640 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
2646 else if (*fmt == 'E')
2649 for (j = 0; j < XVECLEN (x, i); j++)
2651 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2652 if (new != XVECEXP (x, i, j) && ! copied_vec)
2654 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2658 rtx new_x = rtx_alloc (code);
2660 (sizeof (*new_x) - sizeof (new_x->fld)
2661 + (sizeof (new_x->fld[0])
2662 * GET_RTX_LENGTH (code))));
2666 XVEC (x, i) = new_v;
2669 XVECEXP (x, i, j) = new;
/* NOTE(review): gapped extraction -- leading numbers are original-file line
   numbers; case labels, braces and declarations are elided.  Comments only
   are added; visible code is untouched.  "®_eliminate" appears to be a
   mis-encoding of "&reg_eliminate" -- confirm against the original.  */
2677 /* Scan rtx X for modifications of elimination target registers. Update
2678 the table of eliminables to reflect the changed state. MEM_MODE is
2679 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2682 elimination_effects (x, mem_mode)
2684 enum machine_mode mem_mode;
2687 enum rtx_code code = GET_CODE (x);
2688 struct elim_table *ep;
/* REG arm (elided case label):  */
2715 /* First handle the case where we encounter a bare register that
2716 is eliminable. Replace it with a PLUS. */
2717 if (regno < FIRST_PSEUDO_REGISTER)
2719 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2721 if (ep->from_rtx == x && ep->can_eliminate)
2724 ep->ref_outside_mem = 1;
2729 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2730 && reg_equiv_constant[regno]
2731 && ! function_invariant_p (reg_equiv_constant[regno]))
2732 elimination_effects (reg_equiv_constant[regno], mem_mode);
/* Auto-inc/dec arms (elided case labels, PRE_INC .. POST_MODIFY):
   adjust the elimination offset by the size pushed/popped.  */
2741 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2742 if (ep->to_rtx == XEXP (x, 0))
2744 int size = GET_MODE_SIZE (mem_mode);
2746 /* If more bytes than MEM_MODE are pushed, account for them. */
2747 #ifdef PUSH_ROUNDING
2748 if (ep->to_rtx == stack_pointer_rtx)
2749 size = PUSH_ROUNDING (size);
2751 if (code == PRE_DEC || code == POST_DEC)
2753 else if (code == PRE_INC || code == POST_INC)
2755 else if ((code == PRE_MODIFY || code == POST_MODIFY)
2756 && GET_CODE (XEXP (x, 1)) == PLUS
2757 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2758 && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2759 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2762 /* These two aren't unary operators. */
2763 if (code == POST_MODIFY || code == PRE_MODIFY)
2766 /* Fall through to generic unary operation case. */
2767 case STRICT_LOW_PART:
2769 case SIGN_EXTEND: case ZERO_EXTEND:
2770 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2771 case FLOAT: case FIX:
2772 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2780 elimination_effects (XEXP (x, 0), mem_mode);
/* SUBREG arm (elided case label):  */
2784 if (GET_CODE (SUBREG_REG (x)) == REG
2785 && (GET_MODE_SIZE (GET_MODE (x))
2786 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2787 && reg_equiv_memory_loc != 0
2788 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2791 elimination_effects (SUBREG_REG (x), mem_mode);
/* USE arm (elided case label):  */
2795 /* If using a register that is the source of an eliminate we still
2796 think can be performed, note it cannot be performed since we don't
2797 know how this register is used. */
2798 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2799 if (ep->from_rtx == XEXP (x, 0))
2800 ep->can_eliminate = 0;
2802 elimination_effects (XEXP (x, 0), mem_mode);
/* CLOBBER arm (elided case label):  */
2806 /* If clobbering a register that is the replacement register for an
2807 elimination we still think can be performed, note that it cannot
2808 be performed. Otherwise, we need not be concerned about it. */
2809 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2810 if (ep->to_rtx == XEXP (x, 0))
2811 ep->can_eliminate = 0;
2813 elimination_effects (XEXP (x, 0), mem_mode);
/* SET arm (elided case label):  */
2817 /* Check for setting a register that we know about. */
2818 if (GET_CODE (SET_DEST (x)) == REG)
2820 /* See if this is setting the replacement register for an
2823 If DEST is the hard frame pointer, we do nothing because we
2824 assume that all assignments to the frame pointer are for
2825 non-local gotos and are being done at a time when they are valid
2826 and do not disturb anything else. Some machines want to
2827 eliminate a fake argument pointer (or even a fake frame pointer)
2828 with either the real frame or the stack pointer. Assignments to
2829 the hard frame pointer must not prevent this elimination. */
2831 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2833 if (ep->to_rtx == SET_DEST (x)
2834 && SET_DEST (x) != hard_frame_pointer_rtx)
2836 /* If it is being incremented, adjust the offset. Otherwise,
2837 this elimination can't be done. */
2838 rtx src = SET_SRC (x);
2840 if (GET_CODE (src) == PLUS
2841 && XEXP (src, 0) == SET_DEST (x)
2842 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2843 ep->offset -= INTVAL (XEXP (src, 1));
2845 ep->can_eliminate = 0;
2849 elimination_effects (SET_DEST (x), 0);
2850 elimination_effects (SET_SRC (x), 0);
/* MEM arm (elided case label):  */
2854 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2857 /* Our only special processing is to pass the mode of the MEM to our
2859 elimination_effects (XEXP (x, 0), GET_MODE (x));
/* Default: structural recursion over operands via the rtx format string.  */
2866 fmt = GET_RTX_FORMAT (code);
2867 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2870 elimination_effects (XEXP (x, i), mem_mode);
2871 else if (*fmt == 'E')
2872 for (j = 0; j < XVECLEN (x, i); j++)
2873 elimination_effects (XVECEXP (x, i, j), mem_mode);
/* NOTE(review): gapped extraction -- leading numbers are original-file line
   numbers; some lines (parameter declaration, braces, early return) are
   elided.  Comments only are added; visible code is untouched.  */
2877 /* Descend through rtx X and verify that no references to eliminable registers
2878 remain. If any do remain, mark the involved register as not
2882 check_eliminable_occurrences (x)
2892 code = GET_CODE (x);
/* A surviving hard-reg reference to an elimination source means the
   elimination cannot be performed after all.  */
2894 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2896 struct elim_table *ep;
/* NOTE(review): "®_eliminate" looks like a mis-encoding of
   "&reg_eliminate" -- confirm against the original source.  */
2898 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2899 if (ep->from_rtx == x && ep->can_eliminate)
2900 ep->can_eliminate = 0;
/* Recurse over all sub-expressions via the rtx format string.  */
2904 fmt = GET_RTX_FORMAT (code);
2905 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2908 check_eliminable_occurrences (XEXP (x, i));
2909 else if (*fmt == 'E')
2912 for (j = 0; j < XVECLEN (x, i); j++)
2913 check_eliminable_occurrences (XVECEXP (x, i, j));
2918 /* Scan INSN and eliminate all eliminable registers in it.
2920 If REPLACE is nonzero, do the replacement destructively. Also
2921 delete the insn as dead it if it is setting an eliminable register.
2923 If REPLACE is zero, do all our allocations in reload_obstack.
2925 If no eliminations were done and this insn doesn't require any elimination
2926 processing (these are not identical conditions: it might be updating sp,
2927 but not referencing fp; this needs to be seen during reload_as_needed so
2928 that the offset between fp and sp can be taken into consideration), zero
2929 is returned. Otherwise, 1 is returned. */
2932 eliminate_regs_in_insn (insn, replace)
2936 int icode = recog_memoized (insn);
2937 rtx old_body = PATTERN (insn);
2938 int insn_is_asm = asm_noperands (old_body) >= 0;
2939 rtx old_set = single_set (insn);
2943 rtx substed_operand[MAX_RECOG_OPERANDS];
2944 rtx orig_operand[MAX_RECOG_OPERANDS];
2945 struct elim_table *ep;
2947 if (! insn_is_asm && icode < 0)
2949 if (GET_CODE (PATTERN (insn)) == USE
2950 || GET_CODE (PATTERN (insn)) == CLOBBER
2951 || GET_CODE (PATTERN (insn)) == ADDR_VEC
2952 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2953 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
2958 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
2959 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
2961 /* Check for setting an eliminable register. */
2962 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2963 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
2965 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2966 /* If this is setting the frame pointer register to the
2967 hardware frame pointer register and this is an elimination
2968 that will be done (tested above), this insn is really
2969 adjusting the frame pointer downward to compensate for
2970 the adjustment done before a nonlocal goto. */
2971 if (ep->from == FRAME_POINTER_REGNUM
2972 && ep->to == HARD_FRAME_POINTER_REGNUM)
2974 rtx base = SET_SRC (old_set);
2975 rtx base_insn = insn;
2978 while (base != ep->to_rtx)
2980 rtx prev_insn, prev_set;
2982 if (GET_CODE (base) == PLUS
2983 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2985 offset += INTVAL (XEXP (base, 1));
2986 base = XEXP (base, 0);
2988 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
2989 && (prev_set = single_set (prev_insn)) != 0
2990 && rtx_equal_p (SET_DEST (prev_set), base))
2992 base = SET_SRC (prev_set);
2993 base_insn = prev_insn;
2999 if (base == ep->to_rtx)
3002 = plus_constant (ep->to_rtx, offset - ep->offset);
3004 new_body = old_body;
3007 new_body = copy_insn (old_body);
3008 if (REG_NOTES (insn))
3009 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3011 PATTERN (insn) = new_body;
3012 old_set = single_set (insn);
3014 /* First see if this insn remains valid when we
3015 make the change. If not, keep the INSN_CODE
3016 the same and let reload fit it up. */
3017 validate_change (insn, &SET_SRC (old_set), src, 1);
3018 validate_change (insn, &SET_DEST (old_set),
3020 if (! apply_change_group ())
3022 SET_SRC (old_set) = src;
3023 SET_DEST (old_set) = ep->to_rtx;
3032 /* In this case this insn isn't serving a useful purpose. We
3033 will delete it in reload_as_needed once we know that this
3034 elimination is, in fact, being done.
3036 If REPLACE isn't set, we can't delete this insn, but needn't
3037 process it since it won't be used unless something changes. */
3040 delete_dead_insn (insn);
3048 /* We allow one special case which happens to work on all machines we
3049 currently support: a single set with the source being a PLUS of an
3050 eliminable register and a constant. */
3052 && GET_CODE (SET_DEST (old_set)) == REG
3053 && GET_CODE (SET_SRC (old_set)) == PLUS
3054 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3055 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT
3056 && REGNO (XEXP (SET_SRC (old_set), 0)) < FIRST_PSEUDO_REGISTER)
3058 rtx reg = XEXP (SET_SRC (old_set), 0);
3059 int offset = INTVAL (XEXP (SET_SRC (old_set), 1));
3061 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3062 if (ep->from_rtx == reg && ep->can_eliminate)
3064 offset += ep->offset;
3069 /* We assume here that if we need a PARALLEL with
3070 CLOBBERs for this assignment, we can do with the
3071 MATCH_SCRATCHes that add_clobbers allocates.
3072 There's not much we can do if that doesn't work. */
3073 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3077 INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
3080 rtvec vec = rtvec_alloc (num_clobbers + 1);
3082 vec->elem[0] = PATTERN (insn);
3083 PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
3084 add_clobbers (PATTERN (insn), INSN_CODE (insn));
3086 if (INSN_CODE (insn) < 0)
3091 new_body = old_body;
3094 new_body = copy_insn (old_body);
3095 if (REG_NOTES (insn))
3096 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3098 PATTERN (insn) = new_body;
3099 old_set = single_set (insn);
3101 XEXP (SET_SRC (old_set), 0) = ep->to_rtx;
3102 XEXP (SET_SRC (old_set), 1) = GEN_INT (offset);
3105 /* This can't have an effect on elimination offsets, so skip right
3111 /* Determine the effects of this insn on elimination offsets. */
3112 elimination_effects (old_body, 0);
3114 /* Eliminate all eliminable registers occurring in operands that
3115 can be handled by reload. */
3116 extract_insn (insn);
3117 for (i = 0; i < recog_data.n_operands; i++)
3119 orig_operand[i] = recog_data.operand[i];
3120 substed_operand[i] = recog_data.operand[i];
3122 /* For an asm statement, every operand is eliminable. */
3123 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3125 /* Check for setting a register that we know about. */
3126 if (recog_data.operand_type[i] != OP_IN
3127 && GET_CODE (orig_operand[i]) == REG)
3129 /* If we are assigning to a register that can be eliminated, it
3130 must be as part of a PARALLEL, since the code above handles
3131 single SETs. We must indicate that we can no longer
3132 eliminate this reg. */
3133 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3135 if (ep->from_rtx == orig_operand[i] && ep->can_eliminate)
3136 ep->can_eliminate = 0;
3139 substed_operand[i] = eliminate_regs (recog_data.operand[i], 0,
3140 replace ? insn : NULL_RTX);
3141 if (substed_operand[i] != orig_operand[i])
3143 /* Terminate the search in check_eliminable_occurrences at
3145 *recog_data.operand_loc[i] = 0;
3147 /* If an output operand changed from a REG to a MEM and INSN is an
3148 insn, write a CLOBBER insn. */
3149 if (recog_data.operand_type[i] != OP_IN
3150 && GET_CODE (orig_operand[i]) == REG
3151 && GET_CODE (substed_operand[i]) == MEM
3153 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3158 for (i = 0; i < recog_data.n_dups; i++)
3159 *recog_data.dup_loc[i]
3160 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3162 /* If any eliminable remain, they aren't eliminable anymore. */
3163 check_eliminable_occurrences (old_body);
3165 /* Substitute the operands; the new values are in the substed_operand
3167 for (i = 0; i < recog_data.n_operands; i++)
3168 *recog_data.operand_loc[i] = substed_operand[i];
3169 for (i = 0; i < recog_data.n_dups; i++)
3170 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3172 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3173 re-recognize the insn. We do this in case we had a simple addition
3174 but now can do this as a load-address. This saves an insn in this
3176 If re-recognition fails, the old insn code number will still be used,
3177 and some register operands may have changed into PLUS expressions.
3178 These will be handled by find_reloads by loading them into a register
3183 /* If we aren't replacing things permanently and we changed something,
3184 make another copy to ensure that all the RTL is new. Otherwise
3185 things can go wrong if find_reload swaps commutative operands
3186 and one is inside RTL that has been copied while the other is not. */
3187 new_body = old_body;
3190 new_body = copy_insn (old_body);
3191 if (REG_NOTES (insn))
3192 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3194 PATTERN (insn) = new_body;
3196 /* If we had a move insn but now we don't, rerecognize it. This will
3197 cause spurious re-recognition if the old move had a PARALLEL since
3198 the new one still will, but we can't call single_set without
3199 having put NEW_BODY into the insn and the re-recognition won't
3200 hurt in this rare case. */
3201 /* ??? Why this huge if statement - why don't we just rerecognize the
3205 && ((GET_CODE (SET_SRC (old_set)) == REG
3206 && (GET_CODE (new_body) != SET
3207 || GET_CODE (SET_SRC (new_body)) != REG))
3208 /* If this was a load from or store to memory, compare
3209 the MEM in recog_data.operand to the one in the insn.
3210 If they are not equal, then rerecognize the insn. */
3212 && ((GET_CODE (SET_SRC (old_set)) == MEM
3213 && SET_SRC (old_set) != recog_data.operand[1])
3214 || (GET_CODE (SET_DEST (old_set)) == MEM
3215 && SET_DEST (old_set) != recog_data.operand[0])))
3216 /* If this was an add insn before, rerecognize. */
3217 || GET_CODE (SET_SRC (old_set)) == PLUS))
3219 int new_icode = recog (PATTERN (insn), insn, 0);
3221 INSN_CODE (insn) = icode;
3225 /* Restore the old body. If there were any changes to it, we made a copy
3226 of it while the changes were still in place, so we'll correctly return
3227 a modified insn below. */
3230 /* Restore the old body. */
3231 for (i = 0; i < recog_data.n_operands; i++)
3232 *recog_data.operand_loc[i] = orig_operand[i];
3233 for (i = 0; i < recog_data.n_dups; i++)
3234 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3237 /* Update all elimination pairs to reflect the status after the current
3238 insn. The changes we make were determined by the earlier call to
3239 elimination_effects.
3241 We also detect cases where register elimination cannot be done,
3242 namely, if a register would be both changed and referenced outside a MEM
3243 in the resulting insn since such an insn is often undefined and, even if
3244 not, we cannot know what meaning will be given to it. Note that it is
3245 valid to have a register used in an address in an insn that changes it
3246 (presumably with a pre- or post-increment or decrement).
3248 If anything changes, return nonzero. */
3250 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3252 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3253 ep->can_eliminate = 0;
3255 ep->ref_outside_mem = 0;
3257 if (ep->previous_offset != ep->offset)
3262 /* If we changed something, perform elimination in REG_NOTES. This is
3263 needed even when REPLACE is zero because a REG_DEAD note might refer
3264 to a register that we eliminate and could cause a different number
3265 of spill registers to be needed in the final reload pass than in
3267 if (val && REG_NOTES (insn) != 0)
3268 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3273 /* Loop through all elimination pairs.
3274 Recalculate the number not at initial offset.
3276 Compute the maximum offset (minimum offset if the stack does not
3277 grow downward) for each elimination pair. */
3280 update_eliminable_offsets ()
3282 struct elim_table *ep;
3284 num_not_at_initial_offset = 0;
3285 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3287 ep->previous_offset = ep->offset;
3288 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3289 num_not_at_initial_offset++;
3293 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3294 replacement we currently believe is valid, mark it as not eliminable if X
3295 modifies DEST in any way other than by adding a constant integer to it.
3297 If DEST is the frame pointer, we do nothing because we assume that
3298 all assignments to the hard frame pointer are nonlocal gotos and are being
3299 done at a time when they are valid and do not disturb anything else.
3300 Some machines want to eliminate a fake argument pointer with either the
3301 frame or stack pointer. Assignments to the hard frame pointer must not
3302 prevent this elimination.
3304 Called via note_stores from reload before starting its passes to scan
3305 the insns of the function. */
/* note_stores callback: DEST is the location being stored to, X is the
   SET or CLOBBER rtx performing the store; DATA is unused.  */
3308 mark_not_eliminable (dest, x, data)
3311 void *data ATTRIBUTE_UNUSED;
3315 /* A SUBREG of a hard register here is just changing its mode. We should
3316 not see a SUBREG of an eliminable hard register, but check just in
3318 if (GET_CODE (dest) == SUBREG)
3319 dest = SUBREG_REG (dest);
/* Stores to the hard frame pointer are assumed to be nonlocal gotos
   (see the header comment) and are deliberately ignored.  */
3321 if (dest == hard_frame_pointer_rtx)
/* An elimination pair becomes invalid when its target register is
   modified by anything other than "to_rtx = to_rtx + constant" -- the
   only form whose effect we can track as an offset adjustment.  */
3324 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3325 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3326 && (GET_CODE (x) != SET
3327 || GET_CODE (SET_SRC (x)) != PLUS
3328 || XEXP (SET_SRC (x), 0) != dest
3329 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
/* Disable the pair both for this pass and as the recorded
   previous state.  */
3331 reg_eliminate[i].can_eliminate_previous
3332 = reg_eliminate[i].can_eliminate = 0;
3337 /* Verify that the initial elimination offsets did not change since the
3338 last call to set_initial_elim_offsets. This is used to catch cases
3339 where something illegal happened during reload_as_needed that could
3340 cause incorrect code to be generated if we did not check for it. */
3343 verify_initial_elim_offsets ()
3347 #ifdef ELIMINABLE_REGS
3348 struct elim_table *ep;
3350 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3352 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3353 if (t != ep->initial_offset)
3357 INITIAL_FRAME_POINTER_OFFSET (t);
3358 if (t != reg_eliminate[0].initial_offset)
3363 /* Reset all offsets on eliminable registers to their initial values. */
3366 set_initial_elim_offsets ()
3368 struct elim_table *ep = reg_eliminate;
3370 #ifdef ELIMINABLE_REGS
3371 for (; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3373 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3374 ep->previous_offset = ep->offset = ep->initial_offset;
3377 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3378 ep->previous_offset = ep->offset = ep->initial_offset;
3381 num_not_at_initial_offset = 0;
3384 /* Initialize the known label offsets.
3385 Set a known offset for each forced label to be at the initial offset
3386 of each elimination. We do this because we assume that all
3387 computed jumps occur from a&n