1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
32 #include "insn-config.h"
38 #include "addresses.h"
39 #include "basic-block.h"
51 /* This file contains the reload pass of the compiler, which is
52 run after register allocation has been done. It checks that
53 each insn is valid (operands required to be in registers really
54 are in registers of the proper class) and fixes up invalid ones
55 by copying values temporarily into registers for the insns
58 The results of register allocation are described by the vector
59 reg_renumber; the insns still contain pseudo regs, but reg_renumber
60 can be used to find which hard reg, if any, a pseudo reg is in.
62 The technique we always use is to free up a few hard regs that are
63 called ``reload regs'', and for each place where a pseudo reg
64 must be in a hard reg, copy it temporarily into one of the reload regs.
66 Reload regs are allocated locally for every instruction that needs
67 reloads. When there are pseudos which are allocated to a register that
68 has been chosen as a reload reg, such pseudos must be ``spilled''.
69 This means that they go to other hard regs, or to stack slots if no other
70 available hard regs can be found. Spilling can invalidate more
71 insns, requiring additional need for reloads, so we must keep checking
72 until the process stabilizes.
74 For machines with different classes of registers, we must keep track
75 of the register class needed for each reload, and make sure that
76 we allocate enough reload registers of each class.
78 The file reload.c contains the code that checks one insn for
79 validity and reports the reloads that it needs. This file
80 is in charge of scanning the entire rtl code, accumulating the
81 reload needs, spilling, assigning reload registers to use for
82 fixing up each insn, and generating the new insns to copy values
83 into the reload registers. */
85 /* During reload_as_needed, element N contains a REG rtx for the hard reg
86 into which reg N has been reloaded (perhaps for a previous insn). */
87 static rtx *reg_last_reload_reg;
89 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
90 for an output reload that stores into reg N. */
91 static regset_head reg_has_output_reload;
93 /* Indicates which hard regs are reload-registers for an output reload
94 in the current insn. */
95 static HARD_REG_SET reg_is_output_reload;
97 /* Element N is the constant value to which pseudo reg N is equivalent,
98 or zero if pseudo reg N is not equivalent to a constant.
99 find_reloads looks at this in order to replace pseudo reg N
100 with the constant it stands for. */
101 rtx *reg_equiv_constant;
103 /* Element N is an invariant value to which pseudo reg N is equivalent.
104 eliminate_regs_in_insn uses this to replace pseudos in particular
106 rtx *reg_equiv_invariant;
108 /* Element N is a memory location to which pseudo reg N is equivalent,
109 prior to any register elimination (such as frame pointer to stack
110 pointer). Depending on whether or not it is a valid address, this value
111 is transferred to either reg_equiv_address or reg_equiv_mem. */
112 rtx *reg_equiv_memory_loc;
114 /* We allocate reg_equiv_memory_loc inside a varray so that the garbage
115 collector can keep track of what is inside. */
116 VEC(rtx,gc) *reg_equiv_memory_loc_vec;
118 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
119 This is used when the address is not valid as a memory address
120 (because its displacement is too big for the machine.) */
121 rtx *reg_equiv_address;
123 /* Element N is the memory slot to which pseudo reg N is equivalent,
124 or zero if pseudo reg N is not equivalent to a memory slot. */
127 /* Element N is an EXPR_LIST of REG_EQUIVs containing MEMs with
128 alternate representations of the location of pseudo reg N. */
129 rtx *reg_equiv_alt_mem_list;
131 /* Widest width in which each pseudo reg is referred to (via subreg). */
132 static unsigned int *reg_max_ref_width;
134 /* Element N is the list of insns that initialized reg N from its equivalent
135 constant or memory slot. */
137 int reg_equiv_init_size;
139 /* Vector to remember old contents of reg_renumber before spilling. */
140 static short *reg_old_renumber;
142 /* During reload_as_needed, element N contains the last pseudo regno reloaded
143 into hard register N. If that pseudo reg occupied more than one register,
144 reg_reloaded_contents points to that pseudo for each spill register in
145 use; all of these must remain set for an inheritance to occur. */
146 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
148 /* During reload_as_needed, element N contains the insn for which
149 hard register N was last used. Its contents are significant only
150 when reg_reloaded_valid is set for this register. */
151 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
153 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
154 static HARD_REG_SET reg_reloaded_valid;
155 /* Indicate if the register was dead at the end of the reload.
156 This is only valid if reg_reloaded_contents is set and valid. */
157 static HARD_REG_SET reg_reloaded_dead;
159 /* Indicate whether the register's current value is one that is not
160 safe to retain across a call, even for registers that are normally
161 call-saved. This is only meaningful for members of reg_reloaded_valid. */
162 static HARD_REG_SET reg_reloaded_call_part_clobbered;
164 /* Number of spill-regs so far; number of valid elements of spill_regs. */
167 /* In parallel with spill_regs, contains REG rtx's for those regs.
168 Holds the last rtx used for any given reg, or 0 if it has never
169 been used for spilling yet. This rtx is reused, provided it has
171 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
173 /* In parallel with spill_regs, contains nonzero for a spill reg
174 that was stored after the last time it was used.
175 The precise value is the insn generated to do the store. */
176 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
178 /* This is the register that was stored with spill_reg_store. This is a
179 copy of reload_out / reload_out_reg when the value was stored; if
180 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
181 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
183 /* This table is the inverse mapping of spill_regs:
184 indexed by hard reg number,
185 it contains the position of that reg in spill_regs,
186 or -1 for something that is not in spill_regs.
188 ?!? This is no longer accurate. */
189 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
191 /* This reg set indicates registers that can't be used as spill registers for
192 the currently processed insn. These are the hard registers which are live
193 during the insn, but not allocated to pseudos, as well as fixed
195 static HARD_REG_SET bad_spill_regs;
197 /* These are the hard registers that can't be used as spill register for any
198 insn. This includes registers used for user variables and registers that
199 we can't eliminate. A register that appears in this set also can't be used
200 to retry register allocation. */
201 static HARD_REG_SET bad_spill_regs_global;
203 /* Describes order of use of registers for reloading
204 of spilled pseudo-registers. `n_spills' is the number of
205 elements that are actually valid; new ones are added at the end.
207 Both spill_regs and spill_reg_order are used on two occasions:
208 once during find_reload_regs, where they keep track of the spill registers
209 for a single insn, but also during reload_as_needed where they show all
210 the registers ever used by reload. For the latter case, the information
211 is calculated during finish_spills. */
212 static short spill_regs[FIRST_PSEUDO_REGISTER];
214 /* This vector of reg sets indicates, for each pseudo, which hard registers
215 may not be used for retrying global allocation because the register was
216 formerly spilled from one of them. If we allowed reallocating a pseudo to
217 a register that it was already allocated to, reload might not
219 static HARD_REG_SET *pseudo_previous_regs;
221 /* This vector of reg sets indicates, for each pseudo, which hard
222 registers may not be used for retrying global allocation because they
223 are used as spill registers during one of the insns in which the
225 static HARD_REG_SET *pseudo_forbidden_regs;
227 /* All hard regs that have been used as spill registers for any insn are
228 marked in this set. */
229 static HARD_REG_SET used_spill_regs;
231 /* Index of last register assigned as a spill register. We allocate in
232 a round-robin fashion. */
233 static int last_spill_reg;
235 /* Nonzero if indirect addressing is supported on the machine; this means
236 that spilling (REG n) does not require reloading it into a register in
237 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
238 value indicates the level of indirect addressing supported, e.g., two
239 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
241 static char spill_indirect_levels;
243 /* Nonzero if indirect addressing is supported when the innermost MEM is
244 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
245 which these are valid is the same as spill_indirect_levels, above. */
246 char indirect_symref_ok;
248 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
249 char double_reg_address_ok;
251 /* Record the stack slot for each spilled hard register. */
252 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
254 /* Width allocated so far for that stack slot. */
255 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
257 /* Record which pseudos needed to be spilled. */
258 static regset_head spilled_pseudos;
260 /* Record which pseudos changed their allocation in finish_spills. */
261 static regset_head changed_allocation_pseudos;
263 /* Used for communication between order_regs_for_reload and count_pseudo.
264 Used to avoid counting one pseudo twice. */
265 static regset_head pseudos_counted;
267 /* First uid used by insns created by reload in this function.
268 Used in find_equiv_reg. */
269 int reload_first_uid;
271 /* Flag set by local-alloc or global-alloc if anything is live in
272 a call-clobbered reg across calls. */
273 int caller_save_needed;
275 /* Set to 1 while reload_as_needed is operating.
276 Required by some machines to handle any generated moves differently. */
277 int reload_in_progress = 0;
279 /* These arrays record the insn_code of insns that may be needed to
280 perform input and output reloads of special objects. They provide a
281 place to pass a scratch register. */
282 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
283 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
285 /* This obstack is used for allocation of rtl during register elimination.
286 The allocated storage can be freed once find_reloads has processed the
288 static struct obstack reload_obstack;
290 /* Points to the beginning of the reload_obstack. All insn_chain structures
291 are allocated first. */
292 static char *reload_startobj;
294 /* The point after all insn_chain structures. Used to quickly deallocate
295 memory allocated in copy_reloads during calculate_needs_all_insns. */
296 static char *reload_firstobj;
298 /* This points before all local rtl generated by register elimination.
299 Used to quickly free all memory after processing one insn. */
300 static char *reload_insn_firstobj;
302 /* List of insn_chain instructions, one for every insn that reload needs to
304 struct insn_chain *reload_insn_chain;
306 /* List of all insns needing reloads. */
307 static struct insn_chain *insns_need_reload;
309 /* This structure is used to record information about register eliminations.
310 Each array entry describes one possible way of eliminating a register
311 in favor of another. If there is more than one way of eliminating a
312 particular register, the most preferred should be specified first. */
316 int from; /* Register number to be eliminated. */
317 int to; /* Register number used as replacement. */
318 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
319 int can_eliminate; /* Nonzero if this elimination can be done. */
320 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
321 target hook in previous scan over insns
323 HOST_WIDE_INT offset; /* Current offset between the two regs. */
324 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
325 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
326 rtx from_rtx; /* REG rtx for the register to be eliminated.
327 We cannot simply compare the number since
328 we might then spuriously replace a hard
329 register corresponding to a pseudo
330 assigned to the reg to be eliminated. */
331 rtx to_rtx; /* REG rtx for the replacement. */
334 static struct elim_table *reg_eliminate = 0;
336 /* This is an intermediate structure to initialize the table. It has
337 exactly the members provided by ELIMINABLE_REGS. */
338 static const struct elim_table_1
342 } reg_eliminate_1[] =
344 /* If a set of eliminable registers was specified, define the table from it.
345 Otherwise, default to the normal case of the frame pointer being
346 replaced by the stack pointer. */
348 #ifdef ELIMINABLE_REGS
351 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
354 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
356 /* Record the number of pending eliminations that have an offset not equal
357 to their initial offset. If nonzero, we use a new copy of each
358 replacement result in any insns encountered. */
359 int num_not_at_initial_offset;
361 /* Count the number of registers that we may be able to eliminate. */
362 static int num_eliminable;
363 /* And the number of registers that are equivalent to a constant that
364 can be eliminated to frame_pointer / arg_pointer + constant. */
365 static int num_eliminable_invariants;
367 /* For each label, we record the offset of each elimination. If we reach
368 a label by more than one path and an offset differs, we cannot do the
369 elimination. This information is indexed by the difference of the
370 number of the label and the first label number. We can't offset the
371 pointer itself as this can cause problems on machines with segmented
372 memory. The first table is an array of flags that records whether we
373 have yet encountered a label and the second table is an array of arrays,
374 one entry in the latter array for each elimination. */
376 static int first_label_num;
377 static char *offsets_known_at;
378 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
380 /* Stack of addresses where an rtx has been changed. We can undo the
381 changes by popping items off the stack and restoring the original
382 value at each location.
384 We use this simplistic undo capability rather than copy_rtx as copy_rtx
385 will not make a deep copy of a normally sharable rtx, such as
386 (const (plus (symbol_ref) (const_int))). If such an expression appears
387 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
388 rtx expression would be changed. See PR 42431. */
392 DEF_VEC_ALLOC_P(rtx_p,heap);
393 static VEC(rtx_p,heap) *substitute_stack;
395 /* Number of labels in the current function. */
397 static int num_labels;
399 static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
400 static void maybe_fix_stack_asms (void);
401 static void copy_reloads (struct insn_chain *);
402 static void calculate_needs_all_insns (int);
403 static int find_reg (struct insn_chain *, int);
404 static void find_reload_regs (struct insn_chain *);
405 static void select_reload_regs (void);
406 static void delete_caller_save_insns (void);
408 static void spill_failure (rtx, enum reg_class);
409 static void count_spilled_pseudo (int, int, int);
410 static void delete_dead_insn (rtx);
411 static void alter_reg (int, int, bool);
412 static void set_label_offsets (rtx, rtx, int);
413 static void check_eliminable_occurrences (rtx);
414 static void elimination_effects (rtx, enum machine_mode);
415 static rtx eliminate_regs_1 (rtx, enum machine_mode, rtx, bool, bool);
416 static int eliminate_regs_in_insn (rtx, int);
417 static void update_eliminable_offsets (void);
418 static void mark_not_eliminable (rtx, const_rtx, void *);
419 static void set_initial_elim_offsets (void);
420 static bool verify_initial_elim_offsets (void);
421 static void set_initial_label_offsets (void);
422 static void set_offsets_for_label (rtx);
423 static void init_eliminable_invariants (rtx, bool);
424 static void init_elim_table (void);
425 static void free_reg_equiv (void);
426 static void update_eliminables (HARD_REG_SET *);
427 static void elimination_costs_in_insn (rtx);
428 static void spill_hard_reg (unsigned int, int);
429 static int finish_spills (int);
430 static void scan_paradoxical_subregs (rtx);
431 static void count_pseudo (int);
432 static void order_regs_for_reload (struct insn_chain *);
433 static void reload_as_needed (int);
434 static void forget_old_reloads_1 (rtx, const_rtx, void *);
435 static void forget_marked_reloads (regset);
436 static int reload_reg_class_lower (const void *, const void *);
437 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
439 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
441 static int reload_reg_free_p (unsigned int, int, enum reload_type);
442 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
444 static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
446 static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
447 static int allocate_reload_reg (struct insn_chain *, int, int);
448 static int conflicts_with_override (rtx);
449 static void failed_reload (rtx, int);
450 static int set_reload_reg (int, int);
451 static void choose_reload_regs_init (struct insn_chain *, rtx *);
452 static void choose_reload_regs (struct insn_chain *);
453 static void merge_assigned_reloads (rtx);
454 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
456 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
458 static void do_input_reload (struct insn_chain *, struct reload *, int);
459 static void do_output_reload (struct insn_chain *, struct reload *, int);
460 static void emit_reload_insns (struct insn_chain *);
461 static void delete_output_reload (rtx, int, int, rtx);
462 static void delete_address_reloads (rtx, rtx);
463 static void delete_address_reloads_1 (rtx, rtx, rtx);
464 static rtx inc_for_reload (rtx, rtx, rtx, int);
466 static void add_auto_inc_notes (rtx, rtx);
468 static void substitute (rtx *, const_rtx, rtx);
469 static bool gen_reload_chain_without_interm_reg_p (int, int);
470 static int reloads_conflict (int, int);
471 static rtx gen_reload (rtx, rtx, int, enum reload_type);
472 static rtx emit_insn_if_valid_for_reload (rtx);
474 /* Initialize the reload pass. This is called at the beginning of compilation
475 and may be called again if the target is reinitialized. */
482 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
483 Set spill_indirect_levels to the number of levels such addressing is
484 permitted, zero if it is not permitted at all. */
487 = gen_rtx_MEM (Pmode,
490 LAST_VIRTUAL_REGISTER + 1),
492 spill_indirect_levels = 0;
494 while (memory_address_p (QImode, tem))
496 spill_indirect_levels++;
497 tem = gen_rtx_MEM (Pmode, tem);
500 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
502 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
503 indirect_symref_ok = memory_address_p (QImode, tem);
505 /* See if reg+reg is a valid (and offsettable) address. */
507 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
509 tem = gen_rtx_PLUS (Pmode,
510 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
511 gen_rtx_REG (Pmode, i));
513 /* This way, we make sure that reg+reg is an offsettable address. */
514 tem = plus_constant (tem, 4);
516 if (memory_address_p (QImode, tem))
518 double_reg_address_ok = 1;
523 /* Initialize obstack for our rtl allocation. */
524 gcc_obstack_init (&reload_obstack);
525 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
527 INIT_REG_SET (&spilled_pseudos);
528 INIT_REG_SET (&changed_allocation_pseudos);
529 INIT_REG_SET (&pseudos_counted);
532 /* List of insn chains that are currently unused. */
533 static struct insn_chain *unused_insn_chains = 0;
535 /* Allocate an empty insn_chain structure. */
537 new_insn_chain (void)
539 struct insn_chain *c;
541 if (unused_insn_chains == 0)
543 c = XOBNEW (&reload_obstack, struct insn_chain);
544 INIT_REG_SET (&c->live_throughout);
545 INIT_REG_SET (&c->dead_or_set);
549 c = unused_insn_chains;
550 unused_insn_chains = c->next;
552 c->is_caller_save_insn = 0;
553 c->need_operand_change = 0;
559 /* Small utility function to set all regs in hard reg set TO which are
560 allocated to pseudos in regset FROM. */
563 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
566 reg_set_iterator rsi;
568 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
570 int r = reg_renumber[regno];
574 /* reload_combine uses the information from DF_LIVE_IN,
575 which might still contain registers that have not
576 actually been allocated since they have an
578 gcc_assert (ira_conflicts_p || reload_completed);
581 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
585 /* Replace all pseudos found in LOC with their corresponding
589 replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
602 unsigned int regno = REGNO (x);
604 if (regno < FIRST_PSEUDO_REGISTER)
607 x = eliminate_regs (x, mem_mode, usage);
611 replace_pseudos_in (loc, mem_mode, usage);
615 if (reg_equiv_constant[regno])
616 *loc = reg_equiv_constant[regno];
617 else if (reg_equiv_mem[regno])
618 *loc = reg_equiv_mem[regno];
619 else if (reg_equiv_address[regno])
620 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
623 gcc_assert (!REG_P (regno_reg_rtx[regno])
624 || REGNO (regno_reg_rtx[regno]) != regno);
625 *loc = regno_reg_rtx[regno];
630 else if (code == MEM)
632 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
636 /* Process each of our operands recursively. */
637 fmt = GET_RTX_FORMAT (code);
638 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
640 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
641 else if (*fmt == 'E')
642 for (j = 0; j < XVECLEN (x, i); j++)
643 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
646 /* Determine if the current function has an exception receiver block
647 that reaches the exit block via non-exceptional edges */
650 has_nonexceptional_receiver (void)
654 basic_block *tos, *worklist, bb;
656 /* If we're not optimizing, then just err on the safe side. */
660 /* First determine which blocks can reach exit via normal paths. */
661 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
664 bb->flags &= ~BB_REACHABLE;
666 /* Place the exit block on our worklist. */
667 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
668 *tos++ = EXIT_BLOCK_PTR;
670 /* Iterate: find everything reachable from what we've already seen. */
671 while (tos != worklist)
675 FOR_EACH_EDGE (e, ei, bb->preds)
676 if (!(e->flags & EDGE_ABNORMAL))
678 basic_block src = e->src;
680 if (!(src->flags & BB_REACHABLE))
682 src->flags |= BB_REACHABLE;
689 /* Now see if there's a reachable block with an exceptional incoming
692 if (bb->flags & BB_REACHABLE)
693 FOR_EACH_EDGE (e, ei, bb->preds)
694 if (e->flags & EDGE_ABNORMAL)
697 /* No exceptional block reached exit unexceptionally. */
702 /* Global variables used by reload and its subroutines. */
704 /* The current basic block while in calculate_elim_costs_all_insns. */
705 static basic_block elim_bb;
707 /* Set during calculate_needs if an insn needs register elimination. */
708 static int something_needs_elimination;
709 /* Set during calculate_needs if an insn needs an operand changed. */
710 static int something_needs_operands_changed;
711 /* Set by alter_regs if we spilled a register to the stack. */
712 static bool something_was_spilled;
714 /* Nonzero means we couldn't get enough spill regs. */
717 /* Temporary array of pseudo-register number. */
718 static int *temp_pseudo_reg_arr;
720 /* Main entry point for the reload pass.
722 FIRST is the first insn of the function being compiled.
724 GLOBAL nonzero means we were called from global_alloc
725 and should attempt to reallocate any pseudoregs that we
726 displace from hard regs we will use for reloads.
727 If GLOBAL is zero, we do not have enough information to do that,
728 so any pseudo reg that is spilled must go to the stack.
730 Return value is nonzero if reload failed
731 and we must not do any more for this function. */
734 reload (rtx first, int global)
738 struct elim_table *ep;
741 /* Make sure even insns with volatile mem refs are recognizable. */
746 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
748 /* Make sure that the last insn in the chain
749 is not something that needs reloading. */
750 emit_note (NOTE_INSN_DELETED);
752 /* Enable find_equiv_reg to distinguish insns made by reload. */
753 reload_first_uid = get_max_uid ();
755 #ifdef SECONDARY_MEMORY_NEEDED
756 /* Initialize the secondary memory table. */
757 clear_secondary_mem ();
760 /* We don't have a stack slot for any spill reg yet. */
761 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
762 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
764 /* Initialize the save area information for caller-save, in case some
768 /* Compute which hard registers are now in use
769 as homes for pseudo registers.
770 This is done here rather than (eg) in global_alloc
771 because this point is reached even if not optimizing. */
772 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
775 /* A function that has a nonlocal label that can reach the exit
776 block via non-exceptional paths must save all call-saved
778 if (cfun->has_nonlocal_label
779 && has_nonexceptional_receiver ())
780 crtl->saves_all_registers = 1;
782 if (crtl->saves_all_registers)
783 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
784 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
785 df_set_regs_ever_live (i, true);
787 reg_old_renumber = XCNEWVEC (short, max_regno);
788 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
789 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
790 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
792 CLEAR_HARD_REG_SET (bad_spill_regs_global);
794 init_eliminable_invariants (first, true);
797 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
798 stack slots to the pseudos that lack hard regs or equivalents.
799 Do not touch virtual registers. */
801 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
802 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
803 temp_pseudo_reg_arr[n++] = i;
806 /* Ask IRA to order pseudo-registers for better stack slot
808 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
810 for (i = 0; i < n; i++)
811 alter_reg (temp_pseudo_reg_arr[i], -1, false);
813 /* If we have some registers we think can be eliminated, scan all insns to
814 see if there is an insn that sets one of these registers to something
815 other than itself plus a constant. If so, the register cannot be
816 eliminated. Doing this scan here eliminates an extra pass through the
817 main reload loop in the most common case where register elimination
819 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
821 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
823 maybe_fix_stack_asms ();
825 insns_need_reload = 0;
826 something_needs_elimination = 0;
828 /* Initialize to -1, which means take the first spill register. */
831 /* Spill any hard regs that we know we can't eliminate. */
832 CLEAR_HARD_REG_SET (used_spill_regs);
833 /* There can be multiple ways to eliminate a register;
834 they should be listed adjacently.
835 Elimination for any register fails only if all possible ways fail. */
836 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; )
839 int can_eliminate = 0;
842 can_eliminate |= ep->can_eliminate;
845 while (ep < ®_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
847 spill_hard_reg (from, 1);
850 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
851 if (frame_pointer_needed)
852 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
854 finish_spills (global);
856 /* From now on, we may need to generate moves differently. We may also
857 allow modifications of insns which cause them to not be recognized.
858 Any such modifications will be cleaned up during reload itself. */
859 reload_in_progress = 1;
861 /* This loop scans the entire function each go-round
862 and repeats until one repetition spills no additional hard regs. */
865 int something_changed;
867 HOST_WIDE_INT starting_frame_size;
869 starting_frame_size = get_frame_size ();
870 something_was_spilled = false;
872 set_initial_elim_offsets ();
873 set_initial_label_offsets ();
875 /* For each pseudo register that has an equivalent location defined,
876 try to eliminate any eliminable registers (such as the frame pointer)
877 assuming initial offsets for the replacement register, which
880 If the resulting location is directly addressable, substitute
881 the MEM we just got directly for the old REG.
883 If it is not addressable but is a constant or the sum of a hard reg
884 and constant, it is probably not addressable because the constant is
885 out of range, in that case record the address; we will generate
886 hairy code to compute the address in a register each time it is
887 needed. Similarly if it is a hard register, but one that is not
888 valid as an address register.
890 If the location is not addressable, but does not have one of the
891 above forms, assign a stack slot. We have to do this to avoid the
892 potential of producing lots of reloads if, e.g., a location involves
893 a pseudo that didn't get a hard register and has an equivalent memory
894 location that also involves a pseudo that didn't get a hard register.
896 Perhaps at some point we will improve reload_when_needed handling
897 so this problem goes away. But that's very hairy. */
899 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
900 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
902 rtx x = eliminate_regs (reg_equiv_memory_loc[i], VOIDmode,
905 if (strict_memory_address_addr_space_p
906 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
908 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
909 else if (CONSTANT_P (XEXP (x, 0))
910 || (REG_P (XEXP (x, 0))
911 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
912 || (GET_CODE (XEXP (x, 0)) == PLUS
913 && REG_P (XEXP (XEXP (x, 0), 0))
914 && (REGNO (XEXP (XEXP (x, 0), 0))
915 < FIRST_PSEUDO_REGISTER)
916 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
917 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
920 /* Make a new stack slot. Then indicate that something
921 changed so we go back and recompute offsets for
922 eliminable registers because the allocation of memory
923 below might change some offset. reg_equiv_{mem,address}
924 will be set up for this pseudo on the next pass around
926 reg_equiv_memory_loc[i] = 0;
927 reg_equiv_init[i] = 0;
928 alter_reg (i, -1, true);
932 if (caller_save_needed)
935 /* If we allocated another stack slot, redo elimination bookkeeping. */
936 if (something_was_spilled || starting_frame_size != get_frame_size ())
938 if (starting_frame_size && crtl->stack_alignment_needed)
940 /* If we have a stack frame, we must align it now. The
941 stack size may be a part of the offset computation for
942 register elimination. So if this changes the stack size,
943 then repeat the elimination bookkeeping. We don't
944 realign when there is no stack, as that will cause a
945 stack frame when none is needed should
946 STARTING_FRAME_OFFSET not be already aligned to
948 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
949 if (starting_frame_size != get_frame_size ())
953 if (caller_save_needed)
955 save_call_clobbered_regs ();
956 /* That might have allocated new insn_chain structures. */
957 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
960 calculate_needs_all_insns (global);
962 if (! ira_conflicts_p)
963 /* Don't do it for IRA. We need this info because we don't
964 change live_throughout and dead_or_set for chains when IRA
966 CLEAR_REG_SET (&spilled_pseudos);
970 something_changed = 0;
972 /* If we allocated any new memory locations, make another pass
973 since it might have changed elimination offsets. */
974 if (something_was_spilled || starting_frame_size != get_frame_size ())
975 something_changed = 1;
977 /* Even if the frame size remained the same, we might still have
978 changed elimination offsets, e.g. if find_reloads called
979 force_const_mem requiring the back end to allocate a constant
980 pool base register that needs to be saved on the stack. */
981 else if (!verify_initial_elim_offsets ())
982 something_changed = 1;
985 HARD_REG_SET to_spill;
986 CLEAR_HARD_REG_SET (to_spill);
987 update_eliminables (&to_spill);
988 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
990 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
991 if (TEST_HARD_REG_BIT (to_spill, i))
993 spill_hard_reg (i, 1);
996 /* Regardless of the state of spills, if we previously had
997 a register that we thought we could eliminate, but now can
998 not eliminate, we must run another pass.
1000 Consider pseudos which have an entry in reg_equiv_* which
1001 reference an eliminable register. We must make another pass
1002 to update reg_equiv_* so that we do not substitute in the
1003 old value from when we thought the elimination could be
1005 something_changed = 1;
1009 select_reload_regs ();
1013 if (insns_need_reload != 0 || did_spill)
1014 something_changed |= finish_spills (global);
1016 if (! something_changed)
1019 if (caller_save_needed)
1020 delete_caller_save_insns ();
1022 obstack_free (&reload_obstack, reload_firstobj);
1025 /* If global-alloc was run, notify it of any register eliminations we have
1028 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1029 if (ep->can_eliminate)
1030 mark_elimination (ep->from, ep->to);
1032 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1033 If that insn didn't set the register (i.e., it copied the register to
1034 memory), just delete that insn instead of the equivalencing insn plus
1035 anything now dead. If we call delete_dead_insn on that insn, we may
1036 delete the insn that actually sets the register if the register dies
1037 there and that is incorrect. */
1039 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1041 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1044 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1046 rtx equiv_insn = XEXP (list, 0);
1048 /* If we already deleted the insn or if it may trap, we can't
1049 delete it. The latter case shouldn't happen, but can
1050 if an insn has a variable address, gets a REG_EH_REGION
1051 note added to it, and then gets converted into a load
1052 from a constant address. */
1053 if (NOTE_P (equiv_insn)
1054 || can_throw_internal (equiv_insn))
1056 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1057 delete_dead_insn (equiv_insn);
1059 SET_INSN_DELETED (equiv_insn);
1064 /* Use the reload registers where necessary
1065 by generating move instructions to move the must-be-register
1066 values into or out of the reload registers. */
1068 if (insns_need_reload != 0 || something_needs_elimination
1069 || something_needs_operands_changed)
1071 HOST_WIDE_INT old_frame_size = get_frame_size ();
1073 reload_as_needed (global);
1075 gcc_assert (old_frame_size == get_frame_size ());
1077 gcc_assert (verify_initial_elim_offsets ());
1080 /* If we were able to eliminate the frame pointer, show that it is no
1081 longer live at the start of any basic block. If it ls live by
1082 virtue of being in a pseudo, that pseudo will be marked live
1083 and hence the frame pointer will be known to be live via that
1086 if (! frame_pointer_needed)
1088 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1090 /* Come here (with failure set nonzero) if we can't get enough spill
1094 CLEAR_REG_SET (&changed_allocation_pseudos);
1095 CLEAR_REG_SET (&spilled_pseudos);
1096 reload_in_progress = 0;
1098 /* Now eliminate all pseudo regs by modifying them into
1099 their equivalent memory references.
1100 The REG-rtx's for the pseudos are modified in place,
1101 so all insns that used to refer to them now refer to memory.
1103 For a reg that has a reg_equiv_address, all those insns
1104 were changed by reloading so that no insns refer to it any longer;
1105 but the DECL_RTL of a variable decl may refer to it,
1106 and if so this causes the debugging info to mention the variable. */
1108 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1112 if (reg_equiv_mem[i])
1113 addr = XEXP (reg_equiv_mem[i], 0);
1115 if (reg_equiv_address[i])
1116 addr = reg_equiv_address[i];
1120 if (reg_renumber[i] < 0)
1122 rtx reg = regno_reg_rtx[i];
1124 REG_USERVAR_P (reg) = 0;
1125 PUT_CODE (reg, MEM);
1126 XEXP (reg, 0) = addr;
1127 if (reg_equiv_memory_loc[i])
1128 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
1131 MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
1132 MEM_ATTRS (reg) = 0;
1134 MEM_NOTRAP_P (reg) = 1;
1136 else if (reg_equiv_mem[i])
1137 XEXP (reg_equiv_mem[i], 0) = addr;
1140 /* We don't want complex addressing modes in debug insns
1141 if simpler ones will do, so delegitimize equivalences
1143 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1145 rtx reg = regno_reg_rtx[i];
1149 if (reg_equiv_constant[i])
1150 equiv = reg_equiv_constant[i];
1151 else if (reg_equiv_invariant[i])
1152 equiv = reg_equiv_invariant[i];
1153 else if (reg && MEM_P (reg))
1154 equiv = targetm.delegitimize_address (reg);
1155 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1161 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1163 insn = DF_REF_INSN (use);
1165 /* Make sure the next ref is for a different instruction,
1166 so that we're not affected by the rescan. */
1167 next = DF_REF_NEXT_REG (use);
1168 while (next && DF_REF_INSN (next) == insn)
1169 next = DF_REF_NEXT_REG (next);
1171 if (DEBUG_INSN_P (insn))
1175 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1176 df_insn_rescan_debug_internal (insn);
1179 INSN_VAR_LOCATION_LOC (insn)
1180 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1187 /* We must set reload_completed now since the cleanup_subreg_operands call
1188 below will re-recognize each insn and reload may have generated insns
1189 which are only valid during and after reload. */
1190 reload_completed = 1;
1192 /* Make a pass over all the insns and delete all USEs which we inserted
1193 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1194 notes. Delete all CLOBBER insns, except those that refer to the return
1195 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1196 from misarranging variable-array code, and simplify (subreg (reg))
1197 operands. Strip and regenerate REG_INC notes that may have been moved
1200 for (insn = first; insn; insn = NEXT_INSN (insn))
1206 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1207 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1209 if ((GET_CODE (PATTERN (insn)) == USE
1210 /* We mark with QImode USEs introduced by reload itself. */
1211 && (GET_MODE (insn) == QImode
1212 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1213 || (GET_CODE (PATTERN (insn)) == CLOBBER
1214 && (!MEM_P (XEXP (PATTERN (insn), 0))
1215 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1216 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1217 && XEXP (XEXP (PATTERN (insn), 0), 0)
1218 != stack_pointer_rtx))
1219 && (!REG_P (XEXP (PATTERN (insn), 0))
1220 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1226 /* Some CLOBBERs may survive until here and still reference unassigned
1227 pseudos with const equivalent, which may in turn cause ICE in later
1228 passes if the reference remains in place. */
1229 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1230 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1231 VOIDmode, PATTERN (insn));
1233 /* Discard obvious no-ops, even without -O. This optimization
1234 is fast and doesn't interfere with debugging. */
1235 if (NONJUMP_INSN_P (insn)
1236 && GET_CODE (PATTERN (insn)) == SET
1237 && REG_P (SET_SRC (PATTERN (insn)))
1238 && REG_P (SET_DEST (PATTERN (insn)))
1239 && (REGNO (SET_SRC (PATTERN (insn)))
1240 == REGNO (SET_DEST (PATTERN (insn)))))
1246 pnote = ®_NOTES (insn);
1249 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1250 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1251 || REG_NOTE_KIND (*pnote) == REG_INC)
1252 *pnote = XEXP (*pnote, 1);
1254 pnote = &XEXP (*pnote, 1);
1258 add_auto_inc_notes (insn, PATTERN (insn));
1261 /* Simplify (subreg (reg)) if it appears as an operand. */
1262 cleanup_subreg_operands (insn);
1264 /* Clean up invalid ASMs so that they don't confuse later passes.
1266 if (asm_noperands (PATTERN (insn)) >= 0)
1268 extract_insn (insn);
1269 if (!constrain_operands (1))
1271 error_for_asm (insn,
1272 "%<asm%> operand has impossible constraints");
1279 /* If we are doing generic stack checking, give a warning if this
1280 function's frame size is larger than we expect. */
1281 if (flag_stack_check == GENERIC_STACK_CHECK)
1283 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1284 static int verbose_warned = 0;
1286 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1287 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1288 size += UNITS_PER_WORD;
1290 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1292 warning (0, "frame size too large for reliable stack checking");
1293 if (! verbose_warned)
1295 warning (0, "try reducing the number of local variables");
1301 free (temp_pseudo_reg_arr);
1303 /* Indicate that we no longer have known memory locations or constants. */
1306 free (reg_max_ref_width);
1307 free (reg_old_renumber);
1308 free (pseudo_previous_regs);
1309 free (pseudo_forbidden_regs);
1311 CLEAR_HARD_REG_SET (used_spill_regs);
1312 for (i = 0; i < n_spills; i++)
1313 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1315 /* Free all the insn_chain structures at once. */
1316 obstack_free (&reload_obstack, reload_startobj);
1317 unused_insn_chains = 0;
1318 fixup_abnormal_edges ();
1320 /* Replacing pseudos with their memory equivalents might have
1321 created shared rtx. Subsequent passes would get confused
1322 by this, so unshare everything here. */
1323 unshare_all_rtl_again (first);
1325 #ifdef STACK_BOUNDARY
1326 /* init_emit has set the alignment of the hard frame pointer
1327 to STACK_BOUNDARY. It is very likely no longer valid if
1328 the hard frame pointer was used for register allocation. */
1329 if (!frame_pointer_needed)
1330 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1333 VEC_free (rtx_p, heap, substitute_stack);
1338 /* Yet another special case. Unfortunately, reg-stack forces people to
1339 write incorrect clobbers in asm statements. These clobbers must not
1340 cause the register to appear in bad_spill_regs, otherwise we'll call
1341 fatal_insn later. We clear the corresponding regnos in the live
1342 register sets to avoid this.
1343 The whole thing is rather sick, I'm afraid. */
1346 maybe_fix_stack_asms (void)
1349 const char *constraints[MAX_RECOG_OPERANDS];
1350 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1351 struct insn_chain *chain;
1353 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1356 HARD_REG_SET clobbered, allowed;
1359 if (! INSN_P (chain->insn)
1360 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1362 pat = PATTERN (chain->insn);
1363 if (GET_CODE (pat) != PARALLEL)
1366 CLEAR_HARD_REG_SET (clobbered);
1367 CLEAR_HARD_REG_SET (allowed);
1369 /* First, make a mask of all stack regs that are clobbered. */
1370 for (i = 0; i < XVECLEN (pat, 0); i++)
1372 rtx t = XVECEXP (pat, 0, i);
1373 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1374 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1377 /* Get the operand values and constraints out of the insn. */
1378 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1379 constraints, operand_mode, NULL);
1381 /* For every operand, see what registers are allowed. */
1382 for (i = 0; i < noperands; i++)
1384 const char *p = constraints[i];
1385 /* For every alternative, we compute the class of registers allowed
1386 for reloading in CLS, and merge its contents into the reg set
1388 int cls = (int) NO_REGS;
1394 if (c == '\0' || c == ',' || c == '#')
1396 /* End of one alternative - mark the regs in the current
1397 class, and reset the class. */
1398 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1404 } while (c != '\0' && c != ',');
1412 case '=': case '+': case '*': case '%': case '?': case '!':
1413 case '0': case '1': case '2': case '3': case '4': case '<':
1414 case '>': case 'V': case 'o': case '&': case 'E': case 'F':
1415 case 's': case 'i': case 'n': case 'X': case 'I': case 'J':
1416 case 'K': case 'L': case 'M': case 'N': case 'O': case 'P':
1417 case TARGET_MEM_CONSTRAINT:
1421 cls = (int) reg_class_subunion[cls]
1422 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1427 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1431 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
1432 cls = (int) reg_class_subunion[cls]
1433 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
1435 cls = (int) reg_class_subunion[cls]
1436 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
1438 p += CONSTRAINT_LEN (c, p);
1441 /* Those of the registers which are clobbered, but allowed by the
1442 constraints, must be usable as reload registers. So clear them
1443 out of the life information. */
1444 AND_HARD_REG_SET (allowed, clobbered);
1445 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1446 if (TEST_HARD_REG_BIT (allowed, i))
1448 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1449 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1456 /* Copy the global variables n_reloads and rld into the corresponding elts
1459 copy_reloads (struct insn_chain *chain)
1461 chain->n_reloads = n_reloads;
1462 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1463 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1464 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1467 /* Walk the chain of insns, and determine for each whether it needs reloads
1468 and/or eliminations. Build the corresponding insns_need_reload list, and
1469 set something_needs_elimination as appropriate. */
1471 calculate_needs_all_insns (int global)
1473 struct insn_chain **pprev_reload = &insns_need_reload;
1474 struct insn_chain *chain, *next = 0;
1476 something_needs_elimination = 0;
1478 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1479 for (chain = reload_insn_chain; chain != 0; chain = next)
1481 rtx insn = chain->insn;
1485 /* Clear out the shortcuts. */
1486 chain->n_reloads = 0;
1487 chain->need_elim = 0;
1488 chain->need_reload = 0;
1489 chain->need_operand_change = 0;
1491 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1492 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1493 what effects this has on the known offsets at labels. */
1495 if (LABEL_P (insn) || JUMP_P (insn)
1496 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1497 set_label_offsets (insn, insn, 0);
1501 rtx old_body = PATTERN (insn);
1502 int old_code = INSN_CODE (insn);
1503 rtx old_notes = REG_NOTES (insn);
1504 int did_elimination = 0;
1505 int operands_changed = 0;
1506 rtx set = single_set (insn);
1508 /* Skip insns that only set an equivalence. */
1509 if (set && REG_P (SET_DEST (set))
1510 && reg_renumber[REGNO (SET_DEST (set))] < 0
1511 && (reg_equiv_constant[REGNO (SET_DEST (set))]
1512 || (reg_equiv_invariant[REGNO (SET_DEST (set))]))
1513 && reg_equiv_init[REGNO (SET_DEST (set))])
1516 /* If needed, eliminate any eliminable registers. */
1517 if (num_eliminable || num_eliminable_invariants)
1518 did_elimination = eliminate_regs_in_insn (insn, 0);
1520 /* Analyze the instruction. */
1521 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1522 global, spill_reg_order);
1524 /* If a no-op set needs more than one reload, this is likely
1525 to be something that needs input address reloads. We
1526 can't get rid of this cleanly later, and it is of no use
1527 anyway, so discard it now.
1528 We only do this when expensive_optimizations is enabled,
1529 since this complements reload inheritance / output
1530 reload deletion, and it can make debugging harder. */
1531 if (flag_expensive_optimizations && n_reloads > 1)
1533 rtx set = single_set (insn);
1536 ((SET_SRC (set) == SET_DEST (set)
1537 && REG_P (SET_SRC (set))
1538 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1539 || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1540 && reg_renumber[REGNO (SET_SRC (set))] < 0
1541 && reg_renumber[REGNO (SET_DEST (set))] < 0
1542 && reg_equiv_memory_loc[REGNO (SET_SRC (set))] != NULL
1543 && reg_equiv_memory_loc[REGNO (SET_DEST (set))] != NULL
1544 && rtx_equal_p (reg_equiv_memory_loc
1545 [REGNO (SET_SRC (set))],
1546 reg_equiv_memory_loc
1547 [REGNO (SET_DEST (set))]))))
1549 if (ira_conflicts_p)
1550 /* Inform IRA about the insn deletion. */
1551 ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1552 REGNO (SET_SRC (set)));
1554 /* Delete it from the reload chain. */
1556 chain->prev->next = next;
1558 reload_insn_chain = next;
1560 next->prev = chain->prev;
1561 chain->next = unused_insn_chains;
1562 unused_insn_chains = chain;
1567 update_eliminable_offsets ();
1569 /* Remember for later shortcuts which insns had any reloads or
1570 register eliminations. */
1571 chain->need_elim = did_elimination;
1572 chain->need_reload = n_reloads > 0;
1573 chain->need_operand_change = operands_changed;
1575 /* Discard any register replacements done. */
1576 if (did_elimination)
1578 obstack_free (&reload_obstack, reload_insn_firstobj);
1579 PATTERN (insn) = old_body;
1580 INSN_CODE (insn) = old_code;
1581 REG_NOTES (insn) = old_notes;
1582 something_needs_elimination = 1;
1585 something_needs_operands_changed |= operands_changed;
1589 copy_reloads (chain);
1590 *pprev_reload = chain;
1591 pprev_reload = &chain->next_need_reload;
1598 /* This function is called from the register allocator to set up estimates
1599 for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1600 an invariant. The structure is similar to calculate_needs_all_insns. */
1603 calculate_elim_costs_all_insns (void)
1605 int *reg_equiv_init_cost;
1609 reg_equiv_init_cost = XCNEWVEC (int, max_regno);
1611 init_eliminable_invariants (get_insns (), false);
1613 set_initial_elim_offsets ();
1614 set_initial_label_offsets ();
1621 FOR_BB_INSNS (bb, insn)
1623 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1624 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1625 what effects this has on the known offsets at labels. */
1627 if (LABEL_P (insn) || JUMP_P (insn)
1628 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1629 set_label_offsets (insn, insn, 0);
1633 rtx set = single_set (insn);
1635 /* Skip insns that only set an equivalence. */
1636 if (set && REG_P (SET_DEST (set))
1637 && reg_renumber[REGNO (SET_DEST (set))] < 0
1638 && (reg_equiv_constant[REGNO (SET_DEST (set))]
1639 || (reg_equiv_invariant[REGNO (SET_DEST (set))])))
1641 unsigned regno = REGNO (SET_DEST (set));
1642 rtx init = reg_equiv_init[regno];
1645 rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
1647 int cost = rtx_cost (t, SET,
1648 optimize_bb_for_speed_p (bb));
1649 int freq = REG_FREQ_FROM_BB (bb);
1651 reg_equiv_init_cost[regno] = cost * freq;
1655 /* If needed, eliminate any eliminable registers. */
1656 if (num_eliminable || num_eliminable_invariants)
1657 elimination_costs_in_insn (insn);
1660 update_eliminable_offsets ();
1664 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1666 if (reg_equiv_invariant[i])
1668 if (reg_equiv_init[i])
1670 int cost = reg_equiv_init_cost[i];
1673 "Reg %d has equivalence, initial gains %d\n", i, cost);
1675 ira_adjust_equiv_reg_cost (i, cost);
1681 "Reg %d had equivalence, but can't be eliminated\n",
1683 ira_adjust_equiv_reg_cost (i, 0);
1689 free (reg_equiv_init_cost);
1692 /* Comparison function for qsort to decide which of two reloads
1693 should be handled first. *P1 and *P2 are the reload numbers. */
1696 reload_reg_class_lower (const void *r1p, const void *r2p)
1698 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1701 /* Consider required reloads before optional ones. */
1702 t = rld[r1].optional - rld[r2].optional;
1706 /* Count all solitary classes before non-solitary ones. */
1707 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1708 - (reg_class_size[(int) rld[r1].rclass] == 1));
1712 /* Aside from solitaires, consider all multi-reg groups first. */
1713 t = rld[r2].nregs - rld[r1].nregs;
1717 /* Consider reloads in order of increasing reg-class number. */
1718 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1722 /* If reloads are equally urgent, sort by reload number,
1723 so that the results of qsort leave nothing to chance. */
1727 /* The cost of spilling each hard reg. */
1728 static int spill_cost[FIRST_PSEUDO_REGISTER];
1730 /* When spilling multiple hard registers, we use SPILL_COST for the first
1731 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1732 only the first hard reg for a multi-reg pseudo. */
1733 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1735 /* Map of hard regno to pseudo regno currently occupying the hard
1737 static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1739 /* Update the spill cost arrays, considering that pseudo REG is live. */
1742 count_pseudo (int reg)
1744 int freq = REG_FREQ (reg);
1745 int r = reg_renumber[reg];
1748 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1749 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1750 /* Ignore spilled pseudo-registers which can be here only if IRA
1752 || (ira_conflicts_p && r < 0))
1755 SET_REGNO_REG_SET (&pseudos_counted, reg);
1757 gcc_assert (r >= 0);
1759 spill_add_cost[r] += freq;
1760 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1763 hard_regno_to_pseudo_regno[r + nregs] = reg;
1764 spill_cost[r + nregs] += freq;
1768 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1769 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1772 order_regs_for_reload (struct insn_chain *chain)
1775 HARD_REG_SET used_by_pseudos;
1776 HARD_REG_SET used_by_pseudos2;
1777 reg_set_iterator rsi;
1779 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1781 memset (spill_cost, 0, sizeof spill_cost);
1782 memset (spill_add_cost, 0, sizeof spill_add_cost);
1783 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1784 hard_regno_to_pseudo_regno[i] = -1;
1786 /* Count number of uses of each hard reg by pseudo regs allocated to it
1787 and then order them by decreasing use. First exclude hard registers
1788 that are live in or across this insn. */
1790 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1791 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1792 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1793 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1795 /* Now find out which pseudos are allocated to it, and update
1797 CLEAR_REG_SET (&pseudos_counted);
1799 EXECUTE_IF_SET_IN_REG_SET
1800 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1804 EXECUTE_IF_SET_IN_REG_SET
1805 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1809 CLEAR_REG_SET (&pseudos_counted);
1812 /* Vector of reload-numbers showing the order in which the reloads should
1814 static short reload_order[MAX_RELOADS];
1816 /* This is used to keep track of the spill regs used in one insn. */
1817 static HARD_REG_SET used_spill_regs_local;
1819 /* We decided to spill hard register SPILLED, which has a size of
1820 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1821 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1822 update SPILL_COST/SPILL_ADD_COST. */
1825 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1827 int freq = REG_FREQ (reg);
1828 int r = reg_renumber[reg];
1829 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1831 /* Ignore spilled pseudo-registers which can be here only if IRA is
1833 if ((ira_conflicts_p && r < 0)
1834 || REGNO_REG_SET_P (&spilled_pseudos, reg)
1835 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1838 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1840 spill_add_cost[r] -= freq;
1843 hard_regno_to_pseudo_regno[r + nregs] = -1;
1844 spill_cost[r + nregs] -= freq;
1848 /* Find reload register to use for reload number ORDER. */
1851 find_reg (struct insn_chain *chain, int order)
1853 int rnum = reload_order[order];
1854 struct reload *rl = rld + rnum;
1855 int best_cost = INT_MAX;
1857 unsigned int i, j, n;
1859 HARD_REG_SET not_usable;
1860 HARD_REG_SET used_by_other_reload;
1861 reg_set_iterator rsi;
1862 static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1863 static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1865 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1866 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1867 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);
1869 CLEAR_HARD_REG_SET (used_by_other_reload);
1870 for (k = 0; k < order; k++)
1872 int other = reload_order[k];
1874 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1875 for (j = 0; j < rld[other].nregs; j++)
1876 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1879 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1881 #ifdef REG_ALLOC_ORDER
1882 unsigned int regno = reg_alloc_order[i];
1884 unsigned int regno = i;
1887 if (! TEST_HARD_REG_BIT (not_usable, regno)
1888 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1889 && HARD_REGNO_MODE_OK (regno, rl->mode))
1891 int this_cost = spill_cost[regno];
1893 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1895 for (j = 1; j < this_nregs; j++)
1897 this_cost += spill_add_cost[regno + j];
1898 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1899 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1905 if (ira_conflicts_p)
1907 /* Ask IRA to find a better pseudo-register for
1909 for (n = j = 0; j < this_nregs; j++)
1911 int r = hard_regno_to_pseudo_regno[regno + j];
1915 if (n == 0 || regno_pseudo_regs[n - 1] != r)
1916 regno_pseudo_regs[n++] = r;
1918 regno_pseudo_regs[n++] = -1;
1920 || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1921 best_regno_pseudo_regs,
1928 best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1929 if (regno_pseudo_regs[j] < 0)
1936 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1938 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1940 if (this_cost < best_cost
1941 /* Among registers with equal cost, prefer caller-saved ones, or
1942 use REG_ALLOC_ORDER if it is defined. */
1943 || (this_cost == best_cost
1944 #ifdef REG_ALLOC_ORDER
1945 && (inv_reg_alloc_order[regno]
1946 < inv_reg_alloc_order[best_reg])
1948 && call_used_regs[regno]
1949 && ! call_used_regs[best_reg]
1954 best_cost = this_cost;
1962 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1964 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
1965 rl->regno = best_reg;
1967 EXECUTE_IF_SET_IN_REG_SET
1968 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1970 count_spilled_pseudo (best_reg, rl->nregs, j);
1973 EXECUTE_IF_SET_IN_REG_SET
1974 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1976 count_spilled_pseudo (best_reg, rl->nregs, j);
1979 for (i = 0; i < rl->nregs; i++)
1981 gcc_assert (spill_cost[best_reg + i] == 0);
1982 gcc_assert (spill_add_cost[best_reg + i] == 0);
1983 gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
1984 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1989 /* Find more reload regs to satisfy the remaining need of an insn, which
1991 Do it by ascending class number, since otherwise a reg
1992 might be spilled for a big class and might fail to count
1993 for a smaller class even though it belongs to that class. */
1996 find_reload_regs (struct insn_chain *chain)
2000 /* In order to be certain of getting the registers we need,
2001 we must sort the reloads into order of increasing register class.
2002 Then our grabbing of reload registers will parallel the process
2003 that provided the reload registers. */
2004 for (i = 0; i < chain->n_reloads; i++)
2006 /* Show whether this reload already has a hard reg. */
2007 if (chain->rld[i].reg_rtx)
2009 int regno = REGNO (chain->rld[i].reg_rtx);
2010 chain->rld[i].regno = regno;
2012 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2015 chain->rld[i].regno = -1;
2016 reload_order[i] = i;
2019 n_reloads = chain->n_reloads;
2020 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2022 CLEAR_HARD_REG_SET (used_spill_regs_local);
2025 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2027 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2029 /* Compute the order of preference for hard registers to spill. */
2031 order_regs_for_reload (chain);
2033 for (i = 0; i < n_reloads; i++)
2035 int r = reload_order[i];
2037 /* Ignore reloads that got marked inoperative. */
2038 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2039 && ! rld[r].optional
2040 && rld[r].regno == -1)
2041 if (! find_reg (chain, i))
2044 fprintf (dump_file, "reload failure for reload %d\n", r);
2045 spill_failure (chain->insn, rld[r].rclass);
2051 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2052 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2054 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2058 select_reload_regs (void)
2060 struct insn_chain *chain;
2062 /* Try to satisfy the needs for each insn. */
2063 for (chain = insns_need_reload; chain != 0;
2064 chain = chain->next_need_reload)
2065 find_reload_regs (chain);
2068 /* Delete all insns that were inserted by emit_caller_save_insns during
2071 delete_caller_save_insns (void)
2073 struct insn_chain *c = reload_insn_chain;
2077 while (c != 0 && c->is_caller_save_insn)
2079 struct insn_chain *next = c->next;
2082 if (c == reload_insn_chain)
2083 reload_insn_chain = next;
2087 next->prev = c->prev;
2089 c->prev->next = next;
2090 c->next = unused_insn_chains;
2091 unused_insn_chains = c;
2099 /* Handle the failure to find a register to spill.
2100 INSN should be one of the insns which needed this particular spill reg. */
2103 spill_failure (rtx insn, enum reg_class rclass)
2105 if (asm_noperands (PATTERN (insn)) >= 0)
2106 error_for_asm (insn, "can't find a register in class %qs while "
2107 "reloading %<asm%>",
2108 reg_class_names[rclass]);
2111 error ("unable to find a register to spill in class %qs",
2112 reg_class_names[rclass]);
2116 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2117 debug_reload_to_stream (dump_file);
2119 fatal_insn ("this is the insn:", insn);
2123 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2124 data that is dead in INSN. */
2127 delete_dead_insn (rtx insn)
2129 rtx prev = prev_real_insn (insn);
2132 /* If the previous insn sets a register that dies in our insn, delete it
2134 if (prev && GET_CODE (PATTERN (prev)) == SET
2135 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2136 && reg_mentioned_p (prev_dest, PATTERN (insn))
2137 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2138 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2139 delete_dead_insn (prev);
2141 SET_INSN_DELETED (insn);
2144 /* Modify the home of pseudo-reg I.
2145 The new home is present in reg_renumber[I].
2147 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2148 or it may be -1, meaning there is none or it is not relevant.
2149 This is used so that all pseudos spilled from a given hard reg
2150 can share one stack slot. */
2153 alter_reg (int i, int from_reg, bool dont_share_p)
2155 /* When outputting an inline function, this can happen
2156 for a reg that isn't actually used. */
2157 if (regno_reg_rtx[i] == 0)
2160 /* If the reg got changed to a MEM at rtl-generation time,
2162 if (!REG_P (regno_reg_rtx[i]))
2165 /* Modify the reg-rtx to contain the new hard reg
2166 number or else to contain its pseudo reg number. */
2167 SET_REGNO (regno_reg_rtx[i],
2168 reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2170 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2171 allocate a stack slot for it. */
2173 if (reg_renumber[i] < 0
2174 && REG_N_REFS (i) > 0
2175 && reg_equiv_constant[i] == 0
2176 && (reg_equiv_invariant[i] == 0 || reg_equiv_init[i] == 0)
2177 && reg_equiv_memory_loc[i] == 0)
2180 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2181 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
2182 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2183 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2184 unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
2187 something_was_spilled = true;
2189 if (ira_conflicts_p)
2191 /* Mark the spill for IRA. */
2192 SET_REGNO_REG_SET (&spilled_pseudos, i);
2194 x = ira_reuse_stack_slot (i, inherent_size, total_size);
2200 /* Each pseudo reg has an inherent size which comes from its own mode,
2201 and a total size which provides room for paradoxical subregs
2202 which refer to the pseudo reg in wider modes.
2204 We can use a slot already allocated if it provides both
2205 enough inherent space and enough total space.
2206 Otherwise, we allocate a new slot, making sure that it has no less
2207 inherent space, and no less total space, then the previous slot. */
2208 else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2212 /* No known place to spill from => no slot to reuse. */
2213 x = assign_stack_local (mode, total_size,
2214 min_align > inherent_align
2215 || total_size > inherent_size ? -1 : 0);
2219 /* Cancel the big-endian correction done in assign_stack_local.
2220 Get the address of the beginning of the slot. This is so we
2221 can do a big-endian correction unconditionally below. */
2222 if (BYTES_BIG_ENDIAN)
2224 adjust = inherent_size - total_size;
2227 = adjust_address_nv (x, mode_for_size (total_size
2233 if (! dont_share_p && ira_conflicts_p)
2234 /* Inform IRA about allocation a new stack slot. */
2235 ira_mark_new_stack_slot (stack_slot, i, total_size);
2238 /* Reuse a stack slot if possible. */
2239 else if (spill_stack_slot[from_reg] != 0
2240 && spill_stack_slot_width[from_reg] >= total_size
2241 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2243 && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2244 x = spill_stack_slot[from_reg];
2246 /* Allocate a bigger slot. */
2249 /* Compute maximum size needed, both for inherent size
2250 and for total size. */
2253 if (spill_stack_slot[from_reg])
2255 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2257 mode = GET_MODE (spill_stack_slot[from_reg]);
2258 if (spill_stack_slot_width[from_reg] > total_size)
2259 total_size = spill_stack_slot_width[from_reg];
2260 if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2261 min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2264 /* Make a slot with that size. */
2265 x = assign_stack_local (mode, total_size,
2266 min_align > inherent_align
2267 || total_size > inherent_size ? -1 : 0);
2270 /* Cancel the big-endian correction done in assign_stack_local.
2271 Get the address of the beginning of the slot. This is so we
2272 can do a big-endian correction unconditionally below. */
2273 if (BYTES_BIG_ENDIAN)
2275 adjust = GET_MODE_SIZE (mode) - total_size;
2278 = adjust_address_nv (x, mode_for_size (total_size
2284 spill_stack_slot[from_reg] = stack_slot;
2285 spill_stack_slot_width[from_reg] = total_size;
2288 /* On a big endian machine, the "address" of the slot
2289 is the address of the low part that fits its inherent mode. */
2290 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2291 adjust += (total_size - inherent_size);
2293 /* If we have any adjustment to make, or if the stack slot is the
2294 wrong mode, make a new stack slot. */
2295 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2297 /* Set all of the memory attributes as appropriate for a spill. */
2298 set_mem_attrs_for_spill (x);
2300 /* Save the stack slot for later. */
2301 reg_equiv_memory_loc[i] = x;
2305 /* Mark the slots in regs_ever_live for the hard regs used by
2306 pseudo-reg number REGNO, accessed in MODE. */
2309 mark_home_live_1 (int regno, enum machine_mode mode)
2313 i = reg_renumber[regno];
2316 lim = end_hard_regno (mode, i);
2318 df_set_regs_ever_live(i++, true);
2321 /* Mark the slots in regs_ever_live for the hard regs
2322 used by pseudo-reg number REGNO. */
2325 mark_home_live (int regno)
2327 if (reg_renumber[regno] >= 0)
2328 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2331 /* This function handles the tracking of elimination offsets around branches.
2333 X is a piece of RTL being scanned.
2335 INSN is the insn that it came from, if any.
2337 INITIAL_P is nonzero if we are to set the offset to be the initial
2338 offset and zero if we are setting the offset of the label to be the
2342 set_label_offsets (rtx x, rtx insn, int initial_p)
2344 enum rtx_code code = GET_CODE (x);
2347 struct elim_table *p;
2352 if (LABEL_REF_NONLOCAL_P (x))
2357 /* ... fall through ... */
2360 /* If we know nothing about this label, set the desired offsets. Note
2361 that this sets the offset at a label to be the offset before a label
2362 if we don't know anything about the label. This is not correct for
2363 the label after a BARRIER, but is the best guess we can make. If
2364 we guessed wrong, we will suppress an elimination that might have
2365 been possible had we been able to guess correctly. */
2367 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2369 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2370 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2371 = (initial_p ? reg_eliminate[i].initial_offset
2372 : reg_eliminate[i].offset);
2373 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2376 /* Otherwise, if this is the definition of a label and it is
2377 preceded by a BARRIER, set our offsets to the known offset of
2381 && (tem = prev_nonnote_insn (insn)) != 0
2383 set_offsets_for_label (insn);
2385 /* If neither of the above cases is true, compare each offset
2386 with those previously recorded and suppress any eliminations
2387 where the offsets disagree. */
2389 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2390 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2391 != (initial_p ? reg_eliminate[i].initial_offset
2392 : reg_eliminate[i].offset))
2393 reg_eliminate[i].can_eliminate = 0;
2398 set_label_offsets (PATTERN (insn), insn, initial_p);
2400 /* ... fall through ... */
2404 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2405 to indirectly and hence must have all eliminations at their
2407 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2408 if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2409 set_label_offsets (XEXP (tem, 0), insn, 1);
2415 /* Each of the labels in the parallel or address vector must be
2416 at their initial offsets. We want the first field for PARALLEL
2417 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2419 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2420 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2425 /* We only care about setting PC. If the source is not RETURN,
2426 IF_THEN_ELSE, or a label, disable any eliminations not at
2427 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2428 isn't one of those possibilities. For branches to a label,
2429 call ourselves recursively.
2431 Note that this can disable elimination unnecessarily when we have
2432 a non-local goto since it will look like a non-constant jump to
2433 someplace in the current function. This isn't a significant
2434 problem since such jumps will normally be when all elimination
2435 pairs are back to their initial offsets. */
2437 if (SET_DEST (x) != pc_rtx)
2440 switch (GET_CODE (SET_SRC (x)))
2447 set_label_offsets (SET_SRC (x), insn, initial_p);
2451 tem = XEXP (SET_SRC (x), 1);
2452 if (GET_CODE (tem) == LABEL_REF)
2453 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2454 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2457 tem = XEXP (SET_SRC (x), 2);
2458 if (GET_CODE (tem) == LABEL_REF)
2459 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2460 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2468 /* If we reach here, all eliminations must be at their initial
2469 offset because we are doing a jump to a variable address. */
2470 for (p = reg_eliminate; p < ®_eliminate[NUM_ELIMINABLE_REGS]; p++)
2471 if (p->offset != p->initial_offset)
2472 p->can_eliminate = 0;
2480 /* Called through for_each_rtx, this function examines every reg that occurs
2481 in PX and adjusts the costs for its elimination which are gathered by IRA.
2482 DATA is the insn in which PX occurs. We do not recurse into MEM
2486 note_reg_elim_costly (rtx *px, void *data)
2488 rtx insn = (rtx)data;
2495 && REGNO (x) >= FIRST_PSEUDO_REGISTER
2496 && reg_equiv_init[REGNO (x)]
2497 && reg_equiv_invariant[REGNO (x)])
2499 rtx t = reg_equiv_invariant[REGNO (x)];
2500 rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2501 int cost = rtx_cost (new_rtx, SET, optimize_bb_for_speed_p (elim_bb));
2502 int freq = REG_FREQ_FROM_BB (elim_bb);
2505 ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2510 /* Scan X and replace any eliminable registers (such as fp) with a
2511 replacement (such as sp), plus an offset.
2513 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2514 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2515 MEM, we are allowed to replace a sum of a register and the constant zero
2516 with the register, which we cannot do outside a MEM. In addition, we need
2517 to record the fact that a register is referenced outside a MEM.
2519 If INSN is an insn, it is the insn containing X. If we replace a REG
2520 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2521 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2522 the REG is being modified.
2524 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2525 That's used when we eliminate in expressions stored in notes.
2526 This means, do not set ref_outside_mem even if the reference
2529 If FOR_COSTS is true, we are being called before reload in order to
2530 estimate the costs of keeping registers with an equivalence unallocated.
2532 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2533 replacements done assuming all offsets are at their initial values. If
2534 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2535 encounter, return the actual location so that find_reloads will do
2536 the proper thing. */
2539 eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
2540 bool may_use_invariant, bool for_costs)
2542 enum rtx_code code = GET_CODE (x);
2543 struct elim_table *ep;
2550 if (! current_function_decl)
2573 /* First handle the case where we encounter a bare register that
2574 is eliminable. Replace it with a PLUS. */
2575 if (regno < FIRST_PSEUDO_REGISTER)
2577 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2579 if (ep->from_rtx == x && ep->can_eliminate)
2580 return plus_constant (ep->to_rtx, ep->previous_offset);
2583 else if (reg_renumber && reg_renumber[regno] < 0
2584 && reg_equiv_invariant && reg_equiv_invariant[regno])
2586 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2587 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant[regno]),
2588 mem_mode, insn, true, for_costs);
2589 /* There exists at least one use of REGNO that cannot be
2590 eliminated. Prevent the defining insn from being deleted. */
2591 reg_equiv_init[regno] = NULL_RTX;
2593 alter_reg (regno, -1, true);
2597 /* You might think handling MINUS in a manner similar to PLUS is a
2598 good idea. It is not. It has been tried multiple times and every
2599 time the change has had to have been reverted.
2601 Other parts of reload know a PLUS is special (gen_reload for example)
2602 and require special code to handle code a reloaded PLUS operand.
2604 Also consider backends where the flags register is clobbered by a
2605 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2606 lea instruction comes to mind). If we try to reload a MINUS, we
2607 may kill the flags register that was holding a useful value.
2609 So, please before trying to handle MINUS, consider reload as a
2610 whole instead of this little section as well as the backend issues. */
2612 /* If this is the sum of an eliminable register and a constant, rework
2614 if (REG_P (XEXP (x, 0))
2615 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2616 && CONSTANT_P (XEXP (x, 1)))
2618 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2620 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2622 /* The only time we want to replace a PLUS with a REG (this
2623 occurs when the constant operand of the PLUS is the negative
2624 of the offset) is when we are inside a MEM. We won't want
2625 to do so at other times because that would change the
2626 structure of the insn in a way that reload can't handle.
2627 We special-case the commonest situation in
2628 eliminate_regs_in_insn, so just replace a PLUS with a
2629 PLUS here, unless inside a MEM. */
2630 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2631 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2634 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2635 plus_constant (XEXP (x, 1),
2636 ep->previous_offset));
2639 /* If the register is not eliminable, we are done since the other
2640 operand is a constant. */
2644 /* If this is part of an address, we want to bring any constant to the
2645 outermost PLUS. We will do this by doing register replacement in
2646 our operands and seeing if a constant shows up in one of them.
2648 Note that there is no risk of modifying the structure of the insn,
2649 since we only get called for its operands, thus we are either
2650 modifying the address inside a MEM, or something like an address
2651 operand of a load-address insn. */
2654 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2656 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2659 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2661 /* If one side is a PLUS and the other side is a pseudo that
2662 didn't get a hard register but has a reg_equiv_constant,
2663 we must replace the constant here since it may no longer
2664 be in the position of any operand. */
2665 if (GET_CODE (new0) == PLUS && REG_P (new1)
2666 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2667 && reg_renumber[REGNO (new1)] < 0
2668 && reg_equiv_constant != 0
2669 && reg_equiv_constant[REGNO (new1)] != 0)
2670 new1 = reg_equiv_constant[REGNO (new1)];
2671 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2672 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2673 && reg_renumber[REGNO (new0)] < 0
2674 && reg_equiv_constant[REGNO (new0)] != 0)
2675 new0 = reg_equiv_constant[REGNO (new0)];
2677 new_rtx = form_sum (GET_MODE (x), new0, new1);
2679 /* As above, if we are not inside a MEM we do not want to
2680 turn a PLUS into something else. We might try to do so here
2681 for an addition of 0 if we aren't optimizing. */
2682 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2683 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2691 /* If this is the product of an eliminable register and a
2692 constant, apply the distribute law and move the constant out
2693 so that we have (plus (mult ..) ..). This is needed in order
2694 to keep load-address insns valid. This case is pathological.
2695 We ignore the possibility of overflow here. */
2696 if (REG_P (XEXP (x, 0))
2697 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2698 && CONST_INT_P (XEXP (x, 1)))
2699 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2701 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2704 /* Refs inside notes or in DEBUG_INSNs don't count for
2706 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2707 || GET_CODE (insn) == INSN_LIST
2708 || DEBUG_INSN_P (insn))))
2709 ep->ref_outside_mem = 1;
2712 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2713 ep->previous_offset * INTVAL (XEXP (x, 1)));
2716 /* ... fall through ... */
2720 /* See comments before PLUS about handling MINUS. */
2722 case DIV: case UDIV:
2723 case MOD: case UMOD:
2724 case AND: case IOR: case XOR:
2725 case ROTATERT: case ROTATE:
2726 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2728 case GE: case GT: case GEU: case GTU:
2729 case LE: case LT: case LEU: case LTU:
2731 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2733 rtx new1 = XEXP (x, 1)
2734 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2737 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2738 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2743 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2746 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2748 if (new_rtx != XEXP (x, 0))
2750 /* If this is a REG_DEAD note, it is not valid anymore.
2751 Using the eliminated version could result in creating a
2752 REG_DEAD note for the stack or frame pointer. */
2753 if (REG_NOTE_KIND (x) == REG_DEAD)
2755 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2759 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2763 /* ... fall through ... */
2766 /* Now do eliminations in the rest of the chain. If this was
2767 an EXPR_LIST, this might result in allocating more memory than is
2768 strictly needed, but it simplifies the code. */
2771 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2773 if (new_rtx != XEXP (x, 1))
2775 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2783 /* We do not support elimination of a register that is modified.
2784 elimination_effects has already make sure that this does not
2790 /* We do not support elimination of a register that is modified.
2791 elimination_effects has already make sure that this does not
2792 happen. The only remaining case we need to consider here is
2793 that the increment value may be an eliminable register. */
2794 if (GET_CODE (XEXP (x, 1)) == PLUS
2795 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2797 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2798 insn, true, for_costs);
2800 if (new_rtx != XEXP (XEXP (x, 1), 1))
2801 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2802 gen_rtx_PLUS (GET_MODE (x),
2803 XEXP (x, 0), new_rtx));
2807 case STRICT_LOW_PART:
2809 case SIGN_EXTEND: case ZERO_EXTEND:
2810 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2811 case FLOAT: case FIX:
2812 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2821 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2823 if (new_rtx != XEXP (x, 0))
2824 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2828 /* Similar to above processing, but preserve SUBREG_BYTE.
2829 Convert (subreg (mem)) to (mem) if not paradoxical.
2830 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2831 pseudo didn't get a hard reg, we must replace this with the
2832 eliminated version of the memory location because push_reload
2833 may do the replacement in certain circumstances. */
2834 if (REG_P (SUBREG_REG (x))
2835 && (GET_MODE_SIZE (GET_MODE (x))
2836 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2837 && reg_equiv_memory_loc != 0
2838 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2840 new_rtx = SUBREG_REG (x);
2843 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false,
2846 if (new_rtx != SUBREG_REG (x))
2848 int x_size = GET_MODE_SIZE (GET_MODE (x));
2849 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2852 && ((x_size < new_size
2853 #ifdef WORD_REGISTER_OPERATIONS
2854 /* On these machines, combine can create rtl of the form
2855 (set (subreg:m1 (reg:m2 R) 0) ...)
2856 where m1 < m2, and expects something interesting to
2857 happen to the entire word. Moreover, it will use the
2858 (reg:m2 R) later, expecting all bits to be preserved.
2859 So if the number of words is the same, preserve the
2860 subreg so that push_reload can see it. */
2861 && ! ((x_size - 1) / UNITS_PER_WORD
2862 == (new_size -1 ) / UNITS_PER_WORD)
2865 || x_size == new_size)
2867 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2869 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2875 /* Our only special processing is to pass the mode of the MEM to our
2876 recursive call and copy the flags. While we are here, handle this
2877 case more efficiently. */
2879 new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2882 && memory_address_p (GET_MODE (x), XEXP (x, 0))
2883 && !memory_address_p (GET_MODE (x), new_rtx))
2884 for_each_rtx (&XEXP (x, 0), note_reg_elim_costly, insn);
2886 return replace_equiv_address_nv (x, new_rtx);
2889 /* Handle insn_list USE that a call to a pure function may generate. */
2890 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2892 if (new_rtx != XEXP (x, 0))
2893 return gen_rtx_USE (GET_MODE (x), new_rtx);
2897 gcc_assert (insn && DEBUG_INSN_P (insn));
2908 /* Process each of our operands recursively. If any have changed, make a
2910 fmt = GET_RTX_FORMAT (code);
2911 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2915 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2917 if (new_rtx != XEXP (x, i) && ! copied)
2919 x = shallow_copy_rtx (x);
2922 XEXP (x, i) = new_rtx;
2924 else if (*fmt == 'E')
2927 for (j = 0; j < XVECLEN (x, i); j++)
2929 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2931 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2933 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2937 x = shallow_copy_rtx (x);
2940 XVEC (x, i) = new_v;
2943 XVECEXP (x, i, j) = new_rtx;
2952 eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
2954 return eliminate_regs_1 (x, mem_mode, insn, false, false);
2957 /* Scan rtx X for modifications of elimination target registers. Update
2958 the table of eliminables to reflect the changed state. MEM_MODE is
2959 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2962 elimination_effects (rtx x, enum machine_mode mem_mode)
2964 enum rtx_code code = GET_CODE (x);
2965 struct elim_table *ep;
2990 /* First handle the case where we encounter a bare register that
2991 is eliminable. Replace it with a PLUS. */
2992 if (regno < FIRST_PSEUDO_REGISTER)
2994 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2996 if (ep->from_rtx == x && ep->can_eliminate)
2999 ep->ref_outside_mem = 1;
3004 else if (reg_renumber[regno] < 0 && reg_equiv_constant
3005 && reg_equiv_constant[regno]
3006 && ! function_invariant_p (reg_equiv_constant[regno]))
3007 elimination_effects (reg_equiv_constant[regno], mem_mode);
3016 /* If we modify the source of an elimination rule, disable it. */
3017 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3018 if (ep->from_rtx == XEXP (x, 0))
3019 ep->can_eliminate = 0;
3021 /* If we modify the target of an elimination rule by adding a constant,
3022 update its offset. If we modify the target in any other way, we'll
3023 have to disable the rule as well. */
3024 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3025 if (ep->to_rtx == XEXP (x, 0))
3027 int size = GET_MODE_SIZE (mem_mode);
3029 /* If more bytes than MEM_MODE are pushed, account for them. */
3030 #ifdef PUSH_ROUNDING
3031 if (ep->to_rtx == stack_pointer_rtx)
3032 size = PUSH_ROUNDING (size);
3034 if (code == PRE_DEC || code == POST_DEC)
3036 else if (code == PRE_INC || code == POST_INC)
3038 else if (code == PRE_MODIFY || code == POST_MODIFY)
3040 if (GET_CODE (XEXP (x, 1)) == PLUS
3041 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3042 && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3043 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3045 ep->can_eliminate = 0;
3049 /* These two aren't unary operators. */
3050 if (code == POST_MODIFY || code == PRE_MODIFY)
3053 /* Fall through to generic unary operation case. */
3054 case STRICT_LOW_PART:
3056 case SIGN_EXTEND: case ZERO_EXTEND:
3057 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3058 case FLOAT: case FIX:
3059 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3068 elimination_effects (XEXP (x, 0), mem_mode);
3072 if (REG_P (SUBREG_REG (x))
3073 && (GET_MODE_SIZE (GET_MODE (x))
3074 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3075 && reg_equiv_memory_loc != 0
3076 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3079 elimination_effects (SUBREG_REG (x), mem_mode);
3083 /* If using a register that is the source of an eliminate we still
3084 think can be performed, note it cannot be performed since we don't
3085 know how this register is used. */
3086 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3087 if (ep->from_rtx == XEXP (x, 0))
3088 ep->can_eliminate = 0;
3090 elimination_effects (XEXP (x, 0), mem_mode);
3094 /* If clobbering a register that is the replacement register for an
3095 elimination we still think can be performed, note that it cannot
3096 be performed. Otherwise, we need not be concerned about it. */
3097 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3098 if (ep->to_rtx == XEXP (x, 0))
3099 ep->can_eliminate = 0;
3101 elimination_effects (XEXP (x, 0), mem_mode);
3105 /* Check for setting a register that we know about. */
3106 if (REG_P (SET_DEST (x)))
3108 /* See if this is setting the replacement register for an
3111 If DEST is the hard frame pointer, we do nothing because we
3112 assume that all assignments to the frame pointer are for
3113 non-local gotos and are being done at a time when they are valid
3114 and do not disturb anything else. Some machines want to
3115 eliminate a fake argument pointer (or even a fake frame pointer)
3116 with either the real frame or the stack pointer. Assignments to
3117 the hard frame pointer must not prevent this elimination. */
3119 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3121 if (ep->to_rtx == SET_DEST (x)
3122 && SET_DEST (x) != hard_frame_pointer_rtx)
3124 /* If it is being incremented, adjust the offset. Otherwise,
3125 this elimination can't be done. */
3126 rtx src = SET_SRC (x);
3128 if (GET_CODE (src) == PLUS
3129 && XEXP (src, 0) == SET_DEST (x)
3130 && CONST_INT_P (XEXP (src, 1)))
3131 ep->offset -= INTVAL (XEXP (src, 1));
3133 ep->can_eliminate = 0;
3137 elimination_effects (SET_DEST (x), VOIDmode);
3138 elimination_effects (SET_SRC (x), VOIDmode);
3142 /* Our only special processing is to pass the mode of the MEM to our
3144 elimination_effects (XEXP (x, 0), GET_MODE (x));
3151 fmt = GET_RTX_FORMAT (code);
3152 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3155 elimination_effects (XEXP (x, i), mem_mode);
3156 else if (*fmt == 'E')
3157 for (j = 0; j < XVECLEN (x, i); j++)
3158 elimination_effects (XVECEXP (x, i, j), mem_mode);
3162 /* Descend through rtx X and verify that no references to eliminable registers
3163 remain. If any do remain, mark the involved register as not
3167 check_eliminable_occurrences (rtx x)
3176 code = GET_CODE (x);
3178 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3180 struct elim_table *ep;
3182 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3183 if (ep->from_rtx == x)
3184 ep->can_eliminate = 0;
3188 fmt = GET_RTX_FORMAT (code);
3189 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3192 check_eliminable_occurrences (XEXP (x, i));
3193 else if (*fmt == 'E')
3196 for (j = 0; j < XVECLEN (x, i); j++)
3197 check_eliminable_occurrences (XVECEXP (x, i, j));
3202 /* Scan INSN and eliminate all eliminable registers in it.
3204 If REPLACE is nonzero, do the replacement destructively. Also
3205 delete the insn as dead it if it is setting an eliminable register.
3207 If REPLACE is zero, do all our allocations in reload_obstack.
3209 If no eliminations were done and this insn doesn't require any elimination
3210 processing (these are not identical conditions: it might be updating sp,
3211 but not referencing fp; this needs to be seen during reload_as_needed so
3212 that the offset between fp and sp can be taken into consideration), zero
3213 is returned. Otherwise, 1 is returned. */
3216 eliminate_regs_in_insn (rtx insn, int replace)
3218 int icode = recog_memoized (insn);
3219 rtx old_body = PATTERN (insn);
3220 int insn_is_asm = asm_noperands (old_body) >= 0;
3221 rtx old_set = single_set (insn);
3225 rtx substed_operand[MAX_RECOG_OPERANDS];
3226 rtx orig_operand[MAX_RECOG_OPERANDS];
3227 struct elim_table *ep;
3228 rtx plus_src, plus_cst_src;
3230 if (! insn_is_asm && icode < 0)
3232 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3233 || GET_CODE (PATTERN (insn)) == CLOBBER
3234 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3235 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3236 || GET_CODE (PATTERN (insn)) == ASM_INPUT
3237 || DEBUG_INSN_P (insn));
3238 if (DEBUG_INSN_P (insn))
3239 INSN_VAR_LOCATION_LOC (insn)
3240 = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3244 if (old_set != 0 && REG_P (SET_DEST (old_set))
3245 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3247 /* Check for setting an eliminable register. */
3248 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3249 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3251 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3252 /* If this is setting the frame pointer register to the
3253 hardware frame pointer register and this is an elimination
3254 that will be done (tested above), this insn is really
3255 adjusting the frame pointer downward to compensate for
3256 the adjustment done before a nonlocal goto. */
3257 if (ep->from == FRAME_POINTER_REGNUM
3258 && ep->to == HARD_FRAME_POINTER_REGNUM)
3260 rtx base = SET_SRC (old_set);
3261 rtx base_insn = insn;
3262 HOST_WIDE_INT offset = 0;
3264 while (base != ep->to_rtx)
3266 rtx prev_insn, prev_set;
3268 if (GET_CODE (base) == PLUS
3269 && CONST_INT_P (XEXP (base, 1)))
3271 offset += INTVAL (XEXP (base, 1));
3272 base = XEXP (base, 0);
3274 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
3275 && (prev_set = single_set (prev_insn)) != 0
3276 && rtx_equal_p (SET_DEST (prev_set), base))
3278 base = SET_SRC (prev_set);
3279 base_insn = prev_insn;
3285 if (base == ep->to_rtx)
3288 = plus_constant (ep->to_rtx, offset - ep->offset);
3290 new_body = old_body;
3293 new_body = copy_insn (old_body);
3294 if (REG_NOTES (insn))
3295 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3297 PATTERN (insn) = new_body;
3298 old_set = single_set (insn);
3300 /* First see if this insn remains valid when we
3301 make the change. If not, keep the INSN_CODE
3302 the same and let reload fit it up. */
3303 validate_change (insn, &SET_SRC (old_set), src, 1);
3304 validate_change (insn, &SET_DEST (old_set),
3306 if (! apply_change_group ())
3308 SET_SRC (old_set) = src;
3309 SET_DEST (old_set) = ep->to_rtx;
3318 /* In this case this insn isn't serving a useful purpose. We
3319 will delete it in reload_as_needed once we know that this
3320 elimination is, in fact, being done.
3322 If REPLACE isn't set, we can't delete this insn, but needn't
3323 process it since it won't be used unless something changes. */
3326 delete_dead_insn (insn);
3334 /* We allow one special case which happens to work on all machines we
3335 currently support: a single set with the source or a REG_EQUAL
3336 note being a PLUS of an eliminable register and a constant. */
3337 plus_src = plus_cst_src = 0;
3338 if (old_set && REG_P (SET_DEST (old_set)))
3340 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3341 plus_src = SET_SRC (old_set);
3342 /* First see if the source is of the form (plus (...) CST). */
3344 && CONST_INT_P (XEXP (plus_src, 1)))
3345 plus_cst_src = plus_src;
3346 else if (REG_P (SET_SRC (old_set))
3349 /* Otherwise, see if we have a REG_EQUAL note of the form
3350 (plus (...) CST). */
3352 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3354 if ((REG_NOTE_KIND (links) == REG_EQUAL
3355 || REG_NOTE_KIND (links) == REG_EQUIV)
3356 && GET_CODE (XEXP (links, 0)) == PLUS
3357 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3359 plus_cst_src = XEXP (links, 0);
3365 /* Check that the first operand of the PLUS is a hard reg or
3366 the lowpart subreg of one. */
3369 rtx reg = XEXP (plus_cst_src, 0);
3370 if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3371 reg = SUBREG_REG (reg);
3373 if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3379 rtx reg = XEXP (plus_cst_src, 0);
3380 HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));
3382 if (GET_CODE (reg) == SUBREG)
3383 reg = SUBREG_REG (reg);
3385 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3386 if (ep->from_rtx == reg && ep->can_eliminate)
3388 rtx to_rtx = ep->to_rtx;
3389 offset += ep->offset;
3390 offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3392 if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3393 to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3395 /* If we have a nonzero offset, and the source is already
3396 a simple REG, the following transformation would
3397 increase the cost of the insn by replacing a simple REG
3398 with (plus (reg sp) CST). So try only when we already
3399 had a PLUS before. */
3400 if (offset == 0 || plus_src)
3402 rtx new_src = plus_constant (to_rtx, offset);
3404 new_body = old_body;
3407 new_body = copy_insn (old_body);
3408 if (REG_NOTES (insn))
3409 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3411 PATTERN (insn) = new_body;
3412 old_set = single_set (insn);
3414 /* First see if this insn remains valid when we make the
3415 change. If not, try to replace the whole pattern with
3416 a simple set (this may help if the original insn was a
3417 PARALLEL that was only recognized as single_set due to
3418 REG_UNUSED notes). If this isn't valid either, keep
3419 the INSN_CODE the same and let reload fix it up. */
3420 if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3422 rtx new_pat = gen_rtx_SET (VOIDmode,
3423 SET_DEST (old_set), new_src);
3425 if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3426 SET_SRC (old_set) = new_src;
3433 /* This can't have an effect on elimination offsets, so skip right
3439 /* Determine the effects of this insn on elimination offsets. */
3440 elimination_effects (old_body, VOIDmode);
3442 /* Eliminate all eliminable registers occurring in operands that
3443 can be handled by reload. */
3444 extract_insn (insn);
3445 for (i = 0; i < recog_data.n_operands; i++)
3447 orig_operand[i] = recog_data.operand[i];
3448 substed_operand[i] = recog_data.operand[i];
3450 /* For an asm statement, every operand is eliminable. */
3451 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3453 bool is_set_src, in_plus;
3455 /* Check for setting a register that we know about. */
3456 if (recog_data.operand_type[i] != OP_IN
3457 && REG_P (orig_operand[i]))
3459 /* If we are assigning to a register that can be eliminated, it
3460 must be as part of a PARALLEL, since the code above handles
3461 single SETs. We must indicate that we can no longer
3462 eliminate this reg. */
3463 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3465 if (ep->from_rtx == orig_operand[i])
3466 ep->can_eliminate = 0;
3469 /* Companion to the above plus substitution, we can allow
3470 invariants as the source of a plain move. */
3473 && recog_data.operand_loc[i] == &SET_SRC (old_set))
3477 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3478 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3482 = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3483 replace ? insn : NULL_RTX,
3484 is_set_src || in_plus, false);
3485 if (substed_operand[i] != orig_operand[i])
3487 /* Terminate the search in check_eliminable_occurrences at
3489 *recog_data.operand_loc[i] = 0;
3491 /* If an output operand changed from a REG to a MEM and INSN is an
3492 insn, write a CLOBBER insn. */
3493 if (recog_data.operand_type[i] != OP_IN
3494 && REG_P (orig_operand[i])
3495 && MEM_P (substed_operand[i])
3497 emit_insn_after (gen_clobber (orig_operand[i]), insn);
3501 for (i = 0; i < recog_data.n_dups; i++)
3502 *recog_data.dup_loc[i]
3503 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3505 /* If any eliminable remain, they aren't eliminable anymore. */
3506 check_eliminable_occurrences (old_body);
3508 /* Substitute the operands; the new values are in the substed_operand
3510 for (i = 0; i < recog_data.n_operands; i++)
3511 *recog_data.operand_loc[i] = substed_operand[i];
3512 for (i = 0; i < recog_data.n_dups; i++)
3513 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3515 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3516 re-recognize the insn. We do this in case we had a simple addition
3517 but now can do this as a load-address. This saves an insn in this
3519 If re-recognition fails, the old insn code number will still be used,
3520 and some register operands may have changed into PLUS expressions.
3521 These will be handled by find_reloads by loading them into a register
3526 /* If we aren't replacing things permanently and we changed something,
3527 make another copy to ensure that all the RTL is new. Otherwise
3528 things can go wrong if find_reload swaps commutative operands
3529 and one is inside RTL that has been copied while the other is not. */
3530 new_body = old_body;
3533 new_body = copy_insn (old_body);
3534 if (REG_NOTES (insn))
3535 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3537 PATTERN (insn) = new_body;
3539 /* If we had a move insn but now we don't, rerecognize it. This will
3540 cause spurious re-recognition if the old move had a PARALLEL since
3541 the new one still will, but we can't call single_set without
3542 having put NEW_BODY into the insn and the re-recognition won't
3543 hurt in this rare case. */
3544 /* ??? Why this huge if statement - why don't we just rerecognize the
3548 && ((REG_P (SET_SRC (old_set))
3549 && (GET_CODE (new_body) != SET
3550 || !REG_P (SET_SRC (new_body))))
3551 /* If this was a load from or store to memory, compare
3552 the MEM in recog_data.operand to the one in the insn.
3553 If they are not equal, then rerecognize the insn. */
3555 && ((MEM_P (SET_SRC (old_set))
3556 && SET_SRC (old_set) != recog_data.operand[1])
3557 || (MEM_P (SET_DEST (old_set))
3558 && SET_DEST (old_set) != recog_data.operand[0])))
3559 /* If this was an add insn before, rerecognize. */
3560 || GET_CODE (SET_SRC (old_set)) == PLUS))
3562 int new_icode = recog (PATTERN (insn), insn, 0);
3564 INSN_CODE (insn) = new_icode;
3568 /* Restore the old body. If there were any changes to it, we made a copy
3569 of it while the changes were still in place, so we'll correctly return
3570 a modified insn below. */
3573 /* Restore the old body. */
3574 for (i = 0; i < recog_data.n_operands; i++)
3575 /* Restoring a top-level match_parallel would clobber the new_body
3576 we installed in the insn. */
3577 if (recog_data.operand_loc[i] != &PATTERN (insn))
3578 *recog_data.operand_loc[i] = orig_operand[i];
3579 for (i = 0; i < recog_data.n_dups; i++)
3580 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3583 /* Update all elimination pairs to reflect the status after the current
3584 insn. The changes we make were determined by the earlier call to
3585 elimination_effects.
3587 We also detect cases where register elimination cannot be done,
3588 namely, if a register would be both changed and referenced outside a MEM
3589 in the resulting insn since such an insn is often undefined and, even if
3590 not, we cannot know what meaning will be given to it. Note that it is
3591 valid to have a register used in an address in an insn that changes it
3592 (presumably with a pre- or post-increment or decrement).
3594 If anything changes, return nonzero. */
3596 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3598 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3599 ep->can_eliminate = 0;
3601 ep->ref_outside_mem = 0;
3603 if (ep->previous_offset != ep->offset)
3608 /* If we changed something, perform elimination in REG_NOTES. This is
3609 needed even when REPLACE is zero because a REG_DEAD note might refer
3610 to a register that we eliminate and could cause a different number
3611 of spill registers to be needed in the final reload pass than in
3613 if (val && REG_NOTES (insn) != 0)
3615 = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3621 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3622 register allocator. INSN is the instruction we need to examine, we perform
3623 eliminations in its operands and record cases where eliminating a reg with
3624 an invariant equivalence would add extra cost. */
3627 elimination_costs_in_insn (rtx insn)
3629 int icode = recog_memoized (insn);
3630 rtx old_body = PATTERN (insn);
3631 int insn_is_asm = asm_noperands (old_body) >= 0;
3632 rtx old_set = single_set (insn);
3634 rtx orig_operand[MAX_RECOG_OPERANDS];
3635 rtx orig_dup[MAX_RECOG_OPERANDS];
3636 struct elim_table *ep;
3637 rtx plus_src, plus_cst_src;
3640 if (! insn_is_asm && icode < 0)
3642 gcc_assert (GET_CODE (PATTERN (insn)) == USE
3643 || GET_CODE (PATTERN (insn)) == CLOBBER
3644 || GET_CODE (PATTERN (insn)) == ADDR_VEC
3645 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
3646 || GET_CODE (PATTERN (insn)) == ASM_INPUT
3647 || DEBUG_INSN_P (insn));
3651 if (old_set != 0 && REG_P (SET_DEST (old_set))
3652 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3654 /* Check for setting an eliminable register. */
3655 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3656 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3660 /* We allow one special case which happens to work on all machines we
3661 currently support: a single set with the source or a REG_EQUAL
3662 note being a PLUS of an eliminable register and a constant. */
3663 plus_src = plus_cst_src = 0;
3665 if (old_set && REG_P (SET_DEST (old_set)))
3668 if (GET_CODE (SET_SRC (old_set)) == PLUS)
3669 plus_src = SET_SRC (old_set);
3670 /* First see if the source is of the form (plus (...) CST). */
3672 && CONST_INT_P (XEXP (plus_src, 1)))
3673 plus_cst_src = plus_src;
3674 else if (REG_P (SET_SRC (old_set))
3677 /* Otherwise, see if we have a REG_EQUAL note of the form
3678 (plus (...) CST). */
3680 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3682 if ((REG_NOTE_KIND (links) == REG_EQUAL
3683 || REG_NOTE_KIND (links) == REG_EQUIV)
3684 && GET_CODE (XEXP (links, 0)) == PLUS
3685 && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3687 plus_cst_src = XEXP (links, 0);
3694 /* Determine the effects of this insn on elimination offsets. */
3695 elimination_effects (old_body, VOIDmode);
3697 /* Eliminate all eliminable registers occurring in operands that
3698 can be handled by reload. */
3699 extract_insn (insn);
3700 for (i = 0; i < recog_data.n_dups; i++)
3701 orig_dup[i] = *recog_data.dup_loc[i];
3703 for (i = 0; i < recog_data.n_operands; i++)
3705 orig_operand[i] = recog_data.operand[i];
3707 /* For an asm statement, every operand is eliminable. */
3708 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3710 bool is_set_src, in_plus;
3712 /* Check for setting a register that we know about. */
3713 if (recog_data.operand_type[i] != OP_IN
3714 && REG_P (orig_operand[i]))
3716 /* If we are assigning to a register that can be eliminated, it
3717 must be as part of a PARALLEL, since the code above handles
3718 single SETs. We must indicate that we can no longer
3719 eliminate this reg. */
3720 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3722 if (ep->from_rtx == orig_operand[i])
3723 ep->can_eliminate = 0;
3726 /* Companion to the above plus substitution, we can allow
3727 invariants as the source of a plain move. */
3729 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3731 if (is_set_src && !sets_reg_p)
3732 note_reg_elim_costly (&SET_SRC (old_set), insn);
3734 if (plus_src && sets_reg_p
3735 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3736 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3739 eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3741 is_set_src || in_plus, true);
3742 /* Terminate the search in check_eliminable_occurrences at
3744 *recog_data.operand_loc[i] = 0;
3748 for (i = 0; i < recog_data.n_dups; i++)
3749 *recog_data.dup_loc[i]
3750 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3752 /* If any eliminable remain, they aren't eliminable anymore. */
3753 check_eliminable_occurrences (old_body);
3755 /* Restore the old body. */
3756 for (i = 0; i < recog_data.n_operands; i++)
3757 *recog_data.operand_loc[i] = orig_operand[i];
3758 for (i = 0; i < recog_data.n_dups; i++)
3759 *recog_data.dup_loc[i] = orig_dup[i];
3761 /* Update all elimination pairs to reflect the status after the current
3762 insn. The changes we make were determined by the earlier call to
3763 elimination_effects. */
3765 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3767 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3768 ep->can_eliminate = 0;
3770 ep->ref_outside_mem = 0;
3776 /* Loop through all elimination pairs.
3777 Recalculate the number not at initial offset.
3779 Compute the maximum offset (minimum offset if the stack does not
3780 grow downward) for each elimination pair. */
3783 update_eliminable_offsets (void)
3785 struct elim_table *ep;
3787 num_not_at_initial_offset = 0;
3788 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3790 ep->previous_offset = ep->offset;
3791 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3792 num_not_at_initial_offset++;
3796 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3797 replacement we currently believe is valid, mark it as not eliminable if X
3798 modifies DEST in any way other than by adding a constant integer to it.
3800 If DEST is the frame pointer, we do nothing because we assume that
3801 all assignments to the hard frame pointer are nonlocal gotos and are being
3802 done at a time when they are valid and do not disturb anything else.
3803 Some machines want to eliminate a fake argument pointer with either the
3804 frame or stack pointer. Assignments to the hard frame pointer must not
3805 prevent this elimination.
3807 Called via note_stores from reload before starting its passes to scan
3808 the insns of the function. */
3811 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3815 /* A SUBREG of a hard register here is just changing its mode. We should
3816 not see a SUBREG of an eliminable hard register, but check just in
3818 if (GET_CODE (dest) == SUBREG)
3819 dest = SUBREG_REG (dest);
3821 if (dest == hard_frame_pointer_rtx)
3824 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3825 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3826 && (GET_CODE (x) != SET
3827 || GET_CODE (SET_SRC (x)) != PLUS
3828 || XEXP (SET_SRC (x), 0) != dest
3829 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3831 reg_eliminate[i].can_eliminate_previous
3832 = reg_eliminate[i].can_eliminate = 0;
3837 /* Verify that the initial elimination offsets did not change since the
3838 last call to set_initial_elim_offsets. This is used to catch cases
3839 where something illegal happened during reload_as_needed that could
3840 cause incorrect code to be generated if we did not check for it. */
3843 verify_initial_elim_offsets (void)
3847 if (!num_eliminable)
3850 #ifdef ELIMINABLE_REGS
3852 struct elim_table *ep;
3854 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3856 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3857 if (t != ep->initial_offset)
3862 INITIAL_FRAME_POINTER_OFFSET (t);
3863 if (t != reg_eliminate[0].initial_offset)
3870 /* Reset all offsets on eliminable registers to their initial values. */
3873 set_initial_elim_offsets (void)
3875 struct elim_table *ep = reg_eliminate;
3877 #ifdef ELIMINABLE_REGS
3878 for (; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3880 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3881 ep->previous_offset = ep->offset = ep->initial_offset;
3884 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3885 ep->previous_offset = ep->offset = ep->initial_offset;
3888 num_not_at_initial_offset = 0;
3891 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3894 set_initial_eh_label_offset (rtx label)
3896 set_label_offsets (label, NULL_RTX, 1);
3899 /* Initialize the known label offsets.
3900 Set a known offset for each forced label to be at the initial offset
3901 of each elimination. We do this because we assume that all
3902 computed jumps occur from a location where each elimination is
3903 at its initial offset.
3904 For all other labels, show that we don't know the offsets. */
3907 set_initial_label_offsets (void)
3910 memset (offsets_known_at, 0, num_labels);
3912 for (x = forced_labels; x; x = XEXP (x, 1))
3914 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3916 for_each_eh_label (set_initial_eh_label_offset);
3919 /* Set all elimination offsets to the known values for the code label given
3923 set_offsets_for_label (rtx insn)
3926 int label_nr = CODE_LABEL_NUMBER (insn);
3927 struct elim_table *ep;
3929 num_not_at_initial_offset = 0;
3930 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3932 ep->offset = ep->previous_offset
3933 = offsets_at[label_nr - first_label_num][i];
3934 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3935 num_not_at_initial_offset++;
3939 /* See if anything that happened changes which eliminations are valid.
3940 For example, on the SPARC, whether or not the frame pointer can
3941 be eliminated can depend on what registers have been used. We need
3942 not check some conditions again (such as flag_omit_frame_pointer)
3943 since they can't have changed. */
3946 update_eliminables (HARD_REG_SET *pset)
3948 int previous_frame_pointer_needed = frame_pointer_needed;
3949 struct elim_table *ep;
3951 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3952 if ((ep->from == HARD_FRAME_POINTER_REGNUM
3953 && targetm.frame_pointer_required ())
3954 #ifdef ELIMINABLE_REGS
3955 || ! targetm.can_eliminate (ep->from, ep->to)
3958 ep->can_eliminate = 0;
3960 /* Look for the case where we have discovered that we can't replace
3961 register A with register B and that means that we will now be
3962 trying to replace register A with register C. This means we can
3963 no longer replace register C with register B and we need to disable
3964 such an elimination, if it exists. This occurs often with A == ap,
3965 B == sp, and C == fp. */
3967 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3969 struct elim_table *op;
3972 if (! ep->can_eliminate && ep->can_eliminate_previous)
3974 /* Find the current elimination for ep->from, if there is a
3976 for (op = reg_eliminate;
3977 op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
3978 if (op->from == ep->from && op->can_eliminate)
3984 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3986 for (op = reg_eliminate;
3987 op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
3988 if (op->from == new_to && op->to == ep->to)
3989 op->can_eliminate = 0;
3993 /* See if any registers that we thought we could eliminate the previous
3994 time are no longer eliminable. If so, something has changed and we
3995 must spill the register. Also, recompute the number of eliminable
3996 registers and see if the frame pointer is needed; it is if there is
3997 no elimination of the frame pointer that we can perform. */
3999 frame_pointer_needed = 1;
4000 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4002 if (ep->can_eliminate
4003 && ep->from == FRAME_POINTER_REGNUM
4004 && ep->to != HARD_FRAME_POINTER_REGNUM
4005 && (! SUPPORTS_STACK_ALIGNMENT
4006 || ! crtl->stack_realign_needed))
4007 frame_pointer_needed = 0;
4009 if (! ep->can_eliminate && ep->can_eliminate_previous)
4011 ep->can_eliminate_previous = 0;
4012 SET_HARD_REG_BIT (*pset, ep->from);
4017 /* If we didn't need a frame pointer last time, but we do now, spill
4018 the hard frame pointer. */
4019 if (frame_pointer_needed && ! previous_frame_pointer_needed)
4020 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
4023 /* Return true if X is used as the target register of an elimination. */
4026 elimination_target_reg_p (rtx x)
4028 struct elim_table *ep;
4030 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4031 if (ep->to_rtx == x && ep->can_eliminate)
4037 /* Initialize the table of registers to eliminate.
4038 Pre-condition: global flag frame_pointer_needed has been set before
4039 calling this function. */
4042 init_elim_table (void)
4044 struct elim_table *ep;
4045 #ifdef ELIMINABLE_REGS
4046 const struct elim_table_1 *ep1;
4050 reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4054 #ifdef ELIMINABLE_REGS
4055 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4056 ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4058 ep->from = ep1->from;
4060 ep->can_eliminate = ep->can_eliminate_previous
4061 = (targetm.can_eliminate (ep->from, ep->to)
4062 && ! (ep->to == STACK_POINTER_REGNUM
4063 && frame_pointer_needed
4064 && (! SUPPORTS_STACK_ALIGNMENT
4065 || ! stack_realign_fp)));
4068 reg_eliminate[0].from = reg_eliminate_1[0].from;
4069 reg_eliminate[0].to = reg_eliminate_1[0].to;
4070 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
4071 = ! frame_pointer_needed;
4074 /* Count the number of eliminable registers and build the FROM and TO
4075 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
4076 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4077 We depend on this. */
4078 for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4080 num_eliminable += ep->can_eliminate;
4081 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4082 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4086 /* Find all the pseudo registers that didn't get hard regs
4087 but do have known equivalent constants or memory slots.
4088 These include parameters (known equivalent to parameter slots)
4089 and cse'd or loop-moved constant memory addresses.
4091 Record constant equivalents in reg_equiv_constant
4092 so they will be substituted by find_reloads.
4093 Record memory equivalents in reg_mem_equiv so they can
4094 be substituted eventually by altering the REG-rtx's. */
4097 init_eliminable_invariants (rtx first, bool do_subregs)
4102 reg_equiv_constant = XCNEWVEC (rtx, max_regno);
4103 reg_equiv_invariant = XCNEWVEC (rtx, max_regno);
4104 reg_equiv_mem = XCNEWVEC (rtx, max_regno);
4105 reg_equiv_alt_mem_list = XCNEWVEC (rtx, max_regno);
4106 reg_equiv_address = XCNEWVEC (rtx, max_regno);
4108 reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
4110 reg_max_ref_width = NULL;
4112 num_eliminable_invariants = 0;
4114 first_label_num = get_first_label_num ();
4115 num_labels = max_label_num () - first_label_num;
4117 /* Allocate the tables used to store offset information at labels. */
4118 offsets_known_at = XNEWVEC (char, num_labels);
4119 offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));
4121 /* Look for REG_EQUIV notes; record what each pseudo is equivalent
4122 to. If DO_SUBREGS is true, also find all paradoxical subregs and
4123 find largest such for each pseudo. FIRST is the head of the insn
4126 for (insn = first; insn; insn = NEXT_INSN (insn))
4128 rtx set = single_set (insn);
4130 /* We may introduce USEs that we want to remove at the end, so
4131 we'll mark them with QImode. Make sure there are no
4132 previously-marked insns left by say regmove. */
4133 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
4134 && GET_MODE (insn) != VOIDmode)
4135 PUT_MODE (insn, VOIDmode);
4137 if (do_subregs && NONDEBUG_INSN_P (insn))
4138 scan_paradoxical_subregs (PATTERN (insn));
4140 if (set != 0 && REG_P (SET_DEST (set)))
4142 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
4148 i = REGNO (SET_DEST (set));
4151 if (i <= LAST_VIRTUAL_REGISTER)
4154 if (! function_invariant_p (x)
4156 /* A function invariant is often CONSTANT_P but may
4157 include a register. We promise to only pass
4158 CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P. */
4160 && LEGITIMATE_PIC_OPERAND_P (x)))
4162 /* It can happen that a REG_EQUIV note contains a MEM
4163 that is not a legitimate memory operand. As later
4164 stages of reload assume that all addresses found
4165 in the reg_equiv_* arrays were originally legitimate,
4166 we ignore such REG_EQUIV notes. */
4167 if (memory_operand (x, VOIDmode))
4169 /* Always unshare the equivalence, so we can
4170 substitute into this insn without touching the
4172 reg_equiv_memory_loc[i] = copy_rtx (x);
4174 else if (function_invariant_p (x))
4176 if (GET_CODE (x) == PLUS)
4178 /* This is PLUS of frame pointer and a constant,
4179 and might be shared. Unshare it. */
4180 reg_equiv_invariant[i] = copy_rtx (x);
4181 num_eliminable_invariants++;
4183 else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
4185 reg_equiv_invariant[i] = x;
4186 num_eliminable_invariants++;
4188 else if (LEGITIMATE_CONSTANT_P (x))
4189 reg_equiv_constant[i] = x;
4192 reg_equiv_memory_loc[i]
4193 = force_const_mem (GET_MODE (SET_DEST (set)), x);
4194 if (! reg_equiv_memory_loc[i])
4195 reg_equiv_init[i] = NULL_RTX;
4200 reg_equiv_init[i] = NULL_RTX;
4205 reg_equiv_init[i] = NULL_RTX;
4210 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4211 if (reg_equiv_init[i])
4213 fprintf (dump_file, "init_insns for %u: ", i);
4214 print_inline_rtx (dump_file, reg_equiv_init[i], 20);
4215 fprintf (dump_file, "\n");
4219 /* Indicate that we no longer have known memory locations or constants.
4220 Free all data involved in tracking these. */
4223 free_reg_equiv (void)
4227 if (reg_equiv_constant)
4228 free (reg_equiv_constant);
4229 if (reg_equiv_invariant)
4230 free (reg_equiv_invariant);
4231 reg_equiv_constant = 0;
4232 reg_equiv_invariant = 0;
4233 VEC_free (rtx, gc, reg_equiv_memory_loc_vec);
4234 reg_equiv_memory_loc = 0;
4236 if (offsets_known_at)
4237 free (offsets_known_at);
4241 offsets_known_at = 0;
4243 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4244 if (reg_equiv_alt_mem_list[i])
4245 free_EXPR_LIST_list (®_equiv_alt_mem_list[i]);
4246 free (reg_equiv_alt_mem_list);
4248 free (reg_equiv_mem);
4249 free (reg_equiv_address);
4252 /* Kick all pseudos out of hard register REGNO.
4254 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4255 because we found we can't eliminate some register. In the case, no pseudos
4256 are allowed to be in the register, even if they are only in a block that
4257 doesn't require spill registers, unlike the case when we are spilling this
4258 hard reg to produce another spill register.
4260 Return nonzero if any pseudos needed to be kicked out. */
4263 spill_hard_reg (unsigned int regno, int cant_eliminate)
4269 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4270 df_set_regs_ever_live (regno, true);
4273 /* Spill every pseudo reg that was allocated to this reg
4274 or to something that overlaps this reg. */
4276 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4277 if (reg_renumber[i] >= 0
4278 && (unsigned int) reg_renumber[i] <= regno
4279 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4280 SET_REGNO_REG_SET (&spilled_pseudos, i);
4283 /* After find_reload_regs has been run for all insn that need reloads,
4284 and/or spill_hard_regs was called, this function is used to actually
4285 spill pseudo registers and try to reallocate them. It also sets up the
4286 spill_regs array for use by choose_reload_regs. */
4289 finish_spills (int global)
4291 struct insn_chain *chain;
4292 int something_changed = 0;
4294 reg_set_iterator rsi;
4296 /* Build the spill_regs array for the function. */
4297 /* If there are some registers still to eliminate and one of the spill regs
4298 wasn't ever used before, additional stack space may have to be
4299 allocated to store this register. Thus, we may have changed the offset
4300 between the stack and frame pointers, so mark that something has changed.
4302 One might think that we need only set VAL to 1 if this is a call-used
4303 register. However, the set of registers that must be saved by the
4304 prologue is not identical to the call-used set. For example, the
4305 register used by the call insn for the return PC is a call-used register,
4306 but must be saved by the prologue. */
4309 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4310 if (TEST_HARD_REG_BIT (used_spill_regs, i))
4312 spill_reg_order[i] = n_spills;
4313 spill_regs[n_spills++] = i;
4314 if (num_eliminable && ! df_regs_ever_live_p (i))
4315 something_changed = 1;
4316 df_set_regs_ever_live (i, true);
4319 spill_reg_order[i] = -1;
4321 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4322 if (! ira_conflicts_p || reg_renumber[i] >= 0)
4324 /* Record the current hard register the pseudo is allocated to
4325 in pseudo_previous_regs so we avoid reallocating it to the
4326 same hard reg in a later pass. */
4327 gcc_assert (reg_renumber[i] >= 0);
4329 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4330 /* Mark it as no longer having a hard register home. */
4331 reg_renumber[i] = -1;
4332 if (ira_conflicts_p)
4333 /* Inform IRA about the change. */
4334 ira_mark_allocation_change (i);
4335 /* We will need to scan everything again. */
4336 something_changed = 1;
4339 /* Retry global register allocation if possible. */
4340 if (global && ira_conflicts_p)
4344 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4345 /* For every insn that needs reloads, set the registers used as spill
4346 regs in pseudo_forbidden_regs for every pseudo live across the
4348 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4350 EXECUTE_IF_SET_IN_REG_SET
4351 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4353 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4354 chain->used_spill_regs);
4356 EXECUTE_IF_SET_IN_REG_SET
4357 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4359 IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
4360 chain->used_spill_regs);
4364 /* Retry allocating the pseudos spilled in IRA and the
4365 reload. For each reg, merge the various reg sets that
4366 indicate which hard regs can't be used, and call
4367 ira_reassign_pseudos. */
4368 for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4369 if (reg_old_renumber[i] != reg_renumber[i])
4371 if (reg_renumber[i] < 0)
4372 temp_pseudo_reg_arr[n++] = i;
4374 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4376 if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4377 bad_spill_regs_global,
4378 pseudo_forbidden_regs, pseudo_previous_regs,
4380 something_changed = 1;
4382 /* Fix up the register information in the insn chain.
4383 This involves deleting those of the spilled pseudos which did not get
4384 a new hard register home from the live_{before,after} sets. */
4385 for (chain = reload_insn_chain; chain; chain = chain->next)
4387 HARD_REG_SET used_by_pseudos;
4388 HARD_REG_SET used_by_pseudos2;
4390 if (! ira_conflicts_p)
4392 /* Don't do it for IRA because IRA and the reload still can
4393 assign hard registers to the spilled pseudos on next
4394 reload iterations. */
4395 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4396 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4398 /* Mark any unallocated hard regs as available for spills. That
4399 makes inheritance work somewhat better. */
4400 if (chain->need_reload)
4402 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4403 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4404 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
4406 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4407 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4408 /* Value of chain->used_spill_regs from previous iteration
4409 may be not included in the value calculated here because
4410 of possible removing caller-saves insns (see function
4411 delete_caller_save_insns. */
4412 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
4413 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
4417 CLEAR_REG_SET (&changed_allocation_pseudos);
4418 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4419 for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4421 int regno = reg_renumber[i];
4422 if (reg_old_renumber[i] == regno)
4425 SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4427 alter_reg (i, reg_old_renumber[i], false);
4428 reg_old_renumber[i] = regno;
4432 fprintf (dump_file, " Register %d now on stack.\n\n", i);
4434 fprintf (dump_file, " Register %d now in %d.\n\n",
4435 i, reg_renumber[i]);
4439 return something_changed;
4442 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4445 scan_paradoxical_subregs (rtx x)
4449 enum rtx_code code = GET_CODE (x);
4460 case CONST_VECTOR: /* shouldn't happen, but just in case. */
4468 if (REG_P (SUBREG_REG (x))
4469 && (GET_MODE_SIZE (GET_MODE (x))
4470 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4472 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4473 = GET_MODE_SIZE (GET_MODE (x));
4474 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4482 fmt = GET_RTX_FORMAT (code);
4483 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4486 scan_paradoxical_subregs (XEXP (x, i));
4487 else if (fmt[i] == 'E')
4490 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4491 scan_paradoxical_subregs (XVECEXP (x, i, j));
4496 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4497 examine all of the reload insns between PREV and NEXT exclusive, and
4498 annotate all that may trap. */
4501 fixup_eh_region_note (rtx insn, rtx prev, rtx next)
4503 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4506 if (!insn_could_throw_p (insn))
4507 remove_note (insn, note);
4508 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4511 /* Reload pseudo-registers into hard regs around each insn as needed.
4512 Additional register load insns are output before the insn that needs it
4513 and perhaps store insns after insns that modify the reloaded pseudo reg.
4515 reg_last_reload_reg and reg_reloaded_contents keep track of
4516 which registers are already available in reload registers.
4517 We update these for the reloads that we perform,
4518 as the insns are scanned. */
4521 reload_as_needed (int live_known)
4523 struct insn_chain *chain;
4524 #if defined (AUTO_INC_DEC)
4529 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4530 memset (spill_reg_store, 0, sizeof spill_reg_store);
4531 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4532 INIT_REG_SET (®_has_output_reload);
4533 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4534 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4536 set_initial_elim_offsets ();
4538 for (chain = reload_insn_chain; chain; chain = chain->next)
4541 rtx insn = chain->insn;
4542 rtx old_next = NEXT_INSN (insn);
4544 rtx old_prev = PREV_INSN (insn);
4547 /* If we pass a label, copy the offsets from the label information
4548 into the current offsets of each elimination. */
4550 set_offsets_for_label (insn);
4552 else if (INSN_P (insn))
4554 regset_head regs_to_forget;
4555 INIT_REG_SET (®s_to_forget);
4556 note_stores (PATTERN (insn), forget_old_reloads_1, ®s_to_forget);
4558 /* If this is a USE and CLOBBER of a MEM, ensure that any
4559 references to eliminable registers have been removed. */
4561 if ((GET_CODE (PATTERN (insn)) == USE
4562 || GET_CODE (PATTERN (insn)) == CLOBBER)
4563 && MEM_P (XEXP (PATTERN (insn), 0)))
4564 XEXP (XEXP (PATTERN (insn), 0), 0)
4565 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4566 GET_MODE (XEXP (PATTERN (insn), 0)),
4569 /* If we need to do register elimination processing, do so.
4570 This might delete the insn, in which case we are done. */
4571 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4573 eliminate_regs_in_insn (insn, 1);
4576 update_eliminable_offsets ();
4577 CLEAR_REG_SET (®s_to_forget);
4582 /* If need_elim is nonzero but need_reload is zero, one might think
4583 that we could simply set n_reloads to 0. However, find_reloads
4584 could have done some manipulation of the insn (such as swapping
4585 commutative operands), and these manipulations are lost during
4586 the first pass for every insn that needs register elimination.
4587 So the actions of find_reloads must be redone here. */
4589 if (! chain->need_elim && ! chain->need_reload
4590 && ! chain->need_operand_change)
4592 /* First find the pseudo regs that must be reloaded for this insn.
4593 This info is returned in the tables reload_... (see reload.h).
4594 Also modify the body of INSN by substituting RELOAD
4595 rtx's for those pseudo regs. */
4598 CLEAR_REG_SET (®_has_output_reload);
4599 CLEAR_HARD_REG_SET (reg_is_output_reload);
4601 find_reloads (insn, 1, spill_indirect_levels, live_known,
4607 rtx next = NEXT_INSN (insn);
4610 prev = PREV_INSN (insn);
4612 /* Now compute which reload regs to reload them into. Perhaps
4613 reusing reload regs from previous insns, or else output
4614 load insns to reload them. Maybe output store insns too.
4615 Record the choices of reload reg in reload_reg_rtx. */
4616 choose_reload_regs (chain);
4618 /* Merge any reloads that we didn't combine for fear of
4619 increasing the number of spill registers needed but now
4620 discover can be safely merged. */
4621 if (targetm.small_register_classes_for_mode_p (VOIDmode))
4622 merge_assigned_reloads (insn);
4624 /* Generate the insns to reload operands into or out of
4625 their reload regs. */
4626 emit_reload_insns (chain);
4628 /* Substitute the chosen reload regs from reload_reg_rtx
4629 into the insn's body (or perhaps into the bodies of other
4630 load and store insn that we just made for reloading
4631 and that we moved the structure into). */
4632 subst_reloads (insn);
4634 /* Adjust the exception region notes for loads and stores. */
4635 if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4636 fixup_eh_region_note (insn, prev, next);
4638 /* If this was an ASM, make sure that all the reload insns
4639 we have generated are valid. If not, give an error
4641 if (asm_noperands (PATTERN (insn)) >= 0)
4642 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4643 if (p != insn && INSN_P (p)
4644 && GET_CODE (PATTERN (p)) != USE
4645 && (recog_memoized (p) < 0
4646 || (extract_insn (p), ! constrain_operands (1))))
4648 error_for_asm (insn,
4649 "%<asm%> operand requires "
4650 "impossible reload");
4655 if (num_eliminable && chain->need_elim)
4656 update_eliminable_offsets ();
4658 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4659 is no longer validly lying around to save a future reload.
4660 Note that this does not detect pseudos that were reloaded
4661 for this insn in order to be stored in
4662 (obeying register constraints). That is correct; such reload
4663 registers ARE still valid. */
4664 forget_marked_reloads (®s_to_forget);
4665 CLEAR_REG_SET (®s_to_forget);
4667 /* There may have been CLOBBER insns placed after INSN. So scan
4668 between INSN and NEXT and use them to forget old reloads. */
4669 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4670 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4671 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4674 /* Likewise for regs altered by auto-increment in this insn.
4675 REG_INC notes have been changed by reloading:
4676 find_reloads_address_1 records substitutions for them,
4677 which have been performed by subst_reloads above. */
4678 for (i = n_reloads - 1; i >= 0; i--)
4680 rtx in_reg = rld[i].in_reg;
4683 enum rtx_code code = GET_CODE (in_reg);
4684 /* PRE_INC / PRE_DEC will have the reload register ending up
4685 with the same value as the stack slot, but that doesn't
4686 hold true for POST_INC / POST_DEC. Either we have to
4687 convert the memory access to a true POST_INC / POST_DEC,
4688 or we can't use the reload register for inheritance. */
4689 if ((code == POST_INC || code == POST_DEC)
4690 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4691 REGNO (rld[i].reg_rtx))
4692 /* Make sure it is the inc/dec pseudo, and not
4693 some other (e.g. output operand) pseudo. */
4694 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4695 == REGNO (XEXP (in_reg, 0))))
4698 rtx reload_reg = rld[i].reg_rtx;
4699 enum machine_mode mode = GET_MODE (reload_reg);
4703 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4705 /* We really want to ignore REG_INC notes here, so
4706 use PATTERN (p) as argument to reg_set_p . */
4707 if (reg_set_p (reload_reg, PATTERN (p)))
4709 n = count_occurrences (PATTERN (p), reload_reg, 0);
4715 = gen_rtx_fmt_e (code, mode, reload_reg);
4717 validate_replace_rtx_group (reload_reg,
4719 n = verify_changes (0);
4721 /* We must also verify that the constraints
4722 are met after the replacement. Make sure
4723 extract_insn is only called for an insn
4724 where the replacements were found to be
4729 n = constrain_operands (1);
4732 /* If the constraints were not met, then
4733 undo the replacement, else confirm it. */
4737 confirm_change_group ();
4743 add_reg_note (p, REG_INC, reload_reg);
4744 /* Mark this as having an output reload so that the
4745 REG_INC processing code below won't invalidate
4746 the reload for inheritance. */
4747 SET_HARD_REG_BIT (reg_is_output_reload,
4748 REGNO (reload_reg));
4749 SET_REGNO_REG_SET (®_has_output_reload,
4750 REGNO (XEXP (in_reg, 0)));
4753 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4756 else if ((code == PRE_INC || code == PRE_DEC)
4757 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4758 REGNO (rld[i].reg_rtx))
4759 /* Make sure it is the inc/dec pseudo, and not
4760 some other (e.g. output operand) pseudo. */
4761 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4762 == REGNO (XEXP (in_reg, 0))))
4764 SET_HARD_REG_BIT (reg_is_output_reload,
4765 REGNO (rld[i].reg_rtx));
4766 SET_REGNO_REG_SET (®_has_output_reload,
4767 REGNO (XEXP (in_reg, 0)));
4769 else if (code == PRE_INC || code == PRE_DEC
4770 || code == POST_INC || code == POST_DEC)
4772 int in_regno = REGNO (XEXP (in_reg, 0));
4774 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4777 bool forget_p = true;
4779 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4780 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4783 for (x = old_prev ? NEXT_INSN (old_prev) : insn;
4786 if (x == reg_reloaded_insn[in_hard_regno])
4792 /* If for some reasons, we didn't set up
4793 reg_last_reload_reg in this insn,
4794 invalidate inheritance from previous
4795 insns for the incremented/decremented
4796 register. Such registers will be not in
4797 reg_has_output_reload. Invalidate it
4798 also if the corresponding element in
4799 reg_reloaded_insn is also
4802 forget_old_reloads_1 (XEXP (in_reg, 0),
4808 /* If a pseudo that got a hard register is auto-incremented,
4809 we must purge records of copying it into pseudos without
4811 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4812 if (REG_NOTE_KIND (x) == REG_INC)
4814 /* See if this pseudo reg was reloaded in this insn.
4815 If so, its last-reload info is still valid
4816 because it is based on this insn's reload. */
4817 for (i = 0; i < n_reloads; i++)
4818 if (rld[i].out == XEXP (x, 0))
4822 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4826 /* A reload reg's contents are unknown after a label. */
4828 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4830 /* Don't assume a reload reg is still good after a call insn
4831 if it is a call-used reg, or if it contains a value that will
4832 be partially clobbered by the call. */
4833 else if (CALL_P (insn))
4835 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4836 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4841 free (reg_last_reload_reg);
4842 CLEAR_REG_SET (®_has_output_reload);
4845 /* Discard all record of any value reloaded from X,
4846 or reloaded in X from someplace else;
4847 unless X is an output reload reg of the current insn.
4849 X may be a hard reg (the reload reg)
4850 or it may be a pseudo reg that was reloaded from.
4852 When DATA is non-NULL just mark the registers in regset
4853 to be forgotten later. */
4856 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4861 regset regs = (regset) data;
4863 /* note_stores does give us subregs of hard regs,
4864 subreg_regno_offset requires a hard reg. */
4865 while (GET_CODE (x) == SUBREG)
4867 /* We ignore the subreg offset when calculating the regno,
4868 because we are using the entire underlying hard register
4878 if (regno >= FIRST_PSEUDO_REGISTER)
4884 nr = hard_regno_nregs[regno][GET_MODE (x)];
4885 /* Storing into a spilled-reg invalidates its contents.
4886 This can happen if a block-local pseudo is allocated to that reg
4887 and it wasn't spilled because this block's total need is 0.
4888 Then some insn might have an optional reload and use this reg. */
4890 for (i = 0; i < nr; i++)
4891 /* But don't do this if the reg actually serves as an output
4892 reload reg in the current instruction. */
4894 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4896 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4897 spill_reg_store[regno + i] = 0;
4903 SET_REGNO_REG_SET (regs, regno + nr);
4906 /* Since value of X has changed,
4907 forget any value previously copied from it. */
4910 /* But don't forget a copy if this is the output reload
4911 that establishes the copy's validity. */
4913 || !REGNO_REG_SET_P (®_has_output_reload, regno + nr))
4914 reg_last_reload_reg[regno + nr] = 0;
4918 /* Forget the reloads marked in regset by previous function. */
4920 forget_marked_reloads (regset regs)
4923 reg_set_iterator rsi;
4924 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4926 if (reg < FIRST_PSEUDO_REGISTER
4927 /* But don't do this if the reg actually serves as an output
4928 reload reg in the current instruction. */
4930 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4932 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4933 spill_reg_store[reg] = 0;
4936 || !REGNO_REG_SET_P (®_has_output_reload, reg))
4937 reg_last_reload_reg[reg] = 0;
4941 /* The following HARD_REG_SETs indicate when each hard register is
4942 used for a reload of various parts of the current insn. */
4944 /* If reg is unavailable for all reloads. */
4945 static HARD_REG_SET reload_reg_unavailable;
4946 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4947 static HARD_REG_SET reload_reg_used;
4948 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4949 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4950 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4951 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4952 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4953 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4954 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4955 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4956 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4957 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4958 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4959 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4960 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4961 static HARD_REG_SET reload_reg_used_in_op_addr;
4962 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4963 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4964 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4965 static HARD_REG_SET reload_reg_used_in_insn;
4966 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4967 static HARD_REG_SET reload_reg_used_in_other_addr;
4969 /* If reg is in use as a reload reg for any sort of reload. */
4970 static HARD_REG_SET reload_reg_used_at_all;
4972 /* If reg is use as an inherited reload. We just mark the first register
4974 static HARD_REG_SET reload_reg_used_for_inherit;
4976 /* Records which hard regs are used in any way, either as explicit use or
4977 by being allocated to a pseudo during any point of the current insn. */
4978 static HARD_REG_SET reg_used_in_insn;
4980 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4981 TYPE. MODE is used to indicate how many consecutive regs are
4985 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4986 enum machine_mode mode)
4988 unsigned int nregs = hard_regno_nregs[regno][mode];
4991 for (i = regno; i < nregs + regno; i++)
4996 SET_HARD_REG_BIT (reload_reg_used, i);
4999 case RELOAD_FOR_INPUT_ADDRESS:
5000 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
5003 case RELOAD_FOR_INPADDR_ADDRESS:
5004 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
5007 case RELOAD_FOR_OUTPUT_ADDRESS:
5008 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
5011 case RELOAD_FOR_OUTADDR_ADDRESS:
5012 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
5015 case RELOAD_FOR_OPERAND_ADDRESS:
5016 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
5019 case RELOAD_FOR_OPADDR_ADDR:
5020 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
5023 case RELOAD_FOR_OTHER_ADDRESS:
5024 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
5027 case RELOAD_FOR_INPUT:
5028 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
5031 case RELOAD_FOR_OUTPUT:
5032 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
5035 case RELOAD_FOR_INSN:
5036 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
5040 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
5044 /* Similarly, but show REGNO is no longer in use for a reload. */
5047 clear_reload_reg_in_use (unsigned int regno, int opnum,
5048 enum reload_type type, enum machine_mode mode)
5050 unsigned int nregs = hard_regno_nregs[regno][mode];
5051 unsigned int start_regno, end_regno, r;
5053 /* A complication is that for some reload types, inheritance might
5054 allow multiple reloads of the same types to share a reload register.
5055 We set check_opnum if we have to check only reloads with the same
5056 operand number, and check_any if we have to check all reloads. */
5057 int check_opnum = 0;
5059 HARD_REG_SET *used_in_set;
5064 used_in_set = &reload_reg_used;
5067 case RELOAD_FOR_INPUT_ADDRESS:
5068 used_in_set = &reload_reg_used_in_input_addr[opnum];
5071 case RELOAD_FOR_INPADDR_ADDRESS:
5073 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
5076 case RELOAD_FOR_OUTPUT_ADDRESS:
5077 used_in_set = &reload_reg_used_in_output_addr[opnum];
5080 case RELOAD_FOR_OUTADDR_ADDRESS:
5082 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
5085 case RELOAD_FOR_OPERAND_ADDRESS:
5086 used_in_set = &reload_reg_used_in_op_addr;
5089 case RELOAD_FOR_OPADDR_ADDR:
5091 used_in_set = &reload_reg_used_in_op_addr_reload;
5094 case RELOAD_FOR_OTHER_ADDRESS:
5095 used_in_set = &reload_reg_used_in_other_addr;
5099 case RELOAD_FOR_INPUT:
5100 used_in_set = &reload_reg_used_in_input[opnum];
5103 case RELOAD_FOR_OUTPUT:
5104 used_in_set = &reload_reg_used_in_output[opnum];
5107 case RELOAD_FOR_INSN:
5108 used_in_set = &reload_reg_used_in_insn;
5113 /* We resolve conflicts with remaining reloads of the same type by
5114 excluding the intervals of reload registers by them from the
5115 interval of freed reload registers. Since we only keep track of
5116 one set of interval bounds, we might have to exclude somewhat
5117 more than what would be necessary if we used a HARD_REG_SET here.
5118 But this should only happen very infrequently, so there should
5119 be no reason to worry about it. */
5121 start_regno = regno;
5122 end_regno = regno + nregs;
5123 if (check_opnum || check_any)
5125 for (i = n_reloads - 1; i >= 0; i--)
5127 if (rld[i].when_needed == type
5128 && (check_any || rld[i].opnum == opnum)
5131 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
5132 unsigned int conflict_end
5133 = end_hard_regno (rld[i].mode, conflict_start);
5135 /* If there is an overlap with the first to-be-freed register,
5136 adjust the interval start. */
5137 if (conflict_start <= start_regno && conflict_end > start_regno)
5138 start_regno = conflict_end;
5139 /* Otherwise, if there is a conflict with one of the other
5140 to-be-freed registers, adjust the interval end. */
5141 if (conflict_start > start_regno && conflict_start < end_regno)
5142 end_regno = conflict_start;
5147 for (r = start_regno; r < end_regno; r++)
5148 CLEAR_HARD_REG_BIT (*used_in_set, r);
5151 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
5152 specified by OPNUM and TYPE. */
5155 reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
5159 /* In use for a RELOAD_OTHER means it's not available for anything. */
5160 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
5161 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5167 /* In use for anything means we can't use it for RELOAD_OTHER. */
5168 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
5169 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5170 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5171 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5174 for (i = 0; i < reload_n_operands; i++)
5175 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5176 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5177 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5178 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5179 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5180 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5185 case RELOAD_FOR_INPUT:
5186 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5187 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
5190 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5193 /* If it is used for some other input, can't use it. */
5194 for (i = 0; i < reload_n_operands; i++)
5195 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5198 /* If it is used in a later operand's address, can't use it. */
5199 for (i = opnum + 1; i < reload_n_operands; i++)
5200 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5201 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5206 case RELOAD_FOR_INPUT_ADDRESS:
5207 /* Can't use a register if it is used for an input address for this
5208 operand or used as an input in an earlier one. */
5209 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
5210 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5213 for (i = 0; i < opnum; i++)
5214 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5219 case RELOAD_FOR_INPADDR_ADDRESS:
5220 /* Can't use a register if it is used for an input address
5221 for this operand or used as an input in an earlier
5223 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5226 for (i = 0; i < opnum; i++)
5227 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5232 case RELOAD_FOR_OUTPUT_ADDRESS:
5233 /* Can't use a register if it is used for an output address for this
5234 operand or used as an output in this or a later operand. Note
5235 that multiple output operands are emitted in reverse order, so
5236 the conflicting ones are those with lower indices. */
5237 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
5240 for (i = 0; i <= opnum; i++)
5241 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5246 case RELOAD_FOR_OUTADDR_ADDRESS:
5247 /* Can't use a register if it is used for an output address
5248 for this operand or used as an output in this or a
5249 later operand. Note that multiple output operands are
5250 emitted in reverse order, so the conflicting ones are
5251 those with lower indices. */
5252 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5255 for (i = 0; i <= opnum; i++)
5256 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5261 case RELOAD_FOR_OPERAND_ADDRESS:
5262 for (i = 0; i < reload_n_operands; i++)
5263 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5266 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5267 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5269 case RELOAD_FOR_OPADDR_ADDR:
5270 for (i = 0; i < reload_n_operands; i++)
5271 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5274 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
5276 case RELOAD_FOR_OUTPUT:
5277 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5278 outputs, or an operand address for this or an earlier output.
5279 Note that multiple output operands are emitted in reverse order,
5280 so the conflicting ones are those with higher indices. */
5281 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5284 for (i = 0; i < reload_n_operands; i++)
5285 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5288 for (i = opnum; i < reload_n_operands; i++)
5289 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5290 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5295 case RELOAD_FOR_INSN:
5296 for (i = 0; i < reload_n_operands; i++)
5297 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5298 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5301 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5302 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5304 case RELOAD_FOR_OTHER_ADDRESS:
5305 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
5312 /* Return 1 if the value in reload reg REGNO, as used by a reload
5313 needed for the part of the insn specified by OPNUM and TYPE,
5314 is still available in REGNO at the end of the insn.
5316 We can assume that the reload reg was already tested for availability
5317 at the time it is needed, and we should not check this again,
5318 in case the reg has already been marked in use. */
5321 reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
5328 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5329 its value must reach the end. */
5332 /* If this use is for part of the insn,
5333 its value reaches if no subsequent part uses the same register.
5334 Just like the above function, don't try to do this with lots
5337 case RELOAD_FOR_OTHER_ADDRESS:
5338 /* Here we check for everything else, since these don't conflict
5339 with anything else and everything comes later. */
5341 for (i = 0; i < reload_n_operands; i++)
5342 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5343 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5344 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5345 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5346 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5347 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5350 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5351 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5352 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5353 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5355 case RELOAD_FOR_INPUT_ADDRESS:
5356 case RELOAD_FOR_INPADDR_ADDRESS:
5357 /* Similar, except that we check only for this and subsequent inputs
5358 and the address of only subsequent inputs and we do not need
5359 to check for RELOAD_OTHER objects since they are known not to
5362 for (i = opnum; i < reload_n_operands; i++)
5363 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5366 for (i = opnum + 1; i < reload_n_operands; i++)
5367 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5368 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5371 for (i = 0; i < reload_n_operands; i++)
5372 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5373 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5374 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5377 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5380 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5381 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5382 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5384 case RELOAD_FOR_INPUT:
5385 /* Similar to input address, except we start at the next operand for
5386 both input and input address and we do not check for
5387 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5390 for (i = opnum + 1; i < reload_n_operands; i++)
5391 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5392 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5393 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5396 /* ... fall through ... */
5398 case RELOAD_FOR_OPERAND_ADDRESS:
5399 /* Check outputs and their addresses. */
5401 for (i = 0; i < reload_n_operands; i++)
5402 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5403 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5404 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5407 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5409 case RELOAD_FOR_OPADDR_ADDR:
5410 for (i = 0; i < reload_n_operands; i++)
5411 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5412 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5413 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5416 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5417 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5418 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5420 case RELOAD_FOR_INSN:
5421 /* These conflict with other outputs with RELOAD_OTHER. So
5422 we need only check for output addresses. */
5424 opnum = reload_n_operands;
5426 /* ... fall through ... */
5428 case RELOAD_FOR_OUTPUT:
5429 case RELOAD_FOR_OUTPUT_ADDRESS:
5430 case RELOAD_FOR_OUTADDR_ADDRESS:
5431 /* We already know these can't conflict with a later output. So the
5432 only thing to check are later output addresses.
5433 Note that multiple output operands are emitted in reverse order,
5434 so the conflicting ones are those with lower indices. */
5435 for (i = 0; i < opnum; i++)
5436 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5437 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5447 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5448 every register in the range [REGNO, REGNO + NREGS). */
5451 reload_regs_reach_end_p (unsigned int regno, int nregs,
5452 int opnum, enum reload_type type)
5456 for (i = 0; i < nregs; i++)
5457 if (!reload_reg_reaches_end_p (regno + i, opnum, type))
5463 /* Returns whether R1 and R2 are uniquely chained: the value of one
5464 is used by the other, and that value is not used by any other
5465 reload for this insn. This is used to partially undo the decision
5466 made in find_reloads when in the case of multiple
5467 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5468 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5469 reloads. This code tries to avoid the conflict created by that
5470 change. It might be cleaner to explicitly keep track of which
5471 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5472 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5473 this after the fact. */
5475 reloads_unique_chain_p (int r1, int r2)
5479 /* We only check input reloads. */
5480 if (! rld[r1].in || ! rld[r2].in)
5483 /* Avoid anything with output reloads. */
5484 if (rld[r1].out || rld[r2].out)
5487 /* "chained" means one reload is a component of the other reload,
5488 not the same as the other reload. */
5489 if (rld[r1].opnum != rld[r2].opnum
5490 || rtx_equal_p (rld[r1].in, rld[r2].in)
5491 || rld[r1].optional || rld[r2].optional
5492 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5493 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5496 for (i = 0; i < n_reloads; i ++)
5497 /* Look for input reloads that aren't our two */
5498 if (i != r1 && i != r2 && rld[i].in)
5500 /* If our reload is mentioned at all, it isn't a simple chain. */
5501 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5507 /* The recursive function change all occurrences of WHAT in *WHERE
5510 substitute (rtx *where, const_rtx what, rtx repl)
5519 if (*where == what || rtx_equal_p (*where, what))
5521 /* Record the location of the changed rtx. */
5522 VEC_safe_push (rtx_p, heap, substitute_stack, where);
5527 code = GET_CODE (*where);
5528 fmt = GET_RTX_FORMAT (code);
5529 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5535 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5536 substitute (&XVECEXP (*where, i, j), what, repl);
5538 else if (fmt[i] == 'e')
5539 substitute (&XEXP (*where, i), what, repl);
5543 /* The function returns TRUE if chain of reload R1 and R2 (in any
5544 order) can be evaluated without usage of intermediate register for
5545 the reload containing another reload. It is important to see
5546 gen_reload to understand what the function is trying to do. As an
5547 example, let us have reload chain
5550 r1: <something> + const
5552 and reload R2 got reload reg HR. The function returns true if
5553 there is a correct insn HR = HR + <something>. Otherwise,
5554 gen_reload will use intermediate register (and this is the reload
5555 reg for R1) to reload <something>.
5557 We need this function to find a conflict for chain reloads. In our
5558 example, if HR = HR + <something> is incorrect insn, then we cannot
5559 use HR as a reload register for R2. If we do use it then we get a
5568 gen_reload_chain_without_interm_reg_p (int r1, int r2)
5570 /* Assume other cases in gen_reload are not possible for
5571 chain reloads or do need an intermediate hard registers. */
5574 rtx out, in, tem, insn;
5575 rtx last = get_last_insn ();
5577 /* Make r2 a component of r1. */
5578 if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5584 gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5585 regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5586 gcc_assert (regno >= 0);
5587 out = gen_rtx_REG (rld[r1].mode, regno);
5589 substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5591 /* If IN is a paradoxical SUBREG, remove it and try to put the
5592 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5593 if (GET_CODE (in) == SUBREG
5594 && (GET_MODE_SIZE (GET_MODE (in))
5595 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
5596 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
5597 in = SUBREG_REG (in), out = tem;
5599 if (GET_CODE (in) == PLUS
5600 && (REG_P (XEXP (in, 0))
5601 || GET_CODE (XEXP (in, 0)) == SUBREG
5602 || MEM_P (XEXP (in, 0)))
5603 && (REG_P (XEXP (in, 1))
5604 || GET_CODE (XEXP (in, 1)) == SUBREG
5605 || CONSTANT_P (XEXP (in, 1))
5606 || MEM_P (XEXP (in, 1))))
5608 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
5609 code = recog_memoized (insn);
5614 extract_insn (insn);
5615 /* We want constrain operands to treat this insn strictly in
5616 its validity determination, i.e., the way it would after
5617 reload has completed. */
5618 result = constrain_operands (1);
5621 delete_insns_since (last);
5624 /* Restore the original value at each changed address within R1. */
5625 while (!VEC_empty (rtx_p, substitute_stack))
5627 rtx *where = VEC_pop (rtx_p, substitute_stack);
5628 *where = rld[r2].in;
5634 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5637 This function uses the same algorithm as reload_reg_free_p above. */
5640 reloads_conflict (int r1, int r2)
5642 enum reload_type r1_type = rld[r1].when_needed;
5643 enum reload_type r2_type = rld[r2].when_needed;
5644 int r1_opnum = rld[r1].opnum;
5645 int r2_opnum = rld[r2].opnum;
5647 /* RELOAD_OTHER conflicts with everything. */
5648 if (r2_type == RELOAD_OTHER)
5651 /* Otherwise, check conflicts differently for each type. */
5655 case RELOAD_FOR_INPUT:
5656 return (r2_type == RELOAD_FOR_INSN
5657 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5658 || r2_type == RELOAD_FOR_OPADDR_ADDR
5659 || r2_type == RELOAD_FOR_INPUT
5660 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5661 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5662 && r2_opnum > r1_opnum));
5664 case RELOAD_FOR_INPUT_ADDRESS:
5665 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5666 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5668 case RELOAD_FOR_INPADDR_ADDRESS:
5669 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5670 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5672 case RELOAD_FOR_OUTPUT_ADDRESS:
5673 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5674 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5676 case RELOAD_FOR_OUTADDR_ADDRESS:
5677 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5678 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5680 case RELOAD_FOR_OPERAND_ADDRESS:
5681 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5682 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5683 && (!reloads_unique_chain_p (r1, r2)
5684 || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5686 case RELOAD_FOR_OPADDR_ADDR:
5687 return (r2_type == RELOAD_FOR_INPUT
5688 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5690 case RELOAD_FOR_OUTPUT:
5691 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5692 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5693 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5694 && r2_opnum >= r1_opnum));
5696 case RELOAD_FOR_INSN:
5697 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5698 || r2_type == RELOAD_FOR_INSN
5699 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5701 case RELOAD_FOR_OTHER_ADDRESS:
5702 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5712 /* Indexed by reload number, 1 if incoming value
5713 inherited from previous insns. */
5714 static char reload_inherited[MAX_RELOADS];
5716 /* For an inherited reload, this is the insn the reload was inherited from,
5717 if we know it. Otherwise, this is 0. */
5718 static rtx reload_inheritance_insn[MAX_RELOADS];
5720 /* If nonzero, this is a place to get the value of the reload,
5721 rather than using reload_in. */
5722 static rtx reload_override_in[MAX_RELOADS];
5724 /* For each reload, the hard register number of the register used,
5725 or -1 if we did not need a register for this reload. */
5726 static int reload_spill_index[MAX_RELOADS];
5728 /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5729 static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5731 /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5732 static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5734 /* Subroutine of free_for_value_p, used to check a single register.
5735 START_REGNO is the starting regno of the full reload register
5736 (possibly comprising multiple hard registers) that we are considering. */
5739 reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5740 enum reload_type type, rtx value, rtx out,
5741 int reloadnum, int ignore_address_reloads)
5744 /* Set if we see an input reload that must not share its reload register
5745 with any new earlyclobber, but might otherwise share the reload
5746 register with an output or input-output reload. */
5747 int check_earlyclobber = 0;
5751 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5754 if (out == const0_rtx)
5760 /* We use some pseudo 'time' value to check if the lifetimes of the
5761 new register use would overlap with the one of a previous reload
5762 that is not read-only or uses a different value.
5763 The 'time' used doesn't have to be linear in any shape or form, just
5765 Some reload types use different 'buckets' for each operand.
5766 So there are MAX_RECOG_OPERANDS different time values for each
5768 We compute TIME1 as the time when the register for the prospective
5769 new reload ceases to be live, and TIME2 for each existing
5770 reload as the time when that the reload register of that reload
5772 Where there is little to be gained by exact lifetime calculations,
5773 we just make conservative assumptions, i.e. a longer lifetime;
5774 this is done in the 'default:' cases. */
5777 case RELOAD_FOR_OTHER_ADDRESS:
5778 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5779 time1 = copy ? 0 : 1;
5782 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5784 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5785 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5786 respectively, to the time values for these, we get distinct time
5787 values. To get distinct time values for each operand, we have to
5788 multiply opnum by at least three. We round that up to four because
5789 multiply by four is often cheaper. */
5790 case RELOAD_FOR_INPADDR_ADDRESS:
5791 time1 = opnum * 4 + 2;
5793 case RELOAD_FOR_INPUT_ADDRESS:
5794 time1 = opnum * 4 + 3;
5796 case RELOAD_FOR_INPUT:
5797 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5798 executes (inclusive). */
5799 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5801 case RELOAD_FOR_OPADDR_ADDR:
5803 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5804 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5806 case RELOAD_FOR_OPERAND_ADDRESS:
5807 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5809 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5811 case RELOAD_FOR_OUTADDR_ADDRESS:
5812 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5814 case RELOAD_FOR_OUTPUT_ADDRESS:
5815 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5818 time1 = MAX_RECOG_OPERANDS * 5 + 5;
5821 for (i = 0; i < n_reloads; i++)
5823 rtx reg = rld[i].reg_rtx;
5824 if (reg && REG_P (reg)
5825 && ((unsigned) regno - true_regnum (reg)
5826 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
5829 rtx other_input = rld[i].in;
5831 /* If the other reload loads the same input value, that
5832 will not cause a conflict only if it's loading it into
5833 the same register. */
5834 if (true_regnum (reg) != start_regno)
5835 other_input = NULL_RTX;
5836 if (! other_input || ! rtx_equal_p (other_input, value)
5837 || rld[i].out || out)
5840 switch (rld[i].when_needed)
5842 case RELOAD_FOR_OTHER_ADDRESS:
5845 case RELOAD_FOR_INPADDR_ADDRESS:
5846 /* find_reloads makes sure that a
5847 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5848 by at most one - the first -
5849 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5850 address reload is inherited, the address address reload
5851 goes away, so we can ignore this conflict. */
5852 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5853 && ignore_address_reloads
5854 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5855 Then the address address is still needed to store
5856 back the new address. */
5857 && ! rld[reloadnum].out)
5859 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5860 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5862 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5863 && ignore_address_reloads
5864 /* Unless we are reloading an auto_inc expression. */
5865 && ! rld[reloadnum].out)
5867 time2 = rld[i].opnum * 4 + 2;
5869 case RELOAD_FOR_INPUT_ADDRESS:
5870 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5871 && ignore_address_reloads
5872 && ! rld[reloadnum].out)
5874 time2 = rld[i].opnum * 4 + 3;
5876 case RELOAD_FOR_INPUT:
5877 time2 = rld[i].opnum * 4 + 4;
5878 check_earlyclobber = 1;
5880 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
5881 == MAX_RECOG_OPERAND * 4 */
5882 case RELOAD_FOR_OPADDR_ADDR:
5883 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5884 && ignore_address_reloads
5885 && ! rld[reloadnum].out)
5887 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5889 case RELOAD_FOR_OPERAND_ADDRESS:
5890 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5891 check_earlyclobber = 1;
5893 case RELOAD_FOR_INSN:
5894 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5896 case RELOAD_FOR_OUTPUT:
5897 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5898 instruction is executed. */
5899 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5901 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5902 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5904 case RELOAD_FOR_OUTADDR_ADDRESS:
5905 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5906 && ignore_address_reloads
5907 && ! rld[reloadnum].out)
5909 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5911 case RELOAD_FOR_OUTPUT_ADDRESS:
5912 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5915 /* If there is no conflict in the input part, handle this
5916 like an output reload. */
5917 if (! rld[i].in || rtx_equal_p (other_input, value))
5919 time2 = MAX_RECOG_OPERANDS * 4 + 4;
5920 /* Earlyclobbered outputs must conflict with inputs. */
5921 if (earlyclobber_operand_p (rld[i].out))
5922 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5927 /* RELOAD_OTHER might be live beyond instruction execution,
5928 but this is not obvious when we set time2 = 1. So check
5929 here if there might be a problem with the new reload
5930 clobbering the register used by the RELOAD_OTHER. */
5938 && (! rld[i].in || rld[i].out
5939 || ! rtx_equal_p (other_input, value)))
5940 || (out && rld[reloadnum].out_reg
5941 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5947 /* Earlyclobbered outputs must conflict with inputs. */
5948 if (check_earlyclobber && out && earlyclobber_operand_p (out))
5954 /* Return 1 if the value in reload reg REGNO, as used by a reload
5955 needed for the part of the insn specified by OPNUM and TYPE,
5956 may be used to load VALUE into it.
5958 MODE is the mode in which the register is used, this is needed to
5959 determine how many hard regs to test.
5961 Other read-only reloads with the same value do not conflict
5962 unless OUT is nonzero and these other reloads have to live while
5963 output reloads live.
5964 If OUT is CONST0_RTX, this is a special case: it means that the
5965 test should not be for using register REGNO as reload register, but
5966 for copying from register REGNO into the reload register.
5968 RELOADNUM is the number of the reload we want to load this value for;
5969 a reload does not conflict with itself.
5971 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5972 reloads that load an address for the very reload we are considering.
5974 The caller has to make sure that there is no conflict with the return
5978 free_for_value_p (int regno, enum machine_mode mode, int opnum,
5979 enum reload_type type, rtx value, rtx out, int reloadnum,
5980 int ignore_address_reloads)
5982 int nregs = hard_regno_nregs[regno][mode];
5984 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5985 value, out, reloadnum,
5986 ignore_address_reloads))
5991 /* Return nonzero if the rtx X is invariant over the current function. */
5992 /* ??? Actually, the places where we use this expect exactly what is
5993 tested here, and not everything that is function invariant. In
5994 particular, the frame pointer and arg pointer are special cased;
5995 pic_offset_table_rtx is not, and we must not spill these things to
5999 function_invariant_p (const_rtx x)
6003 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6005 if (GET_CODE (x) == PLUS
6006 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6007 && CONSTANT_P (XEXP (x, 1)))
6012 /* Determine whether the reload reg X overlaps any rtx'es used for
6013 overriding inheritance. Return nonzero if so. */
6016 conflicts_with_override (rtx x)
6019 for (i = 0; i < n_reloads; i++)
6020 if (reload_override_in[i]
6021 && reg_overlap_mentioned_p (x, reload_override_in[i]))
6026 /* Give an error message saying we failed to find a reload for INSN,
6027 and clear out reload R. */
6029 failed_reload (rtx insn, int r)
6031 if (asm_noperands (PATTERN (insn)) < 0)
6032 /* It's the compiler's fault. */
6033 fatal_insn ("could not find a spill register", insn);
6035 /* It's the user's fault; the operand's mode and constraint
6036 don't match. Disable this reload so we don't crash in final. */
6037 error_for_asm (insn,
6038 "%<asm%> operand constraint incompatible with operand size");
6042 rld[r].optional = 1;
6043 rld[r].secondary_p = 1;
6046 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6047 for reload R. If it's valid, get an rtx for it. Return nonzero if
6050 set_reload_reg (int i, int r)
6053 rtx reg = spill_reg_rtx[i];
6055 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6056 spill_reg_rtx[i] = reg
6057 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6059 regno = true_regnum (reg);
6061 /* Detect when the reload reg can't hold the reload mode.
6062 This used to be one `if', but Sequent compiler can't handle that. */
6063 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6065 enum machine_mode test_mode = VOIDmode;
6067 test_mode = GET_MODE (rld[r].in);
6068 /* If rld[r].in has VOIDmode, it means we will load it
6069 in whatever mode the reload reg has: to wit, rld[r].mode.
6070 We have already tested that for validity. */
6071 /* Aside from that, we need to test that the expressions
6072 to reload from or into have modes which are valid for this
6073 reload register. Otherwise the reload insns would be invalid. */
6074 if (! (rld[r].in != 0 && test_mode != VOIDmode
6075 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6076 if (! (rld[r].out != 0
6077 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6079 /* The reg is OK. */
6082 /* Mark as in use for this insn the reload regs we use
6084 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6085 rld[r].when_needed, rld[r].mode);
6087 rld[r].reg_rtx = reg;
6088 reload_spill_index[r] = spill_regs[i];
6095 /* Find a spill register to use as a reload register for reload R.
6096 LAST_RELOAD is nonzero if this is the last reload for the insn being
6099 Set rld[R].reg_rtx to the register allocated.
6101 We return 1 if successful, or 0 if we couldn't find a spill reg and
6102 we didn't change anything. */
6105 allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
6110 /* If we put this reload ahead, thinking it is a group,
6111 then insist on finding a group. Otherwise we can grab a
6112 reg that some other reload needs.
6113 (That can happen when we have a 68000 DATA_OR_FP_REG
6114 which is a group of data regs or one fp reg.)
6115 We need not be so restrictive if there are no more reloads
6118 ??? Really it would be nicer to have smarter handling
6119 for that kind of reg class, where a problem like this is normal.
6120 Perhaps those classes should be avoided for reloading
6121 by use of more alternatives. */
6123 int force_group = rld[r].nregs > 1 && ! last_reload;
6125 /* If we want a single register and haven't yet found one,
6126 take any reg in the right class and not in use.
6127 If we want a consecutive group, here is where we look for it.
6129 We use three passes so we can first look for reload regs to
6130 reuse, which are already in use for other reloads in this insn,
6131 and only then use additional registers which are not "bad", then
6132 finally any register.
6134 I think that maximizing reuse is needed to make sure we don't
6135 run out of reload regs. Suppose we have three reloads, and
6136 reloads A and B can share regs. These need two regs.
6137 Suppose A and B are given different regs.
6138 That leaves none for C. */
6139 for (pass = 0; pass < 3; pass++)
6141 /* I is the index in spill_regs.
6142 We advance it round-robin between insns to use all spill regs
6143 equally, so that inherited reloads have a chance
6144 of leapfrogging each other. */
6148 for (count = 0; count < n_spills; count++)
6150 int rclass = (int) rld[r].rclass;
6156 regnum = spill_regs[i];
6158 if ((reload_reg_free_p (regnum, rld[r].opnum,
6161 /* We check reload_reg_used to make sure we
6162 don't clobber the return register. */
6163 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
6164 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
6165 rld[r].when_needed, rld[r].in,
6167 && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
6168 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
6169 /* Look first for regs to share, then for unshared. But
6170 don't share regs used for inherited reloads; they are
6171 the ones we want to preserve. */
6173 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
6175 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
6178 int nr = hard_regno_nregs[regnum][rld[r].mode];
6180 /* During the second pass we want to avoid reload registers
6181 which are "bad" for this reload. */
6183 && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
6186 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6187 (on 68000) got us two FP regs. If NR is 1,
6188 we would reject both of them. */
6191 /* If we need only one reg, we have already won. */
6194 /* But reject a single reg if we demand a group. */
6199 /* Otherwise check that as many consecutive regs as we need
6200 are available here. */
6203 int regno = regnum + nr - 1;
6204 if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
6205 && spill_reg_order[regno] >= 0
6206 && reload_reg_free_p (regno, rld[r].opnum,
6207 rld[r].when_needed)))
6216 /* If we found something on the current pass, omit later passes. */
6217 if (count < n_spills)
6221 /* We should have found a spill register by now. */
6222 if (count >= n_spills)
6225 /* I is the index in SPILL_REG_RTX of the reload register we are to
6226 allocate. Get an rtx for it and find its register number. */
6228 return set_reload_reg (i, r);
6231 /* Initialize all the tables needed to allocate reload registers.
6232 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6233 is the array we use to restore the reg_rtx field for every reload. */
6236 choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
6240 for (i = 0; i < n_reloads; i++)
6241 rld[i].reg_rtx = save_reload_reg_rtx[i];
6243 memset (reload_inherited, 0, MAX_RELOADS);
6244 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6245 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6247 CLEAR_HARD_REG_SET (reload_reg_used);
6248 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6249 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6250 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6251 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6252 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6254 CLEAR_HARD_REG_SET (reg_used_in_insn);
6257 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6258 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6259 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6260 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
6261 compute_use_by_pseudos (®_used_in_insn, &chain->live_throughout);
6262 compute_use_by_pseudos (®_used_in_insn, &chain->dead_or_set);
6265 for (i = 0; i < reload_n_operands; i++)
6267 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6268 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6269 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6270 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6271 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6272 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6275 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
6277 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6279 for (i = 0; i < n_reloads; i++)
6280 /* If we have already decided to use a certain register,
6281 don't use it in another way. */
6283 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6284 rld[i].when_needed, rld[i].mode);
6287 /* Assign hard reg targets for the pseudo-registers we must reload
6288 into hard regs for this insn.
6289 Also output the instructions to copy them in and out of the hard regs.
6291 For machines with register classes, we are responsible for
6292 finding a reload reg in the proper class. */
6295 choose_reload_regs (struct insn_chain *chain)
6297 rtx insn = chain->insn;
6299 unsigned int max_group_size = 1;
6300 enum reg_class group_class = NO_REGS;
6301 int pass, win, inheritance;
6303 rtx save_reload_reg_rtx[MAX_RELOADS];
6305 /* In order to be certain of getting the registers we need,
6306 we must sort the reloads into order of increasing register class.
6307 Then our grabbing of reload registers will parallel the process
6308 that provided the reload registers.
6310 Also note whether any of the reloads wants a consecutive group of regs.
6311 If so, record the maximum size of the group desired and what
6312 register class contains all the groups needed by this insn. */
6314 for (j = 0; j < n_reloads; j++)
6316 reload_order[j] = j;
6317 if (rld[j].reg_rtx != NULL_RTX)
6319 gcc_assert (REG_P (rld[j].reg_rtx)
6320 && HARD_REGISTER_P (rld[j].reg_rtx));
6321 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6324 reload_spill_index[j] = -1;
6326 if (rld[j].nregs > 1)
6328 max_group_size = MAX (rld[j].nregs, max_group_size);
6330 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6333 save_reload_reg_rtx[j] = rld[j].reg_rtx;
6337 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6339 /* If -O, try first with inheritance, then turning it off.
6340 If not -O, don't do inheritance.
6341 Using inheritance when not optimizing leads to paradoxes
6342 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6343 because one side of the comparison might be inherited. */
6345 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6347 choose_reload_regs_init (chain, save_reload_reg_rtx);
6349 /* Process the reloads in order of preference just found.
6350 Beyond this point, subregs can be found in reload_reg_rtx.
6352 This used to look for an existing reloaded home for all of the
6353 reloads, and only then perform any new reloads. But that could lose
6354 if the reloads were done out of reg-class order because a later
6355 reload with a looser constraint might have an old home in a register
6356 needed by an earlier reload with a tighter constraint.
6358 To solve this, we make two passes over the reloads, in the order
6359 described above. In the first pass we try to inherit a reload
6360 from a previous insn. If there is a later reload that needs a
6361 class that is a proper subset of the class being processed, we must
6362 also allocate a spill register during the first pass.
6364 Then make a second pass over the reloads to allocate any reloads
6365 that haven't been given registers yet. */
6367 for (j = 0; j < n_reloads; j++)
6369 int r = reload_order[j];
6370 rtx search_equiv = NULL_RTX;
6372 /* Ignore reloads that got marked inoperative. */
6373 if (rld[r].out == 0 && rld[r].in == 0
6374 && ! rld[r].secondary_p)
6377 /* If find_reloads chose to use reload_in or reload_out as a reload
6378 register, we don't need to chose one. Otherwise, try even if it
6379 found one since we might save an insn if we find the value lying
6381 Try also when reload_in is a pseudo without a hard reg. */
6382 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6383 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6384 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6385 && !MEM_P (rld[r].in)
6386 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6389 #if 0 /* No longer needed for correct operation.
6390 It might give better code, or might not; worth an experiment? */
6391 /* If this is an optional reload, we can't inherit from earlier insns
6392 until we are sure that any non-optional reloads have been allocated.
6393 The following code takes advantage of the fact that optional reloads
6394 are at the end of reload_order. */
6395 if (rld[r].optional != 0)
6396 for (i = 0; i < j; i++)
6397 if ((rld[reload_order[i]].out != 0
6398 || rld[reload_order[i]].in != 0
6399 || rld[reload_order[i]].secondary_p)
6400 && ! rld[reload_order[i]].optional
6401 && rld[reload_order[i]].reg_rtx == 0)
6402 allocate_reload_reg (chain, reload_order[i], 0);
6405 /* First see if this pseudo is already available as reloaded
6406 for a previous insn. We cannot try to inherit for reloads
6407 that are smaller than the maximum number of registers needed
6408 for groups unless the register we would allocate cannot be used
6411 We could check here to see if this is a secondary reload for
6412 an object that is already in a register of the desired class.
6413 This would avoid the need for the secondary reload register.
6414 But this is complex because we can't easily determine what
6415 objects might want to be loaded via this reload. So let a
6416 register be allocated here. In `emit_reload_insns' we suppress
6417 one of the loads in the case described above. */
6423 enum machine_mode mode = VOIDmode;
6427 else if (REG_P (rld[r].in))
6429 regno = REGNO (rld[r].in);
6430 mode = GET_MODE (rld[r].in);
6432 else if (REG_P (rld[r].in_reg))
6434 regno = REGNO (rld[r].in_reg);
6435 mode = GET_MODE (rld[r].in_reg);
6437 else if (GET_CODE (rld[r].in_reg) == SUBREG
6438 && REG_P (SUBREG_REG (rld[r].in_reg)))
6440 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6441 if (regno < FIRST_PSEUDO_REGISTER)
6442 regno = subreg_regno (rld[r].in_reg);
6444 byte = SUBREG_BYTE (rld[r].in_reg);
6445 mode = GET_MODE (rld[r].in_reg);
6448 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6449 && REG_P (XEXP (rld[r].in_reg, 0)))
6451 regno = REGNO (XEXP (rld[r].in_reg, 0));
6452 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6453 rld[r].out = rld[r].in;
6457 /* This won't work, since REGNO can be a pseudo reg number.
6458 Also, it takes much more hair to keep track of all the things
6459 that can invalidate an inherited reload of part of a pseudoreg. */
6460 else if (GET_CODE (rld[r].in) == SUBREG
6461 && REG_P (SUBREG_REG (rld[r].in)))
6462 regno = subreg_regno (rld[r].in);
6466 && reg_last_reload_reg[regno] != 0
6467 #ifdef CANNOT_CHANGE_MODE_CLASS
6468 /* Verify that the register it's in can be used in
6470 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6471 GET_MODE (reg_last_reload_reg[regno]),
6476 enum reg_class rclass = rld[r].rclass, last_class;
6477 rtx last_reg = reg_last_reload_reg[regno];
6478 enum machine_mode need_mode;
6480 i = REGNO (last_reg);
6481 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6482 last_class = REGNO_REG_CLASS (i);
6488 = smallest_mode_for_size
6489 (GET_MODE_BITSIZE (mode) + byte * BITS_PER_UNIT,
6490 GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
6491 ? MODE_INT : GET_MODE_CLASS (mode));
6493 if ((GET_MODE_SIZE (GET_MODE (last_reg))
6494 >= GET_MODE_SIZE (need_mode))
6495 && reg_reloaded_contents[i] == regno
6496 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6497 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6498 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6499 /* Even if we can't use this register as a reload
6500 register, we might use it for reload_override_in,
6501 if copying it to the desired class is cheap
6503 || ((REGISTER_MOVE_COST (mode, last_class, rclass)
6504 < MEMORY_MOVE_COST (mode, rclass, 1))
6505 && (secondary_reload_class (1, rclass, mode,
6508 #ifdef SECONDARY_MEMORY_NEEDED
6509 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6514 && (rld[r].nregs == max_group_size
6515 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6517 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6518 rld[r].when_needed, rld[r].in,
6521 /* If a group is needed, verify that all the subsequent
6522 registers still have their values intact. */
6523 int nr = hard_regno_nregs[i][rld[r].mode];
6526 for (k = 1; k < nr; k++)
6527 if (reg_reloaded_contents[i + k] != regno
6528 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6536 last_reg = (GET_MODE (last_reg) == mode
6537 ? last_reg : gen_rtx_REG (mode, i));
6540 for (k = 0; k < nr; k++)
6541 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6544 /* We found a register that contains the
6545 value we need. If this register is the
6546 same as an `earlyclobber' operand of the
6547 current insn, just mark it as a place to
6548 reload from since we can't use it as the
6549 reload register itself. */
6551 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6552 if (reg_overlap_mentioned_for_reload_p
6553 (reg_last_reload_reg[regno],
6554 reload_earlyclobbers[i1]))
6557 if (i1 != n_earlyclobbers
6558 || ! (free_for_value_p (i, rld[r].mode,
6560 rld[r].when_needed, rld[r].in,
6562 /* Don't use it if we'd clobber a pseudo reg. */
6563 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6565 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6566 /* Don't clobber the frame pointer. */
6567 || (i == HARD_FRAME_POINTER_REGNUM
6568 && frame_pointer_needed
6570 /* Don't really use the inherited spill reg
6571 if we need it wider than we've got it. */
6572 || (GET_MODE_SIZE (rld[r].mode)
6573 > GET_MODE_SIZE (mode))
6576 /* If find_reloads chose reload_out as reload
6577 register, stay with it - that leaves the
6578 inherited register for subsequent reloads. */
6579 || (rld[r].out && rld[r].reg_rtx
6580 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6582 if (! rld[r].optional)
6584 reload_override_in[r] = last_reg;
6585 reload_inheritance_insn[r]
6586 = reg_reloaded_insn[i];
6592 /* We can use this as a reload reg. */
6593 /* Mark the register as in use for this part of
6595 mark_reload_reg_in_use (i,
6599 rld[r].reg_rtx = last_reg;
6600 reload_inherited[r] = 1;
6601 reload_inheritance_insn[r]
6602 = reg_reloaded_insn[i];
6603 reload_spill_index[r] = i;
6604 for (k = 0; k < nr; k++)
6605 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6613 /* Here's another way to see if the value is already lying around. */
6616 && ! reload_inherited[r]
6618 && (CONSTANT_P (rld[r].in)
6619 || GET_CODE (rld[r].in) == PLUS
6620 || REG_P (rld[r].in)
6621 || MEM_P (rld[r].in))
6622 && (rld[r].nregs == max_group_size
6623 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6624 search_equiv = rld[r].in;
6625 /* If this is an output reload from a simple move insn, look
6626 if an equivalence for the input is available. */
6627 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
6629 rtx set = single_set (insn);
6632 && rtx_equal_p (rld[r].out, SET_DEST (set))
6633 && CONSTANT_P (SET_SRC (set)))
6634 search_equiv = SET_SRC (set);
6640 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6641 -1, NULL, 0, rld[r].mode);
6647 regno = REGNO (equiv);
6650 /* This must be a SUBREG of a hard register.
6651 Make a new REG since this might be used in an
6652 address and not all machines support SUBREGs
6654 gcc_assert (GET_CODE (equiv) == SUBREG);
6655 regno = subreg_regno (equiv);
6656 equiv = gen_rtx_REG (rld[r].mode, regno);
6657 /* If we choose EQUIV as the reload register, but the
6658 loop below decides to cancel the inheritance, we'll
6659 end up reloading EQUIV in rld[r].mode, not the mode
6660 it had originally. That isn't safe when EQUIV isn't
6661 available as a spill register since its value might
6662 still be live at this point. */
6663 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6664 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6669 /* If we found a spill reg, reject it unless it is free
6670 and of the desired class. */
6674 int bad_for_class = 0;
6675 int max_regno = regno + rld[r].nregs;
6677 for (i = regno; i < max_regno; i++)
6679 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6681 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6686 && ! free_for_value_p (regno, rld[r].mode,
6687 rld[r].opnum, rld[r].when_needed,
6688 rld[r].in, rld[r].out, r, 1))
6693 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6696 /* We found a register that contains the value we need.
6697 If this register is the same as an `earlyclobber' operand
6698 of the current insn, just mark it as a place to reload from
6699 since we can't use it as the reload register itself. */
6702 for (i = 0; i < n_earlyclobbers; i++)
6703 if (reg_overlap_mentioned_for_reload_p (equiv,
6704 reload_earlyclobbers[i]))
6706 if (! rld[r].optional)
6707 reload_override_in[r] = equiv;
6712 /* If the equiv register we have found is explicitly clobbered
6713 in the current insn, it depends on the reload type if we
6714 can use it, use it for reload_override_in, or not at all.
6715 In particular, we then can't use EQUIV for a
6716 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6720 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6721 switch (rld[r].when_needed)
6723 case RELOAD_FOR_OTHER_ADDRESS:
6724 case RELOAD_FOR_INPADDR_ADDRESS:
6725 case RELOAD_FOR_INPUT_ADDRESS:
6726 case RELOAD_FOR_OPADDR_ADDR:
6729 case RELOAD_FOR_INPUT:
6730 case RELOAD_FOR_OPERAND_ADDRESS:
6731 if (! rld[r].optional)
6732 reload_override_in[r] = equiv;
6738 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6739 switch (rld[r].when_needed)
6741 case RELOAD_FOR_OTHER_ADDRESS:
6742 case RELOAD_FOR_INPADDR_ADDRESS:
6743 case RELOAD_FOR_INPUT_ADDRESS:
6744 case RELOAD_FOR_OPADDR_ADDR:
6745 case RELOAD_FOR_OPERAND_ADDRESS:
6746 case RELOAD_FOR_INPUT:
6749 if (! rld[r].optional)
6750 reload_override_in[r] = equiv;
6758 /* If we found an equivalent reg, say no code need be generated
6759 to load it, and use it as our reload reg. */
6761 && (regno != HARD_FRAME_POINTER_REGNUM
6762 || !frame_pointer_needed))
6764 int nr = hard_regno_nregs[regno][rld[r].mode];
6766 rld[r].reg_rtx = equiv;
6767 reload_spill_index[r] = regno;
6768 reload_inherited[r] = 1;
6770 /* If reg_reloaded_valid is not set for this register,
6771 there might be a stale spill_reg_store lying around.
6772 We must clear it, since otherwise emit_reload_insns
6773 might delete the store. */
6774 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6775 spill_reg_store[regno] = NULL_RTX;
6776 /* If any of the hard registers in EQUIV are spill
6777 registers, mark them as in use for this insn. */
6778 for (k = 0; k < nr; k++)
6780 i = spill_reg_order[regno + k];
6783 mark_reload_reg_in_use (regno, rld[r].opnum,
6786 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6793 /* If we found a register to use already, or if this is an optional
6794 reload, we are done. */
6795 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6799 /* No longer needed for correct operation. Might or might
6800 not give better code on the average. Want to experiment? */
6802 /* See if there is a later reload that has a class different from our
6803 class that intersects our class or that requires less register
6804 than our reload. If so, we must allocate a register to this
6805 reload now, since that reload might inherit a previous reload
6806 and take the only available register in our class. Don't do this
6807 for optional reloads since they will force all previous reloads
6808 to be allocated. Also don't do this for reloads that have been
6811 for (i = j + 1; i < n_reloads; i++)
6813 int s = reload_order[i];
6815 if ((rld[s].in == 0 && rld[s].out == 0
6816 && ! rld[s].secondary_p)
6820 if ((rld[s].rclass != rld[r].rclass
6821 && reg_classes_intersect_p (rld[r].rclass,
6823 || rld[s].nregs < rld[r].nregs)
6830 allocate_reload_reg (chain, r, j == n_reloads - 1);
6834 /* Now allocate reload registers for anything non-optional that
6835 didn't get one yet. */
6836 for (j = 0; j < n_reloads; j++)
6838 int r = reload_order[j];
6840 /* Ignore reloads that got marked inoperative. */
6841 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6844 /* Skip reloads that already have a register allocated or are
6846 if (rld[r].reg_rtx != 0 || rld[r].optional)
6849 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6853 /* If that loop got all the way, we have won. */
6860 /* Loop around and try without any inheritance. */
6865 /* First undo everything done by the failed attempt
6866 to allocate with inheritance. */
6867 choose_reload_regs_init (chain, save_reload_reg_rtx);
6869 /* Some sanity tests to verify that the reloads found in the first
6870 pass are identical to the ones we have now. */
6871 gcc_assert (chain->n_reloads == n_reloads);
6873 for (i = 0; i < n_reloads; i++)
6875 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6877 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6878 for (j = 0; j < n_spills; j++)
6879 if (spill_regs[j] == chain->rld[i].regno)
6880 if (! set_reload_reg (j, i))
6881 failed_reload (chain->insn, i);
6885 /* If we thought we could inherit a reload, because it seemed that
6886 nothing else wanted the same reload register earlier in the insn,
6887 verify that assumption, now that all reloads have been assigned.
6888 Likewise for reloads where reload_override_in has been set. */
6890 /* If doing expensive optimizations, do one preliminary pass that doesn't
6891 cancel any inheritance, but removes reloads that have been needed only
6892 for reloads that we know can be inherited. */
6893 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6895 for (j = 0; j < n_reloads; j++)
6897 int r = reload_order[j];
6899 if (reload_inherited[r] && rld[r].reg_rtx)
6900 check_reg = rld[r].reg_rtx;
6901 else if (reload_override_in[r]
6902 && (REG_P (reload_override_in[r])
6903 || GET_CODE (reload_override_in[r]) == SUBREG))
6904 check_reg = reload_override_in[r];
6907 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6908 rld[r].opnum, rld[r].when_needed, rld[r].in,
6909 (reload_inherited[r]
6910 ? rld[r].out : const0_rtx),
6915 reload_inherited[r] = 0;
6916 reload_override_in[r] = 0;
6918 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6919 reload_override_in, then we do not need its related
6920 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6921 likewise for other reload types.
6922 We handle this by removing a reload when its only replacement
6923 is mentioned in reload_in of the reload we are going to inherit.
6924 A special case are auto_inc expressions; even if the input is
6925 inherited, we still need the address for the output. We can
6926 recognize them because they have RELOAD_OUT set to RELOAD_IN.
6927 If we succeeded removing some reload and we are doing a preliminary
6928 pass just to remove such reloads, make another pass, since the
6929 removal of one reload might allow us to inherit another one. */
6931 && rld[r].out != rld[r].in
6932 && remove_address_replacements (rld[r].in) && pass)
6937 /* Now that reload_override_in is known valid,
6938 actually override reload_in. */
6939 for (j = 0; j < n_reloads; j++)
6940 if (reload_override_in[j])
6941 rld[j].in = reload_override_in[j];
6943 /* If this reload won't be done because it has been canceled or is
6944 optional and not inherited, clear reload_reg_rtx so other
6945 routines (such as subst_reloads) don't get confused. */
6946 for (j = 0; j < n_reloads; j++)
6947 if (rld[j].reg_rtx != 0
6948 && ((rld[j].optional && ! reload_inherited[j])
6949 || (rld[j].in == 0 && rld[j].out == 0
6950 && ! rld[j].secondary_p)))
6952 int regno = true_regnum (rld[j].reg_rtx);
6954 if (spill_reg_order[regno] >= 0)
6955 clear_reload_reg_in_use (regno, rld[j].opnum,
6956 rld[j].when_needed, rld[j].mode);
6958 reload_spill_index[j] = -1;
6961 /* Record which pseudos and which spill regs have output reloads. */
6962 for (j = 0; j < n_reloads; j++)
6964 int r = reload_order[j];
6966 i = reload_spill_index[r];
6968 /* I is nonneg if this reload uses a register.
6969 If rld[r].reg_rtx is 0, this is an optional reload
6970 that we opted to ignore. */
6971 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6972 && rld[r].reg_rtx != 0)
6974 int nregno = REGNO (rld[r].out_reg);
6977 if (nregno < FIRST_PSEUDO_REGISTER)
6978 nr = hard_regno_nregs[nregno][rld[r].mode];
6981 SET_REGNO_REG_SET (®_has_output_reload,
6986 nr = hard_regno_nregs[i][rld[r].mode];
6988 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6991 gcc_assert (rld[r].when_needed == RELOAD_OTHER
6992 || rld[r].when_needed == RELOAD_FOR_OUTPUT
6993 || rld[r].when_needed == RELOAD_FOR_INSN);
6998 /* Deallocate the reload register for reload R. This is called from
6999 remove_address_replacements. */
7002 deallocate_reload_reg (int r)
7006 if (! rld[r].reg_rtx)
7008 regno = true_regnum (rld[r].reg_rtx);
7010 if (spill_reg_order[regno] >= 0)
7011 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7013 reload_spill_index[r] = -1;
7016 /* If the small_register_classes_for_mode_p target hook returns true for
7017 some machine modes, we may not have merged two reloads of the same item
7018 for fear that we might not have enough reload registers. However,
7019 normally they will get the same reload register and hence actually need
7020 not be loaded twice.
7022 Here we check for the most common case of this phenomenon: when we have
7023 a number of reloads for the same object, each of which were allocated
7024 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
7025 reload, and is not modified in the insn itself. If we find such,
7026 merge all the reloads and set the resulting reload to RELOAD_OTHER.
7027 This will not increase the number of spill registers needed and will
7028 prevent redundant code. */
7031 merge_assigned_reloads (rtx insn)
7035 /* Scan all the reloads looking for ones that only load values and
7036 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
7037 assigned and not modified by INSN. */
7039 for (i = 0; i < n_reloads; i++)
7041 int conflicting_input = 0;
7042 int max_input_address_opnum = -1;
7043 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
7045 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
7046 || rld[i].out != 0 || rld[i].reg_rtx == 0
7047 || reg_set_p (rld[i].reg_rtx, insn))
7050 /* Look at all other reloads. Ensure that the only use of this
7051 reload_reg_rtx is in a reload that just loads the same value
7052 as we do. Note that any secondary reloads must be of the identical
7053 class since the values, modes, and result registers are the
7054 same, so we need not do anything with any secondary reloads. */
7056 for (j = 0; j < n_reloads; j++)
7058 if (i == j || rld[j].reg_rtx == 0
7059 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
7063 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
7064 && rld[j].opnum > max_input_address_opnum)
7065 max_input_address_opnum = rld[j].opnum;
7067 /* If the reload regs aren't exactly the same (e.g, different modes)
7068 or if the values are different, we can't merge this reload.
7069 But if it is an input reload, we might still merge
7070 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
7072 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
7073 || rld[j].out != 0 || rld[j].in == 0
7074 || ! rtx_equal_p (rld[i].in, rld[j].in))
7076 if (rld[j].when_needed != RELOAD_FOR_INPUT
7077 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
7078 || rld[i].opnum > rld[j].opnum)
7079 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
7081 conflicting_input = 1;
7082 if (min_conflicting_input_opnum > rld[j].opnum)
7083 min_conflicting_input_opnum = rld[j].opnum;
7087 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
7088 we, in fact, found any matching reloads. */
7091 && max_input_address_opnum <= min_conflicting_input_opnum)
7093 gcc_assert (rld[i].when_needed != RELOAD_FOR_OUTPUT);
7095 for (j = 0; j < n_reloads; j++)
7096 if (i != j && rld[j].reg_rtx != 0
7097 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
7098 && (! conflicting_input
7099 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
7100 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
7102 rld[i].when_needed = RELOAD_OTHER;
7104 reload_spill_index[j] = -1;
7105 transfer_replacements (i, j);
7108 /* If this is now RELOAD_OTHER, look for any reloads that
7109 load parts of this operand and set them to
7110 RELOAD_FOR_OTHER_ADDRESS if they were for inputs,
7111 RELOAD_OTHER for outputs. Note that this test is
7112 equivalent to looking for reloads for this operand
7115 We must take special care with RELOAD_FOR_OUTPUT_ADDRESS;
7116 it may share registers with a RELOAD_FOR_INPUT, so we can
7117 not change it to RELOAD_FOR_OTHER_ADDRESS. We should
7118 never need to, since we do not modify RELOAD_FOR_OUTPUT.
7120 It is possible that the RELOAD_FOR_OPERAND_ADDRESS
7121 instruction is assigned the same register as the earlier
7122 RELOAD_FOR_OTHER_ADDRESS instruction. Merging these two
7123 instructions will cause the RELOAD_FOR_OTHER_ADDRESS
7124 instruction to be deleted later on. */
7126 if (rld[i].when_needed == RELOAD_OTHER)
7127 for (j = 0; j < n_reloads; j++)
7129 && rld[j].when_needed != RELOAD_OTHER
7130 && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
7131 && rld[j].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
7132 && rld[j].when_needed != RELOAD_FOR_OPERAND_ADDRESS
7133 && (! conflicting_input
7134 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
7135 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
7136 && reg_overlap_mentioned_for_reload_p (rld[j].in,
7142 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
7143 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
7144 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
7146 /* Check to see if we accidentally converted two
7147 reloads that use the same reload register with
7148 different inputs to the same type. If so, the
7149 resulting code won't work. */
7151 for (k = 0; k < j; k++)
7152 gcc_assert (rld[k].in == 0 || rld[k].reg_rtx == 0
7153 || rld[k].when_needed != rld[j].when_needed
7154 || !rtx_equal_p (rld[k].reg_rtx,
7156 || rtx_equal_p (rld[k].in,
7163 /* These arrays are filled by emit_reload_insns and its subroutines. */
7164 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
7165 static rtx other_input_address_reload_insns = 0;
7166 static rtx other_input_reload_insns = 0;
7167 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
7168 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7169 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
7170 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
7171 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7172 static rtx operand_reload_insns = 0;
7173 static rtx other_operand_reload_insns = 0;
7174 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
7176 /* Values to be put in spill_reg_store are put here first. */
7177 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
7178 static HARD_REG_SET reg_reloaded_died;
7180 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7181 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7182 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7183 adjusted register, and return true. Otherwise, return false. */
7185 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7186 enum reg_class new_class,
7187 enum machine_mode new_mode)
7192 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7194 unsigned regno = REGNO (reg);
7196 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7198 if (GET_MODE (reg) != new_mode)
7200 if (!HARD_REGNO_MODE_OK (regno, new_mode))
7202 if (hard_regno_nregs[regno][new_mode]
7203 > hard_regno_nregs[regno][GET_MODE (reg)])
7205 reg = reload_adjust_reg_for_mode (reg, new_mode);
7213 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7214 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7215 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7216 adjusted register, and return true. Otherwise, return false. */
7218 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7219 enum insn_code icode)
7222 enum reg_class new_class = scratch_reload_class (icode);
7223 enum machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7225 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7226 new_class, new_mode);
7229 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7230 has the number J. OLD contains the value to be used as input. */
7233 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7236 rtx insn = chain->insn;
7238 rtx oldequiv_reg = 0;
7241 enum machine_mode mode;
7244 /* delete_output_reload is only invoked properly if old contains
7245 the original pseudo register. Since this is replaced with a
7246 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7247 find the pseudo in RELOAD_IN_REG. */
7248 if (reload_override_in[j]
7249 && REG_P (rl->in_reg))
7256 else if (REG_P (oldequiv))
7257 oldequiv_reg = oldequiv;
7258 else if (GET_CODE (oldequiv) == SUBREG)
7259 oldequiv_reg = SUBREG_REG (oldequiv);
7261 reloadreg = reload_reg_rtx_for_input[j];
7262 mode = GET_MODE (reloadreg);
7264 /* If we are reloading from a register that was recently stored in
7265 with an output-reload, see if we can prove there was
7266 actually no need to store the old value in it. */
7268 if (optimize && REG_P (oldequiv)
7269 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7270 && spill_reg_store[REGNO (oldequiv)]
7272 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7273 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7275 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7277 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7280 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7281 oldequiv = SUBREG_REG (oldequiv);
7282 if (GET_MODE (oldequiv) != VOIDmode
7283 && mode != GET_MODE (oldequiv))
7284 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7286 /* Switch to the right place to emit the reload insns. */
7287 switch (rl->when_needed)
7290 where = &other_input_reload_insns;
7292 case RELOAD_FOR_INPUT:
7293 where = &input_reload_insns[rl->opnum];
7295 case RELOAD_FOR_INPUT_ADDRESS:
7296 where = &input_address_reload_insns[rl->opnum];
7298 case RELOAD_FOR_INPADDR_ADDRESS:
7299 where = &inpaddr_address_reload_insns[rl->opnum];
7301 case RELOAD_FOR_OUTPUT_ADDRESS:
7302 where = &output_address_reload_insns[rl->opnum];
7304 case RELOAD_FOR_OUTADDR_ADDRESS:
7305 where = &outaddr_address_reload_insns[rl->opnum];
7307 case RELOAD_FOR_OPERAND_ADDRESS:
7308 where = &operand_reload_insns;
7310 case RELOAD_FOR_OPADDR_ADDR:
7311 where = &other_operand_reload_insns;
7313 case RELOAD_FOR_OTHER_ADDRESS:
7314 where = &other_input_address_reload_insns;
7320 push_to_sequence (*where);
7322 /* Auto-increment addresses must be reloaded in a special way. */
7323 if (rl->out && ! rl->out_reg)
7325 /* We are not going to bother supporting the case where a
7326 incremented register can't be copied directly from
7327 OLDEQUIV since this seems highly unlikely. */
7328 gcc_assert (rl->secondary_in_reload < 0);
7330 if (reload_inherited[j])
7331 oldequiv = reloadreg;
7333 old = XEXP (rl->in_reg, 0);
7335 if (optimize && REG_P (oldequiv)
7336 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7337 && spill_reg_store[REGNO (oldequiv)]
7339 && (dead_or_set_p (insn,
7340 spill_reg_stored_to[REGNO (oldequiv)])
7341 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7343 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7345 /* Prevent normal processing of this reload. */
7347 /* Output a special code sequence for this case. */
7348 new_spill_reg_store[REGNO (reloadreg)]
7349 = inc_for_reload (reloadreg, oldequiv, rl->out,
7353 /* If we are reloading a pseudo-register that was set by the previous
7354 insn, see if we can get rid of that pseudo-register entirely
7355 by redirecting the previous insn into our reload register. */
7357 else if (optimize && REG_P (old)
7358 && REGNO (old) >= FIRST_PSEUDO_REGISTER
7359 && dead_or_set_p (insn, old)
7360 /* This is unsafe if some other reload
7361 uses the same reg first. */
7362 && ! conflicts_with_override (reloadreg)
7363 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7364 rl->when_needed, old, rl->out, j, 0))
7366 rtx temp = PREV_INSN (insn);
7367 while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7368 temp = PREV_INSN (temp);
7370 && NONJUMP_INSN_P (temp)
7371 && GET_CODE (PATTERN (temp)) == SET
7372 && SET_DEST (PATTERN (temp)) == old
7373 /* Make sure we can access insn_operand_constraint. */
7374 && asm_noperands (PATTERN (temp)) < 0
7375 /* This is unsafe if operand occurs more than once in current
7376 insn. Perhaps some occurrences aren't reloaded. */
7377 && count_occurrences (PATTERN (insn), old, 0) == 1)
7379 rtx old = SET_DEST (PATTERN (temp));
7380 /* Store into the reload register instead of the pseudo. */
7381 SET_DEST (PATTERN (temp)) = reloadreg;
7383 /* Verify that resulting insn is valid. */
7384 extract_insn (temp);
7385 if (constrain_operands (1))
7387 /* If the previous insn is an output reload, the source is
7388 a reload register, and its spill_reg_store entry will
7389 contain the previous destination. This is now
7391 if (REG_P (SET_SRC (PATTERN (temp)))
7392 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7394 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7395 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7398 /* If these are the only uses of the pseudo reg,
7399 pretend for GDB it lives in the reload reg we used. */
7400 if (REG_N_DEATHS (REGNO (old)) == 1
7401 && REG_N_SETS (REGNO (old)) == 1)
7403 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7404 if (ira_conflicts_p)
7405 /* Inform IRA about the change. */
7406 ira_mark_allocation_change (REGNO (old));
7407 alter_reg (REGNO (old), -1, false);
7411 /* Adjust any debug insns between temp and insn. */
7412 while ((temp = NEXT_INSN (temp)) != insn)
7413 if (DEBUG_INSN_P (temp))
7414 replace_rtx (PATTERN (temp), old, reloadreg);
7416 gcc_assert (NOTE_P (temp));
7420 SET_DEST (PATTERN (temp)) = old;
7425 /* We can't do that, so output an insn to load RELOADREG. */
7427 /* If we have a secondary reload, pick up the secondary register
7428 and icode, if any. If OLDEQUIV and OLD are different or
7429 if this is an in-out reload, recompute whether or not we
7430 still need a secondary register and what the icode should
7431 be. If we still need a secondary register and the class or
7432 icode is different, go back to reloading from OLD if using
7433 OLDEQUIV means that we got the wrong type of register. We
7434 cannot have different class or icode due to an in-out reload
7435 because we don't make such reloads when both the input and
7436 output need secondary reload registers. */
7438 if (! special && rl->secondary_in_reload >= 0)
7440 rtx second_reload_reg = 0;
7441 rtx third_reload_reg = 0;
7442 int secondary_reload = rl->secondary_in_reload;
7443 rtx real_oldequiv = oldequiv;
7446 enum insn_code icode;
7447 enum insn_code tertiary_icode = CODE_FOR_nothing;
7449 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7450 and similarly for OLD.
7451 See comments in get_secondary_reload in reload.c. */
7452 /* If it is a pseudo that cannot be replaced with its
7453 equivalent MEM, we must fall back to reload_in, which
7454 will have all the necessary substitutions registered.
7455 Likewise for a pseudo that can't be replaced with its
7456 equivalent constant.
7458 Take extra care for subregs of such pseudos. Note that
7459 we cannot use reg_equiv_mem in this case because it is
7460 not in the right mode. */
7463 if (GET_CODE (tmp) == SUBREG)
7464 tmp = SUBREG_REG (tmp);
7466 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7467 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7468 || reg_equiv_constant[REGNO (tmp)] != 0))
7470 if (! reg_equiv_mem[REGNO (tmp)]
7471 || num_not_at_initial_offset
7472 || GET_CODE (oldequiv) == SUBREG)
7473 real_oldequiv = rl->in;
7475 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
7479 if (GET_CODE (tmp) == SUBREG)
7480 tmp = SUBREG_REG (tmp);
7482 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7483 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
7484 || reg_equiv_constant[REGNO (tmp)] != 0))
7486 if (! reg_equiv_mem[REGNO (tmp)]
7487 || num_not_at_initial_offset
7488 || GET_CODE (old) == SUBREG)
7491 real_old = reg_equiv_mem[REGNO (tmp)];
7494 second_reload_reg = rld[secondary_reload].reg_rtx;
7495 if (rld[secondary_reload].secondary_in_reload >= 0)
7497 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7499 third_reload_reg = rld[tertiary_reload].reg_rtx;
7500 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7501 /* We'd have to add more code for quartary reloads. */
7502 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7504 icode = rl->secondary_in_icode;
7506 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7507 || (rl->in != 0 && rl->out != 0))
7509 secondary_reload_info sri, sri2;
7510 enum reg_class new_class, new_t_class;
7512 sri.icode = CODE_FOR_nothing;
7513 sri.prev_sri = NULL;
7514 new_class = targetm.secondary_reload (1, real_oldequiv, rl->rclass,
7517 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7518 second_reload_reg = 0;
7519 else if (new_class == NO_REGS)
7521 if (reload_adjust_reg_for_icode (&second_reload_reg,
7523 (enum insn_code) sri.icode))
7525 icode = (enum insn_code) sri.icode;
7526 third_reload_reg = 0;
7531 real_oldequiv = real_old;
7534 else if (sri.icode != CODE_FOR_nothing)
7535 /* We currently lack a way to express this in reloads. */
7539 sri2.icode = CODE_FOR_nothing;
7540 sri2.prev_sri = &sri;
7541 new_t_class = targetm.secondary_reload (1, real_oldequiv,
7542 new_class, mode, &sri);
7543 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7545 if (reload_adjust_reg_for_temp (&second_reload_reg,
7549 third_reload_reg = 0;
7550 tertiary_icode = (enum insn_code) sri2.icode;
7555 real_oldequiv = real_old;
7558 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7560 rtx intermediate = second_reload_reg;
7562 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7564 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7568 second_reload_reg = intermediate;
7569 tertiary_icode = (enum insn_code) sri2.icode;
7574 real_oldequiv = real_old;
7577 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7579 rtx intermediate = second_reload_reg;
7581 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7583 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7586 second_reload_reg = intermediate;
7587 tertiary_icode = (enum insn_code) sri2.icode;
7592 real_oldequiv = real_old;
7597 /* This could be handled more intelligently too. */
7599 real_oldequiv = real_old;
7604 /* If we still need a secondary reload register, check
7605 to see if it is being used as a scratch or intermediate
7606 register and generate code appropriately. If we need
7607 a scratch register, use REAL_OLDEQUIV since the form of
7608 the insn may depend on the actual address if it is
7611 if (second_reload_reg)
7613 if (icode != CODE_FOR_nothing)
7615 /* We'd have to add extra code to handle this case. */
7616 gcc_assert (!third_reload_reg);
7618 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7619 second_reload_reg));
7624 /* See if we need a scratch register to load the
7625 intermediate register (a tertiary reload). */
7626 if (tertiary_icode != CODE_FOR_nothing)
7628 emit_insn ((GEN_FCN (tertiary_icode)
7629 (second_reload_reg, real_oldequiv,
7630 third_reload_reg)));
7632 else if (third_reload_reg)
7634 gen_reload (third_reload_reg, real_oldequiv,
7637 gen_reload (second_reload_reg, third_reload_reg,
7642 gen_reload (second_reload_reg, real_oldequiv,
7646 oldequiv = second_reload_reg;
7651 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7653 rtx real_oldequiv = oldequiv;
7655 if ((REG_P (oldequiv)
7656 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7657 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7658 || reg_equiv_constant[REGNO (oldequiv)] != 0))
7659 || (GET_CODE (oldequiv) == SUBREG
7660 && REG_P (SUBREG_REG (oldequiv))
7661 && (REGNO (SUBREG_REG (oldequiv))
7662 >= FIRST_PSEUDO_REGISTER)
7663 && ((reg_equiv_memory_loc
7664 [REGNO (SUBREG_REG (oldequiv))] != 0)
7665 || (reg_equiv_constant
7666 [REGNO (SUBREG_REG (oldequiv))] != 0)))
7667 || (CONSTANT_P (oldequiv)
7668 && (PREFERRED_RELOAD_CLASS (oldequiv,
7669 REGNO_REG_CLASS (REGNO (reloadreg)))
7671 real_oldequiv = rl->in;
7672 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7676 if (cfun->can_throw_non_call_exceptions)
7677 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7679 /* End this sequence. */
7680 *where = get_insns ();
7683 /* Update reload_override_in so that delete_address_reloads_1
7684 can see the actual register usage. */
7686 reload_override_in[j] = oldequiv;
7689 /* Generate insns to for the output reload RL, which is for the insn described
7690 by CHAIN and has the number J. */
7692 emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
7696 rtx insn = chain->insn;
7699 enum machine_mode mode;
7703 if (rl->when_needed == RELOAD_OTHER)
7706 push_to_sequence (output_reload_insns[rl->opnum]);
7708 rl_reg_rtx = reload_reg_rtx_for_output[j];
7709 mode = GET_MODE (rl_reg_rtx);
7711 reloadreg = rl_reg_rtx;
7713 /* If we need two reload regs, set RELOADREG to the intermediate
7714 one, since it will be stored into OLD. We might need a secondary
7715 register only for an input reload, so check again here. */
7717 if (rl->secondary_out_reload >= 0)
7720 int secondary_reload = rl->secondary_out_reload;
7721 int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7723 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7724 && reg_equiv_mem[REGNO (old)] != 0)
7725 real_old = reg_equiv_mem[REGNO (old)];
7727 if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7729 rtx second_reloadreg = reloadreg;
7730 reloadreg = rld[secondary_reload].reg_rtx;
7732 /* See if RELOADREG is to be used as a scratch register
7733 or as an intermediate register. */
7734 if (rl->secondary_out_icode != CODE_FOR_nothing)
7736 /* We'd have to add extra code to handle this case. */
7737 gcc_assert (tertiary_reload < 0);
7739 emit_insn ((GEN_FCN (rl->secondary_out_icode)
7740 (real_old, second_reloadreg, reloadreg)));
7745 /* See if we need both a scratch and intermediate reload
7748 enum insn_code tertiary_icode
7749 = rld[secondary_reload].secondary_out_icode;
7751 /* We'd have to add more code for quartary reloads. */
7752 gcc_assert (tertiary_reload < 0
7753 || rld[tertiary_reload].secondary_out_reload < 0);
7755 if (GET_MODE (reloadreg) != mode)
7756 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7758 if (tertiary_icode != CODE_FOR_nothing)
7760 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7763 /* Copy primary reload reg to secondary reload reg.
7764 (Note that these have been swapped above, then
7765 secondary reload reg to OLD using our insn.) */
7767 /* If REAL_OLD is a paradoxical SUBREG, remove it
7768 and try to put the opposite SUBREG on
7770 if (GET_CODE (real_old) == SUBREG
7771 && (GET_MODE_SIZE (GET_MODE (real_old))
7772 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7773 && 0 != (tem = gen_lowpart_common
7774 (GET_MODE (SUBREG_REG (real_old)),
7776 real_old = SUBREG_REG (real_old), reloadreg = tem;
7778 gen_reload (reloadreg, second_reloadreg,
7779 rl->opnum, rl->when_needed);
7780 emit_insn ((GEN_FCN (tertiary_icode)
7781 (real_old, reloadreg, third_reloadreg)));
7787 /* Copy between the reload regs here and then to
7790 gen_reload (reloadreg, second_reloadreg,
7791 rl->opnum, rl->when_needed);
7792 if (tertiary_reload >= 0)
7794 rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7796 gen_reload (third_reloadreg, reloadreg,
7797 rl->opnum, rl->when_needed);
7798 reloadreg = third_reloadreg;
7805 /* Output the last reload insn. */
7810 /* Don't output the last reload if OLD is not the dest of
7811 INSN and is in the src and is clobbered by INSN. */
7812 if (! flag_expensive_optimizations
7814 || !(set = single_set (insn))
7815 || rtx_equal_p (old, SET_DEST (set))
7816 || !reg_mentioned_p (old, SET_SRC (set))
7817 || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7818 && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7819 gen_reload (old, reloadreg, rl->opnum,
7823 /* Look at all insns we emitted, just to be safe. */
7824 for (p = get_insns (); p; p = NEXT_INSN (p))
7827 rtx pat = PATTERN (p);
7829 /* If this output reload doesn't come from a spill reg,
7830 clear any memory of reloaded copies of the pseudo reg.
7831 If this output reload comes from a spill reg,
7832 reg_has_output_reload will make this do nothing. */
7833 note_stores (pat, forget_old_reloads_1, NULL);
7835 if (reg_mentioned_p (rl_reg_rtx, pat))
7837 rtx set = single_set (insn);
7838 if (reload_spill_index[j] < 0
7840 && SET_SRC (set) == rl_reg_rtx)
7842 int src = REGNO (SET_SRC (set));
7844 reload_spill_index[j] = src;
7845 SET_HARD_REG_BIT (reg_is_output_reload, src);
7846 if (find_regno_note (insn, REG_DEAD, src))
7847 SET_HARD_REG_BIT (reg_reloaded_died, src);
7849 if (HARD_REGISTER_P (rl_reg_rtx))
7851 int s = rl->secondary_out_reload;
7852 set = single_set (p);
7853 /* If this reload copies only to the secondary reload
7854 register, the secondary reload does the actual
7856 if (s >= 0 && set == NULL_RTX)
7857 /* We can't tell what function the secondary reload
7858 has and where the actual store to the pseudo is
7859 made; leave new_spill_reg_store alone. */
7862 && SET_SRC (set) == rl_reg_rtx
7863 && SET_DEST (set) == rld[s].reg_rtx)
7865 /* Usually the next instruction will be the
7866 secondary reload insn; if we can confirm
7867 that it is, setting new_spill_reg_store to
7868 that insn will allow an extra optimization. */
7869 rtx s_reg = rld[s].reg_rtx;
7870 rtx next = NEXT_INSN (p);
7871 rld[s].out = rl->out;
7872 rld[s].out_reg = rl->out_reg;
7873 set = single_set (next);
7874 if (set && SET_SRC (set) == s_reg
7875 && ! new_spill_reg_store[REGNO (s_reg)])
7877 SET_HARD_REG_BIT (reg_is_output_reload,
7879 new_spill_reg_store[REGNO (s_reg)] = next;
7883 new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7888 if (rl->when_needed == RELOAD_OTHER)
7890 emit_insn (other_output_reload_insns[rl->opnum]);
7891 other_output_reload_insns[rl->opnum] = get_insns ();
7894 output_reload_insns[rl->opnum] = get_insns ();
7896 if (cfun->can_throw_non_call_exceptions)
7897 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7902 /* Do input reloading for reload RL, which is for the insn described by CHAIN
7903 and has the number J. */
7905 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
7907 rtx insn = chain->insn;
7908 rtx old = (rl->in && MEM_P (rl->in)
7909 ? rl->in_reg : rl->in);
7910 rtx reg_rtx = rl->reg_rtx;
7914 enum machine_mode mode;
7916 /* Determine the mode to reload in.
7917 This is very tricky because we have three to choose from.
7918 There is the mode the insn operand wants (rl->inmode).
7919 There is the mode of the reload register RELOADREG.
7920 There is the intrinsic mode of the operand, which we could find
7921 by stripping some SUBREGs.
7922 It turns out that RELOADREG's mode is irrelevant:
7923 we can change that arbitrarily.
7925 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7926 then the reload reg may not support QImode moves, so use SImode.
7927 If foo is in memory due to spilling a pseudo reg, this is safe,
7928 because the QImode value is in the least significant part of a
7929 slot big enough for a SImode. If foo is some other sort of
7930 memory reference, then it is impossible to reload this case,
7931 so previous passes had better make sure this never happens.
7933 Then consider a one-word union which has SImode and one of its
7934 members is a float, being fetched as (SUBREG:SF union:SI).
7935 We must fetch that as SFmode because we could be loading into
7936 a float-only register. In this case OLD's mode is correct.
7938 Consider an immediate integer: it has VOIDmode. Here we need
7939 to get a mode from something else.
7941 In some cases, there is a fourth mode, the operand's
7942 containing mode. If the insn specifies a containing mode for
7943 this operand, it overrides all others.
7945 I am not sure whether the algorithm here is always right,
7946 but it does the right things in those cases. */
7948 mode = GET_MODE (old);
7949 if (mode == VOIDmode)
7952 /* We cannot use gen_lowpart_common since it can do the wrong thing
7953 when REG_RTX has a multi-word mode. Note that REG_RTX must
7954 always be a REG here. */
7955 if (GET_MODE (reg_rtx) != mode)
7956 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7958 reload_reg_rtx_for_input[j] = reg_rtx;
7961 /* AUTO_INC reloads need to be handled even if inherited. We got an
7962 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7963 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7964 && ! rtx_equal_p (reg_rtx, old)
7966 emit_input_reload_insns (chain, rld + j, old, j);
7968 /* When inheriting a wider reload, we have a MEM in rl->in,
7969 e.g. inheriting a SImode output reload for
7970 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7971 if (optimize && reload_inherited[j] && rl->in
7973 && MEM_P (rl->in_reg)
7974 && reload_spill_index[j] >= 0
7975 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7976 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7978 /* If we are reloading a register that was recently stored in with an
7979 output-reload, see if we can prove there was
7980 actually no need to store the old value in it. */
7983 && (reload_inherited[j] || reload_override_in[j])
7986 && spill_reg_store[REGNO (reg_rtx)] != 0
7988 /* There doesn't seem to be any reason to restrict this to pseudos
7989 and doing so loses in the case where we are copying from a
7990 register of the wrong class. */
7991 && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7993 /* The insn might have already some references to stackslots
7994 replaced by MEMs, while reload_out_reg still names the
7996 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7997 || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7998 delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
8001 /* Do output reloading for reload RL, which is for the insn described by
8002 CHAIN and has the number J.
8003 ??? At some point we need to support handling output reloads of
8004 JUMP_INSNs or insns that set cc0. */
8006 do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
8009 rtx insn = chain->insn;
8010 /* If this is an output reload that stores something that is
8011 not loaded in this same reload, see if we can eliminate a previous
8013 rtx pseudo = rl->out_reg;
8014 rtx reg_rtx = rl->reg_rtx;
8016 if (rl->out && reg_rtx)
8018 enum machine_mode mode;
8020 /* Determine the mode to reload in.
8021 See comments above (for input reloading). */
8022 mode = GET_MODE (rl->out);
8023 if (mode == VOIDmode)
8025 /* VOIDmode should never happen for an output. */
8026 if (asm_noperands (PATTERN (insn)) < 0)
8027 /* It's the compiler's fault. */
8028 fatal_insn ("VOIDmode on an output", insn);
8029 error_for_asm (insn, "output operand is constant in %<asm%>");
8030 /* Prevent crash--use something we know is valid. */
8032 rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
8034 if (GET_MODE (reg_rtx) != mode)
8035 reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
8037 reload_reg_rtx_for_output[j] = reg_rtx;
8042 && ! rtx_equal_p (rl->in_reg, pseudo)
8043 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
8044 && reg_last_reload_reg[REGNO (pseudo)])
8046 int pseudo_no = REGNO (pseudo);
8047 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
8049 /* We don't need to test full validity of last_regno for
8050 inherit here; we only want to know if the store actually
8051 matches the pseudo. */
8052 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
8053 && reg_reloaded_contents[last_regno] == pseudo_no
8054 && spill_reg_store[last_regno]
8055 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
8056 delete_output_reload (insn, j, last_regno, reg_rtx);
8062 || rtx_equal_p (old, reg_rtx))
8065 /* An output operand that dies right away does need a reload,
8066 but need not be copied from it. Show the new location in the
8068 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
8069 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
8071 XEXP (note, 0) = reg_rtx;
8074 /* Likewise for a SUBREG of an operand that dies. */
8075 else if (GET_CODE (old) == SUBREG
8076 && REG_P (SUBREG_REG (old))
8077 && 0 != (note = find_reg_note (insn, REG_UNUSED,
8080 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
8083 else if (GET_CODE (old) == SCRATCH)
8084 /* If we aren't optimizing, there won't be a REG_UNUSED note,
8085 but we don't want to make an output reload. */
8088 /* If is a JUMP_INSN, we can't support output reloads yet. */
8089 gcc_assert (NONJUMP_INSN_P (insn));
8091 emit_output_reload_insns (chain, rld + j, j);
8094 /* A reload copies values of MODE from register SRC to register DEST.
8095 Return true if it can be treated for inheritance purposes like a
8096 group of reloads, each one reloading a single hard register. The
8097 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8098 occupy the same number of hard registers. */
8101 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8102 int src ATTRIBUTE_UNUSED,
8103 enum machine_mode mode ATTRIBUTE_UNUSED)
8105 #ifdef CANNOT_CHANGE_MODE_CLASS
8106 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8107 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8113 /* Output insns to reload values in and out of the chosen reload regs. */
8116 emit_reload_insns (struct insn_chain *chain)
8118 rtx insn = chain->insn;
8122 CLEAR_HARD_REG_SET (reg_reloaded_died);
8124 for (j = 0; j < reload_n_operands; j++)
8125 input_reload_insns[j] = input_address_reload_insns[j]
8126 = inpaddr_address_reload_insns[j]
8127 = output_reload_insns[j] = output_address_reload_insns[j]
8128 = outaddr_address_reload_insns[j]
8129 = other_output_reload_insns[j] = 0;
8130 other_input_address_reload_insns = 0;
8131 other_input_reload_insns = 0;
8132 operand_reload_insns = 0;
8133 other_operand_reload_insns = 0;
8135 /* Dump reloads into the dump file. */
8138 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8139 debug_reload_to_stream (dump_file);
8142 /* Now output the instructions to copy the data into and out of the
8143 reload registers. Do these in the order that the reloads were reported,
8144 since reloads of base and index registers precede reloads of operands
8145 and the operands may need the base and index registers reloaded. */
8147 for (j = 0; j < n_reloads; j++)
8149 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8153 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8154 new_spill_reg_store[i] = 0;
8157 do_input_reload (chain, rld + j, j);
8158 do_output_reload (chain, rld + j, j);
8161 /* Now write all the insns we made for reloads in the order expected by
8162 the allocation functions. Prior to the insn being reloaded, we write
8163 the following reloads:
8165 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8167 RELOAD_OTHER reloads.
8169 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8170 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8171 RELOAD_FOR_INPUT reload for the operand.
8173 RELOAD_FOR_OPADDR_ADDRS reloads.
8175 RELOAD_FOR_OPERAND_ADDRESS reloads.
8177 After the insn being reloaded, we write the following:
8179 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8180 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8181 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8182 reloads for the operand. The RELOAD_OTHER output reloads are
8183 output in descending order by reload number. */
8185 emit_insn_before (other_input_address_reload_insns, insn);
8186 emit_insn_before (other_input_reload_insns, insn);
8188 for (j = 0; j < reload_n_operands; j++)
8190 emit_insn_before (inpaddr_address_reload_insns[j], insn);
8191 emit_insn_before (input_address_reload_insns[j], insn);
8192 emit_insn_before (input_reload_insns[j], insn);
8195 emit_insn_before (other_operand_reload_insns, insn);
8196 emit_insn_before (operand_reload_insns, insn);
8198 for (j = 0; j < reload_n_operands; j++)
8200 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8201 x = emit_insn_after (output_address_reload_insns[j], x);
8202 x = emit_insn_after (output_reload_insns[j], x);
8203 emit_insn_after (other_output_reload_insns[j], x);
8206 /* For all the spill regs newly reloaded in this instruction,
8207 record what they were reloaded from, so subsequent instructions
8208 can inherit the reloads.
8210 Update spill_reg_store for the reloads of this insn.
8211 Copy the elements that were updated in the loop above. */
8213 for (j = 0; j < n_reloads; j++)
8215 int r = reload_order[j];
8216 int i = reload_spill_index[r];
8218 /* If this is a non-inherited input reload from a pseudo, we must
8219 clear any memory of a previous store to the same pseudo. Only do
8220 something if there will not be an output reload for the pseudo
8222 if (rld[r].in_reg != 0
8223 && ! (reload_inherited[r] || reload_override_in[r]))
8225 rtx reg = rld[r].in_reg;
8227 if (GET_CODE (reg) == SUBREG)
8228 reg = SUBREG_REG (reg);
8231 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8232 && !REGNO_REG_SET_P (®_has_output_reload, REGNO (reg)))
8234 int nregno = REGNO (reg);
8236 if (reg_last_reload_reg[nregno])
8238 int last_regno = REGNO (reg_last_reload_reg[nregno]);
8240 if (reg_reloaded_contents[last_regno] == nregno)
8241 spill_reg_store[last_regno] = 0;
8246 /* I is nonneg if this reload used a register.
8247 If rld[r].reg_rtx is 0, this is an optional reload
8248 that we opted to ignore. */
8250 if (i >= 0 && rld[r].reg_rtx != 0)
8252 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
8255 /* For a multi register reload, we need to check if all or part
8256 of the value lives to the end. */
8257 for (k = 0; k < nr; k++)
8258 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
8259 rld[r].when_needed))
8260 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8262 /* Maybe the spill reg contains a copy of reload_out. */
8264 && (REG_P (rld[r].out)
8268 || REG_P (rld[r].out_reg)))
8271 enum machine_mode mode;
8274 reg = reload_reg_rtx_for_output[r];
8275 mode = GET_MODE (reg);
8276 regno = REGNO (reg);
8277 nregs = hard_regno_nregs[regno][mode];
8278 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
8279 rld[r].when_needed))
8281 rtx out = (REG_P (rld[r].out)
8285 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
8286 int out_regno = REGNO (out);
8287 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8288 : hard_regno_nregs[out_regno][mode]);
8291 spill_reg_store[regno] = new_spill_reg_store[regno];
8292 spill_reg_stored_to[regno] = out;
8293 reg_last_reload_reg[out_regno] = reg;
8295 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8296 && nregs == out_nregs
8297 && inherit_piecemeal_p (out_regno, regno, mode));
8299 /* If OUT_REGNO is a hard register, it may occupy more than
8300 one register. If it does, say what is in the
8301 rest of the registers assuming that both registers
8302 agree on how many words the object takes. If not,
8303 invalidate the subsequent registers. */
8305 if (HARD_REGISTER_NUM_P (out_regno))
8306 for (k = 1; k < out_nregs; k++)
8307 reg_last_reload_reg[out_regno + k]
8308 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8310 /* Now do the inverse operation. */
8311 for (k = 0; k < nregs; k++)
8313 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8314 reg_reloaded_contents[regno + k]
8315 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8318 reg_reloaded_insn[regno + k] = insn;
8319 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8320 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8321 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8324 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8329 /* Maybe the spill reg contains a copy of reload_in. Only do
8330 something if there will not be an output reload for
8331 the register being reloaded. */
8332 else if (rld[r].out_reg == 0
8334 && ((REG_P (rld[r].in)
8335 && !HARD_REGISTER_P (rld[r].in)
8336 && !REGNO_REG_SET_P (®_has_output_reload,
8338 || (REG_P (rld[r].in_reg)
8339 && !REGNO_REG_SET_P (®_has_output_reload,
8340 REGNO (rld[r].in_reg))))
8341 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8344 enum machine_mode mode;
8347 reg = reload_reg_rtx_for_input[r];
8348 mode = GET_MODE (reg);
8349 regno = REGNO (reg);
8350 nregs = hard_regno_nregs[regno][mode];
8351 if (reload_regs_reach_end_p (regno, nregs, rld[r].opnum,
8352 rld[r].when_needed))
8359 if (REG_P (rld[r].in)
8360 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8362 else if (REG_P (rld[r].in_reg))
8365 in = XEXP (rld[r].in_reg, 0);
8366 in_regno = REGNO (in);
8368 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8369 : hard_regno_nregs[in_regno][mode]);
8371 reg_last_reload_reg[in_regno] = reg;
8373 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8374 && nregs == in_nregs
8375 && inherit_piecemeal_p (regno, in_regno, mode));
8377 if (HARD_REGISTER_NUM_P (in_regno))
8378 for (k = 1; k < in_nregs; k++)
8379 reg_last_reload_reg[in_regno + k]
8380 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8382 /* Unless we inherited this reload, show we haven't
8383 recently done a store.
8384 Previous stores of inherited auto_inc expressions
8385 also have to be discarded. */
8386 if (! reload_inherited[r]
8387 || (rld[r].out && ! rld[r].out_reg))
8388 spill_reg_store[regno] = 0;
8390 for (k = 0; k < nregs; k++)
8392 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8393 reg_reloaded_contents[regno + k]
8394 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8397 reg_reloaded_insn[regno + k] = insn;
8398 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8399 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8400 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8403 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8410 /* The following if-statement was #if 0'd in 1.34 (or before...).
8411 It's reenabled in 1.35 because supposedly nothing else
8412 deals with this problem. */
8414 /* If a register gets output-reloaded from a non-spill register,
8415 that invalidates any previous reloaded copy of it.
8416 But forget_old_reloads_1 won't get to see it, because
8417 it thinks only about the original insn. So invalidate it here.
8418 Also do the same thing for RELOAD_OTHER constraints where the
8419 output is discarded. */
8421 && ((rld[r].out != 0
8422 && (REG_P (rld[r].out)
8423 || (MEM_P (rld[r].out)
8424 && REG_P (rld[r].out_reg))))
8425 || (rld[r].out == 0 && rld[r].out_reg
8426 && REG_P (rld[r].out_reg))))
8428 rtx out = ((rld[r].out && REG_P (rld[r].out))
8429 ? rld[r].out : rld[r].out_reg);
8430 int out_regno = REGNO (out);
8431 enum machine_mode mode = GET_MODE (out);
8433 /* REG_RTX is now set or clobbered by the main instruction.
8434 As the comment above explains, forget_old_reloads_1 only
8435 sees the original instruction, and there is no guarantee
8436 that the original instruction also clobbered REG_RTX.
8437 For example, if find_reloads sees that the input side of
8438 a matched operand pair dies in this instruction, it may
8439 use the input register as the reload register.
8441 Calling forget_old_reloads_1 is a waste of effort if
8442 REG_RTX is also the output register.
8444 If we know that REG_RTX holds the value of a pseudo
8445 register, the code after the call will record that fact. */
8446 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8447 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8449 if (!HARD_REGISTER_NUM_P (out_regno))
8451 rtx src_reg, store_insn = NULL_RTX;
8453 reg_last_reload_reg[out_regno] = 0;
8455 /* If we can find a hard register that is stored, record
8456 the storing insn so that we may delete this insn with
8457 delete_output_reload. */
8458 src_reg = reload_reg_rtx_for_output[r];
8460 /* If this is an optional reload, try to find the source reg
8461 from an input reload. */
8464 rtx set = single_set (insn);
8465 if (set && SET_DEST (set) == rld[r].out)
8469 src_reg = SET_SRC (set);
8471 for (k = 0; k < n_reloads; k++)
8473 if (rld[k].in == src_reg)
8475 src_reg = reload_reg_rtx_for_input[k];
8482 store_insn = new_spill_reg_store[REGNO (src_reg)];
8483 if (src_reg && REG_P (src_reg)
8484 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8486 int src_regno, src_nregs, k;
8489 gcc_assert (GET_MODE (src_reg) == mode);
8490 src_regno = REGNO (src_reg);
8491 src_nregs = hard_regno_nregs[src_regno][mode];
8492 /* The place where to find a death note varies with
8493 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8494 necessarily checked exactly in the code that moves
8495 notes, so just check both locations. */
8496 note = find_regno_note (insn, REG_DEAD, src_regno);
8497 if (! note && store_insn)
8498 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8499 for (k = 0; k < src_nregs; k++)
8501 spill_reg_store[src_regno + k] = store_insn;
8502 spill_reg_stored_to[src_regno + k] = out;
8503 reg_reloaded_contents[src_regno + k] = out_regno;
8504 reg_reloaded_insn[src_regno + k] = store_insn;
8505 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8506 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8507 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8509 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8512 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8514 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8516 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8518 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8520 reg_last_reload_reg[out_regno] = src_reg;
8521 /* We have to set reg_has_output_reload here, or else
8522 forget_old_reloads_1 will clear reg_last_reload_reg
8524 SET_REGNO_REG_SET (®_has_output_reload,
8530 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8532 for (k = 0; k < out_nregs; k++)
8533 reg_last_reload_reg[out_regno + k] = 0;
8537 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8540 /* Go through the motions to emit INSN and test if it is strictly valid.
8541 Return the emitted insn if valid, else return NULL. */
8544 emit_insn_if_valid_for_reload (rtx insn)
8546 rtx last = get_last_insn ();
8549 insn = emit_insn (insn);
8550 code = recog_memoized (insn);
8554 extract_insn (insn);
8555 /* We want constrain operands to treat this insn strictly in its
8556 validity determination, i.e., the way it would after reload has
8558 if (constrain_operands (1))
8562 delete_insns_since (last);
8566 /* Emit code to perform a reload from IN (which may be a reload register) to
8567 OUT (which may also be a reload register). IN or OUT is from operand
8568 OPNUM with reload type TYPE.
8570 Returns first insn emitted. */
8573 gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8575 rtx last = get_last_insn ();
8578 /* If IN is a paradoxical SUBREG, remove it and try to put the
8579 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8580 if (GET_CODE (in) == SUBREG
8581 && (GET_MODE_SIZE (GET_MODE (in))
8582 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
8583 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
8584 in = SUBREG_REG (in), out = tem;
8585 else if (GET_CODE (out) == SUBREG
8586 && (GET_MODE_SIZE (GET_MODE (out))
8587 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
8588 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
8589 out = SUBREG_REG (out), in = tem;
8591 /* How to do this reload can get quite tricky. Normally, we are being
8592 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8593 register that didn't get a hard register. In that case we can just
8594 call emit_move_insn.
8596 We can also be asked to reload a PLUS that adds a register or a MEM to
8597 another register, constant or MEM. This can occur during frame pointer
8598 elimination and while reloading addresses. This case is handled by
8599 trying to emit a single insn to perform the add. If it is not valid,
8600 we use a two insn sequence.
8602 Or we can be asked to reload an unary operand that was a fragment of
8603 an addressing mode, into a register. If it isn't recognized as-is,
8604 we try making the unop operand and the reload-register the same:
8605 (set reg:X (unop:X expr:Y))
8606 -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8608 Finally, we could be called to handle an 'o' constraint by putting
8609 an address into a register. In that case, we first try to do this
8610 with a named pattern of "reload_load_address". If no such pattern
8611 exists, we just emit a SET insn and hope for the best (it will normally
8612 be valid on machines that use 'o').
8614 This entire process is made complex because reload will never
8615 process the insns we generate here and so we must ensure that
8616 they will fit their constraints and also by the fact that parts of
8617 IN might be being reloaded separately and replaced with spill registers.
8618 Because of this, we are, in some sense, just guessing the right approach
8619 here. The one listed above seems to work.
8621 ??? At some point, this whole thing needs to be rethought. */
8623 if (GET_CODE (in) == PLUS
8624 && (REG_P (XEXP (in, 0))
8625 || GET_CODE (XEXP (in, 0)) == SUBREG
8626 || MEM_P (XEXP (in, 0)))
8627 && (REG_P (XEXP (in, 1))
8628 || GET_CODE (XEXP (in, 1)) == SUBREG
8629 || CONSTANT_P (XEXP (in, 1))
8630 || MEM_P (XEXP (in, 1))))
8632 /* We need to compute the sum of a register or a MEM and another
8633 register, constant, or MEM, and put it into the reload
8634 register. The best possible way of doing this is if the machine
8635 has a three-operand ADD insn that accepts the required operands.
8637 The simplest approach is to try to generate such an insn and see if it
8638 is recognized and matches its constraints. If so, it can be used.
8640 It might be better not to actually emit the insn unless it is valid,
8641 but we need to pass the insn as an operand to `recog' and
8642 `extract_insn' and it is simpler to emit and then delete the insn if
8643 not valid than to dummy things up. */
8645 rtx op0, op1, tem, insn;
8648 op0 = find_replacement (&XEXP (in, 0));
8649 op1 = find_replacement (&XEXP (in, 1));
8651 /* Since constraint checking is strict, commutativity won't be
8652 checked, so we need to do that here to avoid spurious failure
8653 if the add instruction is two-address and the second operand
8654 of the add is the same as the reload reg, which is frequently
8655 the case. If the insn would be A = B + A, rearrange it so
8656 it will be A = A + B as constrain_operands expects. */
8658 if (REG_P (XEXP (in, 1))
8659 && REGNO (out) == REGNO (XEXP (in, 1)))
8660 tem = op0, op0 = op1, op1 = tem;
8662 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8663 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8665 insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8669 /* If that failed, we must use a conservative two-insn sequence.
8671 Use a move to copy one operand into the reload register. Prefer
8672 to reload a constant, MEM or pseudo since the move patterns can
8673 handle an arbitrary operand. If OP1 is not a constant, MEM or
8674 pseudo and OP1 is not a valid operand for an add instruction, then
8677 After reloading one of the operands into the reload register, add
8678 the reload register to the output register.
8680 If there is another way to do this for a specific machine, a
8681 DEFINE_PEEPHOLE should be specified that recognizes the sequence
8684 code = (int) optab_handler (add_optab, GET_MODE (out))->insn_code;
8686 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8688 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8689 || (code != CODE_FOR_nothing
8690 && ! ((*insn_data[code].operand[2].predicate)
8691 (op1, insn_data[code].operand[2].mode))))
8692 tem = op0, op0 = op1, op1 = tem;
8694 gen_reload (out, op0, opnum, type);
8696 /* If OP0 and OP1 are the same, we can use OUT for OP1.
8697 This fixes a problem on the 32K where the stack pointer cannot
8698 be used as an operand of an add insn. */
8700 if (rtx_equal_p (op0, op1))
8703 insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8706 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8707 set_unique_reg_note (insn, REG_EQUIV, in);
8711 /* If that failed, copy the address register to the reload register.
8712 Then add the constant to the reload register. */
8714 gcc_assert (!reg_overlap_mentioned_p (out, op0));
8715 gen_reload (out, op1, opnum, type);
8716 insn = emit_insn (gen_add2_insn (out, op0));
8717 set_unique_reg_note (insn, REG_EQUIV, in);
8720 #ifdef SECONDARY_MEMORY_NEEDED
8721 /* If we need a memory location to do the move, do it that way. */
8722 else if ((REG_P (in)
8723 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
8724 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
8726 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
8727 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
8728 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
8729 REGNO_REG_CLASS (reg_or_subregno (out)),
8732 /* Get the memory to use and rewrite both registers to its mode. */
8733 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8735 if (GET_MODE (loc) != GET_MODE (out))
8736 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
8738 if (GET_MODE (loc) != GET_MODE (in))
8739 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
8741 gen_reload (loc, in, opnum, type);
8742 gen_reload (out, loc, opnum, type);
8745 else if (REG_P (out) && UNARY_P (in))
8752 op1 = find_replacement (&XEXP (in, 0));
8753 if (op1 != XEXP (in, 0))
8754 in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8756 /* First, try a plain SET. */
8757 set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
8761 /* If that failed, move the inner operand to the reload
8762 register, and try the same unop with the inner expression
8763 replaced with the reload register. */
8765 if (GET_MODE (op1) != GET_MODE (out))
8766 out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8770 gen_reload (out_moded, op1, opnum, type);
8773 = gen_rtx_SET (VOIDmode, out,
8774 gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8776 insn = emit_insn_if_valid_for_reload (insn);
8779 set_unique_reg_note (insn, REG_EQUIV, in);
8783 fatal_insn ("Failure trying to reload:", set);
8785 /* If IN is a simple operand, use gen_move_insn. */
8786 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8788 tem = emit_insn (gen_move_insn (out, in));
8789 /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8790 mark_jump_label (in, tem, 0);
8793 #ifdef HAVE_reload_load_address
8794 else if (HAVE_reload_load_address)
8795 emit_insn (gen_reload_load_address (out, in));
8798 /* Otherwise, just write (set OUT IN) and hope for the best. */
8800 emit_insn (gen_rtx_SET (VOIDmode, out, in));
8802 /* Return the first insn emitted.
8803 We can not just return get_last_insn, because there may have
8804 been multiple instructions emitted. Also note that gen_move_insn may
8805 emit more than one insn itself, so we can not assume that there is one
8806 insn emitted per emit_insn_before call. */
8808 return last ? NEXT_INSN (last) : get_insns ();
8811 /* Delete a previously made output-reload whose result we now believe
8812 is not needed. First we double-check.
8814 INSN is the insn now being processed.
8815 LAST_RELOAD_REG is the hard register number for which we want to delete
8816 the last output reload.
8817 J is the reload-number that originally used REG. The caller has made
8818 certain that reload J doesn't use REG any longer for input.
8819 NEW_RELOAD_REG is reload register that reload J is using for REG. */
8822 delete_output_reload (rtx insn, int j, int last_reload_reg, rtx new_reload_reg)
8824 rtx output_reload_insn = spill_reg_store[last_reload_reg];
8825 rtx reg = spill_reg_stored_to[last_reload_reg];
8828 int n_inherited = 0;
8832 /* It is possible that this reload has been only used to set another reload
8833 we eliminated earlier and thus deleted this instruction too. */
8834 if (INSN_DELETED_P (output_reload_insn))
8837 /* Get the raw pseudo-register referred to. */
8839 while (GET_CODE (reg) == SUBREG)
8840 reg = SUBREG_REG (reg);
8841 substed = reg_equiv_memory_loc[REGNO (reg)];
8843 /* This is unsafe if the operand occurs more often in the current
8844 insn than it is inherited. */
8845 for (k = n_reloads - 1; k >= 0; k--)
8847 rtx reg2 = rld[k].in;
8850 if (MEM_P (reg2) || reload_override_in[k])
8851 reg2 = rld[k].in_reg;
8853 if (rld[k].out && ! rld[k].out_reg)
8854 reg2 = XEXP (rld[k].in_reg, 0);
8856 while (GET_CODE (reg2) == SUBREG)
8857 reg2 = SUBREG_REG (reg2);
8858 if (rtx_equal_p (reg2, reg))
8860 if (reload_inherited[k] || reload_override_in[k] || k == j)
8866 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8867 if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8868 n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8871 n_occurrences += count_occurrences (PATTERN (insn),
8872 eliminate_regs (substed, VOIDmode,
8874 for (i1 = reg_equiv_alt_mem_list[REGNO (reg)]; i1; i1 = XEXP (i1, 1))
8876 gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8877 n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8879 if (n_occurrences > n_inherited)
8882 /* If the pseudo-reg we are reloading is no longer referenced
8883 anywhere between the store into it and here,
8884 and we're within the same basic block, then the value can only
8885 pass through the reload reg and end up here.
8886 Otherwise, give up--return. */
8887 for (i1 = NEXT_INSN (output_reload_insn);
8888 i1 != insn; i1 = NEXT_INSN (i1))
8890 if (NOTE_INSN_BASIC_BLOCK_P (i1))
8892 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8893 && reg_mentioned_p (reg, PATTERN (i1)))
8895 /* If this is USE in front of INSN, we only have to check that
8896 there are no more references than accounted for by inheritance. */
8897 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8899 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8900 i1 = NEXT_INSN (i1);
8902 if (n_occurrences <= n_inherited && i1 == insn)
8908 /* We will be deleting the insn. Remove the spill reg information. */
8909 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
8911 spill_reg_store[last_reload_reg + k] = 0;
8912 spill_reg_stored_to[last_reload_reg + k] = 0;
8915 /* The caller has already checked that REG dies or is set in INSN.
8916 It has also checked that we are optimizing, and thus some
8917 inaccuracies in the debugging information are acceptable.
8918 So we could just delete output_reload_insn. But in some cases
8919 we can improve the debugging information without sacrificing
8920 optimization - maybe even improving the code: See if the pseudo
8921 reg has been completely replaced with reload regs. If so, delete
8922 the store insn and forget we had a stack slot for the pseudo. */
8923 if (rld[j].out != rld[j].in
8924 && REG_N_DEATHS (REGNO (reg)) == 1
8925 && REG_N_SETS (REGNO (reg)) == 1
8926 && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8927 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8931 /* We know that it was used only between here and the beginning of
8932 the current basic block. (We also know that the last use before
8933 INSN was the output reload we are thinking of deleting, but never
8934 mind that.) Search that range; see if any ref remains. */
8935 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8937 rtx set = single_set (i2);
8939 /* Uses which just store in the pseudo don't count,
8940 since if they are the only uses, they are dead. */
8941 if (set != 0 && SET_DEST (set) == reg)
8946 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8947 && reg_mentioned_p (reg, PATTERN (i2)))
8949 /* Some other ref remains; just delete the output reload we
8951 delete_address_reloads (output_reload_insn, insn);
8952 delete_insn (output_reload_insn);
8957 /* Delete the now-dead stores into this pseudo. Note that this
8958 loop also takes care of deleting output_reload_insn. */
8959 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8961 rtx set = single_set (i2);
8963 if (set != 0 && SET_DEST (set) == reg)
8965 delete_address_reloads (i2, insn);
8973 /* For the debugging info, say the pseudo lives in this reload reg. */
8974 reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8975 if (ira_conflicts_p)
8976 /* Inform IRA about the change. */
8977 ira_mark_allocation_change (REGNO (reg));
8978 alter_reg (REGNO (reg), -1, false);
8982 delete_address_reloads (output_reload_insn, insn);
8983 delete_insn (output_reload_insn);
8987 /* We are going to delete DEAD_INSN. Recursively delete loads of
8988 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8989 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8991 delete_address_reloads (rtx dead_insn, rtx current_insn)
8993 rtx set = single_set (dead_insn);
8994 rtx set2, dst, prev, next;
8997 rtx dst = SET_DEST (set);
8999 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
9001 /* If we deleted the store from a reloaded post_{in,de}c expression,
9002 we can delete the matching adds. */
9003 prev = PREV_INSN (dead_insn);
9004 next = NEXT_INSN (dead_insn);
9005 if (! prev || ! next)
9007 set = single_set (next);
9008 set2 = single_set (prev);
9010 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
9011 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
9012 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
9014 dst = SET_DEST (set);
9015 if (! rtx_equal_p (dst, SET_DEST (set2))
9016 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
9017 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
9018 || (INTVAL (XEXP (SET_SRC (set), 1))
9019 != -INTVAL (XEXP (SET_SRC (set2), 1))))
9021 delete_related_insns (prev);
9022 delete_related_insns (next);
9025 /* Subfunction of delete_address_reloads: process registers found in X. */
9027 delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
9029 rtx prev, set, dst, i2;
9031 enum rtx_code code = GET_CODE (x);
9035 const char *fmt = GET_RTX_FORMAT (code);
9036 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9039 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
9040 else if (fmt[i] == 'E')
9042 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9043 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
9050 if (spill_reg_order[REGNO (x)] < 0)
9053 /* Scan backwards for the insn that sets x. This might be a way back due
9055 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
9057 code = GET_CODE (prev);
9058 if (code == CODE_LABEL || code == JUMP_INSN)
9062 if (reg_set_p (x, PATTERN (prev)))
9064 if (reg_referenced_p (x, PATTERN (prev)))
9067 if (! prev || INSN_UID (prev) < reload_first_uid)
9069 /* Check that PREV only sets the reload register. */
9070 set = single_set (prev);
9073 dst = SET_DEST (set);
9075 || ! rtx_equal_p (dst, x))
9077 if (! reg_set_p (dst, PATTERN (dead_insn)))
9079 /* Check if DST was used in a later insn -
9080 it might have been inherited. */
9081 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
9087 if (reg_referenced_p (dst, PATTERN (i2)))
9089 /* If there is a reference to the register in the current insn,
9090 it might be loaded in a non-inherited reload. If no other
9091 reload uses it, that means the register is set before
9093 if (i2 == current_insn)
9095 for (j = n_reloads - 1; j >= 0; j--)
9096 if ((rld[j].reg_rtx == dst && reload_inherited[j])
9097 || reload_override_in[j] == dst)
9099 for (j = n_reloads - 1; j >= 0; j--)
9100 if (rld[j].in && rld[j].reg_rtx == dst)
9109 /* If DST is still live at CURRENT_INSN, check if it is used for
9110 any reload. Note that even if CURRENT_INSN sets DST, we still
9111 have to check the reloads. */
9112 if (i2 == current_insn)
9114 for (j = n_reloads - 1; j >= 0; j--)
9115 if ((rld[j].reg_rtx == dst && reload_inherited[j])
9116 || reload_override_in[j] == dst)
9118 /* ??? We can't finish the loop here, because dst might be
9119 allocated to a pseudo in this block if no reload in this
9120 block needs any of the classes containing DST - see
9121 spill_hard_reg. There is no easy way to tell this, so we
9122 have to scan till the end of the basic block. */
9124 if (reg_set_p (dst, PATTERN (i2)))
9128 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
9129 reg_reloaded_contents[REGNO (dst)] = -1;
9133 /* Output reload-insns to reload VALUE into RELOADREG.
9134 VALUE is an autoincrement or autodecrement RTX whose operand
9135 is a register or memory location;
9136 so reloading involves incrementing that location.
9137 IN is either identical to VALUE, or some cheaper place to reload from.
9139 INC_AMOUNT is the number to increment or decrement by (always positive).
9140 This cannot be deduced from VALUE.
9142 Return the instruction that stores into RELOADREG. */
9145 inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
9147 /* REG or MEM to be copied and incremented. */
9148 rtx incloc = find_replacement (&XEXP (value, 0));
9149 /* Nonzero if increment after copying. */
9150 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
9151 || GET_CODE (value) == POST_MODIFY);
9157 rtx real_in = in == value ? incloc : in;
9159 /* No hard register is equivalent to this register after
9160 inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
9161 we could inc/dec that register as well (maybe even using it for
9162 the source), but I'm not sure it's worth worrying about. */
9164 reg_last_reload_reg[REGNO (incloc)] = 0;
9166 if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
9168 gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
9169 inc = find_replacement (&XEXP (XEXP (value, 1), 1));
9173 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
9174 inc_amount = -inc_amount;
9176 inc = GEN_INT (inc_amount);
9179 /* If this is post-increment, first copy the location to the reload reg. */
9180 if (post && real_in != reloadreg)
9181 emit_insn (gen_move_insn (reloadreg, real_in));
9185 /* See if we can directly increment INCLOC. Use a method similar to
9186 that in gen_reload. */
9188 last = get_last_insn ();
9189 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
9190 gen_rtx_PLUS (GET_MODE (incloc),
9193 code = recog_memoized (add_insn);
9196 extract_insn (add_insn);
9197 if (constrain_operands (1))
9199 /* If this is a pre-increment and we have incremented the value
9200 where it lives, copy the incremented value to RELOADREG to
9201 be used as an address. */
9204 emit_insn (gen_move_insn (reloadreg, incloc));
9209 delete_insns_since (last);
9212 /* If couldn't do the increment directly, must increment in RELOADREG.
9213 The way we do this depends on whether this is pre- or post-increment.
9214 For pre-increment, copy INCLOC to the reload register, increment it
9215 there, then save back. */
9219 if (in != reloadreg)
9220 emit_insn (gen_move_insn (reloadreg, real_in));
9221 emit_insn (gen_add2_insn (reloadreg, inc));
9222 store = emit_insn (gen_move_insn (incloc, reloadreg));
9227 Because this might be a jump insn or a compare, and because RELOADREG
9228 may not be available after the insn in an input reload, we must do
9229 the incrementation before the insn being reloaded for.
9231 We have already copied IN to RELOADREG. Increment the copy in
9232 RELOADREG, save that back, then decrement RELOADREG so it has
9233 the original value. */
9235 emit_insn (gen_add2_insn (reloadreg, inc));
9236 store = emit_insn (gen_move_insn (incloc, reloadreg));
9237 if (CONST_INT_P (inc))
9238 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
9240 emit_insn (gen_sub2_insn (reloadreg, inc));
9248 add_auto_inc_notes (rtx insn, rtx x)
9250 enum rtx_code code = GET_CODE (x);
9254 if (code == MEM && auto_inc_p (XEXP (x, 0)))
9256 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9260 /* Scan all the operand sub-expressions. */
9261 fmt = GET_RTX_FORMAT (code);
9262 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9265 add_auto_inc_notes (insn, XEXP (x, i));
9266 else if (fmt[i] == 'E')
9267 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9268 add_auto_inc_notes (insn, XVECEXP (x, i, j));
9273 /* This is used by reload pass, that does emit some instructions after
9274 abnormal calls moving basic block end, but in fact it wants to emit
9275 them on the edge. Looks for abnormal call edges, find backward the
9276 proper call and fix the damage.
9278 Similar handle instructions throwing exceptions internally. */
9280 fixup_abnormal_edges (void)
9282 bool inserted = false;
9290 /* Look for cases we are interested in - calls or instructions causing
9292 FOR_EACH_EDGE (e, ei, bb->succs)
9294 if (e->flags & EDGE_ABNORMAL_CALL)
9296 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
9297 == (EDGE_ABNORMAL | EDGE_EH))
9300 if (e && !CALL_P (BB_END (bb))
9301 && !can_throw_internal (BB_END (bb)))
9305 /* Get past the new insns generated. Allow notes, as the insns
9306 may be already deleted. */
9308 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
9309 && !can_throw_internal (insn)
9310 && insn != BB_HEAD (bb))
9311 insn = PREV_INSN (insn);
9313 if (CALL_P (insn) || can_throw_internal (insn))
9317 stop = NEXT_INSN (BB_END (bb));
9319 insn = NEXT_INSN (insn);
9321 FOR_EACH_EDGE (e, ei, bb->succs)
9322 if (e->flags & EDGE_FALLTHRU)
9325 while (insn && insn != stop)
9327 next = NEXT_INSN (insn);
9332 /* Sometimes there's still the return value USE.
9333 If it's placed after a trapping call (i.e. that
9334 call is the last insn anyway), we have no fallthru
9335 edge. Simply delete this use and don't try to insert
9336 on the non-existent edge. */
9337 if (GET_CODE (PATTERN (insn)) != USE)
9339 /* We're not deleting it, we're moving it. */
9340 INSN_DELETED_P (insn) = 0;
9341 PREV_INSN (insn) = NULL_RTX;
9342 NEXT_INSN (insn) = NULL_RTX;
9344 insert_insn_on_edge (insn, e);
9348 else if (!BARRIER_P (insn))
9349 set_block_for_insn (insn, NULL);
9354 /* It may be that we don't find any such trapping insn. In this
9355 case we discovered quite late that the insn that had been
9356 marked as can_throw_internal in fact couldn't trap at all.
9357 So we should in fact delete the EH edges out of the block. */
9359 purge_dead_edges (bb);
9363 /* We've possibly turned single trapping insn into multiple ones. */
9364 if (cfun->can_throw_non_call_exceptions)
9367 blocks = sbitmap_alloc (last_basic_block);
9368 sbitmap_ones (blocks);
9369 find_many_sub_basic_blocks (blocks);
9370 sbitmap_free (blocks);
9374 commit_edge_insertions ();
9376 #ifdef ENABLE_CHECKING
9377 /* Verify that we didn't turn one trapping insn into many, and that
9378 we found and corrected all of the problems wrt fixups on the
9380 verify_flow_info ();