1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial REG_DEAD notes.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
165 #define LOCAL_REGNO(REGNO) 0
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
171 #ifdef HAVE_conditional_execution
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
177 /* Nonzero if the second flow pass has completed. */
180 /* Maximum register number used in this function, plus one. */
184 /* Indexed by n, giving various register information */
186 varray_type reg_n_info;
188 /* Size of a regset for the current function,
189 in (1) bytes and (2) elements. */
194 /* Regset of regs live when calls to `setjmp'-like functions happen. */
195 /* ??? Does this exist only for the setjmp-clobbered warning message? */
197 regset regs_live_at_setjmp;
199 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
200 that have to go in the same hard reg.
201 The first two regs in the list are a pair, and the next two
202 are another pair, etc. */
205 /* Callback that determines if it's ok for a function to have no
206 noreturn attribute. */
207 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
209 /* Set of registers that may be eliminable. These are handled specially
210 in updating regs_ever_live. */
212 static HARD_REG_SET elim_reg_set;
214 /* Holds information for tracking conditional register life information. */
215 struct reg_cond_life_info
/* NOTE(review): the struct's member declarations appear to be elided from
   this excerpt (only their descriptive comments remain) -- confirm against
   the full source before relying on the layout.  */
217 /* A boolean expression of conditions under which a register is dead. */
219 /* Conditions under which a register is dead at the basic block end. */
222 /* A boolean expression of conditions under which a register has been
226 /* ??? Could store mask of bytes that are dead, so that we could finally
227 track lifetimes of multi-word registers accessed via subregs. */
230 /* For use in communicating between propagate_block and its subroutines.
231 Holds all information needed to compute life and def-use information. */
233 struct propagate_block_info
/* NOTE(review): several member declarations appear elided in this excerpt;
   only cond_local_set, reg_cond_dead and mem_set_list_len are visible.  */
235 /* The basic block we're considering. */
238 /* Bit N is set if register N is conditionally or unconditionally live. */
241 /* Bit N is set if register N is set this insn. */
244 /* Element N is the next insn that uses (hard or pseudo) register N
245 within the current basic block; or zero, if there is no such insn. */
248 /* Contains a list of all the MEMs we are tracking for dead store
252 /* If non-null, record the set of registers set unconditionally in the
256 /* If non-null, record the set of registers set conditionally in the
258 regset cond_local_set;
260 #ifdef HAVE_conditional_execution
261 /* Indexed by register number, holds a reg_cond_life_info for each
262 register that is not unconditionally live or dead. */
263 splay_tree reg_cond_dead;
265 /* Bit N is set if register N is in an expression in reg_cond_dead. */
269 /* The length of mem_set_list. */
270 int mem_set_list_len;
272 /* Non-zero if the value of CC0 is live. */
275 /* Flags controlling the set of information propagate_block collects. */
279 /* Number of dead insns removed. */
282 /* Maximum length of pbi->mem_set_list before we start dropping
283 new elements on the floor. */
284 #define MAX_MEM_SET_LIST_LEN 100
286 /* Forward declarations */
287 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
288 static void verify_wide_reg PARAMS ((int, basic_block));
289 static void verify_local_live_at_start PARAMS ((regset, basic_block));
290 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
291 static void notice_stack_pointer_modification PARAMS ((rtx));
292 static void mark_reg PARAMS ((rtx, void *));
293 static void mark_regs_live_at_end PARAMS ((regset));
294 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
295 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
296 static void propagate_block_delete_insn PARAMS ((rtx));
297 static rtx propagate_block_delete_libcall PARAMS ((rtx, rtx));
298 static int insn_dead_p PARAMS ((struct propagate_block_info *,
300 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
302 static void mark_set_regs PARAMS ((struct propagate_block_info *,
304 static void mark_set_1 PARAMS ((struct propagate_block_info *,
305 enum rtx_code, rtx, rtx,
307 static int find_regno_partial PARAMS ((rtx *, void *));
309 #ifdef HAVE_conditional_execution
310 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
312 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
313 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
314 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
316 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
317 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
318 static rtx not_reg_cond PARAMS ((rtx));
319 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
322 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
323 rtx, rtx, rtx, rtx, rtx));
324 static void find_auto_inc PARAMS ((struct propagate_block_info *,
326 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
328 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
330 static void mark_used_reg PARAMS ((struct propagate_block_info *,
332 static void mark_used_regs PARAMS ((struct propagate_block_info *,
334 void dump_flow_info PARAMS ((FILE *));
335 void debug_flow_info PARAMS ((void));
336 static void add_to_mem_set_list PARAMS ((struct propagate_block_info *,
338 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
340 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
342 static void clear_log_links PARAMS ((sbitmap));
/* Emit post-flow diagnostics about function returns: suggest adding the
   `noreturn' attribute when EXIT is unreachable, warn when a declared
   `noreturn' function can in fact return, and warn when control can reach
   the end of a non-void function.  (NOTE(review): braces and a few lines
   are elided from this excerpt.)  */
346 check_function_return_warnings ()
348 if (warn_missing_noreturn
349 && !TREE_THIS_VOLATILE (cfun->decl)
350 && EXIT_BLOCK_PTR->pred == NULL
351 && (lang_missing_noreturn_ok_p
352 && !lang_missing_noreturn_ok_p (cfun->decl)))
353 warning ("function might be possible candidate for attribute `noreturn'");
355 /* If we have a path to EXIT, then we do return. */
356 if (TREE_THIS_VOLATILE (cfun->decl)
357 && EXIT_BLOCK_PTR->pred != NULL)
358 warning ("`noreturn' function does return");
360 /* If the clobber_return_insn appears in some basic block, then we
361 do reach the end without returning a value. */
362 else if (warn_return_type
363 && cfun->x_clobber_return_insn != NULL
364 && EXIT_BLOCK_PTR->pred != NULL)
366 int max_uid = get_max_uid ();
368 /* If clobber_return_insn was excised by jump1, then renumber_insns
369 can make max_uid smaller than the number still recorded in our rtx.
370 That's fine, since this is a quick way of verifying that the insn
371 is no longer in the chain. */
372 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
374 /* Recompute insn->block mapping, since the initial mapping is
375 set before we delete unreachable blocks. */
376 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
377 warning ("control reaches end of non-void function");
382 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
383 note associated with the BLOCK. */
386 first_insn_after_basic_block_note (block)
391 /* Get the first instruction in the block. */
394 if (insn == NULL_RTX)
/* A leading CODE_LABEL precedes the basic-block note; step past it.  */
396 if (GET_CODE (insn) == CODE_LABEL)
397 insn = NEXT_INSN (insn);
/* NOTE(review): the consequent of this check (presumably abort) is
   elided from this excerpt.  */
398 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
401 return NEXT_INSN (insn);
404 /* Perform data flow analysis.
405 F is the first insn of the function; FLAGS is a set of PROP_* flags
406 to be used in accumulating flow info. */
409 life_analysis (f, file, flags)
414 #ifdef ELIMINABLE_REGS
416 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
419 /* Record which registers will be eliminated. We use this in
422 CLEAR_HARD_REG_SET (elim_reg_set);
424 #ifdef ELIMINABLE_REGS
425 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
426 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
/* Without ELIMINABLE_REGS only the frame pointer can be eliminated.  */
428 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
432 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC | PROP_ALLOW_CFG_CHANGES);
434 /* The post-reload life analysis must have (on a global basis) the same
435 registers live as was computed by reload itself; otherwise elimination
436 offsets and such may be incorrect.
438 Reload will make some registers as live even though they do not
441 We don't want to create new auto-incs after reload, since they
442 are unlikely to be useful and can cause problems with shared
444 if (reload_completed)
445 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
447 /* We want alias analysis information for local dead store elimination. */
448 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
449 init_alias_analysis ();
451 /* Always remove no-op moves. Do this before other processing so
452 that we don't have to keep re-scanning them. */
453 delete_noop_moves (f);
455 /* Some targets can emit simpler epilogues if they know that sp was
456 not ever modified during the function. After reload, of course,
457 we've already emitted the epilogue so there's no sense searching. */
458 if (! reload_completed)
459 notice_stack_pointer_modification (f);
461 /* Allocate and zero out data structures that will record the
462 data from lifetime analysis. */
463 allocate_reg_life_data ();
464 allocate_bb_life_data ();
466 /* Find the set of registers live on function exit. */
467 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
469 /* "Update" life info from zero. It'd be nice to begin the
470 relaxation with just the exit and noreturn blocks, but that set
471 is not immediately handy. */
473 if (flags & PROP_REG_INFO)
474 memset (regs_ever_live, 0, sizeof (regs_ever_live));
475 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
478 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
479 end_alias_analysis ();
482 dump_flow_info (file);
484 free_basic_block_vars (1);
486 #ifdef ENABLE_CHECKING
490 /* Search for any REG_LABEL notes which reference deleted labels. */
491 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
493 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
/* NOTE(review): the consequent (presumably abort) is elided here.  */
495 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
500 /* Removing dead insns should've made jumptables really dead. */
501 delete_dead_jumptables ();
504 /* A subroutine of verify_wide_reg, called through for_each_rtx.
505 Search for REGNO. If found, return 2 if it is not wider than
509 verify_wide_reg_1 (px, pregno)
/* PREGNO arrives from for_each_rtx as an opaque pointer; recover it.  */
514 unsigned int regno = *(int *) pregno;
516 if (GET_CODE (x) == REG && REGNO (x) == regno)
/* NOTE(review): the return statements are elided from this excerpt; per
   the header comment a non-wide occurrence of REGNO yields 2.  */
518 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
525 /* A subroutine of verify_local_live_at_start.  Search through insns
526 of BB looking for register REGNO. */
529 verify_wide_reg (regno, bb)
533 rtx head = bb->head, end = bb->end;
/* Scan each insn's pattern in BB for an occurrence of REGNO.
   (Fixed mis-encoded "&regno" -- the source had the mojibake sequence
   "(R)no" where the HTML entity for "&reg" had been rendered.)  */
539 int r = for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno);
547 head = NEXT_INSN (head);
/* REGNO was not found in the block (or only in a narrow mode): dump
   diagnostics to the RTL dump file.  */
552 fprintf (rtl_dump_file, "Register %d died unexpectedly.\n", regno);
553 dump_bb (bb, rtl_dump_file);
558 /* A subroutine of update_life_info. Verify that there are no untoward
559 changes in live_at_start during a local update. */
562 verify_local_live_at_start (new_live_at_start, bb)
563 regset new_live_at_start;
566 if (reload_completed)
568 /* After reload, there are no pseudos, nor subregs of multi-word
569 registers. The regsets should exactly match. */
570 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
574 fprintf (rtl_dump_file,
575 "live_at_start mismatch in bb %d, aborting\nNew:\n",
577 debug_bitmap_file (rtl_dump_file, new_live_at_start);
578 fputs ("Old:\n", rtl_dump_file);
579 dump_bb (bb, rtl_dump_file);
/* Pre-reload path: registers may legitimately go live (see the ???
   comment above update_life_info), but must never die.  */
588 /* Find the set of changed registers. */
589 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
591 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
593 /* No registers should die. */
594 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
598 fprintf (rtl_dump_file,
599 "Register %d died unexpectedly.\n", i);
600 dump_bb (bb, rtl_dump_file);
605 /* Verify that the now-live register is wider than word_mode. */
606 verify_wide_reg (i, bb);
611 /* Updates life information starting with the basic blocks set in BLOCKS.
612 If BLOCKS is null, consider it to be the universal set.
614 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
615 we are only expecting local modifications to basic blocks. If we find
616 extra registers live at the beginning of a block, then we either killed
617 useful data, or we have a broken split that wants data not provided.
618 If we find registers removed from live_at_start, that means we have
619 a broken peephole that is killing a register it shouldn't.
621 ??? This is not true in one situation -- when a pre-reload splitter
622 generates subregs of a multi-word pseudo, current life analysis will
623 lose the kill. So we _can_ have a pseudo go live. How irritating.
625 Including PROP_REG_INFO does not properly refresh regs_ever_live
626 unless the caller resets it to zero. */
629 update_life_info (blocks, extent, prop_flags)
631 enum update_life_extent extent;
635 regset_head tmp_head;
638 tmp = INITIALIZE_REG_SET (tmp_head);
641 timevar_push ((extent == UPDATE_LIFE_LOCAL || blocks)
642 ? TV_LIFE_UPDATE : TV_LIFE);
644 /* Changes to the CFG are only allowed when
645 doing a global update for the entire CFG. */
646 if ((prop_flags & PROP_ALLOW_CFG_CHANGES)
647 && (extent == UPDATE_LIFE_LOCAL || blocks))
650 /* For a global update, we go through the relaxation process again. */
651 if (extent != UPDATE_LIFE_LOCAL)
657 calculate_global_regs_live (blocks, blocks,
658 prop_flags & (PROP_SCAN_DEAD_CODE
659 | PROP_ALLOW_CFG_CHANGES));
661 if ((prop_flags & (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
662 != (PROP_KILL_DEAD_CODE | PROP_ALLOW_CFG_CHANGES))
665 /* Removing dead code may allow the CFG to be simplified which
666 in turn may allow for further dead code detection / removal. */
667 for (i = n_basic_blocks - 1; i >= 0; --i)
669 basic_block bb = BASIC_BLOCK (i);
671 COPY_REG_SET (tmp, bb->global_live_at_end);
672 changed |= propagate_block (bb, tmp, NULL, NULL,
673 prop_flags & (PROP_SCAN_DEAD_CODE
674 | PROP_KILL_DEAD_CODE));
/* Iterate dead-code removal + CFG cleanup until a fixed point.  */
677 if (! changed || ! cleanup_cfg (CLEANUP_EXPENSIVE))
681 /* If asked, remove notes from the blocks we'll update. */
682 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
683 count_or_remove_death_notes (blocks, 1);
686 /* Clear log links in case we are asked to (re)compute them. */
687 if (prop_flags & PROP_LOG_LINKS)
688 clear_log_links (blocks);
/* BLOCKS non-null: propagate only through the requested blocks.  */
692 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
694 basic_block bb = BASIC_BLOCK (i);
696 COPY_REG_SET (tmp, bb->global_live_at_end);
697 propagate_block (bb, tmp, NULL, NULL, prop_flags);
699 if (extent == UPDATE_LIFE_LOCAL)
700 verify_local_live_at_start (tmp, bb);
/* BLOCKS null: propagate through every basic block.  */
705 for (i = n_basic_blocks - 1; i >= 0; --i)
707 basic_block bb = BASIC_BLOCK (i);
709 COPY_REG_SET (tmp, bb->global_live_at_end);
710 propagate_block (bb, tmp, NULL, NULL, prop_flags);
712 if (extent == UPDATE_LIFE_LOCAL)
713 verify_local_live_at_start (tmp, bb);
719 if (prop_flags & PROP_REG_INFO)
721 /* The only pseudos that are live at the beginning of the function
722 are those that were not set anywhere in the function. local-alloc
723 doesn't know how to handle these correctly, so mark them as not
724 local to any one basic block. */
725 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
726 FIRST_PSEUDO_REGISTER, i,
727 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
729 /* We have a problem with any pseudoreg that lives across the setjmp.
730 ANSI says that if a user variable does not change in value between
731 the setjmp and the longjmp, then the longjmp preserves it. This
732 includes longjmp from a place where the pseudo appears dead.
733 (In principle, the value still exists if it is in scope.)
734 If the pseudo goes in a hard reg, some other value may occupy
735 that hard reg where this pseudo is dead, thus clobbering the pseudo.
736 Conclusion: such a pseudo must not go in a hard reg. */
737 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
738 FIRST_PSEUDO_REGISTER, i,
740 if (regno_reg_rtx[i] != 0)
742 REG_LIVE_LENGTH (i) = -1;
743 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
747 timevar_pop ((extent == UPDATE_LIFE_LOCAL || blocks)
748 ? TV_LIFE_UPDATE : TV_LIFE);
749 if (ndead && rtl_dump_file)
750 fprintf (rtl_dump_file, "deleted %i dead insns\n", ndead);
754 /* Update life information in all blocks where BB_DIRTY is set. */
757 update_life_info_in_dirty_blocks (extent, prop_flags)
758 enum update_life_extent extent;
761 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
/* Collect the dirty blocks into a bitmap, then do one batched update.  */
766 sbitmap_zero (update_life_blocks);
767 for (block_num = 0; block_num < n_basic_blocks; block_num++)
768 if (BASIC_BLOCK (block_num)->flags & BB_DIRTY)
770 SET_BIT (update_life_blocks, block_num);
775 ndead = update_life_info (update_life_blocks, extent, prop_flags);
777 sbitmap_free (update_life_blocks);
781 /* Free the variables allocated by find_basic_blocks.
783 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
786 free_basic_block_vars (keep_head_end_p)
789 if (! keep_head_end_p)
791 if (basic_block_info)
794 VARRAY_FREE (basic_block_info);
/* Detach the life/aux data hanging off the synthetic entry/exit blocks.  */
798 ENTRY_BLOCK_PTR->aux = NULL;
799 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
800 EXIT_BLOCK_PTR->aux = NULL;
801 EXIT_BLOCK_PTR->global_live_at_start = NULL;
805 /* Delete any insns that copy a register to itself. */
808 delete_noop_moves (f)
809 rtx f ATTRIBUTE_UNUSED;
/* Walk every insn of every basic block; NEXT is cached because the
   current insn may be deleted.  */
816 for (i = 0; i < n_basic_blocks; i++)
818 bb = BASIC_BLOCK (i);
819 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
821 next = NEXT_INSN (insn);
822 if (INSN_P (insn) && noop_move_p (insn))
826 /* If we're about to remove the first insn of a libcall
827 then move the libcall note to the next real insn and
828 update the retval note. */
829 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX))
830 && XEXP (note, 0) != insn)
832 rtx new_libcall_insn = next_real_insn (insn);
833 rtx retval_note = find_reg_note (XEXP (note, 0),
834 REG_RETVAL, NULL_RTX);
835 REG_NOTES (new_libcall_insn)
836 = gen_rtx_INSN_LIST (REG_LIBCALL, XEXP (note, 0),
837 REG_NOTES (new_libcall_insn));
838 XEXP (retval_note, 0) = new_libcall_insn;
841 delete_insn_and_edges (insn);
846 if (nnoops && rtl_dump_file)
847 fprintf (rtl_dump_file, "deleted %i noop moves", nnoops);
851 /* Delete any jump tables never referenced. We can't delete them at the
852 time of removing tablejump insn as they are referenced by the preceding
853 insns computing the destination, so we delay deleting and garbagecollect
854 them once life information is computed. */
856 delete_dead_jumptables ()
859 for (insn = get_insns (); insn; insn = next)
861 next = NEXT_INSN (insn);
/* A dead jump table is a label whose only uses come from LABEL_PRESERVE_P
   itself, immediately followed by an ADDR_VEC/ADDR_DIFF_VEC jump insn.  */
862 if (GET_CODE (insn) == CODE_LABEL
863 && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
864 && GET_CODE (next) == JUMP_INSN
865 && (GET_CODE (PATTERN (next)) == ADDR_VEC
866 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
869 fprintf (rtl_dump_file, "Dead jumptable %i removed\n", INSN_UID (insn));
/* Delete the table (the insn after the label); skip past it too.  */
870 delete_insn (NEXT_INSN (insn));
872 next = NEXT_INSN (next);
877 /* Determine if the stack pointer is constant over the life of the function.
878 Only useful before prologues have been emitted. */
/* note_stores callback: clear current_function_sp_is_unchanging when X
   stores to the stack pointer, directly or via an auto-modify address.  */
881 notice_stack_pointer_modification_1 (x, pat, data)
883 rtx pat ATTRIBUTE_UNUSED;
884 void *data ATTRIBUTE_UNUSED;
886 if (x == stack_pointer_rtx
887 /* The stack pointer is only modified indirectly as the result
888 of a push until later in flow. See the comments in rtl.texi
889 regarding Embedded Side-Effects on Addresses. */
890 || (GET_CODE (x) == MEM
891 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
892 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
893 current_function_sp_is_unchanging = 0;
/* Scan all insns starting at F and set current_function_sp_is_unchanging
   accordingly.  */
897 notice_stack_pointer_modification (f)
902 /* Assume that the stack pointer is unchanging if alloca hasn't
904 current_function_sp_is_unchanging = !current_function_calls_alloca;
/* alloca already proved sp changes; nothing more to scan.  */
905 if (! current_function_sp_is_unchanging)
908 for (insn = f; insn; insn = NEXT_INSN (insn))
912 /* Check if insn modifies the stack pointer. */
913 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
/* Stop scanning as soon as a modification is found.  */
915 if (! current_function_sp_is_unchanging)
921 /* Mark a register in SET. Hard registers in large modes get all
922 of their component registers set as well. */
/* NOTE(review): the function signature line (mark_reg (reg, xset)) appears
   elided from this excerpt.  */
929 regset set = (regset) xset;
930 int regno = REGNO (reg);
932 if (GET_MODE (reg) == BLKmode)
935 SET_REGNO_REG_SET (set, regno);
/* For a hard register, also mark each word-sized component register.  */
936 if (regno < FIRST_PSEUDO_REGISTER)
938 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
940 SET_REGNO_REG_SET (set, regno + n);
944 /* Mark those regs which are needed at the end of the function as live
945 at the end of the last basic block. */
948 mark_regs_live_at_end (set)
953 /* If exiting needs the right stack value, consider the stack pointer
954 live at the end of the function. */
955 if ((HAVE_epilogue && reload_completed)
956 || ! EXIT_IGNORE_STACK
957 || (! FRAME_POINTER_REQUIRED
958 && ! current_function_calls_alloca
959 && flag_omit_frame_pointer)
960 || current_function_sp_is_unchanging)
962 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
965 /* Mark the frame pointer if needed at the end of the function. If
966 we end up eliminating it, it will be removed from the live list
967 of each basic block by reload. */
969 if (! reload_completed || frame_pointer_needed)
971 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
972 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
973 /* If they are different, also mark the hard frame pointer as live. */
974 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
975 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
979 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
980 /* Many architectures have a GP register even without flag_pic.
981 Assume the pic register is not in use, or will be handled by
982 other means, if it is not fixed. */
983 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
984 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
985 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM)
988 /* Mark all global registers, and all registers used by the epilogue
989 as being live at the end of the function since they may be
990 referenced by our caller. */
991 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
992 if (global_regs[i] || EPILOGUE_USES (i))
993 SET_REGNO_REG_SET (set, i);
995 if (HAVE_epilogue && reload_completed)
997 /* Mark all call-saved registers that we actually used. */
998 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
999 if (regs_ever_live[i] && ! LOCAL_REGNO (i)
1000 && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1001 SET_REGNO_REG_SET (set, i);
1004 #ifdef EH_RETURN_DATA_REGNO
1005 /* Mark the registers that will contain data for the handler. */
1006 if (reload_completed && current_function_calls_eh_return)
/* NOTE(review): the loop header iterating I over the EH data registers
   appears elided from this excerpt.  */
1009 unsigned regno = EH_RETURN_DATA_REGNO(i);
1010 if (regno == INVALID_REGNUM)
1012 SET_REGNO_REG_SET (set, regno);
1015 #ifdef EH_RETURN_STACKADJ_RTX
1016 if ((! HAVE_epilogue || ! reload_completed)
1017 && current_function_calls_eh_return)
1019 rtx tmp = EH_RETURN_STACKADJ_RTX;
1020 if (tmp && REG_P (tmp))
1021 mark_reg (tmp, set);
1024 #ifdef EH_RETURN_HANDLER_RTX
1025 if ((! HAVE_epilogue || ! reload_completed)
1026 && current_function_calls_eh_return)
1028 rtx tmp = EH_RETURN_HANDLER_RTX;
1029 if (tmp && REG_P (tmp))
1030 mark_reg (tmp, set);
1034 /* Mark function return value. */
1035 diddle_return_value (mark_reg, set);
1038 /* Callback function for for_each_successor_phi. DATA is a regset.
1039 Sets the SRC_REGNO, the regno of the phi alternative for phi node
1040 INSN, in the regset. */
1043 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
1044 rtx insn ATTRIBUTE_UNUSED;
1045 int dest_regno ATTRIBUTE_UNUSED;
/* DATA is really the live regset; mark the phi-alternative source live.  */
1049 regset live = (regset) data;
1050 SET_REGNO_REG_SET (live, src_regno);
1054 /* Propagate global life info around the graph of basic blocks. Begin
1055 considering blocks with their corresponding bit set in BLOCKS_IN.
1056 If BLOCKS_IN is null, consider it the universal set.
1058 BLOCKS_OUT is set for every block that was changed. */
1061 calculate_global_regs_live (blocks_in, blocks_out, flags)
1062 sbitmap blocks_in, blocks_out;
1065 basic_block *queue, *qhead, *qtail, *qend;
1066 regset tmp, new_live_at_end, call_used;
1067 regset_head tmp_head, call_used_head;
1068 regset_head new_live_at_end_head;
1071 tmp = INITIALIZE_REG_SET (tmp_head);
1072 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
1073 call_used = INITIALIZE_REG_SET (call_used_head);
1075 /* Inconveniently, this is only readily available in hard reg set form. */
1076 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
1077 if (call_used_regs[i])
1078 SET_REGNO_REG_SET (call_used, i);
1080 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
1081 because the `head == tail' style test for an empty queue doesn't
1082 work with a full queue. */
1083 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
1085 qhead = qend = queue + n_basic_blocks + 2;
1087 /* Queue the blocks set in the initial mask. Do this in reverse block
1088 number order so that we are more likely for the first round to do
1089 useful work. We use AUX non-null to flag that the block is queued. */
1092 /* Clear out the garbage that might be hanging out in bb->aux. */
1093 for (i = n_basic_blocks - 1; i >= 0; --i)
1094 BASIC_BLOCK (i)->aux = NULL;
1096 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
1098 basic_block bb = BASIC_BLOCK (i);
1105 for (i = 0; i < n_basic_blocks; ++i)
1107 basic_block bb = BASIC_BLOCK (i);
1114 sbitmap_zero (blocks_out);
1116 /* We work through the queue until there are no more blocks. What
1117 is live at the end of this block is precisely the union of what
1118 is live at the beginning of all its successors. So, we set its
1119 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
1120 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
1121 this block by walking through the instructions in this block in
1122 reverse order and updating as we go. If that changed
1123 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
1124 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
1126 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
1127 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
1128 must either be live at the end of the block, or used within the
1129 block. In the latter case, it will certainly never disappear
1130 from GLOBAL_LIVE_AT_START. In the former case, the register
1131 could go away only if it disappeared from GLOBAL_LIVE_AT_START
1132 for one of the successor blocks. By induction, that cannot
1134 while (qhead != qtail)
1136 int rescan, changed;
1145 /* Begin by propagating live_at_start from the successor blocks. */
1146 CLEAR_REG_SET (new_live_at_end);
1147 for (e = bb->succ; e; e = e->succ_next)
1149 basic_block sb = e->dest;
1151 /* Call-clobbered registers die across exception and call edges. */
1152 /* ??? Abnormal call edges ignored for the moment, as this gets
1153 confused by sibling call edges, which crashes reg-stack. */
1154 if (e->flags & EDGE_EH)
1156 bitmap_operation (tmp, sb->global_live_at_start,
1157 call_used, BITMAP_AND_COMPL);
1158 IOR_REG_SET (new_live_at_end, tmp);
1161 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
1164 /* The all-important stack pointer must always be live. */
1165 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
1167 /* Before reload, there are a few registers that must be forced
1168 live everywhere -- which might not already be the case for
1169 blocks within infinite loops. */
1170 if (! reload_completed)
1172 /* Any reference to any pseudo before reload is a potential
1173 reference of the frame pointer. */
1174 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
1176 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1177 /* Pseudos with argument area equivalences may require
1178 reloading via the argument pointer. */
1179 if (fixed_regs[ARG_POINTER_REGNUM])
1180 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
1183 /* Any constant, or pseudo with constant equivalences, may
1184 require reloading from memory using the pic register. */
1185 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1186 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1187 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
1190 /* Regs used in phi nodes are not included in
1191 global_live_at_start, since they are live only along a
1192 particular edge. Set those regs that are live because of a
1193 phi node alternative corresponding to this particular block. */
1195 for_each_successor_phi (bb, &set_phi_alternative_reg,
1198 if (bb == ENTRY_BLOCK_PTR)
1200 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1204 /* On our first pass through this block, we'll go ahead and continue.
1205 Recognize first pass by local_set NULL. On subsequent passes, we
1206 get to skip out early if live_at_end wouldn't have changed. */
1208 if (bb->local_set == NULL)
1210 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1211 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1216 /* If any bits were removed from live_at_end, we'll have to
1217 rescan the block. This wouldn't be necessary if we had
1218 precalculated local_live, however with PROP_SCAN_DEAD_CODE
1219 local_live is really dependent on live_at_end. */
1220 CLEAR_REG_SET (tmp);
1221 rescan = bitmap_operation (tmp, bb->global_live_at_end,
1222 new_live_at_end, BITMAP_AND_COMPL);
1226 /* If any of the registers in the new live_at_end set are
1227 conditionally set in this basic block, we must rescan.
1228 This is because conditional lifetimes at the end of the
1229 block do not just take the live_at_end set into account,
1230 but also the liveness at the start of each successor
1231 block. We can miss changes in those sets if we only
1232 compare the new live_at_end against the previous one. */
1233 CLEAR_REG_SET (tmp);
1234 rescan = bitmap_operation (tmp, new_live_at_end,
1235 bb->cond_local_set, BITMAP_AND);
1240 /* Find the set of changed bits. Take this opportunity
1241 to notice that this set is empty and early out. */
1242 CLEAR_REG_SET (tmp);
1243 changed = bitmap_operation (tmp, bb->global_live_at_end,
1244 new_live_at_end, BITMAP_XOR);
1248 /* If any of the changed bits overlap with local_set,
1249 we'll have to rescan the block. Detect overlap by
1250 the AND with ~local_set turning off bits. */
1251 rescan = bitmap_operation (tmp, tmp, bb->local_set,
1256 /* Let our caller know that BB changed enough to require its
1257 death notes updated. */
1259 SET_BIT (blocks_out, bb->index);
1263 /* Add to live_at_start the set of all registers in
1264 new_live_at_end that aren't in the old live_at_end. */
1266 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
1268 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1270 changed = bitmap_operation (bb->global_live_at_start,
1271 bb->global_live_at_start,
1278 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
1280 /* Rescan the block insn by insn to turn (a copy of) live_at_end
1281 into live_at_start. */
1282 propagate_block (bb, new_live_at_end, bb->local_set,
1283 bb->cond_local_set, flags);
1285 /* If live_at start didn't change, no need to go farther. */
1286 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
1289 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
1292 /* Queue all predecessors of BB so that we may re-examine
1293 their live_at_end. */
1294 for (e = bb->pred; e; e = e->pred_next)
1296 basic_block pb = e->src;
1297 if (pb->aux == NULL)
1308 FREE_REG_SET (new_live_at_end);
1309 FREE_REG_SET (call_used);
1313 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
1315 basic_block bb = BASIC_BLOCK (i);
1316 FREE_REG_SET (bb->local_set);
1317 FREE_REG_SET (bb->cond_local_set);
1322 for (i = n_basic_blocks - 1; i >= 0; --i)
1324 basic_block bb = BASIC_BLOCK (i);
1325 FREE_REG_SET (bb->local_set);
1326 FREE_REG_SET (bb->cond_local_set);
1334 /* This structure is used to pass parameters to and from
1335 the function find_regno_partial ().  It is used to pass in the
1336 register number we are looking for, as well as to return any rtx
found that references only part of that register.  */
1340 unsigned regno_to_find;
/* NOTE(review): an `rtx retval' member is assigned by find_regno_partial
   but its declaration is not visible in this view -- confirm against the
   full source.  */
1342 } find_regno_partial_param;
1345 /* Find the rtx for the reg numbers specified in 'data' if it is
1346 part of an expression which only uses part of the register. Return
1347 it in the structure passed in. */
/* This is a callback for for_each_rtx: PTR points at the sub-rtx currently
   being visited and DATA is a find_regno_partial_param carrying the
   register number in (regno_to_find) and the matching rtx out (retval).  */
1349 find_regno_partial (ptr, data)
1353 find_regno_partial_param *param = (find_regno_partial_param *)data;
1354 unsigned reg = param->regno_to_find;
/* Assume no match until one is found below.  */
1355 param->retval = NULL_RTX;
/* Nothing to examine at a null sub-rtx.  */
1357 if (*ptr == NULL_RTX)
1360 switch (GET_CODE (*ptr))
/* A STRICT_LOW_PART wrapper writes only the low part of the register.
   NOTE(review): the case label(s) immediately preceding this one (likely
   ZERO_EXTRACT / SIGN_EXTRACT) are not visible in this view.  */
1364 case STRICT_LOW_PART:
1365 if (GET_CODE (XEXP (*ptr, 0)) == REG && REGNO (XEXP (*ptr, 0)) == reg)
1367 param->retval = XEXP (*ptr, 0);
/* Presumably the SUBREG case (its case label is not visible here): a
   subreg of the register we seek is a partial reference.  */
1373 if (GET_CODE (SUBREG_REG (*ptr)) == REG
1374 && REGNO (SUBREG_REG (*ptr)) == reg)
1376 param->retval = SUBREG_REG (*ptr);
1388 /* Process all immediate successors of the entry block looking for pseudo
1389 registers which are live on entry. Find all of those whose first
1390 instance is a partial register reference of some kind, and initialize
1391 them to 0 after the entry block. This will prevent bit sets within
1392 registers whose value is unknown, and may contain some kind of sticky
1393 bits we don't want. */
/* Returns non-zero (did_something) if any initializing move was inserted,
   so the caller knows the inserted insns need further processing.  */
1396 initialize_uninitialized_subregs ()
1400 int reg, did_something = 0;
1401 find_regno_partial_param param;
/* Walk each edge out of the entry block; pseudos live at the start of the
   destination block are live on function entry.  */
1403 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1405 basic_block bb = e->dest;
1406 regset map = bb->global_live_at_start;
/* Only pseudos matter here, hence the FIRST_PSEUDO_REGISTER lower bound.  */
1407 EXECUTE_IF_SET_IN_REG_SET (map,
1408 FIRST_PSEUDO_REGISTER, reg,
1410 int uid = REGNO_FIRST_UID (reg);
1413 /* Find an insn which mentions the register we are looking for.
1414 It's preferable to have an instance of the register's rtl since
1415 there may be various flags set which we need to duplicate.
1416 If we can't find it, it's probably an automatic whose initial
1417 value doesn't matter, or hopefully something we don't care about. */
1418 for (i = get_insns (); i && INSN_UID (i) != uid; i = NEXT_INSN (i))
1422 /* Found the insn, now get the REG rtx, if we can. */
1423 param.regno_to_find = reg;
/* NOTE(review): "¶m" below looks like a mis-encoded "&param"
   (character-set damage) -- confirm against the original source.  */
1424 for_each_rtx (&i, find_regno_partial, ¶m);
1425 if (param.retval != NULL_RTX)
/* Queue a "reg = 0" move on this entry edge to give the partially
   referenced register a known initial value.  */
1427 insn = gen_move_insn (param.retval,
1428 CONST0_RTX (GET_MODE (param.retval)));
1429 insert_insn_on_edge (insn, e);
/* Materialize all the edge insertions made above.  */
1437 commit_edge_insertions ();
1438 return did_something;
1442 /* Subroutines of life analysis. */
1444 /* Allocate the permanent data structures that represent the results
1445 of life analysis. Not static since used also for stupid life analysis. */
1448 allocate_bb_life_data ()
/* One live-at-start and one live-at-end regset per basic block, all
   allocated on flow_obstack so they share the pass's lifetime.  */
1452 for (i = 0; i < n_basic_blocks; i++)
1454 basic_block bb = BASIC_BLOCK (i);
1456 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1457 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* The entry block only needs a live-at-end set and the exit block only a
   live-at-start set; the other halves are never consulted.  */
1460 ENTRY_BLOCK_PTR->global_live_at_end
1461 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1462 EXIT_BLOCK_PTR->global_live_at_start
1463 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* Set of regs live at any setjmp-like call; filled in by propagate_block.  */
1465 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* Allocate (or reallocate) the per-register life-analysis tables and reset
   the statistics that propagate_block will accumulate.  */
1469 allocate_reg_life_data ()
1473 max_regno = max_reg_num ();
1475 /* Recalculate the register space, in case it has grown. Old style
1476 vector oriented regsets would set regset_{size,bytes} here also. */
1477 allocate_reg_info (max_regno, FALSE, FALSE);
1479 /* Reset all the data we'll collect in propagate_block and its
1481 for (i = 0; i < max_regno; i++)
1485 REG_N_DEATHS (i) = 0;
1486 REG_N_CALLS_CROSSED (i) = 0;
1487 REG_LIVE_LENGTH (i) = 0;
/* Unknown until a use/set pins the register to a single block.  */
1488 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
1492 /* Delete dead instructions for propagate_block. */
/* Delete INSN, and if it was the sole user of a jump table via a REG_LABEL
   note, delete the now-dead ADDR_VEC/ADDR_DIFF_VEC as well.  */
1495 propagate_block_delete_insn (insn)
1498 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
1500 /* If the insn referred to a label, and that label was attached to
1501 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
1502 pretty much mandatory to delete it, because the ADDR_VEC may be
1503 referencing labels that no longer exist.
1505 INSN may reference a deleted label, particularly when a jump
1506 table has been optimized into a direct jump. There's no
1507 real good way to fix up the reference to the deleted label
1508 when the label is deleted, so we just allow it here.
1510 After dead code elimination is complete, we do search for
1511 any REG_LABEL notes which reference deleted labels as a
1514 if (inote && GET_CODE (inote) == CODE_LABEL)
1516 rtx label = XEXP (inote, 0);
1519 /* The label may be forced if it has been put in the constant
1520 pool. If that is the only use we must discard the table
1521 jump following it, but not the label itself. */
/* LABEL_NUSES == 1 + LABEL_PRESERVE_P means INSN was the only real user
   of the label; check that a dispatch-table jump follows it.  */
1522 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
1523 && (next = next_nonnote_insn (label)) != NULL
1524 && GET_CODE (next) == JUMP_INSN
1525 && (GET_CODE (PATTERN (next)) == ADDR_VEC
1526 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
1528 rtx pat = PATTERN (next);
1529 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1530 int len = XVECLEN (pat, diff_vec_p);
/* Drop the use counts of every label the table referenced before
   deleting the table jump itself.  */
1533 for (i = 0; i < len; i++)
1534 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
1536 delete_insn_and_edges (next);
1541 delete_insn_and_edges (insn);
1545 /* Delete dead libcalls for propagate_block. Return the insn
1546 before the libcall. */
/* NOTE is INSN's REG_RETVAL note; XEXP (note, 0) is the first insn of the
   libcall sequence, so the whole chain [first, insn] is removed.  */
1549 propagate_block_delete_libcall ( insn, note)
1552 rtx first = XEXP (note, 0);
1553 rtx before = PREV_INSN (first);
1555 delete_insn_chain_and_edges (first, insn);
1560 /* Update the life-status of regs for one insn. Return the previous insn. */
/* PBI carries the current liveness state (reg_live, mem_set_list, flags);
   INSN is scanned backwards: sets are recorded first, then uses, so a reg
   both set and used in INSN ends up live before it.  May delete INSN (or
   a whole libcall sequence) when PROP_KILL_DEAD_CODE is in effect, which
   is why the returned "previous insn" cannot simply be PREV_INSN (insn)
   computed by the caller.  */
1563 propagate_one_insn (pbi, insn)
1564 struct propagate_block_info *pbi;
1567 rtx prev = PREV_INSN (insn);
1568 int flags = pbi->flags;
1569 int insn_is_dead = 0;
1570 int libcall_is_dead = 0;
/* Notes, barriers, etc. carry no liveness information.  */
1574 if (! INSN_P (insn))
1577 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1578 if (flags & PROP_SCAN_DEAD_CODE)
1580 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
/* A dead copy-out of a libcall result may let us delete the whole
   libcall sequence; see libcall_dead_p.  */
1581 libcall_is_dead = (insn_is_dead && note != 0
1582 && libcall_dead_p (pbi, note, insn));
1585 /* If an instruction consists of just dead store(s) on final pass,
1587 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
1589 /* If we're trying to delete a prologue or epilogue instruction
1590 that isn't flagged as possibly being dead, something is wrong.
1591 But if we are keeping the stack pointer depressed, we might well
1592 be deleting insns that are used to compute the amount to update
1593 it by, so they are fine. */
1594 if (reload_completed
1595 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1596 && (TYPE_RETURNS_STACK_DEPRESSED
1597 (TREE_TYPE (current_function_decl))))
1598 && (((HAVE_epilogue || HAVE_prologue)
1599 && prologue_epilogue_contains (insn))
1600 || (HAVE_sibcall_epilogue
1601 && sibcall_epilogue_contains (insn)))
1602 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
1603 fatal_insn ("Attempt to delete prologue/epilogue insn:", insn);
1605 /* Record sets. Do this even for dead instructions, since they
1606 would have killed the values if they hadn't been deleted. */
1607 mark_set_regs (pbi, PATTERN (insn), insn);
1609 /* CC0 is now known to be dead. Either this insn used it,
1610 in which case it doesn't anymore, or clobbered it,
1611 so the next insn can't use it. */
/* For a dead libcall delete the whole chain; otherwise just INSN.  */
1614 if (libcall_is_dead)
1615 prev = propagate_block_delete_libcall ( insn, note);
1617 propagate_block_delete_insn (insn);
1622 /* See if this is an increment or decrement that can be merged into
1623 a following memory address. */
1626 rtx x = single_set (insn);
1628 /* Does this instruction increment or decrement a register? */
1629 if ((flags & PROP_AUTOINC)
1631 && GET_CODE (SET_DEST (x)) == REG
1632 && (GET_CODE (SET_SRC (x)) == PLUS
1633 || GET_CODE (SET_SRC (x)) == MINUS)
1634 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1635 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1636 /* Ok, look for a following memory ref we can combine with.
1637 If one is found, change the memory ref to a PRE_INC
1638 or PRE_DEC, cancel this insn, and return 1.
1639 Return 0 if nothing has been done. */
1640 && try_pre_increment_1 (pbi, insn))
1643 #endif /* AUTO_INC_DEC */
/* new_set collects the regs set by this insn; cleared per insn.  */
1645 CLEAR_REG_SET (pbi->new_set);
1647 /* If this is not the final pass, and this insn is copying the value of
1648 a library call and it's dead, don't scan the insns that perform the
1649 library call, so that the call's arguments are not marked live. */
1650 if (libcall_is_dead)
1652 /* Record the death of the dest reg. */
1653 mark_set_regs (pbi, PATTERN (insn), insn);
/* Skip backwards over the entire libcall sequence.  */
1655 insn = XEXP (note, 0);
1656 return PREV_INSN (insn);
1658 else if (GET_CODE (PATTERN (insn)) == SET
1659 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1660 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1661 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1662 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1663 /* We have an insn to pop a constant amount off the stack.
1664 (Such insns use PLUS regardless of the direction of the stack,
1665 and any insn to adjust the stack by a constant is always a pop.)
1666 These insns, if not dead stores, have no effect on life. */
1671 /* Any regs live at the time of a call instruction must not go
1672 in a register clobbered by calls. Find all regs now live and
1673 record this for them. */
1675 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
1676 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
1677 { REG_N_CALLS_CROSSED (i)++; });
1679 /* Record sets. Do this even for dead instructions, since they
1680 would have killed the values if they hadn't been deleted. */
1681 mark_set_regs (pbi, PATTERN (insn), insn);
1683 if (GET_CODE (insn) == CALL_INSN)
/* For conditional calls, clobbers below are likewise conditional.  */
1689 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1690 cond = COND_EXEC_TEST (PATTERN (insn));
1692 /* Non-constant calls clobber memory. */
1693 if (! CONST_OR_PURE_CALL_P (insn))
/* Forget all tracked memory stores: the callee may read them.  */
1695 free_EXPR_LIST_list (&pbi->mem_set_list);
1696 pbi->mem_set_list_len = 0;
1699 /* There may be extra registers to be clobbered. */
1700 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1702 note = XEXP (note, 1))
1703 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1704 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
1705 cond, insn, pbi->flags);
1707 /* Calls change all call-used and global registers. */
1708 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1709 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1711 /* We do not want REG_UNUSED notes for these registers. */
1712 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
1714 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO))
1718 /* If an insn doesn't use CC0, it becomes dead since we assume
1719 that every insn clobbers it. So show it dead here;
1720 mark_used_regs will set it live if it is referenced. */
/* Now record uses: anything read by INSN is live before it.  */
1725 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
1726 if ((flags & PROP_EQUAL_NOTES)
1727 && ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX))
1728 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX))))
1729 mark_used_regs (pbi, XEXP (note, 0), NULL_RTX, insn);
1731 /* Sometimes we may have inserted something before INSN (such as a move)
1732 when we make an auto-inc. So ensure we will scan those insns. */
1734 prev = PREV_INSN (insn);
1737 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
1743 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
1744 cond = COND_EXEC_TEST (PATTERN (insn));
1746 /* Calls use their arguments. */
1747 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1749 note = XEXP (note, 1))
1750 if (GET_CODE (XEXP (note, 0)) == USE)
1751 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
1754 /* The stack ptr is used (honorarily) by a CALL insn. */
1755 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
1757 /* Calls may also reference any of the global registers,
1758 so they are made live. */
1759 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1761 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
1766 /* On final pass, update counts of how many insns in which each reg
1768 if (flags & PROP_REG_INFO)
1769 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
1770 { REG_LIVE_LENGTH (i)++; });
1775 /* Initialize a propagate_block_info struct for public consumption.
1776 Note that the structure itself is opaque to this file, but that
1777 the user can use the regsets provided here. */
/* BB is the block to be scanned; LIVE is the live-at-end regset (updated in
   place by the scan); LOCAL_SET/COND_LOCAL_SET, if non-null, collect regs
   killed (un)conditionally; FLAGS is a mask of PROP_* bits.  The returned
   struct must be released with free_propagate_block_info.  */
1779 struct propagate_block_info *
1780 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
1782 regset live, local_set, cond_local_set;
1785 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
1788 pbi->reg_live = live;
1789 pbi->mem_set_list = NULL_RTX;
1790 pbi->mem_set_list_len = 0;
1791 pbi->local_set = local_set;
1792 pbi->cond_local_set = cond_local_set;
/* reg_next_use is only needed when building LOG_LINKS or doing auto-inc.  */
1796 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
1797 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
1799 pbi->reg_next_use = NULL;
1801 pbi->new_set = BITMAP_XMALLOC ();
1803 #ifdef HAVE_conditional_execution
/* Map from regno to reg_cond_life_info for conditionally-dead regs.  */
1804 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
1805 free_reg_cond_life_info);
1806 pbi->reg_cond_reg = BITMAP_XMALLOC ();
1808 /* If this block ends in a conditional branch, for each register live
1809 from one side of the branch and not the other, record the register
1810 as conditionally dead. */
1811 if (GET_CODE (bb->end) == JUMP_INSN
1812 && any_condjump_p (bb->end))
1814 regset_head diff_head;
1815 regset diff = INITIALIZE_REG_SET (diff_head);
1816 basic_block bb_true, bb_false;
1817 rtx cond_true, cond_false, set_src;
1820 /* Identify the successor blocks. */
1821 bb_true = bb->succ->dest;
1822 if (bb->succ->succ_next != NULL)
1824 bb_false = bb->succ->succ_next->dest;
/* The fallthru edge is the "false" arm; swap if we guessed wrong.  */
1826 if (bb->succ->flags & EDGE_FALLTHRU)
1828 basic_block t = bb_false;
1832 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
1837 /* This can happen with a conditional jump to the next insn. */
1838 if (JUMP_LABEL (bb->end) != bb_true->head)
1841 /* Simplest way to do nothing. */
1845 /* Extract the condition from the branch. */
1846 set_src = SET_SRC (pc_set (bb->end));
1847 cond_true = XEXP (set_src, 0);
/* NOTE(review): reverse_condition is unsafe for FP conditions in
   general; presumably guarded elsewhere -- confirm in full source.  */
1848 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
1849 GET_MODE (cond_true), XEXP (cond_true, 0),
1850 XEXP (cond_true, 1));
/* If the jump falls through on true (branch taken on false), the roles
   of the two conditions are exchanged.  */
1851 if (GET_CODE (XEXP (set_src, 1)) == PC)
1854 cond_false = cond_true;
1858 /* Compute which register lead different lives in the successors. */
1859 if (bitmap_operation (diff, bb_true->global_live_at_start,
1860 bb_false->global_live_at_start, BITMAP_XOR))
1862 rtx reg = XEXP (cond_true, 0);
1864 if (GET_CODE (reg) == SUBREG)
1865 reg = SUBREG_REG (reg);
/* Can only track conditions that test a plain register.  */
1867 if (GET_CODE (reg) != REG)
1870 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
1872 /* For each such register, mark it conditionally dead. */
1873 EXECUTE_IF_SET_IN_REG_SET
1876 struct reg_cond_life_info *rcli;
1879 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
/* Live only on the true arm => dead under the false condition,
   and vice versa (the assignment of COND is in elided lines).  */
1881 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
1885 rcli->condition = cond;
1886 rcli->stores = const0_rtx;
1887 rcli->orig_condition = cond;
1889 splay_tree_insert (pbi->reg_cond_dead, i,
1890 (splay_tree_value) rcli);
1894 FREE_REG_SET (diff);
1898 /* If this block has no successors, any stores to the frame that aren't
1899 used later in the block are dead. So make a pass over the block
1900 recording any such that are made and show them dead at the end. We do
1901 a very conservative and simple job here. */
1903 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
1904 && (TYPE_RETURNS_STACK_DEPRESSED
1905 (TREE_TYPE (current_function_decl))))
1906 && (flags & PROP_SCAN_DEAD_CODE)
1907 && (bb->succ == NULL
1908 || (bb->succ->succ_next == NULL
1909 && bb->succ->dest == EXIT_BLOCK_PTR
1910 && ! current_function_calls_eh_return)))
1913 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
1914 if (GET_CODE (insn) == INSN
1915 && (set = single_set (insn))
1916 && GET_CODE (SET_DEST (set)) == MEM)
1918 rtx mem = SET_DEST (set);
1919 rtx canon_mem = canon_rtx (mem);
1921 /* This optimization is performed by faking a store to the
1922 memory at the end of the block. This doesn't work for
1923 unchanging memories because multiple stores to unchanging
1924 memory is illegal and alias analysis doesn't consider it. */
1925 if (RTX_UNCHANGING_P (canon_mem))
/* Only frame-pointer-relative addresses are considered, i.e. local
   frame slots that cannot escape to a caller.  */
1928 if (XEXP (canon_mem, 0) == frame_pointer_rtx
1929 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
1930 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
1931 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
1932 add_to_mem_set_list (pbi, canon_mem);
1939 /* Release a propagate_block_info struct. */
/* Frees everything init_propagate_block_info allocated; the caller-owned
   regsets (reg_live, local_set, cond_local_set) are not freed here.  */
1942 free_propagate_block_info (pbi)
1943 struct propagate_block_info *pbi;
1945 free_EXPR_LIST_list (&pbi->mem_set_list);
1947 BITMAP_XFREE (pbi->new_set);
1949 #ifdef HAVE_conditional_execution
1950 splay_tree_delete (pbi->reg_cond_dead);
1951 BITMAP_XFREE (pbi->reg_cond_reg);
/* reg_next_use is only allocated under PROP_LOG_LINKS | PROP_AUTOINC.  */
1954 if (pbi->reg_next_use)
1955 free (pbi->reg_next_use);
1960 /* Compute the registers live at the beginning of a basic block BB from
1961 those live at the end.
1963 When called, REG_LIVE contains those live at the end. On return, it
1964 contains those live at the beginning.
1966 LOCAL_SET, if non-null, will be set with all registers killed
1967 unconditionally by this basic block.
1968 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
1969 killed conditionally by this basic block. If there is any unconditional
1970 set of a register, then the corresponding bit will be set in LOCAL_SET
1971 and cleared in COND_LOCAL_SET.
1972 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
1973 case, the resulting set will be equal to the union of the two sets that
1974 would otherwise be computed.
1976 Return non-zero if an INSN is deleted (i.e. by dead code removal). */
1979 propagate_block (bb, live, local_set, cond_local_set, flags)
1983 regset cond_local_set;
1986 struct propagate_block_info *pbi;
1990 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
1992 if (flags & PROP_REG_INFO)
1996 /* Process the regs live at the end of the block.
1997 Mark them as not local to any one basic block. */
1998 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
1999 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2002 /* Scan the block an insn at a time from end to beginning. */
2005 for (insn = bb->end;; insn = prev)
2007 /* If this is a call to `setjmp' et al, warn if any
2008 non-volatile datum is live. */
2009 if ((flags & PROP_REG_INFO)
2010 && GET_CODE (insn) == CALL_INSN
2011 && find_reg_note (insn, REG_SETJMP, NULL)
2012 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
2014 prev = propagate_one_insn (pbi, insn);
/* propagate_one_insn may have deleted INSN (or inserted insns); detect
   that by checking whether PREV's successor is still INSN.  */
2015 changed |= NEXT_INSN (prev) != insn;
2017 if (insn == bb->head)
2021 free_propagate_block_info (pbi);
2026 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
2027 (SET expressions whose destinations are registers dead after the insn).
2028 NEEDED is the regset that says which regs are alive after the insn.
2030 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
2032 If X is the entire body of an insn, NOTES contains the reg notes
2033 pertaining to the insn. */
2036 insn_dead_p (pbi, x, call_ok, notes)
2037 struct propagate_block_info *pbi;
2040 rtx notes ATTRIBUTE_UNUSED;
2042 enum rtx_code code = GET_CODE (x);
2045 /* As flow is invoked after combine, we must take existing AUTO_INC
2046 expressions into account. */
2047 for (; notes; notes = XEXP (notes, 1))
2049 if (REG_NOTE_KIND (notes) == REG_INC)
2051 int regno = REGNO (XEXP (notes, 0));
2053 /* Don't delete insns to set global regs. */
/* An auto-inc side effect keeps the insn alive if the incremented
   register is live or is a global hard register.  */
2054 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2055 || REGNO_REG_SET_P (pbi->reg_live, regno))
2061 /* If setting something that's a reg or part of one,
2062 see if that register's altered value will be live. */
2066 rtx r = SET_DEST (x);
/* cc0 is dead unless some following insn was seen to use it.  */
2069 if (GET_CODE (r) == CC0)
2070 return ! pbi->cc0_live;
2073 /* A SET that is a subroutine call cannot be dead. */
2074 if (GET_CODE (SET_SRC (x)) == CALL)
2080 /* Don't eliminate loads from volatile memory or volatile asms. */
2081 else if (volatile_refs_p (SET_SRC (x)))
2084 if (GET_CODE (r) == MEM)
/* Volatile or variable-sized stores are never provably dead.  */
2088 if (MEM_VOLATILE_P (r) || GET_MODE (r) == BLKmode)
2091 canon_r = canon_rtx (r);
2093 /* Walk the set of memory locations we are currently tracking
2094 and see if one is an identical match to this memory location.
2095 If so, this memory write is dead (remember, we're walking
2096 backwards from the end of the block to the start). Since
2097 rtx_equal_p does not check the alias set or flags, we also
2098 must have the potential for them to conflict (anti_dependence). */
2099 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
2100 if (anti_dependence (r, XEXP (temp, 0)))
2102 rtx mem = XEXP (temp, 0);
/* The later store must cover at least as many bytes as this one
   for this one to be dead.  */
2104 if (rtx_equal_p (XEXP (canon_r, 0), XEXP (mem, 0))
2105 && (GET_MODE_SIZE (GET_MODE (canon_r))
2106 <= GET_MODE_SIZE (GET_MODE (mem))))
2110 /* Check if memory reference matches an auto increment. Only
2111 post increment/decrement or modify are valid. */
2112 if (GET_MODE (mem) == GET_MODE (r)
2113 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
2114 || GET_CODE (XEXP (mem, 0)) == POST_INC
2115 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
2116 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
2117 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
/* Strip wrappers to find the underlying register being set.  */
2124 while (GET_CODE (r) == SUBREG
2125 || GET_CODE (r) == STRICT_LOW_PART
2126 || GET_CODE (r) == ZERO_EXTRACT)
2129 if (GET_CODE (r) == REG)
2131 int regno = REGNO (r);
/* Register still live after this insn => the set is not dead.  */
2134 if (REGNO_REG_SET_P (pbi->reg_live, regno))
2137 /* If this is a hard register, verify that subsequent
2138 words are not needed. */
2139 if (regno < FIRST_PSEUDO_REGISTER)
2141 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2144 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
2148 /* Don't delete insns to set global regs. */
2149 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2152 /* Make sure insns to set the stack pointer aren't deleted. */
2153 if (regno == STACK_POINTER_REGNUM)
2156 /* ??? These bits might be redundant with the force live bits
2157 in calculate_global_regs_live. We would delete from
2158 sequential sets; whether this actually affects real code
2159 for anything but the stack pointer I don't know. */
2160 /* Make sure insns to set the frame pointer aren't deleted. */
2161 if (regno == FRAME_POINTER_REGNUM
2162 && (! reload_completed || frame_pointer_needed))
2164 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2165 if (regno == HARD_FRAME_POINTER_REGNUM
2166 && (! reload_completed || frame_pointer_needed))
2170 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2171 /* Make sure insns to set arg pointer are never deleted
2172 (if the arg pointer isn't fixed, there will be a USE
2173 for it, so we can treat it normally). */
2174 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2178 /* Otherwise, the set is dead. */
2184 /* If performing several activities, insn is dead if each activity
2185 is individually dead. Also, CLOBBERs and USEs can be ignored; a
2186 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
2188 else if (code == PARALLEL)
2190 int i = XVECLEN (x, 0);
2192 for (i--; i >= 0; i--)
2193 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2194 && GET_CODE (XVECEXP (x, 0, i)) != USE
2195 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
2201 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
2202 is not necessarily true for hard registers. */
2203 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
2204 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2205 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
2208 /* We do not check other CLOBBER or USE here. An insn consisting of just
2209 a CLOBBER or just a USE should not be deleted. */
2213 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
2214 return 1 if the entire library call is dead.
2215 This is true if INSN copies a register (hard or pseudo)
2216 and if the hard return reg of the call insn is dead.
2217 (The caller should have tested the destination of the SET inside
2218 INSN already for death.)
2220 If this insn doesn't just copy a register, then we don't
2221 have an ordinary libcall. In that case, cse could not have
2222 managed to substitute the source for the dest later on,
2223 so we can assume the libcall is dead.
2225 PBI is the block info giving pseudoregs live before this insn.
2226 NOTE is the REG_RETVAL note of the insn. */
2229 libcall_dead_p (pbi, note, insn)
2230 struct propagate_block_info *pbi;
2234 rtx x = single_set (insn);
2238 rtx r = SET_SRC (x);
/* Only a plain register copy is an "ordinary" libcall result move.  */
2240 if (GET_CODE (r) == REG)
2242 rtx call = XEXP (note, 0);
2246 /* Find the call insn. */
/* Scan forward from the start of the libcall sequence for the call.  */
2247 while (call != insn && GET_CODE (call) != CALL_INSN)
2248 call = NEXT_INSN (call);
2250 /* If there is none, do nothing special,
2251 since ordinary death handling can understand these insns. */
2255 /* See if the hard reg holding the value is dead.
2256 If this is a PARALLEL, find the call within it. */
2257 call_pat = PATTERN (call);
2258 if (GET_CODE (call_pat) == PARALLEL)
2260 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
2261 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
2262 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
2265 /* This may be a library call that is returning a value
2266 via invisible pointer. Do nothing special, since
2267 ordinary death handling can understand these insns. */
2271 call_pat = XVECEXP (call_pat, 0, i);
/* CALL_OK = 1: the contained CALL does not itself keep the insn alive.  */
2274 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
2280 /* Return 1 if register REGNO was used before it was set, i.e. if it is
2281 live at function entry. Don't count global register variables, variables
2282 in registers that can be used for function arg passing, or variables in
2283 fixed hard registers. */
2286 regno_uninitialized (regno)
/* Without a CFG there is no liveness data; likewise ignore hard regs that
   are global, fixed, or potential argument registers.  */
2289 if (n_basic_blocks == 0
2290 || (regno < FIRST_PSEUDO_REGISTER
2291 && (global_regs[regno]
2292 || fixed_regs[regno]
2293 || FUNCTION_ARG_REGNO_P (regno))))
/* Live at the start of the first block == live on function entry.  */
2296 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
2299 /* 1 if register REGNO was alive at a place where `setjmp' was called
2300 and was set more than once or is an argument.
2301 Such regs may be clobbered by `longjmp'. */
2304 regno_clobbered_at_setjmp (regno)
2307 if (n_basic_blocks == 0)
/* Live on entry (i.e. an argument) counts the same as being set twice:
   the value from before the setjmp may be needed after a longjmp.  */
2310 return ((REG_N_SETS (regno) > 1
2311 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
2312 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2315 /* Add MEM to PBI->MEM_SET_LIST. MEM should be canonical. Respect the
2316 maximal list size; look for overlaps in mode and select the largest. */
/* NOTE(review): several interior lines (return type, braces, the early
   return for BLKmode, and a probable return after the mode-merge hit) are
   elided in this view; comments describe only visible statements.  */
2318 add_to_mem_set_list (pbi, mem)
2319 struct propagate_block_info *pbi;
2324 /* We don't know how large a BLKmode store is, so we must not
2325 take them into consideration. */
2326 if (GET_MODE (mem) == BLKmode)
/* Scan the existing EXPR_LIST of known memory stores for an entry whose
   address is rtx_equal to MEM's address.  */
2329 for (i = pbi->mem_set_list; i ; i = XEXP (i, 1))
2331 rtx e = XEXP (i, 0);
2332 if (rtx_equal_p (XEXP (mem, 0), XEXP (e, 0)))
/* Same address, wider store: widen the recorded entry's mode so the
   list keeps the largest overlapping store.  */
2334 if (GET_MODE_SIZE (GET_MODE (mem)) > GET_MODE_SIZE (GET_MODE (e)))
2337 /* If we must store a copy of the mem, we can just modify
2338 the mode of the stored copy. */
2339 if (pbi->flags & PROP_AUTOINC)
2340 PUT_MODE (e, GET_MODE (mem));
/* No matching entry: append MEM, but only while the list is below its
   size cap (MAX_MEM_SET_LIST_LEN) to bound analysis cost.  */
2349 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN)
2352 /* Store a copy of mem, otherwise the address may be
2353 scrogged by find_auto_inc. */
2354 if (pbi->flags & PROP_AUTOINC)
2355 mem = shallow_copy_rtx (mem);
2357 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
2358 pbi->mem_set_list_len++;
2362 /* INSN references memory, possibly using autoincrement addressing modes.
2363 Find any entries on the mem_set_list that need to be invalidated due
2364 to an address change. */
/* NOTE(review): return type and parameter-declaration lines are elided in
   this view.  Also note the initializer on `note' at line 2371 is dead --
   the for-loop immediately re-initializes it; harmless, but worth cleaning
   up when the full function is in view.  */
2367 invalidate_mems_from_autoinc (pbi, insn)
2368 struct propagate_block_info *pbi;
2371 rtx note = REG_NOTES (insn);
/* Each REG_INC note names a register modified by an embedded side effect;
   any recorded store whose address mentions that register is now stale.  */
2372 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2373 if (REG_NOTE_KIND (note) == REG_INC)
2374 invalidate_mems_from_set (pbi, XEXP (note, 0));
2377 /* EXP is a REG. Remove any dependent entries from pbi->mem_set_list. */
/* NOTE(review): return type, parameter declarations, the while-loop header,
   the `else' of the splice, and the prev/temp advance are elided in this
   view; the visible lines implement a classic singly-linked-list splice.  */
2380 invalidate_mems_from_set (pbi, exp)
2381 struct propagate_block_info *pbi;
2384 rtx temp = pbi->mem_set_list;
2385 rtx prev = NULL_RTX;
2390 next = XEXP (temp, 1);
/* Drop any recorded store whose RTL mentions (overlaps) register EXP:
   the register's value changed, so the remembered address is stale.  */
2391 if (reg_overlap_mentioned_p (exp, XEXP (temp, 0)))
2393 /* Splice this entry out of the list. */
/* Unlink from the middle (prev known) ...  */
2395 XEXP (prev, 1) = next;
/* ... or from the head (elided else-branch selects between these).  */
2397 pbi->mem_set_list = next;
2398 free_EXPR_LIST_node (temp);
2399 pbi->mem_set_list_len--;
2407 /* Process the registers that are set within X. Their bits are set to
2408 1 in the regset DEAD, because they are dead prior to this insn.
2410 If INSN is nonzero, it is the insn being processed.
2412 FLAGS is the set of operations to perform. */
/* NOTE(review): the function header (return type), several case labels of
   both switches, and the loop/brace structure are partially elided in this
   view; comments describe only visible statements.  */
2415 mark_set_regs (pbi, x, insn)
2416 struct propagate_block_info *pbi;
2419 rtx cond = NULL_RTX;
/* First, registers modified by embedded side effects (REG_INC notes) count
   as sets too; if X is a COND_EXEC the set is under its test condition.  */
2424 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2426 if (REG_NOTE_KIND (link) == REG_INC)
2427 mark_set_1 (pbi, SET, XEXP (link, 0),
2428 (GET_CODE (x) == COND_EXEC
2429 ? COND_EXEC_TEST (x) : NULL_RTX),
/* Dispatch on the top-level pattern code.  A plain SET/CLOBBER marks its
   destination directly; COND_EXEC records the condition and recurses on
   the guarded body; PARALLEL iterates its elements.  */
2433 switch (code = GET_CODE (x))
2437 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
2441 cond = COND_EXEC_TEST (x);
2442 x = COND_EXEC_CODE (x);
/* PARALLEL: walk elements last-to-first, handling each SET/CLOBBER
   (possibly itself wrapped in a COND_EXEC).  */
2449 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2451 rtx sub = XVECEXP (x, 0, i);
2452 switch (code = GET_CODE (sub))
/* Nested COND_EXEC inside a COND_EXEC is not expected here (the elided
   branch after this test presumably aborts).  */
2455 if (cond != NULL_RTX)
2458 cond = COND_EXEC_TEST (sub);
2459 sub = COND_EXEC_CODE (sub);
2460 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
2466 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
2481 /* Process a single set, which appears in INSN. REG (which may not
2482 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
2483 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
2484 If the set is conditional (because it appear in a COND_EXEC), COND
2485 will be the condition. */
/* NOTE(review): this function is large and this view elides many interior
   lines (case labels, braces, whole statements).  Comments below annotate
   only the visible code; structural claims about elided control flow are
   hedged.  High-level shape, from what is visible: (1) normalize REG
   through PARALLEL/STRICT_LOW_PART/SUBREG wrappers, computing the hard-reg
   range [regno_first, regno_last] and a NOT_DEAD bitmask for partial sets;
   (2) maintain the mem_set_list dead-store tracking; (3) update local/cond
   sets, liveness, REG_INFO statistics, LOG_LINKS and REG_UNUSED notes.  */
2488 mark_set_1 (pbi, code, reg, cond, insn, flags)
2489 struct propagate_block_info *pbi;
2491 rtx reg, cond, insn;
2494 int regno_first = -1, regno_last = -1;
/* Bitmask over [regno_first, regno_last]: bit i set means hard reg
   regno_first+i is NOT made dead by this set (partial/conditional set).  */
2495 unsigned long not_dead = 0;
2498 /* Modifying just one hardware register of a multi-reg value or just a
2499 byte field of a register does not mean the value from before this insn
2500 is now dead. Of course, if it was dead after it's unused now. */
2502 switch (GET_CODE (reg))
2505 /* Some targets place small structures in registers for return values of
2506 functions. We have to detect this case specially here to get correct
2507 flow information. */
/* (PARALLEL case, label elided): recurse on each non-null element.  */
2508 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2509 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
2510 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
2516 case STRICT_LOW_PART:
2517 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
/* Strip wrappers down to the underlying REG or MEM.  NOTE(review): in the
   full source this is presumably a do/while -- the `do' keyword preceding
   line 2519 appears elided here; confirm against the unsampled file.  */
2519 reg = XEXP (reg, 0);
2520 while (GET_CODE (reg) == SUBREG
2521 || GET_CODE (reg) == ZERO_EXTRACT
2522 || GET_CODE (reg) == SIGN_EXTRACT
2523 || GET_CODE (reg) == STRICT_LOW_PART);
2524 if (GET_CODE (reg) == MEM)
/* A partial set: the reg stays live iff it was live before.  */
2526 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
/* Compute the affected hard-reg range; pseudos span exactly one regno.  */
2530 regno_last = regno_first = REGNO (reg);
2531 if (regno_first < FIRST_PSEUDO_REGISTER)
2532 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
/* (SUBREG case, label elided.)  */
2536 if (GET_CODE (SUBREG_REG (reg)) == REG)
2538 enum machine_mode outer_mode = GET_MODE (reg);
2539 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
2541 /* Identify the range of registers affected. This is moderately
2542 tricky for hard registers. See alter_subreg. */
2544 regno_last = regno_first = REGNO (SUBREG_REG (reg));
2545 if (regno_first < FIRST_PSEUDO_REGISTER)
2547 regno_first += subreg_regno_offset (regno_first, inner_mode,
2550 regno_last = (regno_first
2551 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
2553 /* Since we've just adjusted the register number ranges, make
2554 sure REG matches. Otherwise some_was_live will be clear
2555 when it shouldn't have been, and we'll create incorrect
2556 REG_UNUSED notes. */
2557 reg = gen_rtx_REG (outer_mode, regno_first);
2561 /* If the number of words in the subreg is less than the number
2562 of words in the full register, we have a well-defined partial
2563 set. Otherwise the high bits are undefined.
2565 This is only really applicable to pseudos, since we just took
2566 care of multi-word hard registers. */
2567 if (((GET_MODE_SIZE (outer_mode)
2568 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
2569 < ((GET_MODE_SIZE (inner_mode)
2570 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
2571 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
2574 reg = SUBREG_REG (reg);
/* SUBREG of a non-REG: just strip the SUBREG.  */
2578 reg = SUBREG_REG (reg);
2585 /* If this set is a MEM, then it kills any aliased writes.
2586 If this set is a REG, then it kills any MEMs which use the reg. */
2587 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2589 if (GET_CODE (reg) == REG)
2590 invalidate_mems_from_set (pbi, reg);
2592 /* If the memory reference had embedded side effects (autoincrement
2593 address modes. Then we may need to kill some entries on the
2595 if (insn && GET_CODE (reg) == MEM)
2596 invalidate_mems_from_autoinc (pbi, insn);
2598 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2599 /* ??? With more effort we could track conditional memory life. */
2601 /* There are no REG_INC notes for SP, so we can't assume we'll see
2602 everything that invalidates it. To be safe, don't eliminate any
2603 stores though SP; none of them should be redundant anyway. */
2604 && ! reg_mentioned_p (stack_pointer_rtx, reg))
2605 add_to_mem_set_list (pbi, canon_rtx (reg));
/* Main REG path.  Exclude the (hard/soft) frame pointer while it is still
   needed, and a fixed arg pointer -- those must stay live throughout.  */
2608 if (GET_CODE (reg) == REG
2609 && ! (regno_first == FRAME_POINTER_REGNUM
2610 && (! reload_completed || frame_pointer_needed))
2611 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2612 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
2613 && (! reload_completed || frame_pointer_needed))
2615 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2616 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
2620 int some_was_live = 0, some_was_dead = 0;
/* Sweep the hard-reg range: record local/conditional-local set bits and
   accumulate whether any part was live/dead before this insn.  */
2622 for (i = regno_first; i <= regno_last; ++i)
2624 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
2627 /* Order of the set operation matters here since both
2628 sets may be the same. */
2629 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
2630 if (cond != NULL_RTX
2631 && ! REGNO_REG_SET_P (pbi->local_set, i))
2632 SET_REGNO_REG_SET (pbi->cond_local_set, i);
2634 SET_REGNO_REG_SET (pbi->local_set, i);
/* CLOBBERs don't count as new definitions for new_set.  */
2636 if (code != CLOBBER)
2637 SET_REGNO_REG_SET (pbi->new_set, i);
2639 some_was_live |= needed_regno;
2640 some_was_dead |= ! needed_regno;
2643 #ifdef HAVE_conditional_execution
2644 /* Consider conditional death in deciding that the register needs
2646 if (some_was_live && ! not_dead
2647 /* The stack pointer is never dead. Well, not strictly true,
2648 but it's very difficult to tell from here. Hopefully
2649 combine_stack_adjustments will fix up the most egregious
2651 && regno_first != STACK_POINTER_REGNUM)
/* Each reg not proven unconditionally dead keeps its NOT_DEAD bit.  */
2653 for (i = regno_first; i <= regno_last; ++i)
2654 if (! mark_regno_cond_dead (pbi, i, cond))
2655 not_dead |= ((unsigned long) 1) << (i - regno_first);
2659 /* Additional data to record if this is the final pass. */
2660 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
2661 | PROP_DEATH_NOTES | PROP_AUTOINC))
2664 int blocknum = pbi->bb->index;
2667 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
/* Remember the next use of regno_first before clearing it below; used
   later to build a LOG_LINK back to this insn.  */
2669 y = pbi->reg_next_use[regno_first];
2671 /* The next use is no longer next, since a store intervenes. */
2672 for (i = regno_first; i <= regno_last; ++i)
2673 pbi->reg_next_use[i] = 0;
2676 if (flags & PROP_REG_INFO)
2678 for (i = regno_first; i <= regno_last; ++i)
2680 /* Count (weighted) references, stores, etc. This counts a
2681 register twice if it is modified, but that is correct. */
2682 REG_N_SETS (i) += 1;
2683 REG_N_REFS (i) += 1;
2684 REG_FREQ (i) += REG_FREQ_FROM_BB (pbi->bb);
2686 /* The insns where a reg is live are normally counted
2687 elsewhere, but we want the count to include the insn
2688 where the reg is set, and the normal counting mechanism
2689 would not count it. */
2690 REG_LIVE_LENGTH (i) += 1;
2693 /* If this is a hard reg, record this function uses the reg. */
2694 if (regno_first < FIRST_PSEUDO_REGISTER)
2696 for (i = regno_first; i <= regno_last; i++)
2697 regs_ever_live[i] = 1;
2701 /* Keep track of which basic blocks each reg appears in. */
2702 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
2703 REG_BASIC_BLOCK (regno_first) = blocknum;
2704 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
2705 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
/* Case 1: every part of the reg was live -> the old value is used after
   this insn; build the LOG_LINK from the saved next use Y.  */
2709 if (! some_was_dead)
2711 if (flags & PROP_LOG_LINKS)
2713 /* Make a logical link from the next following insn
2714 that uses this register, back to this insn.
2715 The following insns have already been processed.
2717 We don't build a LOG_LINK for hard registers containing
2718 in ASM_OPERANDs. If these registers get replaced,
2719 we might wind up changing the semantics of the insn,
2720 even if reload can make what appear to be valid
2721 assignments later. */
2722 if (y && (BLOCK_NUM (y) == blocknum)
2723 && (regno_first >= FIRST_PSEUDO_REGISTER
2724 || asm_noperands (PATTERN (y)) < 0))
2725 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
/* Case 2: no part was live -> the value set here is never used.  */
2730 else if (! some_was_live)
2732 if (flags & PROP_REG_INFO)
2733 REG_N_DEATHS (regno_first) += 1;
2735 if (flags & PROP_DEATH_NOTES)
2737 /* Note that dead stores have already been deleted
2738 when possible. If we get here, we have found a
2739 dead store that cannot be eliminated (because the
2740 same insn does something useful). Indicate this
2741 by marking the reg being set as dying here. */
2743 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
/* Case 3 (elided else): partially live multi-word hard reg.  */
2748 if (flags & PROP_DEATH_NOTES)
2750 /* This is a case where we have a multi-word hard register
2751 and some, but not all, of the words of the register are
2752 needed in subsequent insns. Write REG_UNUSED notes
2753 for those parts that were not needed. This case should
2756 for (i = regno_first; i <= regno_last; ++i)
2757 if (! REGNO_REG_SET_P (pbi->reg_live, i))
2759 = alloc_EXPR_LIST (REG_UNUSED,
2760 gen_rtx_REG (reg_raw_mode[i], i),
2766 /* Mark the register as being dead. */
2768 /* The stack pointer is never dead. Well, not strictly true,
2769 but it's very difficult to tell from here. Hopefully
2770 combine_stack_adjustments will fix up the most egregious
2772 && regno_first != STACK_POINTER_REGNUM)
/* Kill each part of the range except those NOT_DEAD protects.  */
2774 for (i = regno_first; i <= regno_last; ++i)
2775 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
2776 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
/* Excluded regs (frame/arg pointer) still invalidate next-use info.  */
2779 else if (GET_CODE (reg) == REG)
2781 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
2782 pbi->reg_next_use[regno_first] = 0;
2785 /* If this is the last pass and this is a SCRATCH, show it will be dying
2786 here and count it. */
2787 else if (GET_CODE (reg) == SCRATCH)
2789 if (flags & PROP_DEATH_NOTES)
2791 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
#ifdef HAVE_conditional_execution
2796 /* Mark REGNO conditionally dead.
2797 Return true if the register is now unconditionally dead. */
/* NOTE(review): return type, some braces and several return statements are
   elided in this view.  Visible structure: maintain pbi->reg_cond_dead, a
   splay tree mapping regno -> reg_cond_life_info {condition under which
   the reg is dead, accumulated store condition, original condition}.  */
2800 mark_regno_cond_dead (pbi, regno, cond)
2801 struct propagate_block_info *pbi;
2805 /* If this is a store to a predicate register, the value of the
2806 predicate is changing, we don't know that the predicate as seen
2807 before is the same as that seen after. Flush all dependent
2808 conditions from reg_cond_dead. This will make all such
2809 conditionally live registers unconditionally live. */
2810 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
2811 flush_reg_cond_reg (pbi, regno);
2813 /* If this is an unconditional store, remove any conditional
2814 life that may have existed. */
2815 if (cond == NULL_RTX)
2816 splay_tree_remove (pbi->reg_cond_dead, regno);
2819 splay_tree_node node;
2820 struct reg_cond_life_info *rcli;
2823 /* Otherwise this is a conditional set. Record that fact.
2824 It may have been conditionally used, or there may be a
2825 subsequent set with a complimentary condition. */
2827 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
/* No existing entry: the reg was unconditionally live.  */
2830 /* The register was unconditionally live previously.
2831 Record the current condition as the condition under
2832 which it is dead. */
2833 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
2834 rcli->condition = cond;
2835 rcli->stores = cond;
2836 rcli->orig_condition = const0_rtx;
2837 splay_tree_insert (pbi->reg_cond_dead, regno,
2838 (splay_tree_value) rcli);
/* Remember that COND's predicate register now guards liveness info.  */
2840 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
2842 /* Not unconditionally dead. */
2847 /* The register was conditionally live previously.
2848 Add the new condition to the old. */
2849 rcli = (struct reg_cond_life_info *) node->value;
/* New death condition = old condition OR this set's condition.  */
2850 ncond = rcli->condition;
2851 ncond = ior_reg_cond (ncond, cond, 1);
/* Accumulate the store conditions the same way (const1_rtx saturates).  */
2852 if (rcli->stores == const0_rtx)
2853 rcli->stores = cond;
2854 else if (rcli->stores != const1_rtx)
2855 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
2857 /* If the register is now unconditionally dead, remove the entry
2858 in the splay_tree. A register is unconditionally dead if the
2859 dead condition ncond is true. A register is also unconditionally
2860 dead if the sum of all conditional stores is an unconditional
2861 store (stores is true), and the dead condition is identically the
2862 same as the original dead condition initialized at the end of
2863 the block. This is a pointer compare, not an rtx_equal_p
2865 if (ncond == const1_rtx
2866 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
2867 splay_tree_remove (pbi->reg_cond_dead, regno);
/* Still only conditionally dead: keep the updated condition.  */
2870 rcli->condition = ncond;
2872 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
2874 /* Not unconditionally dead. */
2883 /* Called from splay_tree_delete for pbi->reg_cond_life. */
/* NOTE(review): return type and the actual free of RCLI are elided in this
   view; only the cast of the splay-tree value is visible.  */
2886 free_reg_cond_life_info (value)
2887 splay_tree_value value;
2889 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
2893 /* Helper function for flush_reg_cond_reg. */
/* NOTE(review): return type, parameter `data' declaration, and the return
   statements of each branch are elided in this view.  Visible contract:
   splay_tree_foreach callback; DATA is int[2] = {regno, last-flushed key};
   signals removal of NODE by recording its key in xdata[1] (the caller
   removes it and restarts the traversal).  */
2896 flush_reg_cond_reg_1 (node, data)
2897 splay_tree_node node;
2900 struct reg_cond_life_info *rcli;
2901 int *xdata = (int *) data;
2902 unsigned int regno = xdata[0];
2904 /* Don't need to search if last flushed value was farther on in
2905 the in-order traversal. */
2906 if (xdata[1] >= (int) node->key)
2909 /* Splice out portions of the expression that refer to regno. */
2910 rcli = (struct reg_cond_life_info *) node->value;
2911 rcli->condition = elim_reg_cond (rcli->condition, regno);
2912 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
2913 rcli->stores = elim_reg_cond (rcli->stores, regno);
2915 /* If the entire condition is now false, signal the node to be removed. */
2916 if (rcli->condition == const0_rtx)
/* Record which key to remove; caller re-runs the foreach afterwards.  */
2918 xdata[1] = node->key;
/* Condition degenerated to "always dead"?  (elided branch body --
   presumably an abort, since that state should be unreachable here).  */
2921 else if (rcli->condition == const1_rtx)
2927 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
/* NOTE(review): return type and the declaration/initialization of `pair'
   (an int[2] of {regno, sentinel}) are elided in this view.  */
2930 flush_reg_cond_reg (pbi, regno)
2931 struct propagate_block_info *pbi;
/* flush_reg_cond_reg_1 returns -1 via the foreach when it marked a node
   for removal (key stored in pair[1]); remove it and retry until the
   traversal completes with no removals.  */
2938 while (splay_tree_foreach (pbi->reg_cond_dead,
2939 flush_reg_cond_reg_1, pair) == -1)
2940 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
/* REGNO no longer guards any condition in the tree.  */
2942 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
2945 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
2946 For ior/and, the ADD flag determines whether we want to add the new
2947 condition X to the old one unconditionally. If it is zero, we will
2948 only return a new expression if X allows us to simplify part of
2949 OLD, otherwise we return NULL to the caller.
2950 If ADD is nonzero, we will return a new condition in all cases. The
2951 toplevel caller of one of these functions should always pass 1 for
/* NOTE(review): return type, parameter declarations, several case labels
   (IOR/AND/NOT of the switch), return statements and brace structure are
   elided in this view.  Visible intent: simplify (OLD | X) over predicate
   conditions whose leaves are comparisons against predicate registers.  */
2955 ior_reg_cond (old, x, add)
/* Leaf case: OLD itself is a comparison.  */
2961 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
/* x REV-matches old on the same predicate reg: x | not-x -> true
   (elided result, presumably const1_rtx).  */
2963 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
2964 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
2965 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
/* Identical comparison: x | x -> old (elided return).  */
2967 if (GET_CODE (x) == GET_CODE (old)
2968 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
/* No simplification possible; build the IOR only if ADD.  */
2972 return gen_rtx_IOR (0, old, x);
2975 switch (GET_CODE (old))
/* (IOR case): distribute X into both operands and recombine.  */
2978 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
2979 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
2980 if (op0 != NULL || op1 != NULL)
/* false | b == b; a | false == a; true dominates.  */
2982 if (op0 == const0_rtx)
2983 return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
2984 if (op1 == const0_rtx)
2985 return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
2986 if (op0 == const1_rtx || op1 == const1_rtx)
/* One side unsimplified: re-attach X to it.  */
2989 op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
2990 else if (rtx_equal_p (x, op0))
2991 /* (x | A) | x ~ (x | A). */
2994 op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
2995 else if (rtx_equal_p (x, op1))
2996 /* (A | x) | x ~ (A | x). */
2998 return gen_rtx_IOR (0, op0, op1);
3002 return gen_rtx_IOR (0, old, x);
/* (AND case): try to simplify either conjunct by ORing X in.  */
3005 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
3006 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
3007 if (op0 != NULL || op1 != NULL)
/* (true & b) | x == b | x; symmetric for op1; false dominates AND.  */
3009 if (op0 == const1_rtx)
3010 return op1 ? op1 : gen_rtx_IOR (0, XEXP (old, 1), x);
3011 if (op1 == const1_rtx)
3012 return op0 ? op0 : gen_rtx_IOR (0, XEXP (old, 0), x);
3013 if (op0 == const0_rtx || op1 == const0_rtx)
3016 op0 = gen_rtx_IOR (0, XEXP (old, 0), x);
3017 else if (rtx_equal_p (x, op0))
3018 /* (x & A) | x ~ x. */
3021 op1 = gen_rtx_IOR (0, XEXP (old, 1), x);
3022 else if (rtx_equal_p (x, op1))
3023 /* (A & x) | x ~ x. */
3025 return gen_rtx_AND (0, op0, op1);
3029 return gen_rtx_IOR (0, old, x);
/* (NOT case): !a | x == !(a & !x) by De Morgan.  */
3032 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3034 return not_reg_cond (op0);
3037 return gen_rtx_IOR (0, old, x);
/* NOTE(review): this is the body of not_reg_cond (its header/declaration
   lines are elided in this view).  Negate predicate condition X:
   constants flip, a comparison against zero is reversed in place, and
   anything else is wrapped in an explicit NOT.  */
3048 enum rtx_code x_code;
/* !false -> true; !true -> false (elided return values).  */
3050 if (x == const0_rtx)
3052 else if (x == const1_rtx)
3054 x_code = GET_CODE (x);
/* A comparison of a predicate REG: reverse the comparison code.  */
3057 if (GET_RTX_CLASS (x_code) == '<'
3058 && GET_CODE (XEXP (x, 0)) == REG)
/* Only comparisons against zero are handled this way (elided branch,
   presumably an abort for other second operands).  */
3060 if (XEXP (x, 1) != const0_rtx)
3063 return gen_rtx_fmt_ee (reverse_condition (x_code),
3064 VOIDmode, XEXP (x, 0), const0_rtx);
/* Fallback: symbolic negation.  */
3066 return gen_rtx_NOT (0, x);
/* Dual of ior_reg_cond: simplify (OLD & X) over predicate conditions; see
   the shared comment block above ior_reg_cond for the ADD protocol.
   NOTE(review): return type, parameter declarations, case labels, return
   statements and brace structure are elided in this view.  */
3070 and_reg_cond (old, x, add)
/* Leaf case: OLD is a comparison.  */
3076 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
/* x is the reversed comparison on the same reg: x & !x -> false
   (elided result, presumably const0_rtx).  */
3078 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
3079 && GET_CODE (x) == reverse_condition (GET_CODE (old))
3080 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
/* Identical comparison: x & x -> old (elided return).  */
3082 if (GET_CODE (x) == GET_CODE (old)
3083 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
3087 return gen_rtx_AND (0, old, x);
3090 switch (GET_CODE (old))
/* (IOR case): try to simplify either disjunct by ANDing X in.  */
3093 op0 = and_reg_cond (XEXP (old, 0), x, 0);
3094 op1 = and_reg_cond (XEXP (old, 1), x, 0);
3095 if (op0 != NULL || op1 != NULL)
/* (false | b) & x == b & x; symmetric for op1; true dominates IOR.  */
3097 if (op0 == const0_rtx)
3098 return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3099 if (op1 == const0_rtx)
3100 return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3101 if (op0 == const1_rtx || op1 == const1_rtx)
3104 op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3105 else if (rtx_equal_p (x, op0))
3106 /* (x | A) & x ~ x. */
3109 op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3110 else if (rtx_equal_p (x, op1))
3111 /* (A | x) & x ~ x. */
3113 return gen_rtx_IOR (0, op0, op1);
3117 return gen_rtx_AND (0, old, x);
/* (AND case): distribute X into both conjuncts and recombine.  */
3120 op0 = and_reg_cond (XEXP (old, 0), x, 0);
3121 op1 = and_reg_cond (XEXP (old, 1), x, 0);
3122 if (op0 != NULL || op1 != NULL)
/* true & b == b; a & true == a; false dominates.  */
3124 if (op0 == const1_rtx)
3125 return op1 ? op1 : gen_rtx_AND (0, XEXP (old, 1), x);
3126 if (op1 == const1_rtx)
3127 return op0 ? op0 : gen_rtx_AND (0, XEXP (old, 0), x);
3128 if (op0 == const0_rtx || op1 == const0_rtx)
3131 op0 = gen_rtx_AND (0, XEXP (old, 0), x);
3132 else if (rtx_equal_p (x, op0))
3133 /* (x & A) & x ~ (x & A). */
3136 op1 = gen_rtx_AND (0, XEXP (old, 1), x);
3137 else if (rtx_equal_p (x, op1))
3138 /* (A & x) & x ~ (A & x). */
3140 return gen_rtx_AND (0, op0, op1);
3144 return gen_rtx_AND (0, old, x);
/* (NOT case): !a & x == !(a | !x) by De Morgan.  */
3147 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
3149 return not_reg_cond (op0);
3152 return gen_rtx_AND (0, old, x);
3159 /* Given a condition X, remove references to reg REGNO and return the
3160 new condition. The removal will be done so that all conditions
3161 involving REGNO are considered to evaluate to false. This function
3162 is used when the value of REGNO changes. */
/* NOTE(review): return type, parameter declarations, case labels (AND,
   IOR, NOT) and several return values are elided in this view.  Visible
   structure: recursive constant-folding substitution of "false" for every
   comparison leaf that tests REGNO.  */
3165 elim_reg_cond (x, regno)
/* Leaf comparison: testing REGNO means the whole leaf becomes false
   (elided return); other leaves are returned unchanged.  */
3171 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3173 if (REGNO (XEXP (x, 0)) == regno)
3178 switch (GET_CODE (x))
/* (AND case): recurse, then fold with false-dominates / true-identity.  */
3181 op0 = elim_reg_cond (XEXP (x, 0), regno);
3182 op1 = elim_reg_cond (XEXP (x, 1), regno);
3183 if (op0 == const0_rtx || op1 == const0_rtx)
3185 if (op0 == const1_rtx)
3187 if (op1 == const1_rtx)
/* Unchanged operands: return X itself to avoid reallocation.  */
3189 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3191 return gen_rtx_AND (0, op0, op1);
/* (IOR case): dual folding, true dominates / false is identity.  */
3194 op0 = elim_reg_cond (XEXP (x, 0), regno);
3195 op1 = elim_reg_cond (XEXP (x, 1), regno);
3196 if (op0 == const1_rtx || op1 == const1_rtx)
3198 if (op0 == const0_rtx)
3200 if (op1 == const0_rtx)
3202 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
3204 return gen_rtx_IOR (0, op0, op1);
/* (NOT case): recurse and fold constants through the negation.  */
3207 op0 = elim_reg_cond (XEXP (x, 0), regno);
3208 if (op0 == const0_rtx)
3210 if (op0 == const1_rtx)
3212 if (op0 != XEXP (x, 0))
3213 return not_reg_cond (op0);
3220 #endif /* HAVE_conditional_execution */
3224 /* Try to substitute the auto-inc expression INC as the address inside
3225 MEM which occurs in INSN. Currently, the address of MEM is an expression
3226 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
3227 that has a single set whose source is a PLUS of INCR_REG and something
/* NOTE(review): this function sits inside an #ifdef AUTO_INC_DEC region
   (the #endif is visible after find_auto_inc below); its return type and
   many interior lines (returns, braces, an else branch, end_sequence) are
   elided in this view.  Comments annotate only visible code.  */
3231 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
3232 struct propagate_block_info *pbi;
3233 rtx inc, insn, mem, incr, incr_reg;
3235 int regno = REGNO (incr_reg);
3236 rtx set = single_set (incr);
3237 rtx q = SET_DEST (set);
3238 rtx y = SET_SRC (set);
/* Which operand of the PLUS in Y is INCR_REG (the other is the step).  */
3239 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
3241 /* Make sure this reg appears only once in this insn. */
3242 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
/* Simple case: INCR_REG dies at INCR, so folding the increment into the
   address is safe (but never for registers reload may eliminate).  */
3245 if (dead_or_set_p (incr, incr_reg)
3246 /* Mustn't autoinc an eliminable register. */
3247 && (regno >= FIRST_PSEUDO_REGISTER
3248 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
3250 /* This is the simple case. Try to make the auto-inc. If
3251 we can't, we are done. Otherwise, we will do any
3252 needed updates below. */
3253 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
/* Harder case: INCR sets a distinct reg Q = INCR_REG + size, and Q is
   neither used nor set between INSN and INCR.  */
3256 else if (GET_CODE (q) == REG
3257 /* PREV_INSN used here to check the semi-open interval
3259 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
3260 /* We must also check for sets of q as q may be
3261 a call clobbered hard register and there may
3262 be a call between PREV_INSN (insn) and incr. */
3263 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
3265 /* We have *p followed sometime later by q = p+size.
3266 Both p and q must be live afterward,
3267 and q is not used between INSN and its assignment.
3268 Change it to q = p, ...*q..., q = q+size.
3269 Then fall into the usual case. */
/* Build the q = p copy in a sequence (start_sequence elided).  */
3273 emit_move_insn (q, incr_reg);
3274 insns = get_insns ();
3277 /* If we can't make the auto-inc, or can't make the
3278 replacement into Y, exit. There's no point in making
3279 the change below if we can't do the auto-inc and doing
3280 so is not correct in the pre-inc case. */
/* Both changes are validated as a group: MEM's address becomes INC and
   INCR's source PLUS operand becomes Q.  */
3283 validate_change (insn, &XEXP (mem, 0), inc, 1);
3284 validate_change (incr, &XEXP (y, opnum), q, 1);
3285 if (! apply_change_group ())
3288 /* We now know we'll be doing this change, so emit the
3289 new insn(s) and do the updates. */
3290 emit_insns_before (insns, insn);
/* Keep the basic-block head pointer valid if INSN headed the block.  */
3292 if (pbi->bb->head == insn)
3293 pbi->bb->head = insns;
3295 /* INCR will become a NOTE and INSN won't contain a
3296 use of INCR_REG. If a use of INCR_REG was just placed in
3297 the insn before INSN, make that the next use.
3298 Otherwise, invalidate it. */
3299 if (GET_CODE (PREV_INSN (insn)) == INSN
3300 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
3301 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg
3302 pbi->reg_next_use[regno] = PREV_INSN (insn);
3304 pbi->reg_next_use[regno] = 0;
3309 /* REGNO is now used in INCR which is below INSN, but
3310 it previously wasn't live here. If we don't mark
3311 it as live, we'll put a REG_DEAD note for it
3312 on this insn, which is incorrect. */
3313 SET_REGNO_REG_SET (pbi->reg_live, regno)
3315 /* If there are any calls between INSN and INCR, show
3316 that REGNO now crosses them. */
3317 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
3318 if (GET_CODE (temp) == CALL_INSN)
3319 REG_N_CALLS_CROSSED (regno)++;
3321 /* Invalidate alias info for Q since we just changed its value. */
3322 clear_reg_alias_info (q);
3327 /* If we haven't returned, it means we were able to make the
3328 auto-inc, so update the status. First, record that this insn
3329 has an implicit side effect. */
3331 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
3333 /* Modify the old increment-insn to simply copy
3334 the already-incremented value of our register. */
3335 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
3338 /* If that makes it a no-op (copying the register into itself) delete
3339 it so it won't appear to be a "use" and a "set" of this
3341 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
3343 /* If the original source was dead, it's dead now. */
/* Migrate each REG_DEAD note off the soon-deleted INCR; a reg other than
   INCR_REG that died there is simply no longer live.  */
3346 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
3348 remove_note (incr, note);
3349 if (XEXP (note, 0) != incr_reg)
3350 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
/* Delete INCR by turning it into a NOTE_INSN_DELETED note.  */
3353 PUT_CODE (incr, NOTE);
3354 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
3355 NOTE_SOURCE_FILE (incr) = 0;
/* Update register statistics for pseudos.  */
3358 if (regno >= FIRST_PSEUDO_REGISTER)
3360 /* Count an extra reference to the reg. When a reg is
3361 incremented, spilling it is worse, so we want to make
3362 that less likely. */
3363 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
3365 /* Count the increment as a setting of the register,
3366 even though it isn't a SET in rtl. */
3367 REG_N_SETS (regno)++;
3371 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
/* NOTE(review): return type, some declarations, early-return bodies and the
   closing brace are elided in this view; the #endif below closes the
   enclosing #ifdef AUTO_INC_DEC region.  */
3375 find_auto_inc (pbi, x, insn)
3376 struct propagate_block_info *pbi;
3380 rtx addr = XEXP (x, 0);
3381 HOST_WIDE_INT offset = 0;
3382 rtx set, y, incr, inc_val;
3384 int size = GET_MODE_SIZE (GET_MODE (x));
/* Auto-inc inside a jump's address is not attempted.  */
3386 if (GET_CODE (insn) == JUMP_INSN)
3389 /* Here we detect use of an index register which might be good for
3390 postincrement, postdecrement, preincrement, or predecrement. */
/* Peel a constant displacement: (plus REG const) -> REG with OFFSET.  */
3392 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
3393 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
3395 if (GET_CODE (addr) != REG)
3398 regno = REGNO (addr);
3400 /* Is the next use an increment that might make auto-increment? */
3401 incr = pbi->reg_next_use[regno];
/* Next use must exist and be in the same basic block as INSN.  */
3402 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
3404 set = single_set (incr);
3405 if (set == 0 || GET_CODE (set) != SET)
/* (y = SET_SRC (set) assignment elided.)  The source must be a PLUS with
   ADDR's register as one operand; the other is the increment value.  */
3409 if (GET_CODE (y) != PLUS)
3412 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
3413 inc_val = XEXP (y, 1);
3414 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
3415 inc_val = XEXP (y, 0);
/* Constant step: match step/offset against each addressing form the
   target supports (POST_INC/POST_DEC/PRE_INC/PRE_DEC/POST_MODIFY).  */
3419 if (GET_CODE (inc_val) == CONST_INT)
3421 if (HAVE_POST_INCREMENT
3422 && (INTVAL (inc_val) == size && offset == 0))
3423 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
3425 else if (HAVE_POST_DECREMENT
3426 && (INTVAL (inc_val) == -size && offset == 0))
3427 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
3429 else if (HAVE_PRE_INCREMENT
3430 && (INTVAL (inc_val) == size && offset == size))
3431 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
3433 else if (HAVE_PRE_DECREMENT
3434 && (INTVAL (inc_val) == -size && offset == -size))
3435 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
3437 else if (HAVE_POST_MODIFY_DISP && offset == 0)
3438 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3439 gen_rtx_PLUS (Pmode,
3442 insn, x, incr, addr);
/* Register step: only valid if the step register is unchanged between
   INSN and the increment insn.  */
3444 else if (GET_CODE (inc_val) == REG
3445 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
3449 if (HAVE_POST_MODIFY_REG && offset == 0)
3450 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
3451 gen_rtx_PLUS (Pmode,
3454 insn, x, incr, addr);
3458 #endif /* AUTO_INC_DEC */
3461 mark_used_reg (pbi, reg, cond, insn)
3462 struct propagate_block_info *pbi;
3464 rtx cond ATTRIBUTE_UNUSED;
3467 unsigned int regno_first, regno_last, i;
3468 int some_was_live, some_was_dead, some_not_set;
3470 regno_last = regno_first = REGNO (reg);
3471 if (regno_first < FIRST_PSEUDO_REGISTER)
3472 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
3474 /* Find out if any of this register is live after this instruction. */
3475 some_was_live = some_was_dead = 0;
3476 for (i = regno_first; i <= regno_last; ++i)
3478 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
3479 some_was_live |= needed_regno;
/* NOTE(review): this span is the interior of mark_used_reg; its header,
   parameter declarations and several statements precede this chunk (and
   the surrounding dump has dropped lines), so braces/conditions visible
   here are incomplete.  Code below is untouched; only comments added.  */
3480 some_was_dead |= ! needed_regno;
3483 /* Find out if any of the register was set this insn. */
3485 for (i = regno_first; i <= regno_last; ++i)
3486 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
3488 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3490 /* Record where each reg is used, so when the reg is set we know
3491 the next insn that uses it. */
3492 pbi->reg_next_use[regno_first] = insn;
3495 if (pbi->flags & PROP_REG_INFO)
3497 if (regno_first < FIRST_PSEUDO_REGISTER)
3499 /* If this is a register we are going to try to eliminate,
3500 don't mark it live here. If we are successful in
3501 eliminating it, it need not be live unless it is used for
3502 pseudos, in which case it will have been set live when it
3503 was allocated to the pseudos. If the register will not
3504 be eliminated, reload will set it live at that point.
3506 Otherwise, record that this function uses this register. */
3507 /* ??? The PPC backend tries to "eliminate" on the pic
3508 register to itself. This should be fixed. In the mean
3509 time, hack around it. */
3511 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
3512 && (regno_first == FRAME_POINTER_REGNUM
3513 || regno_first == ARG_POINTER_REGNUM)))
3514 for (i = regno_first; i <= regno_last; ++i)
3515 regs_ever_live[i] = 1;
3519 /* Keep track of which basic block each reg appears in. */
3521 int blocknum = pbi->bb->index;
3522 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
3523 REG_BASIC_BLOCK (regno_first) = blocknum;
3524 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
/* Seen in more than one block: demote to "global" so the register
   allocator knows it is not block-local.  */
3525 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
3527 /* Count (weighted) number of uses of each reg. */
3528 REG_FREQ (regno_first) += REG_FREQ_FROM_BB (pbi->bb);
3529 REG_N_REFS (regno_first)++;
3533 /* Record and count the insns in which a reg dies. If it is used in
3534 this insn and was dead below the insn then it dies in this insn.
3535 If it was set in this insn, we do not make a REG_DEAD note;
3536 likewise if we already made such a note. */
3537 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
3541 /* Check for the case where the register dying partially
3542 overlaps the register set by this insn. */
3543 if (regno_first != regno_last)
3544 for (i = regno_first; i <= regno_last; ++i)
3545 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
3547 /* If none of the words in X is needed, make a REG_DEAD note.
3548 Otherwise, we must make partial REG_DEAD notes. */
3549 if (! some_was_live)
3551 if ((pbi->flags & PROP_DEATH_NOTES)
3552 && ! find_regno_note (insn, REG_DEAD, regno_first))
3554 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
3556 if (pbi->flags & PROP_REG_INFO)
3557 REG_N_DEATHS (regno_first)++;
3561 /* Don't make a REG_DEAD note for a part of a register
3562 that is set in the insn. */
3563 for (i = regno_first; i <= regno_last; ++i)
3564 if (! REGNO_REG_SET_P (pbi->reg_live, i)
3565 && ! dead_or_set_regno_p (insn, i))
3567 = alloc_EXPR_LIST (REG_DEAD,
3568 gen_rtx_REG (reg_raw_mode[i], i),
3573 /* Mark the register as being live. */
3574 for (i = regno_first; i <= regno_last; ++i)
3576 SET_REGNO_REG_SET (pbi->reg_live, i);
3578 #ifdef HAVE_conditional_execution
3579 /* If this is a conditional use, record that fact. If it is later
3580 conditionally set, we'll know to kill the register. */
3581 if (cond != NULL_RTX)
3583 splay_tree_node node;
3584 struct reg_cond_life_info *rcli;
/* pbi->reg_cond_dead appears to map a register number to the condition
   under which it is dead — NOTE(review): keying by regno inferred from
   the lookup below; confirm against the pbi declaration.  */
3589 node = splay_tree_lookup (pbi->reg_cond_dead, i);
3592 /* The register was unconditionally live previously.
3593 No need to do anything. */
3597 /* The register was conditionally live previously.
3598 Subtract the new life cond from the old death cond. */
3599 rcli = (struct reg_cond_life_info *) node->value;
3600 ncond = rcli->condition;
3601 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
3603 /* If the register is now unconditionally live,
3604 remove the entry in the splay_tree. */
3605 if (ncond == const0_rtx)
3606 splay_tree_remove (pbi->reg_cond_dead, i);
3609 rcli->condition = ncond;
3610 SET_REGNO_REG_SET (pbi->reg_cond_reg,
3611 REGNO (XEXP (cond, 0)));
3617 /* The register was not previously live at all. Record
3618 the condition under which it is still dead. */
3619 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3620 rcli->condition = not_reg_cond (cond);
3621 rcli->stores = const0_rtx;
3622 rcli->orig_condition = const0_rtx;
3623 splay_tree_insert (pbi->reg_cond_dead, i,
3624 (splay_tree_value) rcli);
3626 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
3629 else if (some_was_live)
3631 /* The register may have been conditionally live previously, but
3632 is now unconditionally live. Remove it from the conditionally
3633 dead list, so that a conditional set won't cause us to think
3635 splay_tree_remove (pbi->reg_cond_dead, i);
3641 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
3642 This is done assuming the registers needed from X are those that
3643 have 1-bits in PBI->REG_LIVE.
3645 INSN is the containing instruction. If INSN is dead, this function
/* PBI carries the per-block liveness propagation state; COND is the
   conditional-execution condition in force (NULL_RTX if unconditional) —
   see the COND_EXEC handling near the end of the dispatch below.
   NOTE(review): the switch skeleton and several case labels of this
   function were dropped from this dump; code kept byte-identical.  */
3649 mark_used_regs (pbi, x, cond, insn)
3650 struct propagate_block_info *pbi;
3655 int flags = pbi->flags;
3660 code = GET_CODE (x);
3681 /* If we are clobbering a MEM, mark any registers inside the address
3683 if (GET_CODE (XEXP (x, 0)) == MEM)
3684 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
3688 /* Don't bother watching stores to mems if this is not the
3689 final pass. We'll not be deleting dead stores this round. */
3690 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3692 /* Invalidate the data for the last MEM stored, but only if MEM is
3693 something that can be stored into. */
3694 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3695 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3696 /* Needn't clear the memory set list. */
/* Walk the remembered-stores list and splice out any entry that this
   MEM reference may conflict with (anti_dependence = read after write).  */
3700 rtx temp = pbi->mem_set_list;
3701 rtx prev = NULL_RTX;
3706 next = XEXP (temp, 1);
3707 if (anti_dependence (XEXP (temp, 0), x))
3709 /* Splice temp out of the list. */
3711 XEXP (prev, 1) = next;
3713 pbi->mem_set_list = next;
3714 free_EXPR_LIST_node (temp);
3715 pbi->mem_set_list_len--;
3723 /* If the memory reference had embedded side effects (autoincrement
3724 address modes. Then we may need to kill some entries on the
3727 invalidate_mems_from_autoinc (pbi, insn);
3731 if (flags & PROP_AUTOINC)
3732 find_auto_inc (pbi, x, insn);
3737 #ifdef CLASS_CANNOT_CHANGE_MODE
3738 if (GET_CODE (SUBREG_REG (x)) == REG
3739 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
3740 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
3741 GET_MODE (SUBREG_REG (x))))
3742 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
3745 /* While we're here, optimize this case. */
3747 if (GET_CODE (x) != REG)
3752 /* See a register other than being set => mark it as needed. */
3753 mark_used_reg (pbi, x, cond, insn);
/* SET handling: the destination decides what counts as a "use".  */
3758 rtx testreg = SET_DEST (x);
3761 /* If storing into MEM, don't show it as being used. But do
3762 show the address as being used. */
3763 if (GET_CODE (testreg) == MEM)
3766 if (flags & PROP_AUTOINC)
3767 find_auto_inc (pbi, testreg, insn);
3769 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
3770 mark_used_regs (pbi, SET_SRC (x), cond, insn);
3774 /* Storing in STRICT_LOW_PART is like storing in a reg
3775 in that this SET might be dead, so ignore it in TESTREG.
3776 but in some other ways it is like using the reg.
3778 Storing in a SUBREG or a bit field is like storing the entire
3779 register in that if the register's value is not used
3780 then this SET is not needed. */
3781 while (GET_CODE (testreg) == STRICT_LOW_PART
3782 || GET_CODE (testreg) == ZERO_EXTRACT
3783 || GET_CODE (testreg) == SIGN_EXTRACT
3784 || GET_CODE (testreg) == SUBREG)
3786 #ifdef CLASS_CANNOT_CHANGE_MODE
3787 if (GET_CODE (testreg) == SUBREG
3788 && GET_CODE (SUBREG_REG (testreg)) == REG
3789 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
3790 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
3791 GET_MODE (testreg)))
3792 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
3795 /* Modifying a single register in an alternate mode
3796 does not use any of the old value. But these other
3797 ways of storing in a register do use the old value. */
3798 if (GET_CODE (testreg) == SUBREG
3799 && !((REG_BYTES (SUBREG_REG (testreg))
3800 + UNITS_PER_WORD - 1) / UNITS_PER_WORD
3801 > (REG_BYTES (testreg)
3802 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
/* Peel one wrapper and re-test; loop ends at the underlying reg/mem.  */
3807 testreg = XEXP (testreg, 0);
3810 /* If this is a store into a register or group of registers,
3811 recursively scan the value being stored. */
3813 if ((GET_CODE (testreg) == PARALLEL
3814 && GET_MODE (testreg) == BLKmode)
3815 || (GET_CODE (testreg) == REG
3816 && (regno = REGNO (testreg),
3817 ! (regno == FRAME_POINTER_REGNUM
3818 && (! reload_completed || frame_pointer_needed)))
3819 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3820 && ! (regno == HARD_FRAME_POINTER_REGNUM
3821 && (! reload_completed || frame_pointer_needed))
3823 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3824 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3829 mark_used_regs (pbi, SET_DEST (x), cond, insn);
3830 mark_used_regs (pbi, SET_SRC (x), cond, insn);
3837 case UNSPEC_VOLATILE:
3841 /* Traditional and volatile asm instructions must be considered to use
3842 and clobber all hard registers, all pseudo-registers and all of
3843 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
3845 Consider for instance a volatile asm that changes the fpu rounding
3846 mode. An insn should not be moved across this even if it only uses
3847 pseudo-regs because it might give an incorrectly rounded result.
3849 ?!? Unfortunately, marking all hard registers as live causes massive
3850 problems for the register allocator and marking all pseudos as live
3851 creates mountains of uninitialized variable warnings.
3853 So for now, just clear the memory set list and mark any regs
3854 we can find in ASM_OPERANDS as used. */
3855 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
3857 free_EXPR_LIST_list (&pbi->mem_set_list);
3858 pbi->mem_set_list_len = 0;
3861 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
3862 We can not just fall through here since then we would be confused
3863 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
3864 traditional asms unlike their normal usage. */
3865 if (code == ASM_OPERANDS)
3869 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3870 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
/* COND_EXEC: the test is always evaluated (scanned unconditionally),
   then the body is scanned under that condition.  */
3876 if (cond != NULL_RTX)
3879 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
3881 cond = COND_EXEC_TEST (x);
3882 x = COND_EXEC_CODE (x);
3886 /* We _do_not_ want to scan operands of phi nodes. Operands of
3887 a phi function are evaluated only when control reaches this
3888 block along a particular edge. Therefore, regs that appear
3889 as arguments to phi should not be added to the global live at
3897 /* Recursively scan the operands of this expression. */
3900 const char * const fmt = GET_RTX_FORMAT (code);
3903 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3907 /* Tail recursive case: save a function call level. */
3913 mark_used_regs (pbi, XEXP (x, i), cond, insn);
3915 else if (fmt[i] == 'E')
3918 for (j = 0; j < XVECLEN (x, i); j++)
3919 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
/* Try to fold the increment performed by INSN into a later use of the
   same register as a pre-increment/pre-decrement addressing mode, and
   delete INSN if that succeeds.  Uses pbi->reg_next_use to find the
   candidate insn Y.  NOTE(review): the return statements and some
   declarations of this function were dropped from this dump.  */
3928 try_pre_increment_1 (pbi, insn)
3929 struct propagate_block_info *pbi;
3932 /* Find the next use of this reg. If in same basic block,
3933 make it do pre-increment or pre-decrement if appropriate. */
3934 rtx x = single_set (insn);
/* AMOUNT is signed: positive for (reg + c), negative otherwise.  */
3935 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
3936 * INTVAL (XEXP (SET_SRC (x), 1)));
3937 int regno = REGNO (SET_DEST (x));
3938 rtx y = pbi->reg_next_use[regno];
3940 && SET_DEST (x) != stack_pointer_rtx
3941 && BLOCK_NUM (y) == BLOCK_NUM (insn)
3942 /* Don't do this if the reg dies, or gets set in y; a standard addressing
3943 mode would be better. */
3944 && ! dead_or_set_p (y, SET_DEST (x))
3945 && try_pre_increment (y, SET_DEST (x), amount))
3947 /* We have found a suitable auto-increment and already changed
3948 insn Y to do it. So flush this increment instruction. */
3949 propagate_block_delete_insn (insn);
3951 /* Count a reference to this reg for the increment insn we are
3952 deleting. When a reg is incremented, spilling it is worse,
3953 so we want to make that less likely. */
3954 if (regno >= FIRST_PSEUDO_REGISTER)
3956 REG_FREQ (regno) += REG_FREQ_FROM_BB (pbi->bb);
3957 REG_N_SETS (regno)++;
3960 /* Flush any remembered memories depending on the value of
3961 the incremented register. */
3962 invalidate_mems_from_set (pbi, SET_DEST (x));
3969 /* Try to change INSN so that it does pre-increment or pre-decrement
3970 addressing on register REG in order to add AMOUNT to REG.
3971 AMOUNT is negative for pre-decrement.
3972 Returns 1 if the change could be made.
3973 This checks all about the validity of the result of modifying INSN. */
3976 try_pre_increment (insn, reg, amount)
3978 HOST_WIDE_INT amount;
3982 /* Nonzero if we can try to make a pre-increment or pre-decrement.
3983 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
3985 /* Nonzero if we can try to make a post-increment or post-decrement.
3986 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
3987 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
3988 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
3991 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
3994 /* From the sign of increment, see which possibilities are conceivable
3995 on this target machine. */
3996 if (HAVE_PRE_INCREMENT && amount > 0)
3998 if (HAVE_POST_INCREMENT && amount > 0)
4001 if (HAVE_PRE_DECREMENT && amount < 0)
4003 if (HAVE_POST_DECREMENT && amount < 0)
4006 if (! (pre_ok || post_ok))
4009 /* It is not safe to add a side effect to a jump insn
4010 because if the incremented register is spilled and must be reloaded
4011 there would be no way to store the incremented value back in memory. */
4013 if (GET_CODE (insn) == JUMP_INSN)
/* find_use_as_address returns the using MEM, 0 if no use, or the
   sentinel (rtx) 1 when the use is unacceptable (see that function).  */
4018 use = find_use_as_address (PATTERN (insn), reg, 0);
4019 if (post_ok && (use == 0 || use == (rtx) (size_t) 1))
/* No plain (reg) use; look for (reg - amount) which post-inc covers.  */
4021 use = find_use_as_address (PATTERN (insn), reg, -amount);
4025 if (use == 0 || use == (rtx) (size_t) 1)
/* The auto-inc step must equal the access size of the memory ref.  */
4028 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4031 /* See if this combination of instruction and addressing mode exists. */
4032 if (! validate_change (insn, &XEXP (use, 0),
4033 gen_rtx_fmt_e (amount > 0
4034 ? (do_post ? POST_INC : PRE_INC)
4035 : (do_post ? POST_DEC : PRE_DEC),
4039 /* Record that this insn now has an implicit side effect on X. */
4040 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4044 #endif /* AUTO_INC_DEC */
4046 /* Find the place in the rtx X where REG is used as a memory address.
4047 Return the MEM rtx that so uses it.
4048 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4049 (plus REG (const_int PLUSCONST)).
4051 If such an address does not appear, return 0.
4052 If REG appears more than once, or is used other than in such an address,
/* ... the sentinel (rtx) (size_t) 1 is returned in that case — callers
   (try_pre_increment) test for it explicitly.  */
4056 find_use_as_address (x, reg, plusconst)
4059 HOST_WIDE_INT plusconst;
4061 enum rtx_code code = GET_CODE (x);
4062 const char * const fmt = GET_RTX_FORMAT (code);
/* Direct hit: (mem (reg)) when no displacement was requested.  */
4067 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
/* Displaced hit: (mem (plus (reg) (const_int plusconst))).  */
4070 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4071 && XEXP (XEXP (x, 0), 0) == reg
4072 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4073 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4076 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4078 /* If REG occurs inside a MEM used in a bit-field reference,
4079 that is unacceptable. */
4080 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4081 return (rtx) (size_t) 1;
4085 return (rtx) (size_t) 1;
/* Recurse over operands; any second use makes the result the sentinel.  */
4087 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4091 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4095 return (rtx) (size_t) 1;
4097 else if (fmt[i] == 'E')
4100 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4102 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4106 return (rtx) (size_t) 1;
4114 /* Write information about registers and basic blocks into FILE.
4115 This is part of making a debugging dump. */
/* Dump register set R to stream OUTF: " (nil)" for a null set, otherwise
   each member regno, with the hard-register name appended in brackets.  */
4118 dump_regset (r, outf)
4125 fputs (" (nil)", outf);
4129 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4131 fprintf (outf, " %d", i);
4132 if (i < FIRST_PSEUDO_REGISTER)
4133 fprintf (outf, " [%s]",
4138 /* Print a human-readable representation of R on the standard error
4139 stream. This function is designed to be used from within the
/* NOTE(review): the definition line of this function (debug_regset in
   the original file) was dropped from this dump; only its body remains.  */
4146 dump_regset (r, stderr);
4147 putc ('\n', stderr);
4150 /* Recompute register set/reference counts immediately prior to register
4153 This avoids problems with set/reference counts changing to/from values
4154 which have special meanings to the register allocators.
4156 Additionally, the reference counts are the primary component used by the
4157 register allocators to prioritize pseudos for allocation to hard regs.
4158 More accurate reference counts generally lead to better register allocation.
4160 F is the first insn to be scanned.
4162 LOOP_STEP denotes how much loop_depth should be incremented per
4163 loop nesting level in order to increase the ref count more for
4164 references in a loop.
4166 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4167 possibly other information which is used by the register allocators. */
/* Both parameters are unused here (marked ATTRIBUTE_UNUSED): the work is
   delegated to a local life-info update with PROP_REG_INFO.  */
4170 recompute_reg_usage (f, loop_step)
4171 rtx f ATTRIBUTE_UNUSED;
4172 int loop_step ATTRIBUTE_UNUSED;
4174 allocate_reg_life_data ();
4175 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
4178 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
4179 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
4180 of the number of registers that died. */
/* KILL nonzero means actually delete the matching notes while counting;
   zero means count only.  NOTE(review): several lines of this function
   (declarations, case labels, returns) were dropped from this dump.  */
4183 count_or_remove_death_notes (blocks, kill)
/* Walk every basic block (or only those whose bit is set in BLOCKS).  */
4189 for (i = n_basic_blocks - 1; i >= 0; --i)
4194 if (blocks && ! TEST_BIT (blocks, i))
4197 bb = BASIC_BLOCK (i);
4199 for (insn = bb->head;; insn = NEXT_INSN (insn))
/* Fixed encoding corruption: "®_NOTES" was a mangled "&REG_NOTES"
   ("&REG" eaten by HTML-entity decoding of "&reg;").  */
4203 rtx *pprev = &REG_NOTES (insn);
4208 switch (REG_NOTE_KIND (link))
4211 if (GET_CODE (XEXP (link, 0)) == REG)
4213 rtx reg = XEXP (link, 0);
/* Hard regs may span several words; count each constituent register.  */
4216 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
4219 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
/* When KILL, unlink and free this note, keeping PPREV consistent.  */
4227 rtx next = XEXP (link, 1);
4228 free_EXPR_LIST_node (link);
4229 *pprev = link = next;
4235 pprev = &XEXP (link, 1);
4242 if (insn == bb->end)
4249 /* Clear LOG_LINKS fields of insns in a selected blocks or whole chain
4250 if blocks is NULL. */
4253 clear_log_links (blocks)
/* NULL BLOCKS: walk the entire insn chain.  */
4261 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4263 free_INSN_LIST_list (&LOG_LINKS (insn));
/* Otherwise clear only the insns of the blocks selected in the bitmap.  */
4266 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4268 basic_block bb = BASIC_BLOCK (i);
4270 for (insn = bb->head; insn != NEXT_INSN (bb->end);
4271 insn = NEXT_INSN (insn))
4273 free_INSN_LIST_list (&LOG_LINKS (insn));
4277 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
4278 correspond to the hard registers, if any, set in that map. This
4279 could be done far more efficiently by having all sorts of special-cases
4280 with moving single words, but probably isn't worth the trouble. */
4283 reg_set_to_hard_reg_set (to, from)
4289 EXECUTE_IF_SET_IN_BITMAP
/* NOTE(review): a "return;" between the pseudo-register test and the
   SET_HARD_REG_BIT call was dropped from this dump — as shown, the test
   would guard the set rather than terminate the walk; confirm against
   the original file before relying on this span.  */
4292 if (i >= FIRST_PSEUDO_REGISTER)
4294 SET_HARD_REG_BIT (*to, i);