1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler.
23 It computes data flow information
24 which tells combine_instructions which insns to consider combining
25 and controls register allocation.
27 Additional data flow information that is too bulky to record
28 is generated during the analysis, and is used at that time to
29 create autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl
38 into basic blocks. It records the beginnings and ends of the
39 basic blocks in the vectors basic_block_head and basic_block_end,
40 and the number of blocks in n_basic_blocks.
42 find_basic_blocks also finds any unreachable loops
47 life_analysis is called immediately after find_basic_blocks.
48 It uses the basic block information to determine where each
49 hard or pseudo register is live.
51 ** live-register info **
53 The information about where each register is live is in two parts:
54 the REG_NOTES of insns, and the vector basic_block_live_at_start.
56 basic_block_live_at_start has an element for each basic block,
57 and the element is a bit-vector with a bit for each hard or pseudo
58 register. The bit is 1 if the register is live at the beginning
61 Two types of elements can be added to an insn's REG_NOTES.
62 A REG_DEAD note is added to an insn's REG_NOTES for any register
63 that meets both of two conditions: The value in the register is not
64 needed in subsequent insns and the insn does not replace the value in
65 the register (in the case of multi-word hard registers, the value in
66 each register must be replaced by the insn to avoid a REG_DEAD note).
68 In the vast majority of cases, an object in a REG_DEAD note will be
69 used somewhere in the insn. The (rare) exception to this is if an
70 insn uses a multi-word hard register and only some of the registers are
71 needed in subsequent insns. In that case, REG_DEAD notes will be
72 provided for those hard registers that are not subsequently needed.
73 Partial REG_DEAD notes of this type do not occur when an insn sets
74 only some of the hard registers used in such a multi-word operand;
75 omitting REG_DEAD notes for objects stored in an insn is optional and
76 the desire to do so does not justify the complexity of the partial
79 REG_UNUSED notes are added for each register that is set by the insn
80 but is unused subsequently (if every register set by the insn is unused
81 and the insn does not reference memory or have some other side-effect,
82 the insn is deleted instead). If only part of a multi-word hard
83 register is used in a subsequent insn, REG_UNUSED notes are made for
84 the parts that will not be used.
86 To determine which registers are live after any insn, one can
87 start from the beginning of the basic block and scan insns, noting
88 which registers are set by each insn and which die there.
90 ** Other actions of life_analysis **
92 life_analysis sets up the LOG_LINKS fields of insns because the
93 information needed to do so is readily available.
95 life_analysis deletes insns whose only effect is to store a value
98 life_analysis notices cases where a reference to a register as
99 a memory address can be combined with a preceding or following
100 incrementation or decrementation of the register. The separate
101 instruction to increment or decrement is deleted and the address
102 is changed to a POST_INC or similar rtx.
104 Each time an incrementing or decrementing address is created,
105 a REG_INC element is added to the insn's REG_NOTES list.
107 life_analysis fills in certain vectors containing information about
108 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
109 reg_n_calls_crosses and reg_basic_block. */
114 #include "basic-block.h"
115 #include "insn-config.h"
117 #include "hard-reg-set.h"
124 #define obstack_chunk_alloc xmalloc
125 #define obstack_chunk_free free
127 /* The contents of the current function definition are allocated
128 in this obstack, and all are freed at the end of the function.
129 For top-level functions, this is temporary_obstack.
130 Separate obstacks are made for nested functions. */
132 extern struct obstack *function_obstack;
134 /* List of labels that must never be deleted. */
135 extern rtx forced_labels;
137 /* Get the basic block number of an insn.
138 This info should not be expected to remain available
139 after the end of life_analysis. */
141 /* This is the limit of the allocated space in the following two arrays. */
143 static int max_uid_for_flow;
145 #define BLOCK_NUM(INSN) uid_block_number[INSN_UID (INSN)]
147 /* This is where the BLOCK_NUM values are really stored.
148 This is set up by find_basic_blocks and used there and in life_analysis,
151 int *uid_block_number;
153 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
155 #define INSN_VOLATILE(INSN) uid_volatile[INSN_UID (INSN)]
156 static char *uid_volatile;
158 /* Number of basic blocks in the current function. */
162 /* Maximum register number used in this function, plus one. */
166 /* Maximum number of SCRATCH rtx's used in any basic block of this
171 /* Number of SCRATCH rtx's in the current block. */
173 static int num_scratch;
175 /* Indexed by n, giving various register information */
177 varray_type reg_n_info;
179 /* Size of the reg_n_info table. */
181 unsigned int reg_n_max;
183 /* Element N is the next insn that uses (hard or pseudo) register number N
184 within the current basic block; or zero, if there is no such insn.
185 This is valid only during the final backward scan in propagate_block. */
187 static rtx *reg_next_use;
189 /* Size of a regset for the current function,
190 in (1) bytes and (2) elements. */
195 /* Element N is first insn in basic block N.
196 This info lasts until we finish compiling the function. */
198 rtx *basic_block_head;
200 /* Element N is last insn in basic block N.
201 This info lasts until we finish compiling the function. */
203 rtx *basic_block_end;
205 /* Element N indicates whether basic block N can be reached through a
208 char *basic_block_computed_jump_target;
210 /* Element N is a regset describing the registers live
211 at the start of basic block N.
212 This info lasts until we finish compiling the function. */
214 regset *basic_block_live_at_start;
216 /* Regset of regs live when calls to `setjmp'-like functions happen. */
218 regset regs_live_at_setjmp;
220 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
221 that have to go in the same hard reg.
222 The first two regs in the list are a pair, and the next two
223 are another pair, etc. */
226 /* Element N is nonzero if control can drop into basic block N
227 from the preceding basic block. Freed after life_analysis. */
229 static char *basic_block_drops_in;
231 /* Element N is depth within loops of the last insn in basic block number N.
232 Freed after life_analysis. */
234 static short *basic_block_loop_depth;
236 /* Element N nonzero if basic block N can actually be reached.
237 Vector exists only during find_basic_blocks. */
239 static char *block_live_static;
241 /* Depth within loops of basic block being scanned for lifetime analysis,
242 plus one. This is the weight attached to references to registers. */
244 static int loop_depth;
246 /* During propagate_block, this is non-zero if the value of CC0 is live. */
250 /* During propagate_block, this contains the last MEM stored into. It
251 is used to eliminate consecutive stores to the same location. */
253 static rtx last_mem_set;
255 /* Set of registers that may be eliminable. These are handled specially
256 in updating regs_ever_live. */
258 static HARD_REG_SET elim_reg_set;
260 /* Forward declarations */
261 static void find_basic_blocks_1 PROTO((rtx, rtx, int));
262 static void mark_label_ref PROTO((rtx, rtx, int));
263 static void life_analysis_1 PROTO((rtx, int));
264 static void propagate_block PROTO((regset, rtx, rtx, int,
266 static rtx flow_delete_insn PROTO((rtx));
267 static int insn_dead_p PROTO((rtx, regset, int));
268 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
269 static void mark_set_regs PROTO((regset, regset, rtx,
271 static void mark_set_1 PROTO((regset, regset, rtx,
274 static void find_auto_inc PROTO((regset, rtx, rtx));
275 static int try_pre_increment_1 PROTO((rtx));
276 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
278 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
279 void dump_flow_info PROTO((FILE *));
280 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
281 int_list_ptr *, int *, int *));
282 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
283 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
285 static void init_regset_vector PROTO ((regset *, int,
287 static void count_reg_sets_1 PROTO ((rtx));
288 static void count_reg_sets PROTO ((rtx));
289 static void count_reg_references PROTO ((rtx));
291 /* Find basic blocks of the current function.
292 F is the first insn of the function and NREGS the number of register numbers
294 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
295 be reachable. This turns on a kludge that causes the control flow
296 information to be inaccurate and not suitable for passes like GCSE. */
299 find_basic_blocks (f, nregs, file, live_reachable_p)
303 int live_reachable_p;
307 rtx nonlocal_label_list = nonlocal_label_rtx_list ();
308 int in_libcall_block = 0;
310 /* Count the basic blocks. Also find maximum insn uid value used. */
313 register RTX_CODE prev_code = JUMP_INSN;
314 register RTX_CODE code;
317 max_uid_for_flow = 0;
319 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
322 /* Track when we are inside in LIBCALL block. */
323 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
324 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
325 in_libcall_block = 1;
327 code = GET_CODE (insn);
328 if (INSN_UID (insn) > max_uid_for_flow)
329 max_uid_for_flow = INSN_UID (insn);
330 if (code == CODE_LABEL
331 || (GET_RTX_CLASS (code) == 'i'
332 && (prev_code == JUMP_INSN
333 || (prev_code == CALL_INSN
334 && (nonlocal_label_list != 0 || eh_region)
335 && ! in_libcall_block)
336 || prev_code == BARRIER)))
339 if (code == CALL_INSN && find_reg_note (insn, REG_RETVAL, NULL_RTX))
344 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
346 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
349 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
350 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
351 in_libcall_block = 0;
358 /* Leave space for insns life_analysis makes in some cases for auto-inc.
359 These cases are rare, so we don't need too much space. */
360 max_uid_for_flow += max_uid_for_flow / 10;
363 /* Allocate some tables that last till end of compiling this function
364 and some needed only in find_basic_blocks and life_analysis. */
366 basic_block_head = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
367 basic_block_end = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
368 basic_block_drops_in = (char *) xmalloc (n_basic_blocks);
369 basic_block_computed_jump_target = (char *) oballoc (n_basic_blocks);
370 basic_block_loop_depth = (short *) xmalloc (n_basic_blocks * sizeof (short));
372 = (int *) xmalloc ((max_uid_for_flow + 1) * sizeof (int));
373 uid_volatile = (char *) xmalloc (max_uid_for_flow + 1);
374 bzero (uid_volatile, max_uid_for_flow + 1);
376 find_basic_blocks_1 (f, nonlocal_label_list, live_reachable_p);
379 /* Find all basic blocks of the function whose first insn is F.
380 Store the correct data in the tables that describe the basic blocks,
381 set up the chains of references for each CODE_LABEL, and
382 delete any entire basic blocks that cannot be reached.
384 NONLOCAL_LABEL_LIST is a list of non-local labels in the function.
385 Blocks that are otherwise unreachable may be reachable with a non-local
387 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
388 be reachable. This turns on a kludge that causes the control flow
389 information to be inaccurate and not suitable for passes like GCSE. */
392 find_basic_blocks_1 (f, nonlocal_label_list, live_reachable_p)
393 rtx f, nonlocal_label_list;
394 int live_reachable_p;
398 register char *block_live = (char *) alloca (n_basic_blocks);
399 register char *block_marked = (char *) alloca (n_basic_blocks);
400 /* An array of CODE_LABELs, indexed by UID for the start of the active
401 EH handler for each insn in F. */
402 int *active_eh_region;
403 int *nested_eh_region;
404 /* List of label_refs to all labels whose addresses are taken
406 rtx label_value_list;
407 rtx x, note, eh_note;
408 enum rtx_code prev_code, code;
410 int in_libcall_block = 0;
411 int deleted_handler = 0;
414 active_eh_region = (int *) alloca ((max_uid_for_flow + 1) * sizeof (int));
415 nested_eh_region = (int *) alloca ((max_label_num () + 1) * sizeof (int));
418 label_value_list = 0;
419 block_live_static = block_live;
420 bzero (block_live, n_basic_blocks);
421 bzero (block_marked, n_basic_blocks);
422 bzero (basic_block_computed_jump_target, n_basic_blocks);
423 bzero ((char *) active_eh_region, (max_uid_for_flow + 1) * sizeof (int));
424 bzero ((char *) nested_eh_region, (max_label_num () + 1) * sizeof (int));
425 current_function_has_computed_jump = 0;
427 /* Initialize with just block 0 reachable and no blocks marked. */
428 if (n_basic_blocks > 0)
431 /* Initialize the ref chain of each label to 0. Record where all the
432 blocks start and end and their depth in loops. For each insn, record
433 the block it is in. Also mark as reachable any blocks headed by labels
434 that must not be deleted. */
436 for (eh_note = NULL_RTX, insn = f, i = -1, prev_code = JUMP_INSN, depth = 1;
437 insn; insn = NEXT_INSN (insn))
440 /* Track when we are inside in LIBCALL block. */
441 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
442 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
443 in_libcall_block = 1;
445 code = GET_CODE (insn);
448 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
450 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
454 /* A basic block starts at label, or after something that can jump. */
455 else if (code == CODE_LABEL
456 || (GET_RTX_CLASS (code) == 'i'
457 && (prev_code == JUMP_INSN
458 || (prev_code == CALL_INSN
459 && (nonlocal_label_list != 0 || eh_note)
460 && ! in_libcall_block)
461 || prev_code == BARRIER)))
463 basic_block_head[++i] = insn;
464 basic_block_end[i] = insn;
465 basic_block_loop_depth[i] = depth;
467 if (code == CODE_LABEL)
469 LABEL_REFS (insn) = insn;
470 /* Any label that cannot be deleted
471 is considered to start a reachable block. */
472 if (LABEL_PRESERVE_P (insn))
477 else if (GET_RTX_CLASS (code) == 'i')
479 basic_block_end[i] = insn;
480 basic_block_loop_depth[i] = depth;
483 if (GET_RTX_CLASS (code) == 'i')
485 /* Make a list of all labels referred to other than by jumps. */
486 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
487 if (REG_NOTE_KIND (note) == REG_LABEL
488 && XEXP (note, 0) != current_function_eh_stub_label
489 && XEXP (note, 0) != current_function_eh_old_stub_label)
490 label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
494 /* Keep a lifo list of the currently active exception notes. */
495 if (GET_CODE (insn) == NOTE)
497 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
500 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] =
501 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
503 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] = 0;
504 eh_note = gen_rtx_EXPR_LIST (VOIDmode,
507 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
508 eh_note = XEXP (eh_note, 1);
510 /* If we encounter a CALL_INSN, note which exception handler it
511 might pass control to.
513 If doing asynchronous exceptions, record the active EH handler
514 for every insn, since most insns can throw. */
516 && (asynchronous_exceptions
517 || (GET_CODE (insn) == CALL_INSN
518 && ! in_libcall_block)))
519 active_eh_region[INSN_UID (insn)] =
520 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
521 BLOCK_NUM (insn) = i;
526 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
527 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
528 in_libcall_block = 0;
531 /* During the second pass, `n_basic_blocks' is only an upper bound.
532 Only perform the sanity check for the first pass, and on the second
533 pass ensure `n_basic_blocks' is set to the correct value. */
534 if (pass == 1 && i + 1 != n_basic_blocks)
536 n_basic_blocks = i + 1;
538 /* Record which basic blocks control can drop in to. */
540 for (i = 0; i < n_basic_blocks; i++)
542 for (insn = PREV_INSN (basic_block_head[i]);
543 insn && GET_CODE (insn) == NOTE; insn = PREV_INSN (insn))
546 basic_block_drops_in[i] = insn && GET_CODE (insn) != BARRIER;
549 /* Now find which basic blocks can actually be reached
550 and put all jump insns' LABEL_REFS onto the ref-chains
551 of their target labels. */
553 if (n_basic_blocks > 0)
555 int something_marked = 1;
558 /* Pass over all blocks, marking each block that is reachable
559 and has not yet been marked.
560 Keep doing this until, in one pass, no blocks have been marked.
561 Then blocks_live and blocks_marked are identical and correct.
562 In addition, all jumps actually reachable have been marked. */
564 while (something_marked)
566 something_marked = 0;
567 for (i = 0; i < n_basic_blocks; i++)
568 if (block_live[i] && !block_marked[i])
571 something_marked = 1;
572 if (i + 1 < n_basic_blocks && basic_block_drops_in[i + 1])
573 block_live[i + 1] = 1;
574 insn = basic_block_end[i];
575 if (GET_CODE (insn) == JUMP_INSN)
576 mark_label_ref (PATTERN (insn), insn, 0);
578 /* If we have any forced labels, mark them as potentially
579 reachable from this block. */
580 for (x = forced_labels; x; x = XEXP (x, 1))
581 if (! LABEL_REF_NONLOCAL_P (x))
582 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
585 /* Now scan the insns for this block, we may need to make
586 edges for some of them to various non-obvious locations
587 (exception handlers, nonlocal labels, etc). */
588 for (insn = basic_block_head[i];
589 insn != NEXT_INSN (basic_block_end[i]);
590 insn = NEXT_INSN (insn))
592 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
594 /* References to labels in non-jumping insns have
595 REG_LABEL notes attached to them.
597 This can happen for computed gotos; we don't care
598 about them here since the values are also on the
599 label_value_list and will be marked live if we find
600 a live computed goto.
602 This can also happen when we take the address of
603 a label to pass as an argument to __throw. Note
604 throw only uses the value to determine what handler
605 should be called -- ie the label is not used as
606 a jump target, it just marks regions in the code.
608 In theory we should be able to ignore the REG_LABEL
609 notes, but we have to make sure that the label and
610 associated insns aren't marked dead, so we make
611 the block in question live and create an edge from
612 this insn to the label. This is not strictly
613 correct, but it is close enough for now.
615 See below for code that handles the eh_stub labels
617 for (note = REG_NOTES (insn);
619 note = XEXP (note, 1))
621 if (REG_NOTE_KIND (note) == REG_LABEL
622 && XEXP (note, 0) != current_function_eh_stub_label
623 && XEXP (note, 0) != current_function_eh_old_stub_label)
626 block_live[BLOCK_NUM (x)] = 1;
627 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, x),
632 /* If this is a computed jump, then mark it as
633 reaching everything on the label_value_list
634 and forced_labels list. */
635 if (computed_jump_p (insn))
637 current_function_has_computed_jump = 1;
638 for (x = label_value_list; x; x = XEXP (x, 1))
640 int b = BLOCK_NUM (XEXP (x, 0));
641 basic_block_computed_jump_target[b] = 1;
642 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
647 for (x = forced_labels; x; x = XEXP (x, 1))
649 int b = BLOCK_NUM (XEXP (x, 0));
650 basic_block_computed_jump_target[b] = 1;
651 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
657 /* If this is a CALL_INSN, then mark it as reaching
658 the active EH handler for this CALL_INSN. If
659 we're handling asynchronous exceptions mark every
660 insn as reaching the active EH handler.
662 Also mark the CALL_INSN as reaching any nonlocal
664 else if (asynchronous_exceptions
665 || (GET_CODE (insn) == CALL_INSN
666 && ! find_reg_note (insn, REG_RETVAL,
669 if (active_eh_region[INSN_UID (insn)])
673 region = active_eh_region[INSN_UID (insn)];
675 region = nested_eh_region[region])
677 ptr = get_first_handler (region);
678 for ( ; ptr ; ptr = ptr->next)
679 mark_label_ref (gen_rtx_LABEL_REF
680 (VOIDmode, ptr->handler_label), insn, 0);
683 if (!asynchronous_exceptions)
685 for (x = nonlocal_label_list;
688 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
692 /* ??? This could be made smarter:
693 in some cases it's possible to tell that
694 certain calls will not do a nonlocal goto.
696 For example, if the nested functions that
697 do the nonlocal gotos do not have their
698 addresses taken, then only calls to those
699 functions or to other nested functions that
700 use them could possibly do nonlocal gotos. */
704 /* We know something about the structure of the function
705 __throw in libgcc2.c. It is the only function that ever
706 contains eh_stub labels. It modifies its return address
707 so that the last block returns to one of the eh_stub labels
708 within it. So we have to make additional edges in the
710 if (i + 1 == n_basic_blocks
711 && current_function_eh_stub_label != 0)
713 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
714 current_function_eh_stub_label),
715 basic_block_end[i], 0);
716 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
717 current_function_eh_old_stub_label),
718 basic_block_end[i], 0);
723 /* This should never happen. If it does that means we've computed an
724 incorrect flow graph, which can lead to aborts/crashes later in the
725 compiler or incorrect code generation.
727 We used to try and continue here, but that's just asking for trouble
728 later during the compile or at runtime. It's easier to debug the
729 problem here than later! */
730 for (i = 1; i < n_basic_blocks; i++)
731 if (block_live[i] && ! basic_block_drops_in[i]
732 && GET_CODE (basic_block_head[i]) == CODE_LABEL
733 && LABEL_REFS (basic_block_head[i]) == basic_block_head[i])
736 /* Now delete the code for any basic blocks that can't be reached.
737 They can occur because jump_optimize does not recognize
738 unreachable loops as unreachable. */
741 for (i = 0; i < n_basic_blocks; i++)
746 /* Delete the insns in a (non-live) block. We physically delete
747 every non-note insn except the start and end (so
748 basic_block_head/end needn't be updated), we turn the latter
749 into NOTE_INSN_DELETED notes.
750 We use to "delete" the insns by turning them into notes, but
751 we may be deleting lots of insns that subsequent passes would
752 otherwise have to process. Secondly, lots of deleted blocks in
753 a row can really slow down propagate_block since it will
754 otherwise process insn-turned-notes multiple times when it
755 looks for loop begin/end notes. */
756 if (basic_block_head[i] != basic_block_end[i])
758 /* It would be quicker to delete all of these with a single
759 unchaining, rather than one at a time, but we need to keep
761 insn = NEXT_INSN (basic_block_head[i]);
762 while (insn != basic_block_end[i])
764 if (GET_CODE (insn) == BARRIER)
766 else if (GET_CODE (insn) != NOTE)
767 insn = flow_delete_insn (insn);
769 insn = NEXT_INSN (insn);
772 insn = basic_block_head[i];
773 if (GET_CODE (insn) != NOTE)
775 /* Turn the head into a deleted insn note. */
776 if (GET_CODE (insn) == BARRIER)
779 /* If the head of this block is a CODE_LABEL, then it might
780 be the label for an exception handler which can't be
783 We need to remove the label from the exception_handler_label
784 list and remove the associated NOTE_EH_REGION_BEG and
785 NOTE_EH_REGION_END notes. */
786 if (GET_CODE (insn) == CODE_LABEL)
788 rtx x, *prev = &exception_handler_labels;
790 for (x = exception_handler_labels; x; x = XEXP (x, 1))
792 if (XEXP (x, 0) == insn)
794 /* Found a match, splice this label out of the
797 XEXP (x, 1) = NULL_RTX;
798 XEXP (x, 0) = NULL_RTX;
800 /* Remove the handler from all regions */
801 remove_handler (insn);
809 PUT_CODE (insn, NOTE);
810 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
811 NOTE_SOURCE_FILE (insn) = 0;
813 insn = basic_block_end[i];
814 if (GET_CODE (insn) != NOTE)
816 /* Turn the tail into a deleted insn note. */
817 if (GET_CODE (insn) == BARRIER)
819 PUT_CODE (insn, NOTE);
820 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
821 NOTE_SOURCE_FILE (insn) = 0;
823 /* BARRIERs are between basic blocks, not part of one.
824 Delete a BARRIER if the preceding jump is deleted.
825 We cannot alter a BARRIER into a NOTE
826 because it is too short; but we can really delete
827 it because it is not part of a basic block. */
828 if (NEXT_INSN (insn) != 0
829 && GET_CODE (NEXT_INSN (insn)) == BARRIER)
830 delete_insn (NEXT_INSN (insn));
832 /* Each time we delete some basic blocks,
833 see if there is a jump around them that is
834 being turned into a no-op. If so, delete it. */
836 if (block_live[i - 1])
839 for (j = i + 1; j < n_basic_blocks; j++)
843 insn = basic_block_end[i - 1];
844 if (GET_CODE (insn) == JUMP_INSN
845 /* An unconditional jump is the only possibility
846 we must check for, since a conditional one
847 would make these blocks live. */
848 && simplejump_p (insn)
849 && (label = XEXP (SET_SRC (PATTERN (insn)), 0), 1)
850 && INSN_UID (label) != 0
851 && BLOCK_NUM (label) == j)
855 /* The deleted blocks still show up in the cfg,
856 so we must set basic_block_drops_in for blocks
857 I to J inclusive to keep the cfg accurate. */
858 for (k = i; k <= j; k++)
859 basic_block_drops_in[k] = 1;
861 PUT_CODE (insn, NOTE);
862 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
863 NOTE_SOURCE_FILE (insn) = 0;
864 if (GET_CODE (NEXT_INSN (insn)) != BARRIER)
866 delete_insn (NEXT_INSN (insn));
872 /* If we deleted an exception handler, we may have EH region
873 begin/end blocks to remove as well. */
875 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
876 if (GET_CODE (insn) == NOTE)
878 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
879 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
881 int num = CODE_LABEL_NUMBER (insn);
882 /* A NULL handler indicates a region is no longer needed */
883 if (get_first_handler (num) == NULL)
885 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
886 NOTE_SOURCE_FILE (insn) = 0;
891 /* There are pathological cases where one function calling hundreds of
892 nested inline functions can generate lots and lots of unreachable
893 blocks that jump can't delete. Since we don't use sparse matrices
894 a lot of memory will be needed to compile such functions.
895 Implementing sparse matrices is a fair bit of work and it is not
896 clear that they win more than they lose (we don't want to
897 unnecessarily slow down compilation of normal code). By making
898 another pass for the pathological case, we can greatly speed up
899 their compilation without hurting normal code. This works because
900 all the insns in the unreachable blocks have either been deleted or
902 Note that we're talking about reducing memory usage by 10's of
903 megabytes and reducing compilation time by several minutes. */
904 /* ??? The choice of when to make another pass is a bit arbitrary,
905 and was derived from empirical data. */
910 n_basic_blocks -= deleted;
911 /* `n_basic_blocks' may not be correct at this point: two previously
912 separate blocks may now be merged. That's ok though as we
913 recalculate it during the second pass. It certainly can't be
914 any larger than the current value. */
920 /* Record INSN's block number as BB. */
923 set_block_num (insn, bb)
927 if (INSN_UID (insn) >= max_uid_for_flow)
929 /* Add one-eighth the size so we don't keep calling xrealloc. */
930 max_uid_for_flow = INSN_UID (insn) + (INSN_UID (insn) + 7) / 8;
931 uid_block_number = (int *)
932 xrealloc (uid_block_number, (max_uid_for_flow + 1) * sizeof (int));
934 BLOCK_NUM (insn) = bb;
938 /* Subroutines of find_basic_blocks. */
940 /* Check expression X for label references;
941 if one is found, add INSN to the label's chain of references.
943 CHECKDUP means check for and avoid creating duplicate references
944 from the same insn. Such duplicates do no serious harm but
945 can slow life analysis. CHECKDUP is set only when duplicates
949 mark_label_ref (x, insn, checkdup)
953 register RTX_CODE code;
957 /* We can be called with NULL when scanning label_value_list. */
962 if (code == LABEL_REF)
964 register rtx label = XEXP (x, 0);
966 if (GET_CODE (label) != CODE_LABEL)
968 /* If the label was never emitted, this insn is junk,
969 but avoid a crash trying to refer to BLOCK_NUM (label).
970 This can happen as a result of a syntax error
971 and a diagnostic has already been printed. */
972 if (INSN_UID (label) == 0)
974 CONTAINING_INSN (x) = insn;
975 /* if CHECKDUP is set, check for duplicate ref from same insn
978 for (y = LABEL_REFS (label); y != label; y = LABEL_NEXTREF (y))
979 if (CONTAINING_INSN (y) == insn)
981 LABEL_NEXTREF (x) = LABEL_REFS (label);
982 LABEL_REFS (label) = x;
983 block_live_static[BLOCK_NUM (label)] = 1;
987 fmt = GET_RTX_FORMAT (code);
988 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
991 mark_label_ref (XEXP (x, i), insn, 0);
995 for (j = 0; j < XVECLEN (x, i); j++)
996 mark_label_ref (XVECEXP (x, i, j), insn, 1);
1001 /* Delete INSN by patching it out.
1002 Return the next insn. */
1005 flow_delete_insn (insn)
1008 /* ??? For the moment we assume we don't have to watch for NULLs here
1009 since the start/end of basic blocks aren't deleted like this. */
1010 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1011 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1012 return NEXT_INSN (insn);
1015 /* Perform data flow analysis.
1016 F is the first insn of the function and NREGS the number of register numbers
1020 life_analysis (f, nregs, file)
1025 #ifdef ELIMINABLE_REGS
1027 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
1030 /* Record which registers will be eliminated. We use this in
1033 CLEAR_HARD_REG_SET (elim_reg_set);
1035 #ifdef ELIMINABLE_REGS
1036 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
1037 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
1039 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
1042 life_analysis_1 (f, nregs);
1044 dump_flow_info (file);
1046 free_basic_block_vars (1);
1049 /* Free the variables allocated by find_basic_blocks.
1051 KEEP_HEAD_END_P is non-zero if basic_block_head and basic_block_end
1052 are not to be freed. */
1055 free_basic_block_vars (keep_head_end_p)
1056 int keep_head_end_p;
1058 if (basic_block_drops_in)
1060 free (basic_block_drops_in);
1061 /* Tell dump_flow_info this isn't available anymore. */
1062 basic_block_drops_in = 0;
1064 if (basic_block_loop_depth)
1066 free (basic_block_loop_depth);
1067 basic_block_loop_depth = 0;
1069 if (uid_block_number)
1071 free (uid_block_number);
1072 uid_block_number = 0;
1076 free (uid_volatile);
1080 if (! keep_head_end_p && basic_block_head)
1082 free (basic_block_head);
1083 basic_block_head = 0;
1084 free (basic_block_end);
1085 basic_block_end = 0;
1089 /* Determine which registers are live at the start of each
1090 basic block of the function whose first insn is F.
1091 NREGS is the number of registers used in F.
1092 We allocate the vector basic_block_live_at_start
1093 and the regsets that it points to, and fill them with the data.
1094 regset_size and regset_bytes are also set here. */
1097 life_analysis_1 (f, nregs)
1103 /* For each basic block, a bitmask of regs
1104 live on exit from the block. */
1105 regset *basic_block_live_at_end;
1106 /* For each basic block, a bitmask of regs
1107 live on entry to a successor-block of this block.
1108 If this does not match basic_block_live_at_end,
1109 that must be updated, and the block must be rescanned. */
1110 regset *basic_block_new_live_at_end;
1111 /* For each basic block, a bitmask of regs
1112 whose liveness at the end of the basic block
1113 can make a difference in which regs are live on entry to the block.
1114 These are the regs that are set within the basic block,
1115 possibly excluding those that are used after they are set. */
1116 regset *basic_block_significant;
1120 struct obstack flow_obstack;
1122 gcc_obstack_init (&flow_obstack);
1126 bzero (regs_ever_live, sizeof regs_ever_live);
1128 /* Allocate and zero out many data structures
1129 that will record the data from lifetime analysis. */
1131 allocate_for_life_analysis ();
1133 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
1134 bzero ((char *) reg_next_use, nregs * sizeof (rtx));
1136 /* Set up several regset-vectors used internally within this function.
1137 Their meanings are documented above, with their declarations. */
1139 basic_block_live_at_end
1140 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1142 /* Don't use alloca since that leads to a crash rather than an error message
1143 if there isn't enough space.
1144 Don't use oballoc since we may need to allocate other things during
1145 this function on the temporary obstack. */
1146 init_regset_vector (basic_block_live_at_end, n_basic_blocks, &flow_obstack);
1148 basic_block_new_live_at_end
1149 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1150 init_regset_vector (basic_block_new_live_at_end, n_basic_blocks,
1153 basic_block_significant
1154 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1155 init_regset_vector (basic_block_significant, n_basic_blocks, &flow_obstack);
1157 /* Record which insns refer to any volatile memory
1158 or for any reason can't be deleted just because they are dead stores.
1159 Also, delete any insns that copy a register to itself. */
1161 for (insn = f; insn; insn = NEXT_INSN (insn))
1163 enum rtx_code code1 = GET_CODE (insn);
1164 if (code1 == CALL_INSN)
1165 INSN_VOLATILE (insn) = 1;
1166 else if (code1 == INSN || code1 == JUMP_INSN)
1168 /* Delete (in effect) any obvious no-op moves. */
1169 if (GET_CODE (PATTERN (insn)) == SET
1170 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
1171 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
1172 && (REGNO (SET_DEST (PATTERN (insn)))
1173 == REGNO (SET_SRC (PATTERN (insn))))
1174 /* Insns carrying these notes are useful later on. */
1175 && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
1177 PUT_CODE (insn, NOTE);
1178 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1179 NOTE_SOURCE_FILE (insn) = 0;
1181 /* Delete (in effect) any obvious no-op moves. */
1182 else if (GET_CODE (PATTERN (insn)) == SET
1183 && GET_CODE (SET_DEST (PATTERN (insn))) == SUBREG
1184 && GET_CODE (SUBREG_REG (SET_DEST (PATTERN (insn)))) == REG
1185 && GET_CODE (SET_SRC (PATTERN (insn))) == SUBREG
1186 && GET_CODE (SUBREG_REG (SET_SRC (PATTERN (insn)))) == REG
1187 && (REGNO (SUBREG_REG (SET_DEST (PATTERN (insn))))
1188 == REGNO (SUBREG_REG (SET_SRC (PATTERN (insn)))))
1189 && SUBREG_WORD (SET_DEST (PATTERN (insn))) ==
1190 SUBREG_WORD (SET_SRC (PATTERN (insn)))
1191 /* Insns carrying these notes are useful later on. */
1192 && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
1194 PUT_CODE (insn, NOTE);
1195 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1196 NOTE_SOURCE_FILE (insn) = 0;
1198 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
1200 /* If nothing but SETs of registers to themselves,
1201 this insn can also be deleted. */
1202 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1204 rtx tem = XVECEXP (PATTERN (insn), 0, i);
1206 if (GET_CODE (tem) == USE
1207 || GET_CODE (tem) == CLOBBER)
1210 if (GET_CODE (tem) != SET
1211 || GET_CODE (SET_DEST (tem)) != REG
1212 || GET_CODE (SET_SRC (tem)) != REG
1213 || REGNO (SET_DEST (tem)) != REGNO (SET_SRC (tem)))
1217 if (i == XVECLEN (PATTERN (insn), 0)
1218 /* Insns carrying these notes are useful later on. */
1219 && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
1221 PUT_CODE (insn, NOTE);
1222 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1223 NOTE_SOURCE_FILE (insn) = 0;
1226 INSN_VOLATILE (insn) = volatile_refs_p (PATTERN (insn));
1228 else if (GET_CODE (PATTERN (insn)) != USE)
1229 INSN_VOLATILE (insn) = volatile_refs_p (PATTERN (insn));
1230 /* A SET that makes space on the stack cannot be dead.
1231 (Such SETs occur only for allocating variable-size data,
1232 so they will always have a PLUS or MINUS according to the
1233 direction of stack growth.)
1234 Even if this function never uses this stack pointer value,
1235 signal handlers do! */
1236 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
1237 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1238 #ifdef STACK_GROWS_DOWNWARD
1239 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
1241 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1243 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
1244 INSN_VOLATILE (insn) = 1;
1248 if (n_basic_blocks > 0)
1249 #ifdef EXIT_IGNORE_STACK
1250 if (! EXIT_IGNORE_STACK
1251 || (! FRAME_POINTER_REQUIRED
1252 && ! current_function_calls_alloca
1253 && flag_omit_frame_pointer))
1256 /* If exiting needs the right stack value,
1257 consider the stack pointer live at the end of the function. */
1258 SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1],
1259 STACK_POINTER_REGNUM);
1260 SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1261 STACK_POINTER_REGNUM);
1264 /* Mark the frame pointer is needed at the end of the function. If
1265 we end up eliminating it, it will be removed from the live list
1266 of each basic block by reload. */
1268 if (n_basic_blocks > 0)
1270 SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1],
1271 FRAME_POINTER_REGNUM);
1272 SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1273 FRAME_POINTER_REGNUM);
1274 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1275 /* If they are different, also mark the hard frame pointer as live */
1276 SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1],
1277 HARD_FRAME_POINTER_REGNUM);
1278 SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1279 HARD_FRAME_POINTER_REGNUM);
1283 /* Mark all global registers and all registers used by the epilogue
1284 as being live at the end of the function since they may be
1285 referenced by our caller. */
1287 if (n_basic_blocks > 0)
1288 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1290 #ifdef EPILOGUE_USES
1291 || EPILOGUE_USES (i)
1295 SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1], i);
1296 SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1], i);
1299 /* Propagate life info through the basic blocks
1300 around the graph of basic blocks.
1302 This is a relaxation process: each time a new register
1303 is live at the end of the basic block, we must scan the block
1304 to determine which registers are, as a consequence, live at the beginning
1305 of that block. These registers must then be marked live at the ends
1306 of all the blocks that can transfer control to that block.
1307 The process continues until it reaches a fixed point. */
1314 for (i = n_basic_blocks - 1; i >= 0; i--)
1316 int consider = first_pass;
1317 int must_rescan = first_pass;
1322 /* Set CONSIDER if this block needs thinking about at all
1323 (that is, if the regs live now at the end of it
1324 are not the same as were live at the end of it when
1325 we last thought about it).
1326 Set must_rescan if it needs to be thought about
1327 instruction by instruction (that is, if any additional
1328 reg that is live at the end now but was not live there before
1329 is one of the significant regs of this basic block). */
1331 EXECUTE_IF_AND_COMPL_IN_REG_SET
1332 (basic_block_new_live_at_end[i],
1333 basic_block_live_at_end[i], 0, j,
1336 if (REGNO_REG_SET_P (basic_block_significant[i], j))
1347 /* The live_at_start of this block may be changing,
1348 so another pass will be required after this one. */
1353 /* No complete rescan needed;
1354 just record those variables newly known live at end
1355 as live at start as well. */
1356 IOR_AND_COMPL_REG_SET (basic_block_live_at_start[i],
1357 basic_block_new_live_at_end[i],
1358 basic_block_live_at_end[i]);
1360 IOR_AND_COMPL_REG_SET (basic_block_live_at_end[i],
1361 basic_block_new_live_at_end[i],
1362 basic_block_live_at_end[i]);
1366 /* Update the basic_block_live_at_start
1367 by propagation backwards through the block. */
1368 COPY_REG_SET (basic_block_live_at_end[i],
1369 basic_block_new_live_at_end[i]);
1370 COPY_REG_SET (basic_block_live_at_start[i],
1371 basic_block_live_at_end[i]);
1372 propagate_block (basic_block_live_at_start[i],
1373 basic_block_head[i], basic_block_end[i], 0,
1374 first_pass ? basic_block_significant[i]
1380 register rtx jump, head;
1382 /* Update the basic_block_new_live_at_end's of the block
1383 that falls through into this one (if any). */
1384 head = basic_block_head[i];
1385 if (basic_block_drops_in[i])
1386 IOR_REG_SET (basic_block_new_live_at_end[i-1],
1387 basic_block_live_at_start[i]);
1389 /* Update the basic_block_new_live_at_end's of
1390 all the blocks that jump to this one. */
1391 if (GET_CODE (head) == CODE_LABEL)
1392 for (jump = LABEL_REFS (head);
1394 jump = LABEL_NEXTREF (jump))
1396 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
1397 IOR_REG_SET (basic_block_new_live_at_end[from_block],
1398 basic_block_live_at_start[i]);
1408 /* The only pseudos that are live at the beginning of the function are
1409 those that were not set anywhere in the function. local-alloc doesn't
1410 know how to handle these correctly, so mark them as not local to any
1413 if (n_basic_blocks > 0)
1414 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[0],
1415 FIRST_PSEUDO_REGISTER, i,
1417 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1420 /* Now the life information is accurate.
1421 Make one more pass over each basic block
1422 to delete dead stores, create autoincrement addressing
1423 and record how many times each register is used, is set, or dies.
1425 To save time, we operate directly in basic_block_live_at_end[i],
1426 thus destroying it (in fact, converting it into a copy of
1427 basic_block_live_at_start[i]). This is ok now because
1428 basic_block_live_at_end[i] is no longer used past this point. */
1432 for (i = 0; i < n_basic_blocks; i++)
1434 propagate_block (basic_block_live_at_end[i],
1435 basic_block_head[i], basic_block_end[i], 1,
1443 /* Something live during a setjmp should not be put in a register
1444 on certain machines which restore regs from stack frames
1445 rather than from the jmpbuf.
1446 But we don't need to do this for the user's variables, since
1447 ANSI says only volatile variables need this. */
1448 #ifdef LONGJMP_RESTORE_FROM_STACK
1449 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1450 FIRST_PSEUDO_REGISTER, i,
1452 if (regno_reg_rtx[i] != 0
1453 && ! REG_USERVAR_P (regno_reg_rtx[i]))
1455 REG_LIVE_LENGTH (i) = -1;
1456 REG_BASIC_BLOCK (i) = -1;
1462 /* We have a problem with any pseudoreg that
1463 lives across the setjmp. ANSI says that if a
1464 user variable does not change in value
1465 between the setjmp and the longjmp, then the longjmp preserves it.
1466 This includes longjmp from a place where the pseudo appears dead.
1467 (In principle, the value still exists if it is in scope.)
1468 If the pseudo goes in a hard reg, some other value may occupy
1469 that hard reg where this pseudo is dead, thus clobbering the pseudo.
1470 Conclusion: such a pseudo must not go in a hard reg. */
1471 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1472 FIRST_PSEUDO_REGISTER, i,
1474 if (regno_reg_rtx[i] != 0)
1476 REG_LIVE_LENGTH (i) = -1;
1477 REG_BASIC_BLOCK (i) = -1;
1482 free_regset_vector (basic_block_live_at_end, n_basic_blocks);
1483 free_regset_vector (basic_block_new_live_at_end, n_basic_blocks);
1484 free_regset_vector (basic_block_significant, n_basic_blocks);
1485 basic_block_live_at_end = (regset *)0;
1486 basic_block_new_live_at_end = (regset *)0;
1487 basic_block_significant = (regset *)0;
1489 obstack_free (&flow_obstack, NULL_PTR);
1492 /* Subroutines of life analysis. */
1494 /* Allocate the permanent data structures that represent the results
1495 of life analysis. Not static since used also for stupid life analysis. */
1498 allocate_for_life_analysis ()
1502 /* Recalculate the register space, in case it has grown. Old style
1503 vector oriented regsets would set regset_{size,bytes} here also. */
1504 allocate_reg_info (max_regno, FALSE, FALSE);
1506 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
1507 information, explicitly reset it here. The allocation should have
1508 already happened on the previous reg_scan pass. Make sure in case
1509 some more registers were allocated. */
1510 for (i = 0; i < max_regno; i++)
1513 basic_block_live_at_start
1514 = (regset *) oballoc (n_basic_blocks * sizeof (regset));
1515 init_regset_vector (basic_block_live_at_start, n_basic_blocks,
1518 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
1519 CLEAR_REG_SET (regs_live_at_setjmp);
1522 /* Make each element of VECTOR point at a regset. The vector has
1523 NELTS elements, and space is allocated from the ALLOC_OBSTACK
1527 init_regset_vector (vector, nelts, alloc_obstack)
1530 struct obstack *alloc_obstack;
1534 for (i = 0; i < nelts; i++)
1536 vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
1537 CLEAR_REG_SET (vector[i]);
1541 /* Release any additional space allocated for each element of VECTOR point
1542 other than the regset header itself. The vector has NELTS elements. */
1545 free_regset_vector (vector, nelts)
1551 for (i = 0; i < nelts; i++)
1552 FREE_REG_SET (vector[i]);
1555 /* Compute the registers live at the beginning of a basic block
1556 from those live at the end.
1558 When called, OLD contains those live at the end.
1559 On return, it contains those live at the beginning.
1560 FIRST and LAST are the first and last insns of the basic block.
1562 FINAL is nonzero if we are doing the final pass which is not
1563 for computing the life info (since that has already been done)
1564 but for acting on it. On this pass, we delete dead stores,
1565 set up the logical links and dead-variables lists of instructions,
1566 and merge instructions for autoincrement and autodecrement addresses.
1568 SIGNIFICANT is nonzero only the first time for each basic block.
1569 If it is nonzero, it points to a regset in which we store
1570 a 1 for each register that is set within the block.
1572 BNUM is the number of the basic block. */
1575 propagate_block (old, first, last, final, significant, bnum)
1576 register regset old;
1588 /* The following variables are used only if FINAL is nonzero. */
1589 /* This vector gets one element for each reg that has been live
1590 at any point in the basic block that has been scanned so far.
1591 SOMETIMES_MAX says how many elements are in use so far. */
1592 register int *regs_sometimes_live;
1593 int sometimes_max = 0;
1594 /* This regset has 1 for each reg that we have seen live so far.
1595 It and REGS_SOMETIMES_LIVE are updated together. */
1598 /* The loop depth may change in the middle of a basic block. Since we
1599 scan from end to beginning, we start with the depth at the end of the
1600 current basic block, and adjust as we pass ends and starts of loops. */
1601 loop_depth = basic_block_loop_depth[bnum];
1603 dead = ALLOCA_REG_SET ();
1604 live = ALLOCA_REG_SET ();
1609 /* Include any notes at the end of the block in the scan.
1610 This is in case the block ends with a call to setjmp. */
1612 while (NEXT_INSN (last) != 0 && GET_CODE (NEXT_INSN (last)) == NOTE)
1614 /* Look for loop boundaries, we are going forward here. */
1615 last = NEXT_INSN (last);
1616 if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_BEG)
1618 else if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_END)
1627 maxlive = ALLOCA_REG_SET ();
1628 COPY_REG_SET (maxlive, old);
1629 regs_sometimes_live = (int *) alloca (max_regno * sizeof (int));
1631 /* Process the regs live at the end of the block.
1632 Enter them in MAXLIVE and REGS_SOMETIMES_LIVE.
1633 Also mark them as not local to any one basic block. */
1634 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1636 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1637 regs_sometimes_live[sometimes_max] = i;
1642 /* Scan the block an insn at a time from end to beginning. */
1644 for (insn = last; ; insn = prev)
1646 prev = PREV_INSN (insn);
1648 if (GET_CODE (insn) == NOTE)
1650 /* Look for loop boundaries, remembering that we are going
1652 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1654 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1657 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
1658 Abort now rather than setting register status incorrectly. */
1659 if (loop_depth == 0)
1662 /* If this is a call to `setjmp' et al,
1663 warn if any non-volatile datum is live. */
1665 if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
1666 IOR_REG_SET (regs_live_at_setjmp, old);
1669 /* Update the life-status of regs for this insn.
1670 First DEAD gets which regs are set in this insn
1671 then LIVE gets which regs are used in this insn.
1672 Then the regs live before the insn
1673 are those live after, with DEAD regs turned off,
1674 and then LIVE regs turned on. */
1676 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1679 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1681 = (insn_dead_p (PATTERN (insn), old, 0)
1682 /* Don't delete something that refers to volatile storage! */
1683 && ! INSN_VOLATILE (insn));
1685 = (insn_is_dead && note != 0
1686 && libcall_dead_p (PATTERN (insn), old, note, insn));
1688 /* If an instruction consists of just dead store(s) on final pass,
1689 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
1690 We could really delete it with delete_insn, but that
1691 can cause trouble for first or last insn in a basic block. */
1692 if (final && insn_is_dead)
1694 PUT_CODE (insn, NOTE);
1695 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1696 NOTE_SOURCE_FILE (insn) = 0;
1698 /* CC0 is now known to be dead. Either this insn used it,
1699 in which case it doesn't anymore, or clobbered it,
1700 so the next insn can't use it. */
1703 /* If this insn is copying the return value from a library call,
1704 delete the entire library call. */
1705 if (libcall_is_dead)
1707 rtx first = XEXP (note, 0);
1709 while (INSN_DELETED_P (first))
1710 first = NEXT_INSN (first);
1715 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
1716 NOTE_SOURCE_FILE (p) = 0;
1722 CLEAR_REG_SET (dead);
1723 CLEAR_REG_SET (live);
1725 /* See if this is an increment or decrement that can be
1726 merged into a following memory address. */
1729 register rtx x = single_set (insn);
1731 /* Does this instruction increment or decrement a register? */
1733 && GET_CODE (SET_DEST (x)) == REG
1734 && (GET_CODE (SET_SRC (x)) == PLUS
1735 || GET_CODE (SET_SRC (x)) == MINUS)
1736 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1737 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1738 /* Ok, look for a following memory ref we can combine with.
1739 If one is found, change the memory ref to a PRE_INC
1740 or PRE_DEC, cancel this insn, and return 1.
1741 Return 0 if nothing has been done. */
1742 && try_pre_increment_1 (insn))
1745 #endif /* AUTO_INC_DEC */
1747 /* If this is not the final pass, and this insn is copying the
1748 value of a library call and it's dead, don't scan the
1749 insns that perform the library call, so that the call's
1750 arguments are not marked live. */
1751 if (libcall_is_dead)
1753 /* Mark the dest reg as `significant'. */
1754 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
1756 insn = XEXP (note, 0);
1757 prev = PREV_INSN (insn);
1759 else if (GET_CODE (PATTERN (insn)) == SET
1760 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1761 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1762 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1763 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1764 /* We have an insn to pop a constant amount off the stack.
1765 (Such insns use PLUS regardless of the direction of the stack,
1766 and any insn to adjust the stack by a constant is always a pop.)
1767 These insns, if not dead stores, have no effect on life. */
1771 /* LIVE gets the regs used in INSN;
1772 DEAD gets those set by it. Dead insns don't make anything
1775 mark_set_regs (old, dead, PATTERN (insn),
1776 final ? insn : NULL_RTX, significant);
1778 /* If an insn doesn't use CC0, it becomes dead since we
1779 assume that every insn clobbers it. So show it dead here;
1780 mark_used_regs will set it live if it is referenced. */
1784 mark_used_regs (old, live, PATTERN (insn), final, insn);
1786 /* Sometimes we may have inserted something before INSN (such as
1787 a move) when we make an auto-inc. So ensure we will scan
1790 prev = PREV_INSN (insn);
1793 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
1799 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1801 note = XEXP (note, 1))
1802 if (GET_CODE (XEXP (note, 0)) == USE)
1803 mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
1806 /* Each call clobbers all call-clobbered regs that are not
1807 global or fixed. Note that the function-value reg is a
1808 call-clobbered reg, and mark_set_regs has already had
1809 a chance to handle it. */
1811 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1812 if (call_used_regs[i] && ! global_regs[i]
1814 SET_REGNO_REG_SET (dead, i);
1816 /* The stack ptr is used (honorarily) by a CALL insn. */
1817 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
1819 /* Calls may also reference any of the global registers,
1820 so they are made live. */
1821 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1823 mark_used_regs (old, live,
1824 gen_rtx_REG (reg_raw_mode[i], i),
1827 /* Calls also clobber memory. */
1831 /* Update OLD for the registers used or set. */
1832 AND_COMPL_REG_SET (old, dead);
1833 IOR_REG_SET (old, live);
1835 if (GET_CODE (insn) == CALL_INSN && final)
1837 /* Any regs live at the time of a call instruction
1838 must not go in a register clobbered by calls.
1839 Find all regs now live and record this for them. */
1841 register int *p = regs_sometimes_live;
1843 for (i = 0; i < sometimes_max; i++, p++)
1844 if (REGNO_REG_SET_P (old, *p))
1845 REG_N_CALLS_CROSSED (*p)++;
1849 /* On final pass, add any additional sometimes-live regs
1850 into MAXLIVE and REGS_SOMETIMES_LIVE.
1851 Also update counts of how many insns each reg is live at. */
1858 EXECUTE_IF_AND_COMPL_IN_REG_SET
1859 (live, maxlive, 0, regno,
1861 regs_sometimes_live[sometimes_max++] = regno;
1862 SET_REGNO_REG_SET (maxlive, regno);
1865 p = regs_sometimes_live;
1866 for (i = 0; i < sometimes_max; i++)
1869 if (REGNO_REG_SET_P (old, regno))
1870 REG_LIVE_LENGTH (regno)++;
1879 FREE_REG_SET (dead);
1880 FREE_REG_SET (live);
1882 FREE_REG_SET (maxlive);
1884 if (num_scratch > max_scratch)
1885 max_scratch = num_scratch;
1888 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
1889 (SET expressions whose destinations are registers dead after the insn).
1890 NEEDED is the regset that says which regs are alive after the insn.
1892 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL. */
1895 insn_dead_p (x, needed, call_ok)
1900 enum rtx_code code = GET_CODE (x);
1902 /* If setting something that's a reg or part of one,
1903 see if that register's altered value will be live. */
1907 rtx r = SET_DEST (x);
1909 /* A SET that is a subroutine call cannot be dead. */
1910 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
1914 if (GET_CODE (r) == CC0)
1918 if (GET_CODE (r) == MEM && last_mem_set && ! MEM_VOLATILE_P (r)
1919 && rtx_equal_p (r, last_mem_set))
1922 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
1923 || GET_CODE (r) == ZERO_EXTRACT)
1926 if (GET_CODE (r) == REG)
1928 int regno = REGNO (r);
1930 /* Don't delete insns to set global regs. */
1931 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1932 /* Make sure insns to set frame pointer aren't deleted. */
1933 || regno == FRAME_POINTER_REGNUM
1934 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1935 || regno == HARD_FRAME_POINTER_REGNUM
1937 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1938 /* Make sure insns to set arg pointer are never deleted
1939 (if the arg pointer isn't fixed, there will be a USE for
1940 it, so we can treat it normally). */
1941 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
1943 || REGNO_REG_SET_P (needed, regno))
1946 /* If this is a hard register, verify that subsequent words are
1948 if (regno < FIRST_PSEUDO_REGISTER)
1950 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
1953 if (REGNO_REG_SET_P (needed, regno+n))
1961 /* If performing several activities,
1962 insn is dead if each activity is individually dead.
1963 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
1964 that's inside a PARALLEL doesn't make the insn worth keeping. */
1965 else if (code == PARALLEL)
1967 int i = XVECLEN (x, 0);
1969 for (i--; i >= 0; i--)
1970 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
1971 && GET_CODE (XVECEXP (x, 0, i)) != USE
1972 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok))
1978 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
1979 is not necessarily true for hard registers. */
1980 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
1981 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
1982 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
1985 /* We do not check other CLOBBER or USE here. An insn consisting of just
1986 a CLOBBER or just a USE should not be deleted. */
1990 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
1991 return 1 if the entire library call is dead.
1992 This is true if X copies a register (hard or pseudo)
1993 and if the hard return reg of the call insn is dead.
1994 (The caller should have tested the destination of X already for death.)
1996 If this insn doesn't just copy a register, then we don't
1997 have an ordinary libcall. In that case, cse could not have
1998 managed to substitute the source for the dest later on,
1999 so we can assume the libcall is dead.
2001 NEEDED is the bit vector of pseudoregs live before this insn.
2002 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
2005 libcall_dead_p (x, needed, note, insn)
2011 register RTX_CODE code = GET_CODE (x);
2015 register rtx r = SET_SRC (x);
2016 if (GET_CODE (r) == REG)
2018 rtx call = XEXP (note, 0);
2021 /* Find the call insn. */
2022 while (call != insn && GET_CODE (call) != CALL_INSN)
2023 call = NEXT_INSN (call);
2025 /* If there is none, do nothing special,
2026 since ordinary death handling can understand these insns. */
2030 /* See if the hard reg holding the value is dead.
2031 If this is a PARALLEL, find the call within it. */
2032 call = PATTERN (call);
2033 if (GET_CODE (call) == PARALLEL)
2035 for (i = XVECLEN (call, 0) - 1; i >= 0; i--)
2036 if (GET_CODE (XVECEXP (call, 0, i)) == SET
2037 && GET_CODE (SET_SRC (XVECEXP (call, 0, i))) == CALL)
2040 /* This may be a library call that is returning a value
2041 via invisible pointer. Do nothing special, since
2042 ordinary death handling can understand these insns. */
2046 call = XVECEXP (call, 0, i);
2049 return insn_dead_p (call, needed, 1);
2055 /* Return 1 if register REGNO was used before it was set, i.e. if it is
2056 live at function entry. Don't count global register variables, variables
2057 in registers that can be used for function arg passing, or variables in
2058 fixed hard registers. */
2061 regno_uninitialized (regno)
2064 if (n_basic_blocks == 0
2065 || (regno < FIRST_PSEUDO_REGISTER
2066 && (global_regs[regno]
2067 || fixed_regs[regno]
2068 || FUNCTION_ARG_REGNO_P (regno))))
2071 return REGNO_REG_SET_P (basic_block_live_at_start[0], regno);
2074 /* 1 if register REGNO was alive at a place where `setjmp' was called
2075 and was set more than once or is an argument.
2076 Such regs may be clobbered by `longjmp'. */
2079 regno_clobbered_at_setjmp (regno)
2082 if (n_basic_blocks == 0)
2085 return ((REG_N_SETS (regno) > 1
2086 || REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2087 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2090 /* Process the registers that are set within X.
2091 Their bits are set to 1 in the regset DEAD,
2092 because they are dead prior to this insn.
2094 If INSN is nonzero, it is the insn being processed
2095 and the fact that it is nonzero implies this is the FINAL pass
2096 in propagate_block. In this case, various info about register
2097 usage is stored, LOG_LINKS fields of insns are set up. */
2100 mark_set_regs (needed, dead, x, insn, significant)
2107 register RTX_CODE code = GET_CODE (x);
2109 if (code == SET || code == CLOBBER)
2110 mark_set_1 (needed, dead, x, insn, significant);
2111 else if (code == PARALLEL)
2114 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2116 code = GET_CODE (XVECEXP (x, 0, i));
2117 if (code == SET || code == CLOBBER)
2118 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
2123 /* Process a single SET rtx, X. */
2126 mark_set_1 (needed, dead, x, insn, significant)
2134 register rtx reg = SET_DEST (x);
2136 /* Modifying just one hardware register of a multi-reg value
2137 or just a byte field of a register
2138 does not mean the value from before this insn is now dead.
2139 But it does mean liveness of that register at the end of the block
2142 Within mark_set_1, however, we treat it as if the register is
2143 indeed modified. mark_used_regs will, however, also treat this
2144 register as being used. Thus, we treat these insns as setting a
2145 new value for the register as a function of its old value. This
2146 cases LOG_LINKS to be made appropriately and this will help combine. */
2148 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
2149 || GET_CODE (reg) == SIGN_EXTRACT
2150 || GET_CODE (reg) == STRICT_LOW_PART)
2151 reg = XEXP (reg, 0);
2153 /* If we are writing into memory or into a register mentioned in the
2154 address of the last thing stored into memory, show we don't know
2155 what the last store was. If we are writing memory, save the address
2156 unless it is volatile. */
2157 if (GET_CODE (reg) == MEM
2158 || (GET_CODE (reg) == REG
2159 && last_mem_set != 0 && reg_overlap_mentioned_p (reg, last_mem_set)))
2162 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2163 /* There are no REG_INC notes for SP, so we can't assume we'll see
2164 everything that invalidates it. To be safe, don't eliminate any
2165 stores though SP; none of them should be redundant anyway. */
2166 && ! reg_mentioned_p (stack_pointer_rtx, reg))
2169 if (GET_CODE (reg) == REG
2170 && (regno = REGNO (reg), regno != FRAME_POINTER_REGNUM)
2171 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2172 && regno != HARD_FRAME_POINTER_REGNUM
2174 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2175 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2177 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
2178 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
2180 int some_needed = REGNO_REG_SET_P (needed, regno);
2181 int some_not_needed = ! some_needed;
2183 /* Mark it as a significant register for this basic block. */
2185 SET_REGNO_REG_SET (significant, regno);
2187 /* Mark it as dead before this insn. */
2188 SET_REGNO_REG_SET (dead, regno);
2190 /* A hard reg in a wide mode may really be multiple registers.
2191 If so, mark all of them just like the first. */
2192 if (regno < FIRST_PSEUDO_REGISTER)
2196 /* Nothing below is needed for the stack pointer; get out asap.
2197 Eg, log links aren't needed, since combine won't use them. */
2198 if (regno == STACK_POINTER_REGNUM)
2201 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2204 int regno_n = regno + n;
2205 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2207 SET_REGNO_REG_SET (significant, regno_n);
2209 SET_REGNO_REG_SET (dead, regno_n);
2210 some_needed |= needed_regno;
2211 some_not_needed |= ! needed_regno;
2214 /* Additional data to record if this is the final pass. */
2217 register rtx y = reg_next_use[regno];
2218 register int blocknum = BLOCK_NUM (insn);
2220 /* If this is a hard reg, record this function uses the reg. */
2222 if (regno < FIRST_PSEUDO_REGISTER)
2225 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
2227 for (i = regno; i < endregno; i++)
2229 /* The next use is no longer "next", since a store
2231 reg_next_use[i] = 0;
2233 regs_ever_live[i] = 1;
2239 /* The next use is no longer "next", since a store
2241 reg_next_use[regno] = 0;
2243 /* Keep track of which basic blocks each reg appears in. */
2245 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2246 REG_BASIC_BLOCK (regno) = blocknum;
2247 else if (REG_BASIC_BLOCK (regno) != blocknum)
2248 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2250 /* Count (weighted) references, stores, etc. This counts a
2251 register twice if it is modified, but that is correct. */
2252 REG_N_SETS (regno)++;
2254 REG_N_REFS (regno) += loop_depth;
2256 /* The insns where a reg is live are normally counted
2257 elsewhere, but we want the count to include the insn
2258 where the reg is set, and the normal counting mechanism
2259 would not count it. */
2260 REG_LIVE_LENGTH (regno)++;
2263 if (! some_not_needed)
2265 /* Make a logical link from the next following insn
2266 that uses this register, back to this insn.
2267 The following insns have already been processed.
2269 We don't build a LOG_LINK for hard registers containing
2270 in ASM_OPERANDs. If these registers get replaced,
2271 we might wind up changing the semantics of the insn,
2272 even if reload can make what appear to be valid assignments
2274 if (y && (BLOCK_NUM (y) == blocknum)
2275 && (regno >= FIRST_PSEUDO_REGISTER
2276 || asm_noperands (PATTERN (y)) < 0))
2278 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
2280 else if (! some_needed)
2282 /* Note that dead stores have already been deleted when possible
2283 If we get here, we have found a dead store that cannot
2284 be eliminated (because the same insn does something useful).
2285 Indicate this by marking the reg being set as dying here. */
2287 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2288 REG_N_DEATHS (REGNO (reg))++;
2292 /* This is a case where we have a multi-word hard register
2293 and some, but not all, of the words of the register are
2294 needed in subsequent insns. Write REG_UNUSED notes
2295 for those parts that were not needed. This case should
2300 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
2302 if (!REGNO_REG_SET_P (needed, regno + i))
2304 = gen_rtx_EXPR_LIST (REG_UNUSED,
2305 gen_rtx_REG (reg_raw_mode[regno + i],
2311 else if (GET_CODE (reg) == REG)
2312 reg_next_use[regno] = 0;
2314 /* If this is the last pass and this is a SCRATCH, show it will be dying
2315 here and count it. */
2316 else if (GET_CODE (reg) == SCRATCH && insn != 0)
2319 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2326 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
2330 find_auto_inc (needed, x, insn)
2335 rtx addr = XEXP (x, 0);
2336 HOST_WIDE_INT offset = 0;
2339 /* Here we detect use of an index register which might be good for
2340 postincrement, postdecrement, preincrement, or predecrement. */
2342 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2343 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
2345 if (GET_CODE (addr) == REG)
2348 register int size = GET_MODE_SIZE (GET_MODE (x));
2351 int regno = REGNO (addr);
2353 /* Is the next use an increment that might make auto-increment? */
2354 if ((incr = reg_next_use[regno]) != 0
2355 && (set = single_set (incr)) != 0
2356 && GET_CODE (set) == SET
2357 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
2358 /* Can't add side effects to jumps; if reg is spilled and
2359 reloaded, there's no way to store back the altered value. */
2360 && GET_CODE (insn) != JUMP_INSN
2361 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
2362 && XEXP (y, 0) == addr
2363 && GET_CODE (XEXP (y, 1)) == CONST_INT
2365 #ifdef HAVE_POST_INCREMENT
2366 || (INTVAL (XEXP (y, 1)) == size && offset == 0)
2368 #ifdef HAVE_POST_DECREMENT
2369 || (INTVAL (XEXP (y, 1)) == - size && offset == 0)
2371 #ifdef HAVE_PRE_INCREMENT
2372 || (INTVAL (XEXP (y, 1)) == size && offset == size)
2374 #ifdef HAVE_PRE_DECREMENT
2375 || (INTVAL (XEXP (y, 1)) == - size && offset == - size)
2378 /* Make sure this reg appears only once in this insn. */
2379 && (use = find_use_as_address (PATTERN (insn), addr, offset),
2380 use != 0 && use != (rtx) 1))
2382 rtx q = SET_DEST (set);
2383 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
2384 ? (offset ? PRE_INC : POST_INC)
2385 : (offset ? PRE_DEC : POST_DEC));
2387 if (dead_or_set_p (incr, addr))
2389 /* This is the simple case. Try to make the auto-inc. If
2390 we can't, we are done. Otherwise, we will do any
2391 needed updates below. */
2392 if (! validate_change (insn, &XEXP (x, 0),
2393 gen_rtx_fmt_e (inc_code, Pmode, addr),
2397 else if (GET_CODE (q) == REG
2398 /* PREV_INSN used here to check the semi-open interval
2400 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
2401 /* We must also check for sets of q as q may be
2402 a call clobbered hard register and there may
2403 be a call between PREV_INSN (insn) and incr. */
2404 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
2406 /* We have *p followed sometime later by q = p+size.
2407 Both p and q must be live afterward,
2408 and q is not used between INSN and its assignment.
2409 Change it to q = p, ...*q..., q = q+size.
2410 Then fall into the usual case. */
2414 emit_move_insn (q, addr);
2415 insns = get_insns ();
2418 /* If anything in INSNS have UID's that don't fit within the
2419 extra space we allocate earlier, we can't make this auto-inc.
2420 This should never happen. */
2421 for (temp = insns; temp; temp = NEXT_INSN (temp))
2423 if (INSN_UID (temp) > max_uid_for_flow)
2425 BLOCK_NUM (temp) = BLOCK_NUM (insn);
2428 /* If we can't make the auto-inc, or can't make the
2429 replacement into Y, exit. There's no point in making
2430 the change below if we can't do the auto-inc and doing
2431 so is not correct in the pre-inc case. */
2433 validate_change (insn, &XEXP (x, 0),
2434 gen_rtx_fmt_e (inc_code, Pmode, q),
2436 validate_change (incr, &XEXP (y, 0), q, 1);
2437 if (! apply_change_group ())
2440 /* We now know we'll be doing this change, so emit the
2441 new insn(s) and do the updates. */
2442 emit_insns_before (insns, insn);
2444 if (basic_block_head[BLOCK_NUM (insn)] == insn)
2445 basic_block_head[BLOCK_NUM (insn)] = insns;
2447 /* INCR will become a NOTE and INSN won't contain a
2448 use of ADDR. If a use of ADDR was just placed in
2449 the insn before INSN, make that the next use.
2450 Otherwise, invalidate it. */
2451 if (GET_CODE (PREV_INSN (insn)) == INSN
2452 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
2453 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
2454 reg_next_use[regno] = PREV_INSN (insn);
2456 reg_next_use[regno] = 0;
2461 /* REGNO is now used in INCR which is below INSN, but
2462 it previously wasn't live here. If we don't mark
2463 it as needed, we'll put a REG_DEAD note for it
2464 on this insn, which is incorrect. */
2465 SET_REGNO_REG_SET (needed, regno);
2467 /* If there are any calls between INSN and INCR, show
2468 that REGNO now crosses them. */
2469 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
2470 if (GET_CODE (temp) == CALL_INSN)
2471 REG_N_CALLS_CROSSED (regno)++;
2476 /* If we haven't returned, it means we were able to make the
2477 auto-inc, so update the status. First, record that this insn
2478 has an implicit side effect. */
2481 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
2483 /* Modify the old increment-insn to simply copy
2484 the already-incremented value of our register. */
2485 if (! validate_change (incr, &SET_SRC (set), addr, 0))
2488 /* If that makes it a no-op (copying the register into itself) delete
2489 it so it won't appear to be a "use" and a "set" of this
2491 if (SET_DEST (set) == addr)
2493 PUT_CODE (incr, NOTE);
2494 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
2495 NOTE_SOURCE_FILE (incr) = 0;
2498 if (regno >= FIRST_PSEUDO_REGISTER)
2500 /* Count an extra reference to the reg. When a reg is
2501 incremented, spilling it is worse, so we want to make
2502 that less likely. */
2503 REG_N_REFS (regno) += loop_depth;
2505 /* Count the increment as a setting of the register,
2506 even though it isn't a SET in rtl. */
2507 REG_N_SETS (regno)++;
2512 #endif /* AUTO_INC_DEC */
2514 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
2515 This is done assuming the registers needed from X
2516 are those that have 1-bits in NEEDED.
2518 On the final pass, FINAL is 1. This means try for autoincrement
2519 and count the uses and deaths of each pseudo-reg.
2521 INSN is the containing instruction. If INSN is dead, this function is not
2525 mark_used_regs (needed, live, x, final, insn)
2532 register RTX_CODE code;
2537 code = GET_CODE (x);
2558 /* If we are clobbering a MEM, mark any registers inside the address
2560 if (GET_CODE (XEXP (x, 0)) == MEM)
2561 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
2565 /* Invalidate the data for the last MEM stored, but only if MEM is
2566 something that can be stored into. */
2567 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2568 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2569 ; /* needn't clear last_mem_set */
2575 find_auto_inc (needed, x, insn);
2580 if (GET_CODE (SUBREG_REG (x)) == REG
2581 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
2582 && (GET_MODE_SIZE (GET_MODE (x))
2583 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2584 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
2586 /* While we're here, optimize this case. */
2589 /* In case the SUBREG is not of a register, don't optimize */
2590 if (GET_CODE (x) != REG)
2592 mark_used_regs (needed, live, x, final, insn);
2596 /* ... fall through ... */
2599 /* See a register other than being set
2600 => mark it as needed. */
2604 int some_needed = REGNO_REG_SET_P (needed, regno);
2605 int some_not_needed = ! some_needed;
2607 SET_REGNO_REG_SET (live, regno);
2609 /* A hard reg in a wide mode may really be multiple registers.
2610 If so, mark all of them just like the first. */
2611 if (regno < FIRST_PSEUDO_REGISTER)
2615 /* For stack ptr or fixed arg pointer,
2616 nothing below can be necessary, so waste no more time. */
2617 if (regno == STACK_POINTER_REGNUM
2618 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2619 || regno == HARD_FRAME_POINTER_REGNUM
2621 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2622 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2624 || regno == FRAME_POINTER_REGNUM)
2626 /* If this is a register we are going to try to eliminate,
2627 don't mark it live here. If we are successful in
2628 eliminating it, it need not be live unless it is used for
2629 pseudos, in which case it will have been set live when
2630 it was allocated to the pseudos. If the register will not
2631 be eliminated, reload will set it live at that point. */
2633 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
2634 regs_ever_live[regno] = 1;
2637 /* No death notes for global register variables;
2638 their values are live after this function exits. */
2639 if (global_regs[regno])
2642 reg_next_use[regno] = insn;
2646 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2649 int regno_n = regno + n;
2650 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2652 SET_REGNO_REG_SET (live, regno_n);
2653 some_needed |= needed_regno;
2654 some_not_needed |= ! needed_regno;
2659 /* Record where each reg is used, so when the reg
2660 is set we know the next insn that uses it. */
2662 reg_next_use[regno] = insn;
2664 if (regno < FIRST_PSEUDO_REGISTER)
2666 /* If a hard reg is being used,
2667 record that this function does use it. */
2669 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
2673 regs_ever_live[regno + --i] = 1;
2678 /* Keep track of which basic block each reg appears in. */
2680 register int blocknum = BLOCK_NUM (insn);
2682 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2683 REG_BASIC_BLOCK (regno) = blocknum;
2684 else if (REG_BASIC_BLOCK (regno) != blocknum)
2685 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2687 /* Count (weighted) number of uses of each reg. */
2689 REG_N_REFS (regno) += loop_depth;
2692 /* Record and count the insns in which a reg dies.
2693 If it is used in this insn and was dead below the insn
2694 then it dies in this insn. If it was set in this insn,
2695 we do not make a REG_DEAD note; likewise if we already
2696 made such a note. */
2699 && ! dead_or_set_p (insn, x)
2701 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
2705 /* Check for the case where the register dying partially
2706 overlaps the register set by this insn. */
2707 if (regno < FIRST_PSEUDO_REGISTER
2708 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
2710 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2712 some_needed |= dead_or_set_regno_p (insn, regno + n);
2715 /* If none of the words in X is needed, make a REG_DEAD
2716 note. Otherwise, we must make partial REG_DEAD notes. */
2720 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
2721 REG_N_DEATHS (regno)++;
2727 /* Don't make a REG_DEAD note for a part of a register
2728 that is set in the insn. */
2730 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
2732 if (!REGNO_REG_SET_P (needed, regno + i)
2733 && ! dead_or_set_regno_p (insn, regno + i))
2735 = gen_rtx_EXPR_LIST (REG_DEAD,
2736 gen_rtx_REG (reg_raw_mode[regno + i],
2747 register rtx testreg = SET_DEST (x);
2750 /* If storing into MEM, don't show it as being used. But do
2751 show the address as being used. */
2752 if (GET_CODE (testreg) == MEM)
2756 find_auto_inc (needed, testreg, insn);
2758 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
2759 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2763 /* Storing in STRICT_LOW_PART is like storing in a reg
2764 in that this SET might be dead, so ignore it in TESTREG.
2765 but in some other ways it is like using the reg.
2767 Storing in a SUBREG or a bit field is like storing the entire
2768 register in that if the register's value is not used
2769 then this SET is not needed. */
2770 while (GET_CODE (testreg) == STRICT_LOW_PART
2771 || GET_CODE (testreg) == ZERO_EXTRACT
2772 || GET_CODE (testreg) == SIGN_EXTRACT
2773 || GET_CODE (testreg) == SUBREG)
2775 if (GET_CODE (testreg) == SUBREG
2776 && GET_CODE (SUBREG_REG (testreg)) == REG
2777 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
2778 && (GET_MODE_SIZE (GET_MODE (testreg))
2779 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
2780 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
2782 /* Modifying a single register in an alternate mode
2783 does not use any of the old value. But these other
2784 ways of storing in a register do use the old value. */
2785 if (GET_CODE (testreg) == SUBREG
2786 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
2791 testreg = XEXP (testreg, 0);
2794 /* If this is a store into a register,
2795 recursively scan the value being stored. */
2797 if (GET_CODE (testreg) == REG
2798 && (regno = REGNO (testreg), regno != FRAME_POINTER_REGNUM)
2799 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2800 && regno != HARD_FRAME_POINTER_REGNUM
2802 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2803 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2806 /* We used to exclude global_regs here, but that seems wrong.
2807 Storing in them is like storing in mem. */
2809 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2811 mark_used_regs (needed, live, SET_DEST (x), final, insn);
2818 /* If exiting needs the right stack value, consider this insn as
2819 using the stack pointer. In any event, consider it as using
2820 all global registers and all registers used by return. */
2822 #ifdef EXIT_IGNORE_STACK
2823 if (! EXIT_IGNORE_STACK
2824 || (! FRAME_POINTER_REQUIRED
2825 && ! current_function_calls_alloca
2826 && flag_omit_frame_pointer))
2828 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2830 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2832 #ifdef EPILOGUE_USES
2833 || EPILOGUE_USES (i)
2836 SET_REGNO_REG_SET (live, i);
2843 /* Recursively scan the operands of this expression. */
2846 register char *fmt = GET_RTX_FORMAT (code);
2849 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2853 /* Tail recursive case: save a function call level. */
2859 mark_used_regs (needed, live, XEXP (x, i), final, insn);
2861 else if (fmt[i] == 'E')
2864 for (j = 0; j < XVECLEN (x, i); j++)
2865 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
2874 try_pre_increment_1 (insn)
2877 /* Find the next use of this reg. If in same basic block,
2878 make it do pre-increment or pre-decrement if appropriate. */
2879 rtx x = single_set (insn);
2880 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
2881 * INTVAL (XEXP (SET_SRC (x), 1)));
2882 int regno = REGNO (SET_DEST (x));
2883 rtx y = reg_next_use[regno];
2885 && BLOCK_NUM (y) == BLOCK_NUM (insn)
2886 /* Don't do this if the reg dies, or gets set in y; a standard addressing
2887 mode would be better. */
2888 && ! dead_or_set_p (y, SET_DEST (x))
2889 && try_pre_increment (y, SET_DEST (x), amount))
2891 /* We have found a suitable auto-increment
2892 and already changed insn Y to do it.
2893 So flush this increment-instruction. */
2894 PUT_CODE (insn, NOTE);
2895 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2896 NOTE_SOURCE_FILE (insn) = 0;
2897 /* Count a reference to this reg for the increment
2898 insn we are deleting. When a reg is incremented.
2899 spilling it is worse, so we want to make that
2901 if (regno >= FIRST_PSEUDO_REGISTER)
2903 REG_N_REFS (regno) += loop_depth;
2904 REG_N_SETS (regno)++;
2911 /* Try to change INSN so that it does pre-increment or pre-decrement
2912 addressing on register REG in order to add AMOUNT to REG.
2913 AMOUNT is negative for pre-decrement.
2914 Returns 1 if the change could be made.
2915 This checks all about the validity of the result of modifying INSN. */
2918 try_pre_increment (insn, reg, amount)
2920 HOST_WIDE_INT amount;
2924 /* Nonzero if we can try to make a pre-increment or pre-decrement.
2925 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
2927 /* Nonzero if we can try to make a post-increment or post-decrement.
2928 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
2929 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
2930 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
2933 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
2936 /* From the sign of increment, see which possibilities are conceivable
2937 on this target machine. */
2938 #ifdef HAVE_PRE_INCREMENT
2942 #ifdef HAVE_POST_INCREMENT
2947 #ifdef HAVE_PRE_DECREMENT
2951 #ifdef HAVE_POST_DECREMENT
2956 if (! (pre_ok || post_ok))
2959 /* It is not safe to add a side effect to a jump insn
2960 because if the incremented register is spilled and must be reloaded
2961 there would be no way to store the incremented value back in memory. */
2963 if (GET_CODE (insn) == JUMP_INSN)
2968 use = find_use_as_address (PATTERN (insn), reg, 0);
2969 if (post_ok && (use == 0 || use == (rtx) 1))
2971 use = find_use_as_address (PATTERN (insn), reg, -amount);
2975 if (use == 0 || use == (rtx) 1)
2978 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
2981 /* See if this combination of instruction and addressing mode exists. */
2982 if (! validate_change (insn, &XEXP (use, 0),
2983 gen_rtx_fmt_e (amount > 0
2984 ? (do_post ? POST_INC : PRE_INC)
2985 : (do_post ? POST_DEC : PRE_DEC),
2989 /* Record that this insn now has an implicit side effect on X. */
2990 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
2994 #endif /* AUTO_INC_DEC */
2996 /* Find the place in the rtx X where REG is used as a memory address.
2997 Return the MEM rtx that so uses it.
2998 If PLUSCONST is nonzero, search instead for a memory address equivalent to
2999 (plus REG (const_int PLUSCONST)).
3001 If such an address does not appear, return 0.
3002 If REG appears more than once, or is used other than in such an address,
3006 find_use_as_address (x, reg, plusconst)
3009 HOST_WIDE_INT plusconst;
3011 enum rtx_code code = GET_CODE (x);
3012 char *fmt = GET_RTX_FORMAT (code);
3014 register rtx value = 0;
3017 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
3020 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
3021 && XEXP (XEXP (x, 0), 0) == reg
3022 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3023 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
3026 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
3028 /* If REG occurs inside a MEM used in a bit-field reference,
3029 that is unacceptable. */
3030 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
3031 return (rtx) (HOST_WIDE_INT) 1;
3035 return (rtx) (HOST_WIDE_INT) 1;
3037 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3041 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
3045 return (rtx) (HOST_WIDE_INT) 1;
3050 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3052 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
3056 return (rtx) (HOST_WIDE_INT) 1;
3064 /* Write information about registers and basic blocks into FILE.
3065 This is part of making a debugging dump. */
3068 dump_flow_info (file)
3072 static char *reg_class_names[] = REG_CLASS_NAMES;
3074 fprintf (file, "%d registers.\n", max_regno);
3076 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3079 enum reg_class class, altclass;
3080 fprintf (file, "\nRegister %d used %d times across %d insns",
3081 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
3082 if (REG_BASIC_BLOCK (i) >= 0)
3083 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
3085 fprintf (file, "; set %d time%s", REG_N_SETS (i),
3086 (REG_N_SETS (i) == 1) ? "" : "s");
3087 if (REG_USERVAR_P (regno_reg_rtx[i]))
3088 fprintf (file, "; user var");
3089 if (REG_N_DEATHS (i) != 1)
3090 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
3091 if (REG_N_CALLS_CROSSED (i) == 1)
3092 fprintf (file, "; crosses 1 call");
3093 else if (REG_N_CALLS_CROSSED (i))
3094 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
3095 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
3096 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
3097 class = reg_preferred_class (i);
3098 altclass = reg_alternate_class (i);
3099 if (class != GENERAL_REGS || altclass != ALL_REGS)
3101 if (altclass == ALL_REGS || class == ALL_REGS)
3102 fprintf (file, "; pref %s", reg_class_names[(int) class]);
3103 else if (altclass == NO_REGS)
3104 fprintf (file, "; %s or none", reg_class_names[(int) class]);
3106 fprintf (file, "; pref %s, else %s",
3107 reg_class_names[(int) class],
3108 reg_class_names[(int) altclass]);
3110 if (REGNO_POINTER_FLAG (i))
3111 fprintf (file, "; pointer");
3112 fprintf (file, ".\n");
3114 fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
3115 for (i = 0; i < n_basic_blocks; i++)
3117 register rtx head, jump;
3119 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
3121 INSN_UID (basic_block_head[i]),
3122 INSN_UID (basic_block_end[i]));
3123 /* The control flow graph's storage is freed
3124 now when flow_analysis returns.
3125 Don't try to print it if it is gone. */
3126 if (basic_block_drops_in)
3128 fprintf (file, "Reached from blocks: ");
3129 head = basic_block_head[i];
3130 if (GET_CODE (head) == CODE_LABEL)
3131 for (jump = LABEL_REFS (head);
3133 jump = LABEL_NEXTREF (jump))
3135 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
3136 fprintf (file, " %d", from_block);
3138 if (basic_block_drops_in[i])
3139 fprintf (file, " previous");
3141 fprintf (file, "\nRegisters live at start:");
3142 for (regno = 0; regno < max_regno; regno++)
3143 if (REGNO_REG_SET_P (basic_block_live_at_start[i], regno))
3144 fprintf (file, " %d", regno);
3145 fprintf (file, "\n");
3147 fprintf (file, "\n");
3151 /* Like print_rtl, but also print out live information for the start of each
3155 print_rtl_with_bb (outf, rtx_first)
3159 extern int flag_dump_unnumbered;
3160 register rtx tmp_rtx;
3163 fprintf (outf, "(nil)\n");
3168 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
3169 int max_uid = get_max_uid ();
3170 int *start = (int *) alloca (max_uid * sizeof (int));
3171 int *end = (int *) alloca (max_uid * sizeof (int));
3172 char *in_bb_p = (char *) alloca (max_uid * sizeof (enum bb_state));
3174 for (i = 0; i < max_uid; i++)
3176 start[i] = end[i] = -1;
3177 in_bb_p[i] = NOT_IN_BB;
3180 for (i = n_basic_blocks-1; i >= 0; i--)
3183 start[INSN_UID (basic_block_head[i])] = i;
3184 end[INSN_UID (basic_block_end[i])] = i;
3185 for (x = basic_block_head[i]; x != NULL_RTX; x = NEXT_INSN (x))
3187 in_bb_p[ INSN_UID(x)]
3188 = (in_bb_p[ INSN_UID(x)] == NOT_IN_BB)
3189 ? IN_ONE_BB : IN_MULTIPLE_BB;
3190 if (x == basic_block_end[i])
3195 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
3197 if ((bb = start[INSN_UID (tmp_rtx)]) >= 0)
3199 fprintf (outf, ";; Start of basic block %d, registers live:",
3202 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[bb], 0, i,
3204 fprintf (outf, " %d", i);
3205 if (i < FIRST_PSEUDO_REGISTER)
3206 fprintf (outf, " [%s]",
3212 if (in_bb_p[ INSN_UID(tmp_rtx)] == NOT_IN_BB
3213 && GET_CODE (tmp_rtx) != NOTE
3214 && GET_CODE (tmp_rtx) != BARRIER)
3215 fprintf (outf, ";; Insn is not within a basic block\n");
3216 else if (in_bb_p[ INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
3217 fprintf (outf, ";; Insn is in multiple basic blocks\n");
3219 print_rtl_single (outf, tmp_rtx);
3221 if ((bb = end[INSN_UID (tmp_rtx)]) >= 0)
3222 fprintf (outf, ";; End of basic block %d\n", bb);
3224 if (! flag_dump_unnumbered
3225 || GET_CODE (tmp_rtx) != NOTE || NOTE_LINE_NUMBER (tmp_rtx) < 0)
3232 /* Integer list support. */
3234 /* Allocate a node from list *HEAD_PTR. */
3237 alloc_int_list_node (head_ptr)
3238 int_list_block **head_ptr;
3240 struct int_list_block *first_blk = *head_ptr;
3242 if (first_blk == NULL || first_blk->nodes_left <= 0)
3244 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
3245 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
3246 first_blk->next = *head_ptr;
3247 *head_ptr = first_blk;
3250 first_blk->nodes_left--;
3251 return &first_blk->nodes[first_blk->nodes_left];
3254 /* Pointer to head of predecessor/successor block list. */
3255 static int_list_block *pred_int_list_blocks;
3257 /* Add a new node to integer list LIST with value VAL.
3258 LIST is a pointer to a list object to allow for different implementations.
3259 If *LIST is initially NULL, the list is empty.
3260 The caller must not care whether the element is added to the front or
3261 to the end of the list (to allow for different implementations). */
3264 add_int_list_node (blk_list, list, val)
3265 int_list_block **blk_list;
3269 int_list_ptr p = alloc_int_list_node (blk_list);
3277 /* Free the blocks of lists at BLK_LIST. */
3280 free_int_list (blk_list)
3281 int_list_block **blk_list;
3283 int_list_block *p, *next;
3285 for (p = *blk_list; p != NULL; p = next)
3291 /* Mark list as empty for the next function we compile. */
3295 /* Predecessor/successor computation. */
3297 /* Mark PRED_BB a precessor of SUCC_BB,
3298 and conversely SUCC_BB a successor of PRED_BB. */
3301 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
3304 int_list_ptr *s_preds;
3305 int_list_ptr *s_succs;
3309 if (succ_bb != EXIT_BLOCK)
3311 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
3312 num_preds[succ_bb]++;
3314 if (pred_bb != ENTRY_BLOCK)
3316 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
3317 num_succs[pred_bb]++;
3321 /* Compute the predecessors and successors for each block. */
3323 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
3324 int_list_ptr *s_preds;
3325 int_list_ptr *s_succs;
3329 int bb, clear_local_bb_vars = 0;
3331 bzero ((char *) s_preds, n_basic_blocks * sizeof (int_list_ptr));
3332 bzero ((char *) s_succs, n_basic_blocks * sizeof (int_list_ptr));
3333 bzero ((char *) num_preds, n_basic_blocks * sizeof (int));
3334 bzero ((char *) num_succs, n_basic_blocks * sizeof (int));
3336 /* This routine can be called after life analysis; in that case
3337 basic_block_drops_in and uid_block_number will not be available
3338 and we must recompute their values. */
3339 if (basic_block_drops_in == NULL || uid_block_number == NULL)
3341 clear_local_bb_vars = 1;
3342 basic_block_drops_in = (char *) alloca (n_basic_blocks);
3343 uid_block_number = (int *) alloca ((get_max_uid () + 1) * sizeof (int));
3345 bzero ((char *) basic_block_drops_in, n_basic_blocks * sizeof (char));
3346 bzero ((char *) uid_block_number, n_basic_blocks * sizeof (int));
3348 /* Scan each basic block setting basic_block_drops_in and
3349 uid_block_number as needed. */
3350 for (bb = 0; bb < n_basic_blocks; bb++)
3352 rtx insn, stop_insn;
3355 stop_insn = NULL_RTX;
3357 stop_insn = basic_block_end[bb-1];
3359 /* Look backwards from the start of this block. Stop if we
3360 hit the start of the function or the end of a previous
3361 block. Don't walk backwards through blocks that are just
3363 for (insn = PREV_INSN (basic_block_head[bb]);
3364 insn && insn != stop_insn && GET_CODE (insn) == NOTE;
3365 insn = PREV_INSN (insn))
3368 /* Never set basic_block_drops_in for the first block. It is
3371 If we stopped on anything other than a BARRIER, then this
3374 basic_block_drops_in[bb] = (insn ? GET_CODE (insn) != BARRIER : 1);
3376 insn = basic_block_head[bb];
3379 BLOCK_NUM (insn) = bb;
3380 if (insn == basic_block_end[bb])
3382 insn = NEXT_INSN (insn);
3387 for (bb = 0; bb < n_basic_blocks; bb++)
3392 head = BLOCK_HEAD (bb);
3394 if (GET_CODE (head) == CODE_LABEL)
3395 for (jump = LABEL_REFS (head);
3397 jump = LABEL_NEXTREF (jump))
3399 if (! INSN_DELETED_P (CONTAINING_INSN (jump))
3400 && (GET_CODE (CONTAINING_INSN (jump)) != NOTE
3401 || (NOTE_LINE_NUMBER (CONTAINING_INSN (jump))
3402 != NOTE_INSN_DELETED)))
3403 add_pred_succ (BLOCK_NUM (CONTAINING_INSN (jump)), bb,
3404 s_preds, s_succs, num_preds, num_succs);
3407 jump = BLOCK_END (bb);
3408 /* If this is a RETURN insn or a conditional jump in the last
3409 basic block, or a non-jump insn in the last basic block, then
3410 this block reaches the exit block. */
3411 if ((GET_CODE (jump) == JUMP_INSN && GET_CODE (PATTERN (jump)) == RETURN)
3412 || (((GET_CODE (jump) == JUMP_INSN
3413 && condjump_p (jump) && !simplejump_p (jump))
3414 || GET_CODE (jump) != JUMP_INSN)
3415 && (bb == n_basic_blocks - 1)))
3416 add_pred_succ (bb, EXIT_BLOCK, s_preds, s_succs, num_preds, num_succs);
3418 if (basic_block_drops_in[bb])
3419 add_pred_succ (bb - 1, bb, s_preds, s_succs, num_preds, num_succs);
3422 add_pred_succ (ENTRY_BLOCK, 0, s_preds, s_succs, num_preds, num_succs);
3425 /* If we allocated any variables in temporary storage, clear out the
3426 pointer to the local storage to avoid dangling pointers. */
3427 if (clear_local_bb_vars)
3429 basic_block_drops_in = NULL;
3430 uid_block_number = NULL;
3436 dump_bb_data (file, preds, succs)
3438 int_list_ptr *preds;
3439 int_list_ptr *succs;
3444 fprintf (file, "BB data\n\n");
3445 for (bb = 0; bb < n_basic_blocks; bb++)
3447 fprintf (file, "BB %d, start %d, end %d\n", bb,
3448 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
3449 fprintf (file, " preds:");
3450 for (p = preds[bb]; p != NULL; p = p->next)
3452 int pred_bb = INT_LIST_VAL (p);
3453 if (pred_bb == ENTRY_BLOCK)
3454 fprintf (file, " entry");
3456 fprintf (file, " %d", pred_bb);
3458 fprintf (file, "\n");
3459 fprintf (file, " succs:");
3460 for (p = succs[bb]; p != NULL; p = p->next)
3462 int succ_bb = INT_LIST_VAL (p);
3463 if (succ_bb == EXIT_BLOCK)
3464 fprintf (file, " exit");
3466 fprintf (file, " %d", succ_bb);
3468 fprintf (file, "\n");
3470 fprintf (file, "\n");
3474 dump_sbitmap (file, bmap)
3479 int set_size = bmap->size;
3480 int total_bits = bmap->n_bits;
3482 fprintf (file, " ");
3483 for (i = n = 0; i < set_size && n < total_bits; i++)
3485 for (j = 0; j < SBITMAP_ELT_BITS && n < total_bits; j++, n++)
3487 if (n != 0 && n % 10 == 0)
3488 fprintf (file, " ");
3489 fprintf (file, "%d", (bmap->elms[i] & (1L << j)) != 0);
3492 fprintf (file, "\n");
3496 dump_sbitmap_vector (file, title, subtitle, bmaps, n_maps)
3498 char *title, *subtitle;
3504 fprintf (file, "%s\n", title);
3505 for (bb = 0; bb < n_maps; bb++)
3507 fprintf (file, "%s %d\n", subtitle, bb);
3508 dump_sbitmap (file, bmaps[bb]);
3510 fprintf (file, "\n");
3513 /* Free basic block data storage. */
3518 free_int_list (&pred_int_list_blocks);
3521 /* Bitmap manipulation routines. */
3523 /* Allocate a simple bitmap of N_ELMS bits. */
3526 sbitmap_alloc (n_elms)
3529 int bytes, size, amt;
3532 size = SBITMAP_SET_SIZE (n_elms);
3533 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3534 amt = (sizeof (struct simple_bitmap_def)
3535 + bytes - sizeof (SBITMAP_ELT_TYPE));
3536 bmap = (sbitmap) xmalloc (amt);
3537 bmap->n_bits = n_elms;
3539 bmap->bytes = bytes;
3543 /* Allocate a vector of N_VECS bitmaps of N_ELMS bits. */
3546 sbitmap_vector_alloc (n_vecs, n_elms)
3549 int i, bytes, offset, elm_bytes, size, amt, vector_bytes;
3550 sbitmap *bitmap_vector;
3552 size = SBITMAP_SET_SIZE (n_elms);
3553 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3554 elm_bytes = (sizeof (struct simple_bitmap_def)
3555 + bytes - sizeof (SBITMAP_ELT_TYPE));
3556 vector_bytes = n_vecs * sizeof (sbitmap *);
3558 /* Round up `vector_bytes' to account for the alignment requirements
3559 of an sbitmap. One could allocate the vector-table and set of sbitmaps
3560 separately, but that requires maintaining two pointers or creating
3561 a cover struct to hold both pointers (so our result is still just
3562 one pointer). Neither is a bad idea, but this is simpler for now. */
3564 /* Based on DEFAULT_ALIGNMENT computation in obstack.c. */
3565 struct { char x; SBITMAP_ELT_TYPE y; } align;
3566 int alignment = (char *) & align.y - & align.x;
3567 vector_bytes = (vector_bytes + alignment - 1) & ~ (alignment - 1);
3570 amt = vector_bytes + (n_vecs * elm_bytes);
3571 bitmap_vector = (sbitmap *) xmalloc (amt);
3573 for (i = 0, offset = vector_bytes;
3575 i++, offset += elm_bytes)
3577 sbitmap b = (sbitmap) ((char *) bitmap_vector + offset);
3578 bitmap_vector[i] = b;
3584 return bitmap_vector;
3587 /* Copy sbitmap SRC to DST. */
3590 sbitmap_copy (dst, src)
3598 for (i = 0; i < dst->size; i++)
3602 /* Zero all elements in a bitmap. */
3608 bzero ((char *) bmap->elms, bmap->bytes);
3611 /* Set to ones all elements in a bitmap. */
3617 memset (bmap->elms, -1, bmap->bytes);
3620 /* Zero a vector of N_VECS bitmaps. */
3623 sbitmap_vector_zero (bmap, n_vecs)
3629 for (i = 0; i < n_vecs; i++)
3630 sbitmap_zero (bmap[i]);
3633 /* Set to ones a vector of N_VECS bitmaps. */
3636 sbitmap_vector_ones (bmap, n_vecs)
3642 for (i = 0; i < n_vecs; i++)
3643 sbitmap_ones (bmap[i]);
3646 /* Set DST to be A union (B - C).
3648 Return non-zero if any change is made. */
3651 sbitmap_union_of_diff (dst, a, b, c)
3652 sbitmap dst, a, b, c;
3655 sbitmap_ptr dstp, ap, bp, cp;
3662 for (i = 0; i < dst->size; i++)
3664 SBITMAP_ELT_TYPE tmp = *ap | (*bp & ~*cp);
3668 dstp++; ap++; bp++; cp++;
3673 /* Set bitmap DST to the bitwise negation of the bitmap SRC. */
3676 sbitmap_not (dst, src)
3680 sbitmap_ptr dstp, ap;
3684 for (i = 0; i < dst->size; i++)
3686 SBITMAP_ELT_TYPE tmp = ~(*ap);
3692 /* Set the bits in DST to be the difference between the bits
3693 in A and the bits in B. i.e. dst = a - b.
3694 The - operator is implemented as a & (~b). */
3697 sbitmap_difference (dst, a, b)
3701 sbitmap_ptr dstp, ap, bp;
3706 for (i = 0; i < dst->size; i++)
3707 *dstp++ = *ap++ & (~*bp++);
3710 /* Set DST to be (A and B)).
3711 Return non-zero if any change is made. */
3714 sbitmap_a_and_b (dst, a, b)
3718 sbitmap_ptr dstp, ap, bp;
3724 for (i = 0; i < dst->size; i++)
3726 SBITMAP_ELT_TYPE tmp = *ap & *bp;
3734 /* Set DST to be (A or B)).
3735 Return non-zero if any change is made. */
3738 sbitmap_a_or_b (dst, a, b)
3742 sbitmap_ptr dstp, ap, bp;
3748 for (i = 0; i < dst->size; i++)
3750 SBITMAP_ELT_TYPE tmp = *ap | *bp;
3759 /* Set DST to be (A or (B and C)).
3760 Return non-zero if any change is made. */
3763 sbitmap_a_or_b_and_c (dst, a, b, c)
3764 sbitmap dst, a, b, c;
3767 sbitmap_ptr dstp, ap, bp, cp;
3774 for (i = 0; i < dst->size; i++)
3776 SBITMAP_ELT_TYPE tmp = *ap | (*bp & *cp);
3780 dstp++; ap++; bp++; cp++;
3785 /* Set DST to be (A ann (B or C)).
3786 Return non-zero if any change is made. */
3789 sbitmap_a_and_b_or_c (dst, a, b, c)
3790 sbitmap dst, a, b, c;
3793 sbitmap_ptr dstp, ap, bp, cp;
3800 for (i = 0; i < dst->size; i++)
3802 SBITMAP_ELT_TYPE tmp = *ap & (*bp | *cp);
3806 dstp++; ap++; bp++; cp++;
3811 /* Set the bitmap DST to the intersection of SRC of all predecessors or
3812 successors of block number BB (PRED_SUCC says which). */
3815 sbitmap_intersect_of_predsucc (dst, src, bb, pred_succ)
3819 int_list_ptr *pred_succ;
3823 int set_size = dst->size;
3827 /* It is possible that there are no predecessors(/successors).
3828 This can happen for example in unreachable code. */
3832 /* In APL-speak this is the `and' reduction of the empty set and thus
3833 the result is the identity for `and'. */
3838 /* Set result to first predecessor/successor. */
3840 for ( ; ps != NULL; ps = ps->next)
3842 ps_bb = INT_LIST_VAL (ps);
3843 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3845 sbitmap_copy (dst, src[ps_bb]);
3846 /* Break out since we're only doing first predecessor. */
3852 /* Now do the remaining predecessors/successors. */
3854 for (ps = ps->next; ps != NULL; ps = ps->next)
3859 ps_bb = INT_LIST_VAL (ps);
3860 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3863 p = src[ps_bb]->elms;
3866 for (i = 0; i < set_size; i++)
3871 /* Set the bitmap DST to the intersection of SRC of all predecessors
3872 of block number BB. */
3875 sbitmap_intersect_of_predecessors (dst, src, bb, s_preds)
3879 int_list_ptr *s_preds;
3881 sbitmap_intersect_of_predsucc (dst, src, bb, s_preds);
3884 /* Set the bitmap DST to the intersection of SRC of all successors
3885 of block number BB. */
3888 sbitmap_intersect_of_successors (dst, src, bb, s_succs)
3892 int_list_ptr *s_succs;
3894 sbitmap_intersect_of_predsucc (dst, src, bb, s_succs);
3897 /* Set the bitmap DST to the union of SRC of all predecessors/successors of
3901 sbitmap_union_of_predsucc (dst, src, bb, pred_succ)
3905 int_list_ptr *pred_succ;
3909 int set_size = dst->size;
3913 /* It is possible that there are no predecessors(/successors).
3914 This can happen for example in unreachable code. */
3918 /* In APL-speak this is the `or' reduction of the empty set and thus
3919 the result is the identity for `or'. */
3924 /* Set result to first predecessor/successor. */
3926 for ( ; ps != NULL; ps = ps->next)
3928 ps_bb = INT_LIST_VAL (ps);
3929 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3931 sbitmap_copy (dst, src[ps_bb]);
3932 /* Break out since we're only doing first predecessor. */
3938 /* Now do the remaining predecessors/successors. */
3940 for (ps = ps->next; ps != NULL; ps = ps->next)
3945 ps_bb = INT_LIST_VAL (ps);
3946 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3949 p = src[ps_bb]->elms;
3952 for (i = 0; i < set_size; i++)
3957 /* Set the bitmap DST to the union of SRC of all predecessors of
3961 sbitmap_union_of_predecessors (dst, src, bb, s_preds)
3965 int_list_ptr *s_preds;
3967 sbitmap_union_of_predsucc (dst, src, bb, s_preds);
3970 /* Set the bitmap DST to the union of SRC of all predecessors of
3974 sbitmap_union_of_successors (dst, src, bb, s_succ)
3978 int_list_ptr *s_succ;
3980 sbitmap_union_of_predsucc (dst, src, bb, s_succ);
3983 /* Compute dominator relationships. */
3985 compute_dominators (dominators, post_dominators, s_preds, s_succs)
3986 sbitmap *dominators;
3987 sbitmap *post_dominators;
3988 int_list_ptr *s_preds;
3989 int_list_ptr *s_succs;
3991 int bb, changed, passes;
3992 sbitmap *temp_bitmap;
3994 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
3995 sbitmap_vector_ones (dominators, n_basic_blocks);
3996 sbitmap_vector_ones (post_dominators, n_basic_blocks);
3997 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
3999 sbitmap_zero (dominators[0]);
4000 SET_BIT (dominators[0], 0);
4002 sbitmap_zero (post_dominators[n_basic_blocks-1]);
4003 SET_BIT (post_dominators[n_basic_blocks-1], 0);
4010 for (bb = 1; bb < n_basic_blocks; bb++)
4012 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4014 SET_BIT (temp_bitmap[bb], bb);
4015 changed |= sbitmap_a_and_b (dominators[bb],
4018 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4020 SET_BIT (temp_bitmap[bb], bb);
4021 changed |= sbitmap_a_and_b (post_dominators[bb],
4022 post_dominators[bb],
4031 /* Count for a single SET rtx, X. */
4034 count_reg_sets_1 (x)
4038 register rtx reg = SET_DEST (x);
4040 /* Find the register that's set/clobbered. */
4041 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4042 || GET_CODE (reg) == SIGN_EXTRACT
4043 || GET_CODE (reg) == STRICT_LOW_PART)
4044 reg = XEXP (reg, 0);
4046 if (GET_CODE (reg) == REG)
4048 regno = REGNO (reg);
4049 if (regno >= FIRST_PSEUDO_REGISTER)
4051 /* Count (weighted) references, stores, etc. This counts a
4052 register twice if it is modified, but that is correct. */
4053 REG_N_SETS (regno)++;
4055 REG_N_REFS (regno) += loop_depth;
4060 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4061 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
4067 register RTX_CODE code = GET_CODE (x);
4069 if (code == SET || code == CLOBBER)
4070 count_reg_sets_1 (x);
4071 else if (code == PARALLEL)
4074 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4076 code = GET_CODE (XVECEXP (x, 0, i));
4077 if (code == SET || code == CLOBBER)
4078 count_reg_sets_1 (XVECEXP (x, 0, i));
4083 /* Increment REG_N_REFS by the current loop depth each register reference
4087 count_reg_references (x)
4090 register RTX_CODE code;
4093 code = GET_CODE (x);
4113 /* If we are clobbering a MEM, mark any registers inside the address
4115 if (GET_CODE (XEXP (x, 0)) == MEM)
4116 count_reg_references (XEXP (XEXP (x, 0), 0));
4120 /* While we're here, optimize this case. */
4123 /* In case the SUBREG is not of a register, don't optimize */
4124 if (GET_CODE (x) != REG)
4126 count_reg_references (x);
4130 /* ... fall through ... */
4133 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4134 REG_N_REFS (REGNO (x)) += loop_depth;
4139 register rtx testreg = SET_DEST (x);
4142 /* If storing into MEM, don't show it as being used. But do
4143 show the address as being used. */
4144 if (GET_CODE (testreg) == MEM)
4146 count_reg_references (XEXP (testreg, 0));
4147 count_reg_references (SET_SRC (x));
4151 /* Storing in STRICT_LOW_PART is like storing in a reg
4152 in that this SET might be dead, so ignore it in TESTREG.
4153 but in some other ways it is like using the reg.
4155 Storing in a SUBREG or a bit field is like storing the entire
4156 register in that if the register's value is not used
4157 then this SET is not needed. */
4158 while (GET_CODE (testreg) == STRICT_LOW_PART
4159 || GET_CODE (testreg) == ZERO_EXTRACT
4160 || GET_CODE (testreg) == SIGN_EXTRACT
4161 || GET_CODE (testreg) == SUBREG)
4163 /* Modifying a single register in an alternate mode
4164 does not use any of the old value. But these other
4165 ways of storing in a register do use the old value. */
4166 if (GET_CODE (testreg) == SUBREG
4167 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4172 testreg = XEXP (testreg, 0);
4175 /* If this is a store into a register,
4176 recursively scan the value being stored. */
4178 if (GET_CODE (testreg) == REG)
4180 count_reg_references (SET_SRC (x));
4182 count_reg_references (SET_DEST (x));
4192 /* Recursively scan the operands of this expression. */
4195 register char *fmt = GET_RTX_FORMAT (code);
4198 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4202 /* Tail recursive case: save a function call level. */
4208 count_reg_references (XEXP (x, i));
4210 else if (fmt[i] == 'E')
4213 for (j = 0; j < XVECLEN (x, i); j++)
4214 count_reg_references (XVECEXP (x, i, j));
4220 /* Recompute register set/reference counts immediately prior to register
4223 This avoids problems with set/reference counts changing to/from values
4224 which have special meanings to the register allocators.
4226 Additionally, the reference counts are the primary component used by the
4227 register allocators to prioritize pseudos for allocation to hard regs.
4228 More accurate reference counts generally lead to better register allocation.
4230 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4231 possibly other information which is used by the register allocators. */
4234 recompute_reg_usage (f)
4240 /* Clear out the old data. */
4241 max_reg = max_reg_num ();
4242 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
4248 /* Scan each insn in the chain and count how many times each register is
4251 for (insn = f; insn; insn = NEXT_INSN (insn))
4253 /* Keep track of loop depth. */
4254 if (GET_CODE (insn) == NOTE)
4256 /* Look for loop boundaries. */
4257 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
4259 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
4262 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
4263 Abort now rather than setting register status incorrectly. */
4264 if (loop_depth == 0)
4267 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4271 /* This call will increment REG_N_SETS for each SET or CLOBBER
4272 of a register in INSN. It will also increment REG_N_REFS
4273 by the loop depth for each set of a register in INSN. */
4274 count_reg_sets (PATTERN (insn));
4276 /* count_reg_sets does not detect autoincrement address modes, so
4277 detect them here by looking at the notes attached to INSN. */
4278 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
4280 if (REG_NOTE_KIND (links) == REG_INC)
4281 /* Count (weighted) references, stores, etc. This counts a
4282 register twice if it is modified, but that is correct. */
4283 REG_N_SETS (REGNO (XEXP (links, 0)))++;
4286 /* This call will increment REG_N_REFS by the current loop depth for
4287 each reference to a register in INSN. */
4288 count_reg_references (PATTERN (insn));
4290 /* count_reg_references will not include counts for arguments to
4291 function calls, so detect them here by examining the
4292 CALL_INSN_FUNCTION_USAGE data. */
4293 if (GET_CODE (insn) == CALL_INSN)
4297 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4299 note = XEXP (note, 1))
4300 if (GET_CODE (XEXP (note, 0)) == USE)
4301 count_reg_references (SET_DEST (XEXP (note, 0)));