1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler.
23 It computes data flow information
24 which tells combine_instructions which insns to consider combining
25 and controls register allocation.
27 Additional data flow information that is too bulky to record
28 is generated during the analysis, and is used at that time to
29 create autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl
38 into basic blocks. It records the beginnings and ends of the
39 basic blocks in the vectors basic_block_head and basic_block_end,
40 and the number of blocks in n_basic_blocks.
42 find_basic_blocks also finds any unreachable loops
47 life_analysis is called immediately after find_basic_blocks.
48 It uses the basic block information to determine where each
49 hard or pseudo register is live.
51 ** live-register info **
53 The information about where each register is live is in two parts:
54 the REG_NOTES of insns, and the vector basic_block_live_at_start.
56 basic_block_live_at_start has an element for each basic block,
57 and the element is a bit-vector with a bit for each hard or pseudo
58 register. The bit is 1 if the register is live at the beginning
61 Two types of elements can be added to an insn's REG_NOTES.
62 A REG_DEAD note is added to an insn's REG_NOTES for any register
63 that meets both of two conditions: The value in the register is not
64 needed in subsequent insns and the insn does not replace the value in
65 the register (in the case of multi-word hard registers, the value in
66 each register must be replaced by the insn to avoid a REG_DEAD note).
68 In the vast majority of cases, an object in a REG_DEAD note will be
69 used somewhere in the insn. The (rare) exception to this is if an
70 insn uses a multi-word hard register and only some of the registers are
71 needed in subsequent insns. In that case, REG_DEAD notes will be
72 provided for those hard registers that are not subsequently needed.
73 Partial REG_DEAD notes of this type do not occur when an insn sets
74 only some of the hard registers used in such a multi-word operand;
75 omitting REG_DEAD notes for objects stored in an insn is optional and
76 the desire to do so does not justify the complexity of the partial
79 REG_UNUSED notes are added for each register that is set by the insn
80 but is unused subsequently (if every register set by the insn is unused
81 and the insn does not reference memory or have some other side-effect,
82 the insn is deleted instead). If only part of a multi-word hard
83 register is used in a subsequent insn, REG_UNUSED notes are made for
84 the parts that will not be used.
86 To determine which registers are live after any insn, one can
87 start from the beginning of the basic block and scan insns, noting
88 which registers are set by each insn and which die there.
90 ** Other actions of life_analysis **
92 life_analysis sets up the LOG_LINKS fields of insns because the
93 information needed to do so is readily available.
95 life_analysis deletes insns whose only effect is to store a value
98 life_analysis notices cases where a reference to a register as
99 a memory address can be combined with a preceding or following
100 incrementation or decrementation of the register. The separate
101 instruction to increment or decrement is deleted and the address
102 is changed to a POST_INC or similar rtx.
104 Each time an incrementing or decrementing address is created,
105 a REG_INC element is added to the insn's REG_NOTES list.
107 life_analysis fills in certain vectors containing information about
108 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
109 reg_n_calls_crosses and reg_basic_block.
111 life_analysis sets current_function_sp_is_unchanging if the function
112 doesn't modify the stack pointer. */
117 #include "basic-block.h"
118 #include "insn-config.h"
120 #include "hard-reg-set.h"
128 #define obstack_chunk_alloc xmalloc
129 #define obstack_chunk_free free
131 /* The contents of the current function definition are allocated
132 in this obstack, and all are freed at the end of the function.
133 For top-level functions, this is temporary_obstack.
134 Separate obstacks are made for nested functions. */
136 extern struct obstack *function_obstack;
138 /* List of labels that must never be deleted. */
139 extern rtx forced_labels;
141 /* Get the basic block number of an insn.
142 This info should not be expected to remain available
143 after the end of life_analysis. */
145 /* This is the limit of the allocated space in the following two arrays. */
147 static int max_uid_for_flow;
149 #define BLOCK_NUM(INSN) uid_block_number[INSN_UID (INSN)]
151 /* This is where the BLOCK_NUM values are really stored.
152 This is set up by find_basic_blocks and used there and in life_analysis,
155 int *uid_block_number;
157 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
159 #define INSN_VOLATILE(INSN) uid_volatile[INSN_UID (INSN)]
160 static char *uid_volatile;
162 /* Number of basic blocks in the current function. */
166 /* Maximum register number used in this function, plus one. */
170 /* Indexed by n, giving various register information */
172 varray_type reg_n_info;
174 /* Size of the reg_n_info table. */
176 unsigned int reg_n_max;
178 /* Element N is the next insn that uses (hard or pseudo) register number N
179 within the current basic block; or zero, if there is no such insn.
180 This is valid only during the final backward scan in propagate_block. */
182 static rtx *reg_next_use;
184 /* Size of a regset for the current function,
185 in (1) bytes and (2) elements. */
190 /* Element N is first insn in basic block N.
191 This info lasts until we finish compiling the function. */
193 rtx *basic_block_head;
195 /* Element N is last insn in basic block N.
196 This info lasts until we finish compiling the function. */
198 rtx *basic_block_end;
200 /* Element N indicates whether basic block N can be reached through a
203 char *basic_block_computed_jump_target;
205 /* Element N is a regset describing the registers live
206 at the start of basic block N.
207 This info lasts until we finish compiling the function. */
209 regset *basic_block_live_at_start;
211 /* Regset of regs live when calls to `setjmp'-like functions happen. */
213 regset regs_live_at_setjmp;
215 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
216 that have to go in the same hard reg.
217 The first two regs in the list are a pair, and the next two
218 are another pair, etc. */
221 /* Element N is nonzero if control can drop into basic block N
222 from the preceding basic block. Freed after life_analysis. */
224 static char *basic_block_drops_in;
226 /* Element N is depth within loops of the last insn in basic block number N.
227 Freed after life_analysis. */
229 static short *basic_block_loop_depth;
231 /* Depth within loops of basic block being scanned for lifetime analysis,
232 plus one. This is the weight attached to references to registers. */
234 static int loop_depth;
236 /* During propagate_block, this is non-zero if the value of CC0 is live. */
240 /* During propagate_block, this contains the last MEM stored into. It
241 is used to eliminate consecutive stores to the same location. */
243 static rtx last_mem_set;
245 /* Set of registers that may be eliminable. These are handled specially
246 in updating regs_ever_live. */
248 static HARD_REG_SET elim_reg_set;
250 /* Forward declarations */
251 static void find_basic_blocks_1 PROTO((rtx, rtx));
252 static void make_edges PROTO((int));
253 static void mark_label_ref PROTO((rtx, rtx, int));
254 static int delete_unreachable_blocks PROTO((void));
255 static int delete_block PROTO((int));
256 static void life_analysis_1 PROTO((rtx, int));
257 static void propagate_block PROTO((regset, rtx, rtx, int,
259 static rtx flow_delete_insn PROTO((rtx));
260 static int set_noop_p PROTO((rtx));
261 static int noop_move_p PROTO((rtx));
262 static void record_volatile_insns PROTO((rtx));
263 static void mark_regs_live_at_end PROTO((regset));
264 static int insn_dead_p PROTO((rtx, regset, int));
265 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
266 static void mark_set_regs PROTO((regset, regset, rtx,
268 static void mark_set_1 PROTO((regset, regset, rtx,
271 static void find_auto_inc PROTO((regset, rtx, rtx));
272 static int try_pre_increment_1 PROTO((rtx));
273 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
275 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
276 void dump_flow_info PROTO((FILE *));
277 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
278 int_list_ptr *, int *, int *));
279 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
280 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
282 static void init_regset_vector PROTO ((regset *, int,
284 static void count_reg_sets_1 PROTO ((rtx));
285 static void count_reg_sets PROTO ((rtx));
286 static void count_reg_references PROTO ((rtx));
287 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
289 /* Find basic blocks of the current function.
290 F is the first insn of the function and NREGS the number of register numbers
292 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
293 be reachable. This turns on a kludge that causes the control flow
294 information to be inaccurate and not suitable for passes like GCSE. */
/* NOTE(review): many interior lines of this function are not visible in this
   chunk; comments below describe only the code that is shown.  Counts the
   basic blocks in insn chain F, allocates the per-block and per-uid tables,
   then delegates the real work to find_basic_blocks_1.  */
297 find_basic_blocks (f, nregs, file)
304 rtx nonlocal_label_list = nonlocal_label_rtx_list ();
305 int in_libcall_block = 0;
307 /* Count the basic blocks. Also find maximum insn uid value used. */
311 register RTX_CODE prev_code = JUMP_INSN;
312 register RTX_CODE code;
314 int call_had_abnormal_edge = 0;
/* First pass over all insns: count block starts so the tables below can be
   sized.  prev_code starts as JUMP_INSN so the very first real insn is
   treated as a block start.  */
316 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
318 /* Track when we are inside a LIBCALL block.  */
319 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
320 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
321 in_libcall_block = 1;
323 code = GET_CODE (insn);
325 /* A basic block starts at label, or after something that can jump. */
326 if (code == CODE_LABEL
327 || (GET_RTX_CLASS (code) == 'i'
328 && (prev_code == JUMP_INSN
329 || (prev_code == CALL_INSN && call_had_abnormal_edge)
330 || prev_code == BARRIER)))
334 /* If the previous insn was a call that did not create an
335 abnormal edge, we want to add a nop so that the CALL_INSN
336 itself is not at basic_block_end. This allows us to easily
337 distinguish between normal calls and those which create
338 abnormal edges in the flow graph. */
340 if (i > 0 && !call_had_abnormal_edge && prev_call != 0)
342 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
343 emit_insn_after (nop, prev_call);
346 /* We change the code of the CALL_INSN, so that it won't start a
348 if (code == CALL_INSN && in_libcall_block)
351 /* Record whether this call created an edge. */
352 if (code == CALL_INSN)
/* A call has an abnormal edge if the function has nonlocal labels or the
   call sits inside an EH region (eh_region is maintained by code not
   visible here -- presumably bumped on EH_REGION_BEG/END notes).  */
355 call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_region);
357 else if (code != NOTE && code != BARRIER)
362 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
364 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
/* A REG_RETVAL note marks the final insn of a LIBCALL sequence.  */
367 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
368 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
369 in_libcall_block = 0;
375 max_uid_for_flow = get_max_uid ();
377 /* Leave space for insns life_analysis makes in some cases for auto-inc.
378 These cases are rare, so we don't need too much space. */
379 max_uid_for_flow += max_uid_for_flow / 10;
382 /* Allocate some tables that last till end of compiling this function
383 and some needed only in find_basic_blocks and life_analysis. */
385 basic_block_head = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
386 basic_block_end = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
387 basic_block_drops_in = (char *) xmalloc (n_basic_blocks);
388 basic_block_computed_jump_target = (char *) oballoc (n_basic_blocks);
389 basic_block_loop_depth = (short *) xmalloc (n_basic_blocks * sizeof (short));
/* uid_block_number gets one slot per (padded) insn uid; the assignment's
   left-hand side is on a line not visible in this chunk.  */
391 = (int *) xmalloc ((max_uid_for_flow + 1) * sizeof (int));
392 uid_volatile = (char *) xmalloc (max_uid_for_flow + 1);
393 bzero (uid_volatile, max_uid_for_flow + 1);
395 find_basic_blocks_1 (f, nonlocal_label_list);
398 /* For communication between find_basic_blocks_1 and its subroutines. */
400 /* An array of CODE_LABELs, indexed by UID for the start of the active
401 EH handler for each insn in F. */
402 static int *active_eh_region;
403 static int *nested_eh_region;
405 /* Element N nonzero if basic block N can actually be reached. */
407 static char *block_live_static;
409 /* List of label_refs to all labels whose addresses are taken
411 static rtx label_value_list;
413 /* a list of non-local labels in the function. */
414 static rtx nonlocal_label_list;
416 /* Find all basic blocks of the function whose first insn is F.
417 Store the correct data in the tables that describe the basic blocks,
418 set up the chains of references for each CODE_LABEL, and
419 delete any entire basic blocks that cannot be reached.
421 NONLOCAL_LABELS is a list of non-local labels in the function.
422 Blocks that are otherwise unreachable may be reachable with a non-local
424 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
425 be reachable. This turns on a kludge that causes the control flow
426 information to be inaccurate and not suitable for passes like GCSE. */
/* NOTE(review): interior lines of this function are missing from this chunk;
   comments describe only the visible code.  Records head/end/loop-depth for
   every block, builds label reference chains, computes reachability by
   iterating make_edges to a fixed point, and deletes unreachable blocks.  */
429 find_basic_blocks_1 (f, nonlocal_labels)
430 rtx f, nonlocal_labels;
434 register char *block_live = (char *) alloca (n_basic_blocks);
435 register char *block_marked = (char *) alloca (n_basic_blocks);
437 enum rtx_code prev_code, code;
439 int in_libcall_block = 0;
440 int call_had_abnormal_edge = 0;
443 active_eh_region = (int *) alloca ((max_uid_for_flow + 1) * sizeof (int));
444 nested_eh_region = (int *) alloca ((max_label_num () + 1) * sizeof (int));
445 nonlocal_label_list = nonlocal_labels;
448 label_value_list = 0;
449 block_live_static = block_live;
450 bzero (block_live, n_basic_blocks);
451 bzero (block_marked, n_basic_blocks);
452 bzero (basic_block_computed_jump_target, n_basic_blocks);
453 bzero ((char *) active_eh_region, (max_uid_for_flow + 1) * sizeof (int));
454 bzero ((char *) nested_eh_region, (max_label_num () + 1) * sizeof (int));
455 current_function_has_computed_jump = 0;
457 /* Initialize with just block 0 reachable and no blocks marked. */
458 if (n_basic_blocks > 0)
461 /* Initialize the ref chain of each label to 0. Record where all the
462 blocks start and end and their depth in loops. For each insn, record
463 the block it is in. Also mark as reachable any blocks headed by labels
464 that must not be deleted. */
/* i is the index of the current block; -1 until the first block starts.
   prev_code starts as JUMP_INSN so the first insn opens block 0.  */
466 for (eh_note = NULL_RTX, insn = f, i = -1, prev_code = JUMP_INSN, depth = 1;
467 insn; insn = NEXT_INSN (insn))
470 /* Track when we are inside a LIBCALL block.  */
471 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
472 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
473 in_libcall_block = 1;
475 code = GET_CODE (insn);
/* Maintain the loop nesting depth from LOOP_BEG/LOOP_END notes.  */
478 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
480 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
484 /* A basic block starts at label, or after something that can jump. */
485 else if (code == CODE_LABEL
486 || (GET_RTX_CLASS (code) == 'i'
487 && (prev_code == JUMP_INSN
488 || (prev_code == CALL_INSN && call_had_abnormal_edge)
489 || prev_code == BARRIER)))
491 basic_block_head[++i] = insn;
492 basic_block_end[i] = insn;
493 basic_block_loop_depth[i] = depth;
495 if (code == CODE_LABEL)
/* An empty ref chain is represented by the label pointing at itself.  */
497 LABEL_REFS (insn) = insn;
498 /* Any label that cannot be deleted
499 is considered to start a reachable block. */
500 if (LABEL_PRESERVE_P (insn))
/* Any real insn extends the current block's end.  */
505 else if (GET_RTX_CLASS (code) == 'i')
507 basic_block_end[i] = insn;
508 basic_block_loop_depth[i] = depth;
511 if (GET_RTX_CLASS (code) == 'i')
513 /* Make a list of all labels referred to other than by jumps. */
514 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
515 if (REG_NOTE_KIND (note) == REG_LABEL
516 && XEXP (note, 0) != eh_return_stub_label)
517 label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
521 /* Keep a lifo list of the currently active exception notes. */
522 if (GET_CODE (insn) == NOTE)
524 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
/* Record the enclosing region (head of eh_note), or 0 at top level.  */
527 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] =
528 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0))
530 nested_eh_region [NOTE_BLOCK_NUMBER (insn)] = 0;
531 eh_note = gen_rtx_EXPR_LIST (VOIDmode,
534 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
535 eh_note = XEXP (eh_note, 1);
537 /* If we encounter a CALL_INSN, note which exception handler it
538 might pass control to.
540 If doing asynchronous exceptions, record the active EH handler
541 for every insn, since most insns can throw. */
543 && (asynchronous_exceptions
544 || (GET_CODE (insn) == CALL_INSN
545 && ! in_libcall_block)))
546 active_eh_region[INSN_UID (insn)] =
547 NOTE_BLOCK_NUMBER (XEXP (eh_note, 0));
548 BLOCK_NUM (insn) = i;
550 /* We change the code of the CALL_INSN, so that it won't start a
552 if (code == CALL_INSN && in_libcall_block)
555 /* Record whether this call created an edge. */
556 if (code == CALL_INSN)
557 call_had_abnormal_edge = (nonlocal_label_list != 0 || eh_note);
562 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
563 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
564 in_libcall_block = 0;
567 /* During the second pass, `n_basic_blocks' is only an upper bound.
568 Only perform the sanity check for the first pass, and on the second
569 pass ensure `n_basic_blocks' is set to the correct value. */
570 if (pass == 1 && i + 1 != n_basic_blocks)
572 n_basic_blocks = i + 1;
574 /* Record which basic blocks control can drop in to. */
/* Control drops in unless a BARRIER separates this block's head from the
   previous real insn; intervening NOTEs are skipped.  */
576 for (i = 0; i < n_basic_blocks; i++)
578 for (insn = PREV_INSN (basic_block_head[i]);
579 insn && GET_CODE (insn) == NOTE; insn = PREV_INSN (insn))
582 basic_block_drops_in[i] = insn && GET_CODE (insn) != BARRIER;
585 /* Now find which basic blocks can actually be reached
586 and put all jump insns' LABEL_REFS onto the ref-chains
587 of their target labels. */
589 if (n_basic_blocks > 0)
591 int something_marked = 1;
594 /* Pass over all blocks, marking each block that is reachable
595 and has not yet been marked.
596 Keep doing this until, in one pass, no blocks have been marked.
597 Then blocks_live and blocks_marked are identical and correct.
598 In addition, all jumps actually reachable have been marked. */
600 while (something_marked)
602 something_marked = 0;
603 for (i = 0; i < n_basic_blocks; i++)
604 if (block_live[i] && !block_marked[i])
607 something_marked = 1;
613 /* This should never happen. If it does that means we've computed an
614 incorrect flow graph, which can lead to aborts/crashes later in the
615 compiler or incorrect code generation.
617 We used to try and continue here, but that's just asking for trouble
618 later during the compile or at runtime. It's easier to debug the
619 problem here than later! */
/* Sanity check: a live block whose head label has an empty ref chain
   (LABEL_REFS pointing at itself) and that is not dropped into has no
   incoming edge at all -- the CFG must be wrong.  */
620 for (i = 1; i < n_basic_blocks; i++)
621 if (block_live[i] && ! basic_block_drops_in[i]
622 && GET_CODE (basic_block_head[i]) == CODE_LABEL
623 && LABEL_REFS (basic_block_head[i]) == basic_block_head[i])
626 if (! reload_completed)
627 deleted = delete_unreachable_blocks ();
629 /* There are pathological cases where one function calling hundreds of
630 nested inline functions can generate lots and lots of unreachable
631 blocks that jump can't delete. Since we don't use sparse matrices
632 a lot of memory will be needed to compile such functions.
633 Implementing sparse matrices is a fair bit of work and it is not
634 clear that they win more than they lose (we don't want to
635 unnecessarily slow down compilation of normal code). By making
636 another pass for the pathological case, we can greatly speed up
637 their compilation without hurting normal code. This works because
638 all the insns in the unreachable blocks have either been deleted or
640 Note that we're talking about reducing memory usage by 10's of
641 megabytes and reducing compilation time by several minutes. */
642 /* ??? The choice of when to make another pass is a bit arbitrary,
643 and was derived from empirical data. */
648 n_basic_blocks -= deleted;
649 /* `n_basic_blocks' may not be correct at this point: two previously
650 separate blocks may now be merged. That's ok though as we
651 recalculate it during the second pass. It certainly can't be
652 any larger than the current value. */
658 /* Record INSN's block number as BB. */
/* Record that INSN belongs to basic block BB, growing the uid_block_number
   table on demand when INSN's uid exceeds the currently allocated space.  */
661 set_block_num (insn, bb)
665 if (INSN_UID (insn) >= max_uid_for_flow)
667 /* Add one-eighth the size so we don't keep calling xrealloc. */
668 max_uid_for_flow = INSN_UID (insn) + (INSN_UID (insn) + 7) / 8;
669 uid_block_number = (int *)
670 xrealloc (uid_block_number, (max_uid_for_flow + 1) * sizeof (int));
/* BLOCK_NUM expands to uid_block_number[INSN_UID (insn)].  */
672 BLOCK_NUM (insn) = bb;
676 /* Subroutines of find_basic_blocks. */
678 /* For basic block I, make edges and mark live all blocks which are reachable
/* NOTE(review): the function header of make_edges and some interior lines
   are not visible in this chunk.  For basic block I: marks as live every
   block reachable from I -- the fall-through successor, jump targets,
   forced labels, computed-jump targets, EH handlers, and nonlocal goto
   sites -- by calling mark_label_ref on the appropriate LABEL_REFs.  */
686 if (i + 1 < n_basic_blocks && basic_block_drops_in[i + 1])
687 block_live_static[i + 1] = 1;
688 insn = basic_block_end[i];
689 if (GET_CODE (insn) == JUMP_INSN)
690 mark_label_ref (PATTERN (insn), insn, 0);
692 /* If we have any forced labels, mark them as potentially reachable from
694 for (x = forced_labels; x; x = XEXP (x, 1))
695 if (! LABEL_REF_NONLOCAL_P (x))
696 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
699 /* Now scan the insns for this block, we may need to make edges for some of
700 them to various non-obvious locations (exception handlers, nonlocal
702 for (insn = basic_block_head[i];
703 insn != NEXT_INSN (basic_block_end[i]);
704 insn = NEXT_INSN (insn))
706 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
709 /* References to labels in non-jumping insns have REG_LABEL notes
712 This can happen for computed gotos; we don't care about them
713 here since the values are also on the label_value_list and will
714 be marked live if we find a live computed goto.
716 This can also happen when we take the address of a label to pass
717 as an argument to __throw. Note throw only uses the value to
718 determine what handler should be called -- ie the label is not
719 used as a jump target, it just marks regions in the code.
721 In theory we should be able to ignore the REG_LABEL notes, but
722 we have to make sure that the label and associated insns aren't
723 marked dead, so we make the block in question live and create an
724 edge from this insn to the label. This is not strictly correct,
725 but it is close enough for now.
727 See below for code that handles the eh_stub label specially. */
728 for (note = REG_NOTES (insn);
730 note = XEXP (note, 1))
732 if (REG_NOTE_KIND (note) == REG_LABEL
733 && XEXP (note, 0) != eh_return_stub_label)
736 block_live_static[BLOCK_NUM (x)] = 1;
737 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, x),
742 /* If this is a computed jump, then mark it as reaching everything
743 on the label_value_list and forced_labels list. */
744 if (computed_jump_p (insn))
746 current_function_has_computed_jump = 1;
747 for (x = label_value_list; x; x = XEXP (x, 1))
749 int b = BLOCK_NUM (XEXP (x, 0));
750 basic_block_computed_jump_target[b] = 1;
751 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
/* Forced labels are also potential computed-jump targets.  */
755 for (x = forced_labels; x; x = XEXP (x, 1))
757 int b = BLOCK_NUM (XEXP (x, 0));
758 basic_block_computed_jump_target[b] = 1;
759 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
764 /* If this is a CALL_INSN, then mark it as reaching the active EH
765 handler for this CALL_INSN. If we're handling asynchronous
766 exceptions mark every insn as reaching the active EH handler.
768 Also mark the CALL_INSN as reaching any nonlocal goto sites. */
769 else if (asynchronous_exceptions
770 || (GET_CODE (insn) == CALL_INSN
771 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX)))
773 if (active_eh_region[INSN_UID (insn)])
/* Walk outward through the chain of enclosing EH regions, adding an
   edge to every handler of every region.  */
777 region = active_eh_region[INSN_UID (insn)];
779 region = nested_eh_region[region])
781 ptr = get_first_handler (region);
782 for ( ; ptr ; ptr = ptr->next)
783 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
788 if (! asynchronous_exceptions)
790 for (x = nonlocal_label_list; x; x = XEXP (x, 1))
791 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
794 /* ??? This could be made smarter: in some cases it's possible
795 to tell that certain calls will not do a nonlocal goto.
797 For example, if the nested functions that do the nonlocal
798 gotos do not have their addresses taken, then only calls to
799 those functions or to other nested functions that use them
800 could possibly do nonlocal gotos. */
804 /* We know something about the structure of the function __throw in
805 libgcc2.c. It is the only function that ever contains eh_stub labels.
806 It modifies its return address so that the last block returns to one of
807 the eh_stub labels within it. So we have to make additional edges in
809 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
811 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, eh_return_stub_label),
812 basic_block_end[i], 0);
816 /* Check expression X for label references;
817 if one is found, add INSN to the label's chain of references.
819 CHECKDUP means check for and avoid creating duplicate references
820 from the same insn. Such duplicates do no serious harm but
821 can slow life analysis. CHECKDUP is set only when duplicates
/* NOTE(review): some interior lines are not visible in this chunk.
   Recursively scan rtx X for LABEL_REFs; for each label found, link the
   reference onto the label's ref chain, record INSN as the containing
   insn, and mark the label's block live.  CHECKDUP requests duplicate
   suppression (see the comment above this function).  */
825 mark_label_ref (x, insn, checkdup)
829 register RTX_CODE code;
833 /* We can be called with NULL when scanning label_value_list. */
838 if (code == LABEL_REF)
840 register rtx label = XEXP (x, 0);
842 if (GET_CODE (label) != CODE_LABEL)
844 /* If the label was never emitted, this insn is junk,
845 but avoid a crash trying to refer to BLOCK_NUM (label).
846 This can happen as a result of a syntax error
847 and a diagnostic has already been printed. */
848 if (INSN_UID (label) == 0)
850 CONTAINING_INSN (x) = insn;
851 /* if CHECKDUP is set, check for duplicate ref from same insn
/* The ref chain is circular through the label itself, so the label acts
   as the end-of-list sentinel.  */
854 for (y = LABEL_REFS (label); y != label; y = LABEL_NEXTREF (y))
855 if (CONTAINING_INSN (y) == insn)
/* Push this reference onto the front of the label's chain and make the
   label's block reachable.  */
857 LABEL_NEXTREF (x) = LABEL_REFS (label);
858 LABEL_REFS (label) = x;
859 block_live_static[BLOCK_NUM (label)] = 1;
/* Not a LABEL_REF: recurse into sub-expressions, guided by the rtx
   format string.  Vector elements use CHECKDUP = 1 (jump tables can
   contain many duplicate labels).  */
863 fmt = GET_RTX_FORMAT (code);
864 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
867 mark_label_ref (XEXP (x, i), insn, 0);
871 for (j = 0; j < XVECLEN (x, i); j++)
872 mark_label_ref (XVECEXP (x, i, j), insn, 1);
877 /* Now delete the code for any basic blocks that can't be reached.
878 They can occur because jump_optimize does not recognize unreachable loops
880 Return the number of deleted blocks. */
/* NOTE(review): some interior lines are not visible in this chunk.
   Delete every basic block not marked live in block_live_static; if any
   deleted block was an exception handler, also neutralize the now-empty
   EH region begin/end notes.  Returns a count (see caller: the result is
   subtracted from n_basic_blocks).  */
882 delete_unreachable_blocks ()
884 int deleted_handler = 0;
889 for (i = 0; i < n_basic_blocks; i++)
890 if (! block_live_static[i])
894 deleted_handler |= delete_block (i);
897 /* If we deleted an exception handler, we may have EH region
898 begin/end blocks to remove as well. */
900 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
901 if (GET_CODE (insn) == NOTE)
903 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
904 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
906 int num = CODE_LABEL_NUMBER (insn);
907 /* A NULL handler indicates a region is no longer needed */
908 if (get_first_handler (num) == NULL)
/* Demote the region note to a deleted-insn note rather than
   physically removing it.  */
910 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
911 NOTE_SOURCE_FILE (insn) = 0;
918 /* Delete the insns in a (non-live) block. We physically delete every
919 non-note insn except the start and end (so basic_block_head/end needn't
920 be updated), we turn the latter into NOTE_INSN_DELETED notes.
922 We use to "delete" the insns by turning them into notes, but we may be
923 deleting lots of insns that subsequent passes would otherwise have to
924 process. Secondly, lots of deleted blocks in a row can really slow down
925 propagate_block since it will otherwise process insn-turned-notes multiple
926 times when it looks for loop begin/end notes.
928 Return nonzero if we deleted an exception handler. */
/* NOTE(review): the function header of delete_block and some interior lines
   are not visible in this chunk; see the comment block above for its
   contract.  Physically deletes the interior insns of dead block I, turns
   the head and end into NOTE_INSN_DELETED notes, unregisters any deleted
   exception handler label, and removes jumps made into no-ops.  */
933 int deleted_handler = 0;
936 if (basic_block_head[i] != basic_block_end[i])
938 /* It would be quicker to delete all of these with a single
939 unchaining, rather than one at a time, but we need to keep
/* Delete each interior insn; NOTEs and BARRIERs are left in place.  */
941 insn = NEXT_INSN (basic_block_head[i]);
942 while (insn != basic_block_end[i])
944 if (GET_CODE (insn) == BARRIER)
946 else if (GET_CODE (insn) != NOTE)
947 insn = flow_delete_insn (insn);
949 insn = NEXT_INSN (insn);
953 insn = basic_block_head[i];
954 if (GET_CODE (insn) != NOTE)
956 /* Turn the head into a deleted insn note. */
957 if (GET_CODE (insn) == BARRIER)
960 /* If the head of this block is a CODE_LABEL, then it might
961 be the label for an exception handler which can't be
964 We need to remove the label from the exception_handler_label
965 list and remove the associated NOTE_EH_REGION_BEG and
966 NOTE_EH_REGION_END notes. */
967 if (GET_CODE (insn) == CODE_LABEL)
969 rtx x, *prev = &exception_handler_labels;
971 for (x = exception_handler_labels; x; x = XEXP (x, 1))
973 if (XEXP (x, 0) == insn)
975 /* Found a match, splice this label out of the
978 XEXP (x, 1) = NULL_RTX;
979 XEXP (x, 0) = NULL_RTX;
981 /* Remove the handler from all regions */
982 remove_handler (insn);
/* basic_block_head stays valid: the head is converted in place.  */
990 PUT_CODE (insn, NOTE);
991 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
992 NOTE_SOURCE_FILE (insn) = 0;
994 insn = basic_block_end[i];
995 if (GET_CODE (insn) != NOTE)
997 /* Turn the tail into a deleted insn note. */
998 if (GET_CODE (insn) == BARRIER)
1000 PUT_CODE (insn, NOTE);
1001 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1002 NOTE_SOURCE_FILE (insn) = 0;
1004 /* BARRIERs are between basic blocks, not part of one.
1005 Delete a BARRIER if the preceding jump is deleted.
1006 We cannot alter a BARRIER into a NOTE
1007 because it is too short; but we can really delete
1008 it because it is not part of a basic block. */
1009 if (NEXT_INSN (insn) != 0
1010 && GET_CODE (NEXT_INSN (insn)) == BARRIER)
1011 delete_insn (NEXT_INSN (insn));
1013 /* Each time we delete some basic blocks,
1014 see if there is a jump around them that is
1015 being turned into a no-op. If so, delete it. */
1017 if (block_live_static[i - 1])
/* Find J, the next live block after the run of dead blocks starting
   at I.  */
1020 for (j = i + 1; j < n_basic_blocks; j++)
1021 if (block_live_static[j])
1024 insn = basic_block_end[i - 1];
1025 if (GET_CODE (insn) == JUMP_INSN
1026 /* An unconditional jump is the only possibility
1027 we must check for, since a conditional one
1028 would make these blocks live. */
1029 && simplejump_p (insn)
1030 && (label = XEXP (SET_SRC (PATTERN (insn)), 0), 1)
1031 && INSN_UID (label) != 0
1032 && BLOCK_NUM (label) == j)
1036 /* The deleted blocks still show up in the cfg,
1037 so we must set basic_block_drops_in for blocks
1038 I to J inclusive to keep the cfg accurate. */
1039 for (k = i; k <= j; k++)
1040 basic_block_drops_in[k] = 1;
/* Kill the now-useless jump, and any BARRIER that followed it.  */
1042 PUT_CODE (insn, NOTE);
1043 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1044 NOTE_SOURCE_FILE (insn) = 0;
1045 if (GET_CODE (NEXT_INSN (insn)) != BARRIER)
1047 delete_insn (NEXT_INSN (insn));
1053 return deleted_handler;
1056 /* Delete INSN by patching it out.
1057 Return the next insn. */
1060 flow_delete_insn (insn)
1063 /* ??? For the moment we assume we don't have to watch for NULLs here
1064 since the start/end of basic blocks aren't deleted like this. */
1065 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1066 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1067 return NEXT_INSN (insn);
1070 /* Perform data flow analysis.
1071 F is the first insn of the function and NREGS the number of register numbers
1075 life_analysis (f, nregs, file)
1080 #ifdef ELIMINABLE_REGS
1082 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
1085 /* Record which registers will be eliminated. We use this in
/* Start from an empty hard-reg set; each eliminable "from" register is
   then marked so liveness bookkeeping can treat it specially.  */
1088 CLEAR_HARD_REG_SET (elim_reg_set);
1090 #ifdef ELIMINABLE_REGS
1091 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
1092 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
/* NOTE(review): the line below appears to be the fallback when
   ELIMINABLE_REGS is not defined — the #else/#endif lines are elided
   from this listing (numbering gap 1092->1094); confirm against the
   full file.  */
1094 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
/* The real work happens in life_analysis_1; FILE, if non-null,
   presumably receives the dump (guard elided here — verify).  */
1097 life_analysis_1 (f, nregs);
1099 dump_flow_info (file);
/* Free find_basic_blocks data, keeping basic_block_head/end (arg 1).  */
1101 free_basic_block_vars (1);
1104 /* Free the variables allocated by find_basic_blocks.
1106 KEEP_HEAD_END_P is non-zero if basic_block_head and basic_block_end
1107 are not to be freed. */
1110 free_basic_block_vars (keep_head_end_p)
1111 int keep_head_end_p;
/* Each vector is freed and then zeroed so later passes (and
   dump_flow_info) can test the pointer instead of double-freeing.  */
1113 if (basic_block_drops_in)
1115 free (basic_block_drops_in);
1116 /* Tell dump_flow_info this isn't available anymore. */
1117 basic_block_drops_in = 0;
1119 if (basic_block_loop_depth)
1121 free (basic_block_loop_depth);
1122 basic_block_loop_depth = 0;
1124 if (uid_block_number)
1126 free (uid_block_number);
1127 uid_block_number = 0;
/* NOTE(review): the guard `if (uid_volatile)` and the matching
   `uid_volatile = 0;` are elided from this listing (numbering gap
   1127->1131) — confirm against the full file.  */
1131 free (uid_volatile);
1135 if (! keep_head_end_p && basic_block_head)
1137 free (basic_block_head);
1138 basic_block_head = 0;
1139 free (basic_block_end);
1140 basic_block_end = 0;
1144 /* Return nonzero if the destination of SET equals the source. */
/* NOTE(review): the function header (presumably `static int
   set_noop_p (set)`) is elided from this listing (gap 1144->1149).  */
1149 rtx src = SET_SRC (set);
1150 rtx dst = SET_DEST (set);
/* Trivial case: a register copied onto itself.  */
1151 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
1152 && REGNO (src) == REGNO (dst))
/* SUBREG case: a no-op only if both sides are SUBREGs selecting the
   same word of the same register; otherwise not a no-op.  */
1154 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
1155 || SUBREG_WORD (src) != SUBREG_WORD (dst))
1157 src = SUBREG_REG (src);
1158 dst = SUBREG_REG (dst);
1159 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
1160 && REGNO (src) == REGNO (dst))
1165 /* Return nonzero if an insn consists only of SETs, each of which only sets a
/* NOTE(review): the function header for noop_move_p is elided from this
   listing (gap 1165->1171).  */
1171 rtx pat = PATTERN (insn);
1173 /* Insns carrying these notes are useful later on. */
/* A REG_EQUAL note means the insn records a value other passes rely on,
   so it must not be treated as a deletable no-op.  */
1174 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1177 if (GET_CODE (pat) == SET && set_noop_p (pat))
1180 if (GET_CODE (pat) == PARALLEL)
1183 /* If nothing but SETs of registers to themselves,
1184 this insn can also be deleted. */
1185 for (i = 0; i < XVECLEN (pat, 0); i++)
1187 rtx tem = XVECEXP (pat, 0, i);
/* USE and CLOBBER elements are ignored for the no-op test.  */
1189 if (GET_CODE (tem) == USE
1190 || GET_CODE (tem) == CLOBBER)
1193 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
/* Callback for note_stores (see record_volatile_insns below): clears
   current_function_sp_is_unchanging when X shows the stack pointer is
   modified, either directly or via a pre/post inc/dec memory access.  */
1203 notice_stack_pointer_modification (x, pat)
1205 rtx pat ATTRIBUTE_UNUSED;
1207 if (x == stack_pointer_rtx
1208 /* The stack pointer is only modified indirectly as the result
1209 of a push until later in flow. See the comments in rtl.texi
1210 regarding Embedded Side-Effects on Addresses. */
1211 || (GET_CODE (x) == MEM
1212 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
1213 || GET_CODE (XEXP (x, 0)) == PRE_INC
1214 || GET_CODE (XEXP (x, 0)) == POST_DEC
1215 || GET_CODE (XEXP (x, 0)) == POST_INC)
1216 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
1217 current_function_sp_is_unchanging = 0;
1220 /* Record which insns refer to any volatile memory
1221 or for any reason can't be deleted just because they are dead stores.
1222 Also, delete any insns that copy a register to itself.
1223 And see if the stack pointer is modified. */
1225 record_volatile_insns (f)
/* Walk every insn of the function once, setting INSN_VOLATILE on those
   that must never be removed as dead stores.  */
1229 for (insn = f; insn; insn = NEXT_INSN (insn))
1231 enum rtx_code code1 = GET_CODE (insn);
/* Calls are always considered volatile: they have side effects
   liveness cannot see.  */
1232 if (code1 == CALL_INSN)
1233 INSN_VOLATILE (insn) = 1;
1234 else if (code1 == INSN || code1 == JUMP_INSN)
1236 if (GET_CODE (PATTERN (insn)) != USE
1237 && volatile_refs_p (PATTERN (insn)))
1238 INSN_VOLATILE (insn) = 1;
1240 /* A SET that makes space on the stack cannot be dead.
1241 (Such SETs occur only for allocating variable-size data,
1242 so they will always have a PLUS or MINUS according to the
1243 direction of stack growth.)
1244 Even if this function never uses this stack pointer value,
1245 signal handlers do! */
1246 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
1247 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1248 #ifdef STACK_GROWS_DOWNWARD
1249 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
/* NOTE(review): the #else for the upward-growing-stack case is elided
   from this listing (gap 1249->1251).  */
1251 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1253 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
1254 INSN_VOLATILE (insn) = 1;
1256 /* Delete (in effect) any obvious no-op moves. */
/* "Deletion" here is conversion into a NOTE_INSN_DELETED note, which
   keeps the insn chain intact for the rest of flow.  */
1257 else if (noop_move_p (insn))
1259 PUT_CODE (insn, NOTE);
1260 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1261 NOTE_SOURCE_FILE (insn) = 0;
1265 /* Check if insn modifies the stack pointer. */
1266 if ( current_function_sp_is_unchanging
1267 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1268 note_stores (PATTERN (insn), notice_stack_pointer_modification);
1272 /* Mark those regs which are needed at the end of the function as live
1273 at the end of the last basic block. */
1275 mark_regs_live_at_end (set)
/* SET is the regset for the exit block; bits are only ever added here.  */
1280 #ifdef EXIT_IGNORE_STACK
1281 if (! EXIT_IGNORE_STACK
1282 || (! FRAME_POINTER_REQUIRED
1283 && ! current_function_calls_alloca
1284 && flag_omit_frame_pointer)
1285 || current_function_sp_is_unchanging)
1287 /* If exiting needs the right stack value,
1288 consider the stack pointer live at the end of the function. */
1289 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
1291 /* Mark the frame pointer is needed at the end of the function. If
1292 we end up eliminating it, it will be removed from the live list
1293 of each basic block by reload. */
1295 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
1296 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1297 /* If they are different, also mark the hard frame pointer as live */
1298 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
1302 /* Mark all global registers and all registers used by the epilogue
1303 as being live at the end of the function since they may be
1304 referenced by our caller. */
/* NOTE(review): the `if (global_regs[i] ...)` guard inside this loop
   is elided from the listing (gap 1305->1307).  */
1305 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1307 #ifdef EPILOGUE_USES
1308 || EPILOGUE_USES (i)
1311 SET_REGNO_REG_SET (set, i);
1314 /* Determine which registers are live at the start of each
1315 basic block of the function whose first insn is F.
1316 NREGS is the number of registers used in F.
1317 We allocate the vector basic_block_live_at_start
1318 and the regsets that it points to, and fill them with the data.
1319 regset_size and regset_bytes are also set here. */
1322 life_analysis_1 (f, nregs)
1328 /* For each basic block, a bitmask of regs
1329 live on exit from the block. */
1330 regset *basic_block_live_at_end;
1331 /* For each basic block, a bitmask of regs
1332 live on entry to a successor-block of this block.
1333 If this does not match basic_block_live_at_end,
1334 that must be updated, and the block must be rescanned. */
1335 regset *basic_block_new_live_at_end;
1336 /* For each basic block, a bitmask of regs
1337 whose liveness at the end of the basic block
1338 can make a difference in which regs are live on entry to the block.
1339 These are the regs that are set within the basic block,
1340 possibly excluding those that are used after they are set. */
1341 regset *basic_block_significant;
1343 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
1345 struct obstack flow_obstack;
/* All temporary regsets for this pass come off a private obstack,
   released in one shot at the end of the function.  */
1347 gcc_obstack_init (&flow_obstack);
1351 /* The post-reload life analysis have (on a global basis) the same registers
1352 live as was computed by reload itself.
1354 Otherwise elimination offsets and such may be incorrect.
1356 Reload will make some registers as live even though they do not appear
1358 if (reload_completed)
1359 bcopy (regs_ever_live, save_regs_ever_live, (sizeof (regs_ever_live)));
1361 /* Also remove all CLOBBER insns after reload. They can cause us to think
1362 a value is dead when it really is not dead. */
1363 if (reload_completed)
1367 for (insn = f; insn; insn = NEXT_INSN (insn))
1369 if (GET_CODE (insn) == INSN
1370 && GET_CODE (PATTERN (insn)) == CLOBBER)
1372 PUT_CODE (insn, NOTE);
1373 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1374 NOTE_SOURCE_FILE (insn) = 0;
/* regs_ever_live is recomputed from scratch by this pass; the saved
   copy above is restored at the end when reload has already run.  */
1378 bzero (regs_ever_live, sizeof regs_ever_live);
1380 /* Allocate and zero out many data structures
1381 that will record the data from lifetime analysis. */
1383 allocate_for_life_analysis ();
1385 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
1386 bzero ((char *) reg_next_use, nregs * sizeof (rtx));
1388 /* Set up several regset-vectors used internally within this function.
1389 Their meanings are documented above, with their declarations. */
1391 basic_block_live_at_end
1392 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1394 /* Don't use alloca since that leads to a crash rather than an error message
1395 if there isn't enough space.
1396 Don't use oballoc since we may need to allocate other things during
1397 this function on the temporary obstack. */
1398 init_regset_vector (basic_block_live_at_end, n_basic_blocks, &flow_obstack);
1400 basic_block_new_live_at_end
1401 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1402 init_regset_vector (basic_block_new_live_at_end, n_basic_blocks,
1405 basic_block_significant
1406 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1407 init_regset_vector (basic_block_significant, n_basic_blocks, &flow_obstack);
1409 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
1410 This will be cleared by record_volatile_insns if it encounters an insn
1411 which modifies the stack pointer. */
1412 current_function_sp_is_unchanging = !current_function_calls_alloca;
1414 record_volatile_insns (f);
/* Seed the last block's live-at-end set with everything the function
   exit needs; the relaxation below propagates from there.  */
1416 if (n_basic_blocks > 0)
1418 mark_regs_live_at_end (basic_block_live_at_end[n_basic_blocks - 1]);
1419 COPY_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1420 basic_block_live_at_end[n_basic_blocks - 1]);
1423 /* Propagate life info through the basic blocks
1424 around the graph of basic blocks.
1426 This is a relaxation process: each time a new register
1427 is live at the end of the basic block, we must scan the block
1428 to determine which registers are, as a consequence, live at the beginning
1429 of that block. These registers must then be marked live at the ends
1430 of all the blocks that can transfer control to that block.
1431 The process continues until it reaches a fixed point. */
/* NOTE(review): the enclosing `while (changed)`-style outer loop and
   first_pass bookkeeping are elided from this listing (gap 1431->1438).  */
1438 for (i = n_basic_blocks - 1; i >= 0; i--)
1440 int consider = first_pass;
1441 int must_rescan = first_pass;
1446 /* Set CONSIDER if this block needs thinking about at all
1447 (that is, if the regs live now at the end of it
1448 are not the same as were live at the end of it when
1449 we last thought about it).
1450 Set must_rescan if it needs to be thought about
1451 instruction by instruction (that is, if any additional
1452 reg that is live at the end now but was not live there before
1453 is one of the significant regs of this basic block). */
1455 EXECUTE_IF_AND_COMPL_IN_REG_SET
1456 (basic_block_new_live_at_end[i],
1457 basic_block_live_at_end[i], 0, j,
1460 if (!reload_completed
1461 && REGNO_REG_SET_P (basic_block_significant[i], j))
1472 /* The live_at_start of this block may be changing,
1473 so another pass will be required after this one. */
1478 /* No complete rescan needed;
1479 just record those variables newly known live at end
1480 as live at start as well. */
1481 IOR_AND_COMPL_REG_SET (basic_block_live_at_start[i],
1482 basic_block_new_live_at_end[i],
1483 basic_block_live_at_end[i]);
1485 IOR_AND_COMPL_REG_SET (basic_block_live_at_end[i],
1486 basic_block_new_live_at_end[i],
1487 basic_block_live_at_end[i]);
1491 /* Update the basic_block_live_at_start
1492 by propagation backwards through the block. */
1493 COPY_REG_SET (basic_block_live_at_end[i],
1494 basic_block_new_live_at_end[i]);
1495 COPY_REG_SET (basic_block_live_at_start[i],
1496 basic_block_live_at_end[i]);
1497 propagate_block (basic_block_live_at_start[i],
1498 basic_block_head[i], basic_block_end[i], 0,
1499 first_pass ? basic_block_significant[i]
1505 register rtx jump, head;
1507 /* Update the basic_block_new_live_at_end's of the block
1508 that falls through into this one (if any). */
1509 head = basic_block_head[i];
1510 if (basic_block_drops_in[i])
1511 IOR_REG_SET (basic_block_new_live_at_end[i-1],
1512 basic_block_live_at_start[i]);
1514 /* Update the basic_block_new_live_at_end's of
1515 all the blocks that jump to this one. */
1516 if (GET_CODE (head) == CODE_LABEL)
1517 for (jump = LABEL_REFS (head);
1519 jump = LABEL_NEXTREF (jump))
1521 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
1522 IOR_REG_SET (basic_block_new_live_at_end[from_block],
1523 basic_block_live_at_start[i]);
1533 /* The only pseudos that are live at the beginning of the function are
1534 those that were not set anywhere in the function. local-alloc doesn't
1535 know how to handle these correctly, so mark them as not local to any
1538 if (n_basic_blocks > 0)
1539 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[0],
1540 FIRST_PSEUDO_REGISTER, i,
1542 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1545 /* Now the life information is accurate.
1546 Make one more pass over each basic block
1547 to delete dead stores, create autoincrement addressing
1548 and record how many times each register is used, is set, or dies.
1550 To save time, we operate directly in basic_block_live_at_end[i],
1551 thus destroying it (in fact, converting it into a copy of
1552 basic_block_live_at_start[i]). This is ok now because
1553 basic_block_live_at_end[i] is no longer used past this point. */
1555 for (i = 0; i < n_basic_blocks; i++)
1557 propagate_block (basic_block_live_at_end[i],
1558 basic_block_head[i], basic_block_end[i], 1,
1566 /* Something live during a setjmp should not be put in a register
1567 on certain machines which restore regs from stack frames
1568 rather than from the jmpbuf.
1569 But we don't need to do this for the user's variables, since
1570 ANSI says only volatile variables need this. */
1571 #ifdef LONGJMP_RESTORE_FROM_STACK
1572 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1573 FIRST_PSEUDO_REGISTER, i,
1575 if (regno_reg_rtx[i] != 0
1576 && ! REG_USERVAR_P (regno_reg_rtx[i]))
/* REG_LIVE_LENGTH/REG_BASIC_BLOCK of -1 flags the pseudo so the
   allocators keep it out of hard registers.  */
1578 REG_LIVE_LENGTH (i) = -1;
1579 REG_BASIC_BLOCK (i) = -1;
1585 /* We have a problem with any pseudoreg that
1586 lives across the setjmp. ANSI says that if a
1587 user variable does not change in value
1588 between the setjmp and the longjmp, then the longjmp preserves it.
1589 This includes longjmp from a place where the pseudo appears dead.
1590 (In principle, the value still exists if it is in scope.)
1591 If the pseudo goes in a hard reg, some other value may occupy
1592 that hard reg where this pseudo is dead, thus clobbering the pseudo.
1593 Conclusion: such a pseudo must not go in a hard reg. */
1594 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1595 FIRST_PSEUDO_REGISTER, i,
1597 if (regno_reg_rtx[i] != 0)
1599 REG_LIVE_LENGTH (i) = -1;
1600 REG_BASIC_BLOCK (i) = -1;
1604 /* Restore regs_ever_live that was provided by reload. */
1605 if (reload_completed)
1606 bcopy (save_regs_ever_live, regs_ever_live, (sizeof (regs_ever_live)));
1608 free_regset_vector (basic_block_live_at_end, n_basic_blocks);
1609 free_regset_vector (basic_block_new_live_at_end, n_basic_blocks);
1610 free_regset_vector (basic_block_significant, n_basic_blocks);
1611 basic_block_live_at_end = (regset *)0;
1612 basic_block_new_live_at_end = (regset *)0;
1613 basic_block_significant = (regset *)0;
1615 obstack_free (&flow_obstack, NULL_PTR);
1618 /* Subroutines of life analysis. */
1620 /* Allocate the permanent data structures that represent the results
1621 of life analysis. Not static since used also for stupid life analysis. */
1624 allocate_for_life_analysis ()
1628 /* Recalculate the register space, in case it has grown. Old style
1629 vector oriented regsets would set regset_{size,bytes} here also. */
1630 allocate_reg_info (max_regno, FALSE, FALSE);
1632 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
1633 information, explicitly reset it here. The allocation should have
1634 already happened on the previous reg_scan pass. Make sure in case
1635 some more registers were allocated. */
/* NOTE(review): the loop body resetting REG_N_SETS is elided from this
   listing (gap 1636->1639).  */
1636 for (i = 0; i < max_regno; i++)
/* basic_block_live_at_start is a pass result, so it lives on the
   permanent obstack (oballoc), unlike the temporaries in
   life_analysis_1.  */
1639 basic_block_live_at_start
1640 = (regset *) oballoc (n_basic_blocks * sizeof (regset));
1641 init_regset_vector (basic_block_live_at_start, n_basic_blocks,
1644 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
1645 CLEAR_REG_SET (regs_live_at_setjmp);
1648 /* Make each element of VECTOR point at a regset. The vector has
1649 NELTS elements, and space is allocated from the ALLOC_OBSTACK
1653 init_regset_vector (vector, nelts, alloc_obstack)
1656 struct obstack *alloc_obstack;
/* Every regset is allocated from the caller's obstack and cleared, so
   the caller controls the storage lifetime of the whole vector.  */
1660 for (i = 0; i < nelts; i++)
1662 vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
1663 CLEAR_REG_SET (vector[i]);
1667 /* Release any additional space allocated for each element of VECTOR point
1668 other than the regset header itself. The vector has NELTS elements. */
1671 free_regset_vector (vector, nelts)
/* Note: frees only the per-element regsets, not VECTOR itself (which
   callers allocate with alloca or on an obstack).  */
1677 for (i = 0; i < nelts; i++)
1678 FREE_REG_SET (vector[i]);
1681 /* Compute the registers live at the beginning of a basic block
1682 from those live at the end.
1684 When called, OLD contains those live at the end.
1685 On return, it contains those live at the beginning.
1686 FIRST and LAST are the first and last insns of the basic block.
1688 FINAL is nonzero if we are doing the final pass which is not
1689 for computing the life info (since that has already been done)
1690 but for acting on it. On this pass, we delete dead stores,
1691 set up the logical links and dead-variables lists of instructions,
1692 and merge instructions for autoincrement and autodecrement addresses.
1694 SIGNIFICANT is nonzero only the first time for each basic block.
1695 If it is nonzero, it points to a regset in which we store
1696 a 1 for each register that is set within the block.
1698 BNUM is the number of the basic block. */
1701 propagate_block (old, first, last, final, significant, bnum)
1702 register regset old;
1714 /* The loop depth may change in the middle of a basic block. Since we
1715 scan from end to beginning, we start with the depth at the end of the
1716 current basic block, and adjust as we pass ends and starts of loops. */
1717 loop_depth = basic_block_loop_depth[bnum];
/* DEAD collects regs set by the current insn, LIVE the regs it uses;
   both are scratch sets reused for every insn in the backward scan.  */
1719 dead = ALLOCA_REG_SET ();
1720 live = ALLOCA_REG_SET ();
1725 /* Include any notes at the end of the block in the scan.
1726 This is in case the block ends with a call to setjmp. */
1728 while (NEXT_INSN (last) != 0 && GET_CODE (NEXT_INSN (last)) == NOTE)
1730 /* Look for loop boundaries, we are going forward here. */
1731 last = NEXT_INSN (last);
1732 if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_BEG)
1734 else if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_END)
1742 /* Process the regs live at the end of the block.
1743 Mark them as not local to any one basic block. */
1744 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1746 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1750 /* Scan the block an insn at a time from end to beginning. */
1752 for (insn = last; ; insn = prev)
1754 prev = PREV_INSN (insn);
1756 if (GET_CODE (insn) == NOTE)
1758 /* Look for loop boundaries, remembering that we are going
1760 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1762 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1765 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
1766 Abort now rather than setting register status incorrectly. */
1767 if (loop_depth == 0)
1770 /* If this is a call to `setjmp' et al,
1771 warn if any non-volatile datum is live. */
1773 if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
1774 IOR_REG_SET (regs_live_at_setjmp, old);
1777 /* Update the life-status of regs for this insn.
1778 First DEAD gets which regs are set in this insn
1779 then LIVE gets which regs are used in this insn.
1780 Then the regs live before the insn
1781 are those live after, with DEAD regs turned off,
1782 and then LIVE regs turned on. */
1784 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1787 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
1789 = (insn_dead_p (PATTERN (insn), old, 0)
1790 /* Don't delete something that refers to volatile storage! */
1791 && ! INSN_VOLATILE (insn));
1793 = (insn_is_dead && note != 0
1794 && libcall_dead_p (PATTERN (insn), old, note, insn));
1796 /* If an instruction consists of just dead store(s) on final pass,
1797 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
1798 We could really delete it with delete_insn, but that
1799 can cause trouble for first or last insn in a basic block. */
1800 if (!reload_completed && final && insn_is_dead)
1802 PUT_CODE (insn, NOTE);
1803 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1804 NOTE_SOURCE_FILE (insn) = 0;
1806 /* CC0 is now known to be dead. Either this insn used it,
1807 in which case it doesn't anymore, or clobbered it,
1808 so the next insn can't use it. */
1811 /* If this insn is copying the return value from a library call,
1812 delete the entire library call. */
1813 if (libcall_is_dead)
1815 rtx first = XEXP (note, 0);
/* The REG_RETVAL note points at the first insn of the libcall
   sequence; skip any members already deleted.  */
1817 while (INSN_DELETED_P (first))
1818 first = NEXT_INSN (first);
1823 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
1824 NOTE_SOURCE_FILE (p) = 0;
1830 CLEAR_REG_SET (dead);
1831 CLEAR_REG_SET (live);
1833 /* See if this is an increment or decrement that can be
1834 merged into a following memory address. */
/* NOTE(review): the #ifdef AUTO_INC_DEC opening for this region is
   elided from the listing; only its #endif is visible below.  */
1837 register rtx x = single_set (insn);
1839 /* Does this instruction increment or decrement a register? */
1840 if (!reload_completed
1842 && GET_CODE (SET_DEST (x)) == REG
1843 && (GET_CODE (SET_SRC (x)) == PLUS
1844 || GET_CODE (SET_SRC (x)) == MINUS)
1845 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1846 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1847 /* Ok, look for a following memory ref we can combine with.
1848 If one is found, change the memory ref to a PRE_INC
1849 or PRE_DEC, cancel this insn, and return 1.
1850 Return 0 if nothing has been done. */
1851 && try_pre_increment_1 (insn))
1854 #endif /* AUTO_INC_DEC */
1856 /* If this is not the final pass, and this insn is copying the
1857 value of a library call and it's dead, don't scan the
1858 insns that perform the library call, so that the call's
1859 arguments are not marked live. */
1860 if (libcall_is_dead)
1862 /* Mark the dest reg as `significant'. */
1863 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
/* Jump straight to the insn before the libcall so none of the
   call's argument setup gets scanned.  */
1865 insn = XEXP (note, 0);
1866 prev = PREV_INSN (insn);
1868 else if (GET_CODE (PATTERN (insn)) == SET
1869 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1870 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1871 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1872 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1873 /* We have an insn to pop a constant amount off the stack.
1874 (Such insns use PLUS regardless of the direction of the stack,
1875 and any insn to adjust the stack by a constant is always a pop.)
1876 These insns, if not dead stores, have no effect on life. */
1880 /* Any regs live at the time of a call instruction
1881 must not go in a register clobbered by calls.
1882 Find all regs now live and record this for them. */
1884 if (GET_CODE (insn) == CALL_INSN && final)
1885 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1887 REG_N_CALLS_CROSSED (i)++;
1890 /* LIVE gets the regs used in INSN;
1891 DEAD gets those set by it. Dead insns don't make anything
1894 mark_set_regs (old, dead, PATTERN (insn),
1895 final ? insn : NULL_RTX, significant);
1897 /* If an insn doesn't use CC0, it becomes dead since we
1898 assume that every insn clobbers it. So show it dead here;
1899 mark_used_regs will set it live if it is referenced. */
1903 mark_used_regs (old, live, PATTERN (insn), final, insn);
1905 /* Sometimes we may have inserted something before INSN (such as
1906 a move) when we make an auto-inc. So ensure we will scan
1909 prev = PREV_INSN (insn);
1912 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
/* Regs explicitly used as call arguments are listed in the
   CALL_INSN_FUNCTION_USAGE chain as USE expressions.  */
1918 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1920 note = XEXP (note, 1))
1921 if (GET_CODE (XEXP (note, 0)) == USE)
1922 mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
1925 /* Each call clobbers all call-clobbered regs that are not
1926 global or fixed. Note that the function-value reg is a
1927 call-clobbered reg, and mark_set_regs has already had
1928 a chance to handle it. */
1930 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1931 if (call_used_regs[i] && ! global_regs[i]
1933 SET_REGNO_REG_SET (dead, i);
1935 /* The stack ptr is used (honorarily) by a CALL insn. */
1936 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
1938 /* Calls may also reference any of the global registers,
1939 so they are made live. */
1940 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1942 mark_used_regs (old, live,
1943 gen_rtx_REG (reg_raw_mode[i], i),
1946 /* Calls also clobber memory. */
1950 /* Update OLD for the registers used or set. */
1951 AND_COMPL_REG_SET (old, dead);
1952 IOR_REG_SET (old, live);
1956 /* On final pass, update counts of how many insns each reg is live
1959 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1960 { REG_LIVE_LENGTH (i)++; });
1967 FREE_REG_SET (dead);
1968 FREE_REG_SET (live);
1971 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
1972 (SET expressions whose destinations are registers dead after the insn).
1973 NEEDED is the regset that says which regs are alive after the insn.
1975 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL. */
1978 insn_dead_p (x, needed, call_ok)
1983 enum rtx_code code = GET_CODE (x);
1985 /* If setting something that's a reg or part of one,
1986 see if that register's altered value will be live. */
1990 rtx r = SET_DEST (x);
1992 /* A SET that is a subroutine call cannot be dead. */
1993 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
/* NOTE(review): the return statements for the individual tests in
   this function are elided from the listing (numbering gaps); each
   condition presumably returns 0 or 1 — verify against the full file.  */
1997 if (GET_CODE (r) == CC0)
/* A store to memory is dead only if it re-stores the exact value we
   know was last stored there (last_mem_set), and is not volatile.  */
2001 if (GET_CODE (r) == MEM && last_mem_set && ! MEM_VOLATILE_P (r)
2002 && rtx_equal_p (r, last_mem_set))
/* Strip wrappers to reach the underlying register.  */
2005 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
2006 || GET_CODE (r) == ZERO_EXTRACT)
2009 if (GET_CODE (r) == REG)
2011 int regno = REGNO (r);
2013 /* Don't delete insns to set global regs. */
2014 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2015 /* Make sure insns to set frame pointer aren't deleted. */
2016 || regno == FRAME_POINTER_REGNUM
2017 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2018 || regno == HARD_FRAME_POINTER_REGNUM
2020 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2021 /* Make sure insns to set arg pointer are never deleted
2022 (if the arg pointer isn't fixed, there will be a USE for
2023 it, so we can treat it normally). */
2024 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2026 || REGNO_REG_SET_P (needed, regno))
2029 /* If this is a hard register, verify that subsequent words are
2031 if (regno < FIRST_PSEUDO_REGISTER)
/* A multi-word hard-reg value occupies HARD_REGNO_NREGS consecutive
   registers; the store is live if any of them is needed.  */
2033 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
2036 if (REGNO_REG_SET_P (needed, regno+n))
2044 /* If performing several activities,
2045 insn is dead if each activity is individually dead.
2046 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
2047 that's inside a PARALLEL doesn't make the insn worth keeping. */
2048 else if (code == PARALLEL)
2050 int i = XVECLEN (x, 0);
2052 for (i--; i >= 0; i--)
2053 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
2054 && GET_CODE (XVECEXP (x, 0, i)) != USE
2055 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok))
2061 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
2062 is not necessarily true for hard registers. */
2063 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
2064 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
2065 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
2068 /* We do not check other CLOBBER or USE here. An insn consisting of just
2069 a CLOBBER or just a USE should not be deleted. */
2073 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
2074 return 1 if the entire library call is dead.
2075 This is true if X copies a register (hard or pseudo)
2076 and if the hard return reg of the call insn is dead.
2077 (The caller should have tested the destination of X already for death.)
2079 If this insn doesn't just copy a register, then we don't
2080 have an ordinary libcall. In that case, cse could not have
2081 managed to substitute the source for the dest later on,
2082 so we can assume the libcall is dead.
2084 NEEDED is the bit vector of pseudoregs live before this insn.
2085 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
2088 libcall_dead_p (x, needed, note, insn)
2094 register RTX_CODE code = GET_CODE (x);
2098 register rtx r = SET_SRC (x);
2099 if (GET_CODE (r) == REG)
/* The REG_RETVAL note points at the first insn of the libcall
   sequence; scan forward from it to find the CALL_INSN.  */
2101 rtx call = XEXP (note, 0);
2104 /* Find the call insn. */
2105 while (call != insn && GET_CODE (call) != CALL_INSN)
2106 call = NEXT_INSN (call);
2108 /* If there is none, do nothing special,
2109 since ordinary death handling can understand these insns. */
2113 /* See if the hard reg holding the value is dead.
2114 If this is a PARALLEL, find the call within it. */
2115 call = PATTERN (call);
2116 if (GET_CODE (call) == PARALLEL)
2118 for (i = XVECLEN (call, 0) - 1; i >= 0; i--)
2119 if (GET_CODE (XVECEXP (call, 0, i)) == SET
2120 && GET_CODE (SET_SRC (XVECEXP (call, 0, i))) == CALL)
2123 /* This may be a library call that is returning a value
2124 via invisible pointer. Do nothing special, since
2125 ordinary death handling can understand these insns. */
2129 call = XVECEXP (call, 0, i);
/* CALL_OK == 1: the embedded CALL does not by itself keep the insn
   alive, since we are deciding whether the whole libcall is dead.  */
2132 return insn_dead_p (call, needed, 1);
2138 /* Return 1 if register REGNO was used before it was set, i.e. if it is
2139 live at function entry. Don't count global register variables, variables
2140 in registers that can be used for function arg passing, or variables in
2141 fixed hard registers. */
2144 regno_uninitialized (regno)
/* With no basic blocks, or for hard regs that may legitimately carry a
   value at entry (globals, fixed regs, argument regs), answer "no".  */
2147 if (n_basic_blocks == 0
2148 || (regno < FIRST_PSEUDO_REGISTER
2149 && (global_regs[regno]
2150 || fixed_regs[regno]
2151 || FUNCTION_ARG_REGNO_P (regno))))
/* Live at the start of block 0 means live at function entry, i.e.
   used before any set.  */
2154 return REGNO_REG_SET_P (basic_block_live_at_start[0], regno);
2157 /* 1 if register REGNO was alive at a place where `setjmp' was called
2158 and was set more than once or is an argument.
2159 Such regs may be clobbered by `longjmp'. */
2162 regno_clobbered_at_setjmp (regno)
2165 if (n_basic_blocks == 0)
/* Live at block 0's start counts as "set" too, since an argument reg
   receives its value at function entry.  */
2168 return ((REG_N_SETS (regno) > 1
2169 || REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2170 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2173 /* Process the registers that are set within X.
2174 Their bits are set to 1 in the regset DEAD,
2175 because they are dead prior to this insn.
2177 If INSN is nonzero, it is the insn being processed
2178 and the fact that it is nonzero implies this is the FINAL pass
2179 in propagate_block. In this case, various info about register
2180 usage is stored, LOG_LINKS fields of insns are set up. */
2183 mark_set_regs (needed, dead, x, insn, significant)
2190 register RTX_CODE code = GET_CODE (x);
/* A lone SET or CLOBBER is handled directly by mark_set_1.  */
2192 if (code == SET || code == CLOBBER)
2193 mark_set_1 (needed, dead, x, insn, significant);
2194 else if (code == PARALLEL)
/* For a PARALLEL, hand each SET/CLOBBER element to mark_set_1;
   other element kinds (e.g. USE) are skipped.  */
2197 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2199 code = GET_CODE (XVECEXP (x, 0, i));
2200 if (code == SET || code == CLOBBER)
2201 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
2206 /* Process a single SET rtx, X. */
/* mark_set_1: record the register(s) written by SET/CLOBBER X into DEAD and
   SIGNIFICANT, and -- on the final pass (INSN nonzero) -- update per-register
   statistics, LOG_LINKS, and REG_UNUSED notes.
   NOTE(review): many source lines (declarations, braces, else-arms) are
   elided from this listing; comments below annotate only the visible logic.  */
2209 mark_set_1 (needed, dead, x, insn, significant)
2217 register rtx reg = SET_DEST (x);
2219 /* Some targets place small structures in registers for
2220 return values of functions. We have to detect this
2221 case specially here to get correct flow information. */
/* A BLKmode PARALLEL destination is a multi-register struct return:
   recurse on each element, then (presumably) return -- confirm elided code.  */
2222 if (GET_CODE (reg) == PARALLEL
2223 && GET_MODE (reg) == BLKmode)
2227 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
2228 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn, significant);
2232 /* Modifying just one hardware register of a multi-reg value
2233 or just a byte field of a register
2234 does not mean the value from before this insn is now dead.
2235 But it does mean liveness of that register at the end of the block
2238 Within mark_set_1, however, we treat it as if the register is
2239 indeed modified. mark_used_regs will, however, also treat this
2240 register as being used. Thus, we treat these insns as setting a
2241 new value for the register as a function of its old value. This
2242 cases LOG_LINKS to be made appropriately and this will help combine. */
/* Strip wrappers down to the underlying REG or MEM being stored into.  */
2244 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
2245 || GET_CODE (reg) == SIGN_EXTRACT
2246 || GET_CODE (reg) == STRICT_LOW_PART)
2247 reg = XEXP (reg, 0);
2249 /* If we are writing into memory or into a register mentioned in the
2250 address of the last thing stored into memory, show we don't know
2251 what the last store was. If we are writing memory, save the address
2252 unless it is volatile. */
2253 if (GET_CODE (reg) == MEM
2254 || (GET_CODE (reg) == REG
2255 && last_mem_set != 0 && reg_overlap_mentioned_p (reg, last_mem_set)))
2258 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2259 /* There are no REG_INC notes for SP, so we can't assume we'll see
2260 everything that invalidates it. To be safe, don't eliminate any
2261 stores though SP; none of them should be redundant anyway. */
2262 && ! reg_mentioned_p (stack_pointer_rtx, reg))
/* Main case: a store into an ordinary REG (not the frame/hard-frame/arg
   pointer when those are distinct and fixed, and not a global hard reg).  */
2265 if (GET_CODE (reg) == REG
2266 && (regno = REGNO (reg), regno != FRAME_POINTER_REGNUM)
2267 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2268 && regno != HARD_FRAME_POINTER_REGNUM
2270 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2271 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2273 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
2274 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
/* some_needed / some_not_needed track whether any (resp. not all) words of
   a possibly multi-word register are still needed after this insn.  */
2276 int some_needed = REGNO_REG_SET_P (needed, regno);
2277 int some_not_needed = ! some_needed;
2279 /* Mark it as a significant register for this basic block. */
2281 SET_REGNO_REG_SET (significant, regno);
2283 /* Mark it as dead before this insn. */
2284 SET_REGNO_REG_SET (dead, regno);
2286 /* A hard reg in a wide mode may really be multiple registers.
2287 If so, mark all of them just like the first. */
2288 if (regno < FIRST_PSEUDO_REGISTER)
2292 /* Nothing below is needed for the stack pointer; get out asap.
2293 Eg, log links aren't needed, since combine won't use them. */
2294 if (regno == STACK_POINTER_REGNUM)
2297 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2300 int regno_n = regno + n;
2301 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2303 SET_REGNO_REG_SET (significant, regno_n);
2305 SET_REGNO_REG_SET (dead, regno_n);
2306 some_needed |= needed_regno;
2307 some_not_needed |= ! needed_regno;
2310 /* Additional data to record if this is the final pass. */
2313 register rtx y = reg_next_use[regno];
2314 register int blocknum = BLOCK_NUM (insn);
2316 /* If this is a hard reg, record this function uses the reg. */
2318 if (regno < FIRST_PSEUDO_REGISTER)
2321 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
2323 for (i = regno; i < endregno; i++)
2325 /* The next use is no longer "next", since a store
2327 reg_next_use[i] = 0;
2329 regs_ever_live[i] = 1;
2335 /* The next use is no longer "next", since a store
2337 reg_next_use[regno] = 0;
2339 /* Keep track of which basic blocks each reg appears in. */
2341 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2342 REG_BASIC_BLOCK (regno) = blocknum;
2343 else if (REG_BASIC_BLOCK (regno) != blocknum)
2344 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2346 /* Count (weighted) references, stores, etc. This counts a
2347 register twice if it is modified, but that is correct. */
2348 REG_N_SETS (regno)++;
2350 REG_N_REFS (regno) += loop_depth;
2352 /* The insns where a reg is live are normally counted
2353 elsewhere, but we want the count to include the insn
2354 where the reg is set, and the normal counting mechanism
2355 would not count it. */
2356 REG_LIVE_LENGTH (regno)++;
/* Every word of the set register is still needed: link the next use back
   to this insn via LOG_LINKS (for combine).  */
2359 if (! some_not_needed)
2361 /* Make a logical link from the next following insn
2362 that uses this register, back to this insn.
2363 The following insns have already been processed.
2365 We don't build a LOG_LINK for hard registers containing
2366 in ASM_OPERANDs. If these registers get replaced,
2367 we might wind up changing the semantics of the insn,
2368 even if reload can make what appear to be valid assignments
2370 if (y && (BLOCK_NUM (y) == blocknum)
2371 && (regno >= FIRST_PSEUDO_REGISTER
2372 || asm_noperands (PATTERN (y)) < 0))
2374 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
/* No word of the register is needed afterwards: the store is dead but
   undeletable, so mark it with a REG_UNUSED note.  */
2376 else if (! some_needed)
2378 /* Note that dead stores have already been deleted when possible
2379 If we get here, we have found a dead store that cannot
2380 be eliminated (because the same insn does something useful).
2381 Indicate this by marking the reg being set as dying here. */
2383 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2384 REG_N_DEATHS (REGNO (reg))++;
2388 /* This is a case where we have a multi-word hard register
2389 and some, but not all, of the words of the register are
2390 needed in subsequent insns. Write REG_UNUSED notes
2391 for those parts that were not needed. This case should
2396 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
2398 if (!REGNO_REG_SET_P (needed, regno + i))
2400 = gen_rtx_EXPR_LIST (REG_UNUSED,
2401 gen_rtx_REG (reg_raw_mode[regno + i],
/* Fallback: a REG destination excluded above (e.g. frame pointer) still
   invalidates its recorded next use.  */
2407 else if (GET_CODE (reg) == REG)
2408 reg_next_use[regno] = 0;
2410 /* If this is the last pass and this is a SCRATCH, show it will be dying
2411 here and count it. */
2412 else if (GET_CODE (reg) == SCRATCH && insn != 0)
2415 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2421 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
/* find_auto_inc: if the address register of MEM X is next used in an insn
   that adds/subtracts exactly the access size, rewrite X to use a
   {PRE,POST}_{INC,DEC} address and neutralize or delete the increment insn.
   NOTE(review): compiled only under AUTO_INC_DEC (see the #endif at the
   bottom); declarations, braces and several early-return paths are elided
   from this listing.  */
2425 find_auto_inc (needed, x, insn)
2430 rtx addr = XEXP (x, 0);
2431 HOST_WIDE_INT offset = 0;
2434 /* Here we detect use of an index register which might be good for
2435 postincrement, postdecrement, preincrement, or predecrement. */
/* Split a (plus REG const) address into base register and constant offset.  */
2437 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2438 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
2440 if (GET_CODE (addr) == REG)
2443 register int size = GET_MODE_SIZE (GET_MODE (x));
2446 int regno = REGNO (addr);
2448 /* Is the next use an increment that might make auto-increment? */
2449 if ((incr = reg_next_use[regno]) != 0
2450 && (set = single_set (incr)) != 0
2451 && GET_CODE (set) == SET
2452 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
2453 /* Can't add side effects to jumps; if reg is spilled and
2454 reloaded, there's no way to store back the altered value. */
2455 && GET_CODE (insn) != JUMP_INSN
2456 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
2457 && XEXP (y, 0) == addr
2458 && GET_CODE (XEXP (y, 1)) == CONST_INT
/* Increment amount and current offset must match one of the addressing
   modes the target supports: post modes need offset 0, pre modes need the
   offset to equal the (signed) increment.  */
2460 #ifdef HAVE_POST_INCREMENT
2461 || (INTVAL (XEXP (y, 1)) == size && offset == 0)
2463 #ifdef HAVE_POST_DECREMENT
2464 || (INTVAL (XEXP (y, 1)) == - size && offset == 0)
2466 #ifdef HAVE_PRE_INCREMENT
2467 || (INTVAL (XEXP (y, 1)) == size && offset == size)
2469 #ifdef HAVE_PRE_DECREMENT
2470 || (INTVAL (XEXP (y, 1)) == - size && offset == - size)
2473 /* Make sure this reg appears only once in this insn. */
2474 && (use = find_use_as_address (PATTERN (insn), addr, offset),
2475 use != 0 && use != (rtx) 1))
2477 rtx q = SET_DEST (set);
2478 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
2479 ? (offset ? PRE_INC : POST_INC)
2480 : (offset ? PRE_DEC : POST_DEC));
2482 if (dead_or_set_p (incr, addr))
2484 /* This is the simple case. Try to make the auto-inc. If
2485 we can't, we are done. Otherwise, we will do any
2486 needed updates below. */
2487 if (! validate_change (insn, &XEXP (x, 0),
2488 gen_rtx_fmt_e (inc_code, Pmode, addr),
/* Harder case: the increment targets a different register Q that is
   untouched between INSN and INCR, so we can retarget the MEM to Q.  */
2492 else if (GET_CODE (q) == REG
2493 /* PREV_INSN used here to check the semi-open interval
2495 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
2496 /* We must also check for sets of q as q may be
2497 a call clobbered hard register and there may
2498 be a call between PREV_INSN (insn) and incr. */
2499 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
2501 /* We have *p followed sometime later by q = p+size.
2502 Both p and q must be live afterward,
2503 and q is not used between INSN and its assignment.
2504 Change it to q = p, ...*q..., q = q+size.
2505 Then fall into the usual case. */
2509 emit_move_insn (q, addr);
2510 insns = get_insns ();
2513 /* If anything in INSNS have UID's that don't fit within the
2514 extra space we allocate earlier, we can't make this auto-inc.
2515 This should never happen. */
2516 for (temp = insns; temp; temp = NEXT_INSN (temp))
2518 if (INSN_UID (temp) > max_uid_for_flow)
2520 BLOCK_NUM (temp) = BLOCK_NUM (insn);
2523 /* If we can't make the auto-inc, or can't make the
2524 replacement into Y, exit. There's no point in making
2525 the change below if we can't do the auto-inc and doing
2526 so is not correct in the pre-inc case. */
/* Both replacements are queued (in_group = 1) and committed atomically
   by apply_change_group.  */
2528 validate_change (insn, &XEXP (x, 0),
2529 gen_rtx_fmt_e (inc_code, Pmode, q),
2531 validate_change (incr, &XEXP (y, 0), q, 1);
2532 if (! apply_change_group ())
2535 /* We now know we'll be doing this change, so emit the
2536 new insn(s) and do the updates. */
2537 emit_insns_before (insns, insn);
/* Keep the basic-block head pointer accurate if INSN was the head.  */
2539 if (basic_block_head[BLOCK_NUM (insn)] == insn)
2540 basic_block_head[BLOCK_NUM (insn)] = insns;
2542 /* INCR will become a NOTE and INSN won't contain a
2543 use of ADDR. If a use of ADDR was just placed in
2544 the insn before INSN, make that the next use.
2545 Otherwise, invalidate it. */
2546 if (GET_CODE (PREV_INSN (insn)) == INSN
2547 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
2548 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
2549 reg_next_use[regno] = PREV_INSN (insn);
2551 reg_next_use[regno] = 0;
2556 /* REGNO is now used in INCR which is below INSN, but
2557 it previously wasn't live here. If we don't mark
2558 it as needed, we'll put a REG_DEAD note for it
2559 on this insn, which is incorrect. */
2560 SET_REGNO_REG_SET (needed, regno);
2562 /* If there are any calls between INSN and INCR, show
2563 that REGNO now crosses them. */
2564 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
2565 if (GET_CODE (temp) == CALL_INSN)
2566 REG_N_CALLS_CROSSED (regno)++;
2571 /* If we haven't returned, it means we were able to make the
2572 auto-inc, so update the status. First, record that this insn
2573 has an implicit side effect. */
2576 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
2578 /* Modify the old increment-insn to simply copy
2579 the already-incremented value of our register. */
2580 if (! validate_change (incr, &SET_SRC (set), addr, 0))
2583 /* If that makes it a no-op (copying the register into itself) delete
2584 it so it won't appear to be a "use" and a "set" of this
/* Deleting an insn here means turning it into a NOTE_INSN_DELETED note.  */
2586 if (SET_DEST (set) == addr)
2588 PUT_CODE (incr, NOTE);
2589 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
2590 NOTE_SOURCE_FILE (incr) = 0;
2593 if (regno >= FIRST_PSEUDO_REGISTER)
2595 /* Count an extra reference to the reg. When a reg is
2596 incremented, spilling it is worse, so we want to make
2597 that less likely. */
2598 REG_N_REFS (regno) += loop_depth;
2600 /* Count the increment as a setting of the register,
2601 even though it isn't a SET in rtl. */
2602 REG_N_SETS (regno)++;
2607 #endif /* AUTO_INC_DEC */
2609 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
2610 This is done assuming the registers needed from X
2611 are those that have 1-bits in NEEDED.
2613 On the final pass, FINAL is 1. This means try for autoincrement
2614 and count the uses and deaths of each pseudo-reg.
2616 INSN is the containing instruction. If INSN is dead, this function is not
/* NOTE(review): this listing elides the switch statement and its case
   labels (CLOBBER, MEM, SUBREG, REG, SET, USE/CALL_INSN handling, etc.);
   the fragments below are the bodies of those cases in order.  */
2620 mark_used_regs (needed, live, x, final, insn)
2627 register RTX_CODE code;
2632 code = GET_CODE (x);
/* --- CLOBBER handling (presumably; case label elided) --- */
2653 /* If we are clobbering a MEM, mark any registers inside the address
2655 if (GET_CODE (XEXP (x, 0)) == MEM)
2656 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
/* --- MEM handling: track last_mem_set and try auto-inc --- */
2660 /* Invalidate the data for the last MEM stored, but only if MEM is
2661 something that can be stored into. */
2662 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2663 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2664 ; /* needn't clear last_mem_set */
2670 find_auto_inc (needed, x, insn);
/* --- SUBREG handling --- */
2675 if (GET_CODE (SUBREG_REG (x)) == REG
2676 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
2677 && (GET_MODE_SIZE (GET_MODE (x))
2678 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2679 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
2681 /* While we're here, optimize this case. */
2684 /* In case the SUBREG is not of a register, don't optimize */
2685 if (GET_CODE (x) != REG)
2687 mark_used_regs (needed, live, x, final, insn);
2691 /* ... fall through ... */
2694 /* See a register other than being set
2695 => mark it as needed. */
/* --- REG handling: mark live, record use, maybe note death --- */
2699 int some_needed = REGNO_REG_SET_P (needed, regno);
2700 int some_not_needed = ! some_needed;
2702 SET_REGNO_REG_SET (live, regno);
2704 /* A hard reg in a wide mode may really be multiple registers.
2705 If so, mark all of them just like the first. */
2706 if (regno < FIRST_PSEUDO_REGISTER)
2710 /* For stack ptr or fixed arg pointer,
2711 nothing below can be necessary, so waste no more time. */
2712 if (regno == STACK_POINTER_REGNUM
2713 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2714 || regno == HARD_FRAME_POINTER_REGNUM
2716 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2717 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2719 || regno == FRAME_POINTER_REGNUM)
2721 /* If this is a register we are going to try to eliminate,
2722 don't mark it live here. If we are successful in
2723 eliminating it, it need not be live unless it is used for
2724 pseudos, in which case it will have been set live when
2725 it was allocated to the pseudos. If the register will not
2726 be eliminated, reload will set it live at that point. */
2728 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
2729 regs_ever_live[regno] = 1;
2732 /* No death notes for global register variables;
2733 their values are live after this function exits. */
2734 if (global_regs[regno])
2737 reg_next_use[regno] = insn;
2741 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2744 int regno_n = regno + n;
2745 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2747 SET_REGNO_REG_SET (live, regno_n);
2748 some_needed |= needed_regno;
2749 some_not_needed |= ! needed_regno;
2754 /* Record where each reg is used, so when the reg
2755 is set we know the next insn that uses it. */
2757 reg_next_use[regno] = insn;
2759 if (regno < FIRST_PSEUDO_REGISTER)
2761 /* If a hard reg is being used,
2762 record that this function does use it. */
2764 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
2768 regs_ever_live[regno + --i] = 1;
2773 /* Keep track of which basic block each reg appears in. */
2775 register int blocknum = BLOCK_NUM (insn);
2777 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2778 REG_BASIC_BLOCK (regno) = blocknum;
2779 else if (REG_BASIC_BLOCK (regno) != blocknum)
2780 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2782 /* Count (weighted) number of uses of each reg. */
2784 REG_N_REFS (regno) += loop_depth;
2787 /* Record and count the insns in which a reg dies.
2788 If it is used in this insn and was dead below the insn
2789 then it dies in this insn. If it was set in this insn,
2790 we do not make a REG_DEAD note; likewise if we already
2791 made such a note. */
2794 && ! dead_or_set_p (insn, x)
2796 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
2800 /* Check for the case where the register dying partially
2801 overlaps the register set by this insn. */
2802 if (regno < FIRST_PSEUDO_REGISTER
2803 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
2805 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2807 some_needed |= dead_or_set_regno_p (insn, regno + n);
2810 /* If none of the words in X is needed, make a REG_DEAD
2811 note. Otherwise, we must make partial REG_DEAD notes. */
2815 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
2816 REG_N_DEATHS (regno)++;
2822 /* Don't make a REG_DEAD note for a part of a register
2823 that is set in the insn. */
2825 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
2827 if (!REGNO_REG_SET_P (needed, regno + i)
2828 && ! dead_or_set_regno_p (insn, regno + i))
2830 = gen_rtx_EXPR_LIST (REG_DEAD,
2831 gen_rtx_REG (reg_raw_mode[regno + i],
/* --- SET handling: scan source, and destination address if any --- */
2842 register rtx testreg = SET_DEST (x);
2845 /* If storing into MEM, don't show it as being used. But do
2846 show the address as being used. */
2847 if (GET_CODE (testreg) == MEM)
2851 find_auto_inc (needed, testreg, insn);
2853 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
2854 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2858 /* Storing in STRICT_LOW_PART is like storing in a reg
2859 in that this SET might be dead, so ignore it in TESTREG.
2860 but in some other ways it is like using the reg.
2862 Storing in a SUBREG or a bit field is like storing the entire
2863 register in that if the register's value is not used
2864 then this SET is not needed. */
2865 while (GET_CODE (testreg) == STRICT_LOW_PART
2866 || GET_CODE (testreg) == ZERO_EXTRACT
2867 || GET_CODE (testreg) == SIGN_EXTRACT
2868 || GET_CODE (testreg) == SUBREG)
2870 if (GET_CODE (testreg) == SUBREG
2871 && GET_CODE (SUBREG_REG (testreg)) == REG
2872 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
2873 && (GET_MODE_SIZE (GET_MODE (testreg))
2874 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
2875 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
2877 /* Modifying a single register in an alternate mode
2878 does not use any of the old value. But these other
2879 ways of storing in a register do use the old value. */
2880 if (GET_CODE (testreg) == SUBREG
2881 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
2886 testreg = XEXP (testreg, 0);
2889 /* If this is a store into a register,
2890 recursively scan the value being stored. */
2892 if ((GET_CODE (testreg) == PARALLEL
2893 && GET_MODE (testreg) == BLKmode)
2894 || (GET_CODE (testreg) == REG
2895 && (regno = REGNO (testreg), regno != FRAME_POINTER_REGNUM)
2896 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2897 && regno != HARD_FRAME_POINTER_REGNUM
2899 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2900 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2903 /* We used to exclude global_regs here, but that seems wrong.
2904 Storing in them is like storing in mem. */
2906 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2908 mark_used_regs (needed, live, SET_DEST (x), final, insn);
/* --- RETURN/exit handling: make exit-live registers explicit --- */
2915 /* If exiting needs the right stack value, consider this insn as
2916 using the stack pointer. In any event, consider it as using
2917 all global registers and all registers used by return. */
2919 #ifdef EXIT_IGNORE_STACK
2920 if (! EXIT_IGNORE_STACK
2921 || (! FRAME_POINTER_REQUIRED
2922 && ! current_function_calls_alloca
2923 && flag_omit_frame_pointer))
2925 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2927 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2929 #ifdef EPILOGUE_USES
2930 || EPILOGUE_USES (i)
2933 SET_REGNO_REG_SET (live, i);
/* --- default: recurse over operands via the rtx format string --- */
2940 /* Recursively scan the operands of this expression. */
2943 register char *fmt = GET_RTX_FORMAT (code);
2946 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2950 /* Tail recursive case: save a function call level. */
2956 mark_used_regs (needed, live, XEXP (x, i), final, insn);
2958 else if (fmt[i] == 'E')
2961 for (j = 0; j < XVECLEN (x, i); j++)
2962 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
/* try_pre_increment_1: INSN is an add/subtract of a constant to a register.
   If the register's next use (in the same basic block) can absorb the
   increment as a pre-inc/pre-dec addressing mode, do so and delete INSN.
   NOTE(review): declarations, braces and the guard around `y' (a null
   check, presumably) are elided from this listing.  */
2971 try_pre_increment_1 (insn)
2974 /* Find the next use of this reg. If in same basic block,
2975 make it do pre-increment or pre-decrement if appropriate. */
2976 rtx x = single_set (insn);
/* AMOUNT is the signed constant added: positive for PLUS, negated otherwise.  */
2977 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
2978 * INTVAL (XEXP (SET_SRC (x), 1)));
2979 int regno = REGNO (SET_DEST (x));
2980 rtx y = reg_next_use[regno];
2982 && BLOCK_NUM (y) == BLOCK_NUM (insn)
2983 /* Don't do this if the reg dies, or gets set in y; a standard addressing
2984 mode would be better. */
2985 && ! dead_or_set_p (y, SET_DEST (x))
2986 && try_pre_increment (y, SET_DEST (x), amount))
2988 /* We have found a suitable auto-increment
2989 and already changed insn Y to do it.
2990 So flush this increment-instruction. */
/* "Flush" = turn INSN into a deleted-insn note in place.  */
2991 PUT_CODE (insn, NOTE);
2992 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2993 NOTE_SOURCE_FILE (insn) = 0;
2994 /* Count a reference to this reg for the increment
2995 insn we are deleting. When a reg is incremented.
2996 spilling it is worse, so we want to make that
2998 if (regno >= FIRST_PSEUDO_REGISTER)
3000 REG_N_REFS (regno) += loop_depth;
3001 REG_N_SETS (regno)++;
3008 /* Try to change INSN so that it does pre-increment or pre-decrement
3009 addressing on register REG in order to add AMOUNT to REG.
3010 AMOUNT is negative for pre-decrement.
3011 Returns 1 if the change could be made.
3012 This checks all about the validity of the result of modifying INSN. */
/* NOTE(review): the assignments to pre_ok/post_ok/do_post under the
   HAVE_* conditionals, and several early `return 0's, are elided from
   this listing.  */
3015 try_pre_increment (insn, reg, amount)
3017 HOST_WIDE_INT amount;
3021 /* Nonzero if we can try to make a pre-increment or pre-decrement.
3022 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
3024 /* Nonzero if we can try to make a post-increment or post-decrement.
3025 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
3026 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
3027 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
3030 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
3033 /* From the sign of increment, see which possibilities are conceivable
3034 on this target machine. */
3035 #ifdef HAVE_PRE_INCREMENT
3039 #ifdef HAVE_POST_INCREMENT
3044 #ifdef HAVE_PRE_DECREMENT
3048 #ifdef HAVE_POST_DECREMENT
3053 if (! (pre_ok || post_ok))
3056 /* It is not safe to add a side effect to a jump insn
3057 because if the incremented register is spilled and must be reloaded
3058 there would be no way to store the incremented value back in memory. */
3060 if (GET_CODE (insn) == JUMP_INSN)
/* First look for a plain (mem (reg)) use (post form), then for
   (mem (plus reg -amount)) (pre form).  */
3065 use = find_use_as_address (PATTERN (insn), reg, 0);
3066 if (post_ok && (use == 0 || use == (rtx) 1))
3068 use = find_use_as_address (PATTERN (insn), reg, -amount);
3072 if (use == 0 || use == (rtx) 1)
/* The access size must match the increment amount exactly.  */
3075 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
3078 /* See if this combination of instruction and addressing mode exists. */
3079 if (! validate_change (insn, &XEXP (use, 0),
3080 gen_rtx_fmt_e (amount > 0
3081 ? (do_post ? POST_INC : PRE_INC)
3082 : (do_post ? POST_DEC : PRE_DEC),
3086 /* Record that this insn now has an implicit side effect on X. */
3087 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
3093 /* Find the place in the rtx X where REG is used as a memory address.
3094 Return the MEM rtx that so uses it.
3095 If PLUSCONST is nonzero, search instead for a memory address equivalent to
3096 (plus REG (const_int PLUSCONST)).
3098 If such an address does not appear, return 0.
3099 If REG appears more than once, or is used other than in such an address,
/* (Elided tail of the comment: in that case return (rtx) 1 as a sentinel,
   as the code below does.)  */
3103 find_use_as_address (x, reg, plusconst)
3106 HOST_WIDE_INT plusconst;
3108 enum rtx_code code = GET_CODE (x);
3109 char *fmt = GET_RTX_FORMAT (code);
3111 register rtx value = 0;
/* Direct hit: (mem REG) with no offset wanted.  */
3114 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
/* Offset hit: (mem (plus REG (const_int PLUSCONST))).  */
3117 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
3118 && XEXP (XEXP (x, 0), 0) == reg
3119 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3120 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
3123 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
3125 /* If REG occurs inside a MEM used in a bit-field reference,
3126 that is unacceptable. */
3127 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
3128 return (rtx) (HOST_WIDE_INT) 1;
3132 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over operands; two distinct finds collapse to the sentinel 1.  */
3134 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3138 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
3142 return (rtx) (HOST_WIDE_INT) 1;
3147 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3149 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
3153 return (rtx) (HOST_WIDE_INT) 1;
3161 /* Write information about registers and basic blocks into FILE.
3162 This is part of making a debugging dump. */
/* NOTE(review): declarations (i, regno), braces, and loop bodies' opening
   lines are elided from this listing.  */
3165 dump_flow_info (file)
3169 static char *reg_class_names[] = REG_CLASS_NAMES;
3171 fprintf (file, "%d registers.\n", max_regno);
/* Per-pseudo summary: use/set/death counts, size, preferred classes.  */
3173 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3176 enum reg_class class, altclass;
3177 fprintf (file, "\nRegister %d used %d times across %d insns",
3178 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
3179 if (REG_BASIC_BLOCK (i) >= 0)
3180 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
3182 fprintf (file, "; set %d time%s", REG_N_SETS (i),
3183 (REG_N_SETS (i) == 1) ? "" : "s");
3184 if (REG_USERVAR_P (regno_reg_rtx[i]))
3185 fprintf (file, "; user var");
3186 if (REG_N_DEATHS (i) != 1)
3187 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
3188 if (REG_N_CALLS_CROSSED (i) == 1)
3189 fprintf (file, "; crosses 1 call");
3190 else if (REG_N_CALLS_CROSSED (i))
3191 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
3192 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
3193 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
3194 class = reg_preferred_class (i);
3195 altclass = reg_alternate_class (i);
/* Only print class info when it differs from the default pair
   (GENERAL_REGS preferred, ALL_REGS alternate).  */
3196 if (class != GENERAL_REGS || altclass != ALL_REGS)
3198 if (altclass == ALL_REGS || class == ALL_REGS)
3199 fprintf (file, "; pref %s", reg_class_names[(int) class]);
3200 else if (altclass == NO_REGS)
3201 fprintf (file, "; %s or none", reg_class_names[(int) class]);
3203 fprintf (file, "; pref %s, else %s",
3204 reg_class_names[(int) class],
3205 reg_class_names[(int) altclass]);
3207 if (REGNO_POINTER_FLAG (i))
3208 fprintf (file, "; pointer");
3209 fprintf (file, ".\n");
/* Per-basic-block summary: boundaries, predecessors, live-at-start regs.  */
3211 fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
3212 for (i = 0; i < n_basic_blocks; i++)
3214 register rtx head, jump;
3216 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
3218 INSN_UID (basic_block_head[i]),
3219 INSN_UID (basic_block_end[i]));
3220 /* The control flow graph's storage is freed
3221 now when flow_analysis returns.
3222 Don't try to print it if it is gone. */
3223 if (basic_block_drops_in)
3225 fprintf (file, "Reached from blocks: ");
/* Predecessors are found by walking the LABEL_REFS chain of the
   block's label, each ref pointing back to a jump insn.  */
3226 head = basic_block_head[i];
3227 if (GET_CODE (head) == CODE_LABEL)
3228 for (jump = LABEL_REFS (head);
3230 jump = LABEL_NEXTREF (jump))
3232 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
3233 fprintf (file, " %d", from_block);
3235 if (basic_block_drops_in[i])
3236 fprintf (file, " previous");
3238 fprintf (file, "\nRegisters live at start:");
3239 for (regno = 0; regno < max_regno; regno++)
3240 if (REGNO_REG_SET_P (basic_block_live_at_start[i], regno))
3241 fprintf (file, " %d", regno);
3242 fprintf (file, "\n");
3244 fprintf (file, "\n");
3248 /* Like print_rtl, but also print out live information for the start of each
/* print_rtl_with_bb: dump the insn chain starting at RTX_FIRST to OUTF,
   annotating basic-block starts (with live registers), ends, and insns
   outside or inside multiple blocks.
   NOTE(review): declarations, braces and a few lines (e.g. the NULL check
   before the "(nil)" print) are elided from this listing.  */
3252 print_rtl_with_bb (outf, rtx_first)
3256 register rtx tmp_rtx;
3259 fprintf (outf, "(nil)\n");
3264 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
/* start[uid]/end[uid] map an insn UID to the block it begins/ends (or -1);
   in_bb_p[uid] classifies membership.  */
3265 int max_uid = get_max_uid ();
3266 int *start = (int *) alloca (max_uid * sizeof (int));
3267 int *end = (int *) alloca (max_uid * sizeof (int));
3268 enum bb_state *in_bb_p = (enum bb_state *)
3269 alloca (max_uid * sizeof (enum bb_state));
3271 for (i = 0; i < max_uid; i++)
3273 start[i] = end[i] = -1;
3274 in_bb_p[i] = NOT_IN_BB;
3277 for (i = n_basic_blocks-1; i >= 0; i--)
3280 start[INSN_UID (basic_block_head[i])] = i;
3281 end[INSN_UID (basic_block_end[i])] = i;
3282 for (x = basic_block_head[i]; x != NULL_RTX; x = NEXT_INSN (x))
3284 in_bb_p[ INSN_UID(x)]
3285 = (in_bb_p[ INSN_UID(x)] == NOT_IN_BB)
3286 ? IN_ONE_BB : IN_MULTIPLE_BB;
3287 if (x == basic_block_end[i])
/* Main dump loop over the whole insn chain.  */
3292 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
3296 if ((bb = start[INSN_UID (tmp_rtx)]) >= 0)
3298 fprintf (outf, ";; Start of basic block %d, registers live:",
3301 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[bb], 0, i,
3303 fprintf (outf, " %d", i);
3304 if (i < FIRST_PSEUDO_REGISTER)
3305 fprintf (outf, " [%s]",
3311 if (in_bb_p[ INSN_UID(tmp_rtx)] == NOT_IN_BB
3312 && GET_CODE (tmp_rtx) != NOTE
3313 && GET_CODE (tmp_rtx) != BARRIER)
3314 fprintf (outf, ";; Insn is not within a basic block\n");
3315 else if (in_bb_p[ INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
3316 fprintf (outf, ";; Insn is in multiple basic blocks\n");
3318 did_output = print_rtl_single (outf, tmp_rtx);
3320 if ((bb = end[INSN_UID (tmp_rtx)]) >= 0)
3321 fprintf (outf, ";; End of basic block %d\n", bb);
3330 /* Integer list support. */
3332 /* Allocate a node from list *HEAD_PTR. */
/* Nodes are carved out of blocks of INT_LIST_NODES_IN_BLK nodes; a new
   block is pushed on the front of *HEAD_PTR when the current one is full.  */
3335 alloc_int_list_node (head_ptr)
3336 int_list_block **head_ptr;
3338 struct int_list_block *first_blk = *head_ptr;
3340 if (first_blk == NULL || first_blk->nodes_left <= 0)
3342 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
3343 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
3344 first_blk->next = *head_ptr;
3345 *head_ptr = first_blk;
/* Hand out nodes from the back of the block toward the front.  */
3348 first_blk->nodes_left--;
3349 return &first_blk->nodes[first_blk->nodes_left];
3352 /* Pointer to head of predecessor/successor block list. */
3353 static int_list_block *pred_int_list_blocks;
3355 /* Add a new node to integer list LIST with value VAL.
3356 LIST is a pointer to a list object to allow for different implementations.
3357 If *LIST is initially NULL, the list is empty.
3358 The caller must not care whether the element is added to the front or
3359 to the end of the list (to allow for different implementations). */
/* NOTE(review): the lines that store VAL in the node and link it into
   *LIST (and the return, if any) are elided from this listing.  */
3362 add_int_list_node (blk_list, list, val)
3363 int_list_block **blk_list;
3367 int_list_ptr p = alloc_int_list_node (blk_list);
3375 /* Free the blocks of lists at BLK_LIST. */
/* NOTE(review): the loop body (saving p->next into `next' and freeing p)
   and the final reset of *BLK_LIST are elided from this listing.  */
3378 free_int_list (blk_list)
3379 int_list_block **blk_list;
3381 int_list_block *p, *next;
3383 for (p = *blk_list; p != NULL; p = next)
3389 /* Mark list as empty for the next function we compile. */
3393 /* Predecessor/successor computation. */
3395 /* Mark PRED_BB a precessor of SUCC_BB,
3396 and conversely SUCC_BB a successor of PRED_BB. */
/* S_PREDS/S_SUCCS are per-block integer lists; NUM_PREDS/NUM_SUCCS the
   corresponding counts. The EXIT block gets no predecessor list entry and
   the ENTRY block no successor list entry -- only the counts on the other
   side are recorded for those.  */
3399 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
3402 int_list_ptr *s_preds;
3403 int_list_ptr *s_succs;
3407 if (succ_bb != EXIT_BLOCK)
3409 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
3410 num_preds[succ_bb]++;
3412 if (pred_bb != ENTRY_BLOCK)
3414 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
3415 num_succs[pred_bb]++;
3419 /* Compute the predecessors and successors for each block. */
/* Fill S_PREDS/S_SUCCS (adjacency lists) and NUM_PREDS/NUM_SUCCS
   (edge counts), each an array of n_basic_blocks entries supplied by
   the caller.  Edges are discovered three ways: from LABEL_REFS chains
   on block-head labels (explicit jumps), from fall-through
   (basic_block_drops_in), and from the special entry/exit blocks.  */
3421 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
3422 int_list_ptr *s_preds;
3423 int_list_ptr *s_succs;
3427 int bb, clear_local_bb_vars = 0;
3429 bzero ((char *) s_preds, n_basic_blocks * sizeof (int_list_ptr));
3430 bzero ((char *) s_succs, n_basic_blocks * sizeof (int_list_ptr));
3431 bzero ((char *) num_preds, n_basic_blocks * sizeof (int));
3432 bzero ((char *) num_succs, n_basic_blocks * sizeof (int));
3434 /* This routine can be called after life analysis; in that case
3435 basic_block_drops_in and uid_block_number will not be available
3436 and we must recompute their values. */
3437 if (basic_block_drops_in == NULL || uid_block_number == NULL)
3439 clear_local_bb_vars = 1;
3440 basic_block_drops_in = (char *) alloca (n_basic_blocks);
3441 uid_block_number = (int *) alloca ((get_max_uid () + 1) * sizeof (int));
3443 bzero ((char *) basic_block_drops_in, n_basic_blocks * sizeof (char));
/* NOTE(review): uid_block_number was allocated with get_max_uid () + 1
   entries, but only n_basic_blocks * sizeof (int) bytes are cleared
   here.  When max uid exceeds n_basic_blocks the tail is left
   uninitialized -- looks like a latent bug; confirm before relying on
   zeroed entries.  */
3444 bzero ((char *) uid_block_number, n_basic_blocks * sizeof (int));
3446 /* Scan each basic block setting basic_block_drops_in and
3447 uid_block_number as needed. */
3448 for (bb = 0; bb < n_basic_blocks; bb++)
3450 rtx insn, stop_insn;
/* For block 0 there is no previous block to stop at.  */
3453 stop_insn = NULL_RTX;
3455 stop_insn = basic_block_end[bb-1];
3457 /* Look backwards from the start of this block. Stop if we
3458 hit the start of the function or the end of a previous
3459 block. Don't walk backwards through blocks that are just
3461 for (insn = PREV_INSN (basic_block_head[bb]);
3462 insn && insn != stop_insn && GET_CODE (insn) == NOTE;
3463 insn = PREV_INSN (insn))
3466 /* Never set basic_block_drops_in for the first block. It is
3469 If we stopped on anything other than a BARRIER, then this
/* A BARRIER between the blocks means control cannot fall through;
   anything else (or running off the start of the function) means it
   can.  */
3472 basic_block_drops_in[bb] = (insn ? GET_CODE (insn) != BARRIER : 1);
/* Record the owning block number for every insn in the block.  */
3474 insn = basic_block_head[bb];
3477 BLOCK_NUM (insn) = bb;
3478 if (insn == basic_block_end[bb])
3480 insn = NEXT_INSN (insn);
/* Second pass: emit the actual edges.  */
3485 for (bb = 0; bb < n_basic_blocks; bb++)
3490 head = BLOCK_HEAD (bb);
/* Each LABEL_REF on the block's label is a jump into this block,
   unless the containing insn has been deleted.  */
3492 if (GET_CODE (head) == CODE_LABEL)
3493 for (jump = LABEL_REFS (head);
3495 jump = LABEL_NEXTREF (jump))
3497 if (! INSN_DELETED_P (CONTAINING_INSN (jump))
3498 && (GET_CODE (CONTAINING_INSN (jump)) != NOTE
3499 || (NOTE_LINE_NUMBER (CONTAINING_INSN (jump))
3500 != NOTE_INSN_DELETED)))
3501 add_pred_succ (BLOCK_NUM (CONTAINING_INSN (jump)), bb,
3502 s_preds, s_succs, num_preds, num_succs);
3505 jump = BLOCK_END (bb);
3506 /* If this is a RETURN insn or a conditional jump in the last
3507 basic block, or a non-jump insn in the last basic block, then
3508 this block reaches the exit block. */
3509 if ((GET_CODE (jump) == JUMP_INSN && GET_CODE (PATTERN (jump)) == RETURN)
3510 || (((GET_CODE (jump) == JUMP_INSN
3511 && condjump_p (jump) && !simplejump_p (jump))
3512 || GET_CODE (jump) != JUMP_INSN)
3513 && (bb == n_basic_blocks - 1)))
3514 add_pred_succ (bb, EXIT_BLOCK, s_preds, s_succs, num_preds, num_succs);
/* Fall-through edge from the previous block.  */
3516 if (basic_block_drops_in[bb])
3517 add_pred_succ (bb - 1, bb, s_preds, s_succs, num_preds, num_succs);
/* The function entry always flows into block 0.  */
3520 add_pred_succ (ENTRY_BLOCK, 0, s_preds, s_succs, num_preds, num_succs);
3523 /* If we allocated any variables in temporary storage, clear out the
3524 pointer to the local storage to avoid dangling pointers. */
3525 if (clear_local_bb_vars)
3527 basic_block_drops_in = NULL;
3528 uid_block_number = NULL;
/* Dump the basic-block predecessor/successor lists PREDS and SUCCS to
   FILE, one paragraph per block, printing the entry/exit pseudo-blocks
   symbolically.  Debugging aid only; does not modify anything.  */
3534 dump_bb_data (file, preds, succs)
3536 int_list_ptr *preds;
3537 int_list_ptr *succs;
3542 fprintf (file, "BB data\n\n");
3543 for (bb = 0; bb < n_basic_blocks; bb++)
3545 fprintf (file, "BB %d, start %d, end %d\n", bb,
3546 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
3547 fprintf (file, " preds:");
3548 for (p = preds[bb]; p != NULL; p = p->next)
3550 int pred_bb = INT_LIST_VAL (p);
3551 if (pred_bb == ENTRY_BLOCK)
3552 fprintf (file, " entry");
3554 fprintf (file, " %d", pred_bb);
3556 fprintf (file, "\n");
3557 fprintf (file, " succs:");
3558 for (p = succs[bb]; p != NULL; p = p->next)
3560 int succ_bb = INT_LIST_VAL (p);
3561 if (succ_bb == EXIT_BLOCK)
3562 fprintf (file, " exit");
3564 fprintf (file, " %d", succ_bb);
3566 fprintf (file, "\n");
3568 fprintf (file, "\n");
/* Dump the bits of simple bitmap BMAP to FILE as 0/1 digits, grouped
   in runs of ten for readability.  N counts total bits emitted so the
   trailing unused bits of the last element are not printed.  */
3572 dump_sbitmap (file, bmap)
3577 int set_size = bmap->size;
3578 int total_bits = bmap->n_bits;
3580 fprintf (file, " ");
3581 for (i = n = 0; i < set_size && n < total_bits; i++)
3583 for (j = 0; j < SBITMAP_ELT_BITS && n < total_bits; j++, n++)
3585 if (n != 0 && n % 10 == 0)
3586 fprintf (file, " ");
3587 fprintf (file, "%d", (bmap->elms[i] & (1L << j)) != 0);
3590 fprintf (file, "\n");
/* Dump the N_MAPS bitmaps in BMAPS to FILE, labeling the group with
   TITLE and each individual bitmap with "SUBTITLE <index>".  */
3594 dump_sbitmap_vector (file, title, subtitle, bmaps, n_maps)
3596 char *title, *subtitle;
3602 fprintf (file, "%s\n", title);
3603 for (bb = 0; bb < n_maps; bb++)
3605 fprintf (file, "%s %d\n", subtitle, bb);
3606 dump_sbitmap (file, bmaps[bb]);
3608 fprintf (file, "\n");
3611 /* Free basic block data storage. */
/* NOTE(review): the function header is not visible in this excerpt;
   the body releases the shared pred/succ list storage via
   free_int_list.  */
3616 free_int_list (&pred_int_list_blocks);
3619 /* Bitmap manipulation routines. */
3621 /* Allocate a simple bitmap of N_ELMS bits. */
/* The header struct and its element array are carved from a single
   xmalloc'd chunk; AMT subtracts one SBITMAP_ELT_TYPE because the
   struct already contains the first element.  */
3624 sbitmap_alloc (n_elms)
3627 int bytes, size, amt;
3630 size = SBITMAP_SET_SIZE (n_elms);
3631 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3632 amt = (sizeof (struct simple_bitmap_def)
3633 + bytes - sizeof (SBITMAP_ELT_TYPE));
3634 bmap = (sbitmap) xmalloc (amt);
3635 bmap->n_bits = n_elms;
3637 bmap->bytes = bytes;
3641 /* Allocate a vector of N_VECS bitmaps of N_ELMS bits. */
/* One xmalloc serves both the pointer table and all N_VECS bitmaps:
   the table occupies the first VECTOR_BYTES (rounded up to sbitmap
   alignment), and the bitmaps are laid out back to back after it.
   Freeing the returned pointer therefore frees everything.  */
3644 sbitmap_vector_alloc (n_vecs, n_elms)
3647 int i, bytes, offset, elm_bytes, size, amt, vector_bytes;
3648 sbitmap *bitmap_vector;
3650 size = SBITMAP_SET_SIZE (n_elms);
3651 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3652 elm_bytes = (sizeof (struct simple_bitmap_def)
3653 + bytes - sizeof (SBITMAP_ELT_TYPE));
3654 vector_bytes = n_vecs * sizeof (sbitmap *);
3656 /* Round up `vector_bytes' to account for the alignment requirements
3657 of an sbitmap. One could allocate the vector-table and set of sbitmaps
3658 separately, but that requires maintaining two pointers or creating
3659 a cover struct to hold both pointers (so our result is still just
3660 one pointer). Neither is a bad idea, but this is simpler for now. */
3662 /* Based on DEFAULT_ALIGNMENT computation in obstack.c. */
3663 struct { char x; SBITMAP_ELT_TYPE y; } align;
3664 int alignment = (char *) & align.y - & align.x;
3665 vector_bytes = (vector_bytes + alignment - 1) & ~ (alignment - 1);
3668 amt = vector_bytes + (n_vecs * elm_bytes);
3669 bitmap_vector = (sbitmap *) xmalloc (amt);
/* Point each table slot at its in-place bitmap.  */
3671 for (i = 0, offset = vector_bytes;
3673 i++, offset += elm_bytes)
3675 sbitmap b = (sbitmap) ((char *) bitmap_vector + offset);
3676 bitmap_vector[i] = b;
3682 return bitmap_vector;
3685 /* Copy sbitmap SRC to DST. */
/* Raw element copy; both bitmaps are assumed to be the same size --
   TODO confirm callers guarantee this, only DST->size is consulted.  */
3688 sbitmap_copy (dst, src)
3691 bcopy ((PTR) src->elms, (PTR) dst->elms, sizeof (SBITMAP_ELT_TYPE) * dst->size);
3694 /* Zero all elements in a bitmap. */
3700 bzero ((char *) bmap->elms, bmap->bytes);
3703 /* Set to ones all elements in a bitmap. */
/* memset with -1 sets every byte, hence every bit, of the elements.  */
3709 memset (bmap->elms, -1, bmap->bytes);
3712 /* Zero a vector of N_VECS bitmaps. */
3715 sbitmap_vector_zero (bmap, n_vecs)
3721 for (i = 0; i < n_vecs; i++)
3722 sbitmap_zero (bmap[i]);
3725 /* Set to ones a vector of N_VECS bitmaps. */
3728 sbitmap_vector_ones (bmap, n_vecs)
3734 for (i = 0; i < n_vecs; i++)
3735 sbitmap_ones (bmap[i]);
3738 /* Set DST to be A union (B - C).
3740 Return non-zero if any change is made. */
/* Element-wise dst = a | (b & ~c).  The change flag is presumably
   accumulated by comparing TMP against the old *dstp in the elided
   lines -- confirm against the full source.  */
3743 sbitmap_union_of_diff (dst, a, b, c)
3744 sbitmap dst, a, b, c;
3747 sbitmap_ptr dstp, ap, bp, cp;
3754 for (i = 0; i < dst->size; i++)
3756 SBITMAP_ELT_TYPE tmp = *ap | (*bp & ~*cp);
3760 dstp++; ap++; bp++; cp++;
3765 /* Set bitmap DST to the bitwise negation of the bitmap SRC. */
3768 sbitmap_not (dst, src)
3772 sbitmap_ptr dstp, ap;
3776 for (i = 0; i < dst->size; i++)
3778 SBITMAP_ELT_TYPE tmp = ~(*ap);
3784 /* Set the bits in DST to be the difference between the bits
3785 in A and the bits in B. i.e. dst = a - b.
3786 The - operator is implemented as a & (~b). */
3789 sbitmap_difference (dst, a, b)
3793 sbitmap_ptr dstp, ap, bp;
3798 for (i = 0; i < dst->size; i++)
3799 *dstp++ = *ap++ & (~*bp++);
3802 /* Set DST to be (A and B)).
3803 Return non-zero if any change is made. */
/* Element-wise dst = a & b; the changed flag logic lives in lines not
   visible in this excerpt.  */
3806 sbitmap_a_and_b (dst, a, b)
3810 sbitmap_ptr dstp, ap, bp;
3816 for (i = 0; i < dst->size; i++)
3818 SBITMAP_ELT_TYPE tmp = *ap & *bp;
3826 /* Set DST to be (A or B)).
3827 Return non-zero if any change is made. */
/* Element-wise dst = a | b; the changed flag logic lives in lines not
   visible in this excerpt.  */
3830 sbitmap_a_or_b (dst, a, b)
3834 sbitmap_ptr dstp, ap, bp;
3840 for (i = 0; i < dst->size; i++)
3842 SBITMAP_ELT_TYPE tmp = *ap | *bp;
3851 /* Set DST to be (A or (B and C)).
3852 Return non-zero if any change is made. */
3855 sbitmap_a_or_b_and_c (dst, a, b, c)
3856 sbitmap dst, a, b, c;
3859 sbitmap_ptr dstp, ap, bp, cp;
3866 for (i = 0; i < dst->size; i++)
3868 SBITMAP_ELT_TYPE tmp = *ap | (*bp & *cp);
3872 dstp++; ap++; bp++; cp++;
3877 /* Set DST to be (A and (B or C)).
3878 Return non-zero if any change is made. */
3881 sbitmap_a_and_b_or_c (dst, a, b, c)
3882 sbitmap dst, a, b, c;
3885 sbitmap_ptr dstp, ap, bp, cp;
3892 for (i = 0; i < dst->size; i++)
3894 SBITMAP_ELT_TYPE tmp = *ap & (*bp | *cp);
3898 dstp++; ap++; bp++; cp++;
3903 /* Set the bitmap DST to the intersection of SRC of all predecessors or
3904 successors of block number BB (PRED_SUCC says which). */
/* PRED_SUCC indexes per-block int lists; entry/exit pseudo-blocks are
   skipped since SRC has no bitmap for them.  An empty neighbor set
   leaves DST as the `and' identity (all ones -- set in elided code,
   presumably).  */
3907 sbitmap_intersect_of_predsucc (dst, src, bb, pred_succ)
3911 int_list_ptr *pred_succ;
3915 int set_size = dst->size;
3919 /* It is possible that there are no predecessors(/successors).
3920 This can happen for example in unreachable code. */
3924 /* In APL-speak this is the `and' reduction of the empty set and thus
3925 the result is the identity for `and'. */
3930 /* Set result to first predecessor/successor. */
3932 for ( ; ps != NULL; ps = ps->next)
3934 ps_bb = INT_LIST_VAL (ps);
3935 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3937 sbitmap_copy (dst, src[ps_bb]);
3938 /* Break out since we're only doing first predecessor. */
3944 /* Now do the remaining predecessors/successors. */
/* AND each remaining neighbor's bitmap into DST element by element.  */
3946 for (ps = ps->next; ps != NULL; ps = ps->next)
3951 ps_bb = INT_LIST_VAL (ps);
3952 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3955 p = src[ps_bb]->elms;
3958 for (i = 0; i < set_size; i++)
3963 /* Set the bitmap DST to the intersection of SRC of all predecessors
3964 of block number BB. */
/* Thin wrapper: the shared worker handles both directions.  */
3967 sbitmap_intersect_of_predecessors (dst, src, bb, s_preds)
3971 int_list_ptr *s_preds;
3973 sbitmap_intersect_of_predsucc (dst, src, bb, s_preds);
3976 /* Set the bitmap DST to the intersection of SRC of all successors
3977 of block number BB. */
/* Thin wrapper: the shared worker handles both directions.  */
3980 sbitmap_intersect_of_successors (dst, src, bb, s_succs)
3984 int_list_ptr *s_succs;
3986 sbitmap_intersect_of_predsucc (dst, src, bb, s_succs);
3989 /* Set the bitmap DST to the union of SRC of all predecessors/successors of
/* Mirror of sbitmap_intersect_of_predsucc using `or': an empty
   neighbor set yields the `or' identity (all zeros, set in elided
   code, presumably), otherwise DST starts as the first neighbor's
   bitmap and the rest are OR'd in.  Entry/exit pseudo-blocks are
   skipped.  */
3993 sbitmap_union_of_predsucc (dst, src, bb, pred_succ)
3997 int_list_ptr *pred_succ;
4001 int set_size = dst->size;
4005 /* It is possible that there are no predecessors(/successors).
4006 This can happen for example in unreachable code. */
4010 /* In APL-speak this is the `or' reduction of the empty set and thus
4011 the result is the identity for `or'. */
4016 /* Set result to first predecessor/successor. */
4018 for ( ; ps != NULL; ps = ps->next)
4020 ps_bb = INT_LIST_VAL (ps);
4021 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
4023 sbitmap_copy (dst, src[ps_bb]);
4024 /* Break out since we're only doing first predecessor. */
4030 /* Now do the remaining predecessors/successors. */
4032 for (ps = ps->next; ps != NULL; ps = ps->next)
4037 ps_bb = INT_LIST_VAL (ps);
4038 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
4041 p = src[ps_bb]->elms;
4044 for (i = 0; i < set_size; i++)
4049 /* Set the bitmap DST to the union of SRC of all predecessors of
/* Thin wrapper: the shared worker handles both directions.  */
4053 sbitmap_union_of_predecessors (dst, src, bb, s_preds)
4057 int_list_ptr *s_preds;
4059 sbitmap_union_of_predsucc (dst, src, bb, s_preds);
4062 /* Set the bitmap DST to the union of SRC of all successors of
/* Thin wrapper: the shared worker handles both directions.  (The
   comment above originally said "predecessors" -- copy-paste slip;
   this routine passes the successor lists.)  */
4066 sbitmap_union_of_successors (dst, src, bb, s_succ)
4070 int_list_ptr *s_succ;
4072 sbitmap_union_of_predsucc (dst, src, bb, s_succ);
4075 /* Compute dominator relationships. */
/* Iteratively solve the standard dominator dataflow equations:
   dom(b) = {b} union intersect(dom(preds)), and the dual over
   successors for post-dominators.  DOMINATORS and POST_DOMINATORS are
   caller-supplied vectors of n_basic_blocks bitmaps; all start as the
   full set except the entry (resp. exit-most) block.  Iterates until a
   fixed point (loop control is in lines elided from this excerpt).  */
4077 compute_dominators (dominators, post_dominators, s_preds, s_succs)
4078 sbitmap *dominators;
4079 sbitmap *post_dominators;
4080 int_list_ptr *s_preds;
4081 int_list_ptr *s_succs;
4083 int bb, changed, passes;
4084 sbitmap *temp_bitmap;
4086 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
4087 sbitmap_vector_ones (dominators, n_basic_blocks);
4088 sbitmap_vector_ones (post_dominators, n_basic_blocks);
4089 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
/* Block 0 is dominated only by itself.  */
4091 sbitmap_zero (dominators[0]);
4092 SET_BIT (dominators[0], 0);
4094 sbitmap_zero (post_dominators[n_basic_blocks-1]);
/* NOTE(review): this sets bit 0, not bit n_basic_blocks-1.  By
   symmetry with the dominator initialization above, the last block
   should post-dominate itself, so bit n_basic_blocks-1 looks
   intended -- suspected bug, confirm against a fixed revision.  */
4095 SET_BIT (post_dominators[n_basic_blocks-1], 0);
4102 for (bb = 1; bb < n_basic_blocks; bb++)
4104 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
4106 SET_BIT (temp_bitmap[bb], bb);
4107 changed |= sbitmap_a_and_b (dominators[bb],
4110 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
4112 SET_BIT (temp_bitmap[bb], bb);
4113 changed |= sbitmap_a_and_b (post_dominators[bb],
4114 post_dominators[bb],
4123 /* Count for a single SET rtx, X. */
/* Strip wrappers from SET_DEST to find the underlying register, recurse
   into BLKmode PARALLELs (multi-register returns/arguments), and for a
   pseudo register bump REG_N_SETS and add the current loop depth to
   REG_N_REFS.  Hard registers are deliberately not counted.  */
4126 count_reg_sets_1 (x)
4130 register rtx reg = SET_DEST (x);
4132 /* Find the register that's set/clobbered. */
4133 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4134 || GET_CODE (reg) == SIGN_EXTRACT
4135 || GET_CODE (reg) == STRICT_LOW_PART)
4136 reg = XEXP (reg, 0);
4138 if (GET_CODE (reg) == PARALLEL
4139 && GET_MODE (reg) == BLKmode)
4142 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4143 count_reg_sets_1 (XVECEXP (reg, 0, i));
4147 if (GET_CODE (reg) == REG)
4149 regno = REGNO (reg);
4150 if (regno >= FIRST_PSEUDO_REGISTER)
4152 /* Count (weighted) references, stores, etc. This counts a
4153 register twice if it is modified, but that is correct. */
4154 REG_N_SETS (regno)++;
4156 REG_N_REFS (regno) += loop_depth;
4161 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
4162 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
/* Dispatch on the top-level code: a bare SET/CLOBBER goes straight to
   count_reg_sets_1, a PARALLEL has each SET/CLOBBER element counted.
   Other codes contain no sets and are ignored.  */
4168 register RTX_CODE code = GET_CODE (x);
4170 if (code == SET || code == CLOBBER)
4171 count_reg_sets_1 (x);
4172 else if (code == PARALLEL)
4175 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4177 code = GET_CODE (XVECEXP (x, 0, i));
4178 if (code == SET || code == CLOBBER)
4179 count_reg_sets_1 (XVECEXP (x, 0, i));
4184 /* Increment REG_N_REFS by the current loop depth each register reference
/* Recursive walk over rtx X counting pseudo-register uses.  Mirrors
   the structure of mark_used_regs elsewhere in this file: special
   cases for CLOBBER-of-MEM, SUBREG, REG, and SET (where a plain
   register destination is a kill, not a use), then a generic recursion
   over the operand format string.  Only visible cases are documented
   here; the switch labels themselves are in elided lines.  */
4188 count_reg_references (x)
4191 register RTX_CODE code;
4194 code = GET_CODE (x);
4214 /* If we are clobbering a MEM, mark any registers inside the address
/* Clobbering memory still *uses* the address registers.  */
4216 if (GET_CODE (XEXP (x, 0)) == MEM)
4217 count_reg_references (XEXP (XEXP (x, 0), 0));
4221 /* While we're here, optimize this case. */
4224 /* In case the SUBREG is not of a register, don't optimize */
4225 if (GET_CODE (x) != REG)
4227 count_reg_references (x);
4231 /* ... fall through ... */
/* A reference to a pseudo register: weight it by loop depth.  */
4234 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
4235 REG_N_REFS (REGNO (x)) += loop_depth;
4240 register rtx testreg = SET_DEST (x);
4243 /* If storing into MEM, don't show it as being used. But do
4244 show the address as being used. */
4245 if (GET_CODE (testreg) == MEM)
4247 count_reg_references (XEXP (testreg, 0));
4248 count_reg_references (SET_SRC (x));
4252 /* Storing in STRICT_LOW_PART is like storing in a reg
4253 in that this SET might be dead, so ignore it in TESTREG.
4254 but in some other ways it is like using the reg.
4256 Storing in a SUBREG or a bit field is like storing the entire
4257 register in that if the register's value is not used
4258 then this SET is not needed. */
4259 while (GET_CODE (testreg) == STRICT_LOW_PART
4260 || GET_CODE (testreg) == ZERO_EXTRACT
4261 || GET_CODE (testreg) == SIGN_EXTRACT
4262 || GET_CODE (testreg) == SUBREG)
4264 /* Modifying a single register in an alternate mode
4265 does not use any of the old value. But these other
4266 ways of storing in a register do use the old value. */
4267 if (GET_CODE (testreg) == SUBREG
4268 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4273 testreg = XEXP (testreg, 0);
4276 /* If this is a store into a register,
4277 recursively scan the value being stored. */
4279 if ((GET_CODE (testreg) == PARALLEL
4280 && GET_MODE (testreg) == BLKmode)
4281 || GET_CODE (testreg) == REG)
4283 count_reg_references (SET_SRC (x));
4285 count_reg_references (SET_DEST (x));
4295 /* Recursively scan the operands of this expression. */
4298 register char *fmt = GET_RTX_FORMAT (code);
4301 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4305 /* Tail recursive case: save a function call level. */
4311 count_reg_references (XEXP (x, i));
4313 else if (fmt[i] == 'E')
4316 for (j = 0; j < XVECLEN (x, i); j++)
4317 count_reg_references (XVECEXP (x, i, j));
4323 /* Recompute register set/reference counts immediately prior to register
4326 This avoids problems with set/reference counts changing to/from values
4327 which have special meanings to the register allocators.
4329 Additionally, the reference counts are the primary component used by the
4330 register allocators to prioritize pseudos for allocation to hard regs.
4331 More accurate reference counts generally lead to better register allocation.
4333 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
4334 possibly other information which is used by the register allocators. */
4337 recompute_reg_usage (f)
4343 /* Clear out the old data. */
4344 max_reg = max_reg_num ();
4345 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
4351 /* Scan each insn in the chain and count how many times each register is
4354 for (insn = f; insn; insn = NEXT_INSN (insn))
4356 /* Keep track of loop depth. */
4357 if (GET_CODE (insn) == NOTE)
4359 /* Look for loop boundaries. */
4360 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
4362 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
4365 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
4366 Abort now rather than setting register status incorrectly. */
4367 if (loop_depth == 0)
4370 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4374 /* This call will increment REG_N_SETS for each SET or CLOBBER
4375 of a register in INSN. It will also increment REG_N_REFS
4376 by the loop depth for each set of a register in INSN. */
4377 count_reg_sets (PATTERN (insn));
4379 /* count_reg_sets does not detect autoincrement address modes, so
4380 detect them here by looking at the notes attached to INSN. */
4381 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
4383 if (REG_NOTE_KIND (links) == REG_INC)
4384 /* Count (weighted) references, stores, etc. This counts a
4385 register twice if it is modified, but that is correct. */
4386 REG_N_SETS (REGNO (XEXP (links, 0)))++;
4389 /* This call will increment REG_N_REFS by the current loop depth for
4390 each reference to a register in INSN. */
4391 count_reg_references (PATTERN (insn));
4393 /* count_reg_references will not include counts for arguments to
4394 function calls, so detect them here by examining the
4395 CALL_INSN_FUNCTION_USAGE data. */
4396 if (GET_CODE (insn) == CALL_INSN)
4400 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4402 note = XEXP (note, 1))
4403 if (GET_CODE (XEXP (note, 0)) == USE)
4404 count_reg_references (SET_DEST (XEXP (note, 0)));