1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler.
23 It computes data flow information
24 which tells combine_instructions which insns to consider combining
25 and controls register allocation.
27 Additional data flow information that is too bulky to record
28 is generated during the analysis, and is used at that time to
29 create autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl
38 into basic blocks. It records the beginnings and ends of the
39 basic blocks in the vectors basic_block_head and basic_block_end,
40 and the number of blocks in n_basic_blocks.
42 find_basic_blocks also finds any unreachable loops
47 life_analysis is called immediately after find_basic_blocks.
48 It uses the basic block information to determine where each
49 hard or pseudo register is live.
51 ** live-register info **
53 The information about where each register is live is in two parts:
54 the REG_NOTES of insns, and the vector basic_block_live_at_start.
56 basic_block_live_at_start has an element for each basic block,
57 and the element is a bit-vector with a bit for each hard or pseudo
58 register. The bit is 1 if the register is live at the beginning
61 Two types of elements can be added to an insn's REG_NOTES.
62 A REG_DEAD note is added to an insn's REG_NOTES for any register
63 that meets both of two conditions: The value in the register is not
64 needed in subsequent insns and the insn does not replace the value in
65 the register (in the case of multi-word hard registers, the value in
66 each register must be replaced by the insn to avoid a REG_DEAD note).
68 In the vast majority of cases, an object in a REG_DEAD note will be
69 used somewhere in the insn. The (rare) exception to this is if an
70 insn uses a multi-word hard register and only some of the registers are
71 needed in subsequent insns. In that case, REG_DEAD notes will be
72 provided for those hard registers that are not subsequently needed.
73 Partial REG_DEAD notes of this type do not occur when an insn sets
74 only some of the hard registers used in such a multi-word operand;
75 omitting REG_DEAD notes for objects stored in an insn is optional and
76 the desire to do so does not justify the complexity of the partial
79 REG_UNUSED notes are added for each register that is set by the insn
80 but is unused subsequently (if every register set by the insn is unused
81 and the insn does not reference memory or have some other side-effect,
82 the insn is deleted instead). If only part of a multi-word hard
83 register is used in a subsequent insn, REG_UNUSED notes are made for
84 the parts that will not be used.
86 To determine which registers are live after any insn, one can
87 start from the beginning of the basic block and scan insns, noting
88 which registers are set by each insn and which die there.
90 ** Other actions of life_analysis **
92 life_analysis sets up the LOG_LINKS fields of insns because the
93 information needed to do so is readily available.
95 life_analysis deletes insns whose only effect is to store a value
98 life_analysis notices cases where a reference to a register as
99 a memory address can be combined with a preceding or following
100 incrementation or decrementation of the register. The separate
101 instruction to increment or decrement is deleted and the address
102 is changed to a POST_INC or similar rtx.
104 Each time an incrementing or decrementing address is created,
105 a REG_INC element is added to the insn's REG_NOTES list.
107 life_analysis fills in certain vectors containing information about
108 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
109 reg_n_calls_crossed and reg_basic_block. */
114 #include "basic-block.h"
115 #include "insn-config.h"
117 #include "hard-reg-set.h"
123 #define obstack_chunk_alloc xmalloc
124 #define obstack_chunk_free free
/* NOTE(review): this chunk is a lossy extraction -- the embedded original
   line numbers skip, so several declarations below are visibly incomplete
   (e.g. missing variable definitions after their comments).  Only comments
   are added here; restore the elided lines from the full file.  */
126 /* The contents of the current function definition are allocated
127 in this obstack, and all are freed at the end of the function.
128 For top-level functions, this is temporary_obstack.
129 Separate obstacks are made for nested functions. */
131 extern struct obstack *function_obstack;
133 /* List of labels that must never be deleted. */
134 extern rtx forced_labels;
136 /* Get the basic block number of an insn.
137 This info should not be expected to remain available
138 after the end of life_analysis. */
140 /* This is the limit of the allocated space in the following two arrays. */
142 static int max_uid_for_flow;
/* Maps an insn (by uid) to the index of the basic block containing it.  */
144 #define BLOCK_NUM(INSN) uid_block_number[INSN_UID (INSN)]
146 /* This is where the BLOCK_NUM values are really stored.
147 This is set up by find_basic_blocks and used there and in life_analysis,
150 int *uid_block_number;
152 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
154 #define INSN_VOLATILE(INSN) uid_volatile[INSN_UID (INSN)]
155 static char *uid_volatile;
157 /* Number of basic blocks in the current function. */
161 /* Maximum register number used in this function, plus one. */
165 /* Maximum number of SCRATCH rtx's used in any basic block of this
170 /* Number of SCRATCH rtx's in the current block. */
172 static int num_scratch;
174 /* Indexed by n, giving various register information */
176 reg_info *reg_n_info;
178 /* Element N is the next insn that uses (hard or pseudo) register number N
179 within the current basic block; or zero, if there is no such insn.
180 This is valid only during the final backward scan in propagate_block. */
182 static rtx *reg_next_use;
184 /* Size of a regset for the current function,
185 in (1) bytes and (2) elements. */
190 /* Element N is first insn in basic block N.
191 This info lasts until we finish compiling the function. */
193 rtx *basic_block_head;
195 /* Element N is last insn in basic block N.
196 This info lasts until we finish compiling the function. */
198 rtx *basic_block_end;
200 /* Element N indicates whether basic block N can be reached through a
203 char *basic_block_computed_jump_target;
205 /* Element N is a regset describing the registers live
206 at the start of basic block N.
207 This info lasts until we finish compiling the function. */
209 regset *basic_block_live_at_start;
211 /* Regset of regs live when calls to `setjmp'-like functions happen. */
213 regset regs_live_at_setjmp;
215 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
216 that have to go in the same hard reg.
217 The first two regs in the list are a pair, and the next two
218 are another pair, etc. */
221 /* Element N is nonzero if control can drop into basic block N
222 from the preceding basic block. Freed after life_analysis. */
224 static char *basic_block_drops_in;
226 /* Element N is depth within loops of the last insn in basic block number N.
227 Freed after life_analysis. */
229 static short *basic_block_loop_depth;
231 /* Element N nonzero if basic block N can actually be reached.
232 Vector exists only during find_basic_blocks. */
234 static char *block_live_static;
236 /* Depth within loops of basic block being scanned for lifetime analysis,
237 plus one. This is the weight attached to references to registers. */
239 static int loop_depth;
241 /* During propagate_block, this is non-zero if the value of CC0 is live. */
245 /* During propagate_block, this contains the last MEM stored into. It
246 is used to eliminate consecutive stores to the same location. */
248 static rtx last_mem_set;
250 /* Set of registers that may be eliminable. These are handled specially
251 in updating regs_ever_live. */
253 static HARD_REG_SET elim_reg_set;
/* Forward declarations for the static functions defined below.  Several
   PROTO argument lists are cut short by the elided lines.  */
255 /* Forward declarations */
256 static void find_basic_blocks_1 PROTO((rtx, rtx, int));
257 static void mark_label_ref PROTO((rtx, rtx, int));
258 static void life_analysis_1 PROTO((rtx, int));
259 void allocate_for_life_analysis PROTO((void));
260 void init_regset_vector PROTO((regset *, int, struct obstack *));
261 void free_regset_vector PROTO((regset *, int));
262 static void propagate_block PROTO((regset, rtx, rtx, int,
264 static rtx flow_delete_insn PROTO((rtx));
265 static int insn_dead_p PROTO((rtx, regset, int));
266 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
267 static void mark_set_regs PROTO((regset, regset, rtx,
269 static void mark_set_1 PROTO((regset, regset, rtx,
272 static void find_auto_inc PROTO((regset, rtx, rtx));
273 static int try_pre_increment_1 PROTO((rtx));
274 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
276 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
277 void dump_flow_info PROTO((FILE *));
278 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
279 int_list_ptr *, int *, int *));
280 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
281 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
284 /* Find basic blocks of the current function.
285 F is the first insn of the function and NREGS the number of register numbers
287 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
288 be reachable. This turns on a kludge that causes the control flow
289 information to be inaccurate and not suitable for passes like GCSE. */
/* NOTE(review): interior lines of this function are elided in this chunk
   (the declarations of `insn'/`i', braces, and block-counting statements).
   Comments only are added -- do not build from this fragment.  */
292 find_basic_blocks (f, nregs, file, live_reachable_p)
296 int live_reachable_p;
300 rtx nonlocal_label_list = nonlocal_label_rtx_list ();
301 int in_libcall_block = 0;
/* Pass 1: scan all insns, counting basic-block boundaries and
   tracking the largest insn uid seen.  */
303 /* Count the basic blocks. Also find maximum insn uid value used. */
306 register RTX_CODE prev_code = JUMP_INSN;
307 register RTX_CODE code;
310 max_uid_for_flow = 0;
312 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
315 /* Track when we are inside a LIBCALL block. */
316 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
317 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
318 in_libcall_block = 1;
320 code = GET_CODE (insn);
321 if (INSN_UID (insn) > max_uid_for_flow)
322 max_uid_for_flow = INSN_UID (insn);
/* A new block starts at a label, or at the first insn after a jump,
   a barrier, or a call that might transfer control non-locally
   (nonlocal labels or an active EH region) outside a libcall.  */
323 if (code == CODE_LABEL
324 || (GET_RTX_CLASS (code) == 'i'
325 && (prev_code == JUMP_INSN
326 || (prev_code == CALL_INSN
327 && (nonlocal_label_list != 0 || eh_region)
328 && ! in_libcall_block)
329 || prev_code == BARRIER)))
332 if (code == CALL_INSN && find_reg_note (insn, REG_RETVAL, NULL_RTX))
337 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
339 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
342 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
343 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
344 in_libcall_block = 0;
351 /* Leave space for insns life_analysis makes in some cases for auto-inc.
352 These cases are rare, so we don't need too much space. */
353 max_uid_for_flow += max_uid_for_flow / 10;
356 /* Allocate some tables that last till end of compiling this function
357 and some needed only in find_basic_blocks and life_analysis. */
359 basic_block_head = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
360 basic_block_end = (rtx *) xmalloc (n_basic_blocks * sizeof (rtx));
361 basic_block_drops_in = (char *) xmalloc (n_basic_blocks);
362 basic_block_computed_jump_target = (char *) oballoc (n_basic_blocks);
363 basic_block_loop_depth = (short *) xmalloc (n_basic_blocks * sizeof (short));
365 = (int *) xmalloc ((max_uid_for_flow + 1) * sizeof (int));
366 uid_volatile = (char *) xmalloc (max_uid_for_flow + 1);
367 bzero (uid_volatile, max_uid_for_flow + 1);
/* Pass 2 does the real work; see find_basic_blocks_1 below.  */
369 find_basic_blocks_1 (f, nonlocal_label_list, live_reachable_p);
372 /* Find all basic blocks of the function whose first insn is F.
373 Store the correct data in the tables that describe the basic blocks,
374 set up the chains of references for each CODE_LABEL, and
375 delete any entire basic blocks that cannot be reached.
377 NONLOCAL_LABEL_LIST is a list of non-local labels in the function.
378 Blocks that are otherwise unreachable may be reachable with a non-local
380 LIVE_REACHABLE_P is non-zero if the caller needs all live blocks to
381 be reachable. This turns on a kludge that causes the control flow
382 information to be inaccurate and not suitable for passes like GCSE. */
/* NOTE(review): many interior lines (braces, several declarations such as
   `insn', `i', `depth', `pass', `deleted', and whole statements) are elided
   from this chunk.  Comments only are added; the fragment is not buildable
   as shown.  */
385 find_basic_blocks_1 (f, nonlocal_label_list, live_reachable_p)
386 rtx f, nonlocal_label_list;
387 int live_reachable_p;
391 register char *block_live = (char *) alloca (n_basic_blocks);
392 register char *block_marked = (char *) alloca (n_basic_blocks);
393 /* An array of CODE_LABELs, indexed by UID for the start of the active
394 EH handler for each insn in F. */
395 rtx *active_eh_handler;
396 /* List of label_refs to all labels whose addresses are taken
398 rtx label_value_list;
399 rtx x, note, eh_note;
400 enum rtx_code prev_code, code;
402 int in_libcall_block = 0;
405 active_eh_handler = (rtx *) alloca ((max_uid_for_flow + 1) * sizeof (rtx));
408 label_value_list = 0;
409 block_live_static = block_live;
410 bzero (block_live, n_basic_blocks);
411 bzero (block_marked, n_basic_blocks);
412 bzero (basic_block_computed_jump_target, n_basic_blocks);
413 bzero (active_eh_handler, (max_uid_for_flow + 1) * sizeof (rtx));
414 current_function_has_computed_jump = 0;
416 /* Initialize with just block 0 reachable and no blocks marked. */
417 if (n_basic_blocks > 0)
420 /* Initialize the ref chain of each label to 0. Record where all the
421 blocks start and end and their depth in loops. For each insn, record
422 the block it is in. Also mark as reachable any blocks headed by labels
423 that must not be deleted. */
425 for (eh_note = NULL_RTX, insn = f, i = -1, prev_code = JUMP_INSN, depth = 1;
426 insn; insn = NEXT_INSN (insn))
429 /* Track when we are inside a LIBCALL block. */
430 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
431 && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
432 in_libcall_block = 1;
434 code = GET_CODE (insn);
437 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
439 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
443 /* A basic block starts at label, or after something that can jump. */
444 else if (code == CODE_LABEL
445 || (GET_RTX_CLASS (code) == 'i'
446 && (prev_code == JUMP_INSN
447 || (prev_code == CALL_INSN
448 && (nonlocal_label_list != 0 || eh_note)
449 && ! in_libcall_block)
450 || prev_code == BARRIER)))
452 basic_block_head[++i] = insn;
453 basic_block_end[i] = insn;
454 basic_block_loop_depth[i] = depth;
456 if (code == CODE_LABEL)
458 LABEL_REFS (insn) = insn;
459 /* Any label that cannot be deleted
460 is considered to start a reachable block. */
461 if (LABEL_PRESERVE_P (insn))
466 else if (GET_RTX_CLASS (code) == 'i')
468 basic_block_end[i] = insn;
469 basic_block_loop_depth[i] = depth;
472 if (GET_RTX_CLASS (code) == 'i')
474 /* Make a list of all labels referred to other than by jumps. */
475 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
476 if (REG_NOTE_KIND (note) == REG_LABEL)
477 label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
481 /* Keep a lifo list of the currently active exception handlers. */
482 if (GET_CODE (insn) == NOTE)
484 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
486 for (x = exception_handler_labels; x; x = XEXP (x, 1))
487 if (CODE_LABEL_NUMBER (XEXP (x, 0)) == NOTE_BLOCK_NUMBER (insn))
489 eh_note = gen_rtx_EXPR_LIST (VOIDmode,
490 XEXP (x, 0), eh_note);
496 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
497 eh_note = XEXP (eh_note, 1);
499 /* If we encounter a CALL_INSN, note which exception handler it
500 might pass control to.
502 If doing asynchronous exceptions, record the active EH handler
503 for every insn, since most insns can throw. */
505 && (asynchronous_exceptions
506 || (GET_CODE (insn) == CALL_INSN
507 && ! in_libcall_block)))
508 active_eh_handler[INSN_UID (insn)] = XEXP (eh_note, 0);
510 BLOCK_NUM (insn) = i;
515 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
516 && find_reg_note (insn, REG_RETVAL, NULL_RTX))
517 in_libcall_block = 0;
520 /* During the second pass, `n_basic_blocks' is only an upper bound.
521 Only perform the sanity check for the first pass, and on the second
522 pass ensure `n_basic_blocks' is set to the correct value. */
523 if (pass == 1 && i + 1 != n_basic_blocks)
/* presumably an abort () follows on the elided line -- TODO confirm.  */
525 n_basic_blocks = i + 1;
527 /* Record which basic blocks control can drop in to. */
529 for (i = 0; i < n_basic_blocks; i++)
531 for (insn = PREV_INSN (basic_block_head[i]);
532 insn && GET_CODE (insn) == NOTE; insn = PREV_INSN (insn))
535 basic_block_drops_in[i] = insn && GET_CODE (insn) != BARRIER;
538 /* Now find which basic blocks can actually be reached
539 and put all jump insns' LABEL_REFS onto the ref-chains
540 of their target labels. */
542 if (n_basic_blocks > 0)
544 int something_marked = 1;
547 /* Pass over all blocks, marking each block that is reachable
548 and has not yet been marked.
549 Keep doing this until, in one pass, no blocks have been marked.
550 Then blocks_live and blocks_marked are identical and correct.
551 In addition, all jumps actually reachable have been marked. */
553 while (something_marked)
555 something_marked = 0;
556 for (i = 0; i < n_basic_blocks; i++)
557 if (block_live[i] && !block_marked[i])
560 something_marked = 1;
561 if (i + 1 < n_basic_blocks && basic_block_drops_in[i + 1])
562 block_live[i + 1] = 1;
563 insn = basic_block_end[i];
564 if (GET_CODE (insn) == JUMP_INSN)
565 mark_label_ref (PATTERN (insn), insn, 0);
567 /* If we have any forced labels, mark them as potentially
568 reachable from this block. */
569 for (x = forced_labels; x; x = XEXP (x, 1))
570 if (! LABEL_REF_NONLOCAL_P (x))
571 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
574 /* Now scan the insns for this block, we may need to make
575 edges for some of them to various non-obvious locations
576 (exception handlers, nonlocal labels, etc). */
577 for (insn = basic_block_head[i];
578 insn != NEXT_INSN (basic_block_end[i]);
579 insn = NEXT_INSN (insn))
581 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
584 /* References to labels in non-jumping insns have
585 REG_LABEL notes attached to them.
587 This can happen for computed gotos; we don't care
588 about them here since the values are also on the
589 label_value_list and will be marked live if we find
590 a live computed goto.
592 This can also happen when we take the address of
593 a label to pass as an argument to __throw. Note
594 throw only uses the value to determine what handler
595 should be called -- ie the label is not used as
596 a jump target, it just marks regions in the code.
598 In theory we should be able to ignore the REG_LABEL
599 notes, but we have to make sure that the label and
600 associated insns aren't marked dead, so we make
601 the block in question live and create an edge from
602 this insn to the label. This is not strictly
603 correct, but it is close enough for now. */
604 for (note = REG_NOTES (insn);
606 note = XEXP (note, 1))
608 if (REG_NOTE_KIND (note) == REG_LABEL)
611 block_live[BLOCK_NUM (x)] = 1;
612 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, x),
617 /* If this is a computed jump, then mark it as
618 reaching everything on the label_value_list
619 and forced_labels list. */
620 if (computed_jump_p (insn))
622 current_function_has_computed_jump = 1;
623 for (x = label_value_list; x; x = XEXP (x, 1))
625 int b = BLOCK_NUM (XEXP (x, 0));
626 basic_block_computed_jump_target[b] = 1;
627 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
632 for (x = forced_labels; x; x = XEXP (x, 1))
634 int b = BLOCK_NUM (XEXP (x, 0));
635 basic_block_computed_jump_target[b] = 1;
636 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
642 /* If this is a CALL_INSN, then mark it as reaching
643 the active EH handler for this CALL_INSN. If
644 we're handling asynchronous exceptions mark every
645 insn as reaching the active EH handler.
647 Also mark the CALL_INSN as reaching any nonlocal
649 else if (asynchronous_exceptions
650 || (GET_CODE (insn) == CALL_INSN
651 && ! find_reg_note (insn, REG_RETVAL,
654 if (active_eh_handler[INSN_UID (insn)])
655 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
656 active_eh_handler[INSN_UID (insn)]),
659 if (!asynchronous_exceptions)
661 for (x = nonlocal_label_list;
664 mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
668 /* ??? This could be made smarter:
669 in some cases it's possible to tell that
670 certain calls will not do a nonlocal goto.
672 For example, if the nested functions that
673 do the nonlocal gotos do not have their
674 addresses taken, then only calls to those
675 functions or to other nested functions that
676 use them could possibly do nonlocal gotos. */
683 /* This should never happen. If it does that means we've computed an
684 incorrect flow graph, which can lead to aborts/crashes later in the
685 compiler or incorrect code generation.
687 We used to try and continue here, but that's just asking for trouble
688 later during the compile or at runtime. It's easier to debug the
689 problem here than later! */
690 for (i = 1; i < n_basic_blocks; i++)
691 if (block_live[i] && ! basic_block_drops_in[i]
692 && GET_CODE (basic_block_head[i]) == CODE_LABEL
693 && LABEL_REFS (basic_block_head[i]) == basic_block_head[i])
696 /* Now delete the code for any basic blocks that can't be reached.
697 They can occur because jump_optimize does not recognize
698 unreachable loops as unreachable. */
701 for (i = 0; i < n_basic_blocks; i++)
706 /* Delete the insns in a (non-live) block. We physically delete
707 every non-note insn except the start and end (so
708 basic_block_head/end needn't be updated), we turn the latter
709 into NOTE_INSN_DELETED notes.
710 We use to "delete" the insns by turning them into notes, but
711 we may be deleting lots of insns that subsequent passes would
712 otherwise have to process. Secondly, lots of deleted blocks in
713 a row can really slow down propagate_block since it will
714 otherwise process insn-turned-notes multiple times when it
715 looks for loop begin/end notes. */
716 if (basic_block_head[i] != basic_block_end[i])
718 /* It would be quicker to delete all of these with a single
719 unchaining, rather than one at a time, but we need to keep
721 insn = NEXT_INSN (basic_block_head[i]);
722 while (insn != basic_block_end[i])
724 if (GET_CODE (insn) == BARRIER)
726 else if (GET_CODE (insn) != NOTE)
727 insn = flow_delete_insn (insn);
729 insn = NEXT_INSN (insn);
732 insn = basic_block_head[i];
733 if (GET_CODE (insn) != NOTE)
735 /* Turn the head into a deleted insn note. */
736 if (GET_CODE (insn) == BARRIER)
739 /* If the head of this block is a CODE_LABEL, then it might
740 be the label for an exception handler which can't be
743 We need to remove the label from the exception_handler_label
744 list and remove the associated NOTE_EH_REGION_BEG and
745 NOTE_EH_REGION_END notes. */
746 if (GET_CODE (insn) == CODE_LABEL)
748 rtx x, *prev = &exception_handler_labels;
750 for (x = exception_handler_labels; x; x = XEXP (x, 1))
752 if (XEXP (x, 0) == insn)
754 /* Found a match, splice this label out of the
757 XEXP (x, 1) = NULL_RTX;
758 XEXP (x, 0) = NULL_RTX;
760 /* Now we have to find the EH_BEG and EH_END notes
761 associated with this label and remove them. */
763 for (x = get_insns (); x; x = NEXT_INSN (x))
765 if (GET_CODE (x) == NOTE
766 && ((NOTE_LINE_NUMBER (x)
767 == NOTE_INSN_EH_REGION_BEG)
768 || (NOTE_LINE_NUMBER (x)
769 == NOTE_INSN_EH_REGION_END))
770 && (NOTE_BLOCK_NUMBER (x)
771 == CODE_LABEL_NUMBER (insn)))
773 NOTE_LINE_NUMBER (x) = NOTE_INSN_DELETED;
774 NOTE_SOURCE_FILE (x) = 0;
783 PUT_CODE (insn, NOTE);
784 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
785 NOTE_SOURCE_FILE (insn) = 0;
787 insn = basic_block_end[i];
788 if (GET_CODE (insn) != NOTE)
790 /* Turn the tail into a deleted insn note. */
791 if (GET_CODE (insn) == BARRIER)
793 PUT_CODE (insn, NOTE);
794 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
795 NOTE_SOURCE_FILE (insn) = 0;
797 /* BARRIERs are between basic blocks, not part of one.
798 Delete a BARRIER if the preceding jump is deleted.
799 We cannot alter a BARRIER into a NOTE
800 because it is too short; but we can really delete
801 it because it is not part of a basic block. */
802 if (NEXT_INSN (insn) != 0
803 && GET_CODE (NEXT_INSN (insn)) == BARRIER)
804 delete_insn (NEXT_INSN (insn));
806 /* Each time we delete some basic blocks,
807 see if there is a jump around them that is
808 being turned into a no-op. If so, delete it. */
810 if (block_live[i - 1])
813 for (j = i + 1; j < n_basic_blocks; j++)
817 insn = basic_block_end[i - 1];
818 if (GET_CODE (insn) == JUMP_INSN
819 /* An unconditional jump is the only possibility
820 we must check for, since a conditional one
821 would make these blocks live. */
822 && simplejump_p (insn)
823 && (label = XEXP (SET_SRC (PATTERN (insn)), 0), 1)
824 && INSN_UID (label) != 0
825 && BLOCK_NUM (label) == j)
829 /* The deleted blocks still show up in the cfg,
830 so we must set basic_block_drops_in for blocks
831 I to J inclusive to keep the cfg accurate. */
832 for (k = i; k <= j; k++)
833 basic_block_drops_in[k] = 1;
835 PUT_CODE (insn, NOTE);
836 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
837 NOTE_SOURCE_FILE (insn) = 0;
838 if (GET_CODE (NEXT_INSN (insn)) != BARRIER)
840 delete_insn (NEXT_INSN (insn));
847 /* There are pathological cases where one function calling hundreds of
848 nested inline functions can generate lots and lots of unreachable
849 blocks that jump can't delete. Since we don't use sparse matrices
850 a lot of memory will be needed to compile such functions.
851 Implementing sparse matrices is a fair bit of work and it is not
852 clear that they win more than they lose (we don't want to
853 unnecessarily slow down compilation of normal code). By making
854 another pass for the pathological case, we can greatly speed up
855 their compilation without hurting normal code. This works because
856 all the insns in the unreachable blocks have either been deleted or
858 Note that we're talking about reducing memory usage by 10's of
859 megabytes and reducing compilation time by several minutes. */
860 /* ??? The choice of when to make another pass is a bit arbitrary,
861 and was derived from empirical data. */
866 n_basic_blocks -= deleted;
867 /* `n_basic_blocks' may not be correct at this point: two previously
868 separate blocks may now be merged. That's ok though as we
869 recalculate it during the second pass. It certainly can't be
870 any larger than the current value. */
876 /* Record INSN's block number as BB. */
879 set_block_num (insn, bb)
883 if (INSN_UID (insn) >= max_uid_for_flow)
885 /* Add one-eighth the size so we don't keep calling xrealloc. */
886 max_uid_for_flow = INSN_UID (insn) + (INSN_UID (insn) + 7) / 8;
887 uid_block_number = (int *)
888 xrealloc (uid_block_number, (max_uid_for_flow + 1) * sizeof (int));
890 BLOCK_NUM (insn) = bb;
894 /* Subroutines of find_basic_blocks. */
896 /* Check expression X for label references;
897 if one is found, add INSN to the label's chain of references.
899 CHECKDUP means check for and avoid creating duplicate references
900 from the same insn. Such duplicates do no serious harm but
901 can slow life analysis. CHECKDUP is set only when duplicates
/* NOTE(review): several lines of this function (parameter declarations,
   braces, the code = GET_CODE (x) assignment, early returns) are elided
   in this chunk.  Comments only are added.  */
905 mark_label_ref (x, insn, checkdup)
909 register RTX_CODE code;
913 /* We can be called with NULL when scanning label_value_list. */
918 if (code == LABEL_REF)
920 register rtx label = XEXP (x, 0);
922 if (GET_CODE (label) != CODE_LABEL)
924 /* If the label was never emitted, this insn is junk,
925 but avoid a crash trying to refer to BLOCK_NUM (label).
926 This can happen as a result of a syntax error
927 and a diagnostic has already been printed. */
928 if (INSN_UID (label) == 0)
930 CONTAINING_INSN (x) = insn;
931 /* if CHECKDUP is set, check for duplicate ref from same insn
/* Walk the label's existing ref chain looking for INSN.  */
934 for (y = LABEL_REFS (label); y != label; y = LABEL_NEXTREF (y))
935 if (CONTAINING_INSN (y) == insn)
/* Push X onto the label's ref chain and mark the label's block live.  */
937 LABEL_NEXTREF (x) = LABEL_REFS (label);
938 LABEL_REFS (label) = x;
939 block_live_static[BLOCK_NUM (label)] = 1;
/* Not a LABEL_REF: recurse on X's operands, descending into vectors.  */
943 fmt = GET_RTX_FORMAT (code);
944 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
947 mark_label_ref (XEXP (x, i), insn, 0);
951 for (j = 0; j < XVECLEN (x, i); j++)
952 mark_label_ref (XVECEXP (x, i, j), insn, 1);
957 /* Delete INSN by patching it out.
958 Return the next insn. */
961 flow_delete_insn (insn)
964 /* ??? For the moment we assume we don't have to watch for NULLs here
965 since the start/end of basic blocks aren't deleted like this. */
966 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
967 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
968 return NEXT_INSN (insn);
971 /* Perform data flow analysis.
972 F is the first insn of the function and NREGS the number of register numbers
/* NOTE(review): interior lines (parameter declarations, braces, the loop
   variable `i', and at least one #endif) are elided in this chunk.
   Comments only are added.  */
976 life_analysis (f, nregs, file)
981 #ifdef ELIMINABLE_REGS
983 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
986 /* Record which registers will be eliminated. We use this in
989 CLEAR_HARD_REG_SET (elim_reg_set);
991 #ifdef ELIMINABLE_REGS
992 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
993 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
995 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
/* Do the real work, optionally dump, then release what
   find_basic_blocks allocated (keeping basic_block_head/end).  */
998 life_analysis_1 (f, nregs);
1000 dump_flow_info (file);
1002 free_basic_block_vars (1);
1005 /* Free the variables allocated by find_basic_blocks.
1007 KEEP_HEAD_END_P is non-zero if basic_block_head and basic_block_end
1008 are not to be freed. */
/* NOTE(review): braces and a few lines (e.g. the guard around the
   uid_volatile free and its clearing to 0) are elided in this chunk.
   Each pointer is cleared after freeing so later passes (and
   dump_flow_info) can tell the data is gone.  */
1011 free_basic_block_vars (keep_head_end_p)
1012 int keep_head_end_p;
1014 if (basic_block_drops_in)
1016 free (basic_block_drops_in);
1017 /* Tell dump_flow_info this isn't available anymore. */
1018 basic_block_drops_in = 0;
1020 if (basic_block_loop_depth)
1022 free (basic_block_loop_depth);
1023 basic_block_loop_depth = 0;
1025 if (uid_block_number)
1027 free (uid_block_number);
1028 uid_block_number = 0;
1032 free (uid_volatile);
1036 if (! keep_head_end_p && basic_block_head)
1038 free (basic_block_head);
1039 basic_block_head = 0;
1040 free (basic_block_end);
1041 basic_block_end = 0;
1045 /* Determine which registers are live at the start of each
1046 basic block of the function whose first insn is F.
1047 NREGS is the number of registers used in F.
1048 We allocate the vector basic_block_live_at_start
1049 and the regsets that it points to, and fill them with the data.
1050 regset_size and regset_bytes are also set here. */
/* NOTE(review): numbered listing with gaps — parameter declarations,
   local declarations (insn, i, j, first_pass, ...), braces and several
   statements/#else lines are missing.  Code kept verbatim; comments only.  */
1053 life_analysis_1 (f, nregs)
1059 /* For each basic block, a bitmask of regs
1060 live on exit from the block. */
1061 regset *basic_block_live_at_end;
1062 /* For each basic block, a bitmask of regs
1063 live on entry to a successor-block of this block.
1064 If this does not match basic_block_live_at_end,
1065 that must be updated, and the block must be rescanned. */
1066 regset *basic_block_new_live_at_end;
1067 /* For each basic block, a bitmask of regs
1068 whose liveness at the end of the basic block
1069 can make a difference in which regs are live on entry to the block.
1070 These are the regs that are set within the basic block,
1071 possibly excluding those that are used after they are set. */
1072 regset *basic_block_significant;
1076 struct obstack flow_obstack;
1078 gcc_obstack_init (&flow_obstack);
1082 bzero (regs_ever_live, sizeof regs_ever_live);
1084 /* Allocate and zero out many data structures
1085 that will record the data from lifetime analysis. */
1087 allocate_for_life_analysis ();
1089 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
1090 bzero ((char *) reg_next_use, nregs * sizeof (rtx));
1092 /* Set up several regset-vectors used internally within this function.
1093 Their meanings are documented above, with their declarations. */
1095 basic_block_live_at_end
1096 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1098 /* Don't use alloca since that leads to a crash rather than an error message
1099 if there isn't enough space.
1100 Don't use oballoc since we may need to allocate other things during
1101 this function on the temporary obstack. */
1102 init_regset_vector (basic_block_live_at_end, n_basic_blocks, &flow_obstack);
1104 basic_block_new_live_at_end
1105 = (regset *) alloca (n_basic_blocks * sizeof (regset));
/* NOTE(review): the second argument line of this call (`&flow_obstack`)
   appears to be missing from the listing.  */
1106 init_regset_vector (basic_block_new_live_at_end, n_basic_blocks,
1109 basic_block_significant
1110 = (regset *) alloca (n_basic_blocks * sizeof (regset));
1111 init_regset_vector (basic_block_significant, n_basic_blocks, &flow_obstack);
1113 /* Record which insns refer to any volatile memory
1114 or for any reason can't be deleted just because they are dead stores.
1115 Also, delete any insns that copy a register to itself. */
1117 for (insn = f; insn; insn = NEXT_INSN (insn))
1119 enum rtx_code code1 = GET_CODE (insn);
1120 if (code1 == CALL_INSN)
1121 INSN_VOLATILE (insn) = 1;
1122 else if (code1 == INSN || code1 == JUMP_INSN)
1124 /* Delete (in effect) any obvious no-op moves. */
1125 if (GET_CODE (PATTERN (insn)) == SET
1126 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
1127 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
1128 && (REGNO (SET_DEST (PATTERN (insn)))
1129 == REGNO (SET_SRC (PATTERN (insn))))
1130 /* Insns carrying these notes are useful later on. */
1131 && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
/* "Delete" means turning the insn into a NOTE_INSN_DELETED note rather
   than unlinking it from the chain.  */
1133 PUT_CODE (insn, NOTE);
1134 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1135 NOTE_SOURCE_FILE (insn) = 0;
1137 /* Delete (in effect) any obvious no-op moves. */
1138 else if (GET_CODE (PATTERN (insn)) == SET
1139 && GET_CODE (SET_DEST (PATTERN (insn))) == SUBREG
1140 && GET_CODE (SUBREG_REG (SET_DEST (PATTERN (insn)))) == REG
1141 && GET_CODE (SET_SRC (PATTERN (insn))) == SUBREG
1142 && GET_CODE (SUBREG_REG (SET_SRC (PATTERN (insn)))) == REG
1143 && (REGNO (SUBREG_REG (SET_DEST (PATTERN (insn))))
1144 == REGNO (SUBREG_REG (SET_SRC (PATTERN (insn)))))
1145 && SUBREG_WORD (SET_DEST (PATTERN (insn))) ==
1146 SUBREG_WORD (SET_SRC (PATTERN (insn)))
1147 /* Insns carrying these notes are useful later on. */
1148 && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
1150 PUT_CODE (insn, NOTE);
1151 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1152 NOTE_SOURCE_FILE (insn) = 0;
1154 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
1156 /* If nothing but SETs of registers to themselves,
1157 this insn can also be deleted. */
1158 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1160 rtx tem = XVECEXP (PATTERN (insn), 0, i);
/* NOTE(review): the `continue;`/`break;` bodies of these two tests are
   missing from the listing — the loop logic reads: skip USE/CLOBBER
   elements, stop early on any element that is not a self-copy SET.  */
1162 if (GET_CODE (tem) == USE
1163 || GET_CODE (tem) == CLOBBER)
1166 if (GET_CODE (tem) != SET
1167 || GET_CODE (SET_DEST (tem)) != REG
1168 || GET_CODE (SET_SRC (tem)) != REG
1169 || REGNO (SET_DEST (tem)) != REGNO (SET_SRC (tem)))
1173 if (i == XVECLEN (PATTERN (insn), 0)
1174 /* Insns carrying these notes are useful later on. */
1175 && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
1177 PUT_CODE (insn, NOTE);
1178 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1179 NOTE_SOURCE_FILE (insn) = 0;
1182 INSN_VOLATILE (insn) = volatile_refs_p (PATTERN (insn));
1184 else if (GET_CODE (PATTERN (insn)) != USE)
1185 INSN_VOLATILE (insn) = volatile_refs_p (PATTERN (insn));
1186 /* A SET that makes space on the stack cannot be dead.
1187 (Such SETs occur only for allocating variable-size data,
1188 so they will always have a PLUS or MINUS according to the
1189 direction of stack growth.)
1190 Even if this function never uses this stack pointer value,
1191 signal handlers do! */
1192 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
1193 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1194 #ifdef STACK_GROWS_DOWNWARD
1195 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
/* NOTE(review): the `#else` line between the MINUS and PLUS alternatives
   is missing from the listing.  */
1197 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1199 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
1200 INSN_VOLATILE (insn) = 1;
/* Seed the liveness sets of the exit block with registers that must be
   considered live at function exit: the stack pointer (unless the target
   says exit ignores it), the frame pointer(s), and global/epilogue regs.  */
1204 if (n_basic_blocks > 0)
1205 #ifdef EXIT_IGNORE_STACK
1206 if (! EXIT_IGNORE_STACK
1207 || (! FRAME_POINTER_REQUIRED
1208 && ! current_function_calls_alloca
1209 && flag_omit_frame_pointer))
1212 /* If exiting needs the right stack value,
1213 consider the stack pointer live at the end of the function. */
1214 SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1],
1215 STACK_POINTER_REGNUM);
1216 SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1217 STACK_POINTER_REGNUM);
1220 /* Mark the frame pointer is needed at the end of the function. If
1221 we end up eliminating it, it will be removed from the live list
1222 of each basic block by reload. */
1224 if (n_basic_blocks > 0)
1226 SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1],
1227 FRAME_POINTER_REGNUM);
1228 SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1229 FRAME_POINTER_REGNUM);
1230 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1231 /* If they are different, also mark the hard frame pointer as live */
1232 SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1],
1233 HARD_FRAME_POINTER_REGNUM);
1234 SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1],
1235 HARD_FRAME_POINTER_REGNUM);
1239 /* Mark all global registers and all registers used by the epilogue
1240 as being live at the end of the function since they may be
1241 referenced by our caller. */
1243 if (n_basic_blocks > 0)
1244 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* NOTE(review): the first operand of this condition (the line before the
   dangling `||`, presumably `if (global_regs[i]`) is missing from the
   listing.  */
1246 #ifdef EPILOGUE_USES
1247 || EPILOGUE_USES (i)
1251 SET_REGNO_REG_SET (basic_block_live_at_end[n_basic_blocks - 1], i);
1252 SET_REGNO_REG_SET (basic_block_new_live_at_end[n_basic_blocks - 1], i);
1255 /* Propagate life info through the basic blocks
1256 around the graph of basic blocks.
1258 This is a relaxation process: each time a new register
1259 is live at the end of the basic block, we must scan the block
1260 to determine which registers are, as a consequence, live at the beginning
1261 of that block. These registers must then be marked live at the ends
1262 of all the blocks that can transfer control to that block.
1263 The process continues until it reaches a fixed point. */
/* NOTE(review): the outer fixed-point loop header (over passes, with
   first_pass / changed bookkeeping) is missing from the listing; only the
   per-block inner loop is visible.  Blocks are walked from last to first
   because liveness flows backwards.  */
1270 for (i = n_basic_blocks - 1; i >= 0; i--)
1272 int consider = first_pass;
1273 int must_rescan = first_pass;
1278 /* Set CONSIDER if this block needs thinking about at all
1279 (that is, if the regs live now at the end of it
1280 are not the same as were live at the end of it when
1281 we last thought about it).
1282 Set must_rescan if it needs to be thought about
1283 instruction by instruction (that is, if any additional
1284 reg that is live at the end now but was not live there before
1285 is one of the significant regs of this basic block). */
1287 EXECUTE_IF_AND_COMPL_IN_REG_SET
1288 (basic_block_new_live_at_end[i],
1289 basic_block_live_at_end[i], 0, j,
1292 if (REGNO_REG_SET_P (basic_block_significant[i], j))
1303 /* The live_at_start of this block may be changing,
1304 so another pass will be required after this one. */
1309 /* No complete rescan needed;
1310 just record those variables newly known live at end
1311 as live at start as well. */
1312 IOR_AND_COMPL_REG_SET (basic_block_live_at_start[i],
1313 basic_block_new_live_at_end[i],
1314 basic_block_live_at_end[i]);
1316 IOR_AND_COMPL_REG_SET (basic_block_live_at_end[i],
1317 basic_block_new_live_at_end[i],
1318 basic_block_live_at_end[i]);
1322 /* Update the basic_block_live_at_start
1323 by propagation backwards through the block. */
1324 COPY_REG_SET (basic_block_live_at_end[i],
1325 basic_block_new_live_at_end[i]);
1326 COPY_REG_SET (basic_block_live_at_start[i],
1327 basic_block_live_at_end[i]);
1328 propagate_block (basic_block_live_at_start[i],
1329 basic_block_head[i], basic_block_end[i], 0,
1330 first_pass ? basic_block_significant[i]
1336 register rtx jump, head;
1338 /* Update the basic_block_new_live_at_end's of the block
1339 that falls through into this one (if any). */
1340 head = basic_block_head[i];
1341 if (basic_block_drops_in[i])
1342 IOR_REG_SET (basic_block_new_live_at_end[i-1],
1343 basic_block_live_at_start[i]);
1345 /* Update the basic_block_new_live_at_end's of
1346 all the blocks that jump to this one. */
1347 if (GET_CODE (head) == CODE_LABEL)
1348 for (jump = LABEL_REFS (head);
1350 jump = LABEL_NEXTREF (jump))
1352 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
1353 IOR_REG_SET (basic_block_new_live_at_end[from_block],
1354 basic_block_live_at_start[i]);
1364 /* The only pseudos that are live at the beginning of the function are
1365 those that were not set anywhere in the function. local-alloc doesn't
1366 know how to handle these correctly, so mark them as not local to any
1369 if (n_basic_blocks > 0)
1370 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[0],
1371 FIRST_PSEUDO_REGISTER, i,
1373 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1376 /* Now the life information is accurate.
1377 Make one more pass over each basic block
1378 to delete dead stores, create autoincrement addressing
1379 and record how many times each register is used, is set, or dies.
1381 To save time, we operate directly in basic_block_live_at_end[i],
1382 thus destroying it (in fact, converting it into a copy of
1383 basic_block_live_at_start[i]). This is ok now because
1384 basic_block_live_at_end[i] is no longer used past this point. */
1388 for (i = 0; i < n_basic_blocks; i++)
/* Final pass: FINAL argument is 1, so propagate_block acts on the info
   (deletes dead stores, builds LOG_LINKS / REG_DEAD notes, auto-inc).  */
1390 propagate_block (basic_block_live_at_end[i],
1391 basic_block_head[i], basic_block_end[i], 1,
1399 /* Something live during a setjmp should not be put in a register
1400 on certain machines which restore regs from stack frames
1401 rather than from the jmpbuf.
1402 But we don't need to do this for the user's variables, since
1403 ANSI says only volatile variables need this. */
1404 #ifdef LONGJMP_RESTORE_FROM_STACK
1405 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1406 FIRST_PSEUDO_REGISTER, i,
1408 if (regno_reg_rtx[i] != 0
1409 && ! REG_USERVAR_P (regno_reg_rtx[i]))
/* LIVE_LENGTH/BASIC_BLOCK of -1 marks the pseudo as unsuitable for a
   hard register.  */
1411 REG_LIVE_LENGTH (i) = -1;
1412 REG_BASIC_BLOCK (i) = -1;
1418 /* We have a problem with any pseudoreg that
1419 lives across the setjmp. ANSI says that if a
1420 user variable does not change in value
1421 between the setjmp and the longjmp, then the longjmp preserves it.
1422 This includes longjmp from a place where the pseudo appears dead.
1423 (In principle, the value still exists if it is in scope.)
1424 If the pseudo goes in a hard reg, some other value may occupy
1425 that hard reg where this pseudo is dead, thus clobbering the pseudo.
1426 Conclusion: such a pseudo must not go in a hard reg. */
1427 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
1428 FIRST_PSEUDO_REGISTER, i,
1430 if (regno_reg_rtx[i] != 0)
1432 REG_LIVE_LENGTH (i) = -1;
1433 REG_BASIC_BLOCK (i) = -1;
/* Release the temporary per-block regset vectors and the scratch
   obstack; only basic_block_live_at_start (allocated permanently in
   allocate_for_life_analysis) survives this function.  */
1438 free_regset_vector (basic_block_live_at_end, n_basic_blocks);
1439 free_regset_vector (basic_block_new_live_at_end, n_basic_blocks);
1440 free_regset_vector (basic_block_significant, n_basic_blocks);
1441 basic_block_live_at_end = (regset *)0;
1442 basic_block_new_live_at_end = (regset *)0;
1443 basic_block_significant = (regset *)0;
1445 obstack_free (&flow_obstack, NULL_PTR);
1448 /* Subroutines of life analysis. */
1450 /* Allocate the permanent data structures that represent the results
1451 of life analysis. Not static since used also for stupid life analysis. */
1454 allocate_for_life_analysis ()
1458 /* Recalculate the register space, in case it has grown. Old style
1459 vector oriented regsets would set regset_{size,bytes} here also. */
1460 allocate_reg_info (max_regno, FALSE, FALSE);
1462 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
1463 information, explicitly reset it here. The allocation should have
1464 already happened on the previous reg_scan pass. Make sure in case
1465 some more registers were allocated. */
/* NOTE(review): the body of this loop (presumably `REG_N_SETS (i) = 0;`)
   is missing from this listing — confirm against the full file.  */
1466 for (i = 0; i < max_regno; i++)
/* basic_block_live_at_start is the pass's permanent output, so it is
   oballoc'd rather than placed on the temporary flow obstack.  */
1469 basic_block_live_at_start
1470 = (regset *) oballoc (n_basic_blocks * sizeof (regset));
1471 init_regset_vector (basic_block_live_at_start, n_basic_blocks,
1474 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
1475 CLEAR_REG_SET (regs_live_at_setjmp);
1478 /* Make each element of VECTOR point at a regset. The vector has
1479 NELTS elements, and space is allocated from the ALLOC_OBSTACK
1483 init_regset_vector (vector, nelts, alloc_obstack)
1486 struct obstack *alloc_obstack;
/* Allocate one regset per slot on the given obstack and clear it so the
   caller starts from an empty liveness set.  */
1490 for (i = 0; i < nelts; i++)
1492 vector[i] = OBSTACK_ALLOC_REG_SET (alloc_obstack);
1493 CLEAR_REG_SET (vector[i]);
1497 /* Release any additional space allocated for each element of VECTOR point
1498 other than the regset header itself. The vector has NELTS elements. */
1501 free_regset_vector (vector, nelts)
/* Counterpart of init_regset_vector: frees each element's regset storage;
   the vector array itself is owned by the caller.  */
1507 for (i = 0; i < nelts; i++)
1508 FREE_REG_SET (vector[i]);
1511 /* Compute the registers live at the beginning of a basic block
1512 from those live at the end.
1514 When called, OLD contains those live at the end.
1515 On return, it contains those live at the beginning.
1516 FIRST and LAST are the first and last insns of the basic block.
1518 FINAL is nonzero if we are doing the final pass which is not
1519 for computing the life info (since that has already been done)
1520 but for acting on it. On this pass, we delete dead stores,
1521 set up the logical links and dead-variables lists of instructions,
1522 and merge instructions for autoincrement and autodecrement addresses.
1524 SIGNIFICANT is nonzero only the first time for each basic block.
1525 If it is nonzero, it points to a regset in which we store
1526 a 1 for each register that is set within the block.
1528 BNUM is the number of the basic block. */
/* NOTE(review): numbered listing with gaps — several local declarations,
   braces, #ifdef lines, and loop-depth adjustment statements are missing.
   Code kept verbatim; comments only.  */
1531 propagate_block (old, first, last, final, significant, bnum)
1532 register regset old;
1544 /* The following variables are used only if FINAL is nonzero. */
1545 /* This vector gets one element for each reg that has been live
1546 at any point in the basic block that has been scanned so far.
1547 SOMETIMES_MAX says how many elements are in use so far. */
1548 register int *regs_sometimes_live;
1549 int sometimes_max = 0;
1550 /* This regset has 1 for each reg that we have seen live so far.
1551 It and REGS_SOMETIMES_LIVE are updated together. */
1554 /* The loop depth may change in the middle of a basic block. Since we
1555 scan from end to beginning, we start with the depth at the end of the
1556 current basic block, and adjust as we pass ends and starts of loops. */
1557 loop_depth = basic_block_loop_depth[bnum];
1559 dead = ALLOCA_REG_SET ();
1560 live = ALLOCA_REG_SET ();
1565 /* Include any notes at the end of the block in the scan.
1566 This is in case the block ends with a call to setjmp. */
1568 while (NEXT_INSN (last) != 0 && GET_CODE (NEXT_INSN (last)) == NOTE)
1570 /* Look for loop boundaries, we are going forward here. */
1571 last = NEXT_INSN (last);
/* NOTE(review): the loop_depth increment/decrement bodies for these two
   tests are missing from the listing.  */
1572 if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_BEG)
1574 else if (NOTE_LINE_NUMBER (last) == NOTE_INSN_LOOP_END)
/* FINAL-pass setup: MAXLIVE tracks every reg seen live anywhere in the
   block; regs_sometimes_live is its dense index list.  */
1583 maxlive = ALLOCA_REG_SET ();
1584 COPY_REG_SET (maxlive, old);
1585 regs_sometimes_live = (int *) alloca (max_regno * sizeof (int));
1587 /* Process the regs live at the end of the block.
1588 Enter them in MAXLIVE and REGS_SOMETIMES_LIVE.
1589 Also mark them as not local to any one basic block. */
1590 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
1592 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
1593 regs_sometimes_live[sometimes_max] = i;
1598 /* Scan the block an insn at a time from end to beginning. */
1600 for (insn = last; ; insn = prev)
1602 prev = PREV_INSN (insn);
1604 if (GET_CODE (insn) == NOTE)
1606 /* Look for loop boundaries, remembering that we are going
1608 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1610 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1613 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
1614 Abort now rather than setting register status incorrectly. */
1615 if (loop_depth == 0)
1618 /* If this is a call to `setjmp' et al,
1619 warn if any non-volatile datum is live. */
1621 if (final && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
1622 IOR_REG_SET (regs_live_at_setjmp, old);
1625 /* Update the life-status of regs for this insn.
1626 First DEAD gets which regs are set in this insn
1627 then LIVE gets which regs are used in this insn.
1628 Then the regs live before the insn
1629 are those live after, with DEAD regs turned off,
1630 and then LIVE regs turned on. */
1632 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1635 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
/* NOTE(review): the lvalue lines for insn_is_dead / libcall_is_dead are
   missing from the listing; only the initializer fragments remain.  */
1637 = (insn_dead_p (PATTERN (insn), old, 0)
1638 /* Don't delete something that refers to volatile storage! */
1639 && ! INSN_VOLATILE (insn));
1641 = (insn_is_dead && note != 0
1642 && libcall_dead_p (PATTERN (insn), old, note, insn));
1644 /* If an instruction consists of just dead store(s) on final pass,
1645 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
1646 We could really delete it with delete_insn, but that
1647 can cause trouble for first or last insn in a basic block. */
1648 if (final && insn_is_dead)
1650 PUT_CODE (insn, NOTE);
1651 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1652 NOTE_SOURCE_FILE (insn) = 0;
1654 /* CC0 is now known to be dead. Either this insn used it,
1655 in which case it doesn't anymore, or clobbered it,
1656 so the next insn can't use it. */
1659 /* If this insn is copying the return value from a library call,
1660 delete the entire library call. */
1661 if (libcall_is_dead)
1663 rtx first = XEXP (note, 0);
1665 while (INSN_DELETED_P (first))
1666 first = NEXT_INSN (first);
/* NOTE(review): the loop over the libcall's insns (binding `p`) is
   missing from the listing; only its NOTE-conversion body survives.  */
1671 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
1672 NOTE_SOURCE_FILE (p) = 0;
1678 CLEAR_REG_SET (dead);
1679 CLEAR_REG_SET (live);
1681 /* See if this is an increment or decrement that can be
1682 merged into a following memory address. */
1685 register rtx x = single_set (insn);
1687 /* Does this instruction increment or decrement a register? */
1689 && GET_CODE (SET_DEST (x)) == REG
1690 && (GET_CODE (SET_SRC (x)) == PLUS
1691 || GET_CODE (SET_SRC (x)) == MINUS)
1692 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
1693 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1694 /* Ok, look for a following memory ref we can combine with.
1695 If one is found, change the memory ref to a PRE_INC
1696 or PRE_DEC, cancel this insn, and return 1.
1697 Return 0 if nothing has been done. */
1698 && try_pre_increment_1 (insn))
1701 #endif /* AUTO_INC_DEC */
1703 /* If this is not the final pass, and this insn is copying the
1704 value of a library call and it's dead, don't scan the
1705 insns that perform the library call, so that the call's
1706 arguments are not marked live. */
1707 if (libcall_is_dead)
1709 /* Mark the dest reg as `significant'. */
1710 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX, significant);
/* Skip directly back to the first insn of the libcall sequence.  */
1712 insn = XEXP (note, 0);
1713 prev = PREV_INSN (insn);
1715 else if (GET_CODE (PATTERN (insn)) == SET
1716 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
1717 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
1718 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
1719 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
1720 /* We have an insn to pop a constant amount off the stack.
1721 (Such insns use PLUS regardless of the direction of the stack,
1722 and any insn to adjust the stack by a constant is always a pop.)
1723 These insns, if not dead stores, have no effect on life. */
1727 /* LIVE gets the regs used in INSN;
1728 DEAD gets those set by it. Dead insns don't make anything
1731 mark_set_regs (old, dead, PATTERN (insn),
1732 final ? insn : NULL_RTX, significant);
1734 /* If an insn doesn't use CC0, it becomes dead since we
1735 assume that every insn clobbers it. So show it dead here;
1736 mark_used_regs will set it live if it is referenced. */
1740 mark_used_regs (old, live, PATTERN (insn), final, insn);
1742 /* Sometimes we may have inserted something before INSN (such as
1743 a move) when we make an auto-inc. So ensure we will scan
1746 prev = PREV_INSN (insn);
1749 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
/* Record uses listed in CALL_INSN_FUNCTION_USAGE as register uses.  */
1755 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1757 note = XEXP (note, 1))
1758 if (GET_CODE (XEXP (note, 0)) == USE)
1759 mark_used_regs (old, live, SET_DEST (XEXP (note, 0)),
1762 /* Each call clobbers all call-clobbered regs that are not
1763 global or fixed. Note that the function-value reg is a
1764 call-clobbered reg, and mark_set_regs has already had
1765 a chance to handle it. */
1767 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1768 if (call_used_regs[i] && ! global_regs[i]
1770 SET_REGNO_REG_SET (dead, i);
1772 /* The stack ptr is used (honorarily) by a CALL insn. */
1773 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
1775 /* Calls may also reference any of the global registers,
1776 so they are made live. */
1777 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1779 mark_used_regs (old, live,
1780 gen_rtx_REG (reg_raw_mode[i], i),
1783 /* Calls also clobber memory. */
1787 /* Update OLD for the registers used or set. */
1788 AND_COMPL_REG_SET (old, dead);
1789 IOR_REG_SET (old, live);
1791 if (GET_CODE (insn) == CALL_INSN && final)
1793 /* Any regs live at the time of a call instruction
1794 must not go in a register clobbered by calls.
1795 Find all regs now live and record this for them. */
1797 register int *p = regs_sometimes_live;
1799 for (i = 0; i < sometimes_max; i++, p++)
1800 if (REGNO_REG_SET_P (old, *p))
1801 REG_N_CALLS_CROSSED (*p)++;
1805 /* On final pass, add any additional sometimes-live regs
1806 into MAXLIVE and REGS_SOMETIMES_LIVE.
1807 Also update counts of how many insns each reg is live at. */
1814 EXECUTE_IF_AND_COMPL_IN_REG_SET
1815 (live, maxlive, 0, regno,
1817 regs_sometimes_live[sometimes_max++] = regno;
1818 SET_REGNO_REG_SET (maxlive, regno);
1821 p = regs_sometimes_live;
1822 for (i = 0; i < sometimes_max; i++)
1825 if (REGNO_REG_SET_P (old, regno))
1826 REG_LIVE_LENGTH (regno)++;
/* Release the scratch regsets created at entry (maxlive only exists on
   the final pass).  */
1835 FREE_REG_SET (dead);
1836 FREE_REG_SET (live);
1838 FREE_REG_SET (maxlive);
1840 if (num_scratch > max_scratch)
1841 max_scratch = num_scratch;
1844 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
1845 (SET expressions whose destinations are registers dead after the insn).
1846 NEEDED is the regset that says which regs are alive after the insn.
1848 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL. */
/* NOTE(review): numbered listing with gaps — parameter declarations,
   braces, #endif lines and several return statements are missing.
   Code kept verbatim; comments only.  */
1851 insn_dead_p (x, needed, call_ok)
1856 register RTX_CODE code = GET_CODE (x);
1857 /* If setting something that's a reg or part of one,
1858 see if that register's altered value will be live. */
1862 register rtx r = SET_DEST (x);
1863 /* A SET that is a subroutine call cannot be dead. */
1864 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
1868 if (GET_CODE (r) == CC0)
/* A store to memory that exactly repeats the previous remembered store
   (last_mem_set) is redundant — hedged: the result lines of these tests
   are missing from the listing.  */
1872 if (GET_CODE (r) == MEM && last_mem_set && ! MEM_VOLATILE_P (r)
1873 && rtx_equal_p (r, last_mem_set))
/* Strip wrappers to reach the underlying REG, if any.  */
1876 while (GET_CODE (r) == SUBREG
1877 || GET_CODE (r) == STRICT_LOW_PART
1878 || GET_CODE (r) == ZERO_EXTRACT
1879 || GET_CODE (r) == SIGN_EXTRACT)
1882 if (GET_CODE (r) == REG)
1884 register int regno = REGNO (r);
1886 /* Don't delete insns to set global regs. */
1887 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1888 /* Make sure insns to set frame pointer aren't deleted. */
1889 || regno == FRAME_POINTER_REGNUM
1890 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1891 || regno == HARD_FRAME_POINTER_REGNUM
1893 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1894 /* Make sure insns to set arg pointer are never deleted
1895 (if the arg pointer isn't fixed, there will be a USE for
1896 it, so we can treat it normally). */
1897 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
1899 || REGNO_REG_SET_P (needed, regno))
1902 /* If this is a hard register, verify that subsequent words are
1904 if (regno < FIRST_PSEUDO_REGISTER)
1906 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
1909 if (REGNO_REG_SET_P (needed, regno+n))
1916 /* If performing several activities,
1917 insn is dead if each activity is individually dead.
1918 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
1919 that's inside a PARALLEL doesn't make the insn worth keeping. */
1920 else if (code == PARALLEL)
1922 register int i = XVECLEN (x, 0);
1923 for (i--; i >= 0; i--)
1925 rtx elt = XVECEXP (x, 0, i);
1926 if (!insn_dead_p (elt, needed, call_ok)
1927 && GET_CODE (elt) != CLOBBER
1928 && GET_CODE (elt) != USE)
1933 /* We do not check CLOBBER or USE here.
1934 An insn consisting of just a CLOBBER or just a USE
1935 should not be deleted. */
1939 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
1940 return 1 if the entire library call is dead.
1941 This is true if X copies a register (hard or pseudo)
1942 and if the hard return reg of the call insn is dead.
1943 (The caller should have tested the destination of X already for death.)
1945 If this insn doesn't just copy a register, then we don't
1946 have an ordinary libcall. In that case, cse could not have
1947 managed to substitute the source for the dest later on,
1948 so we can assume the libcall is dead.
1950 NEEDED is the bit vector of pseudoregs live before this insn.
1951 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
/* NOTE(review): numbered listing with gaps — parameter declarations,
   braces and some early-return lines are missing.  Code kept verbatim.  */
1954 libcall_dead_p (x, needed, note, insn)
1960 register RTX_CODE code = GET_CODE (x);
1964 register rtx r = SET_SRC (x);
1965 if (GET_CODE (r) == REG)
/* The REG_RETVAL note points at the first insn of the libcall sequence;
   walk forward from there to the CALL_INSN itself.  */
1967 rtx call = XEXP (note, 0);
1970 /* Find the call insn. */
1971 while (call != insn && GET_CODE (call) != CALL_INSN)
1972 call = NEXT_INSN (call);
1974 /* If there is none, do nothing special,
1975 since ordinary death handling can understand these insns. */
1979 /* See if the hard reg holding the value is dead.
1980 If this is a PARALLEL, find the call within it. */
1981 call = PATTERN (call);
1982 if (GET_CODE (call) == PARALLEL)
1984 for (i = XVECLEN (call, 0) - 1; i >= 0; i--)
1985 if (GET_CODE (XVECEXP (call, 0, i)) == SET
1986 && GET_CODE (SET_SRC (XVECEXP (call, 0, i))) == CALL)
1989 /* This may be a library call that is returning a value
1990 via invisible pointer. Do nothing special, since
1991 ordinary death handling can understand these insns. */
1995 call = XVECEXP (call, 0, i);
/* CALL_OK is 1 here: the embedded CALL must not by itself make the
   insn "needed" while testing whether its value is dead.  */
1998 return insn_dead_p (call, needed, 1);
2004 /* Return 1 if register REGNO was used before it was set.
2005 In other words, if it is live at function entry.
2006 Don't count global register variables or variables in registers
2007 that can be used for function arg passing, though. */
2010 regno_uninitialized (regno)
/* Liveness at the start of block 0 means the reg's value flows in from
   before the function body, i.e. it was never initialized here.
   NOTE(review): the early `return 0;` body of this guard is missing
   from the listing.  */
2013 if (n_basic_blocks == 0
2014 || (regno < FIRST_PSEUDO_REGISTER
2015 && (global_regs[regno] || FUNCTION_ARG_REGNO_P (regno))))
2018 return REGNO_REG_SET_P (basic_block_live_at_start[0], regno);
2021 /* 1 if register REGNO was alive at a place where `setjmp' was called
2022 and was set more than once or is an argument.
2023 Such regs may be clobbered by `longjmp'. */
2026 regno_clobbered_at_setjmp (regno)
/* With no basic blocks there is no liveness data to consult.
   NOTE(review): the `return 0;` body of this guard is missing from the
   listing.  */
2029 if (n_basic_blocks == 0)
/* Live at function entry counts the same as "set more than once":
   either way its value may differ between setjmp and longjmp.  */
2032 return ((REG_N_SETS (regno) > 1
2033 || REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
2034 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
2037 /* Process the registers that are set within X.
2038 Their bits are set to 1 in the regset DEAD,
2039 because they are dead prior to this insn.
2041 If INSN is nonzero, it is the insn being processed
2042 and the fact that it is nonzero implies this is the FINAL pass
2043 in propagate_block. In this case, various info about register
2044 usage is stored, LOG_LINKS fields of insns are set up. */
2047 mark_set_regs (needed, dead, x, insn, significant)
2054 register RTX_CODE code = GET_CODE (x);
/* Dispatch: a plain SET/CLOBBER is handled directly; a PARALLEL is
   walked element by element so each SET/CLOBBER inside it is processed.
   Other codes are ignored.  */
2056 if (code == SET || code == CLOBBER)
2057 mark_set_1 (needed, dead, x, insn, significant);
2058 else if (code == PARALLEL)
2061 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2063 code = GET_CODE (XVECEXP (x, 0, i));
2064 if (code == SET || code == CLOBBER)
2065 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn, significant);
2070 /* Process a single SET rtx, X. */
2073 mark_set_1 (needed, dead, x, insn, significant)
2081 register rtx reg = SET_DEST (x);
2083 /* Modifying just one hardware register of a multi-reg value
2084 or just a byte field of a register
2085 does not mean the value from before this insn is now dead.
2086 But it does mean liveness of that register at the end of the block
2089 Within mark_set_1, however, we treat it as if the register is
2090 indeed modified. mark_used_regs will, however, also treat this
2091 register as being used. Thus, we treat these insns as setting a
2092 new value for the register as a function of its old value. This
2093 cases LOG_LINKS to be made appropriately and this will help combine. */
2095 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
2096 || GET_CODE (reg) == SIGN_EXTRACT
2097 || GET_CODE (reg) == STRICT_LOW_PART)
2098 reg = XEXP (reg, 0);
2100 /* If we are writing into memory or into a register mentioned in the
2101 address of the last thing stored into memory, show we don't know
2102 what the last store was. If we are writing memory, save the address
2103 unless it is volatile. */
2104 if (GET_CODE (reg) == MEM
2105 || (GET_CODE (reg) == REG
2106 && last_mem_set != 0 && reg_overlap_mentioned_p (reg, last_mem_set)))
2109 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
2110 /* There are no REG_INC notes for SP, so we can't assume we'll see
2111 everything that invalidates it. To be safe, don't eliminate any
2112 stores though SP; none of them should be redundant anyway. */
2113 && ! reg_mentioned_p (stack_pointer_rtx, reg))
2116 if (GET_CODE (reg) == REG
2117 && (regno = REGNO (reg), regno != FRAME_POINTER_REGNUM)
2118 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2119 && regno != HARD_FRAME_POINTER_REGNUM
2121 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2122 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2124 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
2125 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
2127 int some_needed = REGNO_REG_SET_P (needed, regno);
2128 int some_not_needed = ! some_needed;
2130 /* Mark it as a significant register for this basic block. */
2132 SET_REGNO_REG_SET (significant, regno);
2134 /* Mark it as as dead before this insn. */
2135 SET_REGNO_REG_SET (dead, regno);
2137 /* A hard reg in a wide mode may really be multiple registers.
2138 If so, mark all of them just like the first. */
2139 if (regno < FIRST_PSEUDO_REGISTER)
2143 /* Nothing below is needed for the stack pointer; get out asap.
2144 Eg, log links aren't needed, since combine won't use them. */
2145 if (regno == STACK_POINTER_REGNUM)
2148 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2151 int regno_n = regno + n;
2152 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2154 SET_REGNO_REG_SET (significant, regno_n);
2156 SET_REGNO_REG_SET (dead, regno_n);
2157 some_needed |= needed_regno;
2158 some_not_needed |= ! needed_regno;
2161 /* Additional data to record if this is the final pass. */
2164 register rtx y = reg_next_use[regno];
2165 register int blocknum = BLOCK_NUM (insn);
2167 /* If this is a hard reg, record this function uses the reg. */
2169 if (regno < FIRST_PSEUDO_REGISTER)
2172 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
2174 for (i = regno; i < endregno; i++)
2176 /* The next use is no longer "next", since a store
2178 reg_next_use[i] = 0;
2180 regs_ever_live[i] = 1;
2186 /* The next use is no longer "next", since a store
2188 reg_next_use[regno] = 0;
2190 /* Keep track of which basic blocks each reg appears in. */
2192 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2193 REG_BASIC_BLOCK (regno) = blocknum;
2194 else if (REG_BASIC_BLOCK (regno) != blocknum)
2195 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2197 /* Count (weighted) references, stores, etc. This counts a
2198 register twice if it is modified, but that is correct. */
2199 REG_N_SETS (regno)++;
2201 REG_N_REFS (regno) += loop_depth;
2203 /* The insns where a reg is live are normally counted
2204 elsewhere, but we want the count to include the insn
2205 where the reg is set, and the normal counting mechanism
2206 would not count it. */
2207 REG_LIVE_LENGTH (regno)++;
2210 if (! some_not_needed)
2212 /* Make a logical link from the next following insn
2213 that uses this register, back to this insn.
2214 The following insns have already been processed.
2216 We don't build a LOG_LINK for hard registers containing
2217 in ASM_OPERANDs. If these registers get replaced,
2218 we might wind up changing the semantics of the insn,
2219 even if reload can make what appear to be valid assignments
2221 if (y && (BLOCK_NUM (y) == blocknum)
2222 && (regno >= FIRST_PSEUDO_REGISTER
2223 || asm_noperands (PATTERN (y)) < 0))
2225 = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
2227 else if (! some_needed)
2229 /* Note that dead stores have already been deleted when possible
2230 If we get here, we have found a dead store that cannot
2231 be eliminated (because the same insn does something useful).
2232 Indicate this by marking the reg being set as dying here. */
2234 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2235 REG_N_DEATHS (REGNO (reg))++;
2239 /* This is a case where we have a multi-word hard register
2240 and some, but not all, of the words of the register are
2241 needed in subsequent insns. Write REG_UNUSED notes
2242 for those parts that were not needed. This case should
2247 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
2249 if (!REGNO_REG_SET_P (needed, regno + i))
2251 = gen_rtx_EXPR_LIST (REG_UNUSED,
2252 gen_rtx_REG (reg_raw_mode[regno + i],
2258 else if (GET_CODE (reg) == REG)
2259 reg_next_use[regno] = 0;
2261 /* If this is the last pass and this is a SCRATCH, show it will be dying
2262 here and count it. */
2263 else if (GET_CODE (reg) == SCRATCH && insn != 0)
2266 = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
2273 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
2277 find_auto_inc (needed, x, insn)
2282 rtx addr = XEXP (x, 0);
2283 HOST_WIDE_INT offset = 0;
2286 /* Here we detect use of an index register which might be good for
2287 postincrement, postdecrement, preincrement, or predecrement. */
2289 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2290 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
2292 if (GET_CODE (addr) == REG)
2295 register int size = GET_MODE_SIZE (GET_MODE (x));
2298 int regno = REGNO (addr);
2300 /* Is the next use an increment that might make auto-increment? */
2301 if ((incr = reg_next_use[regno]) != 0
2302 && (set = single_set (incr)) != 0
2303 && GET_CODE (set) == SET
2304 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
2305 /* Can't add side effects to jumps; if reg is spilled and
2306 reloaded, there's no way to store back the altered value. */
2307 && GET_CODE (insn) != JUMP_INSN
2308 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
2309 && XEXP (y, 0) == addr
2310 && GET_CODE (XEXP (y, 1)) == CONST_INT
2312 #ifdef HAVE_POST_INCREMENT
2313 || (INTVAL (XEXP (y, 1)) == size && offset == 0)
2315 #ifdef HAVE_POST_DECREMENT
2316 || (INTVAL (XEXP (y, 1)) == - size && offset == 0)
2318 #ifdef HAVE_PRE_INCREMENT
2319 || (INTVAL (XEXP (y, 1)) == size && offset == size)
2321 #ifdef HAVE_PRE_DECREMENT
2322 || (INTVAL (XEXP (y, 1)) == - size && offset == - size)
2325 /* Make sure this reg appears only once in this insn. */
2326 && (use = find_use_as_address (PATTERN (insn), addr, offset),
2327 use != 0 && use != (rtx) 1))
2329 rtx q = SET_DEST (set);
2330 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
2331 ? (offset ? PRE_INC : POST_INC)
2332 : (offset ? PRE_DEC : POST_DEC));
2334 if (dead_or_set_p (incr, addr))
2336 /* This is the simple case. Try to make the auto-inc. If
2337 we can't, we are done. Otherwise, we will do any
2338 needed updates below. */
2339 if (! validate_change (insn, &XEXP (x, 0),
2340 gen_rtx_fmt_e (inc_code, Pmode, addr),
2344 else if (GET_CODE (q) == REG
2345 /* PREV_INSN used here to check the semi-open interval
2347 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
2348 /* We must also check for sets of q as q may be
2349 a call clobbered hard register and there may
2350 be a call between PREV_INSN (insn) and incr. */
2351 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
2353 /* We have *p followed sometime later by q = p+size.
2354 Both p and q must be live afterward,
2355 and q is not used between INSN and it's assignment.
2356 Change it to q = p, ...*q..., q = q+size.
2357 Then fall into the usual case. */
2361 emit_move_insn (q, addr);
2362 insns = get_insns ();
2365 /* If anything in INSNS have UID's that don't fit within the
2366 extra space we allocate earlier, we can't make this auto-inc.
2367 This should never happen. */
2368 for (temp = insns; temp; temp = NEXT_INSN (temp))
2370 if (INSN_UID (temp) > max_uid_for_flow)
2372 BLOCK_NUM (temp) = BLOCK_NUM (insn);
2375 /* If we can't make the auto-inc, or can't make the
2376 replacement into Y, exit. There's no point in making
2377 the change below if we can't do the auto-inc and doing
2378 so is not correct in the pre-inc case. */
2380 validate_change (insn, &XEXP (x, 0),
2381 gen_rtx_fmt_e (inc_code, Pmode, q),
2383 validate_change (incr, &XEXP (y, 0), q, 1);
2384 if (! apply_change_group ())
2387 /* We now know we'll be doing this change, so emit the
2388 new insn(s) and do the updates. */
2389 emit_insns_before (insns, insn);
2391 if (basic_block_head[BLOCK_NUM (insn)] == insn)
2392 basic_block_head[BLOCK_NUM (insn)] = insns;
2394 /* INCR will become a NOTE and INSN won't contain a
2395 use of ADDR. If a use of ADDR was just placed in
2396 the insn before INSN, make that the next use.
2397 Otherwise, invalidate it. */
2398 if (GET_CODE (PREV_INSN (insn)) == INSN
2399 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
2400 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
2401 reg_next_use[regno] = PREV_INSN (insn);
2403 reg_next_use[regno] = 0;
2408 /* REGNO is now used in INCR which is below INSN, but
2409 it previously wasn't live here. If we don't mark
2410 it as needed, we'll put a REG_DEAD note for it
2411 on this insn, which is incorrect. */
2412 SET_REGNO_REG_SET (needed, regno);
2414 /* If there are any calls between INSN and INCR, show
2415 that REGNO now crosses them. */
2416 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
2417 if (GET_CODE (temp) == CALL_INSN)
2418 REG_N_CALLS_CROSSED (regno)++;
2423 /* If we haven't returned, it means we were able to make the
2424 auto-inc, so update the status. First, record that this insn
2425 has an implicit side effect. */
2428 = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
2430 /* Modify the old increment-insn to simply copy
2431 the already-incremented value of our register. */
2432 if (! validate_change (incr, &SET_SRC (set), addr, 0))
2435 /* If that makes it a no-op (copying the register into itself) delete
2436 it so it won't appear to be a "use" and a "set" of this
2438 if (SET_DEST (set) == addr)
2440 PUT_CODE (incr, NOTE);
2441 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
2442 NOTE_SOURCE_FILE (incr) = 0;
2445 if (regno >= FIRST_PSEUDO_REGISTER)
2447 /* Count an extra reference to the reg. When a reg is
2448 incremented, spilling it is worse, so we want to make
2449 that less likely. */
2450 REG_N_REFS (regno) += loop_depth;
2452 /* Count the increment as a setting of the register,
2453 even though it isn't a SET in rtl. */
2454 REG_N_SETS (regno)++;
2459 #endif /* AUTO_INC_DEC */
2461 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
2462 This is done assuming the registers needed from X
2463 are those that have 1-bits in NEEDED.
2465 On the final pass, FINAL is 1. This means try for autoincrement
2466 and count the uses and deaths of each pseudo-reg.
2468 INSN is the containing instruction. If INSN is dead, this function is not
2472 mark_used_regs (needed, live, x, final, insn)
2479 register RTX_CODE code;
2484 code = GET_CODE (x);
2505 /* If we are clobbering a MEM, mark any registers inside the address
2507 if (GET_CODE (XEXP (x, 0)) == MEM)
2508 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), final, insn);
2512 /* Invalidate the data for the last MEM stored, but only if MEM is
2513 something that can be stored into. */
2514 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2515 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2516 ; /* needn't clear last_mem_set */
2522 find_auto_inc (needed, x, insn);
2527 if (GET_CODE (SUBREG_REG (x)) == REG
2528 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
2529 && (GET_MODE_SIZE (GET_MODE (x))
2530 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2531 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
2533 /* While we're here, optimize this case. */
2536 /* In case the SUBREG is not of a register, don't optimize */
2537 if (GET_CODE (x) != REG)
2539 mark_used_regs (needed, live, x, final, insn);
2543 /* ... fall through ... */
2546 /* See a register other than being set
2547 => mark it as needed. */
2551 int some_needed = REGNO_REG_SET_P (needed, regno);
2552 int some_not_needed = ! some_needed;
2554 SET_REGNO_REG_SET (live, regno);
2556 /* A hard reg in a wide mode may really be multiple registers.
2557 If so, mark all of them just like the first. */
2558 if (regno < FIRST_PSEUDO_REGISTER)
2562 /* For stack ptr or fixed arg pointer,
2563 nothing below can be necessary, so waste no more time. */
2564 if (regno == STACK_POINTER_REGNUM
2565 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2566 || regno == HARD_FRAME_POINTER_REGNUM
2568 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2569 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2571 || regno == FRAME_POINTER_REGNUM)
2573 /* If this is a register we are going to try to eliminate,
2574 don't mark it live here. If we are successful in
2575 eliminating it, it need not be live unless it is used for
2576 pseudos, in which case it will have been set live when
2577 it was allocated to the pseudos. If the register will not
2578 be eliminated, reload will set it live at that point. */
2580 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
2581 regs_ever_live[regno] = 1;
2584 /* No death notes for global register variables;
2585 their values are live after this function exits. */
2586 if (global_regs[regno])
2589 reg_next_use[regno] = insn;
2593 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2596 int regno_n = regno + n;
2597 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
2599 SET_REGNO_REG_SET (live, regno_n);
2600 some_needed |= needed_regno;
2601 some_not_needed |= ! needed_regno;
2606 /* Record where each reg is used, so when the reg
2607 is set we know the next insn that uses it. */
2609 reg_next_use[regno] = insn;
2611 if (regno < FIRST_PSEUDO_REGISTER)
2613 /* If a hard reg is being used,
2614 record that this function does use it. */
2616 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
2620 regs_ever_live[regno + --i] = 1;
2625 /* Keep track of which basic block each reg appears in. */
2627 register int blocknum = BLOCK_NUM (insn);
2629 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
2630 REG_BASIC_BLOCK (regno) = blocknum;
2631 else if (REG_BASIC_BLOCK (regno) != blocknum)
2632 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
2634 /* Count (weighted) number of uses of each reg. */
2636 REG_N_REFS (regno) += loop_depth;
2639 /* Record and count the insns in which a reg dies.
2640 If it is used in this insn and was dead below the insn
2641 then it dies in this insn. If it was set in this insn,
2642 we do not make a REG_DEAD note; likewise if we already
2643 made such a note. */
2646 && ! dead_or_set_p (insn, x)
2648 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
2652 /* Check for the case where the register dying partially
2653 overlaps the register set by this insn. */
2654 if (regno < FIRST_PSEUDO_REGISTER
2655 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
2657 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
2659 some_needed |= dead_or_set_regno_p (insn, regno + n);
2662 /* If none of the words in X is needed, make a REG_DEAD
2663 note. Otherwise, we must make partial REG_DEAD notes. */
2667 = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
2668 REG_N_DEATHS (regno)++;
2674 /* Don't make a REG_DEAD note for a part of a register
2675 that is set in the insn. */
2677 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
2679 if (!REGNO_REG_SET_P (needed, regno + i)
2680 && ! dead_or_set_regno_p (insn, regno + i))
2682 = gen_rtx_EXPR_LIST (REG_DEAD,
2683 gen_rtx_REG (reg_raw_mode[regno + i],
2694 register rtx testreg = SET_DEST (x);
2697 /* If storing into MEM, don't show it as being used. But do
2698 show the address as being used. */
2699 if (GET_CODE (testreg) == MEM)
2703 find_auto_inc (needed, testreg, insn);
2705 mark_used_regs (needed, live, XEXP (testreg, 0), final, insn);
2706 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2710 /* Storing in STRICT_LOW_PART is like storing in a reg
2711 in that this SET might be dead, so ignore it in TESTREG.
2712 but in some other ways it is like using the reg.
2714 Storing in a SUBREG or a bit field is like storing the entire
2715 register in that if the register's value is not used
2716 then this SET is not needed. */
2717 while (GET_CODE (testreg) == STRICT_LOW_PART
2718 || GET_CODE (testreg) == ZERO_EXTRACT
2719 || GET_CODE (testreg) == SIGN_EXTRACT
2720 || GET_CODE (testreg) == SUBREG)
2722 if (GET_CODE (testreg) == SUBREG
2723 && GET_CODE (SUBREG_REG (testreg)) == REG
2724 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
2725 && (GET_MODE_SIZE (GET_MODE (testreg))
2726 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
2727 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
2729 /* Modifying a single register in an alternate mode
2730 does not use any of the old value. But these other
2731 ways of storing in a register do use the old value. */
2732 if (GET_CODE (testreg) == SUBREG
2733 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
2738 testreg = XEXP (testreg, 0);
2741 /* If this is a store into a register,
2742 recursively scan the value being stored. */
2744 if (GET_CODE (testreg) == REG
2745 && (regno = REGNO (testreg), regno != FRAME_POINTER_REGNUM)
2746 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2747 && regno != HARD_FRAME_POINTER_REGNUM
2749 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2750 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2753 /* We used to exclude global_regs here, but that seems wrong.
2754 Storing in them is like storing in mem. */
2756 mark_used_regs (needed, live, SET_SRC (x), final, insn);
2758 mark_used_regs (needed, live, SET_DEST (x), final, insn);
2765 /* If exiting needs the right stack value, consider this insn as
2766 using the stack pointer. In any event, consider it as using
2767 all global registers and all registers used by return. */
2769 #ifdef EXIT_IGNORE_STACK
2770 if (! EXIT_IGNORE_STACK
2771 || (! FRAME_POINTER_REQUIRED
2772 && ! current_function_calls_alloca
2773 && flag_omit_frame_pointer))
2775 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
2777 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2779 #ifdef EPILOGUE_USES
2780 || EPILOGUE_USES (i)
2783 SET_REGNO_REG_SET (live, i);
2790 /* Recursively scan the operands of this expression. */
2793 register char *fmt = GET_RTX_FORMAT (code);
2796 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2800 /* Tail recursive case: save a function call level. */
2806 mark_used_regs (needed, live, XEXP (x, i), final, insn);
2808 else if (fmt[i] == 'E')
2811 for (j = 0; j < XVECLEN (x, i); j++)
2812 mark_used_regs (needed, live, XVECEXP (x, i, j), final, insn);
2821 try_pre_increment_1 (insn)
2824 /* Find the next use of this reg. If in same basic block,
2825 make it do pre-increment or pre-decrement if appropriate. */
2826 rtx x = single_set (insn);
2827 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
2828 * INTVAL (XEXP (SET_SRC (x), 1)));
2829 int regno = REGNO (SET_DEST (x));
2830 rtx y = reg_next_use[regno];
2832 && BLOCK_NUM (y) == BLOCK_NUM (insn)
2833 /* Don't do this if the reg dies, or gets set in y; a standard addressing
2834 mode would be better. */
2835 && ! dead_or_set_p (y, SET_DEST (x))
2836 && try_pre_increment (y, SET_DEST (x), amount))
2838 /* We have found a suitable auto-increment
2839 and already changed insn Y to do it.
2840 So flush this increment-instruction. */
2841 PUT_CODE (insn, NOTE);
2842 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2843 NOTE_SOURCE_FILE (insn) = 0;
2844 /* Count a reference to this reg for the increment
2845 insn we are deleting. When a reg is incremented.
2846 spilling it is worse, so we want to make that
2848 if (regno >= FIRST_PSEUDO_REGISTER)
2850 REG_N_REFS (regno) += loop_depth;
2851 REG_N_SETS (regno)++;
2858 /* Try to change INSN so that it does pre-increment or pre-decrement
2859 addressing on register REG in order to add AMOUNT to REG.
2860 AMOUNT is negative for pre-decrement.
2861 Returns 1 if the change could be made.
2862 This checks all about the validity of the result of modifying INSN. */
2865 try_pre_increment (insn, reg, amount)
2867 HOST_WIDE_INT amount;
2871 /* Nonzero if we can try to make a pre-increment or pre-decrement.
2872 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
2874 /* Nonzero if we can try to make a post-increment or post-decrement.
2875 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
2876 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
2877 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
2880 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
2883 /* From the sign of increment, see which possibilities are conceivable
2884 on this target machine. */
2885 #ifdef HAVE_PRE_INCREMENT
2889 #ifdef HAVE_POST_INCREMENT
2894 #ifdef HAVE_PRE_DECREMENT
2898 #ifdef HAVE_POST_DECREMENT
2903 if (! (pre_ok || post_ok))
2906 /* It is not safe to add a side effect to a jump insn
2907 because if the incremented register is spilled and must be reloaded
2908 there would be no way to store the incremented value back in memory. */
2910 if (GET_CODE (insn) == JUMP_INSN)
2915 use = find_use_as_address (PATTERN (insn), reg, 0);
2916 if (post_ok && (use == 0 || use == (rtx) 1))
2918 use = find_use_as_address (PATTERN (insn), reg, -amount);
2922 if (use == 0 || use == (rtx) 1)
2925 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
2928 /* See if this combination of instruction and addressing mode exists. */
2929 if (! validate_change (insn, &XEXP (use, 0),
2930 gen_rtx_fmt_e (amount > 0
2931 ? (do_post ? POST_INC : PRE_INC)
2932 : (do_post ? POST_DEC : PRE_DEC),
2936 /* Record that this insn now has an implicit side effect on X. */
2937 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
2941 #endif /* AUTO_INC_DEC */
2943 /* Find the place in the rtx X where REG is used as a memory address.
2944 Return the MEM rtx that so uses it.
2945 If PLUSCONST is nonzero, search instead for a memory address equivalent to
2946 (plus REG (const_int PLUSCONST)).
2948 If such an address does not appear, return 0.
2949 If REG appears more than once, or is used other than in such an address,
2953 find_use_as_address (x, reg, plusconst)
2956 HOST_WIDE_INT plusconst;
2958 enum rtx_code code = GET_CODE (x);
2959 char *fmt = GET_RTX_FORMAT (code);
2961 register rtx value = 0;
2964 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
2967 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
2968 && XEXP (XEXP (x, 0), 0) == reg
2969 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2970 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
2973 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
2975 /* If REG occurs inside a MEM used in a bit-field reference,
2976 that is unacceptable. */
2977 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
2978 return (rtx) (HOST_WIDE_INT) 1;
2982 return (rtx) (HOST_WIDE_INT) 1;
2984 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2988 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
2992 return (rtx) (HOST_WIDE_INT) 1;
2997 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2999 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
3003 return (rtx) (HOST_WIDE_INT) 1;
3011 /* Write information about registers and basic blocks into FILE.
3012 This is part of making a debugging dump. */
3015 dump_flow_info (file)
3019 static char *reg_class_names[] = REG_CLASS_NAMES;
3021 fprintf (file, "%d registers.\n", max_regno);
3023 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3026 enum reg_class class, altclass;
3027 fprintf (file, "\nRegister %d used %d times across %d insns",
3028 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
3029 if (REG_BASIC_BLOCK (i) >= 0)
3030 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
3031 if (REG_N_DEATHS (i) != 1)
3032 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
3033 if (REG_N_CALLS_CROSSED (i) == 1)
3034 fprintf (file, "; crosses 1 call");
3035 else if (REG_N_CALLS_CROSSED (i))
3036 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
3037 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
3038 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
3039 class = reg_preferred_class (i);
3040 altclass = reg_alternate_class (i);
3041 if (class != GENERAL_REGS || altclass != ALL_REGS)
3043 if (altclass == ALL_REGS || class == ALL_REGS)
3044 fprintf (file, "; pref %s", reg_class_names[(int) class]);
3045 else if (altclass == NO_REGS)
3046 fprintf (file, "; %s or none", reg_class_names[(int) class]);
3048 fprintf (file, "; pref %s, else %s",
3049 reg_class_names[(int) class],
3050 reg_class_names[(int) altclass]);
3052 if (REGNO_POINTER_FLAG (i))
3053 fprintf (file, "; pointer");
3054 fprintf (file, ".\n");
3056 fprintf (file, "\n%d basic blocks.\n", n_basic_blocks);
3057 for (i = 0; i < n_basic_blocks; i++)
3059 register rtx head, jump;
3061 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
3063 INSN_UID (basic_block_head[i]),
3064 INSN_UID (basic_block_end[i]));
3065 /* The control flow graph's storage is freed
3066 now when flow_analysis returns.
3067 Don't try to print it if it is gone. */
3068 if (basic_block_drops_in)
3070 fprintf (file, "Reached from blocks: ");
3071 head = basic_block_head[i];
3072 if (GET_CODE (head) == CODE_LABEL)
3073 for (jump = LABEL_REFS (head);
3075 jump = LABEL_NEXTREF (jump))
3077 register int from_block = BLOCK_NUM (CONTAINING_INSN (jump));
3078 fprintf (file, " %d", from_block);
3080 if (basic_block_drops_in[i])
3081 fprintf (file, " previous");
3083 fprintf (file, "\nRegisters live at start:");
3084 for (regno = 0; regno < max_regno; regno++)
3085 if (REGNO_REG_SET_P (basic_block_live_at_start[i], regno))
3086 fprintf (file, " %d", regno);
3087 fprintf (file, "\n");
3089 fprintf (file, "\n");
3093 /* Like print_rtl, but also print out live information for the start of each
3097 print_rtl_with_bb (outf, rtx_first)
3101 register rtx tmp_rtx;
3104 fprintf (outf, "(nil)\n");
3109 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
3110 int max_uid = get_max_uid ();
3111 int *start = (int *) alloca (max_uid * sizeof (int));
3112 int *end = (int *) alloca (max_uid * sizeof (int));
3113 char *in_bb_p = (char *) alloca (max_uid * sizeof (enum bb_state));
3115 for (i = 0; i < max_uid; i++)
3117 start[i] = end[i] = -1;
3118 in_bb_p[i] = NOT_IN_BB;
3121 for (i = n_basic_blocks-1; i >= 0; i--)
3124 start[INSN_UID (basic_block_head[i])] = i;
3125 end[INSN_UID (basic_block_end[i])] = i;
3126 for (x = basic_block_head[i]; x != NULL_RTX; x = NEXT_INSN (x))
3128 in_bb_p[ INSN_UID(x)]
3129 = (in_bb_p[ INSN_UID(x)] == NOT_IN_BB)
3130 ? IN_ONE_BB : IN_MULTIPLE_BB;
3131 if (x == basic_block_end[i])
3136 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
3138 if ((bb = start[INSN_UID (tmp_rtx)]) >= 0)
3140 fprintf (outf, ";; Start of basic block %d, registers live:",
3143 EXECUTE_IF_SET_IN_REG_SET (basic_block_live_at_start[bb], 0, i,
3145 fprintf (outf, " %d", i);
3146 if (i < FIRST_PSEUDO_REGISTER)
3147 fprintf (outf, " [%s]",
3153 if (in_bb_p[ INSN_UID(tmp_rtx)] == NOT_IN_BB
3154 && GET_CODE (tmp_rtx) != NOTE
3155 && GET_CODE (tmp_rtx) != BARRIER)
3156 fprintf (outf, ";; Insn is not within a basic block\n");
3157 else if (in_bb_p[ INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
3158 fprintf (outf, ";; Insn is in multiple basic blocks\n");
3160 print_rtl_single (outf, tmp_rtx);
3162 if ((bb = end[INSN_UID (tmp_rtx)]) >= 0)
3163 fprintf (outf, ";; End of basic block %d\n", bb);
3171 /* Integer list support. */
3173 /* Allocate a node from list *HEAD_PTR. */
3176 alloc_int_list_node (head_ptr)
3177 int_list_block **head_ptr;
3179 struct int_list_block *first_blk = *head_ptr;
3181 if (first_blk == NULL || first_blk->nodes_left <= 0)
3183 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
3184 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
3185 first_blk->next = *head_ptr;
3186 *head_ptr = first_blk;
3189 first_blk->nodes_left--;
3190 return &first_blk->nodes[first_blk->nodes_left];
3193 /* Pointer to head of predecessor/successor block list. */
3194 static int_list_block *pred_int_list_blocks;
3196 /* Add a new node to integer list LIST with value VAL.
3197 LIST is a pointer to a list object to allow for different implementations.
3198 If *LIST is initially NULL, the list is empty.
3199 The caller must not care whether the element is added to the front or
3200 to the end of the list (to allow for different implementations). */
3203 add_int_list_node (blk_list, list, val)
3204 int_list_block **blk_list;
3208 int_list_ptr p = alloc_int_list_node (blk_list);
3216 /* Free the blocks of lists at BLK_LIST. */
3219 free_int_list (blk_list)
3220 int_list_block **blk_list;
3222 int_list_block *p, *next;
3224 for (p = *blk_list; p != NULL; p = next)
3230 /* Mark list as empty for the next function we compile. */
3234 /* Predecessor/successor computation. */
3236 /* Mark PRED_BB a precessor of SUCC_BB,
3237 and conversely SUCC_BB a successor of PRED_BB. */
3240 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
3243 int_list_ptr *s_preds;
3244 int_list_ptr *s_succs;
3248 if (succ_bb != EXIT_BLOCK)
3250 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
3251 num_preds[succ_bb]++;
3253 if (pred_bb != ENTRY_BLOCK)
3255 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
3256 num_succs[pred_bb]++;
3260 /* Compute the predecessors and successors for each block. */
3262 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
3263 int_list_ptr *s_preds;
3264 int_list_ptr *s_succs;
3268 int bb, clear_local_bb_vars = 0;
3270 bzero ((char *) s_preds, n_basic_blocks * sizeof (int_list_ptr));
3271 bzero ((char *) s_succs, n_basic_blocks * sizeof (int_list_ptr));
3272 bzero ((char *) num_preds, n_basic_blocks * sizeof (int));
3273 bzero ((char *) num_succs, n_basic_blocks * sizeof (int));
3275 /* This routine can be called after life analysis; in that case
3276 basic_block_drops_in and uid_block_number will not be available
3277 and we must recompute their values. */
3278 if (basic_block_drops_in == NULL || uid_block_number == NULL)
3280 clear_local_bb_vars = 1;
3281 basic_block_drops_in = (char *) alloca (n_basic_blocks);
3282 uid_block_number = (int *) alloca ((get_max_uid () + 1) * sizeof (int));
3284 bzero ((char *) basic_block_drops_in, n_basic_blocks * sizeof (char));
3285 bzero ((char *) uid_block_number, n_basic_blocks * sizeof (int));
3287 /* Scan each basic block setting basic_block_drops_in and
3288 uid_block_number as needed. */
3289 for (bb = 0; bb < n_basic_blocks; bb++)
3291 rtx insn, stop_insn;
3294 stop_insn = NULL_RTX;
3296 stop_insn = basic_block_end[bb-1];
3298 /* Look backwards from the start of this block. Stop if we
3299 hit the start of the function or the end of a previous
3300 block. Don't walk backwards through blocks that are just
3302 for (insn = PREV_INSN (basic_block_head[bb]);
3303 insn && insn != stop_insn && GET_CODE (insn) == NOTE;
3304 insn = PREV_INSN (insn))
3307 /* Never set basic_block_drops_in for the first block. It is
3310 If we stopped on anything other than a BARRIER, then this
3313 basic_block_drops_in[bb] = (insn ? GET_CODE (insn) != BARRIER : 1);
3315 insn = basic_block_head[bb];
3318 BLOCK_NUM (insn) = bb;
3319 if (insn == basic_block_end[bb])
3321 insn = NEXT_INSN (insn);
3326 for (bb = 0; bb < n_basic_blocks; bb++)
3331 head = BLOCK_HEAD (bb);
3333 if (GET_CODE (head) == CODE_LABEL)
3334 for (jump = LABEL_REFS (head);
3336 jump = LABEL_NEXTREF (jump))
3338 if (! INSN_DELETED_P (CONTAINING_INSN (jump))
3339 && (GET_CODE (CONTAINING_INSN (jump)) != NOTE
3340 || (NOTE_LINE_NUMBER (CONTAINING_INSN (jump))
3341 != NOTE_INSN_DELETED)))
3342 add_pred_succ (BLOCK_NUM (CONTAINING_INSN (jump)), bb,
3343 s_preds, s_succs, num_preds, num_succs);
3346 jump = BLOCK_END (bb);
3347 /* If this is a RETURN insn or a conditional jump in the last
3348 basic block, or a non-jump insn in the last basic block, then
3349 this block reaches the exit block. */
3350 if ((GET_CODE (jump) == JUMP_INSN && GET_CODE (PATTERN (jump)) == RETURN)
3351 || (((GET_CODE (jump) == JUMP_INSN
3352 && condjump_p (jump) && !simplejump_p (jump))
3353 || GET_CODE (jump) != JUMP_INSN)
3354 && (bb == n_basic_blocks - 1)))
3355 add_pred_succ (bb, EXIT_BLOCK, s_preds, s_succs, num_preds, num_succs);
3357 if (basic_block_drops_in[bb])
3358 add_pred_succ (bb - 1, bb, s_preds, s_succs, num_preds, num_succs);
3361 add_pred_succ (ENTRY_BLOCK, 0, s_preds, s_succs, num_preds, num_succs);
3364 /* If we allocated any variables in temporary storage, clear out the
3365 pointer to the local storage to avoid dangling pointers. */
3366 if (clear_local_bb_vars)
3368 basic_block_drops_in = NULL;
3369 uid_block_number = NULL;
3375 dump_bb_data (file, preds, succs)
3377 int_list_ptr *preds;
3378 int_list_ptr *succs;
3383 fprintf (file, "BB data\n\n");
3384 for (bb = 0; bb < n_basic_blocks; bb++)
3386 fprintf (file, "BB %d, start %d, end %d\n", bb,
3387 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
3388 fprintf (file, " preds:");
3389 for (p = preds[bb]; p != NULL; p = p->next)
3391 int pred_bb = INT_LIST_VAL (p);
3392 if (pred_bb == ENTRY_BLOCK)
3393 fprintf (file, " entry");
3395 fprintf (file, " %d", pred_bb);
3397 fprintf (file, "\n");
3398 fprintf (file, " succs:");
3399 for (p = succs[bb]; p != NULL; p = p->next)
3401 int succ_bb = INT_LIST_VAL (p);
3402 if (succ_bb == EXIT_BLOCK)
3403 fprintf (file, " exit");
3405 fprintf (file, " %d", succ_bb);
3407 fprintf (file, "\n");
3409 fprintf (file, "\n");
3412 /* Free basic block data storage. */
3417 free_int_list (&pred_int_list_blocks);
3420 /* Bitmap manipulation routines. */
3422 /* Allocate a simple bitmap of N_ELMS bits. */
3425 sbitmap_alloc (n_elms)
3428 int bytes, size, amt;
3431 size = SBITMAP_SET_SIZE (n_elms);
3432 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3433 amt = (sizeof (struct simple_bitmap_def)
3434 + bytes - sizeof (SBITMAP_ELT_TYPE));
3435 bmap = (sbitmap) xmalloc (amt);
3436 bmap->n_bits = n_elms;
3438 bmap->bytes = bytes;
3442 /* Allocate a vector of N_VECS bitmaps of N_ELMS bits. */
/* A single xmalloc covers both the pointer array (n_vecs entries) and
   the bitmap bodies; each body of elm_bytes bytes is carved out of the
   tail of the same allocation, so the whole vector is freed with one
   free of the returned pointer.
   NOTE(review): extraction dropped the return type, braces, the loop
   condition line, and the per-bitmap field initialization (n_bits,
   size, bytes) -- the visible code only wires up the pointers.  */
3445 sbitmap_vector_alloc (n_vecs, n_elms)
3448 int i, bytes, offset, elm_bytes, size, amt;
3449 sbitmap *bitmap_vector;
3451 size = SBITMAP_SET_SIZE (n_elms);
3452 bytes = size * sizeof (SBITMAP_ELT_TYPE);
3453 elm_bytes = (sizeof (struct simple_bitmap_def)
3454 + bytes - sizeof (SBITMAP_ELT_TYPE));
3455 amt = (n_vecs * sizeof (sbitmap *)) + (n_vecs * elm_bytes);
3456 bitmap_vector = (sbitmap *) xmalloc (amt);
3458 /* ??? There may be alignment problems, `offset' should be rounded up
3459 each time to account for alignment. Later [if ever]. */
3461 for (i = 0, offset = n_vecs * sizeof (sbitmap *);
3463 i++, offset += elm_bytes)
3465 sbitmap b = (sbitmap) ((char *) bitmap_vector + offset);
3466 bitmap_vector[i] = b;
3472 return bitmap_vector;
3475 /* Copy sbitmap SRC to DST. */
/* Word-by-word copy over dst->size elements.
   NOTE(review): extraction dropped the parameter/local declarations
   and the loop body (presumably copying src->elms[i] into
   dst->elms[i]) -- confirm against the full source.  */
3478 sbitmap_copy (dst, src)
3486 for (i = 0; i < dst->size; i++)
3490 /* Zero all elements in a bitmap. */
/* bzero clears the whole element array in one call.
   NOTE(review): the function header (presumably `sbitmap_zero (bmap)')
   and braces were dropped by extraction.  */
3496 bzero ((char *) bmap->elms, bmap->bytes);
3499 /* Set to ones all elements in a bitmap. */
/* memset with -1 sets every byte, hence every bit, to 1.
   NOTE(review): the function header (presumably `sbitmap_ones (bmap)')
   and braces were dropped by extraction.  */
3505 memset (bmap->elms, -1, bmap->bytes);
3508 /* Zero a vector of N_VECS bitmaps. */
/* NOTE(review): the return type, parameter declarations, braces and
   the declaration of `i' were dropped by extraction.  */
3511 sbitmap_vector_zero (bmap, n_vecs)
3517 for (i = 0; i < n_vecs; i++)
3518 sbitmap_zero (bmap[i]);
3521 /* Set to ones a vector of N_VECS bitmaps. */
/* NOTE(review): the return type, parameter declarations, braces and
   the declaration of `i' were dropped by extraction.  */
3524 sbitmap_vector_ones (bmap, n_vecs)
3530 for (i = 0; i < n_vecs; i++)
3531 sbitmap_ones (bmap[i]);
3534 /* Set DST to be A union (B - C).
3536 Return non-zero if any change is made. */
/* Word-parallel: each destination word becomes a | (b & ~c).
   NOTE(review): extraction dropped the initialization of the element
   pointers, the change-detecting store of `tmp' into *dstp, and the
   return of the change flag -- confirm against the full source.  */
3539 sbitmap_union_of_diff (dst, a, b, c)
3540 sbitmap dst, a, b, c;
3543 sbitmap_ptr dstp, ap, bp, cp;
3550 for (i = 0; i < dst->size; i++)
3552 SBITMAP_ELT_TYPE tmp = *ap | (*bp & ~*cp);
3556 dstp++; ap++; bp++; cp++;
3561 /* Set bitmap DST to the bitwise negation of the bitmap SRC. */
/* NOTE(review): extraction dropped the pointer initialization and the
   store of `tmp' into the destination word -- confirm against the
   full source.  */
3564 sbitmap_not (dst, src)
3568 sbitmap_ptr dstp, ap;
3572 for (i = 0; i < dst->size; i++)
3574 SBITMAP_ELT_TYPE tmp = ~(*ap);
3580 /* Set the bits in DST to be the difference between the bits
3581 in A and the bits in B. i.e. dst = a - b.
3582 The - operator is implemented as a & (~b). */
/* NOTE(review): the pointer initialization lines and braces were
   dropped by extraction.  */
3585 sbitmap_difference (dst, a, b)
3589 sbitmap_ptr dstp, ap, bp;
3594 for (i = 0; i < dst->size; i++)
3595 *dstp++ = *ap++ & (~*bp++);
3598 /* Set DST to be (A and B).
3599 Return non-zero if any change is made. */
/* Word-parallel AND.  NOTE(review): extraction dropped the pointer
   initialization, the change-detecting store of `tmp', and the return
   of the change flag -- confirm against the full source.  */
3602 sbitmap_a_and_b (dst, a, b)
3606 sbitmap_ptr dstp, ap, bp;
3612 for (i = 0; i < dst->size; i++)
3614 SBITMAP_ELT_TYPE tmp = *ap & *bp;
3622 /* Set DST to be (A or B).
3623 Return non-zero if any change is made. */
/* Word-parallel OR.  NOTE(review): extraction dropped the pointer
   initialization, the change-detecting store of `tmp', and the return
   of the change flag -- confirm against the full source.  */
3626 sbitmap_a_or_b (dst, a, b)
3630 sbitmap_ptr dstp, ap, bp;
3636 for (i = 0; i < dst->size; i++)
3638 SBITMAP_ELT_TYPE tmp = *ap | *bp;
3647 /* Set DST to be (A or (B and C)).
3648 Return non-zero if any change is made. */
/* Word-parallel a | (b & c).  NOTE(review): extraction dropped the
   pointer initialization, the change-detecting store of `tmp', and the
   return of the change flag -- confirm against the full source.  */
3651 sbitmap_a_or_b_and_c (dst, a, b, c)
3652 sbitmap dst, a, b, c;
3655 sbitmap_ptr dstp, ap, bp, cp;
3662 for (i = 0; i < dst->size; i++)
3664 SBITMAP_ELT_TYPE tmp = *ap | (*bp & *cp);
3668 dstp++; ap++; bp++; cp++;
3673 /* Set DST to be (A and (B or C)).
3674 Return non-zero if any change is made. */
/* Word-parallel a & (b | c).  NOTE(review): extraction dropped the
   pointer initialization, the change-detecting store of `tmp', and the
   return of the change flag -- confirm against the full source.  */
3677 sbitmap_a_and_b_or_c (dst, a, b, c)
3678 sbitmap dst, a, b, c;
3681 sbitmap_ptr dstp, ap, bp, cp;
3688 for (i = 0; i < dst->size; i++)
3690 SBITMAP_ELT_TYPE tmp = *ap & (*bp | *cp);
3694 dstp++; ap++; bp++; cp++;
3699 /* Set the bitmap DST to the intersection of SRC of all predecessors or
3700 successors of block number BB (PRED_SUCC says which). */
/* Strategy: if there are no real predecessors/successors, DST is the
   identity for `and'; otherwise DST is seeded from the first real
   neighbor's SRC bitmap and the remaining ones are and-ed in word by
   word.  ENTRY_BLOCK/EXIT_BLOCK have no SRC bitmap and are skipped.
   NOTE(review): extraction dropped the return type, the declarations
   of `ps', `ps_bb', `i' and the element pointer `p', several braces,
   and the and-accumulation statement inside the final loop -- confirm
   against the full source.  */
3703 sbitmap_intersect_of_predsucc (dst, src, bb, pred_succ)
3707 int_list_ptr *pred_succ;
3711 int set_size = dst->size;
3715 /* It is possible that there are no predecessors(/successors).
3716 This can happen for example in unreachable code. */
3720 /* In APL-speak this is the `and' reduction of the empty set and thus
3721 the result is the identity for `and'. */
3726 /* Set result to first predecessor/successor. */
3728 for ( ; ps != NULL; ps = ps->next)
3730 ps_bb = INT_LIST_VAL (ps);
3731 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3733 sbitmap_copy (dst, src[ps_bb]);
3734 /* Break out since we're only doing first predecessor. */
3740 /* Now do the remaining predecessors/successors. */
3742 for (ps = ps->next; ps != NULL; ps = ps->next)
3747 ps_bb = INT_LIST_VAL (ps);
3748 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3751 p = src[ps_bb]->elms;
3754 for (i = 0; i < set_size; i++)
3759 /* Set the bitmap DST to the intersection of SRC of all predecessors
3760 of block number BB. */
/* Thin wrapper delegating to sbitmap_intersect_of_predsucc with the
   predecessor lists.  NOTE(review): the return type, remaining
   parameter declarations and braces were dropped by extraction.  */
3763 sbitmap_intersect_of_predecessors (dst, src, bb, s_preds)
3767 int_list_ptr *s_preds;
3769 sbitmap_intersect_of_predsucc (dst, src, bb, s_preds);
3772 /* Set the bitmap DST to the intersection of SRC of all successors
3773 of block number BB. */
/* Thin wrapper delegating to sbitmap_intersect_of_predsucc with the
   successor lists.  NOTE(review): the return type, remaining parameter
   declarations and braces were dropped by extraction.  */
3776 sbitmap_intersect_of_successors (dst, src, bb, s_succs)
3780 int_list_ptr *s_succs;
3782 sbitmap_intersect_of_predsucc (dst, src, bb, s_succs);
3785 /* Set the bitmap DST to the union of SRC of all predecessors/successors of
   block number BB (PRED_SUCC says which).  */
/* Mirror image of sbitmap_intersect_of_predsucc: with no real
   predecessors/successors DST is the identity for `or'; otherwise DST
   is seeded from the first real neighbor's SRC bitmap and the rest are
   or-ed in word by word.  ENTRY_BLOCK/EXIT_BLOCK are skipped.
   NOTE(review): extraction dropped the return type, the declarations
   of `ps', `ps_bb', `i' and the element pointer `p', several braces,
   and the or-accumulation statement inside the final loop -- confirm
   against the full source.  */
3789 sbitmap_union_of_predsucc (dst, src, bb, pred_succ)
3793 int_list_ptr *pred_succ;
3797 int set_size = dst->size;
3801 /* It is possible that there are no predecessors(/successors).
3802 This can happen for example in unreachable code. */
3806 /* In APL-speak this is the `or' reduction of the empty set and thus
3807 the result is the identity for `or'. */
3812 /* Set result to first predecessor/successor. */
3814 for ( ; ps != NULL; ps = ps->next)
3816 ps_bb = INT_LIST_VAL (ps);
3817 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3819 sbitmap_copy (dst, src[ps_bb]);
3820 /* Break out since we're only doing first predecessor. */
3826 /* Now do the remaining predecessors/successors. */
3828 for (ps = ps->next; ps != NULL; ps = ps->next)
3833 ps_bb = INT_LIST_VAL (ps);
3834 if (ps_bb == ENTRY_BLOCK || ps_bb == EXIT_BLOCK)
3837 p = src[ps_bb]->elms;
3840 for (i = 0; i < set_size; i++)
3845 /* Set the bitmap DST to the union of SRC of all predecessors of
   block number BB.  */
/* Thin wrapper delegating to sbitmap_union_of_predsucc with the
   predecessor lists.  NOTE(review): the return type, remaining
   parameter declarations and braces were dropped by extraction.  */
3849 sbitmap_union_of_predecessors (dst, src, bb, s_preds)
3853 int_list_ptr *s_preds;
3855 sbitmap_union_of_predsucc (dst, src, bb, s_preds);
3858 /* Compute dominator relationships. */
3860 compute_dominators (dominators, post_dominators, s_preds, s_succs)
3861 sbitmap *dominators;
3862 sbitmap *post_dominators;
3863 int_list_ptr *s_preds;
3864 int_list_ptr *s_succs;
3866 int bb, changed, passes;
3867 sbitmap *temp_bitmap;
3869 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
3870 sbitmap_vector_ones (dominators, n_basic_blocks);
3871 sbitmap_vector_ones (post_dominators, n_basic_blocks);
3872 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
3874 sbitmap_zero (dominators[0]);
3875 SET_BIT (dominators[0], 0);
3877 sbitmap_zero (post_dominators[n_basic_blocks-1]);
3878 SET_BIT (post_dominators[n_basic_blocks-1], 0);
3885 for (bb = 1; bb < n_basic_blocks; bb++)
3887 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
3889 SET_BIT (temp_bitmap[bb], bb);
3890 changed |= sbitmap_a_and_b (dominators[bb],
3893 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
3895 SET_BIT (temp_bitmap[bb], bb);
3896 changed |= sbitmap_a_and_b (post_dominators[bb],
3897 post_dominators[bb],