1 /* Dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
26 This file provides some dataflow routines for computing reaching defs,
27 upward exposed uses, live variables, def-use chains, and use-def
28 chains. The global dataflow is performed using simple iterative
29 methods with a worklist and could be sped up by ordering the blocks
30 with a depth first search order.
32 A `struct ref' data structure (ref) is allocated for every register
33 reference (def or use) and this records the insn and bb the ref is
34 found within. The refs are linked together in chains of uses and defs
35 for each insn and for each register. Each ref also has a chain field
36 that links all the use refs for a def or all the def refs for a use.
37 This is used to create use-def or def-use chains.
42 Here's an example of using the dataflow routines.
48 df_analyse (df, 0, DF_ALL);
50 df_dump (df, DF_ALL, stderr);
55 df_init simply creates a poor man's object (df) that needs to be
56 passed to all the dataflow routines. df_finish destroys this
57 object and frees up any allocated memory. DF_ALL says to analyse
60 df_analyse performs the following:
62 1. Records defs and uses by scanning the insns in each basic block
63 or by scanning the insns queued by df_insn_modify.
64 2. Links defs and uses into insn-def and insn-use chains.
65 3. Links defs and uses into reg-def and reg-use chains.
66 4. Assigns LUIDs to each insn (for modified blocks).
67 5. Calculates local reaching definitions.
68 6. Calculates global reaching definitions.
69 7. Creates use-def chains.
70 8. Calculates local reaching uses (upwards exposed uses).
71 9. Calculates global reaching uses.
72 10. Creates def-use chains.
73 11. Calculates local live registers.
74 12. Calculates global live registers.
75 13. Calculates register lifetimes and determines local registers.
80 Note that the dataflow information is not updated for every newly
81 deleted or created insn. If the dataflow information requires
82 updating then all the changed, new, or deleted insns need to be
83 marked with df_insn_modify (or df_insns_modify) either directly or
84 indirectly (say through calling df_insn_delete). df_insn_modify
85 marks all the modified insns to get processed the next time df_analyse
88 Beware that tinkering with insns may invalidate the dataflow information.
89 The philosophy behind these routines is that once the dataflow
90 information has been gathered, the user should store what they require
91 before they tinker with any insn. Once a reg is replaced, for example,
92 then the reg-def/reg-use chains will point to the wrong place. Once a
93 whole lot of changes have been made, df_analyse can be called again
94 to update the dataflow information. Currently, this is not very smart
95 with regard to propagating changes to the dataflow so it should not
101 The basic object is a REF (reference) and this may either be a DEF
102 (definition) or a USE of a register.
104 These are linked into a variety of lists; namely reg-def, reg-use,
105 insn-def, insn-use, def-use, and use-def lists. For example,
106 the reg-def lists contain all the refs that define a given register
107 while the insn-use lists contain all the refs used by an insn.
109 Note that the reg-def and reg-use chains are generally short (except for the
110 hard registers) and thus it is much faster to search these chains
111 rather than searching the def or use bitmaps.
113 If the insns are in SSA form then the reg-def and use-def lists
114 should only contain the single defining ref.
119 1) Incremental dataflow analysis.
121 Note that if a loop invariant insn is hoisted (or sunk), we do not
122 need to change the def-use or use-def chains. All we have to do is to
123 change the bb field for all the associated defs and uses and to
124 renumber the LUIDs for the original and new basic blocks of the insn.
126 When shadowing loop mems we create new uses and defs for new pseudos
127 so we do not affect the existing dataflow information.
129 My current strategy is to queue up all modified, created, or deleted
130 insns so when df_analyse is called we can easily determine all the new
131 or deleted refs. Currently the global dataflow information is
132 recomputed from scratch but this could be propagated more efficiently.
134 2) Reduced memory requirements.
136 We could operate a pool of ref structures. When a ref is deleted it
137 gets returned to the pool (say by linking on to a chain of free refs).
138 This will require a pair of bitmaps for defs and uses so that we can
139 tell which ones have been changed. Alternatively, we could
140 periodically squeeze the def and use tables and associated bitmaps and
141 renumber the def and use ids.
143 3) Ordering of reg-def and reg-use lists.
145 Should the first entry in the def list be the first def (within a BB)?
146 Similarly, should the first entry in the use list be the last use
149 4) Working with a sub-CFG.
151 Often the whole CFG does not need to be analyzed, for example,
152 when optimising a loop, only certain registers are of interest.
153 Perhaps there should be a bitmap argument to df_analyse to specify
154 which registers should be analyzed?
159 Embedded addressing side-effects, such as POST_INC or PRE_INC, generate
160 both a use and a def. These are both marked read/write to show that they
161 are dependent. For example, (set (reg 40) (mem (post_inc (reg 42))))
162 will generate a use of reg 42 followed by a def of reg 42 (both marked
163 read/write). Similarly, (set (reg 40) (mem (pre_dec (reg 41))))
164 generates a use of reg 41 then a def of reg 41 (both marked read/write),
165 even though reg 41 is decremented before it is used for the memory
166 address in this second example.
168 A set to a REG inside a ZERO_EXTRACT, SIGN_EXTRACT, or SUBREG invokes
169 a read-modify write operation. We generate both a use and a def
170 and again mark them read/write.
175 #include "coretypes.h"
179 #include "insn-config.h"
181 #include "function.h"
184 #include "hard-reg-set.h"
185 #include "basic-block.h"
/* Iterate over every basic block whose index bit is set in BITMAP
   (starting at bit MIN), binding BB to each block in turn and
   executing CODE for it.  node_ is a private scratch index used by
   EXECUTE_IF_SET_IN_BITMAP.  NOTE(review): this listing is partial;
   the enclosing do/while lines of the macro are not visible here.  */
191 #define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE) \
194 unsigned int node_; \
195 EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_, \
196 {(BB) = BASIC_BLOCK (node_); CODE;}); \
/* Obstack from which every `struct ref' and `struct df_link' is
   allocated; released wholesale (see obstack_free in df_free).  */
200 static struct obstack df_ref_obstack;
/* File-scope copy of a dataflow object.  NOTE(review): its exact
   use (presumably by the debug/dump helpers) is outside this
   excerpt -- confirm before documenting further.  */
201 static struct df *ddf;
/* Forward declarations of the local helpers defined below.  PARAMS
   yields prototypes on both ISO and K&R compilers.  */
203 static void df_reg_table_realloc PARAMS((struct df *, int));
204 static void df_insn_table_realloc PARAMS((struct df *, unsigned int));
205 static void df_bitmaps_alloc PARAMS((struct df *, int));
206 static void df_bitmaps_free PARAMS((struct df *, int));
207 static void df_free PARAMS((struct df *));
208 static void df_alloc PARAMS((struct df *, int));
210 static rtx df_reg_clobber_gen PARAMS((unsigned int));
211 static rtx df_reg_use_gen PARAMS((unsigned int));
/* Chain (df_link) creation and unlinking helpers.  */
213 static inline struct df_link *df_link_create PARAMS((struct ref *,
215 static struct df_link *df_ref_unlink PARAMS((struct df_link **, struct ref *));
216 static void df_def_unlink PARAMS((struct df *, struct ref *));
217 static void df_use_unlink PARAMS((struct df *, struct ref *));
218 static void df_insn_refs_unlink PARAMS ((struct df *, basic_block, rtx));
220 static void df_bb_refs_unlink PARAMS ((struct df *, basic_block));
221 static void df_refs_unlink PARAMS ((struct df *, bitmap));
/* Ref creation and per-insn / per-block / per-region scanning.  */
224 static struct ref *df_ref_create PARAMS((struct df *,
226 enum df_ref_type, enum df_ref_flags));
227 static void df_ref_record_1 PARAMS((struct df *, rtx, rtx *,
228 rtx, enum df_ref_type,
230 static void df_ref_record PARAMS((struct df *, rtx, rtx *,
231 rtx, enum df_ref_type,
233 static void df_def_record_1 PARAMS((struct df *, rtx, basic_block, rtx));
234 static void df_defs_record PARAMS((struct df *, rtx, basic_block, rtx));
235 static void df_uses_record PARAMS((struct df *, rtx *,
236 enum df_ref_type, basic_block, rtx,
238 static void df_insn_refs_record PARAMS((struct df *, basic_block, rtx));
239 static void df_bb_refs_record PARAMS((struct df *, basic_block));
240 static void df_refs_record PARAMS((struct df *, bitmap));
/* Chain construction and the local/global dataflow computations
   (reaching defs RD, reaching uses RU, live registers LR).  */
242 static void df_bb_reg_def_chain_create PARAMS((struct df *, basic_block));
243 static void df_reg_def_chain_create PARAMS((struct df *, bitmap));
244 static void df_bb_reg_use_chain_create PARAMS((struct df *, basic_block));
245 static void df_reg_use_chain_create PARAMS((struct df *, bitmap));
246 static void df_bb_du_chain_create PARAMS((struct df *, basic_block, bitmap));
247 static void df_du_chain_create PARAMS((struct df *, bitmap));
248 static void df_bb_ud_chain_create PARAMS((struct df *, basic_block));
249 static void df_ud_chain_create PARAMS((struct df *, bitmap));
250 static void df_bb_rd_local_compute PARAMS((struct df *, basic_block));
251 static void df_rd_local_compute PARAMS((struct df *, bitmap));
252 static void df_bb_ru_local_compute PARAMS((struct df *, basic_block));
253 static void df_ru_local_compute PARAMS((struct df *, bitmap));
254 static void df_bb_lr_local_compute PARAMS((struct df *, basic_block));
255 static void df_lr_local_compute PARAMS((struct df *, bitmap));
256 static void df_bb_reg_info_compute PARAMS((struct df *, basic_block, bitmap));
257 static void df_reg_info_compute PARAMS((struct df *, bitmap));
259 static int df_bb_luids_set PARAMS((struct df *df, basic_block));
260 static int df_luids_set PARAMS((struct df *df, bitmap));
/* Incremental-update machinery: queue and reprocess modified refs.  */
262 static int df_modified_p PARAMS ((struct df *, bitmap));
263 static int df_refs_queue PARAMS ((struct df *));
264 static int df_refs_process PARAMS ((struct df *));
265 static int df_bb_refs_update PARAMS ((struct df *, basic_block));
266 static int df_refs_update PARAMS ((struct df *));
267 static void df_analyse_1 PARAMS((struct df *, bitmap, int, int));
269 static void df_insns_modify PARAMS((struct df *, basic_block,
271 static int df_rtx_mem_replace PARAMS ((rtx *, void *));
272 static int df_rtx_reg_replace PARAMS ((rtx *, void *));
273 void df_refs_reg_replace PARAMS ((struct df *, bitmap,
274 struct df_link *, rtx, rtx));
276 static int df_def_dominates_all_uses_p PARAMS((struct df *, struct ref *def));
277 static int df_def_dominates_uses_p PARAMS((struct df *,
278 struct ref *def, bitmap));
279 static struct ref *df_bb_regno_last_use_find PARAMS((struct df *, basic_block,
281 static struct ref *df_bb_regno_first_def_find PARAMS((struct df *, basic_block,
283 static struct ref *df_bb_insn_regno_last_use_find PARAMS((struct df *,
286 static struct ref *df_bb_insn_regno_first_def_find PARAMS((struct df *,
/* Debug dumping helpers.  */
290 static void df_chain_dump PARAMS((struct df_link *, FILE *file));
291 static void df_chain_dump_regno PARAMS((struct df_link *, FILE *file));
292 static void df_regno_debug PARAMS ((struct df *, unsigned int, FILE *));
293 static void df_regno_rtl_debug PARAMS ((struct df *, unsigned int, FILE *));
294 static void df_ref_debug PARAMS ((struct df *, struct ref *, FILE *));
/* Transfer functions for the iterative bitmap dataflow solvers.  */
295 static void df_rd_transfer_function PARAMS ((int, int *, bitmap, bitmap,
296 bitmap, bitmap, void *));
297 static void df_ru_transfer_function PARAMS ((int, int *, bitmap, bitmap,
298 bitmap, bitmap, void *));
299 static void df_lr_transfer_function PARAMS ((int, int *, bitmap, bitmap,
300 bitmap, bitmap, void *));
301 static void hybrid_search_bitmap PARAMS ((basic_block, bitmap *, bitmap *,
302 bitmap *, bitmap *, enum df_flow_dir,
303 enum df_confluence_op,
304 transfer_function_bitmap,
305 sbitmap, sbitmap, void *));
306 static void hybrid_search_sbitmap PARAMS ((basic_block, sbitmap *, sbitmap *,
307 sbitmap *, sbitmap *, enum df_flow_dir,
308 enum df_confluence_op,
309 transfer_function_sbitmap,
310 sbitmap, sbitmap, void *));
311 static inline bool read_modify_subreg_p PARAMS ((rtx));
314 /* Local memory allocation/deallocation routines. */
317 /* Increase the insn info table to have space for at least SIZE + 1
/* NOTE(review): this file is a partial listing; parameter
   declarations, braces, and some statements between the numbered
   lines are absent from the excerpt.  Only comments are added here;
   every original token is untouched.  */
320 df_insn_table_realloc (df, size)
325 if (size <= df->insn_size)
328 /* Make the table a little larger than requested, so we do not need
329 to enlarge it so often. */
330 size += df->insn_size / 4;
332 df->insns = (struct insn_info *)
333 xrealloc (df->insns, size * sizeof (struct insn_info));
/* Zero only the newly added tail of the table.  */
335 memset (df->insns + df->insn_size, 0,
336 (size - df->insn_size) * sizeof (struct insn_info));
338 df->insn_size = size;
/* Create the insns_modified bitmap lazily on first (re)allocation.  */
340 if (! df->insns_modified)
342 df->insns_modified = BITMAP_XMALLOC ();
343 bitmap_zero (df->insns_modified);
348 /* Increase the reg info table by SIZE more elements. */
350 df_reg_table_realloc (df, size)
354 /* Make table 25 percent larger by default. */
356 size = df->reg_size / 4;
358 size += df->reg_size;
/* Never let the table lag behind the current register count.  */
359 if (size < max_reg_num ())
360 size = max_reg_num ();
362 df->regs = (struct reg_info *)
363 xrealloc (df->regs, size * sizeof (struct reg_info));
365 /* Zero the new entries. */
366 memset (df->regs + df->reg_size, 0,
367 (size - df->reg_size) * sizeof (struct reg_info));
373 /* Allocate bitmaps for each basic block. */
375 df_bitmaps_alloc (df, flags)
382 /* Free the bitmaps if they need resizing. */
/* If register or ref counts have grown since the bitmaps were last
   sized, select the affected problems for a free-and-reallocate.
   Growing DF_LR invalidates DF_RU too, since both are register
   indexed.  */
383 if ((flags & DF_LR) && df->n_regs < (unsigned int) max_reg_num ())
384 dflags |= DF_LR | DF_RU;
385 if ((flags & DF_RU) && df->n_uses < df->use_id)
387 if ((flags & DF_RD) && df->n_defs < df->def_id)
391 df_bitmaps_free (df, dflags);
/* Remember the table sizes these bitmaps correspond to.  */
393 df->n_defs = df->def_id;
394 df->n_uses = df->use_id;
398 struct bb_info *bb_info = DF_BB_INFO (df, bb);
400 if (flags & DF_RD && ! bb_info->rd_in)
402 /* Allocate bitmaps for reaching definitions. */
403 bb_info->rd_kill = BITMAP_XMALLOC ();
404 bitmap_zero (bb_info->rd_kill);
405 bb_info->rd_gen = BITMAP_XMALLOC ();
406 bitmap_zero (bb_info->rd_gen);
/* in/out are not zeroed here (unlike kill/gen); rd_valid is
   cleared instead.  */
407 bb_info->rd_in = BITMAP_XMALLOC ();
408 bb_info->rd_out = BITMAP_XMALLOC ();
409 bb_info->rd_valid = 0;
412 if (flags & DF_RU && ! bb_info->ru_in)
414 /* Allocate bitmaps for upward exposed uses. */
415 bb_info->ru_kill = BITMAP_XMALLOC ();
416 bitmap_zero (bb_info->ru_kill);
417 /* Note the lack of symmetry. */
418 bb_info->ru_gen = BITMAP_XMALLOC ();
419 bitmap_zero (bb_info->ru_gen);
420 bb_info->ru_in = BITMAP_XMALLOC ();
421 bb_info->ru_out = BITMAP_XMALLOC ();
422 bb_info->ru_valid = 0;
425 if (flags & DF_LR && ! bb_info->lr_in)
427 /* Allocate bitmaps for live variables. */
428 bb_info->lr_def = BITMAP_XMALLOC ();
429 bitmap_zero (bb_info->lr_def);
430 bb_info->lr_use = BITMAP_XMALLOC ();
431 bitmap_zero (bb_info->lr_use);
432 bb_info->lr_in = BITMAP_XMALLOC ();
433 bb_info->lr_out = BITMAP_XMALLOC ();
434 bb_info->lr_valid = 0;
440 /* Free bitmaps for each basic block. */
442 df_bitmaps_free (df, flags)
443 struct df *df ATTRIBUTE_UNUSED;
450 struct bb_info *bb_info = DF_BB_INFO (df, bb);
/* For each problem selected in FLAGS, free that problem's per-block
   bitmaps and null the pointers so df_bitmaps_alloc can rebuild
   them on demand.  The `*_in' pointer doubles as the "allocated"
   flag for each group.  */
455 if ((flags & DF_RD) && bb_info->rd_in)
457 /* Free bitmaps for reaching definitions. */
458 BITMAP_XFREE (bb_info->rd_kill);
459 bb_info->rd_kill = NULL;
460 BITMAP_XFREE (bb_info->rd_gen);
461 bb_info->rd_gen = NULL;
462 BITMAP_XFREE (bb_info->rd_in);
463 bb_info->rd_in = NULL;
464 BITMAP_XFREE (bb_info->rd_out);
465 bb_info->rd_out = NULL;
468 if ((flags & DF_RU) && bb_info->ru_in)
470 /* Free bitmaps for upward exposed uses. */
471 BITMAP_XFREE (bb_info->ru_kill);
472 bb_info->ru_kill = NULL;
473 BITMAP_XFREE (bb_info->ru_gen);
474 bb_info->ru_gen = NULL;
475 BITMAP_XFREE (bb_info->ru_in);
476 bb_info->ru_in = NULL;
477 BITMAP_XFREE (bb_info->ru_out);
478 bb_info->ru_out = NULL;
481 if ((flags & DF_LR) && bb_info->lr_in)
483 /* Free bitmaps for live variables. */
484 BITMAP_XFREE (bb_info->lr_def);
485 bb_info->lr_def = NULL;
486 BITMAP_XFREE (bb_info->lr_use);
487 bb_info->lr_use = NULL;
488 BITMAP_XFREE (bb_info->lr_in);
489 bb_info->lr_in = NULL;
490 BITMAP_XFREE (bb_info->lr_out);
491 bb_info->lr_out = NULL;
/* Record that these analyses are no longer allocated.  */
494 df->flags &= ~(flags & (DF_RD | DF_RU | DF_LR));
498 /* Allocate and initialize dataflow memory. */
500 df_alloc (df, n_regs)
507 gcc_obstack_init (&df_ref_obstack);
509 /* Perhaps we should use LUIDs to save memory for the insn_refs
510 table. This is only a small saving; a few pointers. */
511 n_insns = get_max_uid () + 1;
515 /* Approximate number of defs by number of insns. */
516 df->def_size = n_insns;
517 df->defs = xmalloc (df->def_size * sizeof (*df->defs));
521 /* Approximate number of uses by twice number of insns. */
522 df->use_size = n_insns * 2;
523 df->uses = xmalloc (df->use_size * sizeof (*df->uses));
526 df->n_bbs = last_basic_block;
528 /* Allocate temporary working array used during local dataflow analysis. */
529 df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));
531 df_insn_table_realloc (df, n_insns);
533 df_reg_table_realloc (df, df->n_regs);
535 df->bbs_modified = BITMAP_XMALLOC ();
536 bitmap_zero (df->bbs_modified);
/* One bb_info slot per basic block, indexed by block number;
   xcalloc leaves all per-block bitmap pointers NULL.  */
540 df->bbs = xcalloc (last_basic_block, sizeof (struct bb_info));
/* all_blocks collects the index of every current basic block.  */
542 df->all_blocks = BITMAP_XMALLOC ();
544 bitmap_set_bit (df->all_blocks, bb->index);
548 /* Free all the dataflow info. */
/* NOTE(review): the function header line (presumably `df_free (df)')
   and several statements are missing from this excerpt.  The visible
   statements free the per-block bitmaps, the modification bitmaps,
   the block set, and finally the ref obstack.  */
553 df_bitmaps_free (df, DF_ALL);
581 if (df->bbs_modified)
582 BITMAP_XFREE (df->bbs_modified);
583 df->bbs_modified = 0;
585 if (df->insns_modified)
586 BITMAP_XFREE (df->insns_modified);
587 df->insns_modified = 0;
589 BITMAP_XFREE (df->all_blocks);
/* Releasing the obstack frees every ref and link in one shot.  */
592 obstack_free (&df_ref_obstack, NULL);
595 /* Local miscellaneous routines. */
597 /* Return a USE for register REGNO. */
598 static rtx df_reg_use_gen (regno)
/* Look up the canonical rtx for REGNO and wrap it in a USE of the
   register's own machine mode.  */
604 reg = regno_reg_rtx[regno];
606 use = gen_rtx_USE (GET_MODE (reg), reg);
611 /* Return a CLOBBER for register REGNO. */
612 static rtx df_reg_clobber_gen (regno)
618 reg = regno_reg_rtx[regno];
/* The local is named `use' but holds the CLOBBER rtx here.  */
620 use = gen_rtx_CLOBBER (GET_MODE (reg), reg);
624 /* Local chain manipulation routines. */
626 /* Create a link in a def-use or use-def chain. */
627 static inline struct df_link *
628 df_link_create (ref, next)
630 struct df_link *next;
632 struct df_link *link;
/* Links live on df_ref_obstack and are never freed individually;
   the whole obstack is released in df_free.  */
634 link = (struct df_link *) obstack_alloc (&df_ref_obstack,
642 /* Unlink REF from the chain pointed to by PHEAD.  (The original
   comment said "Add REF to chain head"; the list-splice below shows
   the routine removes REF.)  */
643 static struct df_link *
644 df_ref_unlink (phead, ref)
645 struct df_link **phead;
648 struct df_link *link = *phead;
654 /* Only a single ref. It must be the one we want.
655 If not, the def-use and use-def chains are likely to
657 if (link->ref != ref)
659 /* Now have an empty chain. */
664 /* Multiple refs. One of them must be us. */
665 if (link->ref == ref)
/* Scan ahead one link so we can splice the match out.  */
670 for (; link->next; link = link->next)
672 if (link->next->ref == ref)
674 /* Unlink from list. */
675 link->next = link->next->next;
686 /* Unlink REF from all def-use/use-def chains, etc. */
688 df_ref_remove (df, ref)
/* Defs are removed from their insn's def list, uses from the insn's
   use list; the reg-def/reg-use chains are handled inside
   df_def_unlink / df_use_unlink.  */
692 if (DF_REF_REG_DEF_P (ref))
694 df_def_unlink (df, ref);
695 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].defs, ref);
699 df_use_unlink (df, ref);
700 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].uses, ref);
706 /* Unlink DEF from use-def and reg-def chains. */
708 df_def_unlink (df, def)
709 struct df *df ATTRIBUTE_UNUSED;
712 struct df_link *du_link;
713 unsigned int dregno = DF_REF_REGNO (def);
715 /* Follow def-use chain to find all the uses of this def. */
716 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
718 struct ref *use = du_link->ref;
720 /* Unlink this def from the use-def chain. */
721 df_ref_unlink (&DF_REF_CHAIN (use), def);
723 DF_REF_CHAIN (def) = 0;
725 /* Unlink def from reg-def chain. */
726 df_ref_unlink (&df->regs[dregno].defs, def);
/* Leave a hole in the defs table; ids are not recycled here.  */
728 df->defs[DF_REF_ID (def)] = 0;
732 /* Unlink use from def-use and reg-use chains. */
734 df_use_unlink (df, use)
735 struct df *df ATTRIBUTE_UNUSED;
738 struct df_link *ud_link;
739 unsigned int uregno = DF_REF_REGNO (use);
741 /* Follow use-def chain to find all the defs of this use. */
742 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
744 struct ref *def = ud_link->ref;
746 /* Unlink this use from the def-use chain. */
747 df_ref_unlink (&DF_REF_CHAIN (def), use);
749 DF_REF_CHAIN (use) = 0;
751 /* Unlink use from reg-use chain. */
752 df_ref_unlink (&df->regs[uregno].uses, use);
/* Leave a hole in the uses table; ids are not recycled here.  */
754 df->uses[DF_REF_ID (use)] = 0;
757 /* Local routines for recording refs. */
760 /* Create a new ref of type DF_REF_TYPE for register REG at address
761 LOC within INSN of BB. */
763 df_ref_create (df, reg, loc, insn, ref_type, ref_flags)
768 enum df_ref_type ref_type;
769 enum df_ref_flags ref_flags;
771 struct ref *this_ref;
/* Refs are obstack-allocated and live until df_free.  */
773 this_ref = (struct ref *) obstack_alloc (&df_ref_obstack,
775 DF_REF_REG (this_ref) = reg;
776 DF_REF_LOC (this_ref) = loc;
777 DF_REF_INSN (this_ref) = insn;
778 DF_REF_CHAIN (this_ref) = 0;
779 DF_REF_TYPE (this_ref) = ref_type;
780 DF_REF_FLAGS (this_ref) = ref_flags;
/* Register the new ref in the defs or uses table, growing the
   table by 25% when full, and assign the next sequential id.  */
782 if (ref_type == DF_REF_REG_DEF)
784 if (df->def_id >= df->def_size)
786 /* Make table 25 percent larger. */
787 df->def_size += (df->def_size / 4);
788 df->defs = xrealloc (df->defs,
789 df->def_size * sizeof (*df->defs));
791 DF_REF_ID (this_ref) = df->def_id;
792 df->defs[df->def_id++] = this_ref;
796 if (df->use_id >= df->use_size)
798 /* Make table 25 percent larger. */
799 df->use_size += (df->use_size / 4);
800 df->uses = xrealloc (df->uses,
801 df->use_size * sizeof (*df->uses));
803 DF_REF_ID (this_ref) = df->use_id;
804 df->uses[df->use_id++] = this_ref;
810 /* Create a new reference of type DF_REF_TYPE for a single register REG,
811 used inside the LOC rtx of INSN. */
813 df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags)
818 enum df_ref_type ref_type;
819 enum df_ref_flags ref_flags;
/* Thin wrapper: all the work is done by df_ref_create.  */
821 df_ref_create (df, reg, loc, insn, ref_type, ref_flags);
825 /* Create new references of type DF_REF_TYPE for each part of register REG
826 at address LOC within INSN of BB. */
828 df_ref_record (df, reg, loc, insn, ref_type, ref_flags)
833 enum df_ref_type ref_type;
834 enum df_ref_flags ref_flags;
/* Only REG and SUBREG rtx's are recordable references.  */
838 if (GET_CODE (reg) != REG && GET_CODE (reg) != SUBREG)
841 /* For the reg allocator we are interested in some SUBREG rtx's, but not
842 all. Notably only those representing a word extraction from a multi-word
843 reg. As written in the docu those should have the form
844 (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
845 XXX Is that true? We could also use the global word_mode variable. */
846 if (GET_CODE (reg) == SUBREG
847 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
848 || GET_MODE_SIZE (GET_MODE (reg))
849 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
851 loc = &SUBREG_REG (reg);
855 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
/* Hard registers get one ref per covered hard reg; pseudos get a
   single ref (the final call below).  */
856 if (regno < FIRST_PSEUDO_REGISTER)
861 if (! (df->flags & DF_HARD_REGS))
864 /* GET_MODE (reg) is correct here. We do not want to go into a SUBREG
865 for the mode, because we only want to add references to regs, which
866 are really referenced. E.g., a (subreg:SI (reg:DI 0) 0) does _not_
867 reference the whole reg 0 in DI mode (which would also include
868 reg 1, at least, if 0 and 1 are SImode registers). */
869 endregno = HARD_REGNO_NREGS (regno, GET_MODE (reg));
/* NOTE(review): endregno as computed above is a register COUNT, yet
   the loop below uses it as an END index.  The statements that add
   regno to endregno appear to have been dropped from this excerpt
   (internal lines 873-874 are missing) -- verify against the
   original before relying on this fragment.  */
870 if (GET_CODE (reg) == SUBREG)
871 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
872 SUBREG_BYTE (reg), GET_MODE (reg));
875 for (i = regno; i < endregno; i++)
876 df_ref_record_1 (df, regno_reg_rtx[i],
877 loc, insn, ref_type, ref_flags);
881 df_ref_record_1 (df, reg, loc, insn, ref_type, ref_flags);
886 /* Return non-zero if writes to paradoxical SUBREGs, or SUBREGs which
887 are too narrow, are read-modify-write. */
889 read_modify_subreg_p (x)
892 unsigned int isize, osize;
893 if (GET_CODE (x) != SUBREG)
/* isize: size of the full inner register; osize: size of the part
   accessed through the SUBREG.  */
895 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
896 osize = GET_MODE_SIZE (GET_MODE (x));
/* NOTE(review): the return statements between these size tests are
   missing from this excerpt; do not infer the result from the
   conditions alone.  */
899 if (isize <= UNITS_PER_WORD)
901 if (osize > UNITS_PER_WORD)
907 /* Process all the registers defined in the rtx, X. */
909 df_def_record_1 (df, x, bb, insn)
915 rtx *loc = &SET_DEST (x);
917 enum df_ref_flags flags = 0;
919 /* Some targets place small structures in registers for
920 return values of functions. */
/* A BLKmode PARALLEL destination spreads the value over several
   registers; recurse to record a def for each element.  */
921 if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
925 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
926 df_def_record_1 (df, XVECEXP (dst, 0, i), bb, insn);
930 #ifdef CLASS_CANNOT_CHANGE_MODE
931 if (GET_CODE (dst) == SUBREG
932 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
933 GET_MODE (SUBREG_REG (dst))))
934 flags |= DF_REF_MODE_CHANGE;
937 /* Maybe, we should flag the use of STRICT_LOW_PART somehow. It might
938 be handy for the reg allocator. */
/* Peel wrappers that make the store a partial, read-modify-write
   def until a plain REG or SUBREG remains.  */
939 while (GET_CODE (dst) == STRICT_LOW_PART
940 || GET_CODE (dst) == ZERO_EXTRACT
941 || GET_CODE (dst) == SIGN_EXTRACT
942 || read_modify_subreg_p (dst))
944 /* Strict low part always contains SUBREG, but we do not want to make
945 it appear outside, as whole register is always considered. */
946 if (GET_CODE (dst) == STRICT_LOW_PART)
948 loc = &XEXP (dst, 0);
951 #ifdef CLASS_CANNOT_CHANGE_MODE
952 if (GET_CODE (dst) == SUBREG
953 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
954 GET_MODE (SUBREG_REG (dst))))
955 flags |= DF_REF_MODE_CHANGE;
957 loc = &XEXP (dst, 0);
959 flags |= DF_REF_READ_WRITE;
/* Record the def only for REG or SUBREG-of-REG destinations.  */
962 if (GET_CODE (dst) == REG
963 || (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG))
964 df_ref_record (df, dst, loc, insn, DF_REF_REG_DEF, flags);
968 /* Process all the registers defined in the pattern rtx, X. */
970 df_defs_record (df, x, bb, insn)
976 RTX_CODE code = GET_CODE (x);
978 if (code == SET || code == CLOBBER)
980 /* Mark the single def within the pattern. */
981 df_def_record_1 (df, x, bb, insn);
983 else if (code == PARALLEL)
987 /* Mark the multiple defs within the pattern. */
988 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
/* Only SET and CLOBBER elements of the PARALLEL define regs.  */
990 code = GET_CODE (XVECEXP (x, 0, i));
991 if (code == SET || code == CLOBBER)
992 df_def_record_1 (df, XVECEXP (x, 0, i), bb, insn);
998 /* Process all the registers used in the rtx at address LOC. */
/* NOTE(review): most of this function's switch case labels and
   braces are missing from this excerpt; the comments below describe
   only what the visible statements establish.  */
1000 df_uses_record (df, loc, ref_type, bb, insn, flags)
1003 enum df_ref_type ref_type;
1006 enum df_ref_flags flags;
1014 code = GET_CODE (x);
1030 /* If we are clobbering a MEM, mark any registers inside the address
1032 if (GET_CODE (XEXP (x, 0)) == MEM)
1033 df_uses_record (df, &XEXP (XEXP (x, 0), 0),
1034 DF_REF_REG_MEM_STORE, bb, insn, flags);
1036 /* If we're clobbering a REG then we have a def so ignore. */
/* A MEM load: the address registers are uses of kind MEM_LOAD.  */
1040 df_uses_record (df, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn, flags);
1044 /* While we're here, optimize this case. */
1046 /* In case the SUBREG is not of a REG, do not optimize. */
1047 if (GET_CODE (SUBREG_REG (x)) != REG)
1049 loc = &SUBREG_REG (x);
1050 df_uses_record (df, loc, ref_type, bb, insn, flags);
1053 #ifdef CLASS_CANNOT_CHANGE_MODE
1054 if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
1055 GET_MODE (SUBREG_REG (x))))
1056 flags |= DF_REF_MODE_CHANGE;
1059 /* ... Fall through ... */
1062 /* See a REG (or SUBREG) other than being set. */
1063 df_ref_record (df, x, loc, insn, ref_type, flags);
/* A SET: the source is a plain use; the destination needs the case
   analysis below to find embedded uses.  */
1068 rtx dst = SET_DEST (x);
1070 df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn, 0);
1072 switch (GET_CODE (dst))
1074 enum df_ref_flags use_flags;
/* A narrow/paradoxical SUBREG store reads the register too.  */
1076 if (read_modify_subreg_p (dst))
1078 use_flags = DF_REF_READ_WRITE;
1079 #ifdef CLASS_CANNOT_CHANGE_MODE
1080 if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
1081 GET_MODE (SUBREG_REG (dst))))
1082 use_flags |= DF_REF_MODE_CHANGE;
1084 df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
1088 /* ... FALLTHRU ... */
/* Storing to MEM: the address registers are MEM_STORE uses.  */
1095 df_uses_record (df, &XEXP (dst, 0),
1096 DF_REF_REG_MEM_STORE,
1099 case STRICT_LOW_PART:
1100 /* A strict_low_part uses the whole REG and not just the SUBREG. */
1101 dst = XEXP (dst, 0);
1102 if (GET_CODE (dst) != SUBREG)
1104 use_flags = DF_REF_READ_WRITE;
1105 #ifdef CLASS_CANNOT_CHANGE_MODE
1106 if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
1107 GET_MODE (SUBREG_REG (dst))))
1108 use_flags |= DF_REF_MODE_CHANGE;
1110 df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
/* Bit-field destinations: operand 0 is read-modify-written, the
   position/width operands 1 and 2 are plain uses.  */
1115 df_uses_record (df, &XEXP (dst, 0), DF_REF_REG_USE, bb, insn,
1117 df_uses_record (df, &XEXP (dst, 1), DF_REF_REG_USE, bb, insn, 0);
1118 df_uses_record (df, &XEXP (dst, 2), DF_REF_REG_USE, bb, insn, 0);
1119 dst = XEXP (dst, 0);
1131 case UNSPEC_VOLATILE:
1135 /* Traditional and volatile asm instructions must be considered to use
1136 and clobber all hard registers, all pseudo-registers and all of
1137 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
1139 Consider for instance a volatile asm that changes the fpu rounding
1140 mode. An insn should not be moved across this even if it only uses
1141 pseudo-regs because it might give an incorrectly rounded result.
1143 For now, just mark any regs we can find in ASM_OPERANDS as
1146 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
1147 We can not just fall through here since then we would be confused
1148 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
1149 traditional asms unlike their normal usage. */
1150 if (code == ASM_OPERANDS)
1154 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
1155 df_uses_record (df, &ASM_OPERANDS_INPUT (x, j),
1156 DF_REF_REG_USE, bb, insn, 0);
/* Auto-modify addressing: the modified register is both read and
   written, hence the READ_WRITE def recorded here.  */
1168 /* Catch the def of the register being modified. */
1169 df_ref_record (df, XEXP (x, 0), &XEXP (x, 0), insn, DF_REF_REG_DEF, DF_REF_READ_WRITE);
1171 /* ... Fall through to handle uses ... */
1177 /* Recursively scan the operands of this expression. */
1179 const char *fmt = GET_RTX_FORMAT (code);
1182 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1186 /* Tail recursive case: save a function call level. */
1192 df_uses_record (df, &XEXP (x, i), ref_type, bb, insn, flags);
1194 else if (fmt[i] == 'E')
1197 for (j = 0; j < XVECLEN (x, i); j++)
1198 df_uses_record (df, &XVECEXP (x, i, j), ref_type,
1206 /* Record all the df within INSN of basic block BB. */
1208 df_insn_refs_record (df, bb, insn)
1219 /* Record register defs */
1220 df_defs_record (df, PATTERN (insn), bb, insn);
/* Optionally treat register-note contents as uses.  NOTE(review):
   the case labels of this switch are missing from the excerpt;
   presumably REG_EQUIV/REG_EQUAL are selected -- verify.  */
1222 if (df->flags & DF_EQUIV_NOTES)
1223 for (note = REG_NOTES (insn); note;
1224 note = XEXP (note, 1))
1226 switch (REG_NOTE_KIND (note))
1230 df_uses_record (df, &XEXP (note, 0), DF_REF_REG_USE,
1237 if (GET_CODE (insn) == CALL_INSN)
1242 /* Record the registers used to pass arguments. */
1243 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
1244 note = XEXP (note, 1))
1246 if (GET_CODE (XEXP (note, 0)) == USE)
1247 df_uses_record (df, &XEXP (XEXP (note, 0), 0), DF_REF_REG_USE,
1251 /* The stack ptr is used (honorarily) by a CALL insn. */
1252 x = df_reg_use_gen (STACK_POINTER_REGNUM);
1253 df_uses_record (df, &XEXP (x, 0), DF_REF_REG_USE, bb, insn, 0);
1255 if (df->flags & DF_HARD_REGS)
1257 /* Calls may also reference any of the global registers,
1258 so they are recorded as used. */
1259 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* Note SET_DEST is applied to a USE rtx here; both resolve to
   operand 0 of the generated rtx.  */
1262 x = df_reg_use_gen (i);
1263 df_uses_record (df, &SET_DEST (x),
1264 DF_REF_REG_USE, bb, insn, 0);
1269 /* Record the register uses. */
1270 df_uses_record (df, &PATTERN (insn),
1271 DF_REF_REG_USE, bb, insn, 0);
1273 if (GET_CODE (insn) == CALL_INSN)
1277 if (df->flags & DF_HARD_REGS)
1279 /* Kill all registers invalidated by a call. */
1280 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1281 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1283 rtx reg_clob = df_reg_clobber_gen (i);
1284 df_defs_record (df, reg_clob, bb, insn);
1288 /* There may be extra registers to be clobbered. */
1289 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1291 note = XEXP (note, 1))
1292 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1293 df_defs_record (df, XEXP (note, 0), bb, insn);
1299 /* Record all the refs within the basic block BB. */
1301 df_bb_refs_record (df, bb)
1307 /* Scan the block an insn at a time from beginning to end. */
1308 for (insn = bb->head; ; insn = NEXT_INSN (insn))
1312 /* Record defs within INSN. */
1313 df_insn_refs_record (df, bb, insn);
/* Stop after processing the block's last insn.  */
1315 if (insn == bb->end)
1321 /* Record all the refs in the basic blocks specified by BLOCKS. */
/* BLOCKS is a bitmap of basic-block indices; each set block is scanned
   with df_bb_refs_record.  */
1323 df_refs_record (df, blocks)
1329   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1331     df_bb_refs_record (df, bb);
1335 /* Dataflow analysis routines. */
1338 /* Create reg-def chains for basic block BB. These are a list of
1339    definitions for each register. */
/* Scans BB backwards (end to head) so that, after prepending, earlier
   defs end up at the head of each register's reg-def chain.  Refs with
   id below df->def_id_save were already chained in a previous pass and
   are skipped to avoid duplicates.  */
1341 df_bb_reg_def_chain_create (df, bb)
1347   /* Perhaps the defs should be sorted using a depth first search
1348      of the CFG (or possibly a breadth first search). We currently
1349      scan the basic blocks in reverse order so that the first defs
1350      appear at the start of the chain. */
1352   for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1353        insn = PREV_INSN (insn))
1355       struct df_link *link;
1356       unsigned int uid = INSN_UID (insn);
1358       if (! INSN_P (insn))
1361       for (link = df->insns[uid].defs; link; link = link->next)
1363           struct ref *def = link->ref;
1364           unsigned int dregno = DF_REF_REGNO (def);
1366           /* Do not add ref's to the chain twice, i.e., only add new
1367              refs. XXX the same could be done by testing if the
1368              current insn is a modified (or a new) one. This would be
1370           if (DF_REF_ID (def) < df->def_id_save)
1373           df->regs[dregno].defs
1374             = df_link_create (def, df->regs[dregno].defs);
1380 /* Create reg-def chains for each basic block within BLOCKS. These
1381    are a list of definitions for each register. */
/* Iterates the block bitmap; the commented-out _REV suffix hints a
   reverse traversal was once considered (see the XXX note in the
   per-block routine).  */
1383 df_reg_def_chain_create (df, blocks)
1389   FOR_EACH_BB_IN_BITMAP/*_REV*/ (blocks, 0, bb,
1391     df_bb_reg_def_chain_create (df, bb);
1396 /* Create reg-use chains for basic block BB. These are a list of uses
1397    for each register. */
/* Mirror image of df_bb_reg_def_chain_create: scans forward so the
   last uses end up at the head of each register's reg-use chain, and
   skips refs already chained (id below df->use_id_save).  */
1399 df_bb_reg_use_chain_create (df, bb)
1405   /* Scan in forward order so that the last uses appear at the start
1408   for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1409        insn = NEXT_INSN (insn))
1411       struct df_link *link;
1412       unsigned int uid = INSN_UID (insn);
1414       if (! INSN_P (insn))
1417       for (link = df->insns[uid].uses; link; link = link->next)
1419           struct ref *use = link->ref;
1420           unsigned int uregno = DF_REF_REGNO (use);
1422           /* Do not add ref's to the chain twice, i.e., only add new
1423              refs. XXX the same could be done by testing if the
1424              current insn is a modified (or a new) one. This would be
1426           if (DF_REF_ID (use) < df->use_id_save)
1429           df->regs[uregno].uses
1430             = df_link_create (use, df->regs[uregno].uses);
1436 /* Create reg-use chains for each basic block within BLOCKS. These
1437    are a list of uses for each register. */
/* Per-block driver; see df_bb_reg_use_chain_create for the ordering
   invariant (last use at chain head).  */
1439 df_reg_use_chain_create (df, blocks)
1445   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1447     df_bb_reg_use_chain_create (df, bb);
1452 /* Create def-use chains from reaching use bitmaps for basic block BB. */
/* RU is a scratch bitmap seeded with the uses reaching the end of BB
   (ru_out).  Scanning backwards, each def captures the still-live uses
   of its regno into its chain and clears them from RU (the def kills
   them going upwards); each use then re-enters RU.  */
1454 df_bb_du_chain_create (df, bb, ru)
1459   struct bb_info *bb_info = DF_BB_INFO (df, bb);
1462   bitmap_copy (ru, bb_info->ru_out);
1464   /* For each def in BB create a linked list (chain) of uses
1465      reached from the def. */
1466   for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1467        insn = PREV_INSN (insn))
1469       struct df_link *def_link;
1470       struct df_link *use_link;
1471       unsigned int uid = INSN_UID (insn);
1473       if (! INSN_P (insn))
1476       /* For each def in insn... */
1477       for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1479           struct ref *def = def_link->ref;
1480           unsigned int dregno = DF_REF_REGNO (def);
1482           DF_REF_CHAIN (def) = 0;
1484           /* While the reg-use chains are not essential, it
1485              is _much_ faster to search these short lists rather
1486              than all the reaching uses, especially for large functions. */
1487           for (use_link = df->regs[dregno].uses; use_link;
1488                use_link = use_link->next)
1490               struct ref *use = use_link->ref;
1492               if (bitmap_bit_p (ru, DF_REF_ID (use)))
1495                   = df_link_create (use, DF_REF_CHAIN (def));
1497                   bitmap_clear_bit (ru, DF_REF_ID (use));
1502       /* For each use in insn... */
1503       for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1505           struct ref *use = use_link->ref;
1506           bitmap_set_bit (ru, DF_REF_ID (use));
1512 /* Create def-use chains from reaching use bitmaps for basic blocks
/* Allocates one scratch bitmap and reuses it for every block;
   the per-block routine re-seeds it from ru_out each time.  */
1515 df_du_chain_create (df, blocks)
1522   ru = BITMAP_XMALLOC ();
1524   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1526     df_bb_du_chain_create (df, bb, ru);
1533 /* Create use-def chains from reaching def bitmaps for basic block BB. */
/* Forward scan with a per-regno "last def seen in this BB" table
   (reg_def_last, zeroed on entry).  A use whose regno was already
   defined in BB gets a singleton chain to that local def; otherwise
   all defs of the regno that reach BB's start (rd_in) are chained.  */
1535 df_bb_ud_chain_create (df, bb)
1539   struct bb_info *bb_info = DF_BB_INFO (df, bb);
1540   struct ref **reg_def_last = df->reg_def_last;
1543   memset (reg_def_last, 0, df->n_regs * sizeof (struct ref *));
1545   /* For each use in BB create a linked list (chain) of defs
1546      that reach the use. */
1547   for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1548        insn = NEXT_INSN (insn))
1550       unsigned int uid = INSN_UID (insn);
1551       struct df_link *use_link;
1552       struct df_link *def_link;
1554       if (! INSN_P (insn))
1557       /* For each use in insn... */
1558       for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1560           struct ref *use = use_link->ref;
1561           unsigned int regno = DF_REF_REGNO (use);
1563           DF_REF_CHAIN (use) = 0;
1565           /* Has regno been defined in this BB yet? If so, use
1566              the last def as the single entry for the use-def
1567              chain for this use. Otherwise, we need to add all
1568              the defs using this regno that reach the start of
1570           if (reg_def_last[regno])
1573               = df_link_create (reg_def_last[regno], 0);
1577               /* While the reg-def chains are not essential, it is
1578                  _much_ faster to search these short lists rather than
1579                  all the reaching defs, especially for large
1581               for (def_link = df->regs[regno].defs; def_link;
1582                    def_link = def_link->next)
1584                   struct ref *def = def_link->ref;
1586                   if (bitmap_bit_p (bb_info->rd_in, DF_REF_ID (def)))
1589                       = df_link_create (def, DF_REF_CHAIN (use));
1596       /* For each def in insn... record the last def of each reg. */
1597       for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1599           struct ref *def = def_link->ref;
1600           int dregno = DF_REF_REGNO (def);
1602           reg_def_last[dregno] = def;
1608 /* Create use-def chains from reaching def bitmaps for basic blocks
/* Per-block driver over the BLOCKS bitmap.  */
1611 df_ud_chain_create (df, blocks)
1617   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1619     df_bb_ud_chain_create (df, bb);
/* Reaching-defs transfer function for the iterative solver:
   out = gen U (in - kill); *CHANGED reports whether OUT grew.  */
1626 df_rd_transfer_function (bb, changed, in, out, gen, kill, data)
1627      int bb ATTRIBUTE_UNUSED;
1629      bitmap in, out, gen, kill;
1630      void *data ATTRIBUTE_UNUSED;
1632   *changed = bitmap_union_of_diff (out, gen, in, kill);
/* Reaching-uses (upward-exposed uses) transfer function, the backward
   dual of df_rd_transfer_function: in = gen U (out - kill).  */
1637 df_ru_transfer_function (bb, changed, in, out, gen, kill, data)
1638      int bb ATTRIBUTE_UNUSED;
1640      bitmap in, out, gen, kill;
1641      void *data ATTRIBUTE_UNUSED;
1643   *changed = bitmap_union_of_diff (in, gen, out, kill);
/* Live-registers transfer function (classic backward liveness):
   in = use U (out - def).  */
1648 df_lr_transfer_function (bb, changed, in, out, use, def, data)
1649      int bb ATTRIBUTE_UNUSED;
1651      bitmap in, out, use, def;
1652      void *data ATTRIBUTE_UNUSED;
1654   *changed = bitmap_union_of_diff (in, use, out, def);
1658 /* Compute local reaching def info for basic block BB. */
/* Builds rd_gen/rd_kill for BB.  Each def conservatively kills every
   other def of the same regno (over-approximate, see comment below)
   and clears any earlier same-regno def from rd_gen before adding
   itself, so only downward-surviving defs remain in gen.  */
1660 df_bb_rd_local_compute (df, bb)
1664   struct bb_info *bb_info = DF_BB_INFO (df, bb);
1667   for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1668        insn = NEXT_INSN (insn))
1670       unsigned int uid = INSN_UID (insn);
1671       struct df_link *def_link;
1673       if (! INSN_P (insn))
1676       for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1678           struct ref *def = def_link->ref;
1679           unsigned int regno = DF_REF_REGNO (def);
1680           struct df_link *def2_link;
1682           for (def2_link = df->regs[regno].defs; def2_link;
1683                def2_link = def2_link->next)
1685               struct ref *def2 = def2_link->ref;
1687               /* Add all defs of this reg to the set of kills. This
1688                  is greedy since many of these defs will not actually
1689                  be killed by this BB but it keeps things a lot
1691               bitmap_set_bit (bb_info->rd_kill, DF_REF_ID (def2));
1693               /* Zap from the set of gens for this BB. */
1694               bitmap_clear_bit (bb_info->rd_gen, DF_REF_ID (def2));
1697           bitmap_set_bit (bb_info->rd_gen, DF_REF_ID (def));
1701   bb_info->rd_valid = 1;
1705 /* Compute local reaching def info for each basic block within BLOCKS. */
/* Per-block driver for df_bb_rd_local_compute.  */
1707 df_rd_local_compute (df, blocks)
1713   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1715     df_bb_rd_local_compute (df, bb);
1720 /* Compute local reaching use (upward exposed use) info for basic
/* Builds ru_gen/ru_kill for BB by scanning backwards: a def greedily
   kills all uses of its regno (and removes them from gen, since they
   are not upward exposed past the def); each use is then added to gen.  */
1723 df_bb_ru_local_compute (df, bb)
1727   /* This is much more tricky than computing reaching defs. With
1728      reaching defs, defs get killed by other defs. With upwards
1729      exposed uses, these get killed by defs with the same regno. */
1731   struct bb_info *bb_info = DF_BB_INFO (df, bb);
1735   for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1736        insn = PREV_INSN (insn))
1738       unsigned int uid = INSN_UID (insn);
1739       struct df_link *def_link;
1740       struct df_link *use_link;
1742       if (! INSN_P (insn))
1745       for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1747           struct ref *def = def_link->ref;
1748           unsigned int dregno = DF_REF_REGNO (def);
1750           for (use_link = df->regs[dregno].uses; use_link;
1751                use_link = use_link->next)
1753               struct ref *use = use_link->ref;
1755               /* Add all uses of this reg to the set of kills. This
1756                  is greedy since many of these uses will not actually
1757                  be killed by this BB but it keeps things a lot
1759               bitmap_set_bit (bb_info->ru_kill, DF_REF_ID (use));
1761               /* Zap from the set of gens for this BB. */
1762               bitmap_clear_bit (bb_info->ru_gen, DF_REF_ID (use));
1766       for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1768           struct ref *use = use_link->ref;
1769           /* Add use to set of gens in this BB. */
1770           bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
1773   bb_info->ru_valid = 1;
1777 /* Compute local reaching use (upward exposed use) info for each basic
1778    block within BLOCKS. */
/* Per-block driver for df_bb_ru_local_compute.  */
1780 df_ru_local_compute (df, blocks)
1786   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1788     df_bb_ru_local_compute (df, bb);
1793 /* Compute local live variable info for basic block BB. */
/* Builds lr_def/lr_use for liveness, indexed by regno (not ref id).
   Backward scan: a def removes the regno from lr_use (it is no longer
   upward exposed) and adds it to lr_def; a use adds it to lr_use.  */
1795 df_bb_lr_local_compute (df, bb)
1799   struct bb_info *bb_info = DF_BB_INFO (df, bb);
1802   for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1803        insn = PREV_INSN (insn))
1805       unsigned int uid = INSN_UID (insn);
1806       struct df_link *link;
1808       if (! INSN_P (insn))
1811       for (link = df->insns[uid].defs; link; link = link->next)
1813           struct ref *def = link->ref;
1814           unsigned int dregno = DF_REF_REGNO (def);
1816           /* Add def to set of defs in this BB. */
1817           bitmap_set_bit (bb_info->lr_def, dregno);
1819           bitmap_clear_bit (bb_info->lr_use, dregno);
1822       for (link = df->insns[uid].uses; link; link = link->next)
1824           struct ref *use = link->ref;
1825           /* Add use to set of uses in this BB. */
1826           bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
1829   bb_info->lr_valid = 1;
1833 /* Compute local live variable info for each basic block within BLOCKS. */
/* Per-block driver for df_bb_lr_local_compute.  */
1835 df_lr_local_compute (df, blocks)
1841   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1843     df_bb_lr_local_compute (df, bb);
1848 /* Compute register info: lifetime, bb, and number of defs and uses
1849    for basic block BB. */
/* LIVE is a scratch bitmap seeded from lr_out.  Walking backwards,
   defs kill their regno, uses revive it, and after each insn every
   currently-live regno has its lifetime counter bumped.  n_defs and
   n_uses are accumulated per regno as a side effect.  */
1851 df_bb_reg_info_compute (df, bb, live)
1856   struct reg_info *reg_info = df->regs;
1857   struct bb_info *bb_info = DF_BB_INFO (df, bb);
1860   bitmap_copy (live, bb_info->lr_out);
1862   for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1863        insn = PREV_INSN (insn))
1865       unsigned int uid = INSN_UID (insn);
1867       struct df_link *link;
1869       if (! INSN_P (insn))
1872       for (link = df->insns[uid].defs; link; link = link->next)
1874           struct ref *def = link->ref;
1875           unsigned int dregno = DF_REF_REGNO (def);
1877           /* Kill this register. */
1878           bitmap_clear_bit (live, dregno);
1879           reg_info[dregno].n_defs++;
1882       for (link = df->insns[uid].uses; link; link = link->next)
1884           struct ref *use = link->ref;
1885           unsigned int uregno = DF_REF_REGNO (use);
1887           /* This register is now live. */
1888           bitmap_set_bit (live, uregno);
1889           reg_info[uregno].n_uses++;
1892       /* Increment lifetimes of all live registers. */
1893       EXECUTE_IF_SET_IN_BITMAP (live, 0, regno,
1895         reg_info[regno].lifetime++;
1901 /* Compute register info: lifetime, bb, and number of defs and uses. */
/* Allocates the shared LIVE scratch bitmap, runs the per-block pass
   over BLOCKS, then frees the bitmap.  */
1903 df_reg_info_compute (df, blocks)
1910   live = BITMAP_XMALLOC ();
1912   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1914     df_bb_reg_info_compute (df, bb, live);
1917   BITMAP_XFREE (live);
1921 /* Assign LUIDs for BB. */
/* Numbers BB's insns with logical uids, head to end inclusive.
   NOTE(review): the two consecutive assignments below look redundant
   as shown; an INSN_P guard on the post-increment line is presumably
   elided from this listing (so non-insns share the next insn's LUID)
   -- confirm against the full source before changing anything.  */
1923 df_bb_luids_set (df, bb)
1930   /* The LUIDs are monotonically increasing for each basic block. */
1932   for (insn = bb->head; ; insn = NEXT_INSN (insn))
1935       DF_INSN_LUID (df, insn) = luid++;
1936       DF_INSN_LUID (df, insn) = luid;
1938       if (insn == bb->end)
1945 /* Assign LUIDs for each basic block within BLOCKS. */
/* Accumulates the per-block counts returned by df_bb_luids_set.  */
1947 df_luids_set (df, blocks)
1954   FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1956     total += df_bb_luids_set (df, bb);
1962 /* Perform dataflow analysis using existing DF structure for blocks
1963    within BLOCKS. If BLOCKS is zero, use all basic blocks in the CFG. */
/* Core driver: expands FLAGS into AFLAGS (chain building implies the
   underlying RD/RU problems), rescans refs (incrementally via
   df_refs_update when UPDATE, else from scratch), then runs the
   requested global problems with iterative_dataflow_bitmap and builds
   the requested chains.  Ordering arrays (dfs/rc/rts and inverses)
   are allocated, used to drive the solver, and freed at the end.  */
1965 df_analyse_1 (df, blocks, flags, update)
1978   if (flags & DF_UD_CHAIN)
1979     aflags |= DF_RD | DF_RD_CHAIN;
1981   if (flags & DF_DU_CHAIN)
1985     aflags |= DF_RU_CHAIN;
1987   if (flags & DF_REG_INFO)
1991     blocks = df->all_blocks;
1996       df_refs_update (df);
1997       /* More fine grained incremental dataflow analysis would be
1998          nice. For now recompute the whole shebang for the
2001       df_refs_unlink (df, blocks);
2003       /* All the def-use, use-def chains can be potentially
2004          modified by changes in one block. The size of the
2005          bitmaps can also change. */
2009       /* Scan the function for all register defs and uses. */
2011       df_refs_record (df, blocks);
2013       /* Link all the new defs and uses to the insns. */
2014       df_refs_process (df);
2017   /* Allocate the bitmaps now the total number of defs and uses are
2018      known. If the number of defs or uses have changed, then
2019      these bitmaps need to be reallocated. */
2020   df_bitmaps_alloc (df, aflags);
2022   /* Set the LUIDs for each specified basic block. */
2023   df_luids_set (df, blocks);
2025   /* Recreate reg-def and reg-use chains from scratch so that first
2026      def is at the head of the reg-def chain and the last use is at
2027      the head of the reg-use chain. This is only important for
2028      regs local to a basic block as it speeds up searching. */
2029   if (aflags & DF_RD_CHAIN)
2031       df_reg_def_chain_create (df, blocks);
2034   if (aflags & DF_RU_CHAIN)
2036       df_reg_use_chain_create (df, blocks);
  /* Compute block orderings used to schedule the iterative solver:
     reverse-completion order for forward problems, reverse topological
     sort for backward ones.  */
2039   df->dfs_order = xmalloc (sizeof (int) * n_basic_blocks);
2040   df->rc_order = xmalloc (sizeof (int) * n_basic_blocks);
2041   df->rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2042   df->inverse_dfs_map = xmalloc (sizeof (int) * last_basic_block);
2043   df->inverse_rc_map = xmalloc (sizeof (int) * last_basic_block);
2044   df->inverse_rts_map = xmalloc (sizeof (int) * last_basic_block);
2046   flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2047   flow_reverse_top_sort_order_compute (df->rts_order);
2048   for (i = 0; i < n_basic_blocks; i++)
2050       df->inverse_dfs_map[df->dfs_order[i]] = i;
2051       df->inverse_rc_map[df->rc_order[i]] = i;
2052       df->inverse_rts_map[df->rts_order[i]] = i;
2056       /* Compute the sets of gens and kills for the defs of each bb. */
2057       df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
2059       bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2060       bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2061       bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
2062       bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
2065       in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
2066       out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
2067       gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
2068       kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
2070       iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
2071                                  DF_FORWARD, DF_UNION, df_rd_transfer_function,
2072                                  df->inverse_rc_map, NULL);
2080   if (aflags & DF_UD_CHAIN)
2082       /* Create use-def chains. */
2083       df_ud_chain_create (df, df->all_blocks);
2085       if (! (flags & DF_RD))
2091       /* Compute the sets of gens and kills for the upwards exposed
2093       df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
2095       bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2096       bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2097       bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
2098       bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
2101       in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
2102       out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
2103       gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
2104       kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
2106       iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
2107                                  DF_BACKWARD, DF_UNION, df_ru_transfer_function,
2108                                  df->inverse_rts_map, NULL);
2116   if (aflags & DF_DU_CHAIN)
2118       /* Create def-use chains. */
2119       df_du_chain_create (df, df->all_blocks);
2121       if (! (flags & DF_RU))
2125   /* Free up bitmaps that are no longer required. */
2127     df_bitmaps_free (df, dflags);
2131       /* Compute the sets of defs and uses of live variables. */
2132       df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
2134       bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
2135       bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
2136       bitmap *use = xmalloc (sizeof (bitmap) * last_basic_block);
2137       bitmap *def = xmalloc (sizeof (bitmap) * last_basic_block);
2140       in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
2141       out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
2142       use[bb->index] = DF_BB_INFO (df, bb)->lr_use;
2143       def[bb->index] = DF_BB_INFO (df, bb)->lr_def;
2145       iterative_dataflow_bitmap (in, out, use, def, df->all_blocks,
2146                                  DF_BACKWARD, DF_UNION, df_lr_transfer_function,
2147                                  df->inverse_rts_map, NULL);
2155   if (aflags & DF_REG_INFO)
2157       df_reg_info_compute (df, df->all_blocks);
2159   free (df->dfs_order);
2160   free (df->rc_order);
2161   free (df->rts_order);
2162   free (df->inverse_rc_map);
2163   free (df->inverse_dfs_map);
2164   free (df->inverse_rts_map);
2168 /* Initialize dataflow analysis. */
2174 df = xcalloc (1, sizeof (struct df));
2176 /* Squirrel away a global for debugging. */
2183 /* Start queuing refs. */
2188 df->def_id_save = df->def_id;
2189 df->use_id_save = df->use_id;
2190 /* ???? Perhaps we should save current obstack state so that we can
2196 /* Process queued refs. */
/* Attaches every ref queued since the last save point (ids in
   [def_id_save, def_id) and [use_id_save, use_id)) to the head of its
   insn's def/use list.  */
2198 df_refs_process (df)
2203   /* Build new insn-def chains. */
2204   for (i = df->def_id_save; i != df->def_id; i++)
2206       struct ref *def = df->defs[i];
2207       unsigned int uid = DF_REF_INSN_UID (def);
2209       /* Add def to head of def list for INSN. */
2211         = df_link_create (def, df->insns[uid].defs);
2214   /* Build new insn-use chains. */
2215   for (i = df->use_id_save; i != df->use_id; i++)
2217       struct ref *use = df->uses[i];
2218       unsigned int uid = DF_REF_INSN_UID (use);
2220       /* Add use to head of use list for INSN. */
2222         = df_link_create (use, df->insns[uid].uses);
2228 /* Update refs for basic block BB. */
/* Rescans only the insns of BB flagged in df->insns_modified:
   old refs are unlinked, then the insn is re-scanned for new refs.  */
2230 df_bb_refs_update (df, bb)
2237   /* While we have to scan the chain of insns for this BB, we do not
2238      need to allocate and queue a long chain of BB/INSN pairs. Using
2239      a bitmap for insns_modified saves memory and avoids queuing
2242   for (insn = bb->head; ; insn = NEXT_INSN (insn))
2246       uid = INSN_UID (insn);
2248       if (bitmap_bit_p (df->insns_modified, uid))
2250           /* Delete any allocated refs of this insn. MPH, FIXME. */
2251           df_insn_refs_unlink (df, bb, insn);
2253           /* Scan the insn for refs. */
2254           df_insn_refs_record (df, bb, insn);
2258       if (insn == bb->end)
2265 /* Process all the modified/deleted insns that were queued. */
/* NOTE(review): the function header line is elided from this listing;
   the body matches df_refs_update.  Grows the reg table if new pseudos
   appeared, rescans each modified block, then links the new refs.  */
2273   if ((unsigned int) max_reg_num () >= df->reg_size)
2274     df_reg_table_realloc (df, 0);
2278   FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
2280     count += df_bb_refs_update (df, bb);
2283   df_refs_process (df);
2288 /* Return nonzero if any of the requested blocks in the bitmap
2289    BLOCKS have been modified. */
/* BLOCKS may be 0 (check every block) or (bitmap) -1 (the "modified
   blocks" sentinel used by df_analyse); both accept any modified bb.  */
2291 df_modified_p (df, blocks)
2302       if (bitmap_bit_p (df->bbs_modified, bb->index)
2303           && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, bb->index)))
2313 /* Analyze dataflow info for the basic blocks specified by the bitmap
2314    BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
2315    modified blocks if BLOCKS is -1. */
/* Public entry point.  Decides between a full recomputation (block
   count changed, or nothing modified and same flags) and an
   incremental update of the modified blocks; clears the modification
   bitmaps after an incremental pass.  */
2317 df_analyse (df, blocks, flags)
2324   /* We could deal with additional basic blocks being created by
2325      rescanning everything again. */
2326   if (df->n_bbs && df->n_bbs != (unsigned int) last_basic_block)
2329   update = df_modified_p (df, blocks);
2330   if (update || (flags != df->flags))
2336           /* Recompute everything from scratch. */
2339           /* Allocate and initialize data structures. */
2340           df_alloc (df, max_reg_num ());
2341           df_analyse_1 (df, 0, flags, 0);
2346           if (blocks == (bitmap) -1)
2347             blocks = df->bbs_modified;
2352           df_analyse_1 (df, blocks, flags, 1);
2353           bitmap_zero (df->bbs_modified);
2354           bitmap_zero (df->insns_modified);
2361 /* Free all the dataflow info and the DF structure. */
2371 /* Unlink INSN from its reference information. */
/* Detaches every def and use recorded for INSN from the global
   reg-def/reg-use structures, then empties the insn's own lists.
   BB is unused (kept for interface symmetry).  */
2373 df_insn_refs_unlink (df, bb, insn)
2375      basic_block bb ATTRIBUTE_UNUSED;
2378   struct df_link *link;
2381   uid = INSN_UID (insn);
2383   /* Unlink all refs defined by this insn. */
2384   for (link = df->insns[uid].defs; link; link = link->next)
2385     df_def_unlink (df, link->ref);
2387   /* Unlink all refs used by this insn. */
2388   for (link = df->insns[uid].uses; link; link = link->next)
2389     df_use_unlink (df, link->ref);
2391   df->insns[uid].defs = 0;
2392   df->insns[uid].uses = 0;
2397 /* Unlink all the insns within BB from their reference information. */
/* Head-to-end walk, delegating to df_insn_refs_unlink per insn.  */
2399 df_bb_refs_unlink (df, bb)
2405   /* Scan the block an insn at a time from beginning to end. */
2406   for (insn = bb->head; ; insn = NEXT_INSN (insn))
2410       /* Unlink refs for INSN. */
2411       df_insn_refs_unlink (df, bb, insn);
2413       if (insn == bb->end)
2419 /* Unlink all the refs in the basic blocks specified by BLOCKS.
2420    Not currently used. */
/* When BLOCKS is null the second call (over every block) runs instead
   of the bitmap-driven loop; the branch structure is elided here.  */
2422 df_refs_unlink (df, blocks)
2430       FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2432         df_bb_refs_unlink (df, bb);
2438         df_bb_refs_unlink (df, bb);
2443 /* Functions to modify insns. */
2446 /* Delete INSN and all its reference information. */
2448 df_insn_delete (df, bb, insn)
2450 basic_block bb ATTRIBUTE_UNUSED;
2453 /* If the insn is a jump, we should perhaps call delete_insn to
2454 handle the JUMP_LABEL? */
2456 /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label. */
2457 if (insn == bb->head)
2460 /* Delete the insn. */
2463 df_insn_modify (df, bb, insn);
2465 return NEXT_INSN (insn);
2469 /* Mark that INSN within BB may have changed (created/modified/deleted).
2470    This may be called multiple times for the same insn. There is no
2471    harm calling this function if the insn wasn't changed; it will just
2472    slow down the rescanning of refs. */
/* Records the change in both the bbs_modified and insns_modified
   bitmaps, growing the insn table first if the uid is out of range.  */
2474 df_insn_modify (df, bb, insn)
2481   uid = INSN_UID (insn);
2482   if (uid >= df->insn_size)
2483     df_insn_table_realloc (df, uid);
2485   bitmap_set_bit (df->bbs_modified, bb->index);
2486   bitmap_set_bit (df->insns_modified, uid);
2488   /* For incremental updating on the fly, perhaps we could make a copy
2489      of all the refs of the original insn and turn them into
2490      anti-refs. When df_refs_update finds these anti-refs, it annihilates
2491      the original refs. If validate_change fails then these anti-refs
2492      will just get ignored. */
2496 typedef struct replace_args
2505 /* Replace mem pointed to by PX with its associated pseudo register.
2506    DATA is actually a pointer to a structure describing the
2507    instruction currently being scanned and the MEM we are currently
/* for_each_rtx callback: replaces only the MEM rtx_equal_p to
   args->match, queuing the change via validate_change (in-group mode,
   last arg 1) rather than applying it immediately.  */
2510 df_rtx_mem_replace (px, data)
2514   replace_args *args = (replace_args *) data;
2517   if (mem == NULL_RTX)
2520   switch (GET_CODE (mem))
2526       /* We're not interested in the MEM associated with a
2527          CONST_DOUBLE, so there's no need to traverse into one. */
2531       /* This is not a MEM. */
2535   if (!rtx_equal_p (args->match, mem))
2536     /* This is not the MEM we are currently replacing. */
2539   /* Actually replace the MEM. */
2540   validate_change (args->insn, px, args->replacement, 1);
/* Replace every MEM matching MEM within INSN by REG, via
   df_rtx_mem_replace, then mark INSN modified for rescanning.
   Returns args.modified.  */
2548 df_insn_mem_replace (df, bb, insn, mem, reg)
2559   args.replacement = reg;
2562   /* Search and replace all matching mems within insn. */
2563   for_each_rtx (&insn, df_rtx_mem_replace, &args);
2566   df_insn_modify (df, bb, insn);
2568   /* ???? FIXME. We may have a new def or one or more new uses of REG
2569      in INSN. REG should be a new pseudo so it won't affect the
2570      dataflow information that we currently have. We should add
2571      the new uses and defs to INSN and then recreate the chains
2572      when df_analyse is called. */
2573   return args.modified;
2577 /* Replace one register with another. Called through for_each_rtx; PX
2578    points to the rtx being scanned. DATA is actually a pointer to a
2579    structure of arguments. */
/* Pointer-identity match (x == args->match), unlike the rtx_equal_p
   test used for MEMs; change is queued with validate_change.  */
2581 df_rtx_reg_replace (px, data)
2586   replace_args *args = (replace_args *) data;
2591   if (x == args->match)
2593       validate_change (args->insn, px, args->replacement, 1);
2601 /* Replace the reg within every ref on CHAIN that is within the set
2602 BLOCKS of basic blocks with NEWREG. Also update the regs within
2605 df_refs_reg_replace (df, blocks, chain, oldreg, newreg)
2608 struct df_link *chain;
2612 struct df_link *link;
2616 blocks = df->all_blocks;
2618 args.match = oldreg;
2619 args.replacement = newreg;
2622 for (link = chain; link; link = link->next)
2624 struct ref *ref = link->ref;
2625 rtx insn = DF_REF_INSN (ref);
2627 if (! INSN_P (insn))
2630 if (bitmap_bit_p (blocks, DF_REF_BBNO (ref)))
2632 df_ref_reg_replace (df, ref, oldreg, newreg);
2634 /* Replace occurrences of the reg within the REG_NOTES. */
2635 if ((! link->next || DF_REF_INSN (ref)
2636 != DF_REF_INSN (link->next->ref))
2637 && REG_NOTES (insn))
2640 for_each_rtx (®_NOTES (insn), df_rtx_reg_replace, &args);
2645 /* Temporary check to ensure that we have a grip on which
2646 regs should be replaced. */
2653 /* Replace all occurrences of register OLDREG with register NEWREG in
2654    blocks defined by bitmap BLOCKS. This also replaces occurrences of
2655    OLDREG in the REG_NOTES but only for insns containing OLDREG. This
2656    routine expects the reg-use and reg-def chains to be valid. */
/* Simply applies df_refs_reg_replace to OLDREG's def chain and then
   its use chain.  */
2658 df_reg_replace (df, blocks, oldreg, newreg)
2664   unsigned int oldregno = REGNO (oldreg);
2666   df_refs_reg_replace (df, blocks, df->regs[oldregno].defs, oldreg, newreg);
2667   df_refs_reg_replace (df, blocks, df->regs[oldregno].uses, oldreg, newreg);
2672 /* Try replacing the reg within REF with NEWREG. Do not modify
2673    def-use/use-def chains. */
/* Skips refs in insns that were deleted (converted to NOTEs) and refs
   whose register is not OLDREG (when OLDREG is non-null).  On a
   successful queued validate_change, marks the insn modified.  */
2675 df_ref_reg_replace (df, ref, oldreg, newreg)
2681   /* Check that insn was deleted by being converted into a NOTE. If
2682      so ignore this insn. */
2683   if (! INSN_P (DF_REF_INSN (ref)))
2686   if (oldreg && oldreg != DF_REF_REG (ref))
2689   if (! validate_change (DF_REF_INSN (ref), DF_REF_LOC (ref), newreg, 1))
2692   df_insn_modify (df, DF_REF_BB (ref), DF_REF_INSN (ref));
/* Move REGNO's first def in DEF_INSN onto USE_INSN, deleting REGNO's
   last use in USE_INSN.  The def's df_link nodes are recycled: they
   are unlinked from DEF_INSN's insn-def list and from the reg-def
   chain, then pushed onto USE_INSN's insn-def list and back onto the
   head of the reg-def chain.  */
2698 df_bb_def_use_swap (df, bb, def_insn, use_insn, regno)
2709   struct df_link *link;
2711   def = df_bb_insn_regno_first_def_find (df, bb, def_insn, regno);
2715   use = df_bb_insn_regno_last_use_find (df, bb, use_insn, regno);
2719   /* The USE no longer exists. */
2720   use_uid = INSN_UID (use_insn);
2721   df_use_unlink (df, use);
2722   df_ref_unlink (&df->insns[use_uid].uses, use);
2724   /* The DEF requires shifting so remove it from DEF_INSN
2725      and add it to USE_INSN by reusing LINK. */
2726   def_uid = INSN_UID (def_insn);
2727   link = df_ref_unlink (&df->insns[def_uid].defs, def);
2729   link->next = df->insns[use_uid].defs;
2730   df->insns[use_uid].defs = link;
2733   link = df_ref_unlink (&df->regs[regno].defs, def);
2735   link->next = df->regs[regno].defs;
/* Bug fix: this stored the recycled link into df->insns[regno].defs,
   indexing the per-insn table with a register number and losing the
   reg-def chain head that the previous two lines rebuilt.  The chain
   head must go back into df->regs[regno].defs.  */
2736   df->regs[regno].defs = link;
2739   DF_REF_INSN (def) = use_insn;
2744 /* Record df between FIRST_INSN and LAST_INSN inclusive. All new
2745    insns must be processed by this routine. */
/* Marks each insn in the range modified (growing the insn table as
   needed).  A non-const call or a label in the range would require a
   new basic block, which this routine cannot create.  */
2747 df_insns_modify (df, bb, first_insn, last_insn)
2755   for (insn = first_insn; ; insn = NEXT_INSN (insn))
2759       /* A non-const call should not have slipped through the net. If
2760          it does, we need to create a new basic block. Ouch. The
2761          same applies for a label. */
2762       if ((GET_CODE (insn) == CALL_INSN
2763            && ! CONST_OR_PURE_CALL_P (insn))
2764           || GET_CODE (insn) == CODE_LABEL)
2767       uid = INSN_UID (insn);
2769       if (uid >= df->insn_size)
2770         df_insn_table_realloc (df, uid);
2772       df_insn_modify (df, bb, insn);
2774       if (insn == last_insn)
2780 /* Emit PATTERN before INSN within BB. */
/* Emits via emit_insn_before and marks the newly created insns
   (from NEXT_INSN (prev_insn) through ret_insn) as modified.
   Inserting at bb->head is rejected.  */
2782 df_pattern_emit_before (df, pattern, bb, insn)
2783      struct df *df ATTRIBUTE_UNUSED;
2789   rtx prev_insn = PREV_INSN (insn);
2791   /* We should not be inserting before the start of the block. */
2792   if (insn == bb->head)
2794   ret_insn = emit_insn_before (pattern, insn);
2795   if (ret_insn == insn)
2798   df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
2803 /* Emit PATTERN after INSN within BB. */
/* Same bookkeeping as df_pattern_emit_before but with
   emit_insn_after; new insns run from NEXT_INSN (insn) to ret_insn.  */
2805 df_pattern_emit_after (df, pattern, bb, insn)
2813   ret_insn = emit_insn_after (pattern, insn);
2814   if (ret_insn == insn)
2817   df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2822 /* Emit jump PATTERN after INSN within BB. */
/* Variant of df_pattern_emit_after using emit_jump_insn_after.  */
2824 df_jump_pattern_emit_after (df, pattern, bb, insn)
2832   ret_insn = emit_jump_insn_after (pattern, insn);
2833   if (ret_insn == insn)
2836   df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2841 /* Move INSN within BB before BEFORE_INSN within BEFORE_BB.
2843    This function should only be used to move loop invariant insns
2844    out of a loop where it has been proven that the def-use info
2845    will still be valid. */
/* Retargets the bb field of every def and use of INSN to BEFORE_BB,
   then re-emits the insn before BEFORE_INSN.  Lifetime counters are
   NOT updated (see comment below).  */
2847 df_insn_move_before (df, bb, insn, before_bb, before_insn)
2851      basic_block before_bb;
2854   struct df_link *link;
2858     return df_pattern_emit_before (df, insn, before_bb, before_insn);
2860   uid = INSN_UID (insn);
2862   /* Change bb for all df defined and used by this insn. */
2863   for (link = df->insns[uid].defs; link; link = link->next)
2864     DF_REF_BB (link->ref) = before_bb;
2865   for (link = df->insns[uid].uses; link; link = link->next)
2866     DF_REF_BB (link->ref) = before_bb;
2868   /* The lifetimes of the registers used in this insn will be reduced
2869      while the lifetimes of the registers defined in this insn
2870      are likely to be increased. */
2872   /* ???? Perhaps all the insns moved should be stored on a list
2873      which df_analyse removes when it recalculates data flow. */
2875   return emit_insn_before (insn, before_insn);
2878 /* Functions to query dataflow information. */
2882 df_insn_regno_def_p (df, bb, insn, regno)
2884 basic_block bb ATTRIBUTE_UNUSED;
2889 struct df_link *link;
2891 uid = INSN_UID (insn);
2893 for (link = df->insns[uid].defs; link; link = link->next)
2895 struct ref *def = link->ref;
2897 if (DF_REF_REGNO (def) == regno)
/* Return nonzero if DEF is the sole def reaching every use on its
   def-use chain, i.e., each such use's use-def chain contains no
   other def.  Requires valid du/ud chains.  */
2906 df_def_dominates_all_uses_p (df, def)
2907      struct df *df ATTRIBUTE_UNUSED;
2910   struct df_link *du_link;
2912   /* Follow def-use chain to find all the uses of this def. */
2913   for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
2915       struct ref *use = du_link->ref;
2916       struct df_link *ud_link;
2918       /* Follow use-def chain to check all the defs for this use. */
2919       for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
2920         if (ud_link->ref != def)
/* Return nonzero if every def in INSN dominates all its uses
   (see df_def_dominates_all_uses_p).  BB is unused.  */
2928 df_insn_dominates_all_uses_p (df, bb, insn)
2930      basic_block bb ATTRIBUTE_UNUSED;
2934   struct df_link *link;
2936   uid = INSN_UID (insn);
2938   for (link = df->insns[uid].defs; link; link = link->next)
2940       struct ref *def = link->ref;
2942       if (! df_def_dominates_all_uses_p (df, def))
2950 /* Return nonzero if all DF dominates all the uses within the bitmap
/* Like df_def_dominates_all_uses_p but only uses whose block is in
   BLOCKS are examined; uses outside BLOCKS are ignored (e.g. a reg
   live past a loop exit).  */
2953 df_def_dominates_uses_p (df, def, blocks)
2954      struct df *df ATTRIBUTE_UNUSED;
2958   struct df_link *du_link;
2960   /* Follow def-use chain to find all the uses of this def. */
2961   for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
2963       struct ref *use = du_link->ref;
2964       struct df_link *ud_link;
2966       /* Only worry about the uses within BLOCKS. For example,
2967          consider a register defined within a loop that is live at the
2969       if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))
2971           /* Follow use-def chain to check all the defs for this use. */
2972           for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
2973             if (ud_link->ref != def)
/* Return nonzero if each def made by INSN within the blocks in bitmap
   BLOCKS dominates all of its corresponding uses within BLOCKS.
   Defs made in blocks outside BLOCKS are not considered.  */
2984 df_insn_dominates_uses_p (df, bb, insn, blocks)
2986 basic_block bb ATTRIBUTE_UNUSED;
2991 struct df_link *link;
2993 uid = INSN_UID (insn);
2995 for (link = df->insns[uid].defs; link; link = link->next)
2997 struct ref *def = link->ref;
2999 /* Only consider the defs within BLOCKS. */
3000 if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
3001 && ! df_def_dominates_uses_p (df, def, blocks))
/* Return the single basic block that register REGNO is referenced in,
   or NULL if it is referenced in multiple basic blocks (or not
   referenced at all, in which case both blocks below are NULL).  */
3011 df_regno_bb (df, regno)
3015 struct df_link *defs = df->regs[regno].defs;
3016 struct df_link *uses = df->regs[regno].uses;
3017 struct ref *def = defs ? defs->ref : 0;
3018 struct ref *use = uses ? uses->ref : 0;
3019 basic_block bb_def = def ? DF_REF_BB (def) : 0;
3020 basic_block bb_use = use ? DF_REF_BB (use) : 0;
3022 /* Compare blocks of first def and last use. ???? FIXME. What if
3023 the reg-def and reg-use lists are not correctly ordered. */
3024 return bb_def == bb_use ? bb_def : 0;
3028 /* Return nonzero if REG used in multiple basic blocks. */
/* NOTE(review): df_regno_bb returns NULL precisely when REGNO is
   referenced in multiple blocks, so `!= 0' makes this return nonzero
   when REG is local to a single block -- the opposite of the comment
   above.  Confirm against callers before changing either the comment
   or the test.  */
3030 df_reg_global_p (df, reg)
3034 return df_regno_bb (df, REGNO (reg)) != 0;
3038 /* Return total lifetime (in insns) of REG. */
/* Simple accessor: reads the lifetime field computed during the
   register-info pass of df_analyse.  */
3040 df_reg_lifetime (df, reg)
3044 return df->regs[REGNO (reg)].lifetime;
3048 /* Return nonzero if REG live at start of BB. */
/* Consults the live-register (LR) problem's per-block IN set; the LR
   bitmaps must have been computed by df_analyse before calling.  */
3050 df_bb_reg_live_start_p (df, bb, reg)
3051 struct df *df ATTRIBUTE_UNUSED;
3055 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3057 #ifdef ENABLE_CHECKING
/* Aborts (elided here) if the LR info was never computed.  */
3058 if (! bb_info->lr_in)
3062 return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
3066 /* Return nonzero if REG live at end of BB. */
/* NOTE(review): the ENABLE_CHECKING guard tests lr_in but the result
   is read from lr_out.  Presumably both are allocated together so the
   check still catches "LR never computed" -- confirm before tightening
   it to lr_out.  */
3068 df_bb_reg_live_end_p (df, bb, reg)
3069 struct df *df ATTRIBUTE_UNUSED;
3073 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3075 #ifdef ENABLE_CHECKING
3076 if (! bb_info->lr_in)
3080 return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
3084 /* Return -1 if life of REG1 before life of REG2, 1 if life of REG1
3085 after life of REG2, or 0, if the lives overlap. */
/* Both registers must be local to BB (checked below); ordering is
   decided by comparing insn LUIDs of first-def vs. last-use.
   NOTE(review): the *_find helpers can return NULL when no def/use
   exists in BB; DF_REF_INSN on that result would dereference NULL --
   presumably the locality check rules this out, but confirm.  */
3087 df_bb_regs_lives_compare (df, bb, reg1, reg2)
3093 unsigned int regno1 = REGNO (reg1);
3094 unsigned int regno2 = REGNO (reg2);
3101 /* The regs must be local to BB. */
3102 if (df_regno_bb (df, regno1) != bb
3103 || df_regno_bb (df, regno2) != bb)
3106 def2 = df_bb_regno_first_def_find (df, bb, regno2);
3107 use1 = df_bb_regno_last_use_find (df, bb, regno1);
/* If REG2's first def comes after REG1's last use, REG1 dies first.  */
3109 if (DF_INSN_LUID (df, DF_REF_INSN (def2))
3110 > DF_INSN_LUID (df, DF_REF_INSN (use1)))
3113 def1 = df_bb_regno_first_def_find (df, bb, regno1);
3114 use2 = df_bb_regno_last_use_find (df, bb, regno2);
/* Symmetric test for REG2 dying before REG1 is (re)defined.  */
3116 if (DF_INSN_LUID (df, DF_REF_INSN (def1))
3117 > DF_INSN_LUID (df, DF_REF_INSN (use2)))
3124 /* Return last use of REGNO within BB. */
/* Returns the first matching use on the reg-use chain, or (elided
   here) NULL when BB contains no use of REGNO.  */
3126 df_bb_regno_last_use_find (df, bb, regno)
3128 basic_block bb ATTRIBUTE_UNUSED;
3131 struct df_link *link;
3133 /* This assumes that the reg-use list is ordered such that for any
3134 BB, the last use is found first. However, since the BBs are not
3135 ordered, the first use in the chain is not necessarily the last
3136 use in the function. */
3137 for (link = df->regs[regno].uses; link; link = link->next)
3139 struct ref *use = link->ref;
3141 if (DF_REF_BB (use) == bb)
3148 /* Return first def of REGNO within BB. */
/* Returns the first matching def on the reg-def chain, or (elided
   here) NULL when BB contains no def of REGNO.  */
3150 df_bb_regno_first_def_find (df, bb, regno)
3152 basic_block bb ATTRIBUTE_UNUSED;
3155 struct df_link *link;
3157 /* This assumes that the reg-def list is ordered such that for any
3158 BB, the first def is found first. However, since the BBs are not
3159 ordered, the first def in the chain is not necessarily the first
3160 def in the function. */
3161 for (link = df->regs[regno].defs; link; link = link->next)
3163 struct ref *def = link->ref;
3165 if (DF_REF_BB (def) == bb)
/* Return a use of REGNO inside INSN within BB.  NOTE(review): despite
   the `last_use' name, this returns the first matching use on INSN's
   use chain; within a single insn the distinction rarely matters, but
   the original comment said "first use" -- confirm intent.  */
3174 df_bb_insn_regno_last_use_find (df, bb, insn, regno)
3176 basic_block bb ATTRIBUTE_UNUSED;
3181 struct df_link *link;
3183 uid = INSN_UID (insn);
3185 for (link = df->insns[uid].uses; link; link = link->next)
3187 struct ref *use = link->ref;
3189 if (DF_REF_REGNO (use) == regno)
3197 /* Return first def of REGNO inside INSN within BB. */
/* Scans INSN's def chain and returns the first def of REGNO; the
   not-found return (elided here) is NULL.  */
3199 df_bb_insn_regno_first_def_find (df, bb, insn, regno)
3201 basic_block bb ATTRIBUTE_UNUSED;
3206 struct df_link *link;
3208 uid = INSN_UID (insn);
3210 for (link = df->insns[uid].defs; link; link = link->next)
3212 struct ref *def = link->ref;
3214 if (DF_REF_REGNO (def) == regno)
3222 /* Return insn using REG if the BB contains only a single
3223 use and def of REG. */
/* Starting from INSN's def of REG, follows the def-use chain; the
   elided branches handle a dead def and reject multiple uses.  */
3225 df_bb_single_def_use_insn_find (df, bb, insn, reg)
3233 struct df_link *du_link;
3235 def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));
3240 du_link = DF_REF_CHAIN (def);
3247 /* Check if def is dead. */
3251 /* Check for multiple uses. */
3255 return DF_REF_INSN (use);
3258 /* Functions for debugging/dumping dataflow information. */
3261 /* Dump a def-use or use-def chain for REF to FILE. */
/* Each element prints as `d<id>' for a def or `u<id>' for a use,
   wrapped in braces.  */
3263 df_chain_dump (link, file)
3264 struct df_link *link;
3267 fprintf (file, "{ ");
3268 for (; link; link = link->next)
3270 fprintf (file, "%c%d ",
3271 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3272 DF_REF_ID (link->ref));
3274 fprintf (file, "}");
3278 /* Dump a chain of refs with the associated regno. */
/* Like df_chain_dump, but appends each ref's register number in
   parentheses: e.g. `d3(12)'.  */
3280 df_chain_dump_regno (link, file)
3281 struct df_link *link;
3284 fprintf (file, "{ ");
3285 for (; link; link = link->next)
3287 fprintf (file, "%c%d(%d) ",
3288 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3289 DF_REF_ID (link->ref),
3290 DF_REF_REGNO (link->ref));
3292 fprintf (file, "}");
3296 /* Dump dataflow info. */
/* FLAGS selects which problems to dump (DF_RD, DF_RU, DF_LR, the
   chain flags and DF_REG_INFO); FILE receives the text.  Each section
   below is guarded (guards partially elided in this listing) by the
   corresponding flag.  */
3298 df_dump (df, flags, file)
3309 fprintf (file, "\nDataflow summary:\n");
3310 fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
3311 df->n_regs, df->n_defs, df->n_uses, df->n_bbs);
/* Reaching definitions: per-block in/gen/kill/out bitmaps.  */
3317 fprintf (file, "Reaching defs:\n");
3320 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3322 if (! bb_info->rd_in)
3325 fprintf (file, "bb %d in \t", bb->index);
3326 dump_bitmap (file, bb_info->rd_in);
3327 fprintf (file, "bb %d gen \t", bb->index);
3328 dump_bitmap (file, bb_info->rd_gen);
3329 fprintf (file, "bb %d kill\t", bb->index);
3330 dump_bitmap (file, bb_info->rd_kill);
3331 fprintf (file, "bb %d out \t", bb->index);
3332 dump_bitmap (file, bb_info->rd_out);
/* Use-def chains: one line per def, then its chain of uses.  */
3336 if (flags & DF_UD_CHAIN)
3338 fprintf (file, "Use-def chains:\n");
3339 for (j = 0; j < df->n_defs; j++)
3343 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3344 j, DF_REF_BBNO (df->defs[j]),
3345 DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
3346 DF_REF_INSN_UID (df->defs[j]),
3347 DF_REF_REGNO (df->defs[j]));
3348 if (df->defs[j]->flags & DF_REF_READ_WRITE)
3349 fprintf (file, "read/write ");
3350 df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
3351 fprintf (file, "\n");
/* Reaching uses: per-block in/gen/kill/out bitmaps.  */
3358 fprintf (file, "Reaching uses:\n");
3361 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3363 if (! bb_info->ru_in)
3366 fprintf (file, "bb %d in \t", bb->index);
3367 dump_bitmap (file, bb_info->ru_in);
3368 fprintf (file, "bb %d gen \t", bb->index);
3369 dump_bitmap (file, bb_info->ru_gen);
3370 fprintf (file, "bb %d kill\t", bb->index);
3371 dump_bitmap (file, bb_info->ru_kill);
3372 fprintf (file, "bb %d out \t", bb->index);
3373 dump_bitmap (file, bb_info->ru_out);
/* Def-use chains: one line per use, then its chain of defs.  */
3377 if (flags & DF_DU_CHAIN)
3379 fprintf (file, "Def-use chains:\n");
3380 for (j = 0; j < df->n_uses; j++)
3384 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3385 j, DF_REF_BBNO (df->uses[j]),
3386 DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
3387 DF_REF_INSN_UID (df->uses[j]),
3388 DF_REF_REGNO (df->uses[j]));
3389 if (df->uses[j]->flags & DF_REF_READ_WRITE)
3390 fprintf (file, "read/write ");
3391 df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
3392 fprintf (file, "\n");
/* Live registers: per-block in/use/def/out bitmaps.  */
3399 fprintf (file, "Live regs:\n");
3402 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3404 if (! bb_info->lr_in)
3407 fprintf (file, "bb %d in \t", bb->index);
3408 dump_bitmap (file, bb_info->lr_in);
3409 fprintf (file, "bb %d use \t", bb->index);
3410 dump_bitmap (file, bb_info->lr_use);
3411 fprintf (file, "bb %d def \t", bb->index);
3412 dump_bitmap (file, bb_info->lr_def);
3413 fprintf (file, "bb %d out \t", bb->index);
3414 dump_bitmap (file, bb_info->lr_out);
/* Per-register summary, skipping registers with no recorded info.  */
3418 if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))
3420 struct reg_info *reg_info = df->regs;
3422 fprintf (file, "Register info:\n");
3423 for (j = 0; j < df->n_regs; j++)
3425 if (((flags & DF_REG_INFO)
3426 && (reg_info[j].n_uses || reg_info[j].n_defs))
3427 || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
3428 || ((flags & DF_RU_CHAIN) && reg_info[j].uses))
3430 fprintf (file, "reg %d", j);
3431 if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))
3433 basic_block bb = df_regno_bb (df, j);
/* `bb ?' means the register spans multiple blocks.  */
3436 fprintf (file, " bb %d", bb->index);
3438 fprintf (file, " bb ?");
3440 if (flags & DF_REG_INFO)
3442 fprintf (file, " life %d", reg_info[j].lifetime);
3445 if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))
3447 fprintf (file, " defs ");
3448 if (flags & DF_REG_INFO)
3449 fprintf (file, "%d ", reg_info[j].n_defs);
3450 if (flags & DF_RD_CHAIN)
3451 df_chain_dump (reg_info[j].defs, file);
3454 if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))
3456 fprintf (file, " uses ");
3457 if (flags & DF_REG_INFO)
3458 fprintf (file, "%d ", reg_info[j].n_uses);
3459 if (flags & DF_RU_CHAIN)
3460 df_chain_dump (reg_info[j].uses, file);
3463 fprintf (file, "\n");
3467 fprintf (file, "\n");
/* Dump INSN's def and use chains (by ref id) to FILE, prefixed with
   its uid, containing block and luid.  Insns outside DF's recorded
   range are skipped (guard body elided in this listing).  */
3472 df_insn_debug (df, insn, file)
3480 uid = INSN_UID (insn);
3481 if (uid >= df->insn_size)
/* Derive the block index from whichever ref chain is non-empty.  */
3484 if (df->insns[uid].defs)
3485 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3486 else if (df->insns[uid].uses)
3487 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3491 fprintf (file, "insn %d bb %d luid %d defs ",
3492 uid, bbi, DF_INSN_LUID (df, insn));
3493 df_chain_dump (df->insns[uid].defs, file);
3494 fprintf (file, " uses ");
3495 df_chain_dump (df->insns[uid].uses, file);
3496 fprintf (file, "\n");
/* Same as df_insn_debug, but the chains are printed with register
   numbers attached (see df_chain_dump_regno).  */
3501 df_insn_debug_regno (df, insn, file)
3509 uid = INSN_UID (insn);
3510 if (uid >= df->insn_size)
/* Derive the block index from whichever ref chain is non-empty.  */
3513 if (df->insns[uid].defs)
3514 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3515 else if (df->insns[uid].uses)
3516 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3520 fprintf (file, "insn %d bb %d luid %d defs ",
3521 uid, bbi, DF_INSN_LUID (df, insn));
3522 df_chain_dump_regno (df->insns[uid].defs, file);
3523 fprintf (file, " uses ");
3524 df_chain_dump_regno (df->insns[uid].uses, file);
3525 fprintf (file, "\n");
/* Dump register REGNO's lifetime and its def and use ref chains to
   FILE.  Out-of-range REGNO is ignored (guard body elided).  */
3530 df_regno_debug (df, regno, file)
3535 if (regno >= df->reg_size)
3538 fprintf (file, "reg %d life %d defs ",
3539 regno, df->regs[regno].lifetime);
3540 df_chain_dump (df->regs[regno].defs, file);
3541 fprintf (file, " uses ");
3542 df_chain_dump (df->regs[regno].uses, file);
3543 fprintf (file, "\n");
/* Dump a single ref REF (def or use) to FILE: its id, register,
   block, luid, insn uid and its def-use/use-def chain.  */
3548 df_ref_debug (df, ref, file)
3553 fprintf (file, "%c%d ",
3554 DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
3556 fprintf (file, "reg %d bb %d luid %d insn %d chain ",
3559 DF_INSN_LUID (df, DF_REF_INSN (ref)),
3560 INSN_UID (DF_REF_INSN (ref)));
3561 df_chain_dump (DF_REF_CHAIN (ref), file);
3562 fprintf (file, "\n");
3565 /* Functions for debugging from GDB. */
/* Each wrapper below prints to stderr using the global `ddf'
   (presumably the most recently analysed df instance -- confirm
   against where ddf is set).  They exist to be called by hand from
   the debugger.  */
3568 debug_df_insn (insn)
3571 df_insn_debug (ddf, insn, stderr);
3580 df_regno_debug (ddf, REGNO (reg), stderr);
3585 debug_df_regno (regno)
3588 df_regno_debug (ddf, regno, stderr);
3596 df_ref_debug (ddf, ref, stderr);
3601 debug_df_defno (defno)
3604 df_ref_debug (ddf, ddf->defs[defno], stderr);
3609 debug_df_useno (defno)
3612 df_ref_debug (ddf, ddf->uses[defno], stderr);
3617 debug_df_chain (link)
3618 struct df_link *link;
3620 df_chain_dump (link, stderr);
3621 fputc ('\n', stderr);
3625 /* Hybrid search algorithm from "Implementation Techniques for
3626 Efficient Data-Flow Analysis of Large Programs". */
3628 hybrid_search_bitmap (block, in, out, gen, kill, dir,
3629 conf_op, transfun, visited, pending,
3632 bitmap *in, *out, *gen, *kill;
3633 enum df_flow_dir dir;
3634 enum df_confluence_op conf_op;
3635 transfer_function_bitmap transfun;
3641 int i = block->index;
3643 basic_block bb = block;
3645 SET_BIT (visited, block->index);
3646 if (TEST_BIT (pending, block->index))
3648 if (dir == DF_FORWARD)
3650 /* Calculate <conf_op> of predecessor_outs. */
3651 bitmap_zero (in[i]);
3652 for (e = bb->pred; e != 0; e = e->pred_next)
3654 if (e->src == ENTRY_BLOCK_PTR)
3659 bitmap_a_or_b (in[i], in[i], out[e->src->index]);
3661 case DF_INTERSECTION:
3662 bitmap_a_and_b (in[i], in[i], out[e->src->index]);
3669 /* Calculate <conf_op> of successor ins. */
3670 bitmap_zero (out[i]);
3671 for (e = bb->succ; e != 0; e = e->succ_next)
3673 if (e->dest == EXIT_BLOCK_PTR)
3678 bitmap_a_or_b (out[i], out[i], in[e->dest->index]);
3680 case DF_INTERSECTION:
3681 bitmap_a_and_b (out[i], out[i], in[e->dest->index]);
3687 (*transfun)(i, &changed, in[i], out[i], gen[i], kill[i], data);
3688 RESET_BIT (pending, i);
3691 if (dir == DF_FORWARD)
3693 for (e = bb->succ; e != 0; e = e->succ_next)
3695 if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3697 SET_BIT (pending, e->dest->index);
3702 for (e = bb->pred; e != 0; e = e->pred_next)
3704 if (e->src == ENTRY_BLOCK_PTR || e->dest->index == i)
3706 SET_BIT (pending, e->src->index);
3711 if (dir == DF_FORWARD)
3713 for (e = bb->succ; e != 0; e = e->succ_next)
3715 if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3717 if (!TEST_BIT (visited, e->dest->index))
3718 hybrid_search_bitmap (e->dest, in, out, gen, kill, dir,
3719 conf_op, transfun, visited, pending,
3725 for (e = bb->pred; e != 0; e = e->pred_next)
3727 if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3729 if (!TEST_BIT (visited, e->src->index))
3730 hybrid_search_bitmap (e->src, in, out, gen, kill, dir,
3731 conf_op, transfun, visited, pending,
3738 /* Hybrid search for sbitmaps, rather than bitmaps. */
3740 hybrid_search_sbitmap (block, in, out, gen, kill, dir,
3741 conf_op, transfun, visited, pending,
3744 sbitmap *in, *out, *gen, *kill;
3745 enum df_flow_dir dir;
3746 enum df_confluence_op conf_op;
3747 transfer_function_sbitmap transfun;
3753 int i = block->index;
3755 basic_block bb = block;
3757 SET_BIT (visited, block->index);
3758 if (TEST_BIT (pending, block->index))
3760 if (dir == DF_FORWARD)
3762 /* Calculate <conf_op> of predecessor_outs. */
3763 sbitmap_zero (in[i]);
3764 for (e = bb->pred; e != 0; e = e->pred_next)
3766 if (e->src == ENTRY_BLOCK_PTR)
3771 sbitmap_a_or_b (in[i], in[i], out[e->src->index]);
3773 case DF_INTERSECTION:
3774 sbitmap_a_and_b (in[i], in[i], out[e->src->index]);
3781 /* Calculate <conf_op> of successor ins. */
3782 sbitmap_zero (out[i]);
3783 for (e = bb->succ; e != 0; e = e->succ_next)
3785 if (e->dest == EXIT_BLOCK_PTR)
3790 sbitmap_a_or_b (out[i], out[i], in[e->dest->index]);
3792 case DF_INTERSECTION:
3793 sbitmap_a_and_b (out[i], out[i], in[e->dest->index]);
3799 (*transfun)(i, &changed, in[i], out[i], gen[i], kill[i], data);
3800 RESET_BIT (pending, i);
3803 if (dir == DF_FORWARD)
3805 for (e = bb->succ; e != 0; e = e->succ_next)
3807 if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3809 SET_BIT (pending, e->dest->index);
3814 for (e = bb->pred; e != 0; e = e->pred_next)
3816 if (e->src == ENTRY_BLOCK_PTR || e->dest->index == i)
3818 SET_BIT (pending, e->src->index);
3823 if (dir == DF_FORWARD)
3825 for (e = bb->succ; e != 0; e = e->succ_next)
3827 if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
3829 if (!TEST_BIT (visited, e->dest->index))
3830 hybrid_search_sbitmap (e->dest, in, out, gen, kill, dir,
3831 conf_op, transfun, visited, pending,
3837 for (e = bb->pred; e != 0; e = e->pred_next)
3839 if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
3841 if (!TEST_BIT (visited, e->src->index))
3842 hybrid_search_sbitmap (e->src, in, out, gen, kill, dir,
3843 conf_op, transfun, visited, pending,
3852 in, out = Filled in by function.
3853 blocks = Blocks to analyze.
3854 dir = Dataflow direction.
3855 conf_op = Confluence operation.
3856 transfun = Transfer function.
3857 order = Order to iterate in. (Should map block numbers -> order)
3858 data = Whatever you want. It's passed to the transfer function.
3860 This function will perform iterative bitvector dataflow, producing
3861 the in and out sets. Even if you only want to perform it for a
3862 small number of blocks, the vectors for in and out must be large
3863 enough for *all* blocks, because changing one block might affect
3864 others. However, it'll only put what you say to analyze on the
   initial worklist.
3867 For forward problems, you probably want to pass in a mapping of
3868 block number to rc_order (like df->inverse_rc_map).
3871 iterative_dataflow_sbitmap (in, out, gen, kill, blocks,
3872 dir, conf_op, transfun, order, data)
3873 sbitmap *in, *out, *gen, *kill;
3875 enum df_flow_dir dir;
3876 enum df_confluence_op conf_op;
3877 transfer_function_sbitmap transfun;
3884 sbitmap visited, pending;
3886 pending = sbitmap_alloc (last_basic_block);
3887 visited = sbitmap_alloc (last_basic_block);
3888 sbitmap_zero (pending);
3889 sbitmap_zero (visited);
3890 worklist = fibheap_new ();
/* Seed the worklist with every requested block, prioritized by ORDER,
   and initialize the seed set from GEN (out for forward, in for
   backward problems).  */
3892 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3894 fibheap_insert (worklist, order[i], (void *) (size_t) i);
3895 SET_BIT (pending, i);
3896 if (dir == DF_FORWARD)
3897 sbitmap_copy (out[i], gen[i]);
3899 sbitmap_copy (in[i], gen[i]);
/* Outer loop: keep restarting the depth-first hybrid search until no
   block remains pending (i.e. a fixed point is reached).  */
3902 while (sbitmap_first_set_bit (pending) != -1)
3904 while (!fibheap_empty (worklist))
3906 i = (size_t) fibheap_extract_min (worklist);
3907 bb = BASIC_BLOCK (i);
3908 if (!TEST_BIT (visited, bb->index))
3909 hybrid_search_sbitmap (bb, in, out, gen, kill, dir,
3910 conf_op, transfun, visited, pending, data);
/* Still-pending blocks: refill the worklist and clear VISITED so the
   next pass can revisit them.  */
3913 if (sbitmap_first_set_bit (pending) != -1)
3915 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3917 fibheap_insert (worklist, order[i], (void *) (size_t) i);
3919 sbitmap_zero (visited);
3927 sbitmap_free (pending);
3928 sbitmap_free (visited);
3929 fibheap_delete (worklist);
3933 /* Exactly the same as iterative_dataflow_sbitmap, except it works on
   bitmaps instead of sbitmaps; see that function's comment for the
   parameter descriptions.  */
3936 iterative_dataflow_bitmap (in, out, gen, kill, blocks,
3937 dir, conf_op, transfun, order, data)
3938 bitmap *in, *out, *gen, *kill;
3940 enum df_flow_dir dir;
3941 enum df_confluence_op conf_op;
3942 transfer_function_bitmap transfun;
3949 sbitmap visited, pending;
3951 pending = sbitmap_alloc (last_basic_block);
3952 visited = sbitmap_alloc (last_basic_block);
3953 sbitmap_zero (pending);
3954 sbitmap_zero (visited);
3955 worklist = fibheap_new ();
/* Seed the worklist and the initial out (forward) or in (backward)
   sets from GEN, exactly as in the sbitmap variant.  */
3957 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3959 fibheap_insert (worklist, order[i], (void *) (size_t) i);
3960 SET_BIT (pending, i);
3961 if (dir == DF_FORWARD)
3962 bitmap_copy (out[i], gen[i]);
3964 bitmap_copy (in[i], gen[i]);
/* Iterate to a fixed point, restarting the hybrid search while any
   block is still pending.  */
3967 while (sbitmap_first_set_bit (pending) != -1)
3969 while (!fibheap_empty (worklist))
3971 i = (size_t) fibheap_extract_min (worklist);
3972 bb = BASIC_BLOCK (i);
3973 if (!TEST_BIT (visited, bb->index))
3974 hybrid_search_bitmap (bb, in, out, gen, kill, dir,
3975 conf_op, transfun, visited, pending, data);
3978 if (sbitmap_first_set_bit (pending) != -1)
3980 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
3982 fibheap_insert (worklist, order[i], (void *) (size_t) i);
3984 sbitmap_zero (visited);
3991 sbitmap_free (pending);
3992 sbitmap_free (visited);
3993 fibheap_delete (worklist);