1 /* Dataflow support routines.
2 Copyright (C) 1999, 2000, 2001 Free Software Foundation, Inc.
3 Contributed by Michael P. Hayes (m.hayes@elec.canterbury.ac.nz,
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
26 This file provides some dataflow routines for computing reaching defs,
27 upward exposed uses, live variables, def-use chains, and use-def
28 chains. The global dataflow is performed using simple iterative
29 methods with a worklist and could be sped up by ordering the blocks
30 with a depth first search order.
32 A `struct ref' data structure (ref) is allocated for every register
33 reference (def or use) and this records the insn and bb the ref is
34 found within. The refs are linked together in chains of uses and defs
35 for each insn and for each register. Each ref also has a chain field
36 that links all the use refs for a def or all the def refs for a use.
37 This is used to create use-def or def-use chains.
42 Here's an example of using the dataflow routines.
48 df_analyse (df, 0, DF_ALL);
50 df_dump (df, DF_ALL, stderr);
55 df_init simply creates a poor man's object (df) that needs to be
56 passed to all the dataflow routines. df_finish destroys this
57 object and frees up any allocated memory.
59 df_analyse performs the following:
61 1. Records defs and uses by scanning the insns in each basic block
62 or by scanning the insns queued by df_insn_modify.
63 2. Links defs and uses into insn-def and insn-use chains.
64 3. Links defs and uses into reg-def and reg-use chains.
65 4. Assigns LUIDs to each insn (for modified blocks).
66 5. Calculates local reaching definitions.
67 6. Calculates global reaching definitions.
68 7. Creates use-def chains.
69 8. Calculates local reaching uses (upwards exposed uses).
70 9. Calculates global reaching uses.
71 10. Creates def-use chains.
72 11. Calculates local live registers.
73 12. Calculates global live registers.
74 13. Calculates register lifetimes and determines local registers.
79 Note that the dataflow information is not updated for every newly
80 deleted or created insn. If the dataflow information requires
81 updating then all the changed, new, or deleted insns needs to be
82 marked with df_insn_modify (or df_insns_modify) either directly or
83 indirectly (say through calling df_insn_delete). df_insn_modify
84 marks all the modified insns to get processed the next time df_analyse
87 Beware that tinkering with insns may invalidate the dataflow information.
88 The philosophy behind these routines is that once the dataflow
89 information has been gathered, the user should store what they require
90 before they tinker with any insn. Once a reg is replaced, for example,
91 then the reg-def/reg-use chains will point to the wrong place. Once a
92 whole lot of changes have been made, df_analyse can be called again
93 to update the dataflow information. Currently, this is not very smart
94 with regard to propagating changes to the dataflow so it should not
100 The basic object is a REF (reference) and this may either be a DEF
101 (definition) or a USE of a register.
103 These are linked into a variety of lists; namely reg-def, reg-use,
104 insn-def, insn-use, def-use, and use-def lists. For example,
105 the reg-def lists contain all the refs that define a given register
106 while the insn-use lists contain all the refs used by an insn.
108 Note that the reg-def and reg-use chains are generally short (except for the
109 hard registers) and thus it is much faster to search these chains
110 rather than searching the def or use bitmaps.
112 If the insns are in SSA form then the reg-def and use-def lists
113 should only contain the single defining ref.
117 1) Incremental dataflow analysis.
119 Note that if a loop invariant insn is hoisted (or sunk), we do not
120 need to change the def-use or use-def chains. All we have to do is to
121 change the bb field for all the associated defs and uses and to
122 renumber the LUIDs for the original and new basic blocks of the insn.
124 When shadowing loop mems we create new uses and defs for new pseudos
125 so we do not affect the existing dataflow information.
127 My current strategy is to queue up all modified, created, or deleted
128 insns so when df_analyse is called we can easily determine all the new
129 or deleted refs. Currently the global dataflow information is
130 recomputed from scratch but this could be propagated more efficiently.
132 2) Improved global data flow computation using depth first search.
134 3) Reduced memory requirements.
136 We could operate a pool of ref structures. When a ref is deleted it
137 gets returned to the pool (say by linking on to a chain of free refs).
138 This will require a pair of bitmaps for defs and uses so that we can
139 tell which ones have been changed. Alternatively, we could
140 periodically squeeze the def and use tables and associated bitmaps and
141 renumber the def and use ids.
143 4) Ordering of reg-def and reg-use lists.
145 Should the first entry in the def list be the first def (within a BB)?
146 Similarly, should the first entry in the use list be the last use
149 5) Working with a sub-CFG.
151 Often the whole CFG does not need to be analysed, for example,
152 when optimising a loop, only certain registers are of interest.
153 Perhaps there should be a bitmap argument to df_analyse to specify
154 which registers should be analysed? */
156 #define HANDLE_SUBREG
162 #include "insn-config.h"
164 #include "function.h"
167 #include "hard-reg-set.h"
168 #include "basic-block.h"
173 #define FOR_ALL_BBS(BB, CODE) \
176 for (node_ = 0; node_ < n_basic_blocks; node_++) \
177 {(BB) = BASIC_BLOCK (node_); CODE;};} while (0)
179 #define FOR_EACH_BB_IN_BITMAP(BITMAP, MIN, BB, CODE) \
181 unsigned int node_; \
182 EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_, \
183 {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
185 #define FOR_EACH_BB_IN_BITMAP_REV(BITMAP, MIN, BB, CODE) \
187 unsigned int node_; \
188 EXECUTE_IF_SET_IN_BITMAP_REV (BITMAP, node_, \
189 {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
191 #define FOR_EACH_BB_IN_SBITMAP(BITMAP, MIN, BB, CODE) \
193 unsigned int node_; \
194 EXECUTE_IF_SET_IN_SBITMAP (BITMAP, MIN, node_, \
195 {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
197 #define obstack_chunk_alloc xmalloc
198 #define obstack_chunk_free free
200 static struct obstack df_ref_obstack;
201 static struct df *ddf;
203 static void df_reg_table_realloc PARAMS((struct df *, int));
205 static void df_def_table_realloc PARAMS((struct df *, int));
207 static void df_insn_table_realloc PARAMS((struct df *, int));
208 static void df_bitmaps_alloc PARAMS((struct df *, int));
209 static void df_bitmaps_free PARAMS((struct df *, int));
210 static void df_free PARAMS((struct df *));
211 static void df_alloc PARAMS((struct df *, int));
213 static rtx df_reg_clobber_gen PARAMS((unsigned int));
214 static rtx df_reg_use_gen PARAMS((unsigned int));
216 static inline struct df_link *df_link_create PARAMS((struct ref *,
218 static struct df_link *df_ref_unlink PARAMS((struct df_link **, struct ref *));
219 static void df_def_unlink PARAMS((struct df *, struct ref *));
220 static void df_use_unlink PARAMS((struct df *, struct ref *));
221 static void df_insn_refs_unlink PARAMS ((struct df *, basic_block, rtx));
223 static void df_bb_refs_unlink PARAMS ((struct df *, basic_block));
224 static void df_refs_unlink PARAMS ((struct df *, bitmap));
227 static struct ref *df_ref_create PARAMS((struct df *,
228 rtx, rtx *, basic_block, rtx,
230 static void df_ref_record_1 PARAMS((struct df *, rtx, rtx *,
231 basic_block, rtx, enum df_ref_type));
232 static void df_ref_record PARAMS((struct df *, rtx, rtx *,
233 basic_block bb, rtx, enum df_ref_type));
234 static void df_def_record_1 PARAMS((struct df *, rtx, basic_block, rtx));
235 static void df_defs_record PARAMS((struct df *, rtx, basic_block, rtx));
236 static void df_uses_record PARAMS((struct df *, rtx *,
237 enum df_ref_type, basic_block, rtx));
238 static void df_insn_refs_record PARAMS((struct df *, basic_block, rtx));
239 static void df_bb_refs_record PARAMS((struct df *, basic_block));
240 static void df_refs_record PARAMS((struct df *, bitmap));
242 static int df_visit_next_rc PARAMS ((struct df *, sbitmap));
243 static int df_visit_next_rts PARAMS ((struct df *, sbitmap));
244 static void df_bb_reg_def_chain_create PARAMS((struct df *, basic_block));
245 static void df_reg_def_chain_create PARAMS((struct df *, bitmap));
246 static void df_bb_reg_use_chain_create PARAMS((struct df *, basic_block));
247 static void df_reg_use_chain_create PARAMS((struct df *, bitmap));
248 static void df_bb_du_chain_create PARAMS((struct df *, basic_block, bitmap));
249 static void df_du_chain_create PARAMS((struct df *, bitmap));
250 static void df_bb_ud_chain_create PARAMS((struct df *, basic_block));
251 static void df_ud_chain_create PARAMS((struct df *, bitmap));
252 static void df_rd_global_compute PARAMS((struct df *, bitmap));
253 static void df_ru_global_compute PARAMS((struct df *, bitmap));
254 static void df_lr_global_compute PARAMS((struct df *, bitmap));
255 static void df_bb_rd_local_compute PARAMS((struct df *, basic_block));
256 static void df_rd_local_compute PARAMS((struct df *, bitmap));
257 static void df_bb_ru_local_compute PARAMS((struct df *, basic_block));
258 static void df_ru_local_compute PARAMS((struct df *, bitmap));
259 static void df_bb_lr_local_compute PARAMS((struct df *, basic_block));
260 static void df_lr_local_compute PARAMS((struct df *, bitmap));
261 static void df_bb_reg_info_compute PARAMS((struct df *, basic_block, bitmap));
262 static void df_reg_info_compute PARAMS((struct df *, bitmap));
264 static int df_bb_luids_set PARAMS((struct df *df, basic_block));
265 static int df_luids_set PARAMS((struct df *df, bitmap));
267 static int df_modified_p PARAMS ((struct df *, bitmap));
268 static int df_refs_queue PARAMS ((struct df *));
269 static int df_refs_process PARAMS ((struct df *));
270 static int df_bb_refs_update PARAMS ((struct df *, basic_block));
271 static int df_refs_update PARAMS ((struct df *));
272 static void df_analyse_1 PARAMS((struct df *, bitmap, int, int));
274 static void df_insns_modify PARAMS((struct df *, basic_block,
276 static int df_rtx_mem_replace PARAMS ((rtx *, void *));
277 static int df_rtx_reg_replace PARAMS ((rtx *, void *));
278 void df_refs_reg_replace PARAMS ((struct df *, bitmap,
279 struct df_link *, rtx, rtx));
281 static int df_def_dominates_all_uses_p PARAMS((struct df *, struct ref *def));
282 static int df_def_dominates_uses_p PARAMS((struct df *,
283 struct ref *def, bitmap));
284 static struct ref *df_bb_regno_last_use_find PARAMS((struct df *, basic_block,
286 static struct ref *df_bb_regno_first_def_find PARAMS((struct df *, basic_block,
288 static struct ref *df_bb_insn_regno_last_use_find PARAMS((struct df *,
291 static struct ref *df_bb_insn_regno_first_def_find PARAMS((struct df *,
295 static void df_chain_dump PARAMS((struct df_link *, FILE *file));
296 static void df_chain_dump_regno PARAMS((struct df_link *, FILE *file));
297 static void df_regno_debug PARAMS ((struct df *, unsigned int, FILE *));
298 static void df_ref_debug PARAMS ((struct df *, struct ref *, FILE *));
301 /* Local memory allocation/deallocation routines. */
304 /* Increase the insn info table by SIZE more elements. */
/* NOTE(review): this extract is missing the function header (return type,
   K&R parameter declarations, braces) and the `if (size == 0)` guard the
   comment below implies; the prototype earlier in the file gives the
   signature as PARAMS ((struct df *, int)).  Confirm against the full
   source before editing.  */
306 df_insn_table_realloc (df, size)
310 /* Make table 25 percent larger by default. */
312 size = df->insn_size / 4;
314 size += df->insn_size;
/* Grow the per-insn info array; xrealloc aborts on OOM, so no NULL check.  */
316 df->insns = (struct insn_info *)
317 xrealloc (df->insns, size * sizeof (struct insn_info));
/* Zero only the freshly grown tail so new entries start out empty.  */
319 memset (df->insns + df->insn_size, 0,
320 (size - df->insn_size) * sizeof (struct insn_info));
322 df->insn_size = size;
/* Lazily create the bitmap of insns queued for re-analysis.  */
324 if (! df->insns_modified)
326 df->insns_modified = BITMAP_XMALLOC ();
327 bitmap_zero (df->insns_modified);
332 /* Increase the reg info table by SIZE more elements. */
/* NOTE(review): function header lines (return type, param decls, braces)
   and presumably the trailing `df->reg_size = size;` update are missing
   from this extract; signature per the prototype above is
   PARAMS ((struct df *, int)).  */
334 df_reg_table_realloc (df, size)
338 /* Make table 25 percent larger by default. */
340 size = df->reg_size / 4;
342 size += df->reg_size;
344 df->regs = (struct reg_info *)
345 xrealloc (df->regs, size * sizeof (struct reg_info));
347 /* Zero the new entries. */
348 memset (df->regs + df->reg_size, 0,
349 (size - df->reg_size) * sizeof (struct reg_info));
356 /* Not currently used. */
/* Grow the def table and pre-allocate a pool of ref structures, chained
   together through their (overloaded) `chain' field as a free list.
   NOTE(review): dead code per the comment above; header lines and local
   declarations (refs, i) are missing from this extract.  */
358 df_def_table_realloc (df, size)
365 /* Make table 25 percent larger by default. */
367 size = df->def_size / 4;
369 df->def_size += size;
370 df->defs = xrealloc (df->defs,
371 df->def_size * sizeof (*df->defs));
373 /* Allocate a new block of memory and link into list of blocks
374 that will need to be freed later. */
376 refs = xmalloc (size * sizeof (*refs));
378 /* Link all the new refs together, overloading the chain field. */
379 for (i = 0; i < size - 1; i++)
380 refs[i].chain = (struct df_link *)(refs + i + 1);
381 refs[size - 1].chain = 0;
387 /* Allocate bitmaps for each basic block. */
/* NOTE(review): the function header, local declarations (i, dflags), and
   several brace/assignment lines are missing from this extract; the
   prototype above gives PARAMS ((struct df *, int)).  */
389 df_bitmaps_alloc (df, flags)
/* If the id spaces (regs, uses, defs) have outgrown the bitmaps allocated
   on a previous pass, free the stale ones so they are rebuilt below.  */
396 /* Free the bitmaps if they need resizing. */
397 if ((flags & DF_LR) && df->n_regs < (unsigned int)max_reg_num ())
398 dflags |= DF_LR | DF_RU;
399 if ((flags & DF_RU) && df->n_uses < df->use_id)
401 if ((flags & DF_RD) && df->n_defs < df->def_id)
405 df_bitmaps_free (df, dflags);
407 df->n_defs = df->def_id;
408 df->n_uses = df->use_id;
410 for (i = 0; i < df->n_bbs; i++)
412 basic_block bb = BASIC_BLOCK (i);
413 struct bb_info *bb_info = DF_BB_INFO (df, bb);
/* rd_in being NULL is used as the "not yet allocated" marker for the
   whole reaching-definitions group of bitmaps.  */
415 if (flags & DF_RD && ! bb_info->rd_in)
417 /* Allocate bitmaps for reaching definitions. */
418 bb_info->rd_kill = BITMAP_XMALLOC ();
419 bitmap_zero (bb_info->rd_kill);
420 bb_info->rd_gen = BITMAP_XMALLOC ();
421 bitmap_zero (bb_info->rd_gen);
422 bb_info->rd_in = BITMAP_XMALLOC ();
423 bb_info->rd_out = BITMAP_XMALLOC ();
424 bb_info->rd_valid = 0;
427 if (flags & DF_RU && ! bb_info->ru_in)
429 /* Allocate bitmaps for upward exposed uses. */
430 bb_info->ru_kill = BITMAP_XMALLOC ();
431 bitmap_zero (bb_info->ru_kill);
432 /* Note the lack of symmetry. */
433 bb_info->ru_gen = BITMAP_XMALLOC ();
434 bitmap_zero (bb_info->ru_gen);
435 bb_info->ru_in = BITMAP_XMALLOC ();
436 bb_info->ru_out = BITMAP_XMALLOC ();
437 bb_info->ru_valid = 0;
440 if (flags & DF_LR && ! bb_info->lr_in)
442 /* Allocate bitmaps for live variables. */
443 bb_info->lr_def = BITMAP_XMALLOC ();
444 bitmap_zero (bb_info->lr_def);
445 bb_info->lr_use = BITMAP_XMALLOC ();
446 bitmap_zero (bb_info->lr_use);
447 bb_info->lr_in = BITMAP_XMALLOC ();
448 bb_info->lr_out = BITMAP_XMALLOC ();
449 bb_info->lr_valid = 0;
455 /* Free bitmaps for each basic block. */
/* Inverse of df_bitmaps_alloc: release the per-BB bitmaps selected by
   FLAGS and clear the corresponding bits in df->flags.
   NOTE(review): function header, loop braces, and the declaration of `i'
   are missing from this extract.  The ATTRIBUTE_UNUSED on `df' looks
   wrong — df is dereferenced throughout; confirm against full source.  */
457 df_bitmaps_free (df, flags)
458 struct df *df ATTRIBUTE_UNUSED;
463 for (i = 0; i < df->n_bbs; i++)
465 basic_block bb = BASIC_BLOCK (i);
466 struct bb_info *bb_info = DF_BB_INFO (df, bb);
/* rd_in doubles as the "allocated" marker, mirroring df_bitmaps_alloc.  */
471 if ((flags & DF_RD) && bb_info->rd_in)
473 /* Free bitmaps for reaching definitions. */
474 BITMAP_XFREE (bb_info->rd_kill);
475 bb_info->rd_kill = NULL;
476 BITMAP_XFREE (bb_info->rd_gen);
477 bb_info->rd_gen = NULL;
478 BITMAP_XFREE (bb_info->rd_in);
479 bb_info->rd_in = NULL;
480 BITMAP_XFREE (bb_info->rd_out);
481 bb_info->rd_out = NULL;
484 if ((flags & DF_RU) && bb_info->ru_in)
486 /* Free bitmaps for upward exposed uses. */
487 BITMAP_XFREE (bb_info->ru_kill);
488 bb_info->ru_kill = NULL;
489 BITMAP_XFREE (bb_info->ru_gen);
490 bb_info->ru_gen = NULL;
491 BITMAP_XFREE (bb_info->ru_in);
492 bb_info->ru_in = NULL;
493 BITMAP_XFREE (bb_info->ru_out);
494 bb_info->ru_out = NULL;
497 if ((flags & DF_LR) && bb_info->lr_in)
499 /* Free bitmaps for live variables. */
500 BITMAP_XFREE (bb_info->lr_def);
501 bb_info->lr_def = NULL;
502 BITMAP_XFREE (bb_info->lr_use);
503 bb_info->lr_use = NULL;
504 BITMAP_XFREE (bb_info->lr_in);
505 bb_info->lr_in = NULL;
506 BITMAP_XFREE (bb_info->lr_out);
507 bb_info->lr_out = NULL;
/* Record that these problems' bitmaps are no longer available.  */
510 df->flags &= ~(flags & (DF_RD | DF_RU | DF_LR));
514 /* Allocate and initialise dataflow memory. */
/* Sizes the def/use/insn/reg tables from current insn and reg counts and
   allocates per-function bitmaps.  NOTE(review): function header, local
   declarations (n_insns, i) and assignments such as df->n_regs = n_regs
   are missing from this extract; prototype above is
   PARAMS ((struct df *, int)).  */
516 df_alloc (df, n_regs)
523 gcc_obstack_init (&df_ref_obstack);
525 /* Perhaps we should use LUIDs to save memory for the insn_refs
526 table. This is only a small saving; a few pointers. */
527 n_insns = get_max_uid () + 1;
531 /* Approximate number of defs by number of insns. */
532 df->def_size = n_insns;
533 df->defs = xmalloc (df->def_size * sizeof (*df->defs));
537 /* Approximate number of uses by twice number of insns. */
538 df->use_size = n_insns * 2;
539 df->uses = xmalloc (df->use_size * sizeof (*df->uses));
542 df->n_bbs = n_basic_blocks;
544 /* Allocate temporary working array used during local dataflow analysis. */
545 df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));
547 df_insn_table_realloc (df, n_insns);
549 df_reg_table_realloc (df, df->n_regs);
551 df->bbs_modified = BITMAP_XMALLOC ();
552 bitmap_zero (df->bbs_modified);
/* xcalloc zeroes the per-BB info, so all bb bitmap pointers start NULL
   (the "not allocated" state df_bitmaps_alloc relies on).  */
556 df->bbs = xcalloc (df->n_bbs, sizeof (struct bb_info));
/* Initially every basic block is in the analysed set.  */
558 df->all_blocks = BITMAP_XMALLOC ();
559 for (i = 0; i < n_basic_blocks; i++)
560 bitmap_set_bit (df->all_blocks, i);
564 /* Free all the dataflow info. */
/* NOTE(review): the df_free function header and the frees of the
   defs/uses/regs/insns/bbs arrays (original lines ~565-596) are missing
   from this extract; only the bitmap/obstack teardown survives.  */
569 df_bitmaps_free (df, DF_ALL);
597 if (df->bbs_modified)
598 BITMAP_XFREE (df->bbs_modified);
599 df->bbs_modified = 0;
601 if (df->insns_modified)
602 BITMAP_XFREE (df->insns_modified);
603 df->insns_modified = 0;
605 BITMAP_XFREE (df->all_blocks);
/* Releases every ref and df_link allocated on the obstack in one go.  */
608 obstack_free (&df_ref_obstack, NULL);
611 /* Local miscellaneous routines. */
613 /* Return a USE for register REGNO. */
614 static rtx df_reg_use_gen (regno)
620 reg = regno >= FIRST_PSEUDO_REGISTER
621 ? regno_reg_rtx[regno] : gen_rtx_REG (reg_raw_mode[regno], regno);
623 use = gen_rtx_USE (GET_MODE (reg), reg);
628 /* Return a CLOBBER for register REGNO. */
629 static rtx df_reg_clobber_gen (regno)
635 reg = regno >= FIRST_PSEUDO_REGISTER
636 ? regno_reg_rtx[regno] : gen_rtx_REG (reg_raw_mode[regno], regno);
638 use = gen_rtx_CLOBBER (GET_MODE (reg), reg);
642 /* Local chain manipulation routines. */
644 /* Create a link in a def-use or use-def chain. */
645 static inline struct df_link *
646 df_link_create (ref, next)
648 struct df_link *next;
650 struct df_link *link;
652 link = (struct df_link *) obstack_alloc (&df_ref_obstack,
660 /* Add REF to chain head pointed to by PHEAD. */
661 static struct df_link *
662 df_ref_unlink (phead, ref)
663 struct df_link **phead;
666 struct df_link *link = *phead;
672 /* Only a single ref. It must be the one we want.
673 If not, the def-use and use-def chains are likely to
675 if (link->ref != ref)
677 /* Now have an empty chain. */
682 /* Multiple refs. One of them must be us. */
683 if (link->ref == ref)
688 for (; link->next; link = link->next)
690 if (link->next->ref == ref)
692 /* Unlink from list. */
693 link->next = link->next->next;
704 /* Unlink REF from all def-use/use-def chains, etc. */
/* Dispatches on whether REF is a def or a use: removes it from its
   def-use/use-def partner chains and from its insn's defs/uses list.
   NOTE(review): the return type / linkage line and braces are missing
   from this extract, so the exact signature cannot be confirmed here.  */
706 df_ref_remove (df, ref)
710 if (DF_REF_REG_DEF_P (ref))
712 df_def_unlink (df, ref);
713 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].defs, ref);
717 df_use_unlink (df, ref);
718 df_ref_unlink (&df->insns[DF_REF_INSN_UID (ref)].uses, ref);
724 /* Unlink DEF from use-def and reg-def chains. */
726 df_def_unlink (df, def)
727 struct df *df ATTRIBUTE_UNUSED;
730 struct df_link *du_link;
731 unsigned int dregno = DF_REF_REGNO (def);
733 /* Follow def-use chain to find all the uses of this def. */
734 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
736 struct ref *use = du_link->ref;
738 /* Unlink this def from the use-def chain. */
739 df_ref_unlink (&DF_REF_CHAIN (use), def);
741 DF_REF_CHAIN (def) = 0;
743 /* Unlink def from reg-def chain. */
744 df_ref_unlink (&df->regs[dregno].defs, def);
746 df->defs[DF_REF_ID (def)] = 0;
750 /* Unlink use from def-use and reg-use chains. */
752 df_use_unlink (df, use)
753 struct df *df ATTRIBUTE_UNUSED;
756 struct df_link *ud_link;
757 unsigned int uregno = DF_REF_REGNO (use);
759 /* Follow use-def chain to find all the defs of this use. */
760 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
762 struct ref *def = ud_link->ref;
764 /* Unlink this use from the def-use chain. */
765 df_ref_unlink (&DF_REF_CHAIN (def), use);
767 DF_REF_CHAIN (use) = 0;
769 /* Unlink use from reg-use chain. */
770 df_ref_unlink (&df->regs[uregno].uses, use);
772 df->uses[DF_REF_ID (use)] = 0;
775 /* Local routines for recording refs. */
778 /* Create a new ref of type DF_REF_TYPE for register REG at address
779 LOC within INSN of BB. */
/* Allocates the ref on df_ref_obstack, fills in its fields, and appends
   it to either the defs or uses table, growing the table by 25% when
   full.  NOTE(review): the function header, several parameter
   declarations, braces, and the trailing `return this_ref;` are missing
   from this extract.  The local `uid' is set but apparently unused in
   the surviving lines.  */
781 df_ref_create (df, reg, loc, bb, insn, ref_type)
787 enum df_ref_type ref_type;
789 struct ref *this_ref;
792 this_ref = (struct ref *) obstack_alloc (&df_ref_obstack,
794 DF_REF_REG (this_ref) = reg;
795 DF_REF_LOC (this_ref) = loc;
796 DF_REF_BB (this_ref) = bb;
797 DF_REF_INSN (this_ref) = insn;
798 DF_REF_CHAIN (this_ref) = 0;
799 DF_REF_TYPE (this_ref) = ref_type;
800 uid = INSN_UID (insn);
802 if (ref_type == DF_REF_REG_DEF)
804 if (df->def_id >= df->def_size)
806 /* Make table 25 percent larger. */
807 df->def_size += (df->def_size / 4);
808 df->defs = xrealloc (df->defs,
809 df->def_size * sizeof (*df->defs));
/* Id doubles as the index into the defs table.  */
811 DF_REF_ID (this_ref) = df->def_id;
812 df->defs[df->def_id++] = this_ref;
816 if (df->use_id >= df->use_size)
818 /* Make table 25 percent larger. */
819 df->use_size += (df->use_size / 4);
820 df->uses = xrealloc (df->uses,
821 df->use_size * sizeof (*df->uses));
823 DF_REF_ID (this_ref) = df->use_id;
824 df->uses[df->use_id++] = this_ref;
830 /* Create a new reference of type DF_REF_TYPE for a single register REG,
831 used inside the LOC rtx of INSN. */
833 df_ref_record_1 (df, reg, loc, bb, insn, ref_type)
839 enum df_ref_type ref_type;
841 df_ref_create (df, reg, loc, bb, insn, ref_type);
845 /* Create new references of type DF_REF_TYPE for each part of register REG
846 at address LOC within INSN of BB. */
/* A hard register in a multi-word mode is recorded as one ref per
   covered hard reg; a pseudo (or interesting SUBREG of one) is recorded
   as a single ref.  NOTE(review): the function header, local
   declarations (regno, endregno, i), braces, and an early `return' after
   the GET_CODE guard are missing from this extract.  */
848 df_ref_record (df, reg, loc, bb, insn, ref_type)
854 enum df_ref_type ref_type;
/* Only REG and SUBREG rtx's generate refs; everything else is ignored
   (the elided line presumably aborts or returns here).  */
858 if (GET_CODE (reg) != REG && GET_CODE (reg) != SUBREG)
861 /* For the reg allocator we are interested in some SUBREG rtx's, but not
862 all. Notably only those representing a word extraction from a multi-word
863 reg. As written in the docu those should have the form
864 (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
865 XXX Is that true? We could also use the global word_mode variable. */
866 if (GET_CODE (reg) == SUBREG
867 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
868 || GET_MODE_SIZE (GET_MODE (reg))
869 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
871 loc = &SUBREG_REG (reg);
875 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
876 if (regno < FIRST_PSEUDO_REGISTER)
/* Skip hard registers entirely unless the client asked for them.  */
881 if (! (df->flags & DF_HARD_REGS))
884 /* GET_MODE (reg) is correct here. We don't want to go into a SUBREG
885 for the mode, because we only want to add references to regs, which
886 are really referenced. E.g. a (subreg:SI (reg:DI 0) 0) does _not_
887 reference the whole reg 0 in DI mode (which would also include
888 reg 1, at least, if 0 and 1 are SImode registers). */
889 endregno = regno + HARDREGNO_dummy
902 /* Process all the registers defined in the rtx, X. */
/* X is a SET or CLOBBER; peel STRICT_LOW_PART/ZERO_EXTRACT/SIGN_EXTRACT
   (and, depending on HANDLE_SUBREG, SUBREG) wrappers off SET_DEST and
   record the underlying register as a def.  NOTE(review): the function
   header, the `rtx dst = SET_DEST (x);` style declaration, braces, a
   `return' after the PARALLEL case, and the #ifdef lines selecting
   between the two peeling loops are missing from this extract.  */
904 df_def_record_1 (df, x, bb, insn)
910 rtx *loc = &SET_DEST (x);
913 /* Some targets place small structures in registers for
914 return values of functions. */
915 if (GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
919 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
920 df_def_record_1 (df, XVECEXP (dst, 0, i), bb, insn);
924 /* May be, we should flag the use of strict_low_part somehow. Might be
925 handy for the reg allocator. */
927 while (GET_CODE (dst) == STRICT_LOW_PART
928 || GET_CODE (dst) == ZERO_EXTRACT
929 || GET_CODE (dst) == SIGN_EXTRACT)
931 loc = &XEXP (dst, 0);
934 /* For the reg allocator we are interested in exact register references.
935 This means, we want to know, if only a part of a register is
938 if (GET_CODE (dst) == SUBREG)
940 loc = &XEXP (dst, 0);
945 while (GET_CODE (dst) == SUBREG
946 || GET_CODE (dst) == ZERO_EXTRACT
947 || GET_CODE (dst) == SIGN_EXTRACT
948 || GET_CODE (dst) == STRICT_LOW_PART)
950 loc = &XEXP (dst, 0);
/* Only record when we finally reached a bare REG (or SUBREG of one).  */
955 if (GET_CODE (dst) == REG
956 || (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG))
957 df_ref_record (df, dst, loc, bb, insn, DF_REF_REG_DEF);
961 /* Process all the registers defined in the pattern rtx, X. */
/* Top-level def walker: a SET/CLOBBER is a single def; a PARALLEL may
   contain several.  NOTE(review): the function header, parameter
   declarations, braces, and the `int i;' local are missing from this
   extract.  */
963 df_defs_record (df, x, bb, insn)
969 RTX_CODE code = GET_CODE (x);
971 if (code == SET || code == CLOBBER)
973 /* Mark the single def within the pattern. */
974 df_def_record_1 (df, x, bb, insn);
976 else if (code == PARALLEL)
980 /* Mark the multiple defs within the pattern. */
981 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
/* `code' is reused here for the sub-pattern's code.  */
983 code = GET_CODE (XVECEXP (x, 0, i));
984 if (code == SET || code == CLOBBER)
985 df_def_record_1 (df, XVECEXP (x, 0, i), bb, insn);
991 /* Process all the registers used in the rtx at address LOC. */
/* Recursive walker over an rtx recording every register USE with
   df_ref_record; SET destinations are handled specially so that pure
   stores are not counted as uses while read-modify-write destinations
   are.  NOTE(review): the function header, the switch statement and its
   case labels (CLOBBER, MEM, SUBREG, REG, SET, PRE_DEC etc.), braces,
   returns, and the `retry:' label the tail recursion presumably jumps to
   are missing from this extract — only the case bodies survive.  */
993 df_uses_record (df, loc, ref_type, bb, insn)
996 enum df_ref_type ref_type;
1005 code = GET_CODE (x);
1019 /* If we are clobbering a MEM, mark any registers inside the address
1021 if (GET_CODE (XEXP (x, 0)) == MEM)
1022 df_uses_record (df, &XEXP (XEXP (x, 0), 0),
1023 DF_REF_REG_MEM_STORE, bb, insn);
1025 /* If we're clobbering a REG then we have a def so ignore. */
/* A MEM operand: its address is a load-context use.  */
1029 df_uses_record (df, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn);
1033 /* While we're here, optimize this case. */
1034 #if defined(HANDLE_SUBREG)
1036 /* In case the SUBREG is not of a register, don't optimize. */
1037 if (GET_CODE (SUBREG_REG (x)) != REG)
1039 loc = &SUBREG_REG (x);
1040 df_uses_record (df, loc, ref_type, bb, insn);
1044 loc = &SUBREG_REG (x);
1046 if (GET_CODE (x) != REG)
1048 df_uses_record (df, loc, ref_type, bb, insn);
1053 /* ... Fall through ... */
1056 /* See a register (or subreg) other than being set. */
1057 df_ref_record (df, x, loc, bb, insn, ref_type);
/* SET case: classify the destination before recording uses.  */
1062 rtx dst = SET_DEST (x);
1065 /* If storing into MEM, don't show it as being used. But do
1066 show the address as being used. */
1067 if (GET_CODE (dst) == MEM)
1069 df_uses_record (df, &XEXP (dst, 0),
1070 DF_REF_REG_MEM_STORE,
1072 df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn);
1076 #if 1 && defined(HANDLE_SUBREG)
1077 /* Look for sets that perform a read-modify-write. */
1078 while (GET_CODE (dst) == STRICT_LOW_PART
1079 || GET_CODE (dst) == ZERO_EXTRACT
1080 || GET_CODE (dst) == SIGN_EXTRACT)
1082 if (GET_CODE (dst) == STRICT_LOW_PART)
1084 dst = XEXP (dst, 0);
1085 if (GET_CODE (dst) != SUBREG)
1087 /* A strict_low_part uses the whole reg not only the subreg. */
1088 df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb, insn);
1092 df_uses_record (df, &XEXP (dst, 0), DF_REF_REG_USE, bb, insn);
1093 dst = XEXP (dst, 0);
1096 if (GET_CODE (dst) == SUBREG)
1098 /* Paradoxical or too small subreg's are read-mod-write. */
1099 if (GET_MODE_SIZE (GET_MODE (dst)) < GET_MODE_SIZE (word_mode)
1100 || GET_MODE_SIZE (GET_MODE (dst))
1101 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dst))))
1104 /* In the original code also some SUBREG rtx's were considered
1105 read-modify-write (those with
1106 REG_SIZE(SUBREG_REG(dst)) > REG_SIZE(dst) )
1107 e.g. a (subreg:QI (reg:SI A) 0). I can't see this. The only
1108 reason for a read cycle for reg A would be to somehow preserve
1109 the bits outside of the subreg:QI. But for this a strict_low_part
1110 was necessary anyway, and this we handled already. */
/* Non-HANDLE_SUBREG variant of the read-modify-write peeling loop.  */
1112 while (GET_CODE (dst) == STRICT_LOW_PART
1113 || GET_CODE (dst) == ZERO_EXTRACT
1114 || GET_CODE (dst) == SIGN_EXTRACT
1115 || GET_CODE (dst) == SUBREG)
1117 /* A SUBREG of a smaller size does not use the old value. */
1118 if (GET_CODE (dst) != SUBREG
1119 || (REG_SIZE (SUBREG_REG (dst)) > REG_SIZE (dst)))
1121 dst = XEXP (dst, 0);
1125 if ((GET_CODE (dst) == PARALLEL && GET_MODE (dst) == BLKmode)
1126 || GET_CODE (dst) == REG || GET_CODE (dst) == SUBREG)
1128 #if 1 || !defined(HANDLE_SUBREG)
1130 df_uses_record (df, &SET_DEST (x), DF_REF_REG_USE, bb, insn);
1132 df_uses_record (df, &SET_SRC (x), DF_REF_REG_USE, bb, insn);
1142 case UNSPEC_VOLATILE:
1146 /* Traditional and volatile asm instructions must be considered to use
1147 and clobber all hard registers, all pseudo-registers and all of
1148 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
1150 Consider for instance a volatile asm that changes the fpu rounding
1151 mode. An insn should not be moved across this even if it only uses
1152 pseudo-regs because it might give an incorrectly rounded result.
1154 For now, just mark any regs we can find in ASM_OPERANDS as
1157 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
1158 We can not just fall through here since then we would be confused
1159 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
1160 traditional asms unlike their normal usage. */
1161 if (code == ASM_OPERANDS)
1165 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
1166 df_uses_record (df, &ASM_OPERANDS_INPUT (x, j),
1167 DF_REF_REG_USE, bb, insn);
/* Auto-inc/dec style addresses both use and define the register.  */
1179 /* Catch the def of the register being modified. */
1180 df_ref_record (df, XEXP (x, 0), &XEXP (x, 0), bb, insn, DF_REF_REG_DEF);
1182 /* ... Fall through to handle uses ... */
1188 /* Recursively scan the operands of this expression. */
1190 const char *fmt = GET_RTX_FORMAT (code);
1193 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1197 /* Tail recursive case: save a function call level. */
1203 df_uses_record (df, &XEXP (x, i), ref_type, bb, insn);
1205 else if (fmt[i] == 'E')
1208 for (j = 0; j < XVECLEN (x, i); j++)
1209 df_uses_record (df, &XVECEXP (x, i, j), ref_type,
1217 /* Record all the df within INSN of basic block BB. */
/* Records INSN's defs, its uses (including REG_EQUIV/REG_EQUAL note uses
   when DF_EQUIV_NOTES is set), and for CALL_INSNs the argument-passing
   uses, stack pointer use, global register uses, and call-clobbered
   register defs.  NOTE(review): the function header, locals (note, x, i),
   the INSN_P guard, braces, and the switch's other note cases are
   missing from this extract.  */
1219 df_insn_refs_record (df, bb, insn)
1230 /* Record register defs */
1231 df_defs_record (df, PATTERN (insn), bb, insn);
1233 if (df->flags & DF_EQUIV_NOTES)
1234 for (note = REG_NOTES (insn); note;
1235 note = XEXP (note, 1))
1237 switch (REG_NOTE_KIND (note))
1241 df_uses_record (df, &XEXP (note, 0), DF_REF_REG_USE,
1248 if (GET_CODE (insn) == CALL_INSN)
1253 /* Record the registers used to pass arguments. */
1254 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
1255 note = XEXP (note, 1))
1257 if (GET_CODE (XEXP (note, 0)) == USE)
/* SET_DEST here extracts the operand of the USE rtx.  */
1258 df_uses_record (df, &SET_DEST (XEXP (note, 0)), DF_REF_REG_USE,
1262 /* The stack ptr is used (honorarily) by a CALL insn. */
1263 x = df_reg_use_gen (STACK_POINTER_REGNUM);
1264 df_uses_record (df, &SET_DEST (x), DF_REF_REG_USE, bb, insn);
1266 if (df->flags & DF_HARD_REGS)
1268 /* Calls may also reference any of the global registers,
1269 so they are recorded as used. */
1270 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* Presumably guarded by a (missing) global_regs[i] test.  */
1273 x = df_reg_use_gen (i);
1274 df_uses_record (df, &SET_DEST (x),
1275 DF_REF_REG_USE, bb, insn);
1280 /* Record the register uses. */
1281 df_uses_record (df, &PATTERN (insn),
1282 DF_REF_REG_USE, bb, insn);
1285 if (GET_CODE (insn) == CALL_INSN)
1289 if (df->flags & DF_HARD_REGS)
1291 /* Kill all registers invalidated by a call. */
1292 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1293 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1295 rtx reg_clob = df_reg_clobber_gen (i);
1296 df_defs_record (df, reg_clob, bb, insn);
1300 /* There may be extra registers to be clobbered. */
1301 for (note = CALL_INSN_FUNCTION_USAGE (insn);
1303 note = XEXP (note, 1))
1304 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1305 df_defs_record (df, XEXP (note, 0), bb, insn);
1311 /* Record all the refs within the basic block BB. */
1313 df_bb_refs_record (df, bb)
1319 /* Scan the block an insn at a time from beginning to end. */
1320 for (insn = bb->head; ; insn = NEXT_INSN (insn))
1324 /* Record defs within INSN. */
1325 df_insn_refs_record (df, bb, insn);
/* bb->end terminates the scan; the loop condition is intentionally
   empty because the last insn must still be processed. */
1327 if (insn == bb->end)
1333 /* Record all the refs in the basic blocks specified by BLOCKS. */
1335 df_refs_record (df, blocks)
/* BLOCKS is a bitmap of basic block indices; each set block is scanned. */
1341 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1343 df_bb_refs_record (df, bb);
1347 /* Dataflow analysis routines. */
1350 /* Create reg-def chains for basic block BB. These are a list of
1351 definitions for each register. */
1353 df_bb_reg_def_chain_create (df, bb)
1359 /* Perhaps the defs should be sorted using a depth first search
1360 of the CFG (or possibly a breadth first search). We currently
1361 scan the basic blocks in reverse order so that the first defs
1362 appear at the start of the chain. */
/* Walk insns backwards from bb->end to bb->head inclusive. */
1364 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1365 insn = PREV_INSN (insn))
1367 struct df_link *link;
1368 unsigned int uid = INSN_UID (insn);
1370 if (! INSN_P (insn))
/* Prepend each def of this insn onto its register's def chain. */
1373 for (link = df->insns[uid].defs; link; link = link->next)
1375 struct ref *def = link->ref;
1376 unsigned int dregno = DF_REF_REGNO (def);
1378 df->regs[dregno].defs
1379 = df_link_create (def, df->regs[dregno].defs);
1385 /* Create reg-def chains for each basic block within BLOCKS. These
1386 are a list of definitions for each register. */
1388 df_reg_def_chain_create (df, blocks)
/* NOTE(review): the commented-out _REV suffix suggests a reverse-order
   iteration was considered; current order relies on the per-bb prepend. */
1394 FOR_EACH_BB_IN_BITMAP/*_REV*/ (blocks, 0, bb,
1396 df_bb_reg_def_chain_create (df, bb);
1401 /* Create reg-use chains for basic block BB. These are a list of uses
1402 for each register. */
1404 df_bb_reg_use_chain_create (df, bb)
1410 /* Scan in forward order so that the last uses appear at the
1411 start of the chain. */
1413 for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1414 insn = NEXT_INSN (insn))
1416 struct df_link *link;
1417 unsigned int uid = INSN_UID (insn);
1419 if (! INSN_P (insn))
/* Prepend each use of this insn onto its register's use chain. */
1422 for (link = df->insns[uid].uses; link; link = link->next)
1424 struct ref *use = link->ref;
1425 unsigned int uregno = DF_REF_REGNO (use);
1427 df->regs[uregno].uses
1428 = df_link_create (use, df->regs[uregno].uses);
1434 /* Create reg-use chains for each basic block within BLOCKS. These
1435 are a list of uses for each register. */
1437 df_reg_use_chain_create (df, blocks)
/* Delegates to df_bb_reg_use_chain_create for each block in BLOCKS. */
1443 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1445 df_bb_reg_use_chain_create (df, bb);
1450 /* Create def-use chains from reaching use bitmaps for basic block BB. */
1452 df_bb_du_chain_create (df, bb, ru)
/* RU is a scratch bitmap owned by the caller; it is seeded with the
   uses live at the end of BB and updated as we walk backwards. */
1457 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1460 bitmap_copy (ru, bb_info->ru_out);
1462 /* For each def in BB create a linked list (chain) of uses
1463 reached from the def. */
1464 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1465 insn = PREV_INSN (insn))
1467 struct df_link *def_link;
1468 struct df_link *use_link;
1469 unsigned int uid = INSN_UID (insn);
1471 if (! INSN_P (insn))
1474 /* For each def in insn... */
1475 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1477 struct ref *def = def_link->ref;
1478 unsigned int dregno = DF_REF_REGNO (def);
1480 DF_REF_CHAIN (def) = 0;
1482 /* While the reg-use chains are not essential, it
1483 is _much_ faster to search these short lists rather
1484 than all the reaching uses, especially for large functions. */
1485 for (use_link = df->regs[dregno].uses; use_link;
1486 use_link = use_link->next)
1488 struct ref *use = use_link->ref;
/* A use still present in RU is reached by this def: chain it and
   clear it so earlier defs of the same reg do not claim it too. */
1490 if (bitmap_bit_p (ru, DF_REF_ID (use)))
1493 = df_link_create (use, DF_REF_CHAIN (def));
1495 bitmap_clear_bit (ru, DF_REF_ID (use));
1500 /* For each use in insn... */
1501 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1503 struct ref *use = use_link->ref;
1504 bitmap_set_bit (ru, DF_REF_ID (use));
1510 /* Create def-use chains from reaching use bitmaps for basic blocks
/* Allocates one scratch bitmap shared across all blocks. */
1513 df_du_chain_create (df, blocks)
1520 ru = BITMAP_XMALLOC ();
1522 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1524 df_bb_du_chain_create (df, bb, ru);
1531 /* Create use-def chains from reaching def bitmaps for basic block BB. */
1533 df_bb_ud_chain_create (df, bb)
1537 struct bb_info *bb_info = DF_BB_INFO (df, bb);
/* reg_def_last[regno] tracks the most recent def of REGNO seen so far
   within this BB during the forward scan; cleared at block entry. */
1538 struct ref **reg_def_last = df->reg_def_last;
1541 memset (reg_def_last, 0, df->n_regs * sizeof (struct ref *));
1543 /* For each use in BB create a linked list (chain) of defs
1544 that reach the use. */
1545 for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1546 insn = NEXT_INSN (insn))
1548 unsigned int uid = INSN_UID (insn);
1549 struct df_link *use_link;
1550 struct df_link *def_link;
1552 if (! INSN_P (insn))
1555 /* For each use in insn... */
1556 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1558 struct ref *use = use_link->ref;
1559 unsigned int regno = DF_REF_REGNO (use);
1561 DF_REF_CHAIN (use) = 0;
1563 /* Has regno been defined in this BB yet? If so, use
1564 the last def as the single entry for the use-def
1565 chain for this use. Otherwise, we need to add all
1566 the defs using this regno that reach the start of
1568 if (reg_def_last[regno])
1571 = df_link_create (reg_def_last[regno], 0);
1575 /* While the reg-def chains are not essential, it is
1576 _much_ faster to search these short lists rather than
1577 all the reaching defs, especially for large
1579 for (def_link = df->regs[regno].defs; def_link;
1580 def_link = def_link->next)
1582 struct ref *def = def_link->ref;
/* Only defs in RD_IN actually reach the start of this BB. */
1584 if (bitmap_bit_p (bb_info->rd_in, DF_REF_ID (def)))
1587 = df_link_create (def, DF_REF_CHAIN (use));
1594 /* For each def in insn...record the last def of each reg. */
1595 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1597 struct ref *def = def_link->ref;
1598 int dregno = DF_REF_REGNO (def);
1600 reg_def_last[dregno] = def;
1606 /* Create use-def chains from reaching def bitmaps for basic blocks
1609 df_ud_chain_create (df, blocks)
/* Delegates to df_bb_ud_chain_create for each block in BLOCKS. */
1615 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1617 df_bb_ud_chain_create (df, bb);
1622 /* Use reverse completion order, and the worklist, to figure out what block
/* Returns the index of the next pending block in reverse-completion
   order, or the sbitmap fallback (-1 when empty).
   NOTE(review): DF is marked ATTRIBUTE_UNUSED yet df->rc_order is
   read below -- the attribute is merely permissive here.  */
1626 df_visit_next_rc (df, blocks)
1627 struct df *df ATTRIBUTE_UNUSED;
1631 for (i = 0; i < n_basic_blocks; i++)
1632 if (TEST_BIT (blocks, df->rc_order[i]))
1633 return df->rc_order[i];
1634 return sbitmap_first_set_bit (blocks);
1637 /* Use reverse topsort order, and the worklist, to figure out what block
/* Same scheme as df_visit_next_rc but using df->rts_order. */
1641 df_visit_next_rts (df, blocks)
1642 struct df *df ATTRIBUTE_UNUSED;
1646 for (i = 0; i < n_basic_blocks; i++)
1647 if (TEST_BIT (blocks, df->rts_order[i]))
1648 return df->rts_order[i];
1649 return sbitmap_first_set_bit (blocks);
1653 /* Calculate reaching defs for each basic block in BLOCKS, i.e., the
1654 defs that are live at the start of a basic block. */
1656 df_rd_global_compute (df, blocks)
1657 struct df *df ATTRIBUTE_UNUSED;
/* Forward iterative dataflow over an sbitmap worklist:
   rd_in = union of predecessor rd_out;
   rd_out = rd_gen U (rd_in - rd_kill).  */
1664 worklist = sbitmap_alloc (n_basic_blocks);
1665 sbitmap_zero (worklist);
1667 /* Copy the blocklist to the worklist */
1668 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
1670 SET_BIT (worklist, i);
1673 /* We assume that only the basic blocks in WORKLIST have been
1675 FOR_EACH_BB_IN_SBITMAP (worklist, 0, bb,
1677 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1679 bitmap_copy (bb_info->rd_out, bb_info->rd_gen);
1682 while ((i = df_visit_next_rc (df, worklist)) >= 0)
1684 struct bb_info *bb_info;
1688 /* Remove this block from the worklist. */
1689 RESET_BIT (worklist, i);
1692 bb = BASIC_BLOCK (i);
1693 bb_info = DF_BB_INFO (df, bb);
1695 /* Calculate union of predecessor outs. */
1696 bitmap_zero (bb_info->rd_in);
1697 for (e = bb->pred; e != 0; e = e->pred_next)
1699 struct bb_info *pred_refs = DF_BB_INFO (df, e->src);
1701 if (e->src == ENTRY_BLOCK_PTR)
1704 bitmap_a_or_b (bb_info->rd_in, bb_info->rd_in,
1708 /* RD_OUT is the set of defs that are live at the end of the
1709 BB. These are the defs that are either generated by defs
1710 (RD_GEN) within the BB or are live at the start (RD_IN)
1711 and are not killed by other defs (RD_KILL). */
1712 changed = bitmap_union_of_diff (bb_info->rd_out, bb_info->rd_gen,
1713 bb_info->rd_in, bb_info->rd_kill)
1717 /* Add each of this block's successors to the worklist. */
1718 for (e = bb->succ; e != 0; e = e->succ_next)
1720 if (e->dest == EXIT_BLOCK_PTR)
1723 SET_BIT (worklist, e->dest->index);
1727 sbitmap_free (worklist);
1731 /* Calculate reaching uses for each basic block within BLOCKS, i.e.,
1732 the uses that are live at the start of a basic block. */
1734 df_ru_global_compute (df, blocks)
1735 struct df *df ATTRIBUTE_UNUSED;
/* Backward iterative dataflow (mirror image of df_rd_global_compute):
   ru_out = union of successor ru_in;
   ru_in = ru_gen U (ru_out - ru_kill).  */
1742 worklist = sbitmap_alloc (n_basic_blocks);
1743 sbitmap_zero (worklist);
1745 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i,
1747 SET_BIT (worklist, i);
1750 /* We assume that only the basic blocks in WORKLIST have been
1752 FOR_EACH_BB_IN_SBITMAP (worklist, 0, bb,
1754 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1756 bitmap_copy (bb_info->ru_in, bb_info->ru_gen);
1760 while ((i = df_visit_next_rts (df, worklist)) >= 0)
1762 struct bb_info *bb_info;
1766 /* Remove this block from the worklist. */
1767 RESET_BIT (worklist, i);
1769 bb = BASIC_BLOCK (i);
1770 bb_info = DF_BB_INFO (df, bb);
1772 /* Calculate union of successor ins. */
1773 bitmap_zero (bb_info->ru_out);
1774 for (e = bb->succ; e != 0; e = e->succ_next)
1776 struct bb_info *succ_refs = DF_BB_INFO (df, e->dest);
1778 if (e->dest == EXIT_BLOCK_PTR)
1781 bitmap_a_or_b (bb_info->ru_out, bb_info->ru_out,
1785 /* RU_IN is the set of uses that are live at the start of the
1786 BB. These are the uses that are either generated within the
1787 BB (RU_GEN) or are live at the end (RU_OUT) and are not uses
1788 killed by defs within the BB (RU_KILL). */
1789 changed = bitmap_union_of_diff (bb_info->ru_in, bb_info->ru_gen,
1790 bb_info->ru_out, bb_info->ru_kill)
1794 /* Add each of this block's predecessors to the worklist. */
1795 for (e = bb->pred; e != 0; e = e->pred_next)
1797 if (e->src == ENTRY_BLOCK_PTR)
1800 SET_BIT (worklist, e->src->index);
1805 sbitmap_free (worklist);
1809 /* Calculate live registers for each basic block within BLOCKS. */
/* Backward iterative dataflow over a bitmap worklist:
   lr_out = union of successor lr_in;
   lr_in = lr_use U (lr_out - lr_def).  */
1811 df_lr_global_compute (df, blocks)
1812 struct df *df ATTRIBUTE_UNUSED;
1819 worklist = BITMAP_XMALLOC ();
1820 bitmap_copy (worklist, blocks);
1822 /* We assume that only the basic blocks in WORKLIST have been
1824 FOR_EACH_BB_IN_BITMAP (worklist, 0, bb,
1826 struct bb_info *bb_info = DF_BB_INFO (df, bb);
1828 bitmap_copy (bb_info->lr_in, bb_info->lr_use);
1831 while ((i = bitmap_last_set_bit (worklist)) >= 0)
/* Fix: do not initialise BB_INFO from BB here -- BB is not assigned
   until "bb = BASIC_BLOCK (i)" below, so the old initialiser read an
   uninitialised variable (undefined behaviour).  BB_INFO is computed
   after BB is set, matching df_rd_global_compute above.  */
1833 struct bb_info *bb_info;
1837 /* Remove this block from the worklist. */
1838 bitmap_clear_bit (worklist, i);
1840 bb = BASIC_BLOCK (i);
1841 bb_info = DF_BB_INFO (df, bb);
1843 /* Calculate union of successor ins. */
1844 bitmap_zero (bb_info->lr_out);
1845 for (e = bb->succ; e != 0; e = e->succ_next)
1847 struct bb_info *succ_refs = DF_BB_INFO (df, e->dest);
1849 if (e->dest == EXIT_BLOCK_PTR)
1852 bitmap_a_or_b (bb_info->lr_out, bb_info->lr_out,
1856 /* LR_IN is the set of uses that are live at the start of the
1857 BB. These are the uses that are either generated by uses
1858 (LR_USE) within the BB or are live at the end (LR_OUT)
1859 and are not killed by other uses (LR_DEF). */
1860 changed = bitmap_union_of_diff (bb_info->lr_in, bb_info->lr_use,
1861 bb_info->lr_out, bb_info->lr_def)
1865 /* Add each of this block's predecessors to the worklist. */
1866 for (e = bb->pred; e != 0; e = e->pred_next)
1868 if (e->src == ENTRY_BLOCK_PTR)
1871 bitmap_set_bit (worklist, e->src->index);
1875 BITMAP_XFREE (worklist);
1879 /* Compute local reaching def info for basic block BB. */
1881 df_bb_rd_local_compute (df, bb)
1885 struct bb_info *bb_info = DF_BB_INFO (df, bb);
/* Forward scan; later defs of a register kill earlier ones. */
1888 for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
1889 insn = NEXT_INSN (insn))
1891 unsigned int uid = INSN_UID (insn);
1892 struct df_link *def_link;
1894 if (! INSN_P (insn))
1897 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1899 struct ref *def = def_link->ref;
1900 unsigned int regno = DF_REF_REGNO (def);
1901 struct df_link *def2_link;
1903 for (def2_link = df->regs[regno].defs; def2_link;
1904 def2_link = def2_link->next)
1906 struct ref *def2 = def2_link->ref;
1908 /* Add all defs of this reg to the set of kills. This
1909 is greedy since many of these defs will not actually
1910 be killed by this BB but it keeps things a lot
1912 bitmap_set_bit (bb_info->rd_kill, DF_REF_ID (def2));
1914 /* Zap from the set of gens for this BB. */
1915 bitmap_clear_bit (bb_info->rd_gen, DF_REF_ID (def2));
/* The current def survives to the end of the block so far. */
1918 bitmap_set_bit (bb_info->rd_gen, DF_REF_ID (def));
1922 bb_info->rd_valid = 1;
1926 /* Compute local reaching def info for each basic block within BLOCKS. */
1928 df_rd_local_compute (df, blocks)
/* Delegates to df_bb_rd_local_compute for each block in BLOCKS. */
1934 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
1936 df_bb_rd_local_compute (df, bb);
1941 /* Compute local reaching use (upward exposed use) info for basic
1944 df_bb_ru_local_compute (df, bb)
1948 /* This is much more tricky than computing reaching defs. With
1949 reaching defs, defs get killed by other defs. With upwards
1950 exposed uses, these get killed by defs with the same regno. */
1952 struct bb_info *bb_info = DF_BB_INFO (df, bb);
/* Backward scan so that a def kills the uses above it (earlier in
   program order) before those uses are added to the gen set. */
1955 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
1956 insn = PREV_INSN (insn))
1958 unsigned int uid = INSN_UID (insn);
1959 struct df_link *def_link;
1960 struct df_link *use_link;
1962 if (! INSN_P (insn))
1965 for (def_link = df->insns[uid].defs; def_link; def_link = def_link->next)
1967 struct ref *def = def_link->ref;
1968 unsigned int dregno = DF_REF_REGNO (def);
1970 for (use_link = df->regs[dregno].uses; use_link;
1971 use_link = use_link->next)
1973 struct ref *use = use_link->ref;
1975 /* Add all uses of this reg to the set of kills. This
1976 is greedy since many of these uses will not actually
1977 be killed by this BB but it keeps things a lot
1979 bitmap_set_bit (bb_info->ru_kill, DF_REF_ID (use));
1981 /* Zap from the set of gens for this BB. */
1982 bitmap_clear_bit (bb_info->ru_gen, DF_REF_ID (use));
1986 for (use_link = df->insns[uid].uses; use_link; use_link = use_link->next)
1988 struct ref *use = use_link->ref;
1989 /* Add use to set of gens in this BB. */
1990 bitmap_set_bit (bb_info->ru_gen, DF_REF_ID (use));
1993 bb_info->ru_valid = 1;
1997 /* Compute local reaching use (upward exposed use) info for each basic
1998 block within BLOCKS. */
2000 df_ru_local_compute (df, blocks)
/* Delegates to df_bb_ru_local_compute for each block in BLOCKS. */
2006 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2008 df_bb_ru_local_compute (df, bb);
2013 /* Compute local live variable info for basic block BB. */
2015 df_bb_lr_local_compute (df, bb)
2019 struct bb_info *bb_info = DF_BB_INFO (df, bb);
/* Backward scan: a def removes the register from the upward-exposed
   use set; a use (re)adds it.  Sets are indexed by register number,
   not by ref id, unlike the rd/ru problems above.  */
2022 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
2023 insn = PREV_INSN (insn))
2025 unsigned int uid = INSN_UID (insn);
2026 struct df_link *link;
2028 if (! INSN_P (insn))
2031 for (link = df->insns[uid].defs; link; link = link->next)
2033 struct ref *def = link->ref;
2034 unsigned int dregno = DF_REF_REGNO (def);
2036 /* Add def to set of defs in this BB. */
2037 bitmap_set_bit (bb_info->lr_def, dregno);
2039 bitmap_clear_bit (bb_info->lr_use, dregno);
2042 for (link = df->insns[uid].uses; link; link = link->next)
2044 struct ref *use = link->ref;
2045 /* Add use to set of uses in this BB. */
2046 bitmap_set_bit (bb_info->lr_use, DF_REF_REGNO (use));
2049 bb_info->lr_valid = 1;
2053 /* Compute local live variable info for each basic block within BLOCKS. */
2055 df_lr_local_compute (df, blocks)
/* Delegates to df_bb_lr_local_compute for each block in BLOCKS. */
2061 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2063 df_bb_lr_local_compute (df, bb);
2068 /* Compute register info: lifetime, bb, and number of defs and uses
2069 for basic block BB. */
2071 df_bb_reg_info_compute (df, bb, live)
/* LIVE is a caller-owned scratch bitmap of live register numbers,
   seeded from lr_out and maintained while scanning backwards.  */
2076 struct reg_info *reg_info = df->regs;
2077 struct bb_info *bb_info = DF_BB_INFO (df, bb);
2080 bitmap_copy (live, bb_info->lr_out);
2082 for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
2083 insn = PREV_INSN (insn))
2085 unsigned int uid = INSN_UID (insn);
2087 struct df_link *link;
2089 if (! INSN_P (insn))
2092 for (link = df->insns[uid].defs; link; link = link->next)
2094 struct ref *def = link->ref;
2095 unsigned int dregno = DF_REF_REGNO (def);
2097 /* Kill this register. */
2098 bitmap_clear_bit (live, dregno);
2099 reg_info[dregno].n_defs++;
2102 for (link = df->insns[uid].uses; link; link = link->next)
2104 struct ref *use = link->ref;
2105 unsigned int uregno = DF_REF_REGNO (use);
2107 /* This register is now live. */
2108 bitmap_set_bit (live, uregno);
2109 reg_info[uregno].n_uses++;
2112 /* Increment lifetimes of all live registers. */
2113 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno,
2115 reg_info[regno].lifetime++;
2121 /* Compute register info: lifetime, bb, and number of defs and uses. */
2123 df_reg_info_compute (df, blocks)
/* Allocates the shared LIVE scratch bitmap and frees it afterwards. */
2130 live = BITMAP_XMALLOC ();
2132 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2134 df_bb_reg_info_compute (df, bb, live);
2137 BITMAP_XFREE (live);
2141 /* Assign LUIDs for BB. */
2143 df_bb_luids_set (df, bb)
2150 /* The LUIDs are monotonically increasing for each basic block. */
2152 for (insn = bb->head; ; insn = NEXT_INSN (insn))
/* NOTE(review): the two consecutive assignments look redundant, but a
   guarding condition line (presumably "if (INSN_P (insn))" before the
   first) appears to be elided from this excerpt -- confirm against the
   full file before touching this.  */
2155 DF_INSN_LUID (df, insn) = luid++;
2156 DF_INSN_LUID (df, insn) = luid;
2158 if (insn == bb->end)
2165 /* Assign LUIDs for each basic block within BLOCKS. */
2167 df_luids_set (df, blocks)
/* Accumulates the per-block counts returned by df_bb_luids_set. */
2174 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2176 total += df_bb_luids_set (df, bb);
2182 /* Perform dataflow analysis using existing DF structure for blocks
2183 within BLOCKS. If BLOCKS is zero, use all basic blocks in the CFG. */
2185 df_analyse_1 (df, blocks, flags, update)
/* Driver: expands FLAGS into the full set of analyses required
   (AFLAGS), rescans refs, then runs local + global problems and
   chain construction in dependency order.  */
2196 if (flags & DF_UD_CHAIN)
2197 aflags |= DF_RD | DF_RD_CHAIN;
2199 if (flags & DF_DU_CHAIN)
2203 aflags |= DF_RU_CHAIN;
2205 if (flags & DF_REG_INFO)
2209 blocks = df->all_blocks;
2214 df_refs_update (df);
2215 /* More fine grained incremental dataflow analysis would be
2216 nice. For now recompute the whole shebang for the
2219 df_refs_unlink (df, blocks);
2221 /* All the def-use, use-def chains can be potentially
2222 modified by changes in one block. The size of the
2223 bitmaps can also change. */
2227 /* Scan the function for all register defs and uses. */
2229 df_refs_record (df, blocks);
2231 /* Link all the new defs and uses to the insns. */
2232 df_refs_process (df);
2235 /* Allocate the bitmaps now the total number of defs and uses are
2236 known. If the number of defs or uses have changed, then
2237 these bitmaps need to be reallocated. */
2238 df_bitmaps_alloc (df, aflags);
2240 /* Set the LUIDs for each specified basic block. */
2241 df_luids_set (df, blocks);
2243 /* Recreate reg-def and reg-use chains from scratch so that first
2244 def is at the head of the reg-def chain and the last use is at
2245 the head of the reg-use chain. This is only important for
2246 regs local to a basic block as it speeds up searching. */
2247 if (aflags & DF_RD_CHAIN)
2249 df_reg_def_chain_create (df, blocks);
2252 if (aflags & DF_RU_CHAIN)
2254 df_reg_use_chain_create (df, blocks);
/* Block orderings used by the worklist iteration in the global
   solvers; freed at the end of this function. */
2257 df->dfs_order = xmalloc (sizeof(int) * n_basic_blocks);
2258 df->rc_order = xmalloc (sizeof(int) * n_basic_blocks);
2259 df->rts_order = xmalloc (sizeof(int) * n_basic_blocks);
2261 flow_depth_first_order_compute (df->dfs_order, df->rc_order);
2262 flow_reverse_top_sort_order_compute (df->rts_order);
2265 /* Compute the sets of gens and kills for the defs of each bb. */
2266 df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
2268 /* Compute the global reaching definitions. */
2269 df_rd_global_compute (df, df->all_blocks);
2272 if (aflags & DF_UD_CHAIN)
2274 /* Create use-def chains. */
2275 df_ud_chain_create (df, df->all_blocks);
2277 if (! (flags & DF_RD))
2283 /* Compute the sets of gens and kills for the upwards exposed
2285 df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
2287 /* Compute the global reaching uses. */
2288 df_ru_global_compute (df, df->all_blocks);
2291 if (aflags & DF_DU_CHAIN)
2293 /* Create def-use chains. */
2294 df_du_chain_create (df, df->all_blocks);
2296 if (! (flags & DF_RU))
2300 /* Free up bitmaps that are no longer required. */
2302 df_bitmaps_free (df, dflags);
2306 /* Compute the sets of defs and uses of live variables. */
2307 df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
2309 /* Compute the global live variables. */
2310 df_lr_global_compute (df, df->all_blocks);
2313 if (aflags & DF_REG_INFO)
2315 df_reg_info_compute (df, df->all_blocks);
2317 free (df->dfs_order);
2318 free (df->rc_order);
2319 free (df->rts_order);
2323 /* Initialise dataflow analysis. */
2329 df = xcalloc (1, sizeof (struct df));
2331 /* Squirrel away a global for debugging. */
2338 /* Start queuing refs. */
2343 df->def_id_save = df->def_id;
2344 df->use_id_save = df->use_id;
2345 /* ???? Perhaps we should save current obstack state so that we can
2351 /* Process queued refs. */
2353 df_refs_process (df)
/* Links every ref queued since the last df_refs_queue (the saved-id
   watermarks) onto its insn's def/use lists.  */
2358 /* Build new insn-def chains. */
2359 for (i = df->def_id_save; i != df->def_id; i++)
2361 struct ref *def = df->defs[i];
2362 unsigned int uid = DF_REF_INSN_UID (def);
2364 /* Add def to head of def list for INSN. */
2366 = df_link_create (def, df->insns[uid].defs);
2369 /* Build new insn-use chains. */
2370 for (i = df->use_id_save; i != df->use_id; i++)
2372 struct ref *use = df->uses[i];
2373 unsigned int uid = DF_REF_INSN_UID (use);
2375 /* Add use to head of use list for INSN. */
2377 = df_link_create (use, df->insns[uid].uses);
2383 /* Update refs for basic block BB. */
2385 df_bb_refs_update (df, bb)
2392 /* While we have to scan the chain of insns for this BB, we don't
2393 need to allocate and queue a long chain of BB/INSN pairs. Using
2394 a bitmap for insns_modified saves memory and avoids queuing
2397 for (insn = bb->head; ; insn = NEXT_INSN (insn))
2401 uid = INSN_UID (insn);
2403 if (bitmap_bit_p (df->insns_modified, uid))
2405 /* Delete any allocated refs of this insn. MPH, FIXME. */
2406 df_insn_refs_unlink (df, bb, insn);
2408 /* Scan the insn for refs. */
2409 df_insn_refs_record (df, bb, insn);
/* Mark the insn clean again once rescanned. */
2412 bitmap_clear_bit (df->insns_modified, uid);
2415 if (insn == bb->end)
2422 /* Process all the modified/deleted insns that were queued. */
/* (Definition header elided from this excerpt; this is df_refs_update
   per the caller in df_analyse_1.)  Grows the reg table if new pseudos
   appeared, rescans each modified block, then links the new refs.  */
2430 if ((unsigned int)max_reg_num () >= df->reg_size)
2431 df_reg_table_realloc (df, 0);
2435 FOR_EACH_BB_IN_BITMAP (df->bbs_modified, 0, bb,
2437 count += df_bb_refs_update (df, bb);
2440 df_refs_process (df);
2445 /* Return non-zero if any of the requested blocks in the bitmap
2446 BLOCKS have been modified. */
2448 df_modified_p (df, blocks)
/* BLOCKS may be 0 (check everything) or (bitmap) -1 (modified set). */
2455 for (j = 0; j < df->n_bbs; j++)
2456 if (bitmap_bit_p (df->bbs_modified, j)
2457 && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, j)))
2467 /* Analyse dataflow info for the basic blocks specified by the bitmap
2468 BLOCKS, or for the whole CFG if BLOCKS is zero, or just for the
2469 modified blocks if BLOCKS is -1. */
2471 df_analyse (df, blocks, flags)
2478 /* We could deal with additional basic blocks being created by
2479 rescanning everything again. */
2480 if (df->n_bbs && df->n_bbs != (unsigned int)n_basic_blocks)
2483 update = df_modified_p (df, blocks);
2484 if (update || (flags != df->flags))
2490 /* Recompute everything from scratch. */
2493 /* Allocate and initialise data structures. */
2494 df_alloc (df, max_reg_num ());
2495 df_analyse_1 (df, 0, flags, 0);
/* Incremental path: restrict to modified blocks when asked. */
2500 if (blocks == (bitmap) -1)
2501 blocks = df->bbs_modified;
2506 df_analyse_1 (df, blocks, flags, 1);
2507 bitmap_zero (df->bbs_modified);
2514 /* Free all the dataflow info and the DF structure. */
2524 /* Unlink INSN from its reference information. */
2526 df_insn_refs_unlink (df, bb, insn)
2528 basic_block bb ATTRIBUTE_UNUSED;
2531 struct df_link *link;
2534 uid = INSN_UID (insn);
2536 /* Unlink all refs defined by this insn. */
2537 for (link = df->insns[uid].defs; link; link = link->next)
2538 df_def_unlink (df, link->ref);
2540 /* Unlink all refs used by this insn. */
2541 for (link = df->insns[uid].uses; link; link = link->next)
2542 df_use_unlink (df, link->ref);
/* Drop the insn's (now stale) def/use list heads. */
2544 df->insns[uid].defs = 0;
2545 df->insns[uid].uses = 0;
2550 /* Unlink all the insns within BB from their reference information. */
2552 df_bb_refs_unlink (df, bb)
2558 /* Scan the block an insn at a time from beginning to end. */
2559 for (insn = bb->head; ; insn = NEXT_INSN (insn))
2563 /* Unlink refs for INSN. */
2564 df_insn_refs_unlink (df, bb, insn);
2566 if (insn == bb->end)
2572 /* Unlink all the refs in the basic blocks specified by BLOCKS.
2573 Not currently used. */
2575 df_refs_unlink (df, blocks)
/* When BLOCKS is supplied only those blocks are unlinked; the second
   call below sits in the (elided) else branch covering all blocks. */
2583 FOR_EACH_BB_IN_BITMAP (blocks, 0, bb,
2585 df_bb_refs_unlink (df, bb);
2592 df_bb_refs_unlink (df, bb);
2598 /* Functions to modify insns. */
2601 /* Delete INSN and all its reference information. */
2603 df_insn_delete (df, bb, insn)
2605 basic_block bb ATTRIBUTE_UNUSED;
2608 /* If the insn is a jump, we should perhaps call delete_insn to
2609 handle the JUMP_LABEL? */
2611 /* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label. */
2612 if (insn == bb->head)
2615 /* Delete the insn. */
/* Queue the insn for rescanning so stale refs are dropped. */
2618 df_insn_modify (df, bb, insn);
2620 return NEXT_INSN (insn);
2624 /* Mark that INSN within BB may have changed (created/modified/deleted).
2625 This may be called multiple times for the same insn. There is no
2626 harm calling this function if the insn wasn't changed; it will just
2627 slow down the rescanning of refs. */
2629 df_insn_modify (df, bb, insn)
2636 uid = INSN_UID (insn);
/* New insns may have UIDs beyond the current table; grow lazily. */
2638 if (uid >= df->insn_size)
2639 df_insn_table_realloc (df, 0);
2641 bitmap_set_bit (df->bbs_modified, bb->index);
2642 bitmap_set_bit (df->insns_modified, uid);
2645 /* For incremental updating on the fly, perhaps we could make a copy
2646 of all the refs of the original insn and turn them into
2647 anti-refs. When df_refs_update finds these anti-refs, it annihilates
2648 the original refs. If validate_change fails then these anti-refs
2649 will just get ignored. */
2655 typedef struct replace_args
2664 /* Replace mem pointed to by PX with its associated pseudo register.
2665 DATA is actually a pointer to a structure describing the
2666 instruction currently being scanned and the MEM we are currently
/* for_each_rtx callback: returns via the (elided) return statements to
   continue, skip a subtree, or stop traversal.  */
2669 df_rtx_mem_replace (px, data)
2673 replace_args *args = (replace_args *) data;
2676 if (mem == NULL_RTX)
2679 switch (GET_CODE (mem))
2685 /* We're not interested in the MEM associated with a
2686 CONST_DOUBLE, so there's no need to traverse into one. */
2690 /* This is not a MEM. */
2694 if (!rtx_equal_p (args->match, mem))
2695 /* This is not the MEM we are currently replacing. */
2698 /* Actually replace the MEM. */
2699 validate_change (args->insn, px, args->replacement, 1);
/* Replace every occurrence of MEM in INSN with REG and mark the insn
   modified so its refs are rescanned on the next analysis. */
2707 df_insn_mem_replace (df, bb, insn, mem, reg)
2718 args.replacement = reg;
2721 /* Search and replace all matching mems within insn. */
2722 for_each_rtx (&insn, df_rtx_mem_replace, &args);
2725 df_insn_modify (df, bb, insn);
2727 /* ???? FIXME. We may have a new def or one or more new uses of REG
2728 in INSN. REG should be a new pseudo so it won't affect the
2729 dataflow information that we currently have. We should add
2730 the new uses and defs to INSN and then recreate the chains
2731 when df_analyse is called. */
2732 return args.modified;
2736 /* Replace one register with another. Called through for_each_rtx; PX
2737 points to the rtx being scanned. DATA is actually a pointer to a
2738 structure of arguments. */
2740 df_rtx_reg_replace (px, data)
2745 replace_args *args = (replace_args *) data;
/* Pointer identity is intentional here: only the exact rtx object
   being tracked is replaced, not every equal REG. */
2750 if (x == args->match)
2752 validate_change (args->insn, px, args->replacement, 1);
2760 /* Replace the reg within every ref on CHAIN that is within the set
2761 BLOCKS of basic blocks with NEWREG. Also update the regs within
2764 df_refs_reg_replace (df, blocks, chain, oldreg, newreg)
2767 struct df_link *chain;
2771 struct df_link *link;
/* BLOCKS == 0 means the whole function. */
2775 blocks = df->all_blocks;
2777 args.match = oldreg;
2778 args.replacement = newreg;
2781 for (link = chain; link; link = link->next)
2783 struct ref *ref = link->ref;
2784 rtx insn = DF_REF_INSN (ref);
2786 if (! INSN_P (insn))
2789 if (bitmap_bit_p (blocks, DF_REF_BBNO (ref)))
2791 df_ref_reg_replace (df, ref, oldreg, newreg);
2793 /* Replace occurrences of the reg within the REG_NOTES. */
/* Only walk the notes once per insn: skip if the next ref on the
   chain belongs to the same insn. */
2794 if ((! link->next || DF_REF_INSN (ref)
2795 != DF_REF_INSN (link->next->ref))
2796 && REG_NOTES (insn))
2799 for_each_rtx (&REG_NOTES (insn), df_rtx_reg_replace, &args);
2804 /* Temporary check to ensure that we have a grip on which
2805 regs should be replaced. */
2812 /* Replace all occurrences of register OLDREG with register NEWREG in
2813 blocks defined by bitmap BLOCKS. This also replaces occurrences of
2814 OLDREG in the REG_NOTES but only for insns containing OLDREG. This
2815 routine expects the reg-use and reg-def chains to be valid. */
2817 df_reg_replace (df, blocks, oldreg, newreg)
2823 unsigned int oldregno = REGNO (oldreg);
/* Both the def chain and the use chain of OLDREG must be walked. */
2825 df_refs_reg_replace (df, blocks, df->regs[oldregno].defs, oldreg, newreg);
2826 df_refs_reg_replace (df, blocks, df->regs[oldregno].uses, oldreg, newreg);
2831 /* Try replacing the reg within REF with NEWREG. Do not modify
2832 def-use/use-def chains. */
2834 df_ref_reg_replace (df, ref, oldreg, newreg)
2840 /* Check that insn was deleted by being converted into a NOTE. If
2841 so ignore this insn. */
2842 if (! INSN_P (DF_REF_INSN (ref)))
/* When OLDREG is given it must match the ref's current register. */
2845 if (oldreg && oldreg != DF_REF_REG (ref))
2848 if (! validate_change (DF_REF_INSN (ref), DF_REF_LOC (ref), newreg, 1))
/* Queue the insn for rescanning since its refs changed. */
2851 df_insn_modify (df, DF_REF_BB (ref), DF_REF_INSN (ref));
/* Move the first def of REGNO in DEF_INSN to USE_INSN, retiring the
   last use of REGNO in USE_INSN.  Both insns are within BB.  The def's
   chain node is relinked from DEF_INSN's insn-def list onto USE_INSN's,
   and reinserted at the head of the register's reg-def chain.  */
2857 df_bb_def_use_swap (df, bb, def_insn, use_insn, regno)
2868 struct df_link *link;
2870 def = df_bb_insn_regno_first_def_find (df, bb, def_insn, regno);
2874 use = df_bb_insn_regno_last_use_find (df, bb, use_insn, regno);
2878 /* The USE no longer exists. */
2879 use_uid = INSN_UID (use_insn);
2880 df_use_unlink (df, use);
2881 df_ref_unlink (&df->insns[use_uid].uses, use);
2883 /* The DEF requires shifting so remove it from DEF_INSN
2884 and add it to USE_INSN by reusing LINK. */
2885 def_uid = INSN_UID (def_insn);
2886 link = df_ref_unlink (&df->insns[def_uid].defs, def);
2888 link->next = df->insns[use_uid].defs;
2889 df->insns[use_uid].defs = link;
2892 link = df_ref_unlink (&df->regs[regno].defs, def);
2894 link->next = df->regs[regno].defs;
/* Fix: the node unlinked from the reg-def chain above must be put back
   on DF->REGS, not DF->INSNS -- indexing the insn table by a register
   number wrote to an unrelated (possibly out-of-bounds) slot.  */
2895 df->regs[regno].defs = link;
2898 DF_REF_INSN (def) = use_insn;
2903 /* Record df between FIRST_INSN and LAST_INSN inclusive. All new
2904 insns must be processed by this routine. */
2906 df_insns_modify (df, bb, first_insn, last_insn)
2914 for (insn = first_insn; ; insn = NEXT_INSN (insn))
2918 /* A non-const call should not have slipped through the net. If
2919 it does, we need to create a new basic block. Ouch. The
2920 same applies for a label. */
2921 if ((GET_CODE (insn) == CALL_INSN
2922 && ! CONST_OR_PURE_CALL_P (insn))
2923 || GET_CODE (insn) == CODE_LABEL)
2926 uid = INSN_UID (insn);
/* Grow the insn table for freshly emitted insns. */
2928 if (uid >= df->insn_size)
2929 df_insn_table_realloc (df, 0);
2931 df_insn_modify (df, bb, insn);
2933 if (insn == last_insn)
2939 /* Emit PATTERN before INSN within BB. */
2941 df_pattern_emit_before (df, pattern, bb, insn)
2942 struct df *df ATTRIBUTE_UNUSED;
2948 rtx prev_insn = PREV_INSN (insn);
2950 /* We should not be inserting before the start of the block. */
2951 if (insn == bb->head)
2953 ret_insn = emit_insn_before (pattern, insn);
2954 if (ret_insn == insn)
/* Everything between the old predecessor and RET_INSN is new; queue
   all of it for rescanning. */
2957 df_insns_modify (df, bb, NEXT_INSN (prev_insn), ret_insn);
2962 /* Emit PATTERN after INSN within BB. */
2964 df_pattern_emit_after (df, pattern, bb, insn)
2972 ret_insn = emit_insn_after (pattern, insn);
2973 if (ret_insn == insn)
/* Queue all newly emitted insns for rescanning. */
2976 df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
2981 /* Emit jump PATTERN after INSN within BB. */
2983 df_jump_pattern_emit_after (df, pattern, bb, insn)
2991 ret_insn = emit_jump_insn_after (pattern, insn);
2992 if (ret_insn == insn)
/* Queue all newly emitted insns for rescanning. */
2995 df_insns_modify (df, bb, NEXT_INSN (insn), ret_insn);
3000 /* Move INSN within BB before BEFORE_INSN within BEFORE_BB.
3002 This function should only be used to move loop invariant insns
3003 out of a loop where it has been proven that the def-use info
3004 will still be valid. */
3006 df_insn_move_before (df, bb, insn, before_bb, before_insn)
3010 basic_block before_bb;
3013 struct df_link *link;
3017 return df_pattern_emit_before (df, insn, before_bb, before_insn);
3019 uid = INSN_UID (insn);
3021 /* Change bb for all df defined and used by this insn. */
3022 for (link = df->insns[uid].defs; link; link = link->next)
3023 DF_REF_BB (link->ref) = before_bb;
3024 for (link = df->insns[uid].uses; link; link = link->next)
3025 DF_REF_BB (link->ref) = before_bb;
3027 /* The lifetimes of the registers used in this insn will be reduced
3028 while the lifetimes of the registers defined in this insn
3029 are likely to be increased. */
3031 /* ???? Perhaps all the insns moved should be stored on a list
3032 which df_analyse removes when it recalculates data flow. */
3034 return emit_insn_before (insn, before_insn);
3037 /* Functions to query dataflow information. */
3041 df_insn_regno_def_p (df, bb, insn, regno)
3043 basic_block bb ATTRIBUTE_UNUSED;
3048 struct df_link *link;
3050 uid = INSN_UID (insn);
3052 for (link = df->insns[uid].defs; link; link = link->next)
3054 struct ref *def = link->ref;
3056 if (DF_REF_REGNO (def) == regno)
3065 df_def_dominates_all_uses_p (df, def)
3066 struct df *df ATTRIBUTE_UNUSED;
3069 struct df_link *du_link;
3071 /* Follow def-use chain to find all the uses of this def. */
3072 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
3074 struct ref *use = du_link->ref;
3075 struct df_link *ud_link;
3077 /* Follow use-def chain to check all the defs for this use. */
3078 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
3079 if (ud_link->ref != def)
3087 df_insn_dominates_all_uses_p (df, bb, insn)
3089 basic_block bb ATTRIBUTE_UNUSED;
3093 struct df_link *link;
3095 uid = INSN_UID (insn);
3097 for (link = df->insns[uid].defs; link; link = link->next)
3099 struct ref *def = link->ref;
3101 if (! df_def_dominates_all_uses_p (df, def))
3109 /* Return non-zero if all DF dominates all the uses within the bitmap
3112 df_def_dominates_uses_p (df, def, blocks)
3113 struct df *df ATTRIBUTE_UNUSED;
3117 struct df_link *du_link;
3119 /* Follow def-use chain to find all the uses of this def. */
3120 for (du_link = DF_REF_CHAIN (def); du_link; du_link = du_link->next)
3122 struct ref *use = du_link->ref;
3123 struct df_link *ud_link;
3125 /* Only worry about the uses within BLOCKS. For example,
3126 consider a register defined within a loop that is live at the
3128 if (bitmap_bit_p (blocks, DF_REF_BBNO (use)))
3130 /* Follow use-def chain to check all the defs for this use. */
3131 for (ud_link = DF_REF_CHAIN (use); ud_link; ud_link = ud_link->next)
3132 if (ud_link->ref != def)
3140 /* Return non-zero if all the defs of INSN within BB dominates
3141 all the corresponding uses. */
3143 df_insn_dominates_uses_p (df, bb, insn, blocks)
3145 basic_block bb ATTRIBUTE_UNUSED;
3150 struct df_link *link;
3152 uid = INSN_UID (insn);
3154 for (link = df->insns[uid].defs; link; link = link->next)
3156 struct ref *def = link->ref;
3158 /* Only consider the defs within BLOCKS. */
3159 if (bitmap_bit_p (blocks, DF_REF_BBNO (def))
3160 && ! df_def_dominates_uses_p (df, def, blocks))
3167 /* Return the basic block that REG referenced in or NULL if referenced
3168 in multiple basic blocks. */
3170 df_regno_bb (df, regno)
3174 struct df_link *defs = df->regs[regno].defs;
3175 struct df_link *uses = df->regs[regno].uses;
3176 struct ref *def = defs ? defs->ref : 0;
3177 struct ref *use = uses ? uses->ref : 0;
3178 basic_block bb_def = def ? DF_REF_BB (def) : 0;
3179 basic_block bb_use = use ? DF_REF_BB (use) : 0;
3181 /* Compare blocks of first def and last use. ???? FIXME. What if
3182 the reg-def and reg-use lists are not correctly ordered. */
3183 return bb_def == bb_use ? bb_def : 0;
3187 /* Return non-zero if REG used in multiple basic blocks. */
3189 df_reg_global_p (df, reg)
3193 return df_regno_bb (df, REGNO (reg)) != 0;
3197 /* Return total lifetime (in insns) of REG. */
3199 df_reg_lifetime (df, reg)
3203 return df->regs[REGNO (reg)].lifetime;
3207 /* Return non-zero if REG live at start of BB. */
3209 df_bb_reg_live_start_p (df, bb, reg)
3210 struct df *df ATTRIBUTE_UNUSED;
3214 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3216 #ifdef ENABLE_CHECKING
3217 if (! bb_info->lr_in)
3221 return bitmap_bit_p (bb_info->lr_in, REGNO (reg));
3225 /* Return non-zero if REG live at end of BB. */
3227 df_bb_reg_live_end_p (df, bb, reg)
3228 struct df *df ATTRIBUTE_UNUSED;
3232 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3234 #ifdef ENABLE_CHECKING
3235 if (! bb_info->lr_in)
3239 return bitmap_bit_p (bb_info->lr_out, REGNO (reg));
3243 /* Return -1 if life of REG1 before life of REG2, 1 if life of REG1
3244 after life of REG2, or 0, if the lives overlap. */
3246 df_bb_regs_lives_compare (df, bb, reg1, reg2)
3252 unsigned int regno1 = REGNO (reg1);
3253 unsigned int regno2 = REGNO (reg2);
3260 /* The regs must be local to BB. */
3261 if (df_regno_bb (df, regno1) != bb
3262 || df_regno_bb (df, regno2) != bb)
3265 def2 = df_bb_regno_first_def_find (df, bb, regno2);
3266 use1 = df_bb_regno_last_use_find (df, bb, regno1);
3268 if (DF_INSN_LUID (df, DF_REF_INSN (def2))
3269 > DF_INSN_LUID (df, DF_REF_INSN (use1)))
3272 def1 = df_bb_regno_first_def_find (df, bb, regno1);
3273 use2 = df_bb_regno_last_use_find (df, bb, regno2);
3275 if (DF_INSN_LUID (df, DF_REF_INSN (def1))
3276 > DF_INSN_LUID (df, DF_REF_INSN (use2)))
3283 /* Return last use of REGNO within BB. */
3285 df_bb_regno_last_use_find (df, bb, regno)
3287 basic_block bb ATTRIBUTE_UNUSED;
3290 struct df_link *link;
3292 /* This assumes that the reg-use list is ordered such that for any
3293 BB, the last use is found first. However, since the BBs are not
3294 ordered, the first use in the chain is not necessarily the last
3295 use in the function. */
3296 for (link = df->regs[regno].uses; link; link = link->next)
3298 struct ref *use = link->ref;
3300 if (DF_REF_BB (use) == bb)
3307 /* Return first def of REGNO within BB. */
3309 df_bb_regno_first_def_find (df, bb, regno)
3311 basic_block bb ATTRIBUTE_UNUSED;
3314 struct df_link *link;
3316 /* This assumes that the reg-def list is ordered such that for any
3317 BB, the first def is found first. However, since the BBs are not
3318 ordered, the first def in the chain is not necessarily the first
3319 def in the function. */
3320 for (link = df->regs[regno].defs; link; link = link->next)
3322 struct ref *def = link->ref;
3324 if (DF_REF_BB (def) == bb)
3331 /* Return first use of REGNO inside INSN within BB. */
3333 df_bb_insn_regno_last_use_find (df, bb, insn, regno)
3335 basic_block bb ATTRIBUTE_UNUSED;
3340 struct df_link *link;
3342 uid = INSN_UID (insn);
3344 for (link = df->insns[uid].uses; link; link = link->next)
3346 struct ref *use = link->ref;
3348 if (DF_REF_REGNO (use) == regno)
3356 /* Return first def of REGNO inside INSN within BB. */
3358 df_bb_insn_regno_first_def_find (df, bb, insn, regno)
3360 basic_block bb ATTRIBUTE_UNUSED;
3365 struct df_link *link;
3367 uid = INSN_UID (insn);
3369 for (link = df->insns[uid].defs; link; link = link->next)
3371 struct ref *def = link->ref;
3373 if (DF_REF_REGNO (def) == regno)
3381 /* Return insn using REG if the BB contains only a single
3382 use and def of REG. */
3384 df_bb_single_def_use_insn_find (df, bb, insn, reg)
3392 struct df_link *du_link;
3394 def = df_bb_insn_regno_first_def_find (df, bb, insn, REGNO (reg));
3399 du_link = DF_REF_CHAIN (def);
3406 /* Check if def is dead. */
3410 /* Check for multiple uses. */
3414 return DF_REF_INSN (use);
3417 /* Functions for debugging/dumping dataflow information. */
3420 /* Dump a def-use or use-def chain for REF to FILE. */
3422 df_chain_dump (link, file)
3423 struct df_link *link;
3426 fprintf (file, "{ ");
3427 for (; link; link = link->next)
3429 fprintf (file, "%c%d ",
3430 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3431 DF_REF_ID (link->ref));
3433 fprintf (file, "}");
3437 df_chain_dump_regno (link, file)
3438 struct df_link *link;
3441 fprintf (file, "{ ");
3442 for (; link; link = link->next)
3444 fprintf (file, "%c%d(%d) ",
3445 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
3446 DF_REF_ID (link->ref),
3447 DF_REF_REGNO (link->ref));
3449 fprintf (file, "}");
3452 /* Dump dataflow info. */
3454 df_dump (df, flags, file)
3465 fprintf (file, "\nDataflow summary:\n");
3466 fprintf (file, "n_regs = %d, n_defs = %d, n_uses = %d, n_bbs = %d\n",
3467 df->n_regs, df->n_defs, df->n_uses, df->n_bbs);
3471 fprintf (file, "Reaching defs:\n");
3472 for (i = 0; i < df->n_bbs; i++)
3474 basic_block bb = BASIC_BLOCK (i);
3475 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3477 if (! bb_info->rd_in)
3480 fprintf (file, "bb %d in \t", i);
3481 dump_bitmap (file, bb_info->rd_in);
3482 fprintf (file, "bb %d gen \t", i);
3483 dump_bitmap (file, bb_info->rd_gen);
3484 fprintf (file, "bb %d kill\t", i);
3485 dump_bitmap (file, bb_info->rd_kill);
3486 fprintf (file, "bb %d out \t", i);
3487 dump_bitmap (file, bb_info->rd_out);
3491 if (flags & DF_UD_CHAIN)
3493 fprintf (file, "Use-def chains:\n");
3494 for (j = 0; j < df->n_defs; j++)
3498 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3499 j, DF_REF_BBNO (df->defs[j]),
3500 DF_INSN_LUID (df, DF_REF_INSN (df->defs[j])),
3501 DF_REF_INSN_UID (df->defs[j]),
3502 DF_REF_REGNO (df->defs[j]));
3503 df_chain_dump (DF_REF_CHAIN (df->defs[j]), file);
3504 fprintf (file, "\n");
3511 fprintf (file, "Reaching uses:\n");
3512 for (i = 0; i < df->n_bbs; i++)
3514 basic_block bb = BASIC_BLOCK (i);
3515 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3517 if (! bb_info->ru_in)
3520 fprintf (file, "bb %d in \t", i);
3521 dump_bitmap (file, bb_info->ru_in);
3522 fprintf (file, "bb %d gen \t", i);
3523 dump_bitmap (file, bb_info->ru_gen);
3524 fprintf (file, "bb %d kill\t", i);
3525 dump_bitmap (file, bb_info->ru_kill);
3526 fprintf (file, "bb %d out \t", i);
3527 dump_bitmap (file, bb_info->ru_out);
3531 if (flags & DF_DU_CHAIN)
3533 fprintf (file, "Def-use chains:\n");
3534 for (j = 0; j < df->n_uses; j++)
3538 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3539 j, DF_REF_BBNO (df->uses[j]),
3540 DF_INSN_LUID (df, DF_REF_INSN (df->uses[j])),
3541 DF_REF_INSN_UID (df->uses[j]),
3542 DF_REF_REGNO (df->uses[j]));
3543 df_chain_dump (DF_REF_CHAIN (df->uses[j]), file);
3544 fprintf (file, "\n");
3551 fprintf (file, "Live regs:\n");
3552 for (i = 0; i < df->n_bbs; i++)
3554 basic_block bb = BASIC_BLOCK (i);
3555 struct bb_info *bb_info = DF_BB_INFO (df, bb);
3557 if (! bb_info->lr_in)
3560 fprintf (file, "bb %d in \t", i);
3561 dump_bitmap (file, bb_info->lr_in);
3562 fprintf (file, "bb %d use \t", i);
3563 dump_bitmap (file, bb_info->lr_use);
3564 fprintf (file, "bb %d def \t", i);
3565 dump_bitmap (file, bb_info->lr_def);
3566 fprintf (file, "bb %d out \t", i);
3567 dump_bitmap (file, bb_info->lr_out);
3571 if (flags & (DF_REG_INFO | DF_RD_CHAIN | DF_RU_CHAIN))
3573 struct reg_info *reg_info = df->regs;
3575 fprintf (file, "Register info:\n");
3576 for (j = 0; j < df->n_regs; j++)
3578 if (((flags & DF_REG_INFO)
3579 && (reg_info[j].n_uses || reg_info[j].n_defs))
3580 || ((flags & DF_RD_CHAIN) && reg_info[j].defs)
3581 || ((flags & DF_RU_CHAIN) && reg_info[j].uses))
3583 fprintf (file, "reg %d", j);
3584 if ((flags & DF_RD_CHAIN) && (flags & DF_RU_CHAIN))
3586 basic_block bb = df_regno_bb (df, j);
3589 fprintf (file, " bb %d", bb->index);
3591 fprintf (file, " bb ?");
3593 if (flags & DF_REG_INFO)
3595 fprintf (file, " life %d", reg_info[j].lifetime);
3598 if ((flags & DF_REG_INFO) || (flags & DF_RD_CHAIN))
3600 fprintf (file, " defs ");
3601 if (flags & DF_REG_INFO)
3602 fprintf (file, "%d ", reg_info[j].n_defs);
3603 if (flags & DF_RD_CHAIN)
3604 df_chain_dump (reg_info[j].defs, file);
3607 if ((flags & DF_REG_INFO) || (flags & DF_RU_CHAIN))
3609 fprintf (file, " uses ");
3610 if (flags & DF_REG_INFO)
3611 fprintf (file, "%d ", reg_info[j].n_uses);
3612 if (flags & DF_RU_CHAIN)
3613 df_chain_dump (reg_info[j].uses, file);
3616 fprintf (file, "\n");
3620 fprintf (file, "\n");
3625 df_insn_debug (df, insn, file)
3633 uid = INSN_UID (insn);
3634 if (uid >= df->insn_size)
3637 if (df->insns[uid].defs)
3638 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3639 else if (df->insns[uid].uses)
3640 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3644 fprintf (file, "insn %d bb %d luid %d defs ",
3645 uid, bbi, DF_INSN_LUID (df, insn));
3646 df_chain_dump (df->insns[uid].defs, file);
3647 fprintf (file, " uses ");
3648 df_chain_dump (df->insns[uid].uses, file);
3649 fprintf (file, "\n");
3653 df_insn_debug_regno (df, insn, file)
3661 uid = INSN_UID (insn);
3662 if (uid >= df->insn_size)
3665 if (df->insns[uid].defs)
3666 bbi = DF_REF_BBNO (df->insns[uid].defs->ref);
3667 else if (df->insns[uid].uses)
3668 bbi = DF_REF_BBNO (df->insns[uid].uses->ref);
3672 fprintf (file, "insn %d bb %d luid %d defs ",
3673 uid, bbi, DF_INSN_LUID (df, insn));
3674 df_chain_dump_regno (df->insns[uid].defs, file);
3675 fprintf (file, " uses ");
3676 df_chain_dump_regno (df->insns[uid].uses, file);
3677 fprintf (file, "\n");
3681 df_regno_debug (df, regno, file)
3686 if (regno >= df->reg_size)
3689 fprintf (file, "reg %d life %d defs ",
3690 regno, df->regs[regno].lifetime);
3691 df_chain_dump (df->regs[regno].defs, file);
3692 fprintf (file, " uses ");
3693 df_chain_dump (df->regs[regno].uses, file);
3694 fprintf (file, "\n");
/* Dump a single ref REF (its id, register, block, luid, insn, and
   chain) to FILE.  */
void
df_ref_debug (df, ref, file)
     struct df *df;
     struct ref *ref;
     FILE *file;
{
  fprintf (file, "%c%d ",
	   DF_REF_REG_DEF_P (ref) ? 'd' : 'u',
	   DF_REF_ID (ref));
  fprintf (file, "reg %d bb %d luid %d insn %d chain ",
	   DF_REF_REGNO (ref),
	   DF_REF_BBNO (ref),
	   DF_INSN_LUID (df, DF_REF_INSN (ref)),
	   INSN_UID (DF_REF_INSN (ref)));
  df_chain_dump (DF_REF_CHAIN (ref), file);
  fprintf (file, "\n");
}
3718 debug_df_insn (insn)
3721 df_insn_debug (ddf, insn, stderr);
3730 df_regno_debug (ddf, REGNO (reg), stderr);
3735 debug_df_regno (regno)
3738 df_regno_debug (ddf, regno, stderr);
3746 df_ref_debug (ddf, ref, stderr);
3751 debug_df_defno (defno)
3754 df_ref_debug (ddf, ddf->defs[defno], stderr);
3759 debug_df_useno (defno)
3762 df_ref_debug (ddf, ddf->uses[defno], stderr);
/* Debugger entry point: dump the def-use/use-def chain LINK to
   stderr, followed by a newline.  */
3767 debug_df_chain (link)
3768 struct df_link *link;
3770 df_chain_dump (link, stderr);
3771 fputc ('\n', stderr);