1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 2, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to the Free
23 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
28 #include "coretypes.h"
32 #include "insn-config.h"
37 #include "alloc-pool.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
49 #define REG_DEAD_DEBUGGING
52 #define DF_SPARSE_THRESHOLD 32
54 static bitmap seen_in_block = NULL;
55 static bitmap seen_in_insn = NULL;
56 static void df_ri_dump (struct dataflow *, FILE *);
59 /*----------------------------------------------------------------------------
60 Public functions access functions for the dataflow problems.
61 ----------------------------------------------------------------------------*/
63 /* Create a du or ud chain from SRC to DST and link it into SRC. */
/* NOTE(review): interior lines of this listing are elided.  The visible
   fragment allocates a df_link from the dataflow's block pool and makes it
   the new head of SRC's chain; presumably the elided lines set link->next
   to the old head and link->ref to DST, and return LINK — verify against
   the full source.  The doubled ";;" after pool_alloc is harmless but
   should be cleaned up.  */
66 df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
68 struct df_link *head = DF_REF_CHAIN (src);
69 struct df_link *link = pool_alloc (dflow->block_pool);;
71 DF_REF_CHAIN (src) = link;
78 /* Delete a du or ud chain for REF. If LINK is NULL, delete all
79 chains for ref and check to see if the reverse chains can also be
80 deleted. If LINK is not NULL it must be a link off of ref. In
81 this case, the other end is not deleted. */
/* NOTE(review): several interior lines (loop/branch scaffolding) are
   elided from this listing; the visible fragment shows the two unlink
   cases (head vs. interior element), returning the link to the pool,
   and the recursive walk that deletes the reverse-direction chains.  */
84 df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
86 struct df_link *chain = DF_REF_CHAIN (ref);
89 /* Link was the first element in the chain. */
91 DF_REF_CHAIN (ref) = link->next;
94 /* Link is an internal element in the chain. */
95 struct df_link *prev = chain;
/* Splice CHAIN (the matched element) out of the list.  */
100 prev->next = chain->next;
107 pool_free (dflow->block_pool, link);
111 /* If chain is NULL here, it was because of a recursive call
112 when the other flavor of chains was not built. Just run thru
113 the entire chain calling the other side and then deleting the
117 struct df_link *next = chain->next;
118 /* Delete the other side if it exists. */
119 df_chain_unlink (dflow, chain->ref, chain);
126 /* Copy the du or ud chain starting at FROM_REF and attach it to
/* NOTE(review): the rest of the header comment and the loop scaffolding
   are elided; the visible lines create one new chain link per element of
   FROM_REF's list, attached to TO_REF.  */
130 df_chain_copy (struct dataflow *dflow,
131 struct df_ref *to_ref,
132 struct df_link *from_ref)
136 df_chain_create (dflow, to_ref, from_ref->ref);
137 from_ref = from_ref->next;
142 /* Get the live in set for BB no matter what problem happens to be
/* Prefers the most precise live-in information available: UREC
   (RA_LIVE_IN) if present, then UR (LIVE_IN), else falls back to the
   LR problem's upward-exposed live-in.  LR must exist (asserted).  */
146 df_get_live_in (struct df *df, basic_block bb)
148 gcc_assert (df->problems_by_index[DF_LR]);
150 if (df->problems_by_index[DF_UREC])
151 return DF_RA_LIVE_IN (df, bb);
152 else if (df->problems_by_index[DF_UR])
153 return DF_LIVE_IN (df, bb);
155 return DF_UPWARD_LIVE_IN (df, bb);
159 /* Get the live out set for BB no matter what problem happens to be
/* Mirror of df_get_live_in: UREC > UR > LR fallback, LR required.  */
163 df_get_live_out (struct df *df, basic_block bb)
165 gcc_assert (df->problems_by_index[DF_LR]);
167 if (df->problems_by_index[DF_UREC])
168 return DF_RA_LIVE_OUT (df, bb);
169 else if (df->problems_by_index[DF_UR])
170 return DF_LIVE_OUT (df, bb);
172 return DF_UPWARD_LIVE_OUT (df, bb);
176 /*----------------------------------------------------------------------------
178 ----------------------------------------------------------------------------*/
180 /* Generic versions to get the void* version of the block info. Only
181 used inside the problem instance vectors. */
183 /* Grow the bb_info array. */
/* Grows by an extra 25% beyond last_basic_block + 1 to amortize future
   reallocations, and zero-fills the newly added tail so unseen blocks
   read as "no info".  */
186 df_grow_bb_info (struct dataflow *dflow)
188 unsigned int new_size = last_basic_block + 1;
189 if (dflow->block_info_size < new_size)
191 new_size += new_size / 4;
192 dflow->block_info = xrealloc (dflow->block_info,
193 new_size *sizeof (void*));
194 memset (dflow->block_info + dflow->block_info_size, 0,
195 (new_size - dflow->block_info_size) *sizeof (void *));
196 dflow->block_info_size = new_size;
200 /* Dump a def-use or use-def chain for REF to FILE. */
/* Format per element: 'd' or 'u' tag, ref id, containing bb, and the
   insn uid (-1 for artificial refs with no insn).  */
203 df_chain_dump (struct df_link *link, FILE *file)
205 fprintf (file, "{ ");
206 for (; link; link = link->next)
208 fprintf (file, "%c%d(bb %d insn %d) ",
209 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
210 DF_REF_ID (link->ref),
211 DF_REF_BBNO (link->ref),
212 DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
218 /* Print some basic block info as part of df_dump. */
/* Output shape: "( preds )->[bb]->( succs )".  The declarations of the
   edge iterator (e, ei) are elided from this listing.  */
221 df_print_bb_index (basic_block bb, FILE *file)
226 fprintf (file, "( ");
227 FOR_EACH_EDGE (e, ei, bb->preds)
229 basic_block pred = e->src;
230 fprintf (file, "%d ", pred->index);
232 fprintf (file, ")->[%d]->( ", bb->index);
233 FOR_EACH_EDGE (e, ei, bb->succs)
235 basic_block succ = e->dest;
236 fprintf (file, "%d ", succ->index);
238 fprintf (file, ")\n");
242 /* Return the set of reference ids in CHAIN, caching the result in *BMAP. */
/* NOTE(review): lazily builds (and presumably caches in MAPS[REGNO] —
   the store back is elided from this listing) a bitmap of the COUNT
   consecutive ref ids starting at START.  The stray ";;" should be
   cleaned up when the full source is available.  */
245 df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
247 bitmap ids = maps[regno];
251 unsigned int end = start + count;;
252 ids = BITMAP_ALLOC (NULL);
254 for (i = start; i < end; i++)
255 bitmap_set_bit (ids, i);
261 /* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
/* NOTE(review): the function headers of this allocate/free pair are
   elided from this listing; the visible lines only allocate and release
   the two file-static scratch bitmaps used by the local-compute
   routines below.  */
267 seen_in_block = BITMAP_ALLOC (NULL);
268 seen_in_insn = BITMAP_ALLOC (NULL);
275 BITMAP_FREE (seen_in_block);
276 BITMAP_FREE (seen_in_insn);
281 /*----------------------------------------------------------------------------
284 Find the locations in the function where each use site for a pseudo
287 ----------------------------------------------------------------------------*/
/* Per-instance data for the reaching-uses (RU) problem.  */
289 struct df_ru_problem_data
291 bitmap *use_sites; /* Bitmap of uses for each pseudo. */
292 unsigned int use_sites_size; /* Size of use_sites. */
293 /* The set of defs to regs invalidated by call. */
294 bitmap sparse_invalidated_by_call;
295 /* The set of defs to regs invalidated by call for ru. */
296 bitmap dense_invalidated_by_call;
299 /* Get basic block info. */
301 struct df_ru_bb_info *
302 df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
304 return (struct df_ru_bb_info *) dflow->block_info[index];
308 /* Set basic block info. */
311 df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
312 struct df_ru_bb_info *bb_info)
314 dflow->block_info[index] = bb_info;
318 /* Free basic block info. */
/* Releases all five per-block bitmaps, then returns the bb_info record
   itself to the block pool.  */
321 df_ru_free_bb_info (struct dataflow *dflow,
322 basic_block bb ATTRIBUTE_UNUSED,
325 struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
328 BITMAP_FREE (bb_info->kill);
329 BITMAP_FREE (bb_info->sparse_kill);
330 BITMAP_FREE (bb_info->gen);
331 BITMAP_FREE (bb_info->in);
332 BITMAP_FREE (bb_info->out);
333 pool_free (dflow->block_pool, bb_info);
338 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
339 not touched unless the block is new. */
/* NOTE(review): many interior lines are elided.  Two paths are visible:
   reuse (clear cached use_sites bitmaps, grow the array if max_reg_num
   grew, clear the invalidated-by-call sets) vs. first-time allocation
   of the problem_data; then per-block info is cleared or pool-allocated.
   in/out are only allocated for new blocks, matching the header comment.  */
342 df_ru_alloc (struct dataflow *dflow,
343 bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
346 unsigned int bb_index;
348 unsigned int reg_size = max_reg_num ();
350 if (!dflow->block_pool)
351 dflow->block_pool = create_alloc_pool ("df_ru_block pool",
352 sizeof (struct df_ru_bb_info), 50);
354 if (dflow->problem_data)
357 struct df_ru_problem_data *problem_data
358 = (struct df_ru_problem_data *) dflow->problem_data;
360 for (i = 0; i < problem_data->use_sites_size; i++)
362 bitmap bm = problem_data->use_sites[i];
366 problem_data->use_sites[i] = NULL;
370 if (problem_data->use_sites_size < reg_size)
372 problem_data->use_sites
373 = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
374 memset (problem_data->use_sites + problem_data->use_sites_size, 0,
375 (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
376 problem_data->use_sites_size = reg_size;
379 bitmap_clear (problem_data->sparse_invalidated_by_call);
380 bitmap_clear (problem_data->dense_invalidated_by_call);
384 struct df_ru_problem_data *problem_data = XNEW (struct df_ru_problem_data);
385 dflow->problem_data = problem_data;
387 problem_data->use_sites = XCNEWVEC (bitmap, reg_size);
388 problem_data->use_sites_size = reg_size;
389 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
390 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
393 df_grow_bb_info (dflow);
395 /* Because of the clustering of all def sites for the same pseudo,
396 we have to process all of the blocks before doing the
399 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
401 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
404 bitmap_clear (bb_info->kill);
405 bitmap_clear (bb_info->sparse_kill);
406 bitmap_clear (bb_info->gen);
410 bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
411 df_ru_set_bb_info (dflow, bb_index, bb_info);
412 bb_info->kill = BITMAP_ALLOC (NULL);
413 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
414 bb_info->gen = BITMAP_ALLOC (NULL);
415 bb_info->in = BITMAP_ALLOC (NULL);
416 bb_info->out = BITMAP_ALLOC (NULL);
422 /* Process a list of DEFs for df_ru_bb_local_compute. */
/* NOTE(review): the def-list walk scaffolding is elided.  For each def
   whose AT_TOP flag matches TOP_FLAG and which is not a partial def,
   the first def of REGNO in the insn contributes kill information:
   sparsely (just the regno) when the register has many uses, densely
   (the full use-id bitmap from df_ref_bitmap) otherwise.  */
425 df_ru_bb_local_compute_process_def (struct dataflow *dflow,
426 struct df_ru_bb_info *bb_info,
428 enum df_ref_flags top_flag)
430 struct df *df = dflow->df;
433 if ((top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
434 /* If the def is to only part of the reg, it is as if it did
435 not happen, since some of the bits may get thru. */
436 && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
438 unsigned int regno = DF_REF_REGNO (def);
439 unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
440 unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
441 if (!bitmap_bit_p (seen_in_block, regno))
443 /* The first def for regno in the insn, causes the kill
444 info to be generated. Do not modify the gen set
445 because the only values in it are the uses from here
446 to the top of the block and this def does not effect
448 if (!bitmap_bit_p (seen_in_insn, regno))
450 if (n_uses > DF_SPARSE_THRESHOLD)
451 bitmap_set_bit (bb_info->sparse_kill, regno);
454 struct df_ru_problem_data * problem_data
455 = (struct df_ru_problem_data *)dflow->problem_data;
457 = df_ref_bitmap (problem_data->use_sites, regno,
459 bitmap_ior_into (bb_info->kill, uses);
462 bitmap_set_bit (seen_in_insn, regno);
470 /* Process a list of USEs for df_ru_bb_local_compute. */
/* NOTE(review): the use-list walk scaffolding is elided.  A use becomes
   upward-exposed (added to gen) only if no def of its regno was seen
   in an earlier insn of this block.  */
473 df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
475 enum df_ref_flags top_flag)
479 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
481 /* Add use to set of gens in this BB unless we have seen a
482 def in a previous instruction. */
483 unsigned int regno = DF_REF_REGNO (use);
484 if (!bitmap_bit_p (seen_in_block, regno))
485 bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
491 /* Compute local reaching use (upward exposed use) info for basic
492 block BB. USE_INFO->REGS[R] caches the set of uses for register R. */
/* NOTE(review): presumably the insn walk goes bottom-to-top (RU is a
   backward problem); the FOR_BB_INSNS direction macro alone does not
   show this — confirm against the full source.  Artificial AT_TOP
   refs are handled first, artificial bottom refs last.  */
494 df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
496 struct df *df = dflow->df;
497 basic_block bb = BASIC_BLOCK (bb_index);
498 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
501 /* Set when a def for regno is seen. */
502 bitmap_clear (seen_in_block);
503 bitmap_clear (seen_in_insn);
506 /* Variables defined in the prolog that are used by the exception
508 df_ru_bb_local_compute_process_use (bb_info,
509 df_get_artificial_uses (df, bb_index),
512 df_ru_bb_local_compute_process_def (dflow, bb_info,
513 df_get_artificial_defs (df, bb_index),
516 FOR_BB_INSNS (bb, insn)
518 unsigned int uid = INSN_UID (insn);
522 df_ru_bb_local_compute_process_use (bb_info,
523 DF_INSN_UID_USES (df, uid), 0);
525 df_ru_bb_local_compute_process_def (dflow, bb_info,
526 DF_INSN_UID_DEFS (df, uid), 0);
/* Per-insn defs are folded into the per-block set only after the whole
   insn is processed, so uses and defs within one insn do not interact.  */
528 bitmap_ior_into (seen_in_block, seen_in_insn);
529 bitmap_clear (seen_in_insn);
532 /* Process the hardware registers that are always live. */
533 df_ru_bb_local_compute_process_use (bb_info,
534 df_get_artificial_uses (df, bb_index), 0);
536 df_ru_bb_local_compute_process_def (dflow, bb_info,
537 df_get_artificial_defs (df, bb_index), 0);
541 /* Compute local reaching use (upward exposed use) info for each basic
542 block within BLOCKS. */
/* Runs the per-block local compute over ALL_BLOCKS, then builds the two
   knockout sets (sparse regnos vs. dense use-id bitmaps) applied on EH
   edges by the confluence function.  */
544 df_ru_local_compute (struct dataflow *dflow,
546 bitmap rescan_blocks ATTRIBUTE_UNUSED)
548 struct df *df = dflow->df;
549 unsigned int bb_index;
552 struct df_ru_problem_data *problem_data
553 = (struct df_ru_problem_data *) dflow->problem_data;
554 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
555 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
559 if (!df->use_info.refs_organized)
560 df_reorganize_refs (&df->use_info);
562 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
564 df_ru_bb_local_compute (dflow, bb_index);
567 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
568 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
570 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
571 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
572 bitmap_set_bit (sparse_invalidated, regno);
575 bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
576 reg_info->begin, reg_info->n_refs);
577 bitmap_ior_into (dense_invalidated, defs);
585 /* Initialize the solution bit vectors for problem. */
/* RU is backward: seed IN with GEN, start OUT empty.  */
588 df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
590 unsigned int bb_index;
593 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
595 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
596 bitmap_copy (bb_info->in, bb_info->gen);
597 bitmap_clear (bb_info->out);
602 /* Out of target gets or of in of source. */
/* NOTE(review): the tmp bitmap's BITMAP_FREE is elided from this
   listing — confirm it is released in the full source.  On EH edges the
   call-invalidated uses are masked out of the incoming set first.  */
605 df_ru_confluence_n (struct dataflow *dflow, edge e)
607 bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
608 bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;
610 if (e->flags & EDGE_EH)
612 struct df_ru_problem_data *problem_data
613 = (struct df_ru_problem_data *) dflow->problem_data;
614 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
615 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
616 struct df *df = dflow->df;
619 bitmap tmp = BITMAP_ALLOC (NULL);
621 bitmap_copy (tmp, op2);
622 bitmap_and_compl_into (tmp, dense_invalidated);
624 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
626 bitmap_clear_range (tmp,
627 DF_REG_USE_GET (df, regno)->begin,
628 DF_REG_USE_GET (df, regno)->n_refs);
630 bitmap_ior_into (op1, tmp);
634 bitmap_ior_into (op1, op2);
638 /* Transfer function. */
/* Backward transfer: out = gen U (in - kill), with an extra pass that
   clears whole per-register use-id ranges for sparse kills.  The tail
   of the slow path (copying tmp into out and freeing tmp) is elided
   from this listing.  */
641 df_ru_transfer_function (struct dataflow *dflow, int bb_index)
643 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
646 bitmap in = bb_info->in;
647 bitmap out = bb_info->out;
648 bitmap gen = bb_info->gen;
649 bitmap kill = bb_info->kill;
650 bitmap sparse_kill = bb_info->sparse_kill;
652 if (bitmap_empty_p (sparse_kill))
653 return bitmap_ior_and_compl (in, gen, out, kill);
656 struct df *df = dflow->df;
657 bool changed = false;
658 bitmap tmp = BITMAP_ALLOC (NULL);
659 bitmap_copy (tmp, in);
660 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
662 bitmap_clear_range (tmp,
663 DF_REG_USE_GET (df, regno)->begin,
664 DF_REG_USE_GET (df, regno)->n_refs);
666 bitmap_and_compl_into (tmp, kill);
667 bitmap_ior_into (tmp, gen);
668 changed = !bitmap_equal_p (tmp, in);
681 /* Free all storage associated with the problem. */
/* Frees per-block bitmaps, the block pool, the cached use_sites
   bitmaps, the invalidated-by-call sets, and the problem_data itself.  */
684 df_ru_free (struct dataflow *dflow)
687 struct df_ru_problem_data *problem_data
688 = (struct df_ru_problem_data *) dflow->problem_data;
692 for (i = 0; i < dflow->block_info_size; i++)
694 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
697 BITMAP_FREE (bb_info->kill);
698 BITMAP_FREE (bb_info->sparse_kill);
699 BITMAP_FREE (bb_info->gen);
700 BITMAP_FREE (bb_info->in);
701 BITMAP_FREE (bb_info->out);
705 free_alloc_pool (dflow->block_pool);
707 for (i = 0; i < problem_data->use_sites_size; i++)
709 bitmap bm = problem_data->use_sites[i];
714 free (problem_data->use_sites);
715 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
716 BITMAP_FREE (problem_data->dense_invalidated_by_call);
718 dflow->block_info_size = 0;
719 free (dflow->block_info);
720 free (dflow->problem_data);
726 /* Debugging info. */
/* Dumps the invalidated-by-call sets, the [begin, n_refs] use ranges
   for every register, then in/gen/kill/out (with population counts)
   for each block.  Early-returns if no block info was allocated.  */
729 df_ru_dump (struct dataflow *dflow, FILE *file)
732 struct df *df = dflow->df;
733 struct df_ru_problem_data *problem_data
734 = (struct df_ru_problem_data *) dflow->problem_data;
735 unsigned int m = max_reg_num ();
738 if (!dflow->block_info)
741 fprintf (file, "Reaching uses:\n");
743 fprintf (file, " sparse invalidated \t");
744 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
745 fprintf (file, " dense invalidated \t");
746 dump_bitmap (file, problem_data->dense_invalidated_by_call);
748 for (regno = 0; regno < m; regno++)
749 if (DF_REG_USE_GET (df, regno)->n_refs)
750 fprintf (file, "%d[%d,%d] ", regno,
751 DF_REG_USE_GET (df, regno)->begin,
752 DF_REG_USE_GET (df, regno)->n_refs);
753 fprintf (file, "\n");
757 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
758 df_print_bb_index (bb, file);
763 fprintf (file, " in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
764 dump_bitmap (file, bb_info->in);
765 fprintf (file, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
766 dump_bitmap (file, bb_info->gen);
767 fprintf (file, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
768 dump_bitmap (file, bb_info->kill);
769 fprintf (file, " out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
770 dump_bitmap (file, bb_info->out);
774 /* All of the information associated with every instance of the problem. */
/* Vtable wiring the RU (reaching uses) callbacks into the generic
   iterative dataflow driver; note the BACKWARD direction.  */
776 static struct df_problem problem_RU =
778 DF_RU, /* Problem id. */
779 DF_BACKWARD, /* Direction. */
780 df_ru_alloc, /* Allocate the problem specific data. */
781 NULL, /* Reset global information. */
782 df_ru_free_bb_info, /* Free basic block info. */
783 df_ru_local_compute, /* Local compute function. */
784 df_ru_init_solution, /* Init the solution specific data. */
785 df_iterative_dataflow, /* Iterative solver. */
786 NULL, /* Confluence operator 0. */
787 df_ru_confluence_n, /* Confluence operator n. */
788 df_ru_transfer_function, /* Transfer function. */
789 NULL, /* Finalize function. */
790 df_ru_free, /* Free all of the problem information. */
791 df_ru_dump, /* Debugging. */
792 NULL, /* Dependent problem. */
793 0 /* Changeable flags. */
798 /* Create a new DATAFLOW instance and add it to an existing instance
799 of DF. The returned structure is what is used to get at the
803 df_ru_add_problem (struct df *df, int flags)
805 return df_add_problem (df, &problem_RU, flags);
809 /*----------------------------------------------------------------------------
812 Find the locations in the function where each definition site for a
814 ----------------------------------------------------------------------------*/
/* Per-instance data for the reaching-definitions (RD) problem; the
   mirror image of df_ru_problem_data with def sites instead of uses.  */
816 struct df_rd_problem_data
818 bitmap *def_sites; /* Bitmap of defs for each pseudo. */
819 unsigned int def_sites_size; /* Size of def_sites. */
820 /* The set of defs to regs invalidated by call. */
821 bitmap sparse_invalidated_by_call;
822 /* The set of defs to regs invalidate by call for ru. */
823 bitmap dense_invalidated_by_call;
826 /* Get basic block info. */
828 struct df_rd_bb_info *
829 df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
831 return (struct df_rd_bb_info *) dflow->block_info[index];
835 /* Set basic block info. */
838 df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
839 struct df_rd_bb_info *bb_info)
841 dflow->block_info[index] = bb_info;
845 /* Free basic block info. */
/* Releases all five per-block bitmaps, then returns the record to the
   block pool — parallel to df_ru_free_bb_info.  */
848 df_rd_free_bb_info (struct dataflow *dflow,
849 basic_block bb ATTRIBUTE_UNUSED,
852 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
855 BITMAP_FREE (bb_info->kill);
856 BITMAP_FREE (bb_info->sparse_kill);
857 BITMAP_FREE (bb_info->gen);
858 BITMAP_FREE (bb_info->in);
859 BITMAP_FREE (bb_info->out);
860 pool_free (dflow->block_pool, bb_info);
865 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
866 not touched unless the block is new. */
/* NOTE(review): interior lines elided.  Structurally identical to
   df_ru_alloc but over def_sites: reuse path clears/grows the cached
   per-register def bitmaps and the invalidated-by-call sets; first-use
   path allocates problem_data; then per-block info is cleared or newly
   pool-allocated (in/out only allocated for new blocks).  */
869 df_rd_alloc (struct dataflow *dflow,
870 bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
873 unsigned int bb_index;
875 unsigned int reg_size = max_reg_num ();
877 if (!dflow->block_pool)
878 dflow->block_pool = create_alloc_pool ("df_rd_block pool",
879 sizeof (struct df_rd_bb_info), 50);
881 if (dflow->problem_data)
884 struct df_rd_problem_data *problem_data
885 = (struct df_rd_problem_data *) dflow->problem_data;
887 for (i = 0; i < problem_data->def_sites_size; i++)
889 bitmap bm = problem_data->def_sites[i];
893 problem_data->def_sites[i] = NULL;
897 if (problem_data->def_sites_size < reg_size)
899 problem_data->def_sites
900 = xrealloc (problem_data->def_sites, reg_size *sizeof (bitmap));
901 memset (problem_data->def_sites + problem_data->def_sites_size, 0,
902 (reg_size - problem_data->def_sites_size) *sizeof (bitmap));
903 problem_data->def_sites_size = reg_size;
906 bitmap_clear (problem_data->sparse_invalidated_by_call);
907 bitmap_clear (problem_data->dense_invalidated_by_call);
911 struct df_rd_problem_data *problem_data = XNEW (struct df_rd_problem_data);
912 dflow->problem_data = problem_data;
914 problem_data->def_sites = XCNEWVEC (bitmap, reg_size);
915 problem_data->def_sites_size = reg_size;
916 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
917 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
920 df_grow_bb_info (dflow);
922 /* Because of the clustering of all use sites for the same pseudo,
923 we have to process all of the blocks before doing the
926 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
928 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
931 bitmap_clear (bb_info->kill);
932 bitmap_clear (bb_info->sparse_kill);
933 bitmap_clear (bb_info->gen);
937 bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
938 df_rd_set_bb_info (dflow, bb_index, bb_info);
939 bb_info->kill = BITMAP_ALLOC (NULL);
940 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
941 bb_info->gen = BITMAP_ALLOC (NULL);
942 bb_info->in = BITMAP_ALLOC (NULL);
943 bb_info->out = BITMAP_ALLOC (NULL);
949 /* Process a list of DEFs for df_rd_bb_local_compute. */
/* NOTE(review): loop scaffolding elided; the visible "def =
   def->next_ref" confirms a walk over the def list.  A full (non-partial,
   non-may-clobber) first def of REGNO kills earlier defs — sparsely
   (regno bit + clearing the gen range) or densely (use the cached
   def-id bitmap for kill, subtract from gen).  Non-clobber defs are
   then added to gen.  */
952 df_rd_bb_local_compute_process_def (struct dataflow *dflow,
953 struct df_rd_bb_info *bb_info,
955 enum df_ref_flags top_flag)
957 struct df *df = dflow->df;
960 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
962 unsigned int regno = DF_REF_REGNO (def);
963 unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
964 unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;
966 /* Only the last def(s) for a regno in the block has any
968 if (!bitmap_bit_p (seen_in_block, regno))
970 /* The first def for regno in insn gets to knock out the
971 defs from other instructions. */
972 if ((!bitmap_bit_p (seen_in_insn, regno))
973 /* If the def is to only part of the reg, it does
974 not kill the other defs that reach here. */
975 && (!((DF_REF_FLAGS (def) & DF_REF_PARTIAL)
976 || (DF_REF_FLAGS (def) & DF_REF_MAY_CLOBBER))))
978 if (n_defs > DF_SPARSE_THRESHOLD)
980 bitmap_set_bit (bb_info->sparse_kill, regno);
981 bitmap_clear_range(bb_info->gen, begin, n_defs);
985 struct df_rd_problem_data * problem_data
986 = (struct df_rd_problem_data *)dflow->problem_data;
987 bitmap defs = df_ref_bitmap (problem_data->def_sites,
988 regno, begin, n_defs);
989 bitmap_ior_into (bb_info->kill, defs);
990 bitmap_and_compl_into (bb_info->gen, defs);
994 bitmap_set_bit (seen_in_insn, regno);
995 /* All defs for regno in the instruction may be put into
997 if (!(DF_REF_FLAGS (def)
998 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
999 bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
1002 def = def->next_ref;
1006 /* Compute local reaching def info for basic block BB. */
/* Walks insns bottom-to-top (FOR_BB_INSNS_REVERSE) so that later defs
   shadow earlier ones; artificial bottom defs first, artificial AT_TOP
   defs last.  */
1009 df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1011 struct df *df = dflow->df;
1012 basic_block bb = BASIC_BLOCK (bb_index);
1013 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1016 bitmap_clear (seen_in_block);
1017 bitmap_clear (seen_in_insn);
1019 df_rd_bb_local_compute_process_def (dflow, bb_info,
1020 df_get_artificial_defs (df, bb_index), 0);
1022 FOR_BB_INSNS_REVERSE (bb, insn)
1024 unsigned int uid = INSN_UID (insn);
1029 df_rd_bb_local_compute_process_def (dflow, bb_info,
1030 DF_INSN_UID_DEFS (df, uid), 0);
1032 /* This complex dance with the two bitmaps is required because
1033 instructions can assign twice to the same pseudo. This
1034 generally happens with calls that will have one def for the
1035 result and another def for the clobber. If only one vector
1036 is used and the clobber goes first, the result will be
1038 bitmap_ior_into (seen_in_block, seen_in_insn);
1039 bitmap_clear (seen_in_insn);
1042 /* Process the artificial defs at the top of the block last since we
1043 are going backwards through the block and these are logically at
1045 df_rd_bb_local_compute_process_def (dflow, bb_info,
1046 df_get_artificial_defs (df, bb_index),
1051 /* Compute local reaching def info for each basic block within BLOCKS. */
/* Mirrors df_ru_local_compute over defs: per-block local compute, then
   the sparse/dense EH-edge knockout sets from df_invalidated_by_call.  */
1054 df_rd_local_compute (struct dataflow *dflow,
1056 bitmap rescan_blocks ATTRIBUTE_UNUSED)
1058 struct df *df = dflow->df;
1059 unsigned int bb_index;
1062 struct df_rd_problem_data *problem_data
1063 = (struct df_rd_problem_data *) dflow->problem_data;
1064 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1065 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1069 if (!df->def_info.refs_organized)
1070 df_reorganize_refs (&df->def_info);
1072 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1074 df_rd_bb_local_compute (dflow, bb_index);
1077 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
1078 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
1080 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
1081 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
1083 bitmap_set_bit (sparse_invalidated, regno);
1087 bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
1088 reg_info->begin, reg_info->n_refs);
1089 bitmap_ior_into (dense_invalidated, defs);
1096 /* Initialize the solution bit vectors for problem. */
/* RD is forward: seed OUT with GEN, start IN empty (dual of
   df_ru_init_solution).  */
1099 df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
1101 unsigned int bb_index;
1104 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1106 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1108 bitmap_copy (bb_info->out, bb_info->gen);
1109 bitmap_clear (bb_info->in);
1113 /* In of target gets or of out of source. */
/* NOTE(review): tmp's BITMAP_FREE is elided from this listing — confirm
   it is released in the full source.  On EH edges the call-invalidated
   defs are masked out of the source block's out-set first.  */
1116 df_rd_confluence_n (struct dataflow *dflow, edge e)
1118 bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
1119 bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;
1121 if (e->flags & EDGE_EH)
1123 struct df_rd_problem_data *problem_data
1124 = (struct df_rd_problem_data *) dflow->problem_data;
1125 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1126 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1127 struct df *df = dflow->df;
1130 bitmap tmp = BITMAP_ALLOC (NULL);
1132 bitmap_copy (tmp, op2);
1133 bitmap_and_compl_into (tmp, dense_invalidated);
1135 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
1137 bitmap_clear_range (tmp,
1138 DF_REG_DEF_GET (df, regno)->begin,
1139 DF_REG_DEF_GET (df, regno)->n_refs);
1141 bitmap_ior_into (op1, tmp);
1145 bitmap_ior_into (op1, op2);
1149 /* Transfer function. */
/* Forward transfer: out = gen U (in - kill), with sparse kills clearing
   whole per-register def-id ranges.  The tail of the slow path (copying
   tmp into out and freeing tmp) is elided from this listing.  */
1152 df_rd_transfer_function (struct dataflow *dflow, int bb_index)
1154 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1157 bitmap in = bb_info->in;
1158 bitmap out = bb_info->out;
1159 bitmap gen = bb_info->gen;
1160 bitmap kill = bb_info->kill;
1161 bitmap sparse_kill = bb_info->sparse_kill;
1163 if (bitmap_empty_p (sparse_kill))
1164 return bitmap_ior_and_compl (out, gen, in, kill);
1167 struct df *df = dflow->df;
1168 bool changed = false;
1169 bitmap tmp = BITMAP_ALLOC (NULL);
1170 bitmap_copy (tmp, in);
1171 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
1173 bitmap_clear_range (tmp,
1174 DF_REG_DEF_GET (df, regno)->begin,
1175 DF_REG_DEF_GET (df, regno)->n_refs);
1177 bitmap_and_compl_into (tmp, kill);
1178 bitmap_ior_into (tmp, gen);
1179 changed = !bitmap_equal_p (tmp, out);
1192 /* Free all storage associated with the problem. */
/* Parallel to df_ru_free, over def_sites instead of use_sites.  */
1195 df_rd_free (struct dataflow *dflow)
1198 struct df_rd_problem_data *problem_data
1199 = (struct df_rd_problem_data *) dflow->problem_data;
1203 for (i = 0; i < dflow->block_info_size; i++)
1205 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
1208 BITMAP_FREE (bb_info->kill);
1209 BITMAP_FREE (bb_info->sparse_kill);
1210 BITMAP_FREE (bb_info->gen);
1211 BITMAP_FREE (bb_info->in);
1212 BITMAP_FREE (bb_info->out);
1216 free_alloc_pool (dflow->block_pool);
1218 for (i = 0; i < problem_data->def_sites_size; i++)
1220 bitmap bm = problem_data->def_sites[i];
1225 free (problem_data->def_sites);
1226 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
1227 BITMAP_FREE (problem_data->dense_invalidated_by_call);
1229 dflow->block_info_size = 0;
1230 free (dflow->block_info);
1231 free (dflow->problem_data);
1237 /* Debugging info. */
/* Same layout as df_ru_dump, but over the def side: invalidated-by-call
   sets, per-register [begin, n_refs] def ranges, then per-block
   in/gen/kill/out with population counts.  */
1240 df_rd_dump (struct dataflow *dflow, FILE *file)
1242 struct df *df = dflow->df;
1244 struct df_rd_problem_data *problem_data
1245 = (struct df_rd_problem_data *) dflow->problem_data;
1246 unsigned int m = max_reg_num ();
1249 if (!dflow->block_info)
1252 fprintf (file, "Reaching defs:\n\n");
1254 fprintf (file, " sparse invalidated \t");
1255 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
1256 fprintf (file, " dense invalidated \t");
1257 dump_bitmap (file, problem_data->dense_invalidated_by_call);
1259 for (regno = 0; regno < m; regno++)
1260 if (DF_REG_DEF_GET (df, regno)->n_refs)
1261 fprintf (file, "%d[%d,%d] ", regno,
1262 DF_REG_DEF_GET (df, regno)->begin,
1263 DF_REG_DEF_GET (df, regno)->n_refs);
1264 fprintf (file, "\n");
1268 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
1269 df_print_bb_index (bb, file);
1274 fprintf (file, " in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
1275 dump_bitmap (file, bb_info->in);
1276 fprintf (file, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
1277 dump_bitmap (file, bb_info->gen);
1278 fprintf (file, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
1279 dump_bitmap (file, bb_info->kill);
1280 fprintf (file, " out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
1281 dump_bitmap (file, bb_info->out);
1285 /* All of the information associated with every instance of the problem. */
/* Vtable wiring the RD (reaching defs) callbacks into the generic
   iterative dataflow driver; note the FORWARD direction, dual of
   problem_RU.  */
1287 static struct df_problem problem_RD =
1289 DF_RD, /* Problem id. */
1290 DF_FORWARD, /* Direction. */
1291 df_rd_alloc, /* Allocate the problem specific data. */
1292 NULL, /* Reset global information. */
1293 df_rd_free_bb_info, /* Free basic block info. */
1294 df_rd_local_compute, /* Local compute function. */
1295 df_rd_init_solution, /* Init the solution specific data. */
1296 df_iterative_dataflow, /* Iterative solver. */
1297 NULL, /* Confluence operator 0. */
1298 df_rd_confluence_n, /* Confluence operator n. */
1299 df_rd_transfer_function, /* Transfer function. */
1300 NULL, /* Finalize function. */
1301 df_rd_free, /* Free all of the problem information. */
1302 df_rd_dump, /* Debugging. */
1303 NULL, /* Dependent problem. */
1304 0 /* Changeable flags. */
1309 /* Create a new DATAFLOW instance and add it to an existing instance
1310 of DF. The returned structure is what is used to get at the
1314 df_rd_add_problem (struct df *df, int flags)
1316 return df_add_problem (df, &problem_RD, flags);
1321 /*----------------------------------------------------------------------------
1324 Find the locations in the function where any use of a pseudo can reach
1325 in the backwards direction.
1326 ----------------------------------------------------------------------------*/
1328 /* Get basic block info. */
1330 struct df_lr_bb_info *
1331 df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
1333 return (struct df_lr_bb_info *) dflow->block_info[index];
1337 /* Set basic block info. */
1340 df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
1341 struct df_lr_bb_info *bb_info)
1343 dflow->block_info[index] = bb_info;
1347 /* Free basic block info. */
/* Release the four LR bitmaps held by the block record and return the
   record itself to the block pool.  */
1350 df_lr_free_bb_info (struct dataflow *dflow,
1351 basic_block bb ATTRIBUTE_UNUSED,
1354 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
1357 BITMAP_FREE (bb_info->use);
1358 BITMAP_FREE (bb_info->def);
1359 BITMAP_FREE (bb_info->in);
1360 BITMAP_FREE (bb_info->out);
1361 pool_free (dflow->block_pool, bb_info);
1366 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1367 not touched unless the block is new. */
1370 df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
1371 bitmap all_blocks ATTRIBUTE_UNUSED)
1373 unsigned int bb_index;
/* Lazily create the pool the per-block records are allocated from.  */
1376 if (!dflow->block_pool)
1377 dflow->block_pool = create_alloc_pool ("df_lr_block pool",
1378 sizeof (struct df_lr_bb_info), 50);
1380 df_grow_bb_info (dflow);
1382 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1384 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
/* Existing block: clear only the local def/use sets so the previous
   in/out solution can seed the next iteration.  */
1387 bitmap_clear (bb_info->def);
1388 bitmap_clear (bb_info->use);
/* New block: allocate the record and all four bitmaps.  */
1392 bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
1393 df_lr_set_bb_info (dflow, bb_index, bb_info);
1394 bb_info->use = BITMAP_ALLOC (NULL);
1395 bb_info->def = BITMAP_ALLOC (NULL);
1396 bb_info->in = BITMAP_ALLOC (NULL);
1397 bb_info->out = BITMAP_ALLOC (NULL);
1403 /* Compute local live register info for basic block BB. */
/* Builds bb_info->use and bb_info->def by scanning the block backwards:
   artificial refs at the bottom of the block first, then the insns in
   reverse order, then artificial refs at the top.  A full (non-partial)
   def kills earlier liveness, so it clears the corresponding use bit.  */
1406 df_lr_bb_local_compute (struct dataflow *dflow,
1407 struct df *df, unsigned int bb_index)
1409 basic_block bb = BASIC_BLOCK (bb_index);
1410 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1415 /* Process the registers set in an exception handler. */
1416 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1417 if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
1418 && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
1420 unsigned int dregno = DF_REF_REGNO (def);
1421 bitmap_set_bit (bb_info->def, dregno);
1422 bitmap_clear_bit (bb_info->use, dregno);
1425 /* Process the hardware registers that are always live. */
1426 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1427 /* Add use to set of uses in this BB. */
1428 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
1429 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1431 FOR_BB_INSNS_REVERSE (bb, insn)
1433 unsigned int uid = INSN_UID (insn);
1440 for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
1442 unsigned int dregno = DF_REF_REGNO (def);
/* Hard regs set by a sibling call that are used by the exit block and
   not mentioned in the return value rtx are excluded here; presumably
   they must stay live across the sibcall -- TODO confirm against the
   elided branch of this conditional.  */
1444 if (dregno >= FIRST_PSEUDO_REGISTER
1445 || !(SIBLING_CALL_P (insn)
1446 && bitmap_bit_p (df->exit_block_uses, dregno)
1447 && !refers_to_regno_p (dregno, dregno+1,
1448 current_function_return_rtx,
1451 /* If the def is to only part of the reg, it does
1452 not kill the other defs that reach here. */
1453 if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
1455 bitmap_set_bit (bb_info->def, dregno);
1456 bitmap_clear_bit (bb_info->use, dregno);
/* Second pass over the same defs: record hard registers clobbered by
   asm statements in the global regs_asm_clobbered array.  */
1463 for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
1465 unsigned int dregno = DF_REF_REGNO (def);
1467 if (DF_INSN_CONTAINS_ASM (df, insn)
1468 && dregno < FIRST_PSEUDO_REGISTER)
/* Mark every hard register covered by this multi-word def.  */
1471 unsigned int end = dregno
1472 + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
1473 for (i = dregno; i <= end; ++i)
1474 regs_asm_clobbered[i] = 1;
1476 /* If the def is to only part of the reg, it does
1477 not kill the other defs that reach here. */
1478 if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
1480 bitmap_set_bit (bb_info->def, dregno);
1481 bitmap_clear_bit (bb_info->use, dregno);
1486 for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
1487 /* Add use to set of uses in this BB. */
1488 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1491 /* Process the registers set in an exception handler. */
1492 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1493 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
1494 && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
1496 unsigned int dregno = DF_REF_REGNO (def);
1497 bitmap_set_bit (bb_info->def, dregno);
1498 bitmap_clear_bit (bb_info->use, dregno);
1502 /* Process the uses that are live into an exception handler. */
1503 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1504 /* Add use to set of uses in this BB. */
1505 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
1506 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1511 /* Compute local live register info for each basic block within BLOCKS. */
1514 df_lr_local_compute (struct dataflow *dflow,
1516 bitmap rescan_blocks)
1518 struct df *df = dflow->df;
1519 unsigned int bb_index;
/* Only reset the asm-clobbered record on a full recompute; a partial
   rescan must keep clobbers recorded by blocks not being rescanned.  */
1522 /* Assume that the stack pointer is unchanging if alloca hasn't
   been used.  */
1524 if (bitmap_equal_p (all_blocks, rescan_blocks))
1525 memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
1527 bitmap_clear (df->hardware_regs_used);
1529 /* The all-important stack pointer must always be live. */
1530 bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
1532 /* Before reload, there are a few registers that must be forced
1533 live everywhere -- which might not already be the case for
1534 blocks within infinite loops. */
1535 if (!reload_completed)
1537 /* Any reference to any pseudo before reload is a potential
1538 reference of the frame pointer. */
1539 bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
1541 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1542 /* Pseudos with argument area equivalences may require
1543 reloading via the argument pointer. */
1544 if (fixed_regs[ARG_POINTER_REGNUM])
1545 bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
1548 /* Any constant, or pseudo with constant equivalences, may
1549 require reloading from memory using the pic register. */
1550 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1551 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1552 bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
1555 if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
1557 /* The exit block is special for this problem and its bits are
1558 computed from thin air. */
1559 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK)
1560 bitmap_copy (bb_info->use, df->exit_block_uses);
/* The exit block was handled above, so it is skipped here.  */
1563 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1565 if (bb_index == EXIT_BLOCK)
1567 df_lr_bb_local_compute (dflow, df, bb_index);
1572 /* Initialize the solution vectors. */
1575 df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1577 unsigned int bb_index;
1580 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1582 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
/* Seed IN with the local uses; OUT starts empty and is filled in by
   the backwards iteration.  */
1583 bitmap_copy (bb_info->in, bb_info->use);
1584 bitmap_clear (bb_info->out);
1589 /* Confluence function that processes infinite loops. This might be a
1590 noreturn function that throws. And even if it isn't, getting the
1591 unwind info right helps debugging. */
1593 df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
1595 struct df *df = dflow->df;
1597 bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
/* For any block other than the exit block, start OUT with the hard
   registers that are always live.  */
1598 if (bb != EXIT_BLOCK_PTR)
1599 bitmap_copy (op1, df->hardware_regs_used);
1603 /* Confluence function that ignores fake edges. */
/* Union the liveness at the head of E->dest into the OUT of E->src,
   dropping call-invalidated registers across EH edges.  */
1606 df_lr_confluence_n (struct dataflow *dflow, edge e)
1608 bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
1609 bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
1611 /* Call-clobbered registers die across exception and call edges. */
1612 /* ??? Abnormal call edges ignored for the moment, as this gets
1613 confused by sibling call edges, which crashes reg-stack. */
1614 if (e->flags & EDGE_EH)
1615 bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
1617 bitmap_ior_into (op1, op2);
/* The always-live hard registers flow back along every edge.  */
1619 bitmap_ior_into (op1, dflow->df->hardware_regs_used);
1623 /* Transfer function. */
1626 df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1628 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1629 bitmap in = bb_info->in;
1630 bitmap out = bb_info->out;
1631 bitmap use = bb_info->use;
1632 bitmap def = bb_info->def;
1634 return bitmap_ior_and_compl (in, use, out, def);
1638 /* Free all storage associated with the problem. */
/* Frees every per-block record and its bitmaps, the block pool, the
   block_info array, and the problem data.  */
1641 df_lr_free (struct dataflow *dflow)
1643 if (dflow->block_info)
1646 for (i = 0; i < dflow->block_info_size; i++)
1648 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
1651 BITMAP_FREE (bb_info->use);
1652 BITMAP_FREE (bb_info->def);
1653 BITMAP_FREE (bb_info->in);
1654 BITMAP_FREE (bb_info->out);
1657 free_alloc_pool (dflow->block_pool);
1659 dflow->block_info_size = 0;
1660 free (dflow->block_info);
1663 free (dflow->problem_data);
1668 /* Debugging info. */
/* Dump the LR in/use/def/out bitmaps for every basic block to FILE.  */
1671 df_lr_dump (struct dataflow *dflow, FILE *file)
1675 if (!dflow->block_info)
1678 fprintf (file, "Live Registers:\n");
1681 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
1682 df_print_bb_index (bb, file);
1687 fprintf (file, " in \t");
1688 dump_bitmap (file, bb_info->in);
1689 fprintf (file, " use \t");
1690 dump_bitmap (file, bb_info->use);
1691 fprintf (file, " def \t");
1692 dump_bitmap (file, bb_info->def);
1693 fprintf (file, " out \t");
1694 dump_bitmap (file, bb_info->out);
1698 /* All of the information associated with every instance of the problem. */
/* Callback table for the live-registers (LR) problem; laid out the same
   way as problem_RD above.  */
1700 static struct df_problem problem_LR =
1702 DF_LR, /* Problem id. */
1703 DF_BACKWARD, /* Direction. */
1704 df_lr_alloc, /* Allocate the problem specific data. */
1705 NULL, /* Reset global information. */
1706 df_lr_free_bb_info, /* Free basic block info. */
1707 df_lr_local_compute, /* Local compute function. */
1708 df_lr_init, /* Init the solution specific data. */
1709 df_iterative_dataflow, /* Iterative solver. */
1710 df_lr_confluence_0, /* Confluence operator 0. */
1711 df_lr_confluence_n, /* Confluence operator n. */
1712 df_lr_transfer_function, /* Transfer function. */
1713 NULL, /* Finalize function. */
1714 df_lr_free, /* Free all of the problem information. */
1715 df_lr_dump, /* Debugging. */
1716 NULL, /* Dependent problem. */
1721 /* Create a new DATAFLOW instance and add it to an existing instance
1722 of DF. The returned structure is what is used to get at the
   solution.  FLAGS are the problem's changeable flags.  */
1726 df_lr_add_problem (struct df *df, int flags)
1728 return df_add_problem (df, &problem_LR, flags);
1733 /*----------------------------------------------------------------------------
1734 UNINITIALIZED REGISTERS
1736 Find the set of uses for registers that are reachable from the entry
1737 block without passing thru a definition.
1738 ----------------------------------------------------------------------------*/
1740 /* Get basic block info. */
1742 struct df_ur_bb_info *
1743 df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1745 return (struct df_ur_bb_info *) dflow->block_info[index];
1749 /* Set basic block info. */
1752 df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
1753 struct df_ur_bb_info *bb_info)
1755 dflow->block_info[index] = bb_info;
1759 /* Free basic block info. */
/* Release the four UR bitmaps held by the block record and return the
   record itself to the block pool.  */
1762 df_ur_free_bb_info (struct dataflow *dflow,
1763 basic_block bb ATTRIBUTE_UNUSED,
1766 struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1769 BITMAP_FREE (bb_info->gen);
1770 BITMAP_FREE (bb_info->kill);
1771 BITMAP_FREE (bb_info->in);
1772 BITMAP_FREE (bb_info->out);
1773 pool_free (dflow->block_pool, bb_info);
1778 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1779 not touched unless the block is new. */
1782 df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
1783 bitmap all_blocks ATTRIBUTE_UNUSED)
1785 unsigned int bb_index;
/* Lazily create the pool the per-block records are allocated from.  */
1788 if (!dflow->block_pool)
1789 dflow->block_pool = create_alloc_pool ("df_ur_block pool",
1790 sizeof (struct df_ur_bb_info), 100);
1792 df_grow_bb_info (dflow);
1794 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1796 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
/* Existing block: clear only the local gen/kill sets, keeping the
   previous in/out solution as a starting point.  */
1799 bitmap_clear (bb_info->kill);
1800 bitmap_clear (bb_info->gen);
/* New block: allocate the record and all four bitmaps.  */
1804 bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
1805 df_ur_set_bb_info (dflow, bb_index, bb_info);
1806 bb_info->kill = BITMAP_ALLOC (NULL);
1807 bb_info->gen = BITMAP_ALLOC (NULL);
1808 bb_info->in = BITMAP_ALLOC (NULL);
1809 bb_info->out = BITMAP_ALLOC (NULL);
1815 /* Compute local uninitialized register info for basic block BB. */
1818 df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1820 struct df *df = dflow->df;
1821 basic_block bb = BASIC_BLOCK (bb_index);
1822 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1826 bitmap_clear (seen_in_block);
1827 bitmap_clear (seen_in_insn);
1829 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1830 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
1832 unsigned int regno = DF_REF_REGNO (def);
1833 if (!bitmap_bit_p (seen_in_block, regno))
1835 bitmap_set_bit (seen_in_block, regno);
1836 bitmap_set_bit (bb_info->gen, regno);
1840 FOR_BB_INSNS_REVERSE (bb, insn)
1842 unsigned int uid = INSN_UID (insn);
1846 for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
1848 unsigned int regno = DF_REF_REGNO (def);
1849 /* Only the last def counts. */
1850 if (!bitmap_bit_p (seen_in_block, regno))
1852 bitmap_set_bit (seen_in_insn, regno);
1854 if (DF_REF_FLAGS (def)
1855 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
1857 /* Only must clobbers for the entire reg destroy the
1859 if ((DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
1860 && (!DF_REF_FLAGS (def) & DF_REF_PARTIAL))
1861 bitmap_set_bit (bb_info->kill, regno);
1864 bitmap_set_bit (bb_info->gen, regno);
1867 bitmap_ior_into (seen_in_block, seen_in_insn);
1868 bitmap_clear (seen_in_insn);
1871 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1872 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
1874 unsigned int regno = DF_REF_REGNO (def);
1875 if (!bitmap_bit_p (seen_in_block, regno))
1877 bitmap_set_bit (seen_in_block, regno);
1878 bitmap_set_bit (bb_info->gen, regno);
1884 /* Compute local uninitialized register info. */
/* Runs df_ur_bb_local_compute on every block in RESCAN_BLOCKS.  The
   scratch bitmaps seen_in_block/seen_in_insn are assumed to be
   allocated by the time this runs -- allocation is outside this
   excerpt; TODO confirm.  */
1887 df_ur_local_compute (struct dataflow *dflow,
1888 bitmap all_blocks ATTRIBUTE_UNUSED,
1889 bitmap rescan_blocks)
1891 unsigned int bb_index;
1896 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1898 df_ur_bb_local_compute (dflow, bb_index);
1905 /* Initialize the solution vectors. */
1908 df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1910 unsigned int bb_index;
1913 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1915 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
/* Seed OUT with the local gen set; IN starts empty and is filled in
   by the forward iteration.  */
1917 bitmap_copy (bb_info->out, bb_info->gen);
1918 bitmap_clear (bb_info->in);
/* Finalize the UR solution by trimming it against the LR (liveness)
   solution: no register may reach a location where it is not live.  */
1927 df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
1929 struct df *df = dflow->df;
1930 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
1931 bitmap tmp = BITMAP_ALLOC (NULL);
1933 unsigned int bb_index;
1935 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1937 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1938 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
1940 /* No register may reach a location where it is not used. Thus
1941 we trim the rr result to the places where it is used. */
1942 bitmap_and_into (bb_info->in, bb_lr_info->in);
1943 bitmap_and_into (bb_info->out, bb_lr_info->out);
1946 /* Hard registers may still stick in the ur_out set, but not
1947 be in the ur_in set, if their only mention was in a call
1948 in this block. This is because a call kills in the lr
1949 problem but does not kill in the ur problem. To clean
1950 this up, we execute the transfer function on the lr_in
1951 set and then use that to knock bits out of ur_out. */
1952 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
1954 bitmap_and_into (bb_info->out, tmp);
1962 /* Confluence function that ignores fake edges. */
1965 df_ur_confluence_n (struct dataflow *dflow, edge e)
1967 bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
1968 bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
1970 if (e->flags & EDGE_FAKE)
1973 bitmap_ior_into (op1, op2);
1977 /* Transfer function. */
1980 df_ur_transfer_function (struct dataflow *dflow, int bb_index)
1982 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1983 bitmap in = bb_info->in;
1984 bitmap out = bb_info->out;
1985 bitmap gen = bb_info->gen;
1986 bitmap kill = bb_info->kill;
1988 return bitmap_ior_and_compl (out, gen, in, kill);
1992 /* Free all storage associated with the problem. */
/* Frees every per-block record and its bitmaps, the block pool, and
   the block_info array.  */
1995 df_ur_free (struct dataflow *dflow)
1997 if (dflow->block_info)
2001 for (i = 0; i < dflow->block_info_size; i++)
2003 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
2006 BITMAP_FREE (bb_info->gen);
2007 BITMAP_FREE (bb_info->kill);
2008 BITMAP_FREE (bb_info->in);
2009 BITMAP_FREE (bb_info->out);
2013 free_alloc_pool (dflow->block_pool);
2014 dflow->block_info_size = 0;
2015 free (dflow->block_info);
2021 /* Debugging info. */
/* Dump the UR in/gen/kill/out bitmaps for every basic block to FILE.  */
2024 df_ur_dump (struct dataflow *dflow, FILE *file)
2028 if (!dflow->block_info)
2031 fprintf (file, "Undefined regs:\n");
2035 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
2036 df_print_bb_index (bb, file);
2041 fprintf (file, " in \t");
2042 dump_bitmap (file, bb_info->in);
2043 fprintf (file, " gen \t");
2044 dump_bitmap (file, bb_info->gen);
2045 fprintf (file, " kill\t");
2046 dump_bitmap (file, bb_info->kill);
2047 fprintf (file, " out \t");
2048 dump_bitmap (file, bb_info->out);
2052 /* All of the information associated with every instance of the problem. */
/* Callback table for the uninitialized-registers (UR) problem; note it
   depends on the LR problem (df_lr_add_problem) being present.  */
2054 static struct df_problem problem_UR =
2056 DF_UR, /* Problem id. */
2057 DF_FORWARD, /* Direction. */
2058 df_ur_alloc, /* Allocate the problem specific data. */
2059 NULL, /* Reset global information. */
2060 df_ur_free_bb_info, /* Free basic block info. */
2061 df_ur_local_compute, /* Local compute function. */
2062 df_ur_init, /* Init the solution specific data. */
2063 df_iterative_dataflow, /* Iterative solver. */
2064 NULL, /* Confluence operator 0. */
2065 df_ur_confluence_n, /* Confluence operator n. */
2066 df_ur_transfer_function, /* Transfer function. */
2067 df_ur_local_finalize, /* Finalize function. */
2068 df_ur_free, /* Free all of the problem information. */
2069 df_ur_dump, /* Debugging. */
2070 df_lr_add_problem, /* Dependent problem. */
2071 0 /* Changeable flags. */
2075 /* Create a new DATAFLOW instance and add it to an existing instance
2076 of DF. The returned structure is what is used to get at the
   solution.  FLAGS are the problem's changeable flags.  */
2080 df_ur_add_problem (struct df *df, int flags)
2082 return df_add_problem (df, &problem_UR, flags);
2087 /*----------------------------------------------------------------------------
2088 UNINITIALIZED REGISTERS WITH EARLYCLOBBER
2090 Find the set of uses for registers that are reachable from the entry
2091 block without passing thru a definition.
2093 This is a variant of the UR problem above that has a lot of special
2094 features just for the register allocation phase.
2095 ----------------------------------------------------------------------------*/
/* Problem data shared by all blocks of one UREC problem instance.  */
2097 struct df_urec_problem_data
2099 bool earlyclobbers_found; /* True if any instruction contains an
   earlyclobber constraint.  */
2102 bitmap stack_regs; /* Registers that may be allocated to a STACK_REGS. */
2107 /* Get basic block info. */
2109 struct df_urec_bb_info *
2110 df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2112 return (struct df_urec_bb_info *) dflow->block_info[index];
2116 /* Set basic block info. */
2119 df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
2120 struct df_urec_bb_info *bb_info)
2122 dflow->block_info[index] = bb_info;
2126 /* Free basic block info. */
/* Release the five UREC bitmaps held by the block record and return
   the record itself to the block pool.  */
2129 df_urec_free_bb_info (struct dataflow *dflow,
2130 basic_block bb ATTRIBUTE_UNUSED,
2133 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2136 BITMAP_FREE (bb_info->gen);
2137 BITMAP_FREE (bb_info->kill);
2138 BITMAP_FREE (bb_info->in);
2139 BITMAP_FREE (bb_info->out);
2140 BITMAP_FREE (bb_info->earlyclobber);
2141 pool_free (dflow->block_pool, bb_info);
2146 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
2147 not touched unless the block is new. */
2150 df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
2151 bitmap all_blocks ATTRIBUTE_UNUSED)
2154 unsigned int bb_index;
2156 struct df_urec_problem_data *problem_data
2157 = (struct df_urec_problem_data *) dflow->problem_data;
/* Lazily create the pool the per-block records are allocated from.  */
2159 if (!dflow->block_pool)
2160 dflow->block_pool = create_alloc_pool ("df_urec_block pool",
2161 sizeof (struct df_urec_bb_info), 50);
/* Lazily create the shared problem data and reset the earlyclobber
   discovery flag for this run.  */
2163 if (!dflow->problem_data)
2165 problem_data = XNEW (struct df_urec_problem_data);
2166 dflow->problem_data = problem_data;
2168 problem_data->earlyclobbers_found = false;
2170 df_grow_bb_info (dflow);
2172 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
2174 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
/* Existing block: clear only the local sets, keeping the previous
   in/out solution as a starting point.  */
2177 bitmap_clear (bb_info->kill);
2178 bitmap_clear (bb_info->gen);
2179 bitmap_clear (bb_info->earlyclobber);
/* New block: allocate the record and all five bitmaps.  */
2183 bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
2184 df_urec_set_bb_info (dflow, bb_index, bb_info);
2185 bb_info->kill = BITMAP_ALLOC (NULL);
2186 bb_info->gen = BITMAP_ALLOC (NULL);
2187 bb_info->in = BITMAP_ALLOC (NULL);
2188 bb_info->out = BITMAP_ALLOC (NULL);
2189 bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2195 /* The function modifies local info for register REG being changed in
2196 SETTER. DATA is used to pass the current basic block info. */
/* note_stores callback: a SET gens the register(s); a CLOBBER kills
   without genning.  Hard registers are expanded to every hard reg the
   mode covers.  */
2199 df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
2204 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2206 if (GET_CODE (reg) == SUBREG)
2207 reg = SUBREG_REG (reg);
2213 endregno = regno = REGNO (reg);
2214 if (regno < FIRST_PSEUDO_REGISTER)
2216 endregno +=hard_regno_nregs[regno][GET_MODE (reg)];
2218 for (i = regno; i < endregno; i++)
2220 bitmap_set_bit (bb_info->kill, i);
2222 if (GET_CODE (setter) != CLOBBER)
2223 bitmap_set_bit (bb_info->gen, i);
2225 bitmap_clear_bit (bb_info->gen, i);
/* Pseudo register: a single kill/gen update suffices.  */
2230 bitmap_set_bit (bb_info->kill, regno);
2232 if (GET_CODE (setter) != CLOBBER)
2233 bitmap_set_bit (bb_info->gen, regno);
2235 bitmap_clear_bit (bb_info->gen, regno);
2238 /* Classes of registers which could be early clobbered in the current
   insn.  Filled by df_urec_check_earlyclobber, read by
   df_urec_mark_reg_use_for_earlyclobber.  */
2241 static VEC(int,heap) *earlyclobber_regclass;
2243 /* This function finds and stores register classes that could be early
2244 clobbered in INSN. If any earlyclobber classes are found, the function
2245 returns TRUE, in all other cases it returns FALSE. */
/* Walks each operand's constraint string; an '&' marks the current
   alternative's class as earlyclobbered.  Found classes are collected
   (deduplicated) in the file-static earlyclobber_regclass vector.  */
2248 df_urec_check_earlyclobber (rtx insn)
2253 extract_insn (insn);
2255 VEC_truncate (int, earlyclobber_regclass, 0);
2256 for (opno = 0; opno < recog_data.n_operands; opno++)
2261 enum reg_class class;
2262 const char *p = recog_data.constraints[opno];
2271 case '=': case '+': case '?':
2274 case 'm': case '<': case '>': case 'V': case 'o':
2275 case 'E': case 'F': case 'G': case 'H':
2276 case 's': case 'i': case 'n':
2277 case 'I': case 'J': case 'K': case 'L':
2278 case 'M': case 'N': case 'O': case 'P':
2280 case '0': case '1': case '2': case '3': case '4':
2281 case '5': case '6': case '7': case '8': case '9':
2282 /* These don't say anything we care about. */
/* An '&' was seen earlier in this alternative and a real register
   class was named: record it if not already present.  */
2290 if (amp_p && class != NO_REGS)
2296 VEC_iterate (int, earlyclobber_regclass, i, rc);
2299 if (rc == (int) class)
2303 /* We use VEC_quick_push here because
2304 earlyclobber_regclass holds no more than
2305 N_REG_CLASSES elements. */
2306 VEC_quick_push (int, earlyclobber_regclass, (int) class);
2316 class = GENERAL_REGS;
2320 class = REG_CLASS_FROM_CONSTRAINT (c, p);
2325 p += CONSTRAINT_LEN (c, p);
2332 /* The function checks that pseudo-register *X has a class
2333 intersecting with the class of pseudo-register could be early
2334 clobbered in the same insn.
2336 This function is a no-op if earlyclobber_regclass is empty.
2338 Reload can assign the same hard register to uninitialized
2339 pseudo-register and early clobbered pseudo-register in an insn if
2340 the pseudo-register is used first time in given BB and not lived at
2341 the BB start. To prevent this we don't change life information for
2342 such pseudo-registers. */
/* for_each_rtx callback; DATA is the current block's UREC info.
   Pseudos already gen'd or killed in this block whose preferred or
   alternate class intersects an earlyclobbered class are marked in
   bb_info->earlyclobber.  */
2345 df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
2347 enum reg_class pref_class, alt_class;
2349 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2351 if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2356 if (bitmap_bit_p (bb_info->kill, regno)
2357 || bitmap_bit_p (bb_info->gen, regno))
2359 pref_class = reg_preferred_class (regno);
2360 alt_class = reg_alternate_class (regno);
2361 for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2363 if (reg_classes_intersect_p (rc, pref_class)
2365 && reg_classes_intersect_p (rc, alt_class)))
2367 bitmap_set_bit (bb_info->earlyclobber, regno);
2375 /* The function processes all pseudo-registers in *X with the aid of
2376 previous function. */
/* note_uses-compatible wrapper around
   df_urec_mark_reg_use_for_earlyclobber.  */
2379 df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2381 for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
2385 /* Compute local uninitialized register info for basic block BB. */
/* Unlike the UR variant, this scans forwards, uses note_stores rather
   than the df ref records, and additionally collects per-block
   earlyclobber information for the register allocator.  */
2388 df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
2390 struct df *df = dflow->df;
2391 basic_block bb = BASIC_BLOCK (bb_index);
2392 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
/* Artificial defs at the top of the block are seen first in this
   forward scan.  */
2396 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2397 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
2399 unsigned int regno = DF_REF_REGNO (def);
2400 bitmap_set_bit (bb_info->gen, regno);
2403 FOR_BB_INSNS (bb, insn)
2407 note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
2408 if (df_urec_check_earlyclobber (insn))
2410 struct df_urec_problem_data *problem_data
2411 = (struct df_urec_problem_data *) dflow->problem_data;
2412 problem_data->earlyclobbers_found = true;
2413 note_uses (&PATTERN (insn),
2414 df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
/* Artificial defs at the bottom of the block are seen last.  */
2419 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2420 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
2422 unsigned int regno = DF_REF_REGNO (def);
2423 bitmap_set_bit (bb_info->gen, regno);
2429 /* Compute local uninitialized register info. */
/* Records, in problem_data->stack_regs, every pseudo whose preferred
   or alternate class overlaps the x87-style stack registers, then runs
   the per-block local computation.  */
2432 df_urec_local_compute (struct dataflow *dflow,
2433 bitmap all_blocks ATTRIBUTE_UNUSED,
2434 bitmap rescan_blocks)
2436 unsigned int bb_index;
2440 HARD_REG_SET zero, stack_hard_regs, used;
2441 struct df_urec_problem_data *problem_data
2442 = (struct df_urec_problem_data *) dflow->problem_data;
2444 /* Any register that MAY be allocated to a register stack (like the
2445 387) is treated poorly. Each such register is marked as being
2446 live everywhere. This keeps the register allocator and the
2447 subsequent passes from doing anything useful with these values.
2449 FIXME: This seems like an incredibly poor idea. */
2451 CLEAR_HARD_REG_SET (zero);
2452 CLEAR_HARD_REG_SET (stack_hard_regs);
2453 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
2454 SET_HARD_REG_BIT (stack_hard_regs, i);
2455 problem_data->stack_regs = BITMAP_ALLOC (NULL);
2456 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2458 COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
2459 IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
2460 AND_HARD_REG_SET (used, stack_hard_regs);
/* GO_IF_HARD_REG_EQUAL jumps to a "skip" label (elided in this copy)
   when the pseudo's classes contain no stack registers.  */
2461 GO_IF_HARD_REG_EQUAL (used, zero, skip);
2462 bitmap_set_bit (problem_data->stack_regs, i);
2468 /* We know that earlyclobber_regclass holds no more than
2469 N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
2470 earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2472 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
2474 df_urec_bb_local_compute (dflow, bb_index);
2477 VEC_free (int, heap, earlyclobber_regclass);
2481 /* Initialize the solution vectors. */
2484 df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2486 unsigned int bb_index;
2489 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2491 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
/* Seed OUT with the local gen set; IN starts empty and is filled in
   by the forward iteration.  */
2493 bitmap_copy (bb_info->out, bb_info->gen);
2494 bitmap_clear (bb_info->in);
/* Or in the stack regs, hard regs and early clobber regs into the
   ur_in sets of all of the blocks, then trim the solution against the
   LR (liveness) solution.  */
2503 df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
2505 struct df *df = dflow->df;
2506 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
2507 bitmap tmp = BITMAP_ALLOC (NULL);
2509 unsigned int bb_index;
2510 struct df_urec_problem_data *problem_data
2511 = (struct df_urec_problem_data *) dflow->problem_data;
2513 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2515 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2516 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
/* The entry and exit blocks carry no real insns, so the earlyclobber
   and stack-reg adjustments are skipped for them.  */
2518 if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
2520 if (problem_data->earlyclobbers_found)
2521 bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
2524 /* We can not use the same stack register for uninitialized
2525 pseudo-register and another living pseudo-register
2526 because if the uninitialized pseudo-register dies,
2527 subsequent pass reg-stack will be confused (it will
2528 believe that the other register dies). */
2529 bitmap_ior_into (bb_info->in, problem_data->stack_regs);
2530 bitmap_ior_into (bb_info->out, problem_data->stack_regs);
2534 /* No register may reach a location where it is not used. Thus
2535 we trim the rr result to the places where it is used. */
2536 bitmap_and_into (bb_info->in, bb_lr_info->in);
2537 bitmap_and_into (bb_info->out, bb_lr_info->out);
2540 /* Hard registers may still stick in the ur_out set, but not
2541 be in the ur_in set, if their only mention was in a call
2542 in this block. This is because a call kills in the lr
2543 problem but does not kill in the rr problem. To clean
2544 this up, we execute the transfer function on the lr_in
2545 set and then use that to knock bits out of ur_out. */
2546 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2548 bitmap_and_into (bb_info->out, tmp);
/* The stack_regs bitmap is only needed during finalization.  */
2553 BITMAP_FREE (problem_data->stack_regs);
2559 /* Confluence function that ignores fake edges. */
2562 df_urec_confluence_n (struct dataflow *dflow, edge e)
2564 bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2565 bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2567 if (e->flags & EDGE_FAKE)
2570 bitmap_ior_into (op1, op2);
2574 /* Transfer function. */
2577 df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2579 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2580 bitmap in = bb_info->in;
2581 bitmap out = bb_info->out;
2582 bitmap gen = bb_info->gen;
2583 bitmap kill = bb_info->kill;
2585 return bitmap_ior_and_compl (out, gen, in, kill);
2589 /* Free all storage associated with the problem. */
/* Frees every per-block record and its bitmaps, the block pool, the
   block_info array, and the shared problem data.  */
2592 df_urec_free (struct dataflow *dflow)
2594 if (dflow->block_info)
2598 for (i = 0; i < dflow->block_info_size; i++)
2600 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
2603 BITMAP_FREE (bb_info->gen);
2604 BITMAP_FREE (bb_info->kill);
2605 BITMAP_FREE (bb_info->in);
2606 BITMAP_FREE (bb_info->out);
2607 BITMAP_FREE (bb_info->earlyclobber);
2611 free_alloc_pool (dflow->block_pool);
2613 dflow->block_info_size = 0;
2614 free (dflow->block_info);
2615 free (dflow->problem_data);
2621 /* Debugging info.  */
/* Dump the in/gen/kill/earlyclobber/out bitmaps for every basic block to
   FILE.  Bails out early when the problem has no block info allocated.  */
2624 df_urec_dump (struct dataflow *dflow, FILE *file)
2628 if (!dflow->block_info)
2631 fprintf (file, "Undefined regs:\n");
2635 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
2636 df_print_bb_index (bb, file);
2641 fprintf (file, "  in  \t");
2642 dump_bitmap (file, bb_info->in);
2643 fprintf (file, "  gen \t");
2644 dump_bitmap (file, bb_info->gen);
2645 fprintf (file, "  kill\t");
2646 dump_bitmap (file, bb_info->kill);
2647 fprintf (file, "  ec\t");
2648 dump_bitmap (file, bb_info->earlyclobber);
2649 fprintf (file, "  out \t");
2650 dump_bitmap (file, bb_info->out);
2654 /* All of the information associated with every instance of the problem.  */
/* Virtual-function table for the UREC (uninitialized registers with early
   clobber) problem; consumed generically by the df solver driver.  */
2656 static struct df_problem problem_UREC =
2658 DF_UREC, /* Problem id.  */
2659 DF_FORWARD, /* Direction.  */
2660 df_urec_alloc, /* Allocate the problem specific data.  */
2661 NULL, /* Reset global information.  */
2662 df_urec_free_bb_info, /* Free basic block info.  */
2663 df_urec_local_compute, /* Local compute function.  */
2664 df_urec_init, /* Init the solution specific data.  */
2665 df_iterative_dataflow, /* Iterative solver.  */
2666 NULL, /* Confluence operator 0.  */
2667 df_urec_confluence_n, /* Confluence operator n.  */
2668 df_urec_transfer_function, /* Transfer function.  */
2669 df_urec_local_finalize, /* Finalize function.  */
2670 df_urec_free, /* Free all of the problem information.  */
2671 df_urec_dump, /* Debugging.  */
2672 df_lr_add_problem, /* Dependent problem.  */
2673 0 /* Changeable flags.  */
2677 /* Create a new DATAFLOW instance and add it to an existing instance
2678 of DF.  The returned structure is what is used to get at the
/* solution.  (Tail of comment elided in this listing.)  */
2682 df_urec_add_problem (struct df *df, int flags)
2684 return df_add_problem (df, &problem_UREC, flags);
2689 /*----------------------------------------------------------------------------
2690 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
2692 Link either the defs to the uses and / or the uses to the defs.
2694 These problems are set up like the other dataflow problems so that
2695 they nicely fit into the framework. They are much simpler and only
2696 involve a single traversal of instructions and an examination of
2697 the reaching defs information (the dependent problem).
2698 ----------------------------------------------------------------------------*/
2700 /* Create def-use or use-def chains.  */
/* (Re)allocate the chain pool and reset every DF_REF_CHAIN pointer so the
   finalize pass can rebuild chains from scratch.  Which chain directions
   are cleared depends on DF_DU_CHAIN / DF_UD_CHAIN in dflow->flags.  */
2703 df_chain_alloc (struct dataflow *dflow,
2704 bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
2705 bitmap all_blocks ATTRIBUTE_UNUSED)
2708 struct df *df = dflow->df;
2711 /* Wholesale destruction of the old chains.  */
2712 if (dflow->block_pool)
2713 free_alloc_pool (dflow->block_pool);
2715 dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
2716 sizeof (struct df_link), 100);
2718 if (dflow->flags & DF_DU_CHAIN)
/* Refs must be organized (sorted/compacted) before they are indexed.  */
2720 if (!df->def_info.refs_organized)
2721 df_reorganize_refs (&df->def_info);
2723 /* Clear out the pointers from the refs.  */
2724 for (i = 0; i < DF_DEFS_SIZE (df); i++)
2726 struct df_ref *ref = df->def_info.refs[i];
2727 DF_REF_CHAIN (ref) = NULL;
2731 if (dflow->flags & DF_UD_CHAIN)
2733 if (!df->use_info.refs_organized)
2734 df_reorganize_refs (&df->use_info);
2735 for (i = 0; i < DF_USES_SIZE (df); i++)
2737 struct df_ref *ref = df->use_info.refs[i];
2738 DF_REF_CHAIN (ref) = NULL;
2744 /* Reset all def_use and use_def chains in INSN.  */
/* NOTE(review): the listing elides the loop bodies that walk REF and clear
   each DF_REF_CHAIN (original lines 2762-2768 and 2772-2776) -- only the
   list-head loads and advances are visible here.  */
2747 df_chain_insn_reset (struct dataflow *dflow, rtx insn)
2749 struct df *df = dflow->df;
2750 unsigned int uid = INSN_UID (insn);
2751 struct df_insn_info *insn_info = NULL;
/* Guard against a uid past the end of the insn table.  */
2754 if (uid < df->insns_size)
2755 insn_info = DF_INSN_UID_GET (df, uid);
2759 if (dflow->flags & DF_DU_CHAIN)
2761 ref = insn_info->defs;
2765 ref = ref->next_ref;
2769 if (dflow->flags & DF_UD_CHAIN)
2771 ref = insn_info->uses;
2775 ref = ref->next_ref;
2782 /* Reset all def_use and use_def chains in basic block. */
2785 df_chain_bb_reset (struct dataflow *dflow, unsigned int bb_index)
2787 struct df *df = dflow->df;
2789 basic_block bb = BASIC_BLOCK (bb_index);
2791 /* Some one deleted the basic block out from under us. */
2795 FOR_BB_INSNS (bb, insn)
2799 /* Record defs within INSN. */
2800 df_chain_insn_reset (dflow, insn);
2804 /* Get rid of any chains in artificial uses or defs. */
2805 if (dflow->flags & DF_DU_CHAIN)
2808 def = df_get_artificial_defs (df, bb_index);
2812 def = def->next_ref;
2816 if (dflow->flags & DF_UD_CHAIN)
2819 use = df_get_artificial_uses (df, bb_index);
2823 use = use->next_ref;
2829 /* Reset all of the chains when the set of basic blocks changes.  */
/* Clears per-block chains for every block in BLOCKS_TO_CLEAR, then drops
   the whole alloc pool; the pool is lazily recreated by df_chain_alloc.  */
2833 df_chain_reset (struct dataflow *dflow, bitmap blocks_to_clear)
2836 unsigned int bb_index;
2838 EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear, 0, bb_index, bi)
2840 df_chain_bb_reset (dflow, bb_index);
2843 free_alloc_pool (dflow->block_pool);
2844 dflow->block_pool = NULL;
2848 /* Create the chains for a list of USEs.  */
/* For each use whose AT_TOP flag matches TOP_FLAG, link it to every
   reaching def of the same register recorded in LOCAL_RD.  The bitmap
   scan is bounded to [first_index, last_index], the index range that
   DF assigns to defs of this regno.  */
2851 df_chain_create_bb_process_use (struct dataflow *dflow,
2854 enum df_ref_flags top_flag)
2856 struct df *df = dflow->df;
2858 unsigned int def_index;
2862 /* Do not want to go through this for an uninitialized var.  */
2863 unsigned int uregno = DF_REF_REGNO (use);
2864 int count = DF_REG_DEF_GET (df, uregno)->n_refs;
2867 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2869 unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
2870 unsigned int last_index = first_index + count - 1;
2872 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
/* Defs are grouped by regno, so anything past last_index belongs to a
   different register; stop scanning.  */
2875 if (def_index > last_index)
2878 def = DF_DEFS_GET (df, def_index);
2879 if (dflow->flags & DF_DU_CHAIN)
2880 df_chain_create (dflow, def, use);
2881 if (dflow->flags & DF_UD_CHAIN)
2882 df_chain_create (dflow, use, def);
2886 use = use->next_ref;
2890 /* Reset the storage pool that the def-use or use-def chains have been
2891 allocated in.  We do not need to readjust the pointers in the refs;
2892 these have already been cleaned out.  */
2894 /* Create chains from reaching defs bitmaps for basic block BB.  */
/* Walks the block forwards, maintaining CPY as the set of defs that reach
   the current point: artificial AT_TOP uses/defs first, then each insn's
   uses are linked to the surviving defs before the insn's own defs update
   CPY, and finally the artificial uses at the block end are linked.  */
2896 df_chain_create_bb (struct dataflow *dflow,
2897 struct dataflow *rd_dflow,
2898 unsigned int bb_index)
2900 basic_block bb = BASIC_BLOCK (bb_index);
2901 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
2903 bitmap cpy = BITMAP_ALLOC (NULL);
2904 struct df *df = dflow->df;
/* Start from the reaching-defs solution at block entry.  */
2907 bitmap_copy (cpy, bb_info->in);
2909 /* Since we are going forwards, process the artificial uses first
2910 then the artificial defs second.  */
2913 /* Create the chains for the artificial uses from the EH_USES at the
2914 beginning of the block.  */
2915 df_chain_create_bb_process_use (dflow, cpy,
2916 df_get_artificial_uses (df, bb->index),
2920 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2921 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
2923 unsigned int dregno = DF_REF_REGNO (def);
/* A full (non-partial) def kills every earlier def of this regno.  */
2924 if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
2925 bitmap_clear_range (cpy,
2926 DF_REG_DEF_GET (df, dregno)->begin,
2927 DF_REG_DEF_GET (df, dregno)->n_refs);
2928 bitmap_set_bit (cpy, DF_REF_ID (def));
2931 /* Process the regular instructions next.  */
2932 FOR_BB_INSNS (bb, insn)
2935 unsigned int uid = INSN_UID (insn);
2940 /* Now scan the uses and link them up with the defs that remain
2941 in the cpy vector.  */
2943 df_chain_create_bb_process_use (dflow, cpy,
2944 DF_INSN_UID_USES (df, uid), 0);
2946 /* Since we are going forwards, process the defs second.  This
2947 pass only changes the bits in cpy.  */
2948 for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
2950 unsigned int dregno = DF_REF_REGNO (def);
2951 if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
2952 bitmap_clear_range (cpy,
2953 DF_REG_DEF_GET (df, dregno)->begin,
2954 DF_REG_DEF_GET (df, dregno)->n_refs);
/* Clobbers do not generate a usable value, so they do not start a
   reaching def.  */
2955 if (!(DF_REF_FLAGS (def)
2956 & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
2957 bitmap_set_bit (cpy, DF_REF_ID (def));
2961 /* Create the chains for the artificial uses of the hard registers
2962 at the end of the block.  */
2963 df_chain_create_bb_process_use (dflow, cpy,
2964 df_get_artificial_uses (df, bb->index), 0);
2967 /* Create def-use chains from reaching use bitmaps for basic blocks
/* in ALL_BLOCKS.  (Tail of comment elided in this listing.)  Fetches the
   reaching-defs problem instance and builds the chains block by block.  */
2971 df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
2973 unsigned int bb_index;
2975 struct df *df = dflow->df;
2976 struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
2978 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2980 df_chain_create_bb (dflow, rd_dflow, bb_index);
2985 /* Free all storage associated with the problem.  */
/* All df_link records live in the pool, so freeing it releases every
   chain at once.  */
2988 df_chain_free (struct dataflow *dflow)
2990 free_alloc_pool (dflow->block_pool);
2995 /* Debugging info.  */
/* Print every def-use and/or use-def chain to FILE, one ref per line with
   its block, luid, insn uid, regno and flags.  */
2998 df_chains_dump (struct dataflow *dflow, FILE *file)
3000 struct df *df = dflow->df;
3003 if (dflow->flags & DF_DU_CHAIN)
3005 fprintf (file, "Def-use chains:\n");
3006 for (j = 0; j < df->def_info.bitmap_size; j++)
3008 struct df_ref *def = DF_DEFS_GET (df, j);
/* NOTE(review): the listing elides parts of the conditional expressions
   feeding the luid/insn fields below -- confirm against full source.  */
3011 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
3012 j, DF_REF_BBNO (def),
3014 DF_INSN_LUID (df, DF_REF_INSN (def)):
3016 DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
3017 DF_REF_REGNO (def));
3018 if (def->flags & DF_REF_READ_WRITE)
3019 fprintf (file, "read/write ");
3020 df_chain_dump (DF_REF_CHAIN (def), file);
3021 fprintf (file, "\n");
3026 if (dflow->flags & DF_UD_CHAIN)
3028 fprintf (file, "Use-def chains:\n");
3029 for (j = 0; j < df->use_info.bitmap_size; j++)
3031 struct df_ref *use = DF_USES_GET (df, j);
3034 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3035 j, DF_REF_BBNO (use),
3037 DF_INSN_LUID (df, DF_REF_INSN (use))
3039 DF_REF_INSN (DF_USES_GET (df, j)) ?
3040 DF_REF_INSN_UID (DF_USES_GET (df,j))
3042 DF_REF_REGNO (use));
3043 if (use->flags & DF_REF_READ_WRITE)
3044 fprintf (file, "read/write ");
3045 if (use->flags & DF_REF_STRIPPED)
3046 fprintf (file, "stripped ");
3047 if (use->flags & DF_REF_IN_NOTE)
3048 fprintf (file, "note ");
3049 df_chain_dump (DF_REF_CHAIN (use), file);
3050 fprintf (file, "\n");
/* Virtual-function table for the CHAIN problem.  It is not an iterative
   dataflow problem: only alloc/reset/finalize/free/dump are provided, and
   it depends on reaching definitions (DF_RD).  */
3057 static struct df_problem problem_CHAIN =
3059 DF_CHAIN, /* Problem id.  */
3060 DF_NONE, /* Direction.  */
3061 df_chain_alloc, /* Allocate the problem specific data.  */
3062 df_chain_reset, /* Reset global information.  */
3063 NULL, /* Free basic block info.  */
3064 NULL, /* Local compute function.  */
3065 NULL, /* Init the solution specific data.  */
3066 NULL, /* Iterative solver.  */
3067 NULL, /* Confluence operator 0.  */
3068 NULL, /* Confluence operator n.  */
3069 NULL, /* Transfer function.  */
3070 df_chain_finalize, /* Finalize function.  */
3071 df_chain_free, /* Free all of the problem information.  */
3072 df_chains_dump, /* Debugging.  */
3073 df_rd_add_problem, /* Dependent problem.  */
3074 0 /* Changeable flags.  */
3078 /* Create a new DATAFLOW instance and add it to an existing instance
3079 of DF.  The returned structure is what is used to get at the
/* solution.  (Tail of comment elided in this listing.)  */
3083 df_chain_add_problem (struct df *df, int flags)
3085 return df_add_problem (df, &problem_CHAIN, flags);
3089 /*----------------------------------------------------------------------------
3090 REGISTER INFORMATION
3092 Currently this consists of only lifetime information and reg_dead
3094 ----------------------------------------------------------------------------*/
3096 #ifdef REG_DEAD_DEBUGGING
/* Debug helper: print PREFIX, the uid of INSN and the rtl of NOTE to
   stderr.  Only compiled when REG_DEAD_DEBUGGING is defined.  */
3098 print_note (char *prefix, rtx insn, rtx note)
3100 fprintf (stderr, "%s %d ", prefix, INSN_UID (insn));
3101 print_rtl (stderr, note);
3102 fprintf (stderr, "\n");
3106 /* Allocate the lifetime information.  */
/* When DF_RI_LIFE is requested, (re)size the register info table and seed
   the per-regno counters from the scan counts before the compute pass
   accumulates into them.  */
3109 df_ri_alloc (struct dataflow *dflow,
3110 bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
3111 bitmap all_blocks ATTRIBUTE_UNUSED)
3114 struct df *df = dflow->df;
3116 if (dflow->flags & DF_RI_LIFE)
3118 max_regno = max_reg_num ();
3119 allocate_reg_info (max_regno, FALSE, FALSE);
3121 /* Reset all the data we'll collect.  */
3122 for (i = 0; i < max_regno; i++)
3124 REG_N_SETS (i) = DF_REG_DEF_COUNT (df, i);
3125 REG_N_REFS (i) = DF_REG_USE_COUNT (df, i) + REG_N_SETS (i);
3126 REG_N_DEATHS (i) = 0;
3127 REG_N_CALLS_CROSSED (i) = 0;
3128 REG_N_THROWING_CALLS_CROSSED (i) = 0;
3129 REG_LIVE_LENGTH (i) = 0;
3131 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3137 /* After reg-stack, the x86 floating point stack regs are difficult to
3138 analyze because of all of the pushes, pops and rotations.  Thus, we
3139 just leave the notes alone.  */
/* Returns true for an x87 stack register once reg-stack has run; such
   regnos are excluded from note generation and death counting.  */
3142 df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
3145 return (regstack_completed
3146 && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG));
3153 /* Remove all of the REG_DEAD or REG_UNUSED notes from INSN.  */
/* Walks INSN's note list through PPREV so that a matching note can be
   unlinked in place; notes for ignored x87 stack registers are kept.
   When FLAGS includes DF_RI_LIFE, death counts are updated as notes are
   examined.  NOTE(review): this excerpt is a numbered listing with gaps;
   the loop header, case labels and default arm are elided -- confirm
   against the full source.
   FIX: original line 3158 read "rtx *pprev = ®_NOTES (insn);" -- the
   token "&REG_NOTES" had been corrupted by an HTML-entity pass
   ("&reg" -> the registered-sign character).  Restored to &REG_NOTES.  */
3156 df_kill_notes (rtx insn, int flags)
3158 rtx *pprev = &REG_NOTES (insn);
3163 switch (REG_NOTE_KIND (link))
3166 if (flags & DF_RI_LIFE)
3167 if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
3168 REG_N_DEATHS (REGNO (XEXP (link, 0)))++;
3172 if (!df_ignore_stack_reg (REGNO (XEXP (link, 0))))
/* Unlink and free the note: splice LINK out via *PPREV and continue with
   the successor.  */
3174 rtx next = XEXP (link, 1);
3175 #ifdef REG_DEAD_DEBUGGING
3176 print_note ("deleting: ", insn, link);
3178 free_EXPR_LIST_node (link);
3179 *pprev = link = next;
/* Note kept: advance the unlink anchor past it.  */
3184 pprev = &XEXP (link, 1);
3192 /* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
3193 based on the bits in LIVE.  Do not generate notes for registers in
3194 artificial uses.  DO_NOT_GEN is updated so that REG_DEAD notes are
3195 not generated if the reg is both read and written by the
/* instruction.  (Tail of comment elided in this listing.)  If every part
   of the multiword group is dead, one REG_UNUSED note covers the whole
   register; otherwise a note is emitted per dead part.  */
3200 df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
3201 bitmap live, bitmap do_not_gen,
3202 bitmap artificial_uses, int flags)
3204 bool all_dead = true;
3205 struct df_link *regs = mws->regs;
3206 unsigned int regno = DF_REF_REGNO (regs->ref);
3208 #ifdef REG_DEAD_DEBUGGING
3209 fprintf (stderr, "mw unused looking at %d\n", DF_REF_REGNO (regs->ref));
3210 df_ref_debug (regs->ref, stderr);
/* First pass: any live or artificially-used part clears ALL_DEAD.
   (The loop header and the all_dead update are elided in this listing.)  */
3214 unsigned int regno = DF_REF_REGNO (regs->ref);
3215 if ((bitmap_bit_p (live, regno))
3216 || bitmap_bit_p (artificial_uses, regno))
/* Whole group dead: one note on the full multiword register.  */
3226 struct df_link *regs = mws->regs;
3227 rtx note = alloc_EXPR_LIST (REG_UNUSED, *DF_REF_LOC (regs->ref),
3229 REG_NOTES (insn) = note;
3230 #ifdef REG_DEAD_DEBUGGING
3231 print_note ("adding 1: ", insn, note);
3233 bitmap_set_bit (do_not_gen, regno);
3234 /* Only do this if the value is totally dead.  */
3235 if (flags & DF_RI_LIFE)
3237 REG_N_DEATHS (regno) ++;
3238 REG_LIVE_LENGTH (regno)++;
/* Otherwise: walk the parts and emit a note per fully dead one.  */
3243 struct df_link *regs = mws->regs;
3246 struct df_ref *ref = regs->ref;
3248 regno = DF_REF_REGNO (ref);
3249 if ((!bitmap_bit_p (live, regno))
3250 && (!bitmap_bit_p (artificial_uses, regno)))
3252 rtx note = alloc_EXPR_LIST (REG_UNUSED, regno_reg_rtx[regno],
3254 REG_NOTES (insn) = note;
3255 #ifdef REG_DEAD_DEBUGGING
3256 print_note ("adding 2: ", insn, note);
3259 bitmap_set_bit (do_not_gen, regno);
3266 /* Set the REG_DEAD notes for the multiword hardreg use in INSN based
3267 on the bits in LIVE.  DO_NOT_GEN is used to keep REG_DEAD notes
3268 from being set if the instruction both reads and writes the
/* register.  (Tail of comment elided in this listing.)  Mirrors
   df_set_unused_notes_for_mw but emits REG_DEAD notes for uses.  */
3272 df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
3273 bitmap live, bitmap do_not_gen,
3274 bitmap artificial_uses, int flags)
3276 bool all_dead = true;
3277 struct df_link *regs = mws->regs;
3278 unsigned int regno = DF_REF_REGNO (regs->ref);
3280 #ifdef REG_DEAD_DEBUGGING
3281 fprintf (stderr, "mw looking at %d\n", DF_REF_REGNO (regs->ref));
3282 df_ref_debug (regs->ref, stderr);
/* First pass: any live or artificially-used part clears ALL_DEAD.
   (The loop header and the all_dead update are elided in this listing.)  */
3286 unsigned int regno = DF_REF_REGNO (regs->ref);
3287 if ((bitmap_bit_p (live, regno))
3288 || bitmap_bit_p (artificial_uses, regno))
/* Whole group dead and not suppressed: one REG_DEAD note for the full
   multiword register, plus a death count per part under DF_RI_LIFE.  */
3298 if (!bitmap_bit_p (do_not_gen, regno))
3300 /* Add a dead note for the entire multi word register.  */
3301 struct df_link *regs = mws->regs;
3302 rtx note = alloc_EXPR_LIST (REG_DEAD, *DF_REF_LOC (regs->ref),
3304 REG_NOTES (insn) = note;
3305 #ifdef REG_DEAD_DEBUGGING
3306 print_note ("adding 1: ", insn, note);
3309 if (flags & DF_RI_LIFE)
3311 struct df_link *regs = mws->regs;
3314 struct df_ref *ref = regs->ref;
3315 regno = DF_REF_REGNO (ref);
3316 REG_N_DEATHS (regno)++;
/* Otherwise: emit a REG_DEAD note for each individually dead,
   non-suppressed part.  */
3324 struct df_link *regs = mws->regs;
3327 struct df_ref *ref = regs->ref;
3329 regno = DF_REF_REGNO (ref);
3330 if ((!bitmap_bit_p (live, regno))
3331 && (!bitmap_bit_p (artificial_uses, regno))
3332 && (!bitmap_bit_p (do_not_gen, regno)))
3334 rtx note = alloc_EXPR_LIST (REG_DEAD, regno_reg_rtx[regno],
3336 REG_NOTES (insn) = note;
3337 if (flags & DF_RI_LIFE)
3338 REG_N_DEATHS (regno)++;
3339 #ifdef REG_DEAD_DEBUGGING
3340 print_note ("adding 2: ", insn, note);
3350 /* Create a REG_UNUSED note if necessary for DEF in INSN updating LIVE
3351 and DO_NOT_GEN.  Do not generate notes for registers in artificial
/* uses.  (Tail of comment elided in this listing.)  Also maintains the
   DF_RI_LIFE statistics: live length, death counts, frequency and the
   local/global basic-block classification of DREGNO.  */
3355 df_create_unused_note (basic_block bb, rtx insn, struct df_ref *def,
3356 bitmap live, bitmap do_not_gen, bitmap artificial_uses,
3357 bitmap local_live, bitmap local_processed,
3358 int flags, int luid)
3360 unsigned int dregno = DF_REF_REGNO (def);
3362 #ifdef REG_DEAD_DEBUGGING
3363 fprintf (stderr, "  regular looking at def ");
3364 df_ref_debug (def, stderr);
3367 if (bitmap_bit_p (live, dregno))
3369 if (flags & DF_RI_LIFE)
3371 /* If we have seen this regno, then it has already been
3372 processed correctly with the per insn increment.  If we
3373 have not seen it we need to add the length from here to
3374 the end of the block to the live length.  */
3375 if (bitmap_bit_p (local_processed, dregno))
3377 if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
3378 bitmap_clear_bit (local_live, dregno);
3382 bitmap_set_bit (local_processed, dregno);
3383 REG_LIVE_LENGTH (dregno) += luid;
/* DEF is not live after INSN and is a plain (non-multiword,
   non-artificial, non-stack) register: the value is never used, so add a
   REG_UNUSED note.  */
3387 else if ((!(DF_REF_FLAGS (def) & DF_REF_MW_HARDREG))
3388 && (!bitmap_bit_p (artificial_uses, dregno))
3389 && (!df_ignore_stack_reg (dregno)))
3391 rtx reg = GET_CODE (*DF_REF_LOC (def)) == SUBREG ?
3392 SUBREG_REG (*DF_REF_LOC (def)) : *DF_REF_LOC (def);
3393 rtx note = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
3394 REG_NOTES (insn) = note;
3395 #ifdef REG_DEAD_DEBUGGING
3396 print_note ("adding 3: ", insn, note);
3398 if (flags & DF_RI_LIFE)
3400 REG_N_DEATHS (dregno) ++;
3401 REG_LIVE_LENGTH (dregno)++;
/* Track whether the pseudo stays local to one block or goes global.  */
3405 if ((flags & DF_RI_LIFE) && (dregno >= FIRST_PSEUDO_REGISTER))
3407 REG_FREQ (dregno) += REG_FREQ_FROM_BB (bb);
3408 if (REG_BASIC_BLOCK (dregno) == REG_BLOCK_UNKNOWN)
3409 REG_BASIC_BLOCK (dregno) = bb->index;
3410 else if (REG_BASIC_BLOCK (dregno) != bb->index)
3411 REG_BASIC_BLOCK (dregno) = REG_BLOCK_GLOBAL;
3414 if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER + DF_REF_MAY_CLOBBER)))
3415 bitmap_set_bit (do_not_gen, dregno);
3417 /* Kill this register if it is not a subreg store.  */
3418 if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
3419 bitmap_clear_bit (live, dregno);
3423 /* Recompute the REG_DEAD and REG_UNUSED notes and compute register
3424 info: lifetime, bb, and number of defs and uses for basic block
3425 BB.  The three bitvectors are scratch regs used here.  */
/* NOTE(review): this excerpt is a numbered listing with gaps; several
   guards (e.g. the CALL_P/INSN_P tests around lines 3477-3500) and loop
   braces are elided -- confirm control flow against the full source.
   Overall shape: seed LIVE from live-out, walk the insns in reverse,
   killing old notes, generating REG_UNUSED for dead defs and REG_DEAD
   for last uses, and accumulating DF_RI_LIFE statistics.  */
3428 df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index,
3429 bitmap live, bitmap do_not_gen, bitmap artificial_uses,
3430 bitmap local_live, bitmap local_processed, bitmap setjumps_crossed)
3432 struct df *df = dflow->df;
3433 basic_block bb = BASIC_BLOCK (bb_index);
3439 bitmap_copy (live, df_get_live_out (df, bb));
3440 bitmap_clear (artificial_uses);
3442 if (dflow->flags & DF_RI_LIFE)
3444 /* Process the regs live at the end of the block.  Mark them as
3445 not local to any one basic block.  */
3448 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3449 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3452 /* Process the artificial defs and uses at the bottom of the block
3453 to begin processing.  */
3454 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
3455 if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
3456 bitmap_clear_bit (live, DF_REF_REGNO (def));
3458 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
3459 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
3461 unsigned int regno = DF_REF_REGNO (use);
3462 bitmap_set_bit (live, regno);
3464 /* Notes are not generated for any of the artificial registers
3465 at the bottom of the block.  */
3466 bitmap_set_bit (artificial_uses, regno);
3469 FOR_BB_INSNS_REVERSE (bb, insn)
3471 unsigned int uid = INSN_UID (insn);
3474 struct df_mw_hardreg *mws;
3479 if (dflow->flags & DF_RI_LIFE)
3481 /* Increment the live_length for all of the registers that
3482 are referenced in this block and live at this
3483 particular point.  */
3486 EXECUTE_IF_SET_IN_BITMAP (local_live, 0, regno, bi)
3488 REG_LIVE_LENGTH (regno)++;
3493 bitmap_clear (do_not_gen);
3494 df_kill_notes (insn, dflow->flags);
3496 /* Process the defs.  */
/* The branch below presumably handles CALL_P insns (the test itself is
   elided): count calls crossed and flag setjmp-crossing pseudos.  */
3499 if (dflow->flags & DF_RI_LIFE)
3501 bool can_throw = can_throw_internal (insn);
3502 bool set_jump = (find_reg_note (insn, REG_SETJMP, NULL) != NULL);
3503 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3505 REG_N_CALLS_CROSSED (regno)++;
3507 REG_N_THROWING_CALLS_CROSSED (regno)++;
3509 /* We have a problem with any pseudoreg that lives
3510 across the setjmp.  ANSI says that if a user
3511 variable does not change in value between the
3512 setjmp and the longjmp, then the longjmp
3513 preserves it.  This includes longjmp from a place
3514 where the pseudo appears dead.  (In principle,
3515 the value still exists if it is in scope.)  If
3516 the pseudo goes in a hard reg, some other value
3517 may occupy that hard reg where this pseudo is
3518 dead, thus clobbering the pseudo.  Conclusion:
3519 such a pseudo must not go in a hard reg.  */
3520 if (set_jump && regno >= FIRST_PSEUDO_REGISTER)
3521 bitmap_set_bit (setjumps_crossed, regno);
3525 /* We only care about real sets for calls.  Clobbers only
3526 may clobber and cannot be depended on.  */
3527 for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
3529 if ((mws->type == DF_REF_REG_DEF)
3530 && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
3531 df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
3532 artificial_uses, dflow->flags);
3535 /* All of the defs except the return value are some sort of
3536 clobber.  This code is for the return.  */
3537 for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
3538 if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
3539 df_create_unused_note (bb, insn, def, live, do_not_gen,
3540 artificial_uses, local_live,
3541 local_processed, dflow->flags, luid);
/* Non-call path (its "else" is elided): process every multiword def and
   every def of the insn.  */
3547 for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
3549 if (mws->type == DF_REF_REG_DEF)
3550 df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
3551 artificial_uses, dflow->flags);
3554 for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
3555 df_create_unused_note (bb, insn, def, live, do_not_gen,
3556 artificial_uses, local_live,
3557 local_processed, dflow->flags, luid);
3560 /* Process the uses.  */
3561 for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
3563 if ((mws->type != DF_REF_REG_DEF)
3564 && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
3565 df_set_dead_notes_for_mw (insn, mws, live, do_not_gen,
3566 artificial_uses, dflow->flags);
3569 for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
3571 unsigned int uregno = DF_REF_REGNO (use);
3573 if ((dflow->flags & DF_RI_LIFE) && (uregno >= FIRST_PSEUDO_REGISTER))
3575 REG_FREQ (uregno) += REG_FREQ_FROM_BB (bb);
3576 if (REG_BASIC_BLOCK (uregno) == REG_BLOCK_UNKNOWN)
3577 REG_BASIC_BLOCK (uregno) = bb->index;
3578 else if (REG_BASIC_BLOCK (uregno) != bb->index)
3579 REG_BASIC_BLOCK (uregno) = REG_BLOCK_GLOBAL;
3582 #ifdef REG_DEAD_DEBUGGING
3583 fprintf (stderr, "  regular looking at use ");
3584 df_ref_debug (use, stderr);
/* A use of a register not currently live is the last use going backwards,
   i.e. the death point: emit REG_DEAD unless suppressed.  */
3586 if (!bitmap_bit_p (live, uregno))
3588 if ( (!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG))
3589 && (!bitmap_bit_p (do_not_gen, uregno))
3590 && (!bitmap_bit_p (artificial_uses, uregno))
3591 && (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE))
3592 && (!df_ignore_stack_reg (uregno)))
3594 rtx reg = GET_CODE (*DF_REF_LOC (use)) == SUBREG ?
3595 SUBREG_REG (*DF_REF_LOC (use)) : *DF_REF_LOC (use);
3596 rtx note = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
3597 REG_NOTES (insn) = note;
3598 if (dflow->flags & DF_RI_LIFE)
3599 REG_N_DEATHS (uregno)++;
3601 #ifdef REG_DEAD_DEBUGGING
3602 print_note ("adding 4: ", insn, note);
3605 /* This register is now live.  */
3606 bitmap_set_bit (live, uregno);
3608 if (dflow->flags & DF_RI_LIFE)
3610 /* If we have seen this regno, then it has already
3611 been processed correctly with the per insn
3612 increment.  If we have not seen it we set the bit
3613 so that begins to get processed locally.  Note
3614 that we don't even get here if the variable was
3615 live at the end of the block since just a ref
3616 inside the block does not effect the
/* (tail of comment elided: ...calculations.)  */
3618 REG_LIVE_LENGTH (uregno) ++;
3619 bitmap_set_bit (local_live, uregno);
3620 bitmap_set_bit (local_processed, uregno);
3626 if (dflow->flags & DF_RI_LIFE)
3628 /* Add the length of the block to all of the registers that were
3629 not referenced, but still live in this block.  */
3632 bitmap_and_compl_into (live, local_processed);
3633 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3635 REG_LIVE_LENGTH (regno) += luid;
3637 bitmap_clear (local_processed);
3638 bitmap_clear (local_live);
3643 /* Compute register info: lifetime, bb, and number of defs and uses.  */
/* Driver: allocates the scratch bitmaps, runs df_ri_bb_compute on each
   block in BLOCKS_TO_SCAN, then applies the setjmp fixups and frees the
   scratch state.  The LIFE-only bitmaps stay NULL unless DF_RI_LIFE.  */
3645 df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
3646 bitmap blocks_to_scan)
3648 unsigned int bb_index;
3650 bitmap live = BITMAP_ALLOC (NULL);
3651 bitmap do_not_gen = BITMAP_ALLOC (NULL);
3652 bitmap artificial_uses = BITMAP_ALLOC (NULL);
3653 bitmap local_live = NULL;
3654 bitmap local_processed = NULL;
3655 bitmap setjumps_crossed = NULL;
3657 if (dflow->flags & DF_RI_LIFE)
3659 local_live = BITMAP_ALLOC (NULL);
3660 local_processed = BITMAP_ALLOC (NULL);
3661 setjumps_crossed = BITMAP_ALLOC (NULL);
3665 #ifdef REG_DEAD_DEBUGGING
3666 df_lr_dump (dflow->df->problems_by_index [DF_LR], stderr);
3667 print_rtl_with_bb (stderr, get_insns());
3670 EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
3672 df_ri_bb_compute (dflow, bb_index, live, do_not_gen, artificial_uses,
3673 local_live, local_processed, setjumps_crossed);
/* NOTE(review): the BITMAP_FREE (live) call (original ~line 3676) is
   elided from this listing.  */
3677 BITMAP_FREE (do_not_gen);
3678 BITMAP_FREE (artificial_uses);
3679 if (dflow->flags & DF_RI_LIFE)
3683 /* See the setjump comment in df_ri_bb_compute.  */
3684 EXECUTE_IF_SET_IN_BITMAP (setjumps_crossed, 0, regno, bi)
3686 REG_BASIC_BLOCK (regno) = REG_BLOCK_UNKNOWN;
3687 REG_LIVE_LENGTH (regno) = -1;
3690 BITMAP_FREE (local_live);
3691 BITMAP_FREE (local_processed);
3692 BITMAP_FREE (setjumps_crossed);
3697 /* Free all storage associated with the problem.  */
/* RI keeps no per-block info; only the problem-private data is freed.  */
3700 df_ri_free (struct dataflow *dflow)
3702 free (dflow->problem_data);
3707 /* Debugging info.  */
/* Dump the insn stream annotated with blocks and, when lifetime info was
   computed, the global register statistics.  */
3710 df_ri_dump (struct dataflow *dflow, FILE *file)
3712 print_rtl_with_bb (file, get_insns ());
3714 if (dflow->flags & DF_RI_LIFE)
3716 fprintf (file, "Register info:\n");
3717 dump_flow_info (file, -1);
3721 /* All of the information associated every instance of the problem.  */
/* Virtual-function table for the RI (register info / notes) problem.
   Like CHAIN it is not iterative: only the local compute, free and dump
   hooks are provided.  */
3723 static struct df_problem problem_RI =
3725 DF_RI, /* Problem id.  */
3726 DF_NONE, /* Direction.  */
3727 df_ri_alloc, /* Allocate the problem specific data.  */
3728 NULL, /* Reset global information.  */
3729 NULL, /* Free basic block info.  */
3730 df_ri_compute, /* Local compute function.  */
3731 NULL, /* Init the solution specific data.  */
3732 NULL, /* Iterative solver.  */
3733 NULL, /* Confluence operator 0.  */
3734 NULL, /* Confluence operator n.  */
3735 NULL, /* Transfer function.  */
3736 NULL, /* Finalize function.  */
3737 df_ri_free, /* Free all of the problem information.  */
3738 df_ri_dump, /* Debugging.  */
3740 /* Technically this is only dependent on the live registers problem
3741 but it will produce more information if one of the uninitialized
3742 register problems (UR, UREC) is also run.  */
3743 df_lr_add_problem, /* Dependent problem.  */
3744 0 /* Changeable flags.  */
3748 /* Create a new DATAFLOW instance and add it to an existing instance
3749 of DF.  The returned structure is what is used to get at the
/* solution.  (Tail of comment elided in this listing.)  */
3753 df_ri_add_problem (struct df *df, int flags)
3755 return df_add_problem (df, &problem_RI, flags);