1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 2, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to the Free
23 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
28 #include "coretypes.h"
32 #include "insn-config.h"
37 #include "alloc-pool.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
/* Registers with more than this many refs are killed "sparsely"
   (by register number and ref range) rather than with one bit per
   ref, to keep the kill bitmaps small.  */
#define DF_SPARSE_THRESHOLD 32

/* Scratch bitmaps shared by the local-compute routines: the set of
   regnos for which a def has been seen in the current block / the
   current insn.  */
static bitmap seen_in_block = NULL;
static bitmap seen_in_insn = NULL;
52 /*----------------------------------------------------------------------------
Public access functions for the dataflow problems.
54 ----------------------------------------------------------------------------*/
/* Get the instance of the problem that DFLOW is dependent on.
   Aborts (via gcc_assert) if DFLOW's problem description records no
   dependent problem.  */

df_get_dependent_problem (struct dataflow *dflow)
  struct df *df = dflow->df;
  struct df_problem *dependent_problem = dflow->problem->dependent_problem;

  /* A problem with no declared dependence must not call this.  */
  gcc_assert (dependent_problem);
  return df->problems_by_index[dependent_problem->id];
69 /* Create a du or ud chain from SRC to DST and link it into SRC. */
72 df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
74 struct df_link *head = DF_REF_CHAIN (src);
75 struct df_link *link = pool_alloc (dflow->block_pool);;
77 DF_REF_CHAIN (src) = link;
/* Delete a du or ud chain for REF.  If LINK is NULL, delete all
   chains for ref and check to see if the reverse chains can also be
   deleted.  If LINK is not NULL it must be a link off of ref.  In
   this case, the other end is not deleted.  */

df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
  struct df_link *chain = DF_REF_CHAIN (ref);
      /* Link was the first element in the chain.  */
      DF_REF_CHAIN (ref) = link->next;
          /* Link is an internal element in the chain: splice it out.  */
          struct df_link *prev = chain;
          prev->next = chain->next;
      /* Return the link's storage to the problem's allocation pool.  */
      pool_free (dflow->block_pool, link);
      /* If chain is NULL here, it was because of a recursive call
         when the other flavor of chains was not built.  Just run through
         the entire chain calling the other side and then deleting the
         link.  */
          struct df_link *next = chain->next;
          /* Delete the other side if it exists.  */
          df_chain_unlink (dflow, chain->ref, chain);
/* Copy the du or ud chain starting at FROM_REF and attach it to
   TO_REF: each link in the source chain yields a parallel link on
   TO_REF pointing at the same ref.  */

df_chain_copy (struct dataflow *dflow,
               struct df_ref *to_ref,
               struct df_link *from_ref)
    df_chain_create (dflow, to_ref, from_ref->ref);
    from_ref = from_ref->next;
/* Get the live in set for BB no matter what problem happens to be
   defined.  Checks UREC first, then UR, falling back to plain LR;
   the LR problem must always be present.  */

df_get_live_in (struct df *df, basic_block bb)
  gcc_assert (df->problems_by_index[DF_LR]);

  if (df->problems_by_index[DF_UREC])
    return DF_RA_LIVE_IN (df, bb);
  else if (df->problems_by_index[DF_UR])
    return DF_LIVE_IN (df, bb);
  return DF_UPWARD_LIVE_IN (df, bb);
/* Get the live out set for BB no matter what problem happens to be
   defined.  Same selection order as df_get_live_in: UREC, then UR,
   then LR.  */

df_get_live_out (struct df *df, basic_block bb)
  gcc_assert (df->problems_by_index[DF_LR]);

  if (df->problems_by_index[DF_UREC])
    return DF_RA_LIVE_OUT (df, bb);
  else if (df->problems_by_index[DF_UR])
    return DF_LIVE_OUT (df, bb);
  return DF_UPWARD_LIVE_OUT (df, bb);
182 /*----------------------------------------------------------------------------
184 ----------------------------------------------------------------------------*/
186 /* Generic versions to get the void* version of the block info. Only
used inside the problem instance vectors.  */
/* Grow the bb_info array so it can be indexed by any current basic
   block; new entries are zero-filled.  */

df_grow_bb_info (struct dataflow *dflow)
  unsigned int new_size = last_basic_block + 1;
  if (dflow->block_info_size < new_size)
      /* Add 25% headroom to amortize repeated growth.  */
      new_size += new_size / 4;
      dflow->block_info = xrealloc (dflow->block_info,
                                    new_size *sizeof (void*));
      /* Zero only the newly added tail of the array.  */
      memset (dflow->block_info + dflow->block_info_size, 0,
              (new_size - dflow->block_info_size) *sizeof (void *));
      dflow->block_info_size = new_size;
/* Dump a def-use or use-def chain starting at LINK to FILE.
   Each element prints as 'd' (def) or 'u' (use), its ref id, its
   block, and its insn uid (-1 when the ref has no insn).  */

df_chain_dump (struct df *df ATTRIBUTE_UNUSED, struct df_link *link, FILE *file)
  fprintf (file, "{ ");
  for (; link; link = link->next)
      fprintf (file, "%c%d(bb %d insn %d) ",
               DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
               DF_REF_ID (link->ref),
               DF_REF_BBNO (link->ref),
               DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
/* Print some basic block info as part of df_dump:
   "( preds )->[bb]->( succs )".  */

df_print_bb_index (basic_block bb, FILE *file)
  fprintf (file, "( ");
  FOR_EACH_EDGE (e, ei, bb->preds)
      basic_block pred = e->src;
      fprintf (file, "%d ", pred->index);
  fprintf (file, ")->[%d]->( ", bb->index);
  FOR_EACH_EDGE (e, ei, bb->succs)
      basic_block succ = e->dest;
      fprintf (file, "%d ", succ->index);
  fprintf (file, ")\n");
248 /* Return the set of reference ids in CHAIN, caching the result in *BMAP. */
251 df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
253 bitmap ids = maps[regno];
257 unsigned int end = start + count;;
258 ids = BITMAP_ALLOC (NULL);
260 for (i = start; i < end; i++)
261 bitmap_set_bit (ids, i);
/* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
   up (allocated) before local computation begins.  */
  seen_in_block = BITMAP_ALLOC (NULL);
  seen_in_insn = BITMAP_ALLOC (NULL);

  /* Release the scratch bitmaps again when local computation is
     done.  */
  BITMAP_FREE (seen_in_block);
  BITMAP_FREE (seen_in_insn);
287 /*----------------------------------------------------------------------------
290 Find the locations in the function where each use site for a pseudo
293 ----------------------------------------------------------------------------*/
/* Problem-wide (not per-block) data for the reaching-uses problem.  */
struct df_ru_problem_data
  bitmap *use_sites;            /* Bitmap of uses for each pseudo.  */
  unsigned int use_sites_size;  /* Size of use_sites.  */
  /* The set of defs to regs invalidated by call.  */
  bitmap sparse_invalidated_by_call;
  /* The set of defs to regs invalidated by call for ru.  */
  bitmap dense_invalidated_by_call;
305 /* Get basic block info. */
307 struct df_ru_bb_info *
308 df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
310 return (struct df_ru_bb_info *) dflow->block_info[index];
/* Set basic block info: record BB_INFO as the reaching-uses data for
   block INDEX.  */

df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_ru_bb_info *bb_info)
  dflow->block_info[index] = bb_info;
/* Free basic block info: release VBB_INFO's bitmaps and return the
   structure to the block pool.  */

df_ru_free_bb_info (struct dataflow *dflow,
                    basic_block bb ATTRIBUTE_UNUSED,
  struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
  BITMAP_FREE (bb_info->kill);
  BITMAP_FREE (bb_info->sparse_kill);
  BITMAP_FREE (bb_info->gen);
  BITMAP_FREE (bb_info->in);
  BITMAP_FREE (bb_info->out);
  pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
   not touched unless the block is new. */

df_ru_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
  unsigned int bb_index;
  unsigned int reg_size = max_reg_num ();

  /* Lazily create the pool that backs the per-block info.  */
  if (! dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_ru_block pool",
                                           sizeof (struct df_ru_bb_info), 50);

  if (dflow->problem_data)
      /* Reuse existing problem data: drop the cached per-register use
         bitmaps and grow the table if registers were added.  */
      struct df_ru_problem_data *problem_data =
        (struct df_ru_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->use_sites_size; i++)
          bitmap bm = problem_data->use_sites[i];
          problem_data->use_sites[i] = NULL;

      if (problem_data->use_sites_size < reg_size)
          problem_data->use_sites
            = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
          memset (problem_data->use_sites + problem_data->use_sites_size, 0,
                  (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
          problem_data->use_sites_size = reg_size;

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);

      /* First use: allocate fresh problem data.  */
      struct df_ru_problem_data *problem_data =
        xmalloc (sizeof (struct df_ru_problem_data));
      dflow->problem_data = problem_data;

      problem_data->use_sites = xcalloc (reg_size, sizeof (bitmap));
      problem_data->use_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);

  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
      /* Existing block: clear only the local sets; in/out keep their
         prior solution.  */
      bitmap_clear (bb_info->kill);
      bitmap_clear (bb_info->sparse_kill);
      bitmap_clear (bb_info->gen);
      /* New block: allocate all five bitmaps.  */
      bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
      df_ru_set_bb_info (dflow, bb_index, bb_info);
      bb_info->kill = BITMAP_ALLOC (NULL);
      bb_info->sparse_kill = BITMAP_ALLOC (NULL);
      bb_info->gen = BITMAP_ALLOC (NULL);
      bb_info->in = BITMAP_ALLOC (NULL);
      bb_info->out = BITMAP_ALLOC (NULL);
/* Process a list of DEFs for df_ru_bb_local_compute.  */

df_ru_bb_local_compute_process_def (struct dataflow *dflow,
                                    struct df_ru_bb_info *bb_info,
  struct df *df = dflow->df;
      unsigned int regno = DF_REF_REGNO (def);
      unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
      unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
      if (!bitmap_bit_p (seen_in_block, regno))
          /* The first def for regno, causes the kill info to be
             generated and the gen information to be cleared.  */
          if (!bitmap_bit_p (seen_in_insn, regno))
              /* Registers with many uses use the sparse (per-regno
                 range) kill representation to keep bitmaps small.  */
              if (n_uses > DF_SPARSE_THRESHOLD)
                  bitmap_set_bit (bb_info->sparse_kill, regno);
                  bitmap_clear_range (bb_info->gen, begin, n_uses);
                  /* Dense case: kill exactly the cached set of use
                     ids for this register.  */
                  struct df_ru_problem_data *problem_data =
                    (struct df_ru_problem_data *) dflow->problem_data;
                  df_ref_bitmap (problem_data->use_sites, regno,
                  bitmap_ior_into (bb_info->kill, uses);
                  bitmap_and_compl_into (bb_info->gen, uses);
          bitmap_set_bit (seen_in_insn, regno);
/* Process a list of USEs for df_ru_bb_local_compute.  Only uses whose
   DF_REF_AT_TOP flag matches TOP_FLAG are considered.  */

df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
                                    enum df_ref_flags top_flag)
      if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
          /* Add use to set of gens in this BB unless we have seen a
             def in a previous instruction.  */
          unsigned int regno = DF_REF_REGNO (use);
          if (!bitmap_bit_p (seen_in_block, regno))
            bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
/* Compute local reaching use (upward exposed use) info for basic
   block BB.  USE_INFO->REGS[R] caches the set of uses for register R.  */

df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);

  /* Set when a def for regno is seen.  */
  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  /* Variables defined in the prolog that are used by the exception
     handler.  */
  df_ru_bb_local_compute_process_use (bb_info,
                                      df_get_artificial_uses (df, bb_index),

  /* Process the artificial defs first since these are at the top of
     the block.  */
  df_ru_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index));

  FOR_BB_INSNS (bb, insn)
      unsigned int uid = INSN_UID (insn);

      df_ru_bb_local_compute_process_def (dflow, bb_info,
                                          DF_INSN_UID_GET (df, uid)->defs);

      /* The use processing must happen after the defs processing even
         though the uses logically happen first since the defs clear
         the gen set.  Otherwise, a use for regno occurring in the same
         instruction as a def for regno would be cleared.  */
      df_ru_bb_local_compute_process_use (bb_info,
                                          DF_INSN_UID_GET (df, uid)->uses, 0);

      /* Fold this insn's defs into the block-wide set.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);

  /* Process the hardware registers that are always live.  */
  df_ru_bb_local_compute_process_use (bb_info,
                                      df_get_artificial_uses (df, bb_index), 0);
/* Compute local reaching use (upward exposed use) info for each basic
   block within BLOCKS.  Also builds the EH-edge knockout sets.  */

df_ru_local_compute (struct dataflow *dflow,
                     bitmap rescan_blocks ATTRIBUTE_UNUSED)
  struct df *df = dflow->df;
  unsigned int bb_index;
  struct df_ru_problem_data *problem_data =
    (struct df_ru_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  /* The per-register [begin, n_refs] ranges used below require the
     use refs to be organized first.  */
  if (!df->use_info.refs_organized)
    df_reorganize_refs (&df->use_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
      df_ru_bb_local_compute (dflow, bb_index);

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
      struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
      /* Heavily-referenced registers go in the sparse set; the rest
         have their individual ref ids folded into the dense set.  */
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
        bitmap_set_bit (sparse_invalidated, regno);
          bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
                                       reg_info->begin, reg_info->n_refs);
          bitmap_ior_into (dense_invalidated, defs);
/* Initialize the solution bit vectors for problem: IN starts as GEN
   (backward problem), OUT starts empty.  */

df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
      bitmap_copy (bb_info->in, bb_info->gen);
      bitmap_clear (bb_info->out);
/* Out of target gets or of in of source.  Across an EH edge the refs
   of call-invalidated registers are masked out of the incoming set
   before merging.  */

df_ru_confluence_n (struct dataflow *dflow, edge e)
  bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
  bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;

  if (e->flags & EDGE_EH)
      struct df_ru_problem_data *problem_data =
        (struct df_ru_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;

      bitmap tmp = BITMAP_ALLOC (NULL);
      /* Knock out the dense (per-ref) invalidated ids...  */
      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);
      /* ...and the whole ref range of each sparse register.  */
      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
          bitmap_clear_range (tmp,
                              DF_REG_USE_GET (df, regno)->begin,
                              DF_REG_USE_GET (df, regno)->n_refs);
      bitmap_ior_into (op1, tmp);
      /* Non-EH edge: plain union.  */
      bitmap_ior_into (op1, op2);
/* Transfer function for the backward RU problem:
   IN = GEN | (OUT - KILL), with sparse kills applied as per-register
   ref ranges.  Returns whether IN changed.  */

df_ru_transfer_function (struct dataflow *dflow, int bb_index)
  struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  /* Common fast path: no sparse kills.  */
  if (bitmap_empty_p (sparse_kill))
    return bitmap_ior_and_compl (in, gen, out, kill);

      struct df *df = dflow->df;
      bool changed = false;
      bitmap tmp = BITMAP_ALLOC (NULL);
      /* NOTE(review): tmp is seeded from IN here, while the dense
         path above draws from OUT (and the forward RD twin seeds from
         its source set IN and compares against OUT) — verify this is
         intentional.  */
      bitmap_copy (tmp, in);
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
          bitmap_clear_range (tmp,
                              DF_REG_USE_GET (df, regno)->begin,
                              DF_REG_USE_GET (df, regno)->n_refs);
      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, in);
/* Free all storage associated with the problem: per-block bitmaps,
   the block pool, the cached use-site bitmaps, and the problem
   data itself.  */

df_ru_free (struct dataflow *dflow)
  struct df_ru_problem_data *problem_data =
    (struct df_ru_problem_data *) dflow->problem_data;

  for (i = 0; i < dflow->block_info_size; i++)
      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
          BITMAP_FREE (bb_info->kill);
          BITMAP_FREE (bb_info->sparse_kill);
          BITMAP_FREE (bb_info->gen);
          BITMAP_FREE (bb_info->in);
          BITMAP_FREE (bb_info->out);

  free_alloc_pool (dflow->block_pool);

  /* Release any cached per-register use bitmaps.  */
  for (i = 0; i < problem_data->use_sites_size; i++)
      bitmap bm = problem_data->use_sites[i];
  free (problem_data->use_sites);
  BITMAP_FREE (problem_data->sparse_invalidated_by_call);
  BITMAP_FREE (problem_data->dense_invalidated_by_call);

  dflow->block_info_size = 0;
  free (dflow->block_info);
  free (dflow->problem_data);
/* Debugging info: dump the RU problem's invalidated sets, each
   register's [begin, n_refs] use-id range, and the per-block
   in/gen/kill/out bitmaps to FILE.  */

df_ru_dump (struct dataflow *dflow, FILE *file)
  struct df *df = dflow->df;
  struct df_ru_problem_data *problem_data =
    (struct df_ru_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();

  fprintf (file, "Reaching uses:\n");

  fprintf (file, " sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, " dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  for (regno = 0; regno < m; regno++)
    if (DF_REG_USE_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
               DF_REG_USE_GET (df, regno)->begin,
               DF_REG_USE_GET (df, regno)->n_refs);
  fprintf (file, "\n");

      struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, " in \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, " gen \t");
      dump_bitmap (file, bb_info->gen);
      fprintf (file, " kill\t");
      dump_bitmap (file, bb_info->kill);
      fprintf (file, " out \t");
      dump_bitmap (file, bb_info->out);
/* All of the information associated with every instance of the problem.
   Function table consumed by the generic dataflow driver.  */

static struct df_problem problem_RU =
  DF_RU,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_ru_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_ru_free_bb_info,         /* Free basic block info.  */
  df_ru_local_compute,        /* Local compute function.  */
  df_ru_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_ru_confluence_n,         /* Confluence operator n.  */
  df_ru_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_ru_free,                 /* Free all of the problem information.  */
  df_ru_dump,                 /* Debugging.  */
  NULL                        /* Dependent problem.  */
/* Create a new DATAFLOW instance for the RU problem and add it to an
   existing instance of DF.  The returned structure is what is used
   to get at the solution.  */

df_ru_add_problem (struct df *df)
  return df_add_problem (df, &problem_RU);
807 /*----------------------------------------------------------------------------
810 Find the locations in the function where each definition site for a
812 ----------------------------------------------------------------------------*/
/* Problem-wide (not per-block) data for the reaching-defs problem.  */
struct df_rd_problem_data
  bitmap *def_sites;            /* Bitmap of defs for each pseudo.  */
  unsigned int def_sites_size;  /* Size of def_sites.  */
  /* The set of defs to regs invalidated by call.  */
  bitmap sparse_invalidated_by_call;
  /* The set of defs to regs invalidated by call for rd.  */
  bitmap dense_invalidated_by_call;
824 /* Get basic block info. */
826 struct df_rd_bb_info *
827 df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
829 return (struct df_rd_bb_info *) dflow->block_info[index];
/* Set basic block info: record BB_INFO as the reaching-defs data for
   block INDEX.  */

df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_rd_bb_info *bb_info)
  dflow->block_info[index] = bb_info;
/* Free basic block info: release VBB_INFO's bitmaps and return the
   structure to the block pool.  */

df_rd_free_bb_info (struct dataflow *dflow,
                    basic_block bb ATTRIBUTE_UNUSED,
  struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
  BITMAP_FREE (bb_info->kill);
  BITMAP_FREE (bb_info->sparse_kill);
  BITMAP_FREE (bb_info->gen);
  BITMAP_FREE (bb_info->in);
  BITMAP_FREE (bb_info->out);
  pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
   not touched unless the block is new. */

df_rd_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
  unsigned int bb_index;
  unsigned int reg_size = max_reg_num ();

  /* Lazily create the pool that backs the per-block info.  */
  if (! dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_rd_block pool",
                                           sizeof (struct df_rd_bb_info), 50);

  if (dflow->problem_data)
      /* Reuse existing problem data: drop the cached per-register def
         bitmaps and grow the table if registers were added.  */
      struct df_rd_problem_data *problem_data =
        (struct df_rd_problem_data *) dflow->problem_data;

      for (i = 0; i < problem_data->def_sites_size; i++)
          bitmap bm = problem_data->def_sites[i];
          problem_data->def_sites[i] = NULL;

      if (problem_data->def_sites_size < reg_size)
          problem_data->def_sites
            = xrealloc (problem_data->def_sites, reg_size *sizeof (bitmap));
          memset (problem_data->def_sites + problem_data->def_sites_size, 0,
                  (reg_size - problem_data->def_sites_size) *sizeof (bitmap));
          problem_data->def_sites_size = reg_size;

      bitmap_clear (problem_data->sparse_invalidated_by_call);
      bitmap_clear (problem_data->dense_invalidated_by_call);

      /* First use: allocate fresh problem data.  */
      struct df_rd_problem_data *problem_data =
        xmalloc (sizeof (struct df_rd_problem_data));
      dflow->problem_data = problem_data;

      problem_data->def_sites = xcalloc (reg_size, sizeof (bitmap));
      problem_data->def_sites_size = reg_size;
      problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
      problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);

  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
      /* Existing block: clear only the local sets; in/out keep their
         prior solution.  */
      bitmap_clear (bb_info->kill);
      bitmap_clear (bb_info->sparse_kill);
      bitmap_clear (bb_info->gen);
      /* New block: allocate all five bitmaps.  */
      bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
      df_rd_set_bb_info (dflow, bb_index, bb_info);
      bb_info->kill = BITMAP_ALLOC (NULL);
      bb_info->sparse_kill = BITMAP_ALLOC (NULL);
      bb_info->gen = BITMAP_ALLOC (NULL);
      bb_info->in = BITMAP_ALLOC (NULL);
      bb_info->out = BITMAP_ALLOC (NULL);
/* Process a list of DEFs for df_rd_bb_local_compute.  */

df_rd_bb_local_compute_process_def (struct dataflow *dflow,
                                    struct df_rd_bb_info *bb_info,
  struct df *df = dflow->df;
      unsigned int regno = DF_REF_REGNO (def);
      unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
      unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;

      /* Only the last def(s) for a regno in the block has any
         effect.  */
      if (!bitmap_bit_p (seen_in_block, regno))
          /* The first def for regno in insn gets to knock out the
             defs from other instructions.  */
          if (!bitmap_bit_p (seen_in_insn, regno))
              /* Registers with many defs use the sparse (per-regno
                 range) kill representation to keep bitmaps small.  */
              if (n_defs > DF_SPARSE_THRESHOLD)
                  bitmap_set_bit (bb_info->sparse_kill, regno);
                  bitmap_clear_range (bb_info->gen, begin, n_defs);
                  /* Dense case: kill exactly the cached set of def
                     ids for this register.  */
                  struct df_rd_problem_data *problem_data =
                    (struct df_rd_problem_data *) dflow->problem_data;
                  df_ref_bitmap (problem_data->def_sites, regno,
                  bitmap_ior_into (bb_info->kill, defs);
                  bitmap_and_compl_into (bb_info->gen, defs);

          bitmap_set_bit (seen_in_insn, regno);
          /* All defs for regno in the instruction may be put into
             the gen set — but clobbers do not generate.  */
          if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
            bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
/* Compute local reaching def info for basic block BB.  Walks the
   insns backwards so later defs shadow earlier ones.  */

df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);

  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  FOR_BB_INSNS_REVERSE (bb, insn)
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))

      df_rd_bb_local_compute_process_def (dflow, bb_info,
                                          DF_INSN_UID_GET (df, uid)->defs);

      /* This complex dance with the two bitmaps is required because
         instructions can assign twice to the same pseudo.  This
         generally happens with calls that will have one def for the
         result and another def for the clobber.  If only one vector
         is used and the clobber goes first, the result will be
         lost.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);

  /* Process the artificial defs last since we are going backwards
     through the block and these are logically at the start.  */
  df_rd_bb_local_compute_process_def (dflow, bb_info,
                                      df_get_artificial_defs (df, bb_index));
/* Compute local reaching def info for each basic block within BLOCKS.
   Also builds the EH-edge knockout sets.  */

df_rd_local_compute (struct dataflow *dflow,
                     bitmap rescan_blocks ATTRIBUTE_UNUSED)
  struct df *df = dflow->df;
  unsigned int bb_index;
  struct df_rd_problem_data *problem_data =
    (struct df_rd_problem_data *) dflow->problem_data;
  bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
  bitmap dense_invalidated = problem_data->dense_invalidated_by_call;

  /* The per-register [begin, n_refs] ranges used below require the
     def refs to be organized first.  */
  if (!df->def_info.refs_organized)
    df_reorganize_refs (&df->def_info);

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
      df_rd_bb_local_compute (dflow, bb_index);

  /* Set up the knockout bit vectors to be applied across EH_EDGES.  */
  EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
      struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
      /* Heavily-referenced registers go in the sparse set; the rest
         have their individual ref ids folded into the dense set.  */
      if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
          bitmap_set_bit (sparse_invalidated, regno);
          bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
                                       reg_info->begin, reg_info->n_refs);
          bitmap_ior_into (dense_invalidated, defs);
/* Initialize the solution bit vectors for problem: OUT starts as GEN
   (forward problem), IN starts empty.  */

df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);

      bitmap_copy (bb_info->out, bb_info->gen);
      bitmap_clear (bb_info->in);
/* In of target gets or of out of source.  Across an EH edge the refs
   of call-invalidated registers are masked out of the incoming set
   before merging.  */

df_rd_confluence_n (struct dataflow *dflow, edge e)
  bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
  bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;

  if (e->flags & EDGE_EH)
      struct df_rd_problem_data *problem_data =
        (struct df_rd_problem_data *) dflow->problem_data;
      bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
      bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
      struct df *df = dflow->df;

      bitmap tmp = BITMAP_ALLOC (NULL);
      /* Knock out the dense (per-ref) invalidated ids...  */
      bitmap_copy (tmp, op2);
      bitmap_and_compl_into (tmp, dense_invalidated);
      /* ...and the whole ref range of each sparse register.  */
      EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
          bitmap_clear_range (tmp,
                              DF_REG_DEF_GET (df, regno)->begin,
                              DF_REG_DEF_GET (df, regno)->n_refs);
      bitmap_ior_into (op1, tmp);
      /* Non-EH edge: plain union.  */
      bitmap_ior_into (op1, op2);
/* Transfer function for the forward RD problem:
   OUT = GEN | (IN - KILL), with sparse kills applied as per-register
   ref ranges.  Returns whether OUT changed.  */

df_rd_transfer_function (struct dataflow *dflow, int bb_index)
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
  bitmap in = bb_info->in;
  bitmap out = bb_info->out;
  bitmap gen = bb_info->gen;
  bitmap kill = bb_info->kill;
  bitmap sparse_kill = bb_info->sparse_kill;

  /* Common fast path: no sparse kills.  */
  if (bitmap_empty_p (sparse_kill))
    return bitmap_ior_and_compl (out, gen, in, kill);

      struct df *df = dflow->df;
      bool changed = false;
      bitmap tmp = BITMAP_ALLOC (NULL);
      bitmap_copy (tmp, in);
      EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
          bitmap_clear_range (tmp,
                              DF_REG_DEF_GET (df, regno)->begin,
                              DF_REG_DEF_GET (df, regno)->n_refs);
      bitmap_and_compl_into (tmp, kill);
      bitmap_ior_into (tmp, gen);
      changed = !bitmap_equal_p (tmp, out);
/* Free all storage associated with the problem: per-block bitmaps,
   the block pool, the cached def-site bitmaps, and the problem
   data itself.  */

df_rd_free (struct dataflow *dflow)
  struct df_rd_problem_data *problem_data =
    (struct df_rd_problem_data *) dflow->problem_data;

  for (i = 0; i < dflow->block_info_size; i++)
      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
          BITMAP_FREE (bb_info->kill);
          BITMAP_FREE (bb_info->sparse_kill);
          BITMAP_FREE (bb_info->gen);
          BITMAP_FREE (bb_info->in);
          BITMAP_FREE (bb_info->out);

  free_alloc_pool (dflow->block_pool);

  /* Release any cached per-register def bitmaps.  */
  for (i = 0; i < problem_data->def_sites_size; i++)
      bitmap bm = problem_data->def_sites[i];
  free (problem_data->def_sites);
  BITMAP_FREE (problem_data->sparse_invalidated_by_call);
  BITMAP_FREE (problem_data->dense_invalidated_by_call);

  dflow->block_info_size = 0;
  free (dflow->block_info);
  free (dflow->problem_data);
/* Debugging info: dump the RD problem's invalidated sets, each
   register's [begin, n_refs] def-id range, and the per-block
   in/gen/kill/out bitmaps (with their population counts) to FILE.  */

df_rd_dump (struct dataflow *dflow, FILE *file)
  struct df *df = dflow->df;
  struct df_rd_problem_data *problem_data =
    (struct df_rd_problem_data *) dflow->problem_data;
  unsigned int m = max_reg_num ();

  fprintf (file, "Reaching defs:\n\n");

  fprintf (file, "  sparse invalidated \t");
  dump_bitmap (file, problem_data->sparse_invalidated_by_call);
  fprintf (file, "  dense invalidated \t");
  dump_bitmap (file, problem_data->dense_invalidated_by_call);

  for (regno = 0; regno < m; regno++)
    if (DF_REG_DEF_GET (df, regno)->n_refs)
      fprintf (file, "%d[%d,%d] ", regno,
               DF_REG_DEF_GET (df, regno)->begin,
               DF_REG_DEF_GET (df, regno)->n_refs);
  fprintf (file, "\n");

      struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, "  in\t(%d)\n", (int) bitmap_count_bits (bb_info->in));
      dump_bitmap (file, bb_info->in);
      fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
      dump_bitmap (file, bb_info->gen);
      fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
      dump_bitmap (file, bb_info->kill);
      fprintf (file, "  out\t(%d)\n", (int) bitmap_count_bits (bb_info->out));
      dump_bitmap (file, bb_info->out);
/* All of the information associated with every instance of the problem.
   Function table consumed by the generic dataflow driver.  */

static struct df_problem problem_RD =
  DF_RD,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_rd_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_rd_free_bb_info,         /* Free basic block info.  */
  df_rd_local_compute,        /* Local compute function.  */
  df_rd_init_solution,        /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_rd_confluence_n,         /* Confluence operator n.  */
  df_rd_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_rd_free,                 /* Free all of the problem information.  */
  df_rd_dump,                 /* Debugging.  */
  NULL                        /* Dependent problem.  */
/* Create a new DATAFLOW instance for the RD problem and add it to an
   existing instance of DF.  The returned structure is what is used
   to get at the solution.  */

df_rd_add_problem (struct df *df)
  return df_add_problem (df, &problem_RD);
1301 /*----------------------------------------------------------------------------
1304 Find the locations in the function where any use of a pseudo can reach
1305 in the backwards direction.
1306 ----------------------------------------------------------------------------*/
1308 /* Get basic block info. */
1310 struct df_lr_bb_info *
1311 df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
1313 return (struct df_lr_bb_info *) dflow->block_info[index];
/* Set basic block info: record BB_INFO as the live-registers data for
   block INDEX.  */

df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
                   struct df_lr_bb_info *bb_info)
  dflow->block_info[index] = bb_info;
1327 /* Free basic block info. */
/* Release the four per-block bitmaps of VBB_INFO and return the record
   itself to DFLOW's block pool.  BB is unused; it is present only to
   match the problem's free_bb_info signature.  */
1330 df_lr_free_bb_info (struct dataflow *dflow,
1331 basic_block bb ATTRIBUTE_UNUSED,
1334 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
1337 BITMAP_FREE (bb_info->use);
1338 BITMAP_FREE (bb_info->def);
1339 BITMAP_FREE (bb_info->in);
1340 BITMAP_FREE (bb_info->out);
1341 pool_free (dflow->block_pool, bb_info);
1346 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1347 not touched unless the block is new. */
1350 df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1352 unsigned int bb_index;
/* Lazily create the allocation pool for per-block records.  */
1355 if (! dflow->block_pool)
1356 dflow->block_pool = create_alloc_pool ("df_lr_block pool",
1357 sizeof (struct df_lr_bb_info), 50);
1359 df_grow_bb_info (dflow);
1361 /* Because of the clustering of all def sites for the same pseudo,
1362 we have to process all of the blocks before doing the
1365 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1367 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
/* Existing block: clear only the local (def/use) sets; in/out keep
   the previous solution as the iteration's starting point.  */
1370 bitmap_clear (bb_info->def);
1371 bitmap_clear (bb_info->use);
/* New block: allocate all four bitmaps.  */
1375 bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
1376 df_lr_set_bb_info (dflow, bb_index, bb_info);
1377 bb_info->use = BITMAP_ALLOC (NULL);
1378 bb_info->def = BITMAP_ALLOC (NULL);
1379 bb_info->in = BITMAP_ALLOC (NULL);
1380 bb_info->out = BITMAP_ALLOC (NULL);
1386 /* Compute local live register info for basic block BB. */
/* Fill in bb_info->def (registers killed in the block) and
   bb_info->use (registers with an upward-exposed use) for the block
   with index BB_INDEX.  */
1389 df_lr_bb_local_compute (struct dataflow *dflow,
1390 struct df *df, unsigned int bb_index)
1392 basic_block bb = BASIC_BLOCK (bb_index);
1393 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1398 /* Process the hardware registers that are always live. */
1399 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1400 /* Add use to set of uses in this BB. */
1401 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
1402 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
/* Walk the insns bottom-to-top; within each insn defs are processed
   before uses, so a def kills any use that appears below it.  */
1404 FOR_BB_INSNS_REVERSE (bb, insn)
1406 unsigned int uid = INSN_UID (insn);
1408 if (! INSN_P (insn))
/* NOTE(review): a hard-reg def that only feeds a sibling call's
   return value (present in df->exit_block_uses and not mentioned by
   current_function_return_rtx) is deliberately not treated as a
   local kill -- confirm against the sibcall handling in df.c.  */
1413 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1415 unsigned int dregno = DF_REF_REGNO (def);
1417 if (dregno >= FIRST_PSEUDO_REGISTER
1418 || !(SIBLING_CALL_P (insn)
1419 && bitmap_bit_p (df->exit_block_uses, dregno)
1420 && !refers_to_regno_p (dregno, dregno+1,
1421 current_function_return_rtx,
1424 /* Add def to set of defs in this BB. */
1425 bitmap_set_bit (bb_info->def, dregno);
1426 bitmap_clear_bit (bb_info->use, dregno);
1432 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1434 unsigned int dregno = DF_REF_REGNO (def);
/* Record hard registers clobbered by inline asm in the global
   regs_asm_clobbered array, covering every hard reg the mode
   spans.  */
1436 if (DF_INSN_CONTAINS_ASM (df, insn)
1437 && dregno < FIRST_PSEUDO_REGISTER)
1441 dregno + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
1442 for (i = dregno; i <= end; ++i)
1443 regs_asm_clobbered[i] = 1;
1445 /* Add def to set of defs in this BB. */
1446 bitmap_set_bit (bb_info->def, dregno);
1447 bitmap_clear_bit (bb_info->use, dregno);
1451 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
1452 /* Add use to set of uses in this BB. */
1453 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1456 /* Process the registers set in an exception handler. */
1457 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1459 unsigned int dregno = DF_REF_REGNO (def);
1460 bitmap_set_bit (bb_info->def, dregno);
1461 bitmap_clear_bit (bb_info->use, dregno);
1465 /* Process the uses that are live into an exception handler. */
1466 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1467 /* Add use to set of uses in this BB. */
1468 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
1469 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1473 /* Compute local live register info for each basic block within BLOCKS. */
1476 df_lr_local_compute (struct dataflow *dflow,
1478 bitmap rescan_blocks)
1480 struct df *df = dflow->df;
1481 unsigned int bb_index;
1484 /* Assume that the stack pointer is unchanging if alloca hasn't
/* NOTE(review): the comment above looks stale -- the guarded code
   resets regs_asm_clobbered when a full (all-blocks) recomputation is
   requested, which is unrelated to alloca; confirm intent upstream.  */
1486 if (bitmap_equal_p (all_blocks, rescan_blocks))
1487 memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
1489 bitmap_clear (df->hardware_regs_used);
1491 /* The all-important stack pointer must always be live. */
1492 bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
1494 /* Before reload, there are a few registers that must be forced
1495 live everywhere -- which might not already be the case for
1496 blocks within infinite loops. */
1497 if (! reload_completed)
1499 /* Any reference to any pseudo before reload is a potential
1500 reference of the frame pointer. */
1501 bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
1503 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1504 /* Pseudos with argument area equivalences may require
1505 reloading via the argument pointer. */
1506 if (fixed_regs[ARG_POINTER_REGNUM])
1507 bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
1510 /* Any constant, or pseudo with constant equivalences, may
1511 require reloading from memory using the pic register. */
1512 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1513 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1514 bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
1517 if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
1519 /* The exit block is special for this problem and its bits are
1520 computed from thin air. */
1521 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
1522 bitmap_copy (bb_info->use, df->exit_block_uses);
/* Every other block to be rescanned gets the normal per-block scan;
   the exit block was already handled above.  */
1525 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1527 if (bb_index == EXIT_BLOCK)
1529 df_lr_bb_local_compute (dflow, df, bb_index);
1534 /* Initialize the solution vectors. */
/* Seed the backward iteration: in starts as the upward-exposed uses,
   out starts empty.  */
1537 df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1539 unsigned int bb_index;
1542 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1544 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1545 bitmap_copy (bb_info->in, bb_info->use);
1546 bitmap_clear (bb_info->out);
1551 /* Confluence function that processes infinite loops. This might be a
1552 noreturn function that throws. And even if it isn't, getting the
1553 unwind info right helps debugging. */
/* Confluence operator applied to a block with no (real) successors:
   force the always-live hardware registers into its out set, except
   for the exit block itself.  */
1555 df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
1557 struct df *df = dflow->df;
1559 bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
1560 if (bb != EXIT_BLOCK_PTR)
1561 bitmap_copy (op1, df->hardware_regs_used);
1565 /* Confluence function that ignores fake edges. */
/* Merge liveness from E->dest's in set into E->src's out set.  */
1567 df_lr_confluence_n (struct dataflow *dflow, edge e)
1569 bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
1570 bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
1572 /* Call-clobbered registers die across exception and call edges. */
1573 /* ??? Abnormal call edges ignored for the moment, as this gets
1574 confused by sibling call edges, which crashes reg-stack. */
1575 if (e->flags & EDGE_EH)
1576 bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call)
1578 bitmap_ior_into (op1, op2);
/* The always-live hardware registers stay live on every edge.  */
1580 bitmap_ior_into (op1, dflow->df->hardware_regs_used);
1584 /* Transfer function. */
/* Standard backward liveness equation: in = use | (out & ~def).
   Returns true if the in set changed.  */
1586 df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1588 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1589 bitmap in = bb_info->in;
1590 bitmap out = bb_info->out;
1591 bitmap use = bb_info->use;
1592 bitmap def = bb_info->def;
1594 return bitmap_ior_and_compl (in, use, out, def);
1598 /* Free all storage associated with the problem. */
/* Release every per-block record, the block pool, and the block_info
   array itself.  */
1601 df_lr_free (struct dataflow *dflow)
1603 if (dflow->block_info)
1606 for (i = 0; i < dflow->block_info_size; i++)
1608 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
1611 BITMAP_FREE (bb_info->use);
1612 BITMAP_FREE (bb_info->def);
1613 BITMAP_FREE (bb_info->in);
1614 BITMAP_FREE (bb_info->out);
1617 free_alloc_pool (dflow->block_pool);
1619 dflow->block_info_size = 0;
1620 free (dflow->block_info);
1626 /* Debugging info. */
/* Dump the in/use/def/out sets of every block to FILE.  */
1629 df_lr_dump (struct dataflow *dflow, FILE *file)
1633 fprintf (file, "Live Registers:\n");
1636 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
1637 df_print_bb_index (bb, file);
1642 fprintf (file, " in \t");
1643 dump_bitmap (file, bb_info->in);
1644 fprintf (file, " use \t");
1645 dump_bitmap (file, bb_info->use);
1646 fprintf (file, " def \t");
1647 dump_bitmap (file, bb_info->def);
1648 fprintf (file, " out \t");
1649 dump_bitmap (file, bb_info->out);
1653 /* All of the information associated with every instance of the problem. */
/* Descriptor for the live-registers (LR) problem; positional
   initializers matching the field order of struct df_problem.  */
1655 static struct df_problem problem_LR =
1657 DF_LR, /* Problem id. */
1658 DF_BACKWARD, /* Direction. */
1659 df_lr_alloc, /* Allocate the problem specific data. */
1660 NULL, /* Reset global information. */
1661 df_lr_free_bb_info, /* Free basic block info. */
1662 df_lr_local_compute, /* Local compute function. */
1663 df_lr_init, /* Init the solution specific data. */
1664 df_iterative_dataflow, /* Iterative solver. */
1665 df_lr_confluence_0, /* Confluence operator 0. */
1666 df_lr_confluence_n, /* Confluence operator n. */
1667 df_lr_transfer_function, /* Transfer function. */
1668 NULL, /* Finalize function. */
1669 df_lr_free, /* Free all of the problem information. */
1670 df_lr_dump, /* Debugging. */
1671 NULL /* Dependent problem. */
1675 /* Create a new DATAFLOW instance and add it to an existing instance
1676 of DF. The returned structure is what is used to get at the
/* Register the live-registers problem with DF and return the
   resulting dataflow instance.  */
1680 df_lr_add_problem (struct df *df)
1682 return df_add_problem (df, &problem_LR);
1687 /*----------------------------------------------------------------------------
1688 UNINITIALIZED REGISTERS
1690 Find the set of uses for registers that are reachable from the entry
1691 block without passing thru a definition.
1692 ----------------------------------------------------------------------------*/
1694 /* Get basic block info. */
/* Return the UR problem data recorded for basic block INDEX; may be
   NULL for a block that df_ur_alloc has not yet populated.  */
1696 struct df_ur_bb_info *
1697 df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1699 return (struct df_ur_bb_info *) dflow->block_info[index];
1703 /* Set basic block info. */
/* Record BB_INFO as the UR problem data for basic block INDEX.  */
1706 df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
1707 struct df_ur_bb_info *bb_info)
1709 dflow->block_info[index] = bb_info;
1713 /* Free basic block info. */
/* Release the four per-block bitmaps of VBB_INFO and return the record
   to DFLOW's block pool.  BB is unused; present only to match the
   problem's free_bb_info signature.  */
1716 df_ur_free_bb_info (struct dataflow *dflow,
1717 basic_block bb ATTRIBUTE_UNUSED,
1720 struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1723 BITMAP_FREE (bb_info->gen);
1724 BITMAP_FREE (bb_info->kill);
1725 BITMAP_FREE (bb_info->in);
1726 BITMAP_FREE (bb_info->out);
1727 pool_free (dflow->block_pool, bb_info);
1732 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1733 not touched unless the block is new. */
1736 df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1738 unsigned int bb_index;
/* Lazily create the allocation pool for per-block records.  */
1741 if (! dflow->block_pool)
1742 dflow->block_pool = create_alloc_pool ("df_ur_block pool",
1743 sizeof (struct df_ur_bb_info), 100);
1745 df_grow_bb_info (dflow);
1747 /* Because of the clustering of all def sites for the same pseudo,
1748 we have to process all of the blocks before doing the
1751 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1753 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
/* Existing block: clear only the local (gen/kill) sets; in/out keep
   the previous solution as the iteration's starting point.  */
1756 bitmap_clear (bb_info->kill);
1757 bitmap_clear (bb_info->gen);
/* New block: allocate all four bitmaps.  */
1761 bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
1762 df_ur_set_bb_info (dflow, bb_index, bb_info);
1763 bb_info->kill = BITMAP_ALLOC (NULL);
1764 bb_info->gen = BITMAP_ALLOC (NULL);
1765 bb_info->in = BITMAP_ALLOC (NULL);
1766 bb_info->out = BITMAP_ALLOC (NULL);
1772 /* Compute local uninitialized register info for basic block BB. */
/* Fill in bb_info->gen and bb_info->kill for block BB_INDEX.  Uses the
   file-level scratch bitmaps seen_in_block / seen_in_insn so that,
   scanning bottom-up, only the last def of each regno in the block
   contributes.  */
1775 df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1777 struct df *df = dflow->df;
1778 basic_block bb = BASIC_BLOCK (bb_index);
1779 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1783 bitmap_clear (seen_in_block);
1784 bitmap_clear (seen_in_insn);
1786 FOR_BB_INSNS_REVERSE (bb, insn)
1788 unsigned int uid = INSN_UID (insn);
1792 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1794 unsigned int regno = DF_REF_REGNO (def);
1795 /* Only the last def counts. */
1796 if (!bitmap_bit_p (seen_in_block, regno))
1798 bitmap_set_bit (seen_in_insn, regno);
/* A clobber kills without generating a (real) value.  */
1800 if (DF_REF_FLAGS (def) & DF_REF_CLOBBER)
1801 bitmap_set_bit (bb_info->kill, regno);
1803 bitmap_set_bit (bb_info->gen, regno);
/* Commit this insn's defs so later (earlier-in-block) defs of the
   same regnos are ignored.  */
1806 bitmap_ior_into (seen_in_block, seen_in_insn);
1807 bitmap_clear (seen_in_insn);
/* Artificial defs at the top of the block win over everything seen
   so far.  */
1810 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1812 unsigned int regno = DF_REF_REGNO (def);
1813 if (!bitmap_bit_p (seen_in_block, regno))
1815 bitmap_set_bit (seen_in_block, regno);
1816 bitmap_set_bit (bb_info->gen, regno);
1822 /* Compute local uninitialized register info. */
/* Run the per-block local computation over every block in
   RESCAN_BLOCKS.  ALL_BLOCKS is unused for this problem.  */
1825 df_ur_local_compute (struct dataflow *dflow,
1826 bitmap all_blocks ATTRIBUTE_UNUSED,
1827 bitmap rescan_blocks)
1829 unsigned int bb_index;
1834 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1836 df_ur_bb_local_compute (dflow, bb_index);
1843 /* Initialize the solution vectors. */
/* Seed the forward iteration: out starts as the gen set, in starts
   empty.  */
1846 df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1848 unsigned int bb_index;
1851 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1853 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1855 bitmap_copy (bb_info->out, bb_info->gen);
1856 bitmap_clear (bb_info->in);
1861 /* Or the hard regs into the ur_in and ur_out sets of all of the
1862 blocks, then trim both against the LR (liveness) solution. */
1865 df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
1867 struct df *df = dflow->df;
1868 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
1869 bitmap tmp = BITMAP_ALLOC (NULL);
1871 unsigned int bb_index;
1873 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1875 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1876 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
/* Treat every hard register as defined everywhere ...  */
1878 bitmap_ior_into (bb_info->in, df_all_hard_regs);
1879 bitmap_ior_into (bb_info->out, df_all_hard_regs);
1881 /* No register may reach a location where it is not used. Thus
1882 we trim the rr result to the places where it is used. */
1883 bitmap_and_into (bb_info->in, bb_lr_info->in);
1884 bitmap_and_into (bb_info->out, bb_lr_info->out);
1887 /* Hard registers may still stick in the ur_out set, but not
1888 be in the ur_in set, if their only mention was in a call
1889 in this block. This is because a call kills in the lr
1890 problem but does not kill in the ur problem. To clean
1891 this up, we execute the transfer function on the lr_in
1892 set and then use that to knock bits out of ur_out. */
1893 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
1895 bitmap_and_into (bb_info->out, tmp);
1903 /* Confluence function that ignores fake edges. */
/* Merge E->src's out set into E->dest's in set, skipping fake edges.  */
1906 df_ur_confluence_n (struct dataflow *dflow, edge e)
1908 bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
1909 bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
1911 if (e->flags & EDGE_FAKE)
1914 bitmap_ior_into (op1, op2);
1918 /* Transfer function. */
/* Standard forward equation: out = gen | (in & ~kill).
   Returns true if the out set changed.  */
1921 df_ur_transfer_function (struct dataflow *dflow, int bb_index)
1923 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1924 bitmap in = bb_info->in;
1925 bitmap out = bb_info->out;
1926 bitmap gen = bb_info->gen;
1927 bitmap kill = bb_info->kill;
1929 return bitmap_ior_and_compl (out, gen, in, kill);
1933 /* Free all storage associated with the problem. */
/* Release every per-block record, the block pool, and the block_info
   array itself.  */
1936 df_ur_free (struct dataflow *dflow)
1938 if (dflow->block_info)
1942 for (i = 0; i < dflow->block_info_size; i++)
1944 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
1947 BITMAP_FREE (bb_info->gen);
1948 BITMAP_FREE (bb_info->kill);
1949 BITMAP_FREE (bb_info->in);
1950 BITMAP_FREE (bb_info->out);
1954 free_alloc_pool (dflow->block_pool);
1955 dflow->block_info_size = 0;
1956 free (dflow->block_info);
1962 /* Debugging info. */
/* Dump the in/gen/kill/out sets of every block to FILE.  */
1965 df_ur_dump (struct dataflow *dflow, FILE *file)
1969 fprintf (file, "Undefined regs:\n");
1973 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
1974 df_print_bb_index (bb, file);
1979 fprintf (file, " in \t");
1980 dump_bitmap (file, bb_info->in);
1981 fprintf (file, " gen \t");
1982 dump_bitmap (file, bb_info->gen);
1983 fprintf (file, " kill\t");
1984 dump_bitmap (file, bb_info->kill);
1985 fprintf (file, " out \t");
1986 dump_bitmap (file, bb_info->out);
1990 /* All of the information associated with every instance of the problem. */
/* Descriptor for the uninitialized-registers (UR) problem; positional
   initializers matching the field order of struct df_problem.  Depends
   on LR because df_ur_local_finalize reads the LR solution.  */
1992 static struct df_problem problem_UR =
1994 DF_UR, /* Problem id. */
1995 DF_FORWARD, /* Direction. */
1996 df_ur_alloc, /* Allocate the problem specific data. */
1997 NULL, /* Reset global information. */
1998 df_ur_free_bb_info, /* Free basic block info. */
1999 df_ur_local_compute, /* Local compute function. */
2000 df_ur_init, /* Init the solution specific data. */
2001 df_iterative_dataflow, /* Iterative solver. */
2002 NULL, /* Confluence operator 0. */
2003 df_ur_confluence_n, /* Confluence operator n. */
2004 df_ur_transfer_function, /* Transfer function. */
2005 df_ur_local_finalize, /* Finalize function. */
2006 df_ur_free, /* Free all of the problem information. */
2007 df_ur_dump, /* Debugging. */
2008 &problem_LR /* Dependent problem. */
2012 /* Create a new DATAFLOW instance and add it to an existing instance
2013 of DF. The returned structure is what is used to get at the
/* Register the uninitialized-registers problem with DF and return the
   resulting dataflow instance.  */
2017 df_ur_add_problem (struct df *df)
2019 return df_add_problem (df, &problem_UR);
2024 /*----------------------------------------------------------------------------
2025 UNINITIALIZED REGISTERS WITH EARLYCLOBBER
2027 Find the set of uses for registers that are reachable from the entry
2028 block without passing thru a definition.
2030 This is a variant of the UR problem above that has a lot of special
2031 features just for the register allocation phase.
2032 ----------------------------------------------------------------------------*/
/* Per-instance data private to the UREC problem.  */
2034 struct df_urec_problem_data
2036 bool earlyclobbers_found; /* True if any instruction contains an
2039 bitmap stack_regs; /* Registers that may be allocated to a STACK_REGS. */
2044 /* Get basic block info. */
/* Return the UREC problem data recorded for basic block INDEX; may be
   NULL for a block that df_urec_alloc has not yet populated.  */
2046 struct df_urec_bb_info *
2047 df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2049 return (struct df_urec_bb_info *) dflow->block_info[index];
2053 /* Set basic block info. */
/* Record BB_INFO as the UREC problem data for basic block INDEX.  */
2056 df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
2057 struct df_urec_bb_info *bb_info)
2059 dflow->block_info[index] = bb_info;
2063 /* Free basic block info. */
/* Release the five per-block bitmaps of VBB_INFO and return the record
   to DFLOW's block pool.  BB is unused; present only to match the
   problem's free_bb_info signature.  */
2066 df_urec_free_bb_info (struct dataflow *dflow,
2067 basic_block bb ATTRIBUTE_UNUSED,
2070 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2073 BITMAP_FREE (bb_info->gen);
2074 BITMAP_FREE (bb_info->kill);
2075 BITMAP_FREE (bb_info->in);
2076 BITMAP_FREE (bb_info->out);
2077 BITMAP_FREE (bb_info->earlyclobber);
2078 pool_free (dflow->block_pool, bb_info);
2083 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
2084 not touched unless the block is new. */
2087 df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
2089 unsigned int bb_index;
2091 struct df_urec_problem_data *problem_data =
2092 (struct df_urec_problem_data *) dflow->problem_data;
/* Lazily create the allocation pool for per-block records.  */
2094 if (! dflow->block_pool)
2095 dflow->block_pool = create_alloc_pool ("df_urec_block pool",
2096 sizeof (struct df_urec_bb_info), 50);
/* Lazily create the problem-private data as well.  */
2098 if (!dflow->problem_data)
2100 problem_data = xmalloc (sizeof (struct df_urec_problem_data));
2101 dflow->problem_data = problem_data;
2103 problem_data->earlyclobbers_found = false;
2105 df_grow_bb_info (dflow);
2107 /* Because of the clustering of all def sites for the same pseudo,
2108 we have to process all of the blocks before doing the
2111 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
2113 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
/* Existing block: clear only the local sets; in/out keep the previous
   solution as the iteration's starting point.  */
2116 bitmap_clear (bb_info->kill);
2117 bitmap_clear (bb_info->gen);
2118 bitmap_clear (bb_info->earlyclobber);
/* New block: allocate all five bitmaps.  */
2122 bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
2123 df_urec_set_bb_info (dflow, bb_index, bb_info);
2124 bb_info->kill = BITMAP_ALLOC (NULL);
2125 bb_info->gen = BITMAP_ALLOC (NULL);
2126 bb_info->in = BITMAP_ALLOC (NULL);
2127 bb_info->out = BITMAP_ALLOC (NULL);
2128 bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2134 /* The function modifies local info for register REG being changed in
2135 SETTER. DATA is used to pass the current basic block info. */
/* note_stores callback: every touched regno is added to kill; it is
   added to gen only for a real set (a CLOBBER removes it from gen).
   For hard registers the whole multi-word span is processed.  */
2138 df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
2143 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
/* Look through a SUBREG to the underlying register.  */
2145 if (GET_CODE (reg) == SUBREG)
2146 reg = SUBREG_REG (reg);
2152 endregno = regno = REGNO (reg);
2153 if (regno < FIRST_PSEUDO_REGISTER)
2155 endregno +=hard_regno_nregs[regno][GET_MODE (reg)];
2157 for (i = regno; i < endregno; i++)
2159 bitmap_set_bit (bb_info->kill, i);
2161 if (GET_CODE (setter) != CLOBBER)
2162 bitmap_set_bit (bb_info->gen, i);
2164 bitmap_clear_bit (bb_info->gen, i);
/* Pseudo register: a single kill/gen update suffices.  */
2169 bitmap_set_bit (bb_info->kill, regno);
2171 if (GET_CODE (setter) != CLOBBER)
2172 bitmap_set_bit (bb_info->gen, regno);
2174 bitmap_clear_bit (bb_info->gen, regno);
2177 /* Classes of registers which could be early clobbered in the current
/* Scratch vector of register-class codes (as ints) gathered by
   df_urec_check_earlyclobber; allocated/freed per pass in
   df_urec_local_compute.  */
2181 DEF_VEC_ALLOC_I(int,heap);
2183 static VEC(int,heap) *earlyclobber_regclass;
2185 /* This function finds and stores register classes that could be early
2186 clobbered in INSN. If any earlyclobber classes are found, the function
2187 returns TRUE, in all other cases it returns FALSE. */
2190 df_urec_check_earlyclobber (rtx insn)
/* Extract the operands/constraints of INSN and walk every operand's
   constraint string.  */
2195 extract_insn (insn);
2197 VEC_truncate (int, earlyclobber_regclass, 0);
2198 for (opno = 0; opno < recog_data.n_operands; opno++)
2203 enum reg_class class;
2204 const char *p = recog_data.constraints[opno];
2213 case '=': case '+': case '?':
2216 case 'm': case '<': case '>': case 'V': case 'o':
2217 case 'E': case 'F': case 'G': case 'H':
2218 case 's': case 'i': case 'n':
2219 case 'I': case 'J': case 'K': case 'L':
2220 case 'M': case 'N': case 'O': case 'P':
2222 case '0': case '1': case '2': case '3': case '4':
2223 case '5': case '6': case '7': case '8': case '9':
2224 /* These don't say anything we care about. */
/* An earlyclobbered ('&') alternative with a real class is recorded
   once; the linear duplicate scan is fine since the vector holds at
   most N_REG_CLASSES entries.  */
2232 if (amp_p && class != NO_REGS)
2238 VEC_iterate (int, earlyclobber_regclass, i, rc);
2241 if (rc == (int) class)
2245 /* We use VEC_quick_push here because
2246 earlyclobber_regclass holds no more than
2247 N_REG_CLASSES elements. */
2248 VEC_quick_push (int, earlyclobber_regclass, (int) class);
/* 'r' means the general-register class; other letters are mapped
   through the target's constraint tables.  */
2258 class = GENERAL_REGS;
2262 class = REG_CLASS_FROM_CONSTRAINT (c, p);
2267 p += CONSTRAINT_LEN (c, p);
2274 /* The function checks that pseudo-register *X has a class
2275 intersecting with the class of pseudo-register could be early
2276 clobbered in the same insn.
2278 This function is a no-op if earlyclobber_regclass is empty.
2280 Reload can assign the same hard register to uninitialized
2281 pseudo-register and early clobbered pseudo-register in an insn if
2282 the pseudo-register is used first time in given BB and not lived at
2283 the BB start. To prevent this we don't change life information for
2284 such pseudo-registers. */
2287 df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
2289 enum reg_class pref_class, alt_class;
2291 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
/* Only pseudos already gen'd or killed in this block are candidates.  */
2293 if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2298 if (bitmap_bit_p (bb_info->kill, regno)
2299 || bitmap_bit_p (bb_info->gen, regno))
2301 pref_class = reg_preferred_class (regno);
2302 alt_class = reg_alternate_class (regno);
/* Mark the pseudo if its preferred or alternate class can collide
   with any recorded earlyclobber class.  */
2303 for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2305 if (reg_classes_intersect_p (rc, pref_class)
2307 && reg_classes_intersect_p (rc, alt_class)))
2309 bitmap_set_bit (bb_info->earlyclobber, regno);
2317 /* The function processes all pseudo-registers in *X with the aid of
2318 previous function. */
/* note_uses callback: recurse over *X applying
   df_urec_mark_reg_use_for_earlyclobber to every sub-rtx.  */
2321 df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2323 for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
2327 /* Compute local uninitialized register info for basic block BB. */
/* Fill in gen/kill/earlyclobber for block BB_INDEX by scanning its
   insns top-to-bottom with note_stores, plus the block's artificial
   defs.  */
2330 df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
2332 struct df *df = dflow->df;
2333 basic_block bb = BASIC_BLOCK (bb_index);
2334 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2338 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2340 unsigned int regno = DF_REF_REGNO (def);
2341 bitmap_set_bit (bb_info->gen, regno);
2344 FOR_BB_INSNS (bb, insn)
2348 note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
/* Earlyclobber detection is only needed for the register-allocation
   phases; when found, remember it in the problem data and mark the
   affected pseudo uses.  */
2349 if (df_state & (DF_SCAN_GLOBAL | DF_SCAN_POST_ALLOC)
2350 && df_urec_check_earlyclobber (insn))
2352 struct df_urec_problem_data *problem_data =
2353 (struct df_urec_problem_data *) dflow->problem_data;
2354 problem_data->earlyclobbers_found = true;
2355 note_uses (&PATTERN (insn),
2356 df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
2363 /* Compute local uninitialized register info. */
2366 df_urec_local_compute (struct dataflow *dflow,
2367 bitmap all_blocks ATTRIBUTE_UNUSED,
2368 bitmap rescan_blocks)
2370 unsigned int bb_index;
2374 HARD_REG_SET zero, stack_hard_regs, used;
2375 struct df_urec_problem_data *problem_data =
2376 (struct df_urec_problem_data *) dflow->problem_data;
2378 /* Any register that MAY be allocated to a register stack (like the
2379 387) is treated poorly. Each such register is marked as being
2380 live everywhere. This keeps the register allocator and the
2381 subsequent passes from doing anything useful with these values.
2383 FIXME: This seems like an incredibly poor idea. */
/* Collect, into problem_data->stack_regs, every pseudo whose preferred
   or alternate class overlaps the stack-register range.  */
2385 CLEAR_HARD_REG_SET (zero);
2386 CLEAR_HARD_REG_SET (stack_hard_regs);
2387 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
2388 SET_HARD_REG_BIT (stack_hard_regs, i);
2389 problem_data->stack_regs = BITMAP_ALLOC (NULL);
2390 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2392 COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
2393 IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
2394 AND_HARD_REG_SET (used, stack_hard_regs);
2395 GO_IF_HARD_REG_EQUAL (used, zero, skip);
2396 bitmap_set_bit (problem_data->stack_regs, i);
2402 /* We know that earlyclobber_regclass holds no more than
2403 N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
2404 earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2406 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
2408 df_urec_bb_local_compute (dflow, bb_index);
/* The scratch vector is only needed during the local scan.  */
2411 VEC_free (int, heap, earlyclobber_regclass);
2415 /* Initialize the solution vectors. */
2418 df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2420 unsigned int bb_index;
2423 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2425 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2427 /* FIXME: This is a hack, it has been copied over from
2428 make_accurate_live_analysis by Vlad. Most likely it is necessary
2429 because the generation of gen and kill information for hardware
2430 registers in ur is a subset of what is really necessary and what
2431 is done for the lr problem. */
2433 /* Inside the register allocator, partial availability is only
2434 allowed for the pseudo registers. To implement this, the rr is
2435 initially iored with a mask ones for the hard registers and zeros
2436 for the pseudos before being iterated. This means that each
2437 hardware register will be live unless explicitly killed by some
2438 statement. Eventually most of these bits will die because the
2439 results of rr are anded with the results of lr before being used.
2440 Outside of register allocation, a more conservative strategy of
2441 completely ignoring the uninitialized registers is employed in the
2442 finalizer function. */
2443 if (df_state & DF_SCAN_GLOBAL)
2445 bitmap_ior (bb_info->out, bb_info->gen, df_all_hard_regs);
2446 bitmap_copy (bb_info->in, df_all_hard_regs);
2450 bitmap_copy (bb_info->out, bb_info->gen);
2451 bitmap_clear (bb_info->in);
2457 /* Or in the stack regs, hard regs and early clobber regs into the
2458 ur_in sets of all of the blocks. */
2461 df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
2463 struct df *df = dflow->df;
2464 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
2465 bitmap tmp = BITMAP_ALLOC (NULL);
2467 unsigned int bb_index;
2468 struct df_urec_problem_data *problem_data =
2469 (struct df_urec_problem_data *) dflow->problem_data;
2471 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2473 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2474 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
/* The entry/exit blocks have no earlyclobber or stack-reg issues.  */
2476 if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
2478 if (problem_data->earlyclobbers_found)
2479 bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
2482 /* We can not use the same stack register for uninitialized
2483 pseudo-register and another living pseudo-register
2484 because if the uninitialized pseudo-register dies,
2485 subsequent pass reg-stack will be confused (it will
2486 believe that the other register dies). */
2487 bitmap_ior_into (bb_info->in, problem_data->stack_regs)
2488 bitmap_ior_into (bb_info->out, problem_data->stack_regs);
/* Outside the register allocator, hard regs were not forced in by
   df_urec_init, so force them here before trimming with lr.  */
2492 if (!(df_state & DF_SCAN_GLOBAL))
2494 bitmap_ior_into (bb_info->in, df_all_hard_regs);
2495 bitmap_ior_into (bb_info->out, df_all_hard_regs);
2498 /* No register may reach a location where it is not used. Thus
2499 we trim the rr result to the places where it is used. */
2500 bitmap_and_into (bb_info->in, bb_lr_info->in);
2501 bitmap_and_into (bb_info->out, bb_lr_info->out);
2504 /* Hard registers may still stick in the ur_out set, but not
2505 be in the ur_in set, if their only mention was in a call
2506 in this block. This is because a call kills in the lr
2507 problem but does not kill in the rr problem. To clean
2508 this up, we execute the transfer function on the lr_in
2509 set and then use that to knock bits out of ur_out. */
2510 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2512 bitmap_and_into (bb_info->out, tmp);
/* stack_regs was allocated by df_urec_local_compute and is only
   needed through finalization.  */
2517 BITMAP_FREE (problem_data->stack_regs);
2523 /* Confluence function that ignores fake edges. */
/* Merge E->src's out set into E->dest's in set, skipping fake edges.  */
2526 df_urec_confluence_n (struct dataflow *dflow, edge e)
2528 bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2529 bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2531 if (e->flags & EDGE_FAKE)
2534 bitmap_ior_into (op1, op2);
2538 /* Transfer function. */
/* Standard forward equation: out = gen | (in & ~kill).
   Returns true if the out set changed.  */
2541 df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2543 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2544 bitmap in = bb_info->in;
2545 bitmap out = bb_info->out;
2546 bitmap gen = bb_info->gen;
2547 bitmap kill = bb_info->kill;
2549 return bitmap_ior_and_compl (out, gen, in, kill);
2553 /* Free all storage associated with the problem. */
/* Release every per-block record, the block pool, the block_info
   array, and the problem-private data.  */
2556 df_urec_free (struct dataflow *dflow)
2558 if (dflow->block_info)
2562 for (i = 0; i < dflow->block_info_size; i++)
2564 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
2567 BITMAP_FREE (bb_info->gen);
2568 BITMAP_FREE (bb_info->kill);
2569 BITMAP_FREE (bb_info->in);
2570 BITMAP_FREE (bb_info->out);
2571 BITMAP_FREE (bb_info->earlyclobber);
2575 free_alloc_pool (dflow->block_pool);
2577 dflow->block_info_size = 0;
2578 free (dflow->block_info);
2579 free (dflow->problem_data);
2585 /* Debugging info. */
/* Dump the in/gen/kill/earlyclobber/out sets of every block to FILE.
   NOTE(review): the header string is identical to df_ur_dump's, so the
   two problems are indistinguishable in combined dumps.  */
2588 df_urec_dump (struct dataflow *dflow, FILE *file)
2592 fprintf (file, "Undefined regs:\n");
2596 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
2597 df_print_bb_index (bb, file);
2602 fprintf (file, " in \t");
2603 dump_bitmap (file, bb_info->in);
2604 fprintf (file, " gen \t");
2605 dump_bitmap (file, bb_info->gen);
2606 fprintf (file, " kill\t");
2607 dump_bitmap (file, bb_info->kill);
2608 fprintf (file, " ec\t");
2609 dump_bitmap (file, bb_info->earlyclobber);
2610 fprintf (file, " out \t");
2611 dump_bitmap (file, bb_info->out);
/* All of the information associated with every instance of the problem.
   This vtable-like record describes the UREC (uninitialized register
   with early clobber) problem to the generic dataflow driver.  */

static struct df_problem problem_UREC =
  DF_UREC,                    /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_urec_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_urec_free_bb_info,       /* Free basic block info.  */
  df_urec_local_compute,      /* Local compute function.  */
  df_urec_init,               /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_urec_confluence_n,       /* Confluence operator n.  */
  df_urec_transfer_function,  /* Transfer function.  */
  df_urec_local_finalize,     /* Finalize function.  */
  df_urec_free,               /* Free all of the problem information.  */
  df_urec_dump,               /* Debugging.  */
  &problem_LR                 /* Dependent problem: needs live registers.  */
2637 /* Create a new DATAFLOW instance and add it to an existing instance
2638 of DF. The returned structure is what is used to get at the
2642 df_urec_add_problem (struct df *df)
2644 return df_add_problem (df, &problem_UREC);
2649 /*----------------------------------------------------------------------------
2650 CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
2652 Link either the defs to the uses and / or the uses to the defs.
2654 These problems are set up like the other dataflow problems so that
2655 they nicely fit into the framework. They are much simpler and only
2656 involve a single traversal of instructions and an examination of
2657 the reaching defs information (the dependent problem).
2658 ----------------------------------------------------------------------------*/
2660 struct df_chain_problem_data
/* Create def-use or use-def chains.  Recreate the alloc pool that the
   df_link nodes come from (discarding all old chains wholesale) and
   clear the stale chain pointers from every organized ref.
   BLOCKS_TO_RESCAN is unused: chains are always rebuilt from scratch.  */

df_chain_alloc (struct dataflow *dflow,
                bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
  struct df *df = dflow->df;
  struct df_chain_problem_data *problem_data =
    (struct df_chain_problem_data *) dflow->problem_data;

  /* Wholesale destruction of the old chains.  */
  if (dflow->block_pool)
    free_alloc_pool (dflow->block_pool);

  dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
                                         sizeof (struct df_link), 100);

  if (problem_data->flags & DF_DU_CHAIN)
      /* The refs must be organized (ids contiguous per register)
         before they can be indexed below.  */
      if (!df->def_info.refs_organized)
        df_reorganize_refs (&df->def_info);

      /* Clear out the pointers from the refs.  */
      for (i = 0; i < DF_DEFS_SIZE (df); i++)
          struct df_ref *ref = df->def_info.refs[i];
          DF_REF_CHAIN (ref) = NULL;

  if (problem_data->flags & DF_UD_CHAIN)
      if (!df->use_info.refs_organized)
        df_reorganize_refs (&df->use_info);
      for (i = 0; i < DF_USES_SIZE (df); i++)
          struct df_ref *ref = df->use_info.refs[i];
          DF_REF_CHAIN (ref) = NULL;
/* Reset all def_use and use_def chains in INSN: drop the chain pointer
   of every def and/or use belonging to the instruction.  The df_link
   nodes themselves are reclaimed when the pool is freed.  */

df_chain_insn_reset (struct dataflow *dflow, rtx insn)
  struct df *df = dflow->df;
  struct df_chain_problem_data *problem_data =
    (struct df_chain_problem_data *) dflow->problem_data;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;

  /* Guard against a uid beyond the scanned insn table.  */
  if (uid < df->insns_size)
    insn_info = DF_INSN_UID_GET (df, uid);

      if (problem_data->flags & DF_DU_CHAIN)
          /* Walk the instruction's def list.  */
          ref = insn_info->defs;
            ref = ref->next_ref;

      if (problem_data->flags & DF_UD_CHAIN)
          /* Walk the instruction's use list.  */
          ref = insn_info->uses;
            ref = ref->next_ref;
/* Reset all def_use and use_def chains in basic block BB_INDEX,
   including the chains hanging off its artificial defs and uses.  */

df_chain_bb_reset (struct dataflow *dflow, unsigned int bb_index)
  struct df *df = dflow->df;
  struct df_chain_problem_data *problem_data =
    (struct df_chain_problem_data *) dflow->problem_data;

  basic_block bb = BASIC_BLOCK (bb_index);

  /* Someone deleted the basic block out from under us.  */

  FOR_BB_INSNS (bb, insn)
      /* Record defs within INSN.  */
      df_chain_insn_reset (dflow, insn);

  /* Get rid of any chains in artificial uses or defs.  */
  if (problem_data->flags & DF_DU_CHAIN)
      def = df_get_artificial_defs (df, bb_index);
        def = def->next_ref;

  if (problem_data->flags & DF_UD_CHAIN)
      use = df_get_artificial_uses (df, bb_index);
        use = use->next_ref;
/* Reset all of the chains when the set of basic blocks changes: clear
   the chain pointers in every block of BLOCKS_TO_CLEAR, then drop the
   pool so every df_link node is freed at once.  */

df_chain_reset (struct dataflow *dflow, bitmap blocks_to_clear)
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear, 0, bb_index, bi)
      df_chain_bb_reset (dflow, bb_index);

  /* Freeing the pool reclaims all chain nodes wholesale.  */
  free_alloc_pool (dflow->block_pool);
  dflow->block_pool = NULL;
/* Create the chains for a list of USEs.  For every use in the list
   whose DF_REF_AT_TOP flag matches TOP_FLAG, link the use to each
   reaching def of the same register recorded in LOCAL_RD, according
   to the DU/UD flags in PROBLEM_DATA.  */

df_chain_create_bb_process_use (struct dataflow *dflow,
                                struct df_chain_problem_data *problem_data,
                                enum df_ref_flags top_flag)
  struct df *df = dflow->df;
  unsigned int def_index;

      /* Do not want to go through this for an uninitialized var.  */
      unsigned int uregno = DF_REF_REGNO (use);
      int count = DF_REG_DEF_GET (df, uregno)->n_refs;

          if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
              /* The defs of register UREGNO occupy the contiguous id
                 range [first_index, last_index] in the RD bitmaps.  */
              unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
              unsigned int last_index = first_index + count - 1;

              EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
                  /* Ids past last_index belong to other registers.  */
                  if (def_index > last_index)

                  def = DF_DEFS_GET (df, def_index);
                  if (problem_data->flags & DF_DU_CHAIN)
                    df_chain_create (dflow, def, use);
                  if (problem_data->flags & DF_UD_CHAIN)
                    df_chain_create (dflow, use, def);

      use = use->next_ref;
/* Reset the storage pool that the def-use or use-def chains have been
   allocated in.  We do not need to readjust the pointers in the refs,
   as these have already been cleaned out.  */
/* Create chains from reaching defs bitmaps for basic block BB.
   Walks the block forwards with a working copy of the RD "in" set,
   linking each use to the defs that currently reach it and updating
   the set as defs are encountered.  */

df_chain_create_bb (struct dataflow *dflow,
                    struct dataflow *rd_dflow,
                    unsigned int bb_index)
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);

  /* CPY is the set of def ids reaching the current program point.  */
  bitmap cpy = BITMAP_ALLOC (NULL);
  struct df *df = dflow->df;
  struct df_chain_problem_data *problem_data =
    (struct df_chain_problem_data *) dflow->problem_data;

  bitmap_copy (cpy, bb_info->in);

  /* Since we are going forwards, process the artificial uses first
     then the artificial defs second.  */

  /* Create the chains for the artificial uses from the EH_USES at the
     beginning of the block.  */
  df_chain_create_bb_process_use (dflow, problem_data, cpy,
                                  df_get_artificial_uses (df, bb->index),

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
      unsigned int dregno = DF_REF_REGNO (def);
      /* A def kills every other def of the same register: clear the
         register's whole contiguous id range, then record this def
         (clobbers do not generate a reaching def).  */
      bitmap_clear_range (cpy,
                          DF_REG_DEF_GET (df, dregno)->begin,
                          DF_REG_DEF_GET (df, dregno)->n_refs);
      if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
        bitmap_set_bit (cpy, DF_REF_ID (def));

  /* Process the regular instructions next.  */
  FOR_BB_INSNS (bb, insn)
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))

      /* Now scan the uses and link them up with the defs that remain
         in the cpy vector.  */

      df_chain_create_bb_process_use (dflow, problem_data, cpy,
                                      DF_INSN_UID_GET (df, uid)->uses, 0);

      /* Since we are going forwards, process the defs second.  This
         pass only changes the bits in cpy.  */
      for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
          unsigned int dregno = DF_REF_REGNO (def);
          bitmap_clear_range (cpy,
                              DF_REG_DEF_GET (df, dregno)->begin,
                              DF_REG_DEF_GET (df, dregno)->n_refs);
          if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
            bitmap_set_bit (cpy, DF_REF_ID (def));

  /* Create the chains for the artificial uses of the hard registers
     at the end of the block.  */
  df_chain_create_bb_process_use (dflow, problem_data, cpy,
                                  df_get_artificial_uses (df, bb->index), 0);
2937 /* Create def-use chains from reaching use bitmaps for basic blocks
2941 df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
2943 unsigned int bb_index;
2945 struct df *df = dflow->df;
2946 struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
2948 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2950 df_chain_create_bb (dflow, rd_dflow, bb_index);
2955 /* Free all storage associated with the problem. */
2958 df_chain_free (struct dataflow *dflow)
2960 free_alloc_pool (dflow->block_pool);
2961 free (dflow->problem_data);
/* Debugging info: print every def-use and/or use-def chain (depending
   on the problem flags) to FILE, one ref per line with its block,
   luid, insn uid, register number and flag annotations.  */

df_chains_dump (struct dataflow *dflow, FILE *file)
  struct df *df = dflow->df;

  struct df_chain_problem_data *problem_data =
    (struct df_chain_problem_data *) dflow->problem_data;

  if (problem_data->flags & DF_DU_CHAIN)
      fprintf (file, "Def-use chains:\n");
      for (j = 0; j < df->def_info.bitmap_size; j++)
          struct df_ref *def = DF_DEFS_GET (df, j);
            /* -1 marks a def with no associated insn (artificial).  */
            fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
                     j, DF_REF_BBNO (def),
                     DF_INSN_LUID (df, DF_REF_INSN (def)),
                     DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
                     DF_REF_REGNO (def));
            if (def->flags & DF_REF_READ_WRITE)
              fprintf (file, "read/write ");
            df_chain_dump (df, DF_REF_CHAIN (def), file);
            fprintf (file, "\n");

  if (problem_data->flags & DF_UD_CHAIN)
      fprintf (file, "Use-def chains:\n");
      for (j = 0; j < df->use_info.bitmap_size; j++)
          struct df_ref *use = DF_USES_GET (df, j);
            fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
                     j, DF_REF_BBNO (use),
                     DF_INSN_LUID (df, DF_REF_INSN (use))
                     DF_REF_INSN (DF_USES_GET (df, j)) ?
                     DF_REF_INSN_UID (DF_USES_GET (df,j))
                     DF_REF_REGNO (use));
            /* Flag annotations mirror the ref's DF_REF_* flags.  */
            if (use->flags & DF_REF_READ_WRITE)
              fprintf (file, "read/write ");
            if (use->flags & DF_REF_STRIPPED)
              fprintf (file, "stripped ");
            if (use->flags & DF_REF_IN_NOTE)
              fprintf (file, "note ");
            df_chain_dump (df, DF_REF_CHAIN (use), file);
            fprintf (file, "\n");
/* All of the information associated with every instance of the problem.
   The chain problem has no iterative part: it only needs alloc, reset,
   finalize, free and dump hooks, and piggybacks on the RD solution.  */

static struct df_problem problem_CHAIN =
  DF_CHAIN,                   /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_chain_alloc,             /* Allocate the problem specific data.  */
  df_chain_reset,             /* Reset global information.  */
  NULL,                       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  df_chain_finalize,          /* Finalize function.  */
  df_chain_free,              /* Free all of the problem information.  */
  df_chains_dump,             /* Debugging.  */
  &problem_RD                 /* Dependent problem: reaching defs.  */
/* Create a new DATAFLOW instance for the chain problem and add it to
   an existing instance of DF.  The returned structure is what is used
   to get at the solution.  FLAGS selects DF_DU_CHAIN and/or
   DF_UD_CHAIN.  */

df_chain_add_problem (struct df *df, int flags)
  struct df_chain_problem_data *problem_data =
    xmalloc (sizeof (struct df_chain_problem_data));
  struct dataflow *dflow = df_add_problem (df, &problem_CHAIN);

  /* Record which chain directions the client asked for.  */
  dflow->problem_data = problem_data;
  problem_data->flags = flags;
3066 /*----------------------------------------------------------------------------
3067 REGISTER INFORMATION
3069 Currently this consists of only lifetime information. But the plan is
3070 to enhance it so that it produces all of the register information needed
3071 by the register allocators.
3072 ----------------------------------------------------------------------------*/
3075 struct df_ri_problem_data
3081 /* Allocate the lifetime information. */
3084 df_ri_alloc (struct dataflow *dflow, bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
3086 struct df_ri_problem_data *problem_data =
3087 (struct df_ri_problem_data *) dflow->problem_data;
3089 if (!dflow->problem_data)
3091 struct df_ri_problem_data *problem_data =
3092 xmalloc (sizeof (struct df_ri_problem_data));
3093 dflow->problem_data = problem_data;
3096 problem_data->lifetime = xrealloc (problem_data->lifetime,
3097 max_reg_num () *sizeof (int));
3098 memset (problem_data->lifetime, 0, max_reg_num () *sizeof (int));
/* Compute register info: lifetime, bb, and number of defs and uses
   for basic block BB.  Walks the block backwards from its live-out
   set, maintaining LIVE as the set of currently-live registers and
   crediting one unit of lifetime to each live register per insn.  */

df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index, bitmap live)
  struct df *df = dflow->df;
  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
  struct df_ri_problem_data *problem_data =
    (struct df_ri_problem_data *) dflow->problem_data;
  basic_block bb = BASIC_BLOCK (bb_index);

  /* Start from the registers live at the end of the block.  */
  bitmap_copy (live, bb_info->out);

  FOR_BB_INSNS_REVERSE (bb, insn)
      unsigned int uid = INSN_UID (insn);

      if (! INSN_P (insn))

      /* Going backwards, a def ends the register's liveness...  */
      for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
          unsigned int dregno = DF_REF_REGNO (def);

          /* Kill this register.  */
          bitmap_clear_bit (live, dregno);

      /* ...and a use starts it.  */
      for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
          unsigned int uregno = DF_REF_REGNO (use);

          /* This register is now live.  */
          bitmap_set_bit (live, uregno);

      /* Increment lifetimes of all live registers.  */
      EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
          problem_data->lifetime[regno]++;
/* Compute register info: lifetime, bb, and number of defs and uses.
   Drives df_ri_bb_compute over every block in BLOCKS_TO_SCAN, reusing
   a single scratch LIVE bitmap.  ALL_BLOCKS is unused.  */

df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
               bitmap blocks_to_scan)
  unsigned int bb_index;

  /* Scratch bitmap shared across blocks; each block copies its own
     live-out set into it before scanning.  */
  live = BITMAP_ALLOC (NULL);

  EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
      df_ri_bb_compute (dflow, bb_index, live);
/* Free all storage associated with the RI problem: the lifetime array
   and the problem-specific data.  */

df_ri_free (struct dataflow *dflow)
  struct df_ri_problem_data *problem_data =
    (struct df_ri_problem_data *) dflow->problem_data;

  free (problem_data->lifetime);
  free (dflow->problem_data);
/* Debugging info: print the accumulated lifetime of every register
   (pseudo and hard) to FILE.  */

df_ri_dump (struct dataflow *dflow, FILE *file)
  struct df_ri_problem_data *problem_data =
    (struct df_ri_problem_data *) dflow->problem_data;

  fprintf (file, "Register info:\n");
  for (j = 0; j < max_reg_num (); j++)
      fprintf (file, "reg %d life %d\n", j, problem_data->lifetime[j]);
/* All of the information associated with every instance of the problem.
   Like the chain problem, RI is a single-pass computation with no
   iterative part; it depends on the UR solution for live-out sets.  */

static struct df_problem problem_RI =
  DF_RI,                      /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_ri_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  NULL,                       /* Free basic block info.  */
  df_ri_compute,              /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_ri_free,                 /* Free all of the problem information.  */
  df_ri_dump,                 /* Debugging.  */
  &problem_UR                 /* Dependent problem: uninitialized regs.  */
3224 /* Create a new DATAFLOW instance and add it to an existing instance
3225 of DF. The returned structure is what is used to get at the
3229 df_ri_add_problem (struct df *df)
3231 return df_add_problem (df, &problem_RI);
3235 /* Return total lifetime (in insns) of REG. */
3237 df_reg_lifetime (struct df *df, rtx reg)
3239 struct dataflow *dflow = df->problems_by_index[DF_RI];
3240 struct df_ri_problem_data *problem_data =
3241 (struct df_ri_problem_data *) dflow->problem_data;
3242 return problem_data->lifetime[REGNO (reg)];