1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 2, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to the Free
23 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
28 #include "coretypes.h"
32 #include "insn-config.h"
37 #include "alloc-pool.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
/* A register whose reference count exceeds this threshold is tracked
   in the kill sets with a single per-regno "sparse" bit (plus a range
   clear over its contiguous ref ids) rather than one bit per
   reference.  */
47 #define DF_SPARSE_THRESHOLD 32

/* Scratch sets shared by the local-compute routines below: the regnos
   already seen defined in the current block / in the current insn.
   Allocated and released by helper routines later in this file.  */
49 static bitmap seen_in_block = NULL;
50 static bitmap seen_in_insn = NULL;
53 /*----------------------------------------------------------------------------
54 Public functions access functions for the dataflow problems.
55 ----------------------------------------------------------------------------*/
57 /* Get the instance of the problem that DFLOW is dependent on. */
60 df_get_dependent_problem (struct dataflow *dflow)
62 struct df *df = dflow->df;
63 struct df_problem *dependent_problem = dflow->problem->dependent_problem;
65 gcc_assert (dependent_problem);
66 return df->problems_by_index[dependent_problem->id];
70 /* Create a du or ud chain from SRC to DST and link it into SRC. */
73 df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
75 struct df_link *head = DF_REF_CHAIN (src);
76 struct df_link *link = pool_alloc (dflow->block_pool);;
78 DF_REF_CHAIN (src) = link;
85 /* Delete a du or ud chain for REF. If LINK is NULL, delete all
86 chains for ref and check to see if the reverse chains can also be
87 deleted. If LINK is not NULL it must be a link off of ref. In
88 this case, the other end is not deleted. */
/* NOTE(review): this listing has dropped lines (the embedded original
   numbering skips); the if/else framing, the walk loop that positions
   PREV, and the recursion's loop body are not visible.  The comments
   below annotate only what is shown.  */
91 df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
93 struct df_link *chain = DF_REF_CHAIN (ref);
96 /* Link was the first element in the chain. */
/* Unhook LINK by letting REF's chain begin at LINK's successor.  */
98 DF_REF_CHAIN (ref) = link->next;
101 /* Link is an internal element in the chain. */
102 struct df_link *prev = chain;
/* Presumably PREV has been advanced to the node before LINK by a walk
   loop not visible here; splice LINK out — TODO confirm.  */
107 prev->next = chain->next;
/* Chain links live in the problem's allocation pool.  */
114 pool_free (dflow->block_pool, link);
118 /* If chain is NULL here, it was because of a recursive call
119 when the other flavor of chains was not built. Just run thru
120 the entire chain calling the other side and then deleting the
   links.  */
124 struct df_link *next = chain->next;
125 /* Delete the other side if it exists. */
/* Passing a non-NULL LINK prevents the recursion from deleting this
   end again.  */
126 df_chain_unlink (dflow, chain->ref, chain);
133 /* Copy the du or ud chain starting at FROM_REF and attach it to
137 df_chain_copy (struct dataflow *dflow,
138 struct df_ref *to_ref,
139 struct df_link *from_ref)
143 df_chain_create (dflow, to_ref, from_ref->ref);
144 from_ref = from_ref->next;
149 /* Get the live in set for BB no matter what problem happens to be
153 df_get_live_in (struct df *df, basic_block bb)
155 gcc_assert (df->problems_by_index[DF_LR]);
157 if (df->problems_by_index[DF_UREC])
158 return DF_RA_LIVE_IN (df, bb);
159 else if (df->problems_by_index[DF_UR])
160 return DF_LIVE_IN (df, bb);
162 return DF_UPWARD_LIVE_IN (df, bb);
166 /* Get the live out set for BB no matter what problem happens to be
170 df_get_live_out (struct df *df, basic_block bb)
172 gcc_assert (df->problems_by_index[DF_LR]);
174 if (df->problems_by_index[DF_UREC])
175 return DF_RA_LIVE_OUT (df, bb);
176 else if (df->problems_by_index[DF_UR])
177 return DF_LIVE_OUT (df, bb);
179 return DF_UPWARD_LIVE_OUT (df, bb);
183 /*----------------------------------------------------------------------------
185 ----------------------------------------------------------------------------*/
187 /* Generic versions to get the void* version of the block info. Only
188 used inside the problem instance vectors. */
190 /* Grow the bb_info array. */
193 df_grow_bb_info (struct dataflow *dflow)
195 unsigned int new_size = last_basic_block + 1;
196 if (dflow->block_info_size < new_size)
198 new_size += new_size / 4;
199 dflow->block_info = xrealloc (dflow->block_info,
200 new_size *sizeof (void*));
201 memset (dflow->block_info + dflow->block_info_size, 0,
202 (new_size - dflow->block_info_size) *sizeof (void *));
203 dflow->block_info_size = new_size;
207 /* Dump a def-use or use-def chain for REF to FILE. */
210 df_chain_dump (struct df *df ATTRIBUTE_UNUSED, struct df_link *link, FILE *file)
212 fprintf (file, "{ ");
213 for (; link; link = link->next)
215 fprintf (file, "%c%d(bb %d insn %d) ",
216 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
217 DF_REF_ID (link->ref),
218 DF_REF_BBNO (link->ref),
219 DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
225 /* Print some basic block info as part of df_dump. */
228 df_print_bb_index (basic_block bb, FILE *file)
233 fprintf (file, "( ");
234 FOR_EACH_EDGE (e, ei, bb->preds)
236 basic_block pred = e->src;
237 fprintf (file, "%d ", pred->index);
239 fprintf (file, ")->[%d]->( ", bb->index);
240 FOR_EACH_EDGE (e, ei, bb->succs)
242 basic_block succ = e->dest;
243 fprintf (file, "%d ", succ->index);
245 fprintf (file, ")\n");
249 /* Return the set of reference ids in CHAIN, caching the result in *BMAP. */
252 df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
254 bitmap ids = maps[regno];
258 unsigned int end = start + count;;
259 ids = BITMAP_ALLOC (NULL);
261 for (i = start; i < end; i++)
262 bitmap_set_bit (ids, i);
268 /* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
274 seen_in_block = BITMAP_ALLOC (NULL);
275 seen_in_insn = BITMAP_ALLOC (NULL);
282 BITMAP_FREE (seen_in_block);
283 BITMAP_FREE (seen_in_insn);
288 /*----------------------------------------------------------------------------
291 Find the locations in the function where each use site for a pseudo
294 ----------------------------------------------------------------------------*/
296 struct df_ru_problem_data
298 bitmap *use_sites; /* Bitmap of uses for each pseudo. */
299 unsigned int use_sites_size; /* Size of use_sites. */
300 /* The set of defs to regs invalidated by call. */
301 bitmap sparse_invalidated_by_call;
302 /* The set of defs to regs invalidated by call for ru. */
303 bitmap dense_invalidated_by_call;
306 /* Get basic block info. */
308 struct df_ru_bb_info *
309 df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
311 return (struct df_ru_bb_info *) dflow->block_info[index];
315 /* Set basic block info. */
318 df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
319 struct df_ru_bb_info *bb_info)
321 dflow->block_info[index] = bb_info;
325 /* Free basic block info. */
/* Release all per-block RU bitmaps and return the bb_info record to
   DFLOW's block pool.  (A surrounding null-check and the VBB_INFO
   parameter line are not visible in this listing.)  */
328 df_ru_free_bb_info (struct dataflow *dflow,
329 basic_block bb ATTRIBUTE_UNUSED,
332 struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
335 BITMAP_FREE (bb_info->kill);
336 BITMAP_FREE (bb_info->sparse_kill);
337 BITMAP_FREE (bb_info->gen);
338 BITMAP_FREE (bb_info->in);
339 BITMAP_FREE (bb_info->out);
340 pool_free (dflow->block_pool, bb_info);
345 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
346 not touched unless the block is new. */
/* NOTE(review): lines are missing from this listing (declarations,
   braces, and else-branches); comments below annotate what is
   shown.  */
349 df_ru_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
351 unsigned int bb_index;
353 unsigned int reg_size = max_reg_num ();
/* Lazily create the pool that per-block info records come from.  */
355 if (! dflow->block_pool)
356 dflow->block_pool = create_alloc_pool ("df_ru_block pool",
357 sizeof (struct df_ru_bb_info), 50);
/* Re-use path: the problem was allocated before, so reset the cached
   use-site bitmaps and grow the table if new pseudos appeared.  */
359 if (dflow->problem_data)
362 struct df_ru_problem_data *problem_data =
363 (struct df_ru_problem_data *) dflow->problem_data;
/* Drop every cached per-regno use-site bitmap; they are rebuilt on
   demand by df_ref_bitmap.  */
365 for (i = 0; i < problem_data->use_sites_size; i++)
367 bitmap bm = problem_data->use_sites[i];
371 problem_data->use_sites[i] = NULL;
375 if (problem_data->use_sites_size < reg_size)
377 problem_data->use_sites
378 = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
379 memset (problem_data->use_sites + problem_data->use_sites_size, 0,
380 (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
381 problem_data->use_sites_size = reg_size;
384 bitmap_clear (problem_data->sparse_invalidated_by_call);
385 bitmap_clear (problem_data->dense_invalidated_by_call);
/* First-time path (presumably the else branch, not visible here):
   allocate fresh problem data.  */
389 struct df_ru_problem_data *problem_data = XNEW (struct df_ru_problem_data);
390 dflow->problem_data = problem_data;
392 problem_data->use_sites = XCNEWVEC (bitmap, reg_size);
393 problem_data->use_sites_size = reg_size;
394 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
395 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
398 df_grow_bb_info (dflow);
400 /* Because of the clustering of all def sites for the same pseudo,
401 we have to process all of the blocks before doing the
   analysis.  */
404 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
406 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
/* Existing block: clear only the local sets; in/out keep their
   previous solution as a starting point.  */
409 bitmap_clear (bb_info->kill);
410 bitmap_clear (bb_info->sparse_kill);
411 bitmap_clear (bb_info->gen);
/* New block: allocate the record and all five bitmaps.  */
415 bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
416 df_ru_set_bb_info (dflow, bb_index, bb_info);
417 bb_info->kill = BITMAP_ALLOC (NULL);
418 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
419 bb_info->gen = BITMAP_ALLOC (NULL);
420 bb_info->in = BITMAP_ALLOC (NULL);
421 bb_info->out = BITMAP_ALLOC (NULL);
427 /* Process a list of DEFs for df_ru_bb_local_compute. */
/* NOTE(review): the loop that walks the DEF chain and the else-branch
   framing are not visible in this listing.  */
430 df_ru_bb_local_compute_process_def (struct dataflow *dflow,
431 struct df_ru_bb_info *bb_info,
433 enum df_ref_flags top_flag)
435 struct df *df = dflow->df;
/* Only process refs whose AT_TOP flag matches the requested pass
   (artificial refs at the block top vs. everything else).  */
438 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
440 unsigned int regno = DF_REF_REGNO (def);
/* Use ids for a regno occupy the contiguous range
   [begin, begin + n_refs).  */
441 unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
442 unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
443 if (!bitmap_bit_p (seen_in_block, regno))
445 /* The first def for regno causes the kill info to be
446 generated and the gen information to be cleared. */
447 if (!bitmap_bit_p (seen_in_insn, regno))
/* Heavily-referenced regnos get a single sparse-kill bit plus a range
   clear instead of a dense per-use kill set.  */
449 if (n_uses > DF_SPARSE_THRESHOLD)
451 bitmap_set_bit (bb_info->sparse_kill, regno);
452 bitmap_clear_range (bb_info->gen, begin, n_uses);
456 struct df_ru_problem_data * problem_data =
457 (struct df_ru_problem_data *)dflow->problem_data;
/* Dense path: kill every use site of regno and remove them from
   gen.  */
459 df_ref_bitmap (problem_data->use_sites, regno,
461 bitmap_ior_into (bb_info->kill, uses);
462 bitmap_and_compl_into (bb_info->gen, uses);
465 bitmap_set_bit (seen_in_insn, regno);
473 /* Process a list of USEs for df_ru_bb_local_compute. */
/* NOTE(review): the loop walking the USE chain and the USE parameter
   line are not visible in this listing.  */
476 df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
478 enum df_ref_flags top_flag)
/* Match the AT_TOP flag against the requested pass, as in the def
   processing.  */
482 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
484 /* Add use to set of gens in this BB unless we have seen a
485 def in a previous instruction. */
486 unsigned int regno = DF_REF_REGNO (use);
487 if (!bitmap_bit_p (seen_in_block, regno))
488 bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
494 /* Compute local reaching use (upward exposed use) info for basic
495 block BB. USE_INFO->REGS[R] caches the set of uses for register R. */
497 df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
499 struct df *df = dflow->df;
500 basic_block bb = BASIC_BLOCK (bb_index);
501 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
504 /* Set when a def for regno is seen. */
505 bitmap_clear (seen_in_block);
506 bitmap_clear (seen_in_insn);
509 /* Variables defined in the prolog that are used by the exception
   handler.  */
/* These artificial refs are processed with the AT_TOP flag — they
   logically sit at the start of the block.  */
511 df_ru_bb_local_compute_process_use (bb_info,
512 df_get_artificial_uses (df, bb_index),
515 df_ru_bb_local_compute_process_def (dflow, bb_info,
516 df_get_artificial_defs (df, bb_index),
519 FOR_BB_INSNS (bb, insn)
521 unsigned int uid = INSN_UID (insn);
/* NOTE(review): the INSN_P guard and loop braces are not visible in
   this listing.  */
525 df_ru_bb_local_compute_process_def (dflow, bb_info,
526 DF_INSN_UID_GET (df, uid)->defs, 0);
528 /* The use processing must happen after the defs processing even
529 though the uses logically happen first since the defs clear
530 the gen set. Otherwise, a use for regno occurring in the same
531 instruction as a def for regno would be cleared. */
532 df_ru_bb_local_compute_process_use (bb_info,
533 DF_INSN_UID_GET (df, uid)->uses, 0);
/* Merge this insn's defs into the per-block set only after the whole
   insn has been processed.  */
535 bitmap_ior_into (seen_in_block, seen_in_insn);
536 bitmap_clear (seen_in_insn);
539 /* Process the hardware registers that are always live. */
540 df_ru_bb_local_compute_process_use (bb_info,
541 df_get_artificial_uses (df, bb_index), 0);
543 df_ru_bb_local_compute_process_def (dflow, bb_info,
544 df_get_artificial_defs (df, bb_index), 0);
548 /* Compute local reaching use (upward exposed use) info for each basic
549 block within BLOCKS. */
551 df_ru_local_compute (struct dataflow *dflow,
553 bitmap rescan_blocks ATTRIBUTE_UNUSED)
555 struct df *df = dflow->df;
556 unsigned int bb_index;
559 struct df_ru_problem_data *problem_data =
560 (struct df_ru_problem_data *) dflow->problem_data;
561 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
562 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
/* Use ids must be renumbered into contiguous per-regno ranges before
   the range-based bitmap tricks below can work.  */
566 if (!df->use_info.refs_organized)
567 df_reorganize_refs (&df->use_info);
569 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
571 df_ru_bb_local_compute (dflow, bb_index);
574 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
575 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
577 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
/* Same sparse/dense split as the kill sets: big regnos get a single
   bit, small ones get their full range of use ids.  */
578 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
579 bitmap_set_bit (sparse_invalidated, regno);
582 bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
583 reg_info->begin, reg_info->n_refs);
584 bitmap_ior_into (dense_invalidated, defs);
592 /* Initialize the solution bit vectors for problem. */
595 df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
597 unsigned int bb_index;
600 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
602 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
603 bitmap_copy (bb_info->in, bb_info->gen);
604 bitmap_clear (bb_info->out);
609 /* Out of target gets or of in of source. */
/* Confluence for the backward RU problem: OUT(src) |= IN(dest),
   filtering out the call-invalidated refs when crossing an EH edge.
   NOTE(review): declarations, braces, the BITMAP_FREE of TMP, and the
   else branch are not visible in this listing.  */
612 df_ru_confluence_n (struct dataflow *dflow, edge e)
614 bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
615 bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;
617 if (e->flags & EDGE_EH)
619 struct df_ru_problem_data *problem_data =
620 (struct df_ru_problem_data *) dflow->problem_data;
621 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
622 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
623 struct df *df = dflow->df;
626 bitmap tmp = BITMAP_ALLOC (NULL);
/* Mask the incoming set by both invalidation forms before merging.  */
628 bitmap_copy (tmp, op2);
629 bitmap_and_compl_into (tmp, dense_invalidated);
631 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
633 bitmap_clear_range (tmp,
634 DF_REG_USE_GET (df, regno)->begin,
635 DF_REG_USE_GET (df, regno)->n_refs);
637 bitmap_ior_into (op1, tmp);
/* Non-EH edge: plain union.  */
641 bitmap_ior_into (op1, op2);
645 /* Transfer function. */
648 df_ru_transfer_function (struct dataflow *dflow, int bb_index)
650 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
653 bitmap in = bb_info->in;
654 bitmap out = bb_info->out;
655 bitmap gen = bb_info->gen;
656 bitmap kill = bb_info->kill;
657 bitmap sparse_kill = bb_info->sparse_kill;
659 if (bitmap_empty_p (sparse_kill))
660 return bitmap_ior_and_compl (in, gen, out, kill);
663 struct df *df = dflow->df;
664 bool changed = false;
665 bitmap tmp = BITMAP_ALLOC (NULL);
666 bitmap_copy (tmp, in);
667 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
669 bitmap_clear_range (tmp,
670 DF_REG_USE_GET (df, regno)->begin,
671 DF_REG_USE_GET (df, regno)->n_refs);
673 bitmap_and_compl_into (tmp, kill);
674 bitmap_ior_into (tmp, gen);
675 changed = !bitmap_equal_p (tmp, in);
688 /* Free all storage associated with the problem. */
/* NOTE(review): the null-guards and loop braces are not visible in
   this listing.  */
691 df_ru_free (struct dataflow *dflow)
694 struct df_ru_problem_data *problem_data =
695 (struct df_ru_problem_data *) dflow->problem_data;
/* Free every per-block record's bitmaps; the records themselves are
   returned to the pool wholesale below.  */
699 for (i = 0; i < dflow->block_info_size; i++)
701 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
704 BITMAP_FREE (bb_info->kill);
705 BITMAP_FREE (bb_info->sparse_kill);
706 BITMAP_FREE (bb_info->gen);
707 BITMAP_FREE (bb_info->in);
708 BITMAP_FREE (bb_info->out);
/* Destroying the pool frees all bb_info records at once.  */
712 free_alloc_pool (dflow->block_pool);
/* Drop the cached per-regno use-site bitmaps.  */
714 for (i = 0; i < problem_data->use_sites_size; i++)
716 bitmap bm = problem_data->use_sites[i];
721 free (problem_data->use_sites);
722 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
723 BITMAP_FREE (problem_data->dense_invalidated_by_call);
725 dflow->block_info_size = 0;
726 free (dflow->block_info);
727 free (dflow->problem_data);
733 /* Debugging info. */
/* Dump the RU problem data and each block's four sets to FILE.
   NOTE(review): declarations, the FOR_EACH_BB loop header, and braces
   are not visible in this listing.  */
736 df_ru_dump (struct dataflow *dflow, FILE *file)
739 struct df *df = dflow->df;
740 struct df_ru_problem_data *problem_data =
741 (struct df_ru_problem_data *) dflow->problem_data;
742 unsigned int m = max_reg_num ();
745 fprintf (file, "Reaching uses:\n");
747 fprintf (file, "  sparse invalidated \t");
748 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
749 fprintf (file, "  dense invalidated \t");
750 dump_bitmap (file, problem_data->dense_invalidated_by_call);
/* Print each regno's contiguous use-id range as regno[begin,n].  */
752 for (regno = 0; regno < m; regno++)
753 if (DF_REG_USE_GET (df, regno)->n_refs)
754 fprintf (file, "%d[%d,%d] ", regno,
755 DF_REG_USE_GET (df, regno)->begin,
756 DF_REG_USE_GET (df, regno)->n_refs);
757 fprintf (file, "\n");
761 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
762 df_print_bb_index (bb, file);
767 fprintf (file, "  in  \t");
768 dump_bitmap (file, bb_info->in);
769 fprintf (file, "  gen \t");
770 dump_bitmap (file, bb_info->gen);
771 fprintf (file, "  kill\t");
772 dump_bitmap (file, bb_info->kill);
773 fprintf (file, "  out \t");
774 dump_bitmap (file, bb_info->out);
778 /* All of the information associated with every instance of the problem. */
780 static struct df_problem problem_RU =
782 DF_RU, /* Problem id. */
783 DF_BACKWARD, /* Direction. */
784 df_ru_alloc, /* Allocate the problem specific data. */
785 NULL, /* Reset global information. */
786 df_ru_free_bb_info, /* Free basic block info. */
787 df_ru_local_compute, /* Local compute function. */
788 df_ru_init_solution, /* Init the solution specific data. */
789 df_iterative_dataflow, /* Iterative solver. */
790 NULL, /* Confluence operator 0. */
791 df_ru_confluence_n, /* Confluence operator n. */
792 df_ru_transfer_function, /* Transfer function. */
793 NULL, /* Finalize function. */
794 df_ru_free, /* Free all of the problem information. */
795 df_ru_dump, /* Debugging. */
796 NULL /* Dependent problem. */
801 /* Create a new DATAFLOW instance and add it to an existing instance
802 of DF. The returned structure is what is used to get at the
806 df_ru_add_problem (struct df *df)
808 return df_add_problem (df, &problem_RU);
812 /*----------------------------------------------------------------------------
815 Find the locations in the function where each definition site for a
817 ----------------------------------------------------------------------------*/
819 struct df_rd_problem_data
821 bitmap *def_sites; /* Bitmap of defs for each pseudo. */
822 unsigned int def_sites_size; /* Size of def_sites. */
823 /* The set of defs to regs invalidated by call. */
824 bitmap sparse_invalidated_by_call;
825 /* The set of defs to regs invalidate by call for ru. */
826 bitmap dense_invalidated_by_call;
829 /* Get basic block info. */
831 struct df_rd_bb_info *
832 df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
834 return (struct df_rd_bb_info *) dflow->block_info[index];
838 /* Set basic block info. */
841 df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
842 struct df_rd_bb_info *bb_info)
844 dflow->block_info[index] = bb_info;
848 /* Free basic block info. */
/* Release all per-block RD bitmaps and return the bb_info record to
   DFLOW's block pool.  (A surrounding null-check and the VBB_INFO
   parameter line are not visible in this listing.)  */
851 df_rd_free_bb_info (struct dataflow *dflow,
852 basic_block bb ATTRIBUTE_UNUSED,
855 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
858 BITMAP_FREE (bb_info->kill);
859 BITMAP_FREE (bb_info->sparse_kill);
860 BITMAP_FREE (bb_info->gen);
861 BITMAP_FREE (bb_info->in);
862 BITMAP_FREE (bb_info->out);
863 pool_free (dflow->block_pool, bb_info);
868 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
869 not touched unless the block is new. */
/* Mirror of df_ru_alloc, keyed on defs instead of uses.
   NOTE(review): declarations, braces, and else-branches are not
   visible in this listing.  */
872 df_rd_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
874 unsigned int bb_index;
876 unsigned int reg_size = max_reg_num ();
/* Lazily create the pool that per-block info records come from.  */
878 if (! dflow->block_pool)
879 dflow->block_pool = create_alloc_pool ("df_rd_block pool",
880 sizeof (struct df_rd_bb_info), 50);
/* Re-use path: reset cached def-site bitmaps, growing the table if
   new pseudos appeared.  */
882 if (dflow->problem_data)
885 struct df_rd_problem_data *problem_data =
886 (struct df_rd_problem_data *) dflow->problem_data;
888 for (i = 0; i < problem_data->def_sites_size; i++)
890 bitmap bm = problem_data->def_sites[i];
894 problem_data->def_sites[i] = NULL;
898 if (problem_data->def_sites_size < reg_size)
900 problem_data->def_sites
901 = xrealloc (problem_data->def_sites, reg_size *sizeof (bitmap));
902 memset (problem_data->def_sites + problem_data->def_sites_size, 0,
903 (reg_size - problem_data->def_sites_size) *sizeof (bitmap));
904 problem_data->def_sites_size = reg_size;
907 bitmap_clear (problem_data->sparse_invalidated_by_call);
908 bitmap_clear (problem_data->dense_invalidated_by_call);
/* First-time path (presumably the else branch, not visible here):
   allocate fresh problem data.  */
912 struct df_rd_problem_data *problem_data = XNEW (struct df_rd_problem_data);
913 dflow->problem_data = problem_data;
915 problem_data->def_sites = XCNEWVEC (bitmap, reg_size);
916 problem_data->def_sites_size = reg_size;
917 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
918 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
921 df_grow_bb_info (dflow);
923 /* Because of the clustering of all def sites for the same pseudo,
924 we have to process all of the blocks before doing the
   analysis.  */
927 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
929 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
/* Existing block: clear only the local sets; in/out keep the previous
   solution as a starting point.  */
932 bitmap_clear (bb_info->kill);
933 bitmap_clear (bb_info->sparse_kill);
934 bitmap_clear (bb_info->gen);
/* New block: allocate the record and all five bitmaps.  */
938 bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
939 df_rd_set_bb_info (dflow, bb_index, bb_info);
940 bb_info->kill = BITMAP_ALLOC (NULL);
941 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
942 bb_info->gen = BITMAP_ALLOC (NULL);
943 bb_info->in = BITMAP_ALLOC (NULL);
944 bb_info->out = BITMAP_ALLOC (NULL);
950 /* Process a list of DEFs for df_rd_bb_local_compute. */
/* NOTE(review): the loop walking the DEF chain and the else-branch
   framing are not visible in this listing.  */
953 df_rd_bb_local_compute_process_def (struct dataflow *dflow,
954 struct df_rd_bb_info *bb_info,
956 enum df_ref_flags top_flag)
958 struct df *df = dflow->df;
/* Only process refs whose AT_TOP flag matches the requested pass.  */
961 if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
963 unsigned int regno = DF_REF_REGNO (def);
/* Def ids for a regno occupy the contiguous range
   [begin, begin + n_refs).  */
964 unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
965 unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;
967 /* Only the last def(s) for a regno in the block has any
   effect.  */
969 if (!bitmap_bit_p (seen_in_block, regno))
971 /* The first def for regno in insn gets to knock out the
972 defs from other instructions. */
973 if (!bitmap_bit_p (seen_in_insn, regno))
/* Heavily-defined regnos get a sparse-kill bit plus a range clear
   instead of a dense per-def kill set.  */
975 if (n_defs > DF_SPARSE_THRESHOLD)
977 bitmap_set_bit (bb_info->sparse_kill, regno);
978 bitmap_clear_range(bb_info->gen, begin, n_defs);
982 struct df_rd_problem_data * problem_data =
983 (struct df_rd_problem_data *)dflow->problem_data;
/* Dense path: kill every def site of regno and remove them from
   gen.  */
985 df_ref_bitmap (problem_data->def_sites, regno,
987 bitmap_ior_into (bb_info->kill, defs);
988 bitmap_and_compl_into (bb_info->gen, defs);
992 bitmap_set_bit (seen_in_insn, regno);
993 /* All defs for regno in the instruction may be put into
   the gen set.  */
/* Clobber defs (e.g. call clobbers) kill but do not generate.  */
995 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
996 bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
1003 /* Compute local reaching def info for basic block BB. */
/* NOTE(review): declarations, braces, and the loop's continue path
   are not visible in this listing.  */
1006 df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1008 struct df *df = dflow->df;
1009 basic_block bb = BASIC_BLOCK (bb_index);
1010 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1013 bitmap_clear (seen_in_block);
1014 bitmap_clear (seen_in_insn);
/* Artificial defs at the bottom of the block are seen first because
   the insn walk below runs in reverse order.  */
1016 df_rd_bb_local_compute_process_def (dflow, bb_info,
1017 df_get_artificial_defs (df, bb_index), 0);
/* Walk the insns last-to-first so that later defs shadow earlier
   ones.  */
1019 FOR_BB_INSNS_REVERSE (bb, insn)
1021 unsigned int uid = INSN_UID (insn);
1023 if (! INSN_P (insn))
1026 df_rd_bb_local_compute_process_def (dflow, bb_info,
1027 DF_INSN_UID_GET (df, uid)->defs, 0);
1029 /* This complex dance with the two bitmaps is required because
1030 instructions can assign twice to the same pseudo. This
1031 generally happens with calls that will have one def for the
1032 result and another def for the clobber. If only one vector
1033 is used and the clobber goes first, the result will be
   lost.  */
1035 bitmap_ior_into (seen_in_block, seen_in_insn);
1036 bitmap_clear (seen_in_insn);
1039 /* Process the artificial defs at the top of the block last since we
1040 are going backwards through the block and these are logically at
   the top.  */
1042 df_rd_bb_local_compute_process_def (dflow, bb_info,
1043 df_get_artificial_defs (df, bb_index),
1048 /* Compute local reaching def info for each basic block within BLOCKS. */
/* Mirror of df_ru_local_compute, keyed on defs instead of uses.  */
1051 df_rd_local_compute (struct dataflow *dflow,
1053 bitmap rescan_blocks ATTRIBUTE_UNUSED)
1055 struct df *df = dflow->df;
1056 unsigned int bb_index;
1059 struct df_rd_problem_data *problem_data =
1060 (struct df_rd_problem_data *) dflow->problem_data;
1061 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1062 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
/* Def ids must be renumbered into contiguous per-regno ranges before
   the range-based bitmap tricks below can work.  */
1066 if (!df->def_info.refs_organized)
1067 df_reorganize_refs (&df->def_info);
1069 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1071 df_rd_bb_local_compute (dflow, bb_index);
1074 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
1075 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
1077 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
/* Same sparse/dense split as the kill sets.  */
1078 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
1080 bitmap_set_bit (sparse_invalidated, regno);
1084 bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
1085 reg_info->begin, reg_info->n_refs);
1086 bitmap_ior_into (dense_invalidated, defs);
1093 /* Initialize the solution bit vectors for problem. */
1096 df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
1098 unsigned int bb_index;
1101 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1103 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1105 bitmap_copy (bb_info->out, bb_info->gen);
1106 bitmap_clear (bb_info->in);
1110 /* In of target gets or of out of source. */
/* Confluence for the forward RD problem: IN(dest) |= OUT(src),
   filtering out the call-invalidated defs when crossing an EH edge.
   NOTE(review): declarations, braces, the BITMAP_FREE of TMP, and the
   else branch are not visible in this listing.  */
1113 df_rd_confluence_n (struct dataflow *dflow, edge e)
1115 bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
1116 bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;
1118 if (e->flags & EDGE_EH)
1120 struct df_rd_problem_data *problem_data =
1121 (struct df_rd_problem_data *) dflow->problem_data;
1122 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1123 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1124 struct df *df = dflow->df;
1127 bitmap tmp = BITMAP_ALLOC (NULL);
/* Mask the incoming set by both invalidation forms before merging.  */
1129 bitmap_copy (tmp, op2);
1130 bitmap_and_compl_into (tmp, dense_invalidated);
1132 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
1134 bitmap_clear_range (tmp,
1135 DF_REG_DEF_GET (df, regno)->begin,
1136 DF_REG_DEF_GET (df, regno)->n_refs);
1138 bitmap_ior_into (op1, tmp);
/* Non-EH edge: plain union.  */
1142 bitmap_ior_into (op1, op2);
1146 /* Transfer function. */
/* Forward reaching-defs transfer: OUT(bb) = GEN(bb) U (IN(bb) -
   KILL(bb)); returns true if OUT changed.  NOTE(review): the tail of
   the slow path (installing TMP as the new OUT, freeing, and the
   return) is not visible in this listing.  */
1149 df_rd_transfer_function (struct dataflow *dflow, int bb_index)
1151 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1154 bitmap in = bb_info->in;
1155 bitmap out = bb_info->out;
1156 bitmap gen = bb_info->gen;
1157 bitmap kill = bb_info->kill;
1158 bitmap sparse_kill = bb_info->sparse_kill;
/* Fast path: no sparsely-killed registers in this block.  */
1160 if (bitmap_empty_p (sparse_kill))
1161 return bitmap_ior_and_compl (out, gen, in, kill);
1164 struct df *df = dflow->df;
1165 bool changed = false;
1166 bitmap tmp = BITMAP_ALLOC (NULL);
/* Slow path: start from IN, clear each sparsely-killed regno's whole
   contiguous def-id range, then apply the dense kill and gen.  */
1167 bitmap_copy (tmp, in);
1168 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
1170 bitmap_clear_range (tmp,
1171 DF_REG_DEF_GET (df, regno)->begin,
1172 DF_REG_DEF_GET (df, regno)->n_refs);
1174 bitmap_and_compl_into (tmp, kill);
1175 bitmap_ior_into (tmp, gen);
1176 changed = !bitmap_equal_p (tmp, out);
1189 /* Free all storage associated with the problem. */
/* NOTE(review): the null-guards and loop braces are not visible in
   this listing.  */
1192 df_rd_free (struct dataflow *dflow)
1195 struct df_rd_problem_data *problem_data =
1196 (struct df_rd_problem_data *) dflow->problem_data;
/* Free every per-block record's bitmaps; the records themselves are
   returned to the pool wholesale below.  */
1200 for (i = 0; i < dflow->block_info_size; i++)
1202 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
1205 BITMAP_FREE (bb_info->kill);
1206 BITMAP_FREE (bb_info->sparse_kill);
1207 BITMAP_FREE (bb_info->gen);
1208 BITMAP_FREE (bb_info->in);
1209 BITMAP_FREE (bb_info->out);
/* Destroying the pool frees all bb_info records at once.  */
1213 free_alloc_pool (dflow->block_pool);
/* Drop the cached per-regno def-site bitmaps.  */
1215 for (i = 0; i < problem_data->def_sites_size; i++)
1217 bitmap bm = problem_data->def_sites[i];
1222 free (problem_data->def_sites);
1223 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
1224 BITMAP_FREE (problem_data->dense_invalidated_by_call);
1226 dflow->block_info_size = 0;
1227 free (dflow->block_info);
1228 free (dflow->problem_data);
1234 /* Debugging info. */
/* Dump the RD problem data and each block's four sets (with their
   cardinalities) to FILE.  NOTE(review): declarations, the
   FOR_EACH_BB loop header, and braces are not visible in this
   listing.  */
1237 df_rd_dump (struct dataflow *dflow, FILE *file)
1239 struct df *df = dflow->df;
1241 struct df_rd_problem_data *problem_data =
1242 (struct df_rd_problem_data *) dflow->problem_data;
1243 unsigned int m = max_reg_num ();
1246 fprintf (file, "Reaching defs:\n\n");
1248 fprintf (file, "  sparse invalidated \t");
1249 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
1250 fprintf (file, "  dense invalidated \t");
1251 dump_bitmap (file, problem_data->dense_invalidated_by_call);
/* Print each regno's contiguous def-id range as regno[begin,n].  */
1253 for (regno = 0; regno < m; regno++)
1254 if (DF_REG_DEF_GET (df, regno)->n_refs)
1255 fprintf (file, "%d[%d,%d] ", regno,
1256 DF_REG_DEF_GET (df, regno)->begin,
1257 DF_REG_DEF_GET (df, regno)->n_refs);
1258 fprintf (file, "\n");
1262 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
1263 df_print_bb_index (bb, file);
1268 fprintf (file, "  in\t(%d)\n", (int) bitmap_count_bits (bb_info->in));
1269 dump_bitmap (file, bb_info->in);
1270 fprintf (file, "  gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
1271 dump_bitmap (file, bb_info->gen);
1272 fprintf (file, "  kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
1273 dump_bitmap (file, bb_info->kill);
1274 fprintf (file, "  out\t(%d)\n", (int) bitmap_count_bits (bb_info->out));
1275 dump_bitmap (file, bb_info->out);
1279 /* All of the information associated with every instance of the problem. */
1281 static struct df_problem problem_RD =
1283 DF_RD, /* Problem id. */
1284 DF_FORWARD, /* Direction. */
1285 df_rd_alloc, /* Allocate the problem specific data. */
1286 NULL, /* Reset global information. */
1287 df_rd_free_bb_info, /* Free basic block info. */
1288 df_rd_local_compute, /* Local compute function. */
1289 df_rd_init_solution, /* Init the solution specific data. */
1290 df_iterative_dataflow, /* Iterative solver. */
1291 NULL, /* Confluence operator 0. */
1292 df_rd_confluence_n, /* Confluence operator n. */
1293 df_rd_transfer_function, /* Transfer function. */
1294 NULL, /* Finalize function. */
1295 df_rd_free, /* Free all of the problem information. */
1296 df_rd_dump, /* Debugging. */
1297 NULL /* Dependent problem. */
1302 /* Create a new DATAFLOW instance and add it to an existing instance
1303 of DF. The returned structure is what is used to get at the
1307 df_rd_add_problem (struct df *df)
1309 return df_add_problem (df, &problem_RD);
1314 /*----------------------------------------------------------------------------
1317 Find the locations in the function where any use of a pseudo can reach
1318 in the backwards direction.
1319 ----------------------------------------------------------------------------*/
1321 /* Get basic block info. */
1323 struct df_lr_bb_info *
1324 df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
1326 return (struct df_lr_bb_info *) dflow->block_info[index];
1330 /* Set basic block info. */
1333 df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
1334 struct df_lr_bb_info *bb_info)
1336 dflow->block_info[index] = bb_info;
1340 /* Free basic block info. */
1343 df_lr_free_bb_info (struct dataflow *dflow,
1344 basic_block bb ATTRIBUTE_UNUSED,
1347 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
1350 BITMAP_FREE (bb_info->use);
1351 BITMAP_FREE (bb_info->def);
1352 BITMAP_FREE (bb_info->in);
1353 BITMAP_FREE (bb_info->out);
1354 pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
  unsigned int bb_index;

  /* Lazily create the pool the per-block records are carved from.  */
  if (! dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_lr_block pool",
					   sizeof (struct df_lr_bb_info), 50);

  /* Make sure the block_info vector covers every current block.  */
  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
      /* Existing block: reset only the local (def/use) sets; the
	 solution (in/out) bits are preserved.  */
      bitmap_clear (bb_info->def);
      bitmap_clear (bb_info->use);
      /* New block: allocate a record and all four bitmaps.  */
      bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
      df_lr_set_bb_info (dflow, bb_index, bb_info);
      bb_info->use = BITMAP_ALLOC (NULL);
      bb_info->def = BITMAP_ALLOC (NULL);
      bb_info->in = BITMAP_ALLOC (NULL);
      bb_info->out = BITMAP_ALLOC (NULL);
/* Compute local live register info (the def and use sets) for basic
   block BB.  */

df_lr_bb_local_compute (struct dataflow *dflow,
			struct df *df, unsigned int bb_index)
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);

  /* Process the registers set in an exception handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
	unsigned int dregno = DF_REF_REGNO (def);
	/* A def screens off any use seen earlier (i.e. above it) in
	   the backwards scan.  */
	bitmap_set_bit (bb_info->def, dregno);
	bitmap_clear_bit (bb_info->use, dregno);

  /* Process the hardware registers that are always live.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));

  /* Walk the insns bottom-up so that defs are seen before the uses
     above them in the block.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
      unsigned int uid = INSN_UID (insn);

      /* Notes, barriers etc. contribute nothing.  */
      if (! INSN_P (insn))

      for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
	  unsigned int dregno = DF_REF_REGNO (def);

	  /* Hard registers that hold a sibling call's return value
	     (live out of the exit block) are not treated as defined
	     here unless the return rtx itself mentions them.  */
	  if (dregno >= FIRST_PSEUDO_REGISTER
	      || !(SIBLING_CALL_P (insn)
		   && bitmap_bit_p (df->exit_block_uses, dregno)
		   && !refers_to_regno_p (dregno, dregno+1,
					  current_function_return_rtx,
	      /* Add def to set of defs in this BB.  */
	      bitmap_set_bit (bb_info->def, dregno);
	      bitmap_clear_bit (bb_info->use, dregno);

      for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
	  unsigned int dregno = DF_REF_REGNO (def);

	  /* Remember hard registers clobbered by an asm, over the
	     register's full width in this mode.  */
	  if (DF_INSN_CONTAINS_ASM (df, insn)
	      && dregno < FIRST_PSEUDO_REGISTER)
	      dregno + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
	      for (i = dregno; i <= end; ++i)
		regs_asm_clobbered[i] = 1;

	  /* Add def to set of defs in this BB.  */
	  bitmap_set_bit (bb_info->def, dregno);
	  bitmap_clear_bit (bb_info->use, dregno);

      for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
	/* Add use to set of uses in this BB.  */
	bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));

  /* Process the registers set in an exception handler.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	unsigned int dregno = DF_REF_REGNO (def);
	bitmap_set_bit (bb_info->def, dregno);
	bitmap_clear_bit (bb_info->use, dregno);

  /* Process the uses that are live into an exception handler.  */
  for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
    /* Add use to set of uses in this BB.  */
    if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
      bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
/* Compute local live register info for each basic block within BLOCKS.  */

df_lr_local_compute (struct dataflow *dflow,
		     bitmap rescan_blocks)
  struct df *df = dflow->df;
  unsigned int bb_index;

  /* When every block is being rescanned the asm-clobber information
     can be rebuilt from scratch.  */
  if (bitmap_equal_p (all_blocks, rescan_blocks))
    memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));

  bitmap_clear (df->hardware_regs_used);

  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);

  /* Before reload, there are a few registers that must be forced
     live everywhere -- which might not already be the case for
     blocks within infinite loops.  */
  if (! reload_completed)
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);

  if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
      /* The exit block is special for this problem and its bits are
	 computed from thin air.  */
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
      bitmap_copy (bb_info->use, df->exit_block_uses);

  /* The exit block's sets were filled in above; scan the real blocks.  */
  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
      if (bb_index == EXIT_BLOCK)
	df_lr_bb_local_compute (dflow, df, bb_index);
1557 /* Initialize the solution vectors. */
1560 df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1562 unsigned int bb_index;
1565 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1567 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1568 bitmap_copy (bb_info->in, bb_info->use);
1569 bitmap_clear (bb_info->out);
1574 /* Confluence function that processes infinite loops. This might be a
1575 noreturn function that throws. And even if it isn't, getting the
1576 unwind info right helps debugging. */
1578 df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
1580 struct df *df = dflow->df;
1582 bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
1583 if (bb != EXIT_BLOCK_PTR)
1584 bitmap_copy (op1, df->hardware_regs_used);
1588 /* Confluence function that ignores fake edges. */
1590 df_lr_confluence_n (struct dataflow *dflow, edge e)
1592 bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
1593 bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
1595 /* Call-clobbered registers die across exception and call edges. */
1596 /* ??? Abnormal call edges ignored for the moment, as this gets
1597 confused by sibling call edges, which crashes reg-stack. */
1598 if (e->flags & EDGE_EH)
1599 bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
1601 bitmap_ior_into (op1, op2);
1603 bitmap_ior_into (op1, dflow->df->hardware_regs_used);
1607 /* Transfer function. */
1609 df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1611 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1612 bitmap in = bb_info->in;
1613 bitmap out = bb_info->out;
1614 bitmap use = bb_info->use;
1615 bitmap def = bb_info->def;
1617 return bitmap_ior_and_compl (in, use, out, def);
/* Free all storage associated with the problem.  */

df_lr_free (struct dataflow *dflow)
  if (dflow->block_info)
      /* Release each block's bitmaps ...  */
      for (i = 0; i < dflow->block_info_size; i++)
	  struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
	  BITMAP_FREE (bb_info->use);
	  BITMAP_FREE (bb_info->def);
	  BITMAP_FREE (bb_info->in);
	  BITMAP_FREE (bb_info->out);

      /* ... then the pool all the records came from, and the vector
	 that pointed at them.  */
      free_alloc_pool (dflow->block_pool);

      dflow->block_info_size = 0;
      free (dflow->block_info);
/* Debugging info: print each block's in/use/def/out sets to FILE.  */

df_lr_dump (struct dataflow *dflow, FILE *file)
  fprintf (file, "Live Registers:\n");
      struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, " in \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, " use \t");
      dump_bitmap (file, bb_info->use);
      fprintf (file, " def \t");
      dump_bitmap (file, bb_info->def);
      fprintf (file, " out \t");
      dump_bitmap (file, bb_info->out);
/* All of the information associated with every instance of the problem.  */

/* Function table for the live-registers (LR) problem: a backward
   problem solved with the generic iterative solver; no dependent
   problem.  Field order follows struct df_problem.  */
static struct df_problem problem_LR =
  DF_LR,                      /* Problem id.  */
  DF_BACKWARD,                /* Direction.  */
  df_lr_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_lr_free_bb_info,         /* Free basic block info.  */
  df_lr_local_compute,        /* Local compute function.  */
  df_lr_init,                 /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  df_lr_confluence_0,         /* Confluence operator 0.  */
  df_lr_confluence_n,         /* Confluence operator n.  */
  df_lr_transfer_function,    /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_lr_free,                 /* Free all of the problem information.  */
  df_lr_dump,                 /* Debugging.  */
  NULL                        /* Dependent problem.  */
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

df_lr_add_problem (struct df *df)
  /* Register the LR problem's function table with DF.  */
  return df_add_problem (df, &problem_LR);
1710 /*----------------------------------------------------------------------------
1711 UNINITIALIZED REGISTERS
1713 Find the set of uses for registers that are reachable from the entry
1714 block without passing thru a definition.
1715 ----------------------------------------------------------------------------*/
1717 /* Get basic block info. */
1719 struct df_ur_bb_info *
1720 df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1722 return (struct df_ur_bb_info *) dflow->block_info[index];
1726 /* Set basic block info. */
1729 df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
1730 struct df_ur_bb_info *bb_info)
1732 dflow->block_info[index] = bb_info;
1736 /* Free basic block info. */
1739 df_ur_free_bb_info (struct dataflow *dflow,
1740 basic_block bb ATTRIBUTE_UNUSED,
1743 struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1746 BITMAP_FREE (bb_info->gen);
1747 BITMAP_FREE (bb_info->kill);
1748 BITMAP_FREE (bb_info->in);
1749 BITMAP_FREE (bb_info->out);
1750 pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
  unsigned int bb_index;

  /* Lazily create the pool the per-block records are carved from.  */
  if (! dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_ur_block pool",
					   sizeof (struct df_ur_bb_info), 100);

  /* Make sure the block_info vector covers every current block.  */
  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
      /* Existing block: reset only the local (gen/kill) sets; the
	 solution (in/out) bits are preserved.  */
      bitmap_clear (bb_info->kill);
      bitmap_clear (bb_info->gen);
      /* New block: allocate a record and all four bitmaps.  */
      bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
      df_ur_set_bb_info (dflow, bb_index, bb_info);
      bb_info->kill = BITMAP_ALLOC (NULL);
      bb_info->gen = BITMAP_ALLOC (NULL);
      bb_info->in = BITMAP_ALLOC (NULL);
      bb_info->out = BITMAP_ALLOC (NULL);
/* Compute local uninitialized register info (gen and kill sets) for
   basic block BB.  */

df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);

  /* seen_in_block holds registers already defined below the current
     point of the backwards scan; seen_in_insn accumulates the defs of
     the insn currently being processed.  */
  bitmap_clear (seen_in_block);
  bitmap_clear (seen_in_insn);

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
	unsigned int regno = DF_REF_REGNO (def);
	if (!bitmap_bit_p (seen_in_block, regno))
	    bitmap_set_bit (seen_in_block, regno);
	    bitmap_set_bit (bb_info->gen, regno);

  FOR_BB_INSNS_REVERSE (bb, insn)
      unsigned int uid = INSN_UID (insn);

      for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
	  unsigned int regno = DF_REF_REGNO (def);
	  /* Only the last def counts.  */
	  if (!bitmap_bit_p (seen_in_block, regno))
	      bitmap_set_bit (seen_in_insn, regno);

	      /* A clobber kills the register without providing a
		 usable value; a real def generates one.  */
	      if (DF_REF_FLAGS (def) & DF_REF_CLOBBER)
		bitmap_set_bit (bb_info->kill, regno);
		bitmap_set_bit (bb_info->gen, regno);

      /* Defs within one insn do not screen each other, so commit
	 them to seen_in_block only after the whole insn is done.  */
      bitmap_ior_into (seen_in_block, seen_in_insn);
      bitmap_clear (seen_in_insn);

  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	unsigned int regno = DF_REF_REGNO (def);
	if (!bitmap_bit_p (seen_in_block, regno))
	    bitmap_set_bit (seen_in_block, regno);
	    bitmap_set_bit (bb_info->gen, regno);
/* Compute local uninitialized register info.  Runs the per-block
   computation (which uses the file-static seen_in_block/seen_in_insn
   scratch bitmaps) over every block in RESCAN_BLOCKS.  */

df_ur_local_compute (struct dataflow *dflow,
		     bitmap all_blocks ATTRIBUTE_UNUSED,
		     bitmap rescan_blocks)
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
      df_ur_bb_local_compute (dflow, bb_index);
1878 /* Initialize the solution vectors. */
1881 df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1883 unsigned int bb_index;
1886 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1888 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1890 bitmap_copy (bb_info->out, bb_info->gen);
1891 bitmap_clear (bb_info->in);
/* Or in the stack regs, hard regs and early clobber regs into the
   ur_in sets of all of the blocks.  Here the UR solution is also
   trimmed against the LR (liveness) solution.  */

df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
  struct df *df = dflow->df;
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
  bitmap tmp = BITMAP_ALLOC (NULL);
  unsigned int bb_index;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);

      /* No register may reach a location where it is not used.  Thus
	 we trim the rr result to the places where it is used.  */
      bitmap_and_into (bb_info->in, bb_lr_info->in);
      bitmap_and_into (bb_info->out, bb_lr_info->out);

      /* Hard registers may still stick in the ur_out set, but not
	 be in the ur_in set, if their only mention was in a call
	 in this block.  This is because a call kills in the lr
	 problem but does not kill in the ur problem.  To clean
	 this up, we execute the transfer function on the lr_in
	 set and then use that to knock bits out of ur_out.  */
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
      bitmap_and_into (bb_info->out, tmp);
1935 /* Confluence function that ignores fake edges. */
1938 df_ur_confluence_n (struct dataflow *dflow, edge e)
1940 bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
1941 bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
1943 if (e->flags & EDGE_FAKE)
1946 bitmap_ior_into (op1, op2);
1950 /* Transfer function. */
1953 df_ur_transfer_function (struct dataflow *dflow, int bb_index)
1955 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1956 bitmap in = bb_info->in;
1957 bitmap out = bb_info->out;
1958 bitmap gen = bb_info->gen;
1959 bitmap kill = bb_info->kill;
1961 return bitmap_ior_and_compl (out, gen, in, kill);
/* Free all storage associated with the problem.  */

df_ur_free (struct dataflow *dflow)
  if (dflow->block_info)
      /* Release each block's bitmaps ...  */
      for (i = 0; i < dflow->block_info_size; i++)
	  struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
	  BITMAP_FREE (bb_info->gen);
	  BITMAP_FREE (bb_info->kill);
	  BITMAP_FREE (bb_info->in);
	  BITMAP_FREE (bb_info->out);

      /* ... then the pool and the vector that held the records.  */
      free_alloc_pool (dflow->block_pool);
      dflow->block_info_size = 0;
      free (dflow->block_info);
/* Debugging info: print each block's in/gen/kill/out sets to FILE.  */

df_ur_dump (struct dataflow *dflow, FILE *file)
  fprintf (file, "Undefined regs:\n");
      struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, " in \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, " gen \t");
      dump_bitmap (file, bb_info->gen);
      fprintf (file, " kill\t");
      dump_bitmap (file, bb_info->kill);
      fprintf (file, " out \t");
      dump_bitmap (file, bb_info->out);
/* All of the information associated with every instance of the problem.  */

/* Function table for the uninitialized-registers (UR) problem: a
   forward problem whose finalize step trims the solution against the
   LR problem, hence the dependence on problem_LR.  */
static struct df_problem problem_UR =
  DF_UR,                      /* Problem id.  */
  DF_FORWARD,                 /* Direction.  */
  df_ur_alloc,                /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_ur_free_bb_info,         /* Free basic block info.  */
  df_ur_local_compute,        /* Local compute function.  */
  df_ur_init,                 /* Init the solution specific data.  */
  df_iterative_dataflow,      /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  df_ur_confluence_n,         /* Confluence operator n.  */
  df_ur_transfer_function,    /* Transfer function.  */
  df_ur_local_finalize,       /* Finalize function.  */
  df_ur_free,                 /* Free all of the problem information.  */
  df_ur_dump,                 /* Debugging.  */
  &problem_LR                 /* Dependent problem.  */
/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

df_ur_add_problem (struct df *df)
  /* Register the UR problem's function table with DF.  */
  return df_add_problem (df, &problem_UR);
2056 /*----------------------------------------------------------------------------
2057 UNINITIALIZED REGISTERS WITH EARLYCLOBBER
2059 Find the set of uses for registers that are reachable from the entry
2060 block without passing thru a definition.
2062 This is a variant of the UR problem above that has a lot of special
2063 features just for the register allocation phase.
2064 ----------------------------------------------------------------------------*/
/* Per-instance data shared by the df_urec_* routines.  */
struct df_urec_problem_data
  bool earlyclobbers_found;     /* True if any instruction contains an
				   early clobber.  */
  bitmap stack_regs;	/* Registers that may be allocated to a STACK_REGS.  */
2076 /* Get basic block info. */
2078 struct df_urec_bb_info *
2079 df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2081 return (struct df_urec_bb_info *) dflow->block_info[index];
2085 /* Set basic block info. */
2088 df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
2089 struct df_urec_bb_info *bb_info)
2091 dflow->block_info[index] = bb_info;
2095 /* Free basic block info. */
2098 df_urec_free_bb_info (struct dataflow *dflow,
2099 basic_block bb ATTRIBUTE_UNUSED,
2102 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2105 BITMAP_FREE (bb_info->gen);
2106 BITMAP_FREE (bb_info->kill);
2107 BITMAP_FREE (bb_info->in);
2108 BITMAP_FREE (bb_info->out);
2109 BITMAP_FREE (bb_info->earlyclobber);
2110 pool_free (dflow->block_pool, bb_info);
/* Allocate or reset bitmaps for DFLOW blocks.  The solution bits are
   not touched unless the block is new.  */

df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
  unsigned int bb_index;
  struct df_urec_problem_data *problem_data =
    (struct df_urec_problem_data *) dflow->problem_data;

  /* Lazily create the pool the per-block records are carved from.  */
  if (! dflow->block_pool)
    dflow->block_pool = create_alloc_pool ("df_urec_block pool",
					   sizeof (struct df_urec_bb_info), 50);

  /* Lazily create the problem-wide private data as well.  */
  if (!dflow->problem_data)
      problem_data = XNEW (struct df_urec_problem_data);
      dflow->problem_data = problem_data;

  problem_data->earlyclobbers_found = false;

  /* Make sure the block_info vector covers every current block.  */
  df_grow_bb_info (dflow);

  /* Because of the clustering of all def sites for the same pseudo,
     we have to process all of the blocks before doing the
     analysis.  */
  EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
      /* Existing block: reset only the local sets; the solution
	 (in/out) bits are preserved.  */
      bitmap_clear (bb_info->kill);
      bitmap_clear (bb_info->gen);
      bitmap_clear (bb_info->earlyclobber);
      /* New block: allocate a record and all five bitmaps.  */
      bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
      df_urec_set_bb_info (dflow, bb_index, bb_info);
      bb_info->kill = BITMAP_ALLOC (NULL);
      bb_info->gen = BITMAP_ALLOC (NULL);
      bb_info->in = BITMAP_ALLOC (NULL);
      bb_info->out = BITMAP_ALLOC (NULL);
      bb_info->earlyclobber = BITMAP_ALLOC (NULL);
/* The function modifies local info for register REG being changed in
   SETTER.  DATA is used to pass the current basic block info.
   A store always kills; it also generates a value unless SETTER is a
   CLOBBER.  */

df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;

  /* Look through a SUBREG to the register actually being set.  */
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  endregno = regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
      /* A hard register may span several consecutive register
	 numbers in this mode; update every one of them.  */
      endregno +=hard_regno_nregs[regno][GET_MODE (reg)];

      for (i = regno; i < endregno; i++)
	  bitmap_set_bit (bb_info->kill, i);

	  if (GET_CODE (setter) != CLOBBER)
	    bitmap_set_bit (bb_info->gen, i);
	    bitmap_clear_bit (bb_info->gen, i);

  /* Pseudo register: a single kill/gen update suffices.  */
  bitmap_set_bit (bb_info->kill, regno);

  if (GET_CODE (setter) != CLOBBER)
    bitmap_set_bit (bb_info->gen, regno);
    bitmap_clear_bit (bb_info->gen, regno);
/* Classes of registers which could be early clobbered in the current
   insn.  Filled by df_urec_check_earlyclobber; holds at most
   N_REG_CLASSES elements.  */

static VEC(int,heap) *earlyclobber_regclass;
/* This function finds and stores register classes that could be early
   clobbered in INSN.  If any earlyclobber classes are found, the function
   returns TRUE, in all other cases it returns FALSE.  */

df_urec_check_earlyclobber (rtx insn)
  /* Pull the operands and constraint strings out of INSN.  */
  extract_insn (insn);

  VEC_truncate (int, earlyclobber_regclass, 0);
  for (opno = 0; opno < recog_data.n_operands; opno++)
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      /* Walk the constraint string one letter at a time, tracking the
	 accumulated class and whether '&' (earlyclobber) was seen.  */
      case '=': case '+': case '?':

      case 'm': case '<': case '>': case 'V': case 'o':
      case 'E': case 'F': case 'G': case 'H':
      case 's': case 'i': case 'n':
      case 'I': case 'J': case 'K': case 'L':
      case 'M': case 'N': case 'O': case 'P':
      case '0': case '1': case '2': case '3': case '4':
      case '5': case '6': case '7': case '8': case '9':
	/* These don't say anything we care about.  */

      /* At an alternative boundary, record the class if this
	 alternative carried an earlyclobber marker and the class has
	 not been recorded yet.  */
      if (amp_p && class != NO_REGS)
	  VEC_iterate (int, earlyclobber_regclass, i, rc);
	    if (rc == (int) class)

	  /* We use VEC_quick_push here because
	     earlyclobber_regclass holds no more than
	     N_REG_CLASSES elements.  */
	  VEC_quick_push (int, earlyclobber_regclass, (int) class);

      /* A general-register constraint letter.  */
      class = GENERAL_REGS;

      /* Target-specific register-class constraint letter.  */
      class = REG_CLASS_FROM_CONSTRAINT (c, p);

      /* Constraint letters may be multi-character.  */
      p += CONSTRAINT_LEN (c, p);
/* The function checks that pseudo-register *X has a class
   intersecting with the class of a pseudo-register that could be
   early clobbered in the same insn.

   This function is a no-op if earlyclobber_regclass is empty.

   Reload can assign the same hard register to uninitialized
   pseudo-register and early clobbered pseudo-register in an insn if
   the pseudo-register is used first time in given BB and not lived at
   the BB start.  To prevent this we don't change life information for
   such pseudo-registers.  */

df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
  enum reg_class pref_class, alt_class;
  struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;

  /* Only pseudo registers are of interest here.  */
  if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
      /* Only pseudos already gen'd or killed in this block.  */
      if (bitmap_bit_p (bb_info->kill, regno)
	  || bitmap_bit_p (bb_info->gen, regno))
	  pref_class = reg_preferred_class (regno);
	  alt_class = reg_alternate_class (regno);
	  /* Mark the pseudo if either of its allocation classes
	     overlaps a recorded earlyclobber class.  */
	  for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
	      if (reg_classes_intersect_p (rc, pref_class)
		  && reg_classes_intersect_p (rc, alt_class)))
		  bitmap_set_bit (bb_info->earlyclobber, regno);
2346 /* The function processes all pseudo-registers in *X with the aid of
2347 previous function. */
2350 df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2352 for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
/* Compute local uninitialized register info for basic block BB.  */

df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
  struct df *df = dflow->df;
  basic_block bb = BASIC_BLOCK (bb_index);
  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);

  /* Artificial defs at the top of the block generate before any insn.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	unsigned int regno = DF_REF_REGNO (def);
	bitmap_set_bit (bb_info->gen, regno);

  FOR_BB_INSNS (bb, insn)
      /* Let note_stores classify every store in this insn as gen or
	 kill via df_urec_mark_reg_change.  */
      note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);

      /* When scanning post-allocation, also record pseudos endangered
	 by earlyclobber operands in this insn.  */
      if (df_state & (DF_SCAN_GLOBAL | DF_SCAN_POST_ALLOC)
	  && df_urec_check_earlyclobber (insn))
	  struct df_urec_problem_data *problem_data =
	    (struct df_urec_problem_data *) dflow->problem_data;
	  problem_data->earlyclobbers_found = true;
	  note_uses (&PATTERN (insn),
		     df_urec_mark_reg_use_for_earlyclobber_1, bb_info);

  /* Artificial defs at the bottom of the block.  */
  for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
    if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
	unsigned int regno = DF_REF_REGNO (def);
	bitmap_set_bit (bb_info->gen, regno);
/* Compute local uninitialized register info.  */

df_urec_local_compute (struct dataflow *dflow,
		       bitmap all_blocks ATTRIBUTE_UNUSED,
		       bitmap rescan_blocks)
  unsigned int bb_index;
  HARD_REG_SET zero, stack_hard_regs, used;
  struct df_urec_problem_data *problem_data =
    (struct df_urec_problem_data *) dflow->problem_data;

  /* Any register that MAY be allocated to a register stack (like the
     387) is treated poorly.  Each such register is marked as being
     live everywhere.  This keeps the register allocator and the
     subsequent passes from doing anything useful with these values.

     FIXME: This seems like an incredibly poor idea.  */
  /* NOTE(review): FIRST_STACK_REG/LAST_STACK_REG exist only on
     STACK_REGS targets -- this section is presumably guarded by
     #ifdef STACK_REGS in the full source; confirm.  */

  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT (stack_hard_regs, i);
  problem_data->stack_regs = BITMAP_ALLOC (NULL);
  /* Record each pseudo whose preferred or alternate class overlaps
     the stack registers.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      GO_IF_HARD_REG_EQUAL (used, zero, skip);
      bitmap_set_bit (problem_data->stack_regs, i);

  /* We know that earlyclobber_regclass holds no more than
     N_REG_CLASSES elements.  See df_urec_check_earlyclobber.  */
  earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);

  EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
      df_urec_bb_local_compute (dflow, bb_index);

  VEC_free (int, heap, earlyclobber_regclass);
2453 /* Initialize the solution vectors. */
2456 df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2458 unsigned int bb_index;
2461 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2463 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2465 bitmap_copy (bb_info->out, bb_info->gen);
2466 bitmap_clear (bb_info->in);
/* Or in the stack regs, hard regs and early clobber regs into the
   ur_in sets of all of the blocks.  The solution is also trimmed
   against the LR (liveness) solution.  */

df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
  struct df *df = dflow->df;
  struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
  bitmap tmp = BITMAP_ALLOC (NULL);
  unsigned int bb_index;
  struct df_urec_problem_data *problem_data =
    (struct df_urec_problem_data *) dflow->problem_data;

  EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
      struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);

      /* The entry and exit blocks carry no real insns.  */
      if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
	  /* Pseudos endangered by earlyclobber operands must be
	     considered live into the block.  */
	  if (problem_data->earlyclobbers_found)
	    bitmap_ior_into (bb_info->in, bb_info->earlyclobber);

	  /* We can not use the same stack register for uninitialized
	     pseudo-register and another living pseudo-register
	     because if the uninitialized pseudo-register dies,
	     subsequent pass reg-stack will be confused (it will
	     believe that the other register dies).  */
	  bitmap_ior_into (bb_info->in, problem_data->stack_regs);
	  bitmap_ior_into (bb_info->out, problem_data->stack_regs);

      /* No register may reach a location where it is not used.  Thus
	 we trim the rr result to the places where it is used.  */
      bitmap_and_into (bb_info->in, bb_lr_info->in);
      bitmap_and_into (bb_info->out, bb_lr_info->out);

      /* Hard registers may still stick in the ur_out set, but not
	 be in the ur_in set, if their only mention was in a call
	 in this block.  This is because a call kills in the lr
	 problem but does not kill in the rr problem.  To clean
	 this up, we execute the transfer function on the lr_in
	 set and then use that to knock bits out of ur_out.  */
      bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
      bitmap_and_into (bb_info->out, tmp);

  BITMAP_FREE (problem_data->stack_regs);
2531 /* Confluence function that ignores fake edges. */
2534 df_urec_confluence_n (struct dataflow *dflow, edge e)
2536 bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2537 bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2539 if (e->flags & EDGE_FAKE)
2542 bitmap_ior_into (op1, op2);
2546 /* Transfer function. */
2549 df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2551 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2552 bitmap in = bb_info->in;
2553 bitmap out = bb_info->out;
2554 bitmap gen = bb_info->gen;
2555 bitmap kill = bb_info->kill;
2557 return bitmap_ior_and_compl (out, gen, in, kill);
/* Free all storage associated with the problem.  */

df_urec_free (struct dataflow *dflow)
  if (dflow->block_info)
      /* Release each block's bitmaps ...  */
      for (i = 0; i < dflow->block_info_size; i++)
	  struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
	  BITMAP_FREE (bb_info->gen);
	  BITMAP_FREE (bb_info->kill);
	  BITMAP_FREE (bb_info->in);
	  BITMAP_FREE (bb_info->out);
	  BITMAP_FREE (bb_info->earlyclobber);

      /* ... then the pool, the vector, and the problem-wide data.  */
      free_alloc_pool (dflow->block_pool);

      dflow->block_info_size = 0;
      free (dflow->block_info);
      free (dflow->problem_data);
/* Debugging info: print each block's in/gen/kill/earlyclobber/out
   sets to FILE.  */

df_urec_dump (struct dataflow *dflow, FILE *file)
  fprintf (file, "Undefined regs:\n");
      struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
      df_print_bb_index (bb, file);

      fprintf (file, " in \t");
      dump_bitmap (file, bb_info->in);
      fprintf (file, " gen \t");
      dump_bitmap (file, bb_info->gen);
      fprintf (file, " kill\t");
      dump_bitmap (file, bb_info->kill);
      fprintf (file, " ec\t");
      dump_bitmap (file, bb_info->earlyclobber);
      fprintf (file, " out \t");
      dump_bitmap (file, bb_info->out);
2623 /* All of the information associated with every instance of the problem. */
2625 static struct df_problem problem_UREC =
2627 DF_UREC, /* Problem id. */
2628 DF_FORWARD, /* Direction. */
2629 df_urec_alloc, /* Allocate the problem specific data. */
2630 NULL, /* Reset global information. */
2631 df_urec_free_bb_info, /* Free basic block info. */
2632 df_urec_local_compute, /* Local compute function. */
2633 df_urec_init, /* Init the solution specific data. */
2634 df_iterative_dataflow, /* Iterative solver. */
2635 NULL, /* Confluence operator 0. */
2636 df_urec_confluence_n, /* Confluence operator n. */
2637 df_urec_transfer_function, /* Transfer function. */
2638 df_urec_local_finalize, /* Finalize function. */
2639 df_urec_free, /* Free all of the problem information. */
2640 df_urec_dump, /* Debugging. */
2641 &problem_LR /* Dependent problem. */
2645 /* Create a new DATAFLOW instance and add it to an existing instance
2646 of DF. The returned structure is what is used to get at the
2650 df_urec_add_problem (struct df *df)
2652 return df_add_problem (df, &problem_UREC);
/*----------------------------------------------------------------------------
   CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS

   Link either the defs to the uses and / or the uses to the defs.

   These problems are set up like the other dataflow problems so that
   they nicely fit into the framework.  They are much simpler and only
   involve a single traversal of instructions and an examination of
   the reaching defs information (the dependent problem).
----------------------------------------------------------------------------*/
2668 struct df_chain_problem_data
2674 /* Create def-use or use-def chains. */
2677 df_chain_alloc (struct dataflow *dflow,
2678 bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2680 struct df *df = dflow->df;
2682 struct df_chain_problem_data *problem_data =
2683 (struct df_chain_problem_data *) dflow->problem_data;
2685 /* Wholesale destruction of the old chains. */
2686 if (dflow->block_pool)
2687 free_alloc_pool (dflow->block_pool);
2689 dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
2690 sizeof (struct df_link), 100);
2692 if (problem_data->flags & DF_DU_CHAIN)
2694 if (!df->def_info.refs_organized)
2695 df_reorganize_refs (&df->def_info);
2697 /* Clear out the pointers from the refs. */
2698 for (i = 0; i < DF_DEFS_SIZE (df); i++)
2700 struct df_ref *ref = df->def_info.refs[i];
2701 DF_REF_CHAIN (ref) = NULL;
2705 if (problem_data->flags & DF_UD_CHAIN)
2707 if (!df->use_info.refs_organized)
2708 df_reorganize_refs (&df->use_info);
2709 for (i = 0; i < DF_USES_SIZE (df); i++)
2711 struct df_ref *ref = df->use_info.refs[i];
2712 DF_REF_CHAIN (ref) = NULL;
2718 /* Reset all def_use and use_def chains in INSN. */
2721 df_chain_insn_reset (struct dataflow *dflow, rtx insn)
2723 struct df *df = dflow->df;
2724 struct df_chain_problem_data *problem_data =
2725 (struct df_chain_problem_data *) dflow->problem_data;
2726 unsigned int uid = INSN_UID (insn);
2727 struct df_insn_info *insn_info = NULL;
2730 if (uid < df->insns_size)
2731 insn_info = DF_INSN_UID_GET (df, uid);
2735 if (problem_data->flags & DF_DU_CHAIN)
2737 ref = insn_info->defs;
2741 ref = ref->next_ref;
2745 if (problem_data->flags & DF_UD_CHAIN)
2747 ref = insn_info->uses;
2751 ref = ref->next_ref;
2758 /* Reset all def_use and use_def chains in basic block. */
2761 df_chain_bb_reset (struct dataflow *dflow, unsigned int bb_index)
2763 struct df *df = dflow->df;
2764 struct df_chain_problem_data *problem_data =
2765 (struct df_chain_problem_data *) dflow->problem_data;
2767 basic_block bb = BASIC_BLOCK (bb_index);
2769 /* Some one deleted the basic block out from under us. */
2773 FOR_BB_INSNS (bb, insn)
2777 /* Record defs within INSN. */
2778 df_chain_insn_reset (dflow, insn);
2782 /* Get rid of any chains in artificial uses or defs. */
2783 if (problem_data->flags & DF_DU_CHAIN)
2786 def = df_get_artificial_defs (df, bb_index);
2790 def = def->next_ref;
2794 if (problem_data->flags & DF_UD_CHAIN)
2797 use = df_get_artificial_uses (df, bb_index);
2801 use = use->next_ref;
2807 /* Reset all of the chains when the set of basic blocks changes. */
2811 df_chain_reset (struct dataflow *dflow, bitmap blocks_to_clear)
2814 unsigned int bb_index;
2816 EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear, 0, bb_index, bi)
2818 df_chain_bb_reset (dflow, bb_index);
2821 free_alloc_pool (dflow->block_pool);
2822 dflow->block_pool = NULL;
2826 /* Create the chains for a list of USEs. */
2829 df_chain_create_bb_process_use (struct dataflow *dflow,
2830 struct df_chain_problem_data *problem_data,
2833 enum df_ref_flags top_flag)
2835 struct df *df = dflow->df;
2837 unsigned int def_index;
2841 /* Do not want to go through this for an uninitialized var. */
2842 unsigned int uregno = DF_REF_REGNO (use);
2843 int count = DF_REG_DEF_GET (df, uregno)->n_refs;
2846 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2848 unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
2849 unsigned int last_index = first_index + count - 1;
2851 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2854 if (def_index > last_index)
2857 def = DF_DEFS_GET (df, def_index);
2858 if (problem_data->flags & DF_DU_CHAIN)
2859 df_chain_create (dflow, def, use);
2860 if (problem_data->flags & DF_UD_CHAIN)
2861 df_chain_create (dflow, use, def);
2865 use = use->next_ref;
2869 /* Reset the storage pool that the def-use or use-def chains have been
2870 allocated in. We do not need to re adjust the pointers in the refs,
2871 these have already been clean out.*/
2873 /* Create chains from reaching defs bitmaps for basic block BB. */
2875 df_chain_create_bb (struct dataflow *dflow,
2876 struct dataflow *rd_dflow,
2877 unsigned int bb_index)
2879 basic_block bb = BASIC_BLOCK (bb_index);
2880 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
2882 bitmap cpy = BITMAP_ALLOC (NULL);
2883 struct df *df = dflow->df;
2884 struct df_chain_problem_data *problem_data =
2885 (struct df_chain_problem_data *) dflow->problem_data;
2888 bitmap_copy (cpy, bb_info->in);
2890 /* Since we are going forwards, process the artificial uses first
2891 then the artificial defs second. */
2894 /* Create the chains for the artificial uses from the EH_USES at the
2895 beginning of the block. */
2896 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2897 df_get_artificial_uses (df, bb->index),
2901 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2902 if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
2904 unsigned int dregno = DF_REF_REGNO (def);
2905 bitmap_clear_range (cpy,
2906 DF_REG_DEF_GET (df, dregno)->begin,
2907 DF_REG_DEF_GET (df, dregno)->n_refs);
2908 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2909 bitmap_set_bit (cpy, DF_REF_ID (def));
2912 /* Process the regular instructions next. */
2913 FOR_BB_INSNS (bb, insn)
2916 unsigned int uid = INSN_UID (insn);
2918 if (! INSN_P (insn))
2921 /* Now scan the uses and link them up with the defs that remain
2922 in the cpy vector. */
2924 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2925 DF_INSN_UID_GET (df, uid)->uses, 0);
2927 /* Since we are going forwards, process the defs second. This
2928 pass only changes the bits in cpy. */
2929 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
2931 unsigned int dregno = DF_REF_REGNO (def);
2932 bitmap_clear_range (cpy,
2933 DF_REG_DEF_GET (df, dregno)->begin,
2934 DF_REG_DEF_GET (df, dregno)->n_refs);
2935 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2936 bitmap_set_bit (cpy, DF_REF_ID (def));
2940 /* Create the chains for the artificial uses of the hard registers
2941 at the end of the block. */
2942 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2943 df_get_artificial_uses (df, bb->index), 0);
2946 /* Create def-use chains from reaching use bitmaps for basic blocks
2950 df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
2952 unsigned int bb_index;
2954 struct df *df = dflow->df;
2955 struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
2957 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2959 df_chain_create_bb (dflow, rd_dflow, bb_index);
2964 /* Free all storage associated with the problem. */
2967 df_chain_free (struct dataflow *dflow)
2969 free_alloc_pool (dflow->block_pool);
2970 free (dflow->problem_data);
2975 /* Debugging info. */
2978 df_chains_dump (struct dataflow *dflow, FILE *file)
2980 struct df *df = dflow->df;
2982 struct df_chain_problem_data *problem_data =
2983 (struct df_chain_problem_data *) dflow->problem_data;
2985 if (problem_data->flags & DF_DU_CHAIN)
2987 fprintf (file, "Def-use chains:\n");
2988 for (j = 0; j < df->def_info.bitmap_size; j++)
2990 struct df_ref *def = DF_DEFS_GET (df, j);
2993 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
2994 j, DF_REF_BBNO (def),
2996 DF_INSN_LUID (df, DF_REF_INSN (def)):
2998 DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
2999 DF_REF_REGNO (def));
3000 if (def->flags & DF_REF_READ_WRITE)
3001 fprintf (file, "read/write ");
3002 df_chain_dump (df, DF_REF_CHAIN (def), file);
3003 fprintf (file, "\n");
3008 if (problem_data->flags & DF_UD_CHAIN)
3010 fprintf (file, "Use-def chains:\n");
3011 for (j = 0; j < df->use_info.bitmap_size; j++)
3013 struct df_ref *use = DF_USES_GET (df, j);
3016 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
3017 j, DF_REF_BBNO (use),
3019 DF_INSN_LUID (df, DF_REF_INSN (use))
3021 DF_REF_INSN (DF_USES_GET (df, j)) ?
3022 DF_REF_INSN_UID (DF_USES_GET (df,j))
3024 DF_REF_REGNO (use));
3025 if (use->flags & DF_REF_READ_WRITE)
3026 fprintf (file, "read/write ");
3027 if (use->flags & DF_REF_STRIPPED)
3028 fprintf (file, "stripped ");
3029 if (use->flags & DF_REF_IN_NOTE)
3030 fprintf (file, "note ");
3031 df_chain_dump (df, DF_REF_CHAIN (use), file);
3032 fprintf (file, "\n");
3039 static struct df_problem problem_CHAIN =
3041 DF_CHAIN, /* Problem id. */
3042 DF_NONE, /* Direction. */
3043 df_chain_alloc, /* Allocate the problem specific data. */
3044 df_chain_reset, /* Reset global information. */
3045 NULL, /* Free basic block info. */
3046 NULL, /* Local compute function. */
3047 NULL, /* Init the solution specific data. */
3048 NULL, /* Iterative solver. */
3049 NULL, /* Confluence operator 0. */
3050 NULL, /* Confluence operator n. */
3051 NULL, /* Transfer function. */
3052 df_chain_finalize, /* Finalize function. */
3053 df_chain_free, /* Free all of the problem information. */
3054 df_chains_dump, /* Debugging. */
3055 &problem_RD /* Dependent problem. */
3059 /* Create a new DATAFLOW instance and add it to an existing instance
3060 of DF. The returned structure is what is used to get at the
3064 df_chain_add_problem (struct df *df, int flags)
3066 struct df_chain_problem_data *problem_data =
3067 XNEW (struct df_chain_problem_data);
3068 struct dataflow *dflow = df_add_problem (df, &problem_CHAIN);
3070 dflow->problem_data = problem_data;
3071 problem_data->flags = flags;
/*----------------------------------------------------------------------------
   REGISTER INFORMATION

   Currently this consists of only lifetime information.  But the plan is
   to enhance it so that it produces all of the register information needed
   by the register allocators.
----------------------------------------------------------------------------*/
3086 struct df_ri_problem_data
3092 /* Allocate the lifetime information. */
3095 df_ri_alloc (struct dataflow *dflow, bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
3097 struct df_ri_problem_data *problem_data =
3098 (struct df_ri_problem_data *) dflow->problem_data;
3100 if (!dflow->problem_data)
3102 struct df_ri_problem_data *problem_data = XNEW (struct df_ri_problem_data);
3103 dflow->problem_data = problem_data;
3106 problem_data->lifetime = xrealloc (problem_data->lifetime,
3107 max_reg_num () *sizeof (int));
3108 memset (problem_data->lifetime, 0, max_reg_num () *sizeof (int));
3111 /* Compute register info: lifetime, bb, and number of defs and uses
3112 for basic block BB. */
3115 df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index, bitmap live)
3117 struct df *df = dflow->df;
3118 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
3119 struct df_ri_problem_data *problem_data =
3120 (struct df_ri_problem_data *) dflow->problem_data;
3121 basic_block bb = BASIC_BLOCK (bb_index);
3124 bitmap_copy (live, bb_info->out);
3126 FOR_BB_INSNS_REVERSE (bb, insn)
3128 unsigned int uid = INSN_UID (insn);
3134 if (! INSN_P (insn))
3137 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
3139 unsigned int dregno = DF_REF_REGNO (def);
3141 /* Kill this register. */
3142 bitmap_clear_bit (live, dregno);
3145 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
3147 unsigned int uregno = DF_REF_REGNO (use);
3149 if (!bitmap_bit_p (live, uregno))
3151 use->flags |= DF_REF_DIES_AFTER_THIS_USE;
3152 /* This register is now live. */
3153 bitmap_set_bit (live, uregno);
3156 use->flags &= ~DF_REF_DIES_AFTER_THIS_USE;
3159 /* Increment lifetimes of all live registers. */
3160 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
3162 problem_data->lifetime[regno]++;
3168 /* Compute register info: lifetime, bb, and number of defs and uses. */
3170 df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
3171 bitmap blocks_to_scan)
3173 unsigned int bb_index;
3177 live = BITMAP_ALLOC (NULL);
3179 EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
3181 df_ri_bb_compute (dflow, bb_index, live);
3188 /* Free all storage associated with the problem. */
3191 df_ri_free (struct dataflow *dflow)
3193 struct df_ri_problem_data *problem_data =
3194 (struct df_ri_problem_data *) dflow->problem_data;
3196 free (problem_data->lifetime);
3197 free (dflow->problem_data);
3202 /* Debugging info. */
3205 df_ri_dump (struct dataflow *dflow, FILE *file)
3207 struct df_ri_problem_data *problem_data =
3208 (struct df_ri_problem_data *) dflow->problem_data;
3211 fprintf (file, "Register info:\n");
3212 for (j = 0; j < max_reg_num (); j++)
3214 fprintf (file, "reg %d life %d\n", j, problem_data->lifetime[j]);
3218 /* All of the information associated every instance of the problem. */
3220 static struct df_problem problem_RI =
3222 DF_RI, /* Problem id. */
3223 DF_NONE, /* Direction. */
3224 df_ri_alloc, /* Allocate the problem specific data. */
3225 NULL, /* Reset global information. */
3226 NULL, /* Free basic block info. */
3227 df_ri_compute, /* Local compute function. */
3228 NULL, /* Init the solution specific data. */
3229 NULL, /* Iterative solver. */
3230 NULL, /* Confluence operator 0. */
3231 NULL, /* Confluence operator n. */
3232 NULL, /* Transfer function. */
3233 NULL, /* Finalize function. */
3234 df_ri_free, /* Free all of the problem information. */
3235 df_ri_dump, /* Debugging. */
3236 &problem_UR /* Dependent problem. */
3240 /* Create a new DATAFLOW instance and add it to an existing instance
3241 of DF. The returned structure is what is used to get at the
3245 df_ri_add_problem (struct df *df)
3247 return df_add_problem (df, &problem_RI);
3251 /* Return total lifetime (in insns) of REG. */
3253 df_reg_lifetime (struct df *df, rtx reg)
3255 struct dataflow *dflow = df->problems_by_index[DF_RI];
3256 struct df_ri_problem_data *problem_data =
3257 (struct df_ri_problem_data *) dflow->problem_data;
3258 return problem_data->lifetime[REGNO (reg)];