1 /* Standard problems for dataflow support routines.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 2, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING. If not, write to the Free
23 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
28 #include "coretypes.h"
32 #include "insn-config.h"
37 #include "alloc-pool.h"
39 #include "hard-reg-set.h"
40 #include "basic-block.h"
/* Threshold on a register's reference count at or above which its
   kill set is tracked "sparsely" (by regno) rather than by the set of
   individual reference ids.  */
46 #define DF_SPARSE_THRESHOLD 32
/* Scratch bitmaps of regnos defined so far in the current basic block
   and current insn; used by the local-compute passes below to detect
   the first def of a regno.  Allocated/freed around each pass.  */
48 static bitmap seen_in_block = NULL;
49 static bitmap seen_in_insn = NULL;
52 /*----------------------------------------------------------------------------
53 Public functions access functions for the dataflow problems.
54 ----------------------------------------------------------------------------*/
56 /* Get the instance of the problem that DFLOW is dependent on. */
59 df_get_dependent_problem (struct dataflow *dflow)
61 struct df *df = dflow->df;
62 struct df_problem *dependent_problem = dflow->problem->dependent_problem;
64 gcc_assert (dependent_problem);
65 return df->problems_by_index[dependent_problem->id];
69 /* Create a du or ud chain from SRC to DST and link it into SRC. */
72 df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
74 struct df_link *head = DF_REF_CHAIN (src);
75 struct df_link *link = pool_alloc (dflow->block_pool);;
77 DF_REF_CHAIN (src) = link;
84 /* Delete a du or ud chain for REF. If LINK is NULL, delete all
85 chains for ref and check to see if the reverse chains can also be
86 deleted. If LINK is not NULL it must be a link off of ref. In
87 this case, the other end is not deleted. */
/* NOTE(review): the listing elides lines here (its embedded numbering
   jumps, e.g. 87->90, 101->106, 106->113); the return type, braces
   and parts of the list walk are missing as shown.  */
90 df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
92 struct df_link *chain = DF_REF_CHAIN (ref);
/* Case 1: caller passed a specific LINK to remove from REF's chain.  */
95 /* Link was the first element in the chain. */
97 DF_REF_CHAIN (ref) = link->next;
100 /* Link is an internal element in the chain. */
101 struct df_link *prev = chain;
/* Bypass the node being removed.  */
106 prev->next = chain->next;
/* Return the link's storage to the dataflow's alloc pool.  */
113 pool_free (dflow->block_pool, link);
117 /* If chain is NULL here, it was because of a recursive call
118 when the other flavor of chains was not built. Just run thru
119 the entire chain calling the other side and then deleting the
/* Case 2 (LINK == NULL): walk the whole chain, recursing to remove
   the mirror (du vs ud) link first, then freeing each element.  */
123 struct df_link *next = chain->next;
124 /* Delete the other side if it exists. */
125 df_chain_unlink (dflow, chain->ref, chain);
132 /* Copy the du or ud chain starting at FROM_REF and attach it to
136 df_chain_copy (struct dataflow *dflow,
137 struct df_ref *to_ref,
138 struct df_link *from_ref)
142 df_chain_create (dflow, to_ref, from_ref->ref);
143 from_ref = from_ref->next;
148 /* Get the live in set for BB no matter what problem happens to be
152 df_get_live_in (struct df *df, basic_block bb)
154 gcc_assert (df->problems_by_index[DF_LR]);
156 if (df->problems_by_index[DF_UREC])
157 return DF_RA_LIVE_IN (df, bb);
158 else if (df->problems_by_index[DF_UR])
159 return DF_LIVE_IN (df, bb);
161 return DF_UPWARD_LIVE_IN (df, bb);
165 /* Get the live out set for BB no matter what problem happens to be
169 df_get_live_out (struct df *df, basic_block bb)
171 gcc_assert (df->problems_by_index[DF_LR]);
173 if (df->problems_by_index[DF_UREC])
174 return DF_RA_LIVE_OUT (df, bb);
175 else if (df->problems_by_index[DF_UR])
176 return DF_LIVE_OUT (df, bb);
178 return DF_UPWARD_LIVE_OUT (df, bb);
182 /*----------------------------------------------------------------------------
184 ----------------------------------------------------------------------------*/
186 /* Generic versions to get the void* version of the block info. Only
187 used inside the problem instace vectors. */
189 /* Grow the bb_info array. */
192 df_grow_bb_info (struct dataflow *dflow)
194 unsigned int new_size = last_basic_block + 1;
195 if (dflow->block_info_size < new_size)
197 new_size += new_size / 4;
198 dflow->block_info = xrealloc (dflow->block_info,
199 new_size *sizeof (void*));
200 memset (dflow->block_info + dflow->block_info_size, 0,
201 (new_size - dflow->block_info_size) *sizeof (void *));
202 dflow->block_info_size = new_size;
206 /* Dump a def-use or use-def chain for REF to FILE. */
209 df_chain_dump (struct df *df ATTRIBUTE_UNUSED, struct df_link *link, FILE *file)
211 fprintf (file, "{ ");
212 for (; link; link = link->next)
214 fprintf (file, "%c%d(bb %d insn %d) ",
215 DF_REF_REG_DEF_P (link->ref) ? 'd' : 'u',
216 DF_REF_ID (link->ref),
217 DF_REF_BBNO (link->ref),
218 DF_REF_INSN (link->ref) ? DF_REF_INSN_UID (link->ref) : -1);
224 /* Print some basic block info as part of df_dump. */
227 df_print_bb_index (basic_block bb, FILE *file)
232 fprintf (file, "( ");
233 FOR_EACH_EDGE (e, ei, bb->preds)
235 basic_block pred = e->src;
236 fprintf (file, "%d ", pred->index);
238 fprintf (file, ")->[%d]->( ", bb->index);
239 FOR_EACH_EDGE (e, ei, bb->succs)
241 basic_block succ = e->dest;
242 fprintf (file, "%d ", succ->index);
244 fprintf (file, ")\n");
248 /* Return the set of reference ids in CHAIN, caching the result in *BMAP. */
251 df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
253 bitmap ids = maps[regno];
257 unsigned int end = start + count;;
258 ids = BITMAP_ALLOC (NULL);
260 for (i = start; i < end; i++)
261 bitmap_set_bit (ids, i);
267 /* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
/* NOTE(review): the listing elides the two function headers here (the
   embedded numbering jumps 267->273 and 274->281); the lines below
   are the bodies of the allocate and free helpers for the scratch
   seen_in_block / seen_in_insn bitmaps.  */
273 seen_in_block = BITMAP_ALLOC (NULL);
274 seen_in_insn = BITMAP_ALLOC (NULL);
/* Matching teardown: release the scratch bitmaps allocated above.  */
281 BITMAP_FREE (seen_in_block);
282 BITMAP_FREE (seen_in_insn);
287 /*----------------------------------------------------------------------------
290 Find the locations in the function where each use site for a pseudo
293 ----------------------------------------------------------------------------*/
295 struct df_ru_problem_data
297 bitmap *use_sites; /* Bitmap of uses for each pseudo. */
298 unsigned int use_sites_size; /* Size of use_sites. */
299 /* The set of defs to regs invalidated by call. */
300 bitmap sparse_invalidated_by_call;
301 /* The set of defs to regs invalidate by call for ru. */
302 bitmap dense_invalidated_by_call;
305 /* Get basic block info. */
307 struct df_ru_bb_info *
308 df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
310 return (struct df_ru_bb_info *) dflow->block_info[index];
314 /* Set basic block info. */
317 df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
318 struct df_ru_bb_info *bb_info)
320 dflow->block_info[index] = bb_info;
324 /* Free basic block info. */
327 df_ru_free_bb_info (struct dataflow *dflow, void *vbb_info)
329 struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
332 BITMAP_FREE (bb_info->kill);
333 BITMAP_FREE (bb_info->sparse_kill);
334 BITMAP_FREE (bb_info->gen);
335 BITMAP_FREE (bb_info->in);
336 BITMAP_FREE (bb_info->out);
337 pool_free (dflow->block_pool, bb_info);
342 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
343 not touched unless the block is new. */
346 df_ru_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
348 unsigned int bb_index;
350 unsigned int reg_size = max_reg_num ();
352 if (! dflow->block_pool)
353 dflow->block_pool = create_alloc_pool ("df_ru_block pool",
354 sizeof (struct df_ru_bb_info), 50);
356 if (dflow->problem_data)
359 struct df_ru_problem_data *problem_data =
360 (struct df_ru_problem_data *) dflow->problem_data;
362 for (i = 0; i < problem_data->use_sites_size; i++)
364 bitmap bm = problem_data->use_sites[i];
368 problem_data->use_sites[i] = NULL;
372 if (problem_data->use_sites_size > reg_size)
374 problem_data->use_sites
375 = xrealloc (problem_data->use_sites, reg_size *sizeof (bitmap));
376 memset (problem_data->use_sites, 0,
377 (reg_size - problem_data->use_sites_size) *sizeof (bitmap));
378 problem_data->use_sites_size = reg_size;
381 bitmap_clear (problem_data->sparse_invalidated_by_call);
382 bitmap_clear (problem_data->dense_invalidated_by_call);
386 struct df_ru_problem_data *problem_data =
387 xmalloc (sizeof (struct df_ru_problem_data));
388 dflow->problem_data = problem_data;
390 problem_data->use_sites = xcalloc (reg_size, sizeof (bitmap));
391 problem_data->use_sites_size = reg_size;
392 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
393 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
396 df_grow_bb_info (dflow);
398 /* Because of the clustering of all def sites for the same pseudo,
399 we have to process all of the blocks before doing the
402 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
404 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
407 bitmap_clear (bb_info->kill);
408 bitmap_clear (bb_info->sparse_kill);
409 bitmap_clear (bb_info->gen);
413 bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
414 df_ru_set_bb_info (dflow, bb_index, bb_info);
415 bb_info->kill = BITMAP_ALLOC (NULL);
416 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
417 bb_info->gen = BITMAP_ALLOC (NULL);
418 bb_info->in = BITMAP_ALLOC (NULL);
419 bb_info->out = BITMAP_ALLOC (NULL);
425 /* Process a list of DEFs for df_ru_bb_local_compute. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 429->432, 447->451, 454->456, 457->460): the DEF-list
   parameter, the walk over that list, braces and the else of the
   sparse/dense split are missing as shown.  */
428 df_ru_bb_local_compute_process_def (struct dataflow *dflow,
429 struct df_ru_bb_info *bb_info,
432 struct df *df = dflow->df;
/* Per-def bookkeeping: the contiguous range of use ids for REGNO.  */
435 unsigned int regno = DF_REF_REGNO (def);
436 unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
437 unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
438 if (!bitmap_bit_p (seen_in_block, regno))
440 /* The first def for regno, causes the kill info to be
441 generated and the gen information to cleared. */
442 if (!bitmap_bit_p (seen_in_insn, regno))
/* Sparse path: many uses of REGNO, so record the kill by regno and
   clear the whole contiguous use-id range from gen.  */
444 if (n_uses > DF_SPARSE_THRESHOLD)
446 bitmap_set_bit (bb_info->sparse_kill, regno);
447 bitmap_clear_range (bb_info->gen, begin, n_uses);
/* Dense path: kill/clear the cached bitmap of individual use ids.  */
451 struct df_ru_problem_data *problem_data =
452 (struct df_ru_problem_data *) dflow->problem_data;
454 df_ref_bitmap (problem_data->use_sites, regno,
456 bitmap_ior_into (bb_info->kill, uses);
457 bitmap_and_compl_into (bb_info->gen, uses);
/* Remember that REGNO was defined in the current insn.  */
460 bitmap_set_bit (seen_in_insn, regno);
467 /* Process a list of USEs for df_ru_bb_local_compute. */
/* NOTE(review): the listing elides the USE-list parameter, the loop
   over that list and the braces (embedded numbering jumps 470->472,
   472->476).  TOP_FLAG selects whether only artificial at-top uses
   (DF_REF_AT_TOP) or only ordinary uses are processed.  */
470 df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
472 enum df_ref_flags top_flag)
476 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
478 /* Add use to set of gens in this BB unless we have seen a
479 def in a previous instruction. */
480 unsigned int regno = DF_REF_REGNO (use);
481 if (!bitmap_bit_p (seen_in_block, regno))
482 bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
488 /* Compute local reaching use (upward exposed use) info for basic
489 block BB. USE_INFO->REGS[R] caches the set of uses for register R. */
/* NOTE(review): the listing elides lines throughout this function
   (embedded numbering jumps 495->498, 506->510, 517->521, 529->531):
   the return type, braces, an insn predicate and the at-top flag
   argument are missing as shown.  */
491 df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
493 struct df *df = dflow->df;
494 basic_block bb = BASIC_BLOCK (bb_index);
495 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
498 /* Set when a def for regno is seen. */
499 bitmap_clear (seen_in_block);
500 bitmap_clear (seen_in_insn);
503 /* Variables defined in the prolog that are used by the exception
/* Artificial at-top uses are processed first.  */
505 df_ru_bb_local_compute_process_use (bb_info,
506 df_get_artificial_uses (df, bb_index),
510 /* Process the artificial defs first since these are at the top of
512 df_ru_bb_local_compute_process_def (dflow, bb_info,
513 df_get_artificial_defs (df, bb_index));
/* Walk the insns of BB, accumulating gen/kill.  */
515 FOR_BB_INSNS (bb, insn)
517 unsigned int uid = INSN_UID (insn);
521 df_ru_bb_local_compute_process_def (dflow, bb_info,
522 DF_INSN_UID_GET (df, uid)->defs);
524 /* The use processing must happen after the defs processing even
525 though the uses logically happen first since the defs clear
526 the gen set. Otherwise, a use for regno occuring in the same
527 instruction as a def for regno would be cleared. */
528 df_ru_bb_local_compute_process_use (bb_info,
529 DF_INSN_UID_GET (df, uid)->uses, 0);
/* Fold this insn's defs into the block-level seen set, then reset
   the per-insn set for the next insn.  */
531 bitmap_ior_into (seen_in_block, seen_in_insn);
532 bitmap_clear (seen_in_insn);
535 /* Process the hardware registers that are always live. */
536 df_ru_bb_local_compute_process_use (bb_info,
537 df_get_artificial_uses (df, bb_index), 0);
541 /* Compute local reaching use (upward exposed use) info for each basic
542 block within BLOCKS. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 546->548, 555->559, 572->575): the all_blocks parameter,
   braces, iterator declarations and the else branch of the sparse
   test are missing as shown.  */
544 df_ru_local_compute (struct dataflow *dflow,
546 bitmap rescan_blocks ATTRIBUTE_UNUSED)
548 struct df *df = dflow->df;
549 unsigned int bb_index;
552 struct df_ru_problem_data *problem_data =
553 (struct df_ru_problem_data *) dflow->problem_data;
554 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
555 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
/* Use refs must be organized (clustered per regno) before the local
   compute can index them by [begin, begin+n_refs).  */
559 if (!df->use_info.refs_organized)
560 df_reorganize_refs (&df->use_info);
562 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
564 df_ru_bb_local_compute (dflow, bb_index);
567 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
568 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
570 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
/* Large ref sets go in the sparse (per-regno) set; small ones are
   expanded into individual use ids in the dense set.  */
571 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
572 bitmap_set_bit (sparse_invalidated, regno);
575 bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
576 reg_info->begin, reg_info->n_refs);
577 bitmap_ior_into (dense_invalidated, defs);
585 /* Initialize the solution bit vectors for problem. */
588 df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
590 unsigned int bb_index;
593 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
595 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
596 bitmap_copy (bb_info->in, bb_info->gen);
597 bitmap_clear (bb_info->out);
602 /* Out of target gets or of in of source. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 608->610, 616->619, 624->628): the return type, braces,
   iterator declarations and the else branch (the non-EH case at 628)
   are missing as shown.  RU is backward, so confluence flows the
   successor's IN into the predecessor's OUT.  */
605 df_ru_confluence_n (struct dataflow *dflow, edge e)
607 bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
608 bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;
610 if (e->flags & EDGE_EH)
612 struct df_ru_problem_data *problem_data =
613 (struct df_ru_problem_data *) dflow->problem_data;
614 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
615 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
616 struct df *df = dflow->df;
/* Across an EH edge, knock out uses invalidated by call: the dense
   set by direct set subtraction, the sparse set by clearing each
   regno's contiguous use-id range.  */
619 bitmap_ior_and_compl_into (op1, op2, dense_invalidated);
620 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
622 bitmap_clear_range (op1,
623 DF_REG_USE_GET (df, regno)->begin,
624 DF_REG_USE_GET (df, regno)->n_refs);
/* Non-EH edge: plain union.  */
628 bitmap_ior_into (op1, op2);
632 /* Transfer function. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 644->646, 647->650, 658->660, and cuts off after 662): the
   return type (bool), braces, iterator declarations and the tail that
   installs TMP / frees it / returns CHANGED are missing as shown.  */
635 df_ru_transfer_function (struct dataflow *dflow, int bb_index)
637 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
640 bitmap in = bb_info->in;
641 bitmap out = bb_info->out;
642 bitmap gen = bb_info->gen;
643 bitmap kill = bb_info->kill;
644 bitmap sparse_kill = bb_info->sparse_kill;
/* Fast path (no sparse kills): in = gen U (out - kill), and the
   helper reports whether IN changed.  */
646 if (bitmap_empty_p (sparse_kill))
647 return bitmap_ior_and_compl (in, gen, out, kill);
650 struct df *df = dflow->df;
651 bool changed = false;
652 bitmap tmp = BITMAP_ALLOC (NULL);
/* NOTE(review): the sparse path below starts from IN and compares
   against OUT, which is asymmetric with the dense path above (which
   computes IN from OUT).  Looks like it may mirror the forward RD
   transfer function by mistake — verify the dataflow direction.  */
653 bitmap_copy (tmp, in);
654 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
656 bitmap_clear_range (tmp,
657 DF_REG_USE_GET (df, regno)->begin,
658 DF_REG_USE_GET (df, regno)->n_refs);
660 bitmap_and_compl_into (tmp, kill);
661 bitmap_ior_into (tmp, gen);
662 changed = !bitmap_equal_p (tmp, out);
675 /* Free all storage associated with the problem. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 686->689, 693->697, 701->706, and the closing braces): the
   return type, loop braces, NULL guards and the final frees of the
   dflow struct itself are missing as shown.  */
678 df_ru_free (struct dataflow *dflow)
681 struct df_ru_problem_data *problem_data =
682 (struct df_ru_problem_data *) dflow->problem_data;
/* Release every allocated per-block info.  */
684 for (i = 0; i < dflow->block_info_size; i++)
686 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
689 BITMAP_FREE (bb_info->kill);
690 BITMAP_FREE (bb_info->sparse_kill);
691 BITMAP_FREE (bb_info->gen);
692 BITMAP_FREE (bb_info->in);
693 BITMAP_FREE (bb_info->out);
/* All bb_infos came from this pool; freeing the pool reclaims them.  */
697 free_alloc_pool (dflow->block_pool);
/* Release the cached per-regno use-site bitmaps.  */
699 for (i = 0; i < problem_data->use_sites_size; i++)
701 bitmap bm = problem_data->use_sites[i];
706 free (problem_data->use_sites);
707 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
708 BITMAP_FREE (problem_data->dense_invalidated_by_call);
710 dflow->block_info_size = 0;
711 free (dflow->block_info);
712 free (dflow->problem_data);
717 /* Debugging info. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 726->729, 741->745, 746->751, and the closers): the return
   type, regno declaration, FOR_ALL_BB loop header, a NULL check on
   bb_info and the braces are missing as shown.  */
720 df_ru_dump (struct dataflow *dflow, FILE *file)
723 struct df *df = dflow->df;
724 struct df_ru_problem_data *problem_data =
725 (struct df_ru_problem_data *) dflow->problem_data;
726 unsigned int m = max_reg_num ();
729 fprintf (file, "Reaching uses:\n");
731 fprintf (file, " sparse invalidated \t");
732 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
733 fprintf (file, " dense invalidated \t");
734 dump_bitmap (file, problem_data->dense_invalidated_by_call);
/* Print each regno's use-id range as "regno[begin,n_refs]".  */
736 for (regno = 0; regno < m; regno++)
737 if (DF_REG_USE_GET (df, regno)->n_refs)
738 fprintf (file, "%d[%d,%d] ", regno,
739 DF_REG_USE_GET (df, regno)->begin,
740 DF_REG_USE_GET (df, regno)->n_refs);
741 fprintf (file, "\n");
/* Per-basic-block dump of the four RU sets.  */
745 struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
746 df_print_bb_index (bb, file);
751 fprintf (file, " in \t");
752 dump_bitmap (file, bb_info->in);
753 fprintf (file, " gen \t");
754 dump_bitmap (file, bb_info->gen);
755 fprintf (file, " kill\t");
756 dump_bitmap (file, bb_info->kill);
757 fprintf (file, " out \t");
758 dump_bitmap (file, bb_info->out);
762 /* All of the information associated with every instance of the problem. */
764 static struct df_problem problem_RU =
766 DF_RU, /* Problem id. */
767 DF_BACKWARD, /* Direction. */
768 df_ru_alloc, /* Allocate the problem specific data. */
769 df_ru_free_bb_info, /* Free basic block info. */
770 df_ru_local_compute, /* Local compute function. */
771 df_ru_init_solution, /* Init the solution specific data. */
772 df_iterative_dataflow, /* Iterative solver. */
773 NULL, /* Confluence operator 0. */
774 df_ru_confluence_n, /* Confluence operator n. */
775 df_ru_transfer_function, /* Transfer function. */
776 NULL, /* Finalize function. */
777 df_ru_free, /* Free all of the problem information. */
778 df_ru_dump, /* Debugging. */
779 NULL /* Dependent problem. */
784 /* Create a new DATAFLOW instance and add it to an existing instance
785 of DF. The returned structure is what is used to get at the
789 df_ru_add_problem (struct df *df)
791 return df_add_problem (df, &problem_RU);
795 /*----------------------------------------------------------------------------
798 Find the locations in the function where each definition site for a
800 ----------------------------------------------------------------------------*/
802 struct df_rd_problem_data
804 bitmap *def_sites; /* Bitmap of defs for each pseudo. */
805 unsigned int def_sites_size; /* Size of def_sites. */
806 /* The set of defs to regs invalidated by call. */
807 bitmap sparse_invalidated_by_call;
808 /* The set of defs to regs invalidate by call for ru. */
809 bitmap dense_invalidated_by_call;
812 /* Get basic block info. */
814 struct df_rd_bb_info *
815 df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
817 return (struct df_rd_bb_info *) dflow->block_info[index];
821 /* Set basic block info. */
824 df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
825 struct df_rd_bb_info *bb_info)
827 dflow->block_info[index] = bb_info;
831 /* Free basic block info. */
834 df_rd_free_bb_info (struct dataflow *dflow, void *vbb_info)
836 struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
839 BITMAP_FREE (bb_info->kill);
840 BITMAP_FREE (bb_info->sparse_kill);
841 BITMAP_FREE (bb_info->gen);
842 BITMAP_FREE (bb_info->in);
843 BITMAP_FREE (bb_info->out);
844 pool_free (dflow->block_pool, bb_info);
849 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
850 not touched unless the block is new. */
853 df_rd_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
855 unsigned int bb_index;
857 unsigned int reg_size = max_reg_num ();
859 if (! dflow->block_pool)
860 dflow->block_pool = create_alloc_pool ("df_rd_block pool",
861 sizeof (struct df_rd_bb_info), 50);
863 if (dflow->problem_data)
866 struct df_rd_problem_data *problem_data =
867 (struct df_rd_problem_data *) dflow->problem_data;
869 for (i = 0; i < problem_data->def_sites_size; i++)
871 bitmap bm = problem_data->def_sites[i];
875 problem_data->def_sites[i] = NULL;
879 if (problem_data->def_sites_size > reg_size)
881 problem_data->def_sites
882 = xrealloc (problem_data->def_sites, reg_size *sizeof (bitmap));
883 memset (problem_data->def_sites, 0,
884 (reg_size - problem_data->def_sites_size) *sizeof (bitmap));
885 problem_data->def_sites_size = reg_size;
888 bitmap_clear (problem_data->sparse_invalidated_by_call);
889 bitmap_clear (problem_data->dense_invalidated_by_call);
893 struct df_rd_problem_data *problem_data =
894 xmalloc (sizeof (struct df_rd_problem_data));
895 dflow->problem_data = problem_data;
897 problem_data->def_sites = xcalloc (reg_size, sizeof (bitmap));
898 problem_data->def_sites_size = reg_size;
899 problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
900 problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
903 df_grow_bb_info (dflow);
905 /* Because of the clustering of all def sites for the same pseudo,
906 we have to process all of the blocks before doing the
909 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
911 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
914 bitmap_clear (bb_info->kill);
915 bitmap_clear (bb_info->sparse_kill);
916 bitmap_clear (bb_info->gen);
920 bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
921 df_rd_set_bb_info (dflow, bb_index, bb_info);
922 bb_info->kill = BITMAP_ALLOC (NULL);
923 bb_info->sparse_kill = BITMAP_ALLOC (NULL);
924 bb_info->gen = BITMAP_ALLOC (NULL);
925 bb_info->in = BITMAP_ALLOC (NULL);
926 bb_info->out = BITMAP_ALLOC (NULL);
932 /* Process a list of DEFs for df_rd_bb_local_compute. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 936->939, 957->961, 964->966, 967->971): the DEF-list
   parameter, the walk over that list, braces and the else of the
   sparse/dense split are missing as shown.  */
935 df_rd_bb_local_compute_process_def (struct dataflow *dflow,
936 struct df_rd_bb_info *bb_info,
939 struct df *df = dflow->df;
/* Per-def bookkeeping: the contiguous range of def ids for REGNO.  */
942 unsigned int regno = DF_REF_REGNO (def);
943 unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
944 unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;
946 /* Only the last def(s) for a regno in the block has any
948 if (!bitmap_bit_p (seen_in_block, regno))
950 /* The first def for regno in insn gets to knock out the
951 defs from other instructions. */
952 if (!bitmap_bit_p (seen_in_insn, regno))
/* Sparse path: many defs of REGNO, so record the kill by regno and
   clear the whole contiguous def-id range from gen.  */
954 if (n_defs > DF_SPARSE_THRESHOLD)
956 bitmap_set_bit (bb_info->sparse_kill, regno);
957 bitmap_clear_range (bb_info->gen, begin, n_defs);
/* Dense path: kill/clear the cached bitmap of individual def ids.  */
961 struct df_rd_problem_data *problem_data =
962 (struct df_rd_problem_data *) dflow->problem_data;
964 df_ref_bitmap (problem_data->def_sites, regno,
966 bitmap_ior_into (bb_info->kill, defs);
967 bitmap_and_compl_into (bb_info->gen, defs);
/* Remember REGNO was defined in this insn; clobbers do not gen.  */
971 bitmap_set_bit (seen_in_insn, regno);
972 /* All defs for regno in the instruction may be put into
974 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
975 bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
981 /* Compute local reaching def info for basic block BB. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 988->991, 996->1001, 1008->1010): the return type, braces,
   an insn predicate and the trailing comment text are missing as
   shown.  RD walks the insns in reverse since it is the last defs of
   a regno in the block that reach out of it.  */
984 df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
986 struct df *df = dflow->df;
987 basic_block bb = BASIC_BLOCK (bb_index);
988 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
991 bitmap_clear (seen_in_block);
992 bitmap_clear (seen_in_insn);
994 FOR_BB_INSNS_REVERSE (bb, insn)
996 unsigned int uid = INSN_UID (insn);
1001 df_rd_bb_local_compute_process_def (dflow, bb_info,
1002 DF_INSN_UID_GET (df, uid)->defs);
1004 /* This complex dance with the two bitmaps is required because
1005 instructions can assign twice to the same pseudo. This
1006 generally happens with calls that will have one def for the
1007 result and another def for the clobber. If only one vector
1008 is used and the clobber goes first, the result will be
1010 bitmap_ior_into (seen_in_block, seen_in_insn);
1011 bitmap_clear (seen_in_insn);
1014 /* Process the artificial defs last since we are going backwards
1015 thur the block and these are logically at the start. */
1016 df_rd_bb_local_compute_process_def (dflow, bb_info,
1017 df_get_artificial_defs (df, bb_index));
1021 /* Compute local reaching def info for each basic block within BLOCKS. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 1026->1028, 1035->1039, 1053->1057): the all_blocks
   parameter, braces, iterator declarations and the else branch of the
   sparse test are missing as shown.  */
1024 df_rd_local_compute (struct dataflow *dflow,
1026 bitmap rescan_blocks ATTRIBUTE_UNUSED)
1028 struct df *df = dflow->df;
1029 unsigned int bb_index;
1032 struct df_rd_problem_data *problem_data =
1033 (struct df_rd_problem_data *) dflow->problem_data;
1034 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1035 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
/* Def refs must be organized (clustered per regno) before the local
   compute can index them by [begin, begin+n_refs).  */
1039 if (!df->def_info.refs_organized)
1040 df_reorganize_refs (&df->def_info);
1042 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1044 df_rd_bb_local_compute (dflow, bb_index);
1047 /* Set up the knockout bit vectors to be applied across EH_EDGES. */
1048 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
1050 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
/* Large ref sets go in the sparse (per-regno) set; small ones are
   expanded into individual def ids in the dense set.  */
1051 if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
1053 bitmap_set_bit (sparse_invalidated, regno);
1057 bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
1058 reg_info->begin, reg_info->n_refs);
1059 bitmap_ior_into (dense_invalidated, defs);
1066 /* Initialize the solution bit vectors for problem. */
1069 df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
1071 unsigned int bb_index;
1074 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1076 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1078 bitmap_copy (bb_info->out, bb_info->gen);
1079 bitmap_clear (bb_info->in);
1083 /* In of target gets or of out of source. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 1089->1091, 1097->1100, 1105->1109): the return type, braces,
   iterator declarations and the else branch (the non-EH case at 1109)
   are missing as shown.  RD is forward, so confluence flows the
   predecessor's OUT into the successor's IN.  */
1086 df_rd_confluence_n (struct dataflow *dflow, edge e)
1088 bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
1089 bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;
1091 if (e->flags & EDGE_EH)
1093 struct df_rd_problem_data *problem_data =
1094 (struct df_rd_problem_data *) dflow->problem_data;
1095 bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
1096 bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
1097 struct df *df = dflow->df;
/* Across an EH edge, knock out defs invalidated by call: the dense
   set by direct set subtraction, the sparse set by clearing each
   regno's contiguous def-id range.  */
1100 bitmap_ior_and_compl_into (op1, op2, dense_invalidated);
1101 EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
1103 bitmap_clear_range (op1,
1104 DF_REG_DEF_GET (df, regno)->begin,
1105 DF_REG_DEF_GET (df, regno)->n_refs);
/* Non-EH edge: plain union.  */
1109 bitmap_ior_into (op1, op2);
1113 /* Transfer function. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 1125->1127, 1128->1131, 1139->1141, and cuts off after 1143):
   the return type (bool), braces, iterator declarations and the tail
   that installs TMP as the new OUT / frees it / returns CHANGED are
   missing as shown.  */
1116 df_rd_transfer_function (struct dataflow *dflow, int bb_index)
1118 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
1121 bitmap in = bb_info->in;
1122 bitmap out = bb_info->out;
1123 bitmap gen = bb_info->gen;
1124 bitmap kill = bb_info->kill;
1125 bitmap sparse_kill = bb_info->sparse_kill;
/* Fast path (no sparse kills): out = gen U (in - kill), and the
   helper reports whether OUT changed.  */
1127 if (bitmap_empty_p (sparse_kill))
1128 return bitmap_ior_and_compl (out, gen, in, kill);
1131 struct df *df = dflow->df;
1132 bool changed = false;
1133 bitmap tmp = BITMAP_ALLOC (NULL);
/* Slow path: start from IN, apply the sparse kills by clearing each
   regno's contiguous def-id range, then the dense kill and gen.  */
1134 bitmap_copy (tmp, in);
1135 EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
1137 bitmap_clear_range (tmp,
1138 DF_REG_DEF_GET (df, regno)->begin,
1139 DF_REG_DEF_GET (df, regno)->n_refs);
1141 bitmap_and_compl_into (tmp, kill);
1142 bitmap_ior_into (tmp, gen);
1143 changed = !bitmap_equal_p (tmp, out);
1156 /* Free all storage associated with the problem. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 1167->1170, 1174->1178, 1182->1187, and the closing braces):
   the return type, loop braces, NULL guards and the final frees of
   the dflow struct itself are missing as shown.  */
1159 df_rd_free (struct dataflow *dflow)
1162 struct df_rd_problem_data *problem_data =
1163 (struct df_rd_problem_data *) dflow->problem_data;
/* Release every allocated per-block info.  */
1165 for (i = 0; i < dflow->block_info_size; i++)
1167 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
1170 BITMAP_FREE (bb_info->kill);
1171 BITMAP_FREE (bb_info->sparse_kill);
1172 BITMAP_FREE (bb_info->gen);
1173 BITMAP_FREE (bb_info->in);
1174 BITMAP_FREE (bb_info->out);
/* All bb_infos came from this pool; freeing the pool reclaims them.  */
1178 free_alloc_pool (dflow->block_pool);
/* Release the cached per-regno def-site bitmaps.  */
1180 for (i = 0; i < problem_data->def_sites_size; i++)
1182 bitmap bm = problem_data->def_sites[i];
1187 free (problem_data->def_sites);
1188 BITMAP_FREE (problem_data->sparse_invalidated_by_call);
1189 BITMAP_FREE (problem_data->dense_invalidated_by_call);
1191 dflow->block_info_size = 0;
1192 free (dflow->block_info);
1193 free (dflow->problem_data);
1198 /* Debugging info. */
/* NOTE(review): the listing elides lines here (embedded numbering
   jumps 1207->1210, 1222->1226, 1227->1232, and the closers): the
   return type, regno declaration, FOR_ALL_BB loop header, a NULL
   check on bb_info and the braces are missing as shown.  */
1201 df_rd_dump (struct dataflow *dflow, FILE *file)
1203 struct df *df = dflow->df;
1205 struct df_rd_problem_data *problem_data =
1206 (struct df_rd_problem_data *) dflow->problem_data;
1207 unsigned int m = max_reg_num ();
1210 fprintf (file, "Reaching defs:\n\n");
1212 fprintf (file, " sparse invalidated \t");
1213 dump_bitmap (file, problem_data->sparse_invalidated_by_call);
1214 fprintf (file, " dense invalidated \t");
1215 dump_bitmap (file, problem_data->dense_invalidated_by_call);
/* Print each regno's def-id range as "regno[begin,n_refs]".  */
1217 for (regno = 0; regno < m; regno++)
1218 if (DF_REG_DEF_GET (df, regno)->n_refs)
1219 fprintf (file, "%d[%d,%d] ", regno,
1220 DF_REG_DEF_GET (df, regno)->begin,
1221 DF_REG_DEF_GET (df, regno)->n_refs);
1222 fprintf (file, "\n");
/* Per-basic-block dump of the four RD sets with their cardinalities.  */
1226 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
1227 df_print_bb_index (bb, file);
1232 fprintf (file, " in\t(%d)\n", (int) bitmap_count_bits (bb_info->in));
1233 dump_bitmap (file, bb_info->in);
1234 fprintf (file, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
1235 dump_bitmap (file, bb_info->gen);
1236 fprintf (file, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
1237 dump_bitmap (file, bb_info->kill);
1238 fprintf (file, " out\t(%d)\n", (int) bitmap_count_bits (bb_info->out));
1239 dump_bitmap (file, bb_info->out);
1243 /* All of the information associated with every instance of the problem. */
1245 static struct df_problem problem_RD =
1247 DF_RD, /* Problem id. */
1248 DF_FORWARD, /* Direction. */
1249 df_rd_alloc, /* Allocate the problem specific data. */
1250 df_rd_free_bb_info, /* Free basic block info. */
1251 df_rd_local_compute, /* Local compute function. */
1252 df_rd_init_solution, /* Init the solution specific data. */
1253 df_iterative_dataflow, /* Iterative solver. */
1254 NULL, /* Confluence operator 0. */
1255 df_rd_confluence_n, /* Confluence operator n. */
1256 df_rd_transfer_function, /* Transfer function. */
1257 NULL, /* Finalize function. */
1258 df_rd_free, /* Free all of the problem information. */
1259 df_rd_dump, /* Debugging. */
1260 NULL /* Dependent problem. */
1265 /* Create a new DATAFLOW instance and add it to an existing instance
1266 of DF. The returned structure is what is used to get at the
1270 df_rd_add_problem (struct df *df)
1272 return df_add_problem (df, &problem_RD);
1277 /*----------------------------------------------------------------------------
1280 Find the locations in the function where any use of a pseudo can reach
1281 in the backwards direction.
1282 ----------------------------------------------------------------------------*/
1284 /* Get basic block info. */
1286 struct df_lr_bb_info *
1287 df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
1289 return (struct df_lr_bb_info *) dflow->block_info[index];
1293 /* Set basic block info. */
1296 df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
1297 struct df_lr_bb_info *bb_info)
1299 dflow->block_info[index] = bb_info;
1303 /* Free basic block info. */
1306 df_lr_free_bb_info (struct dataflow *dflow, void *vbb_info)
1308 struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
1311 BITMAP_FREE (bb_info->use);
1312 BITMAP_FREE (bb_info->def);
1313 BITMAP_FREE (bb_info->in);
1314 BITMAP_FREE (bb_info->out);
1315 pool_free (dflow->block_pool, bb_info);
1320 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1321 not touched unless the block is new. */
1324 df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1326 unsigned int bb_index;
1329 if (! dflow->block_pool)
1330 dflow->block_pool = create_alloc_pool ("df_lr_block pool",
1331 sizeof (struct df_lr_bb_info), 50);
1333 df_grow_bb_info (dflow);
1335 /* Because of the clustering of all def sites for the same pseudo,
1336 we have to process all of the blocks before doing the
1339 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1341 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1344 bitmap_clear (bb_info->def);
1345 bitmap_clear (bb_info->use);
1349 bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
1350 df_lr_set_bb_info (dflow, bb_index, bb_info);
1351 bb_info->use = BITMAP_ALLOC (NULL);
1352 bb_info->def = BITMAP_ALLOC (NULL);
1353 bb_info->in = BITMAP_ALLOC (NULL);
1354 bb_info->out = BITMAP_ALLOC (NULL);
1360 /* Compute local live register info for basic block BB. */
1363 df_lr_bb_local_compute (struct dataflow *dflow,
1364 struct df *df, unsigned int bb_index)
1366 basic_block bb = BASIC_BLOCK (bb_index);
1367 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1372 /* Process the hardware registers that are always live. */
1373 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1374 /* Add use to set of uses in this BB. */
1375 if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
1376 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1378 FOR_BB_INSNS_REVERSE (bb, insn)
1380 unsigned int uid = INSN_UID (insn);
1382 if (! INSN_P (insn))
1387 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1389 unsigned int dregno = DF_REF_REGNO (def);
1391 if (dregno >= FIRST_PSEUDO_REGISTER
1392 || !(SIBLING_CALL_P (insn)
1393 && bitmap_bit_p (df->exit_block_uses, dregno)
1394 && !refers_to_regno_p (dregno, dregno+1,
1395 current_function_return_rtx,
1398 /* Add def to set of defs in this BB. */
1399 bitmap_set_bit (bb_info->def, dregno);
1400 bitmap_clear_bit (bb_info->use, dregno);
1406 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1408 unsigned int dregno = DF_REF_REGNO (def);
1410 if (DF_INSN_CONTAINS_ASM (df, insn)
1411 && dregno < FIRST_PSEUDO_REGISTER)
1415 dregno + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
1416 for (i = dregno; i <= end; ++i)
1417 regs_asm_clobbered[i] = 1;
1419 /* Add def to set of defs in this BB. */
1420 bitmap_set_bit (bb_info->def, dregno);
1421 bitmap_clear_bit (bb_info->use, dregno);
1425 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
1426 /* Add use to set of uses in this BB. */
1427 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1430 /* Process the registers set in an exception handler. */
1431 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1433 unsigned int dregno = DF_REF_REGNO (def);
1434 bitmap_set_bit (bb_info->def, dregno);
1435 bitmap_clear_bit (bb_info->use, dregno);
1439 /* Process the uses that are live into an exception handler. */
1440 for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
1441 /* Add use to set of uses in this BB. */
1442 if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
1443 bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
1447 /* Compute local live register info for each basic block within BLOCKS. */
1450 df_lr_local_compute (struct dataflow *dflow,
1452 bitmap rescan_blocks)
1454 struct df *df = dflow->df;
1455 unsigned int bb_index;
1458 /* Assume that the stack pointer is unchanging if alloca hasn't
1460 if (bitmap_equal_p (all_blocks, rescan_blocks))
1461 memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
1463 bitmap_clear (df->hardware_regs_used);
1465 /* The all-important stack pointer must always be live. */
1466 bitmap_set_bit (df->hardware_regs_used, STACK_POINTER_REGNUM);
1468 /* Before reload, there are a few registers that must be forced
1469 live everywhere -- which might not already be the case for
1470 blocks within infinite loops. */
1471 if (! reload_completed)
1473 /* Any reference to any pseudo before reload is a potential
1474 reference of the frame pointer. */
1475 bitmap_set_bit (df->hardware_regs_used, FRAME_POINTER_REGNUM);
1477 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1478 /* Pseudos with argument area equivalences may require
1479 reloading via the argument pointer. */
1480 if (fixed_regs[ARG_POINTER_REGNUM])
1481 bitmap_set_bit (df->hardware_regs_used, ARG_POINTER_REGNUM);
1484 /* Any constant, or pseudo with constant equivalences, may
1485 require reloading from memory using the pic register. */
1486 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1487 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1488 bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
1491 if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
1493 /* The exit block is special for this problem and its bits are
1494 computed from thin air. */
1495 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
1496 bitmap_copy (bb_info->use, df->exit_block_uses);
1499 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1501 if (bb_index == EXIT_BLOCK)
1503 df_lr_bb_local_compute (dflow, df, bb_index);
1508 /* Initialize the solution vectors. */
1511 df_lr_init (struct dataflow *dflow, bitmap all_blocks)
1513 unsigned int bb_index;
1516 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1518 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1519 bitmap_copy (bb_info->in, bb_info->use);
1520 bitmap_clear (bb_info->out);
1525 /* Confluence function that processes infinite loops. This might be a
1526 noreturn function that throws. And even if it isn't, getting the
1527 unwind info right helps debugging. */
1529 df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
1531 struct df *df = dflow->df;
1533 bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
1534 if (bb != EXIT_BLOCK_PTR)
1535 bitmap_copy (op1, df->hardware_regs_used);
1539 /* Confluence function that ignores fake edges. */
1541 df_lr_confluence_n (struct dataflow *dflow, edge e)
1543 bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
1544 bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
1546 /* Call-clobbered registers die across exception and call edges. */
1547 /* ??? Abnormal call edges ignored for the moment, as this gets
1548 confused by sibling call edges, which crashes reg-stack. */
1549 if (e->flags & EDGE_EH)
1550 bitmap_ior_and_compl_into (op1, op2, df_invalidated_by_call);
1552 bitmap_ior_into (op1, op2);
1554 bitmap_ior_into (op1, dflow->df->hardware_regs_used);
1558 /* Transfer function. */
1560 df_lr_transfer_function (struct dataflow *dflow, int bb_index)
1562 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
1563 bitmap in = bb_info->in;
1564 bitmap out = bb_info->out;
1565 bitmap use = bb_info->use;
1566 bitmap def = bb_info->def;
1568 return bitmap_ior_and_compl (in, use, out, def);
1572 /* Free all storage associated with the problem. */
1575 df_lr_free (struct dataflow *dflow)
1578 for (i = 0; i < dflow->block_info_size; i++)
1580 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
1583 BITMAP_FREE (bb_info->use);
1584 BITMAP_FREE (bb_info->def);
1585 BITMAP_FREE (bb_info->in);
1586 BITMAP_FREE (bb_info->out);
1589 free_alloc_pool (dflow->block_pool);
1591 dflow->block_info_size = 0;
1592 free (dflow->block_info);
1597 /* Debugging info. */
1600 df_lr_dump (struct dataflow *dflow, FILE *file)
1604 fprintf (file, "Live Registers:\n");
1607 struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
1608 df_print_bb_index (bb, file);
1613 fprintf (file, " in \t");
1614 dump_bitmap (file, bb_info->in);
1615 fprintf (file, " use \t");
1616 dump_bitmap (file, bb_info->use);
1617 fprintf (file, " def \t");
1618 dump_bitmap (file, bb_info->def);
1619 fprintf (file, " out \t");
1620 dump_bitmap (file, bb_info->out);
1624 /* All of the information associated with every instance of the problem. */
1626 static struct df_problem problem_LR =
1628 DF_LR, /* Problem id. */
1629 DF_BACKWARD, /* Direction. */
1630 df_lr_alloc, /* Allocate the problem specific data. */
1631 df_lr_free_bb_info, /* Free basic block info. */
1632 df_lr_local_compute, /* Local compute function. */
1633 df_lr_init, /* Init the solution specific data. */
1634 df_iterative_dataflow, /* Iterative solver. */
1635 df_lr_confluence_0, /* Confluence operator 0. */
1636 df_lr_confluence_n, /* Confluence operator n. */
1637 df_lr_transfer_function, /* Transfer function. */
1638 NULL, /* Finalize function. */
1639 df_lr_free, /* Free all of the problem information. */
1640 df_lr_dump, /* Debugging. */
1641 NULL /* Dependent problem. */
1645 /* Create a new DATAFLOW instance and add it to an existing instance
1646 of DF. The returned structure is what is used to get at the
1650 df_lr_add_problem (struct df *df)
1652 return df_add_problem (df, &problem_LR);
1657 /*----------------------------------------------------------------------------
1658 UNINITIALIZED REGISTERS
1660 Find the set of uses for registers that are reachable from the entry
1661 block without passing thru a definition.
1662 ----------------------------------------------------------------------------*/
1664 /* Get basic block info. */
1666 struct df_ur_bb_info *
1667 df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
1669 return (struct df_ur_bb_info *) dflow->block_info[index];
1673 /* Set basic block info. */
1676 df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
1677 struct df_ur_bb_info *bb_info)
1679 dflow->block_info[index] = bb_info;
1683 /* Free basic block info. */
1686 df_ur_free_bb_info (struct dataflow *dflow, void *vbb_info)
1688 struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
1691 BITMAP_FREE (bb_info->gen);
1692 BITMAP_FREE (bb_info->kill);
1693 BITMAP_FREE (bb_info->in);
1694 BITMAP_FREE (bb_info->out);
1695 pool_free (dflow->block_pool, bb_info);
1700 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
1701 not touched unless the block is new. */
1704 df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
1706 unsigned int bb_index;
1709 if (! dflow->block_pool)
1710 dflow->block_pool = create_alloc_pool ("df_ur_block pool",
1711 sizeof (struct df_ur_bb_info), 100);
1713 df_grow_bb_info (dflow);
1715 /* Because of the clustering of all def sites for the same pseudo,
1716 we have to process all of the blocks before doing the
1719 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
1721 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1724 bitmap_clear (bb_info->kill);
1725 bitmap_clear (bb_info->gen);
1729 bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
1730 df_ur_set_bb_info (dflow, bb_index, bb_info);
1731 bb_info->kill = BITMAP_ALLOC (NULL);
1732 bb_info->gen = BITMAP_ALLOC (NULL);
1733 bb_info->in = BITMAP_ALLOC (NULL);
1734 bb_info->out = BITMAP_ALLOC (NULL);
1740 /* Compute local uninitialized register info for basic block BB. */
1743 df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
1745 struct df *df = dflow->df;
1746 basic_block bb = BASIC_BLOCK (bb_index);
1747 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1751 bitmap_clear (seen_in_block);
1752 bitmap_clear (seen_in_insn);
1754 FOR_BB_INSNS_REVERSE (bb, insn)
1756 unsigned int uid = INSN_UID (insn);
1760 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
1762 unsigned int regno = DF_REF_REGNO (def);
1763 /* Only the last def counts. */
1764 if (!bitmap_bit_p (seen_in_block, regno))
1766 bitmap_set_bit (seen_in_insn, regno);
1768 if (DF_REF_FLAGS (def) & DF_REF_CLOBBER)
1769 bitmap_set_bit (bb_info->kill, regno);
1771 bitmap_set_bit (bb_info->gen, regno);
1774 bitmap_ior_into (seen_in_block, seen_in_insn);
1775 bitmap_clear (seen_in_insn);
1778 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
1780 unsigned int regno = DF_REF_REGNO (def);
1781 if (!bitmap_bit_p (seen_in_block, regno))
1783 bitmap_set_bit (seen_in_block, regno);
1784 bitmap_set_bit (bb_info->gen, regno);
1790 /* Compute local uninitialized register info. */
1793 df_ur_local_compute (struct dataflow *dflow,
1794 bitmap all_blocks ATTRIBUTE_UNUSED,
1795 bitmap rescan_blocks)
1797 unsigned int bb_index;
1802 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
1804 df_ur_bb_local_compute (dflow, bb_index);
1811 /* Initialize the solution vectors. */
1814 df_ur_init (struct dataflow *dflow, bitmap all_blocks)
1816 unsigned int bb_index;
1819 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1821 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1823 bitmap_copy (bb_info->out, bb_info->gen);
1824 bitmap_clear (bb_info->in);
1829 /* Or in the stack regs, hard regs and early clobber regs into the the
1830 ur_in sets of all of the blocks. */
1833 df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
1835 struct df *df = dflow->df;
1836 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
1837 bitmap tmp = BITMAP_ALLOC (NULL);
1839 unsigned int bb_index;
1841 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
1843 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1844 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
1846 bitmap_ior_into (bb_info->in, df_all_hard_regs);
1847 bitmap_ior_into (bb_info->out, df_all_hard_regs);
1849 /* No register may reach a location where it is not used. Thus
1850 we trim the rr result to the places where it is used. */
1851 bitmap_and_into (bb_info->in, bb_lr_info->in);
1852 bitmap_and_into (bb_info->out, bb_lr_info->out);
1855 /* Hard registers may still stick in the ur_out set, but not
1856 be in the ur_in set, if their only mention was in a call
1857 in this block. This is because a call kills in the lr
1858 problem but does not kill in the ur problem. To clean
1859 this up, we execute the transfer function on the lr_in
1860 set and then use that to knock bits out of ur_out. */
1861 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
1863 bitmap_and_into (bb_info->out, tmp);
1871 /* Confluence function that ignores fake edges. */
1874 df_ur_confluence_n (struct dataflow *dflow, edge e)
1876 bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
1877 bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
1879 if (e->flags & EDGE_FAKE)
1882 bitmap_ior_into (op1, op2);
1886 /* Transfer function. */
1889 df_ur_transfer_function (struct dataflow *dflow, int bb_index)
1891 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
1892 bitmap in = bb_info->in;
1893 bitmap out = bb_info->out;
1894 bitmap gen = bb_info->gen;
1895 bitmap kill = bb_info->kill;
1897 return bitmap_ior_and_compl (out, gen, in, kill);
1901 /* Free all storage associated with the problem. */
1904 df_ur_free (struct dataflow *dflow)
1908 for (i = 0; i < dflow->block_info_size; i++)
1910 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
1913 BITMAP_FREE (bb_info->gen);
1914 BITMAP_FREE (bb_info->kill);
1915 BITMAP_FREE (bb_info->in);
1916 BITMAP_FREE (bb_info->out);
1920 free_alloc_pool (dflow->block_pool);
1921 dflow->block_info_size = 0;
1922 free (dflow->block_info);
1927 /* Debugging info. */
1930 df_ur_dump (struct dataflow *dflow, FILE *file)
1934 fprintf (file, "Undefined regs:\n");
1938 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
1939 df_print_bb_index (bb, file);
1944 fprintf (file, " in \t");
1945 dump_bitmap (file, bb_info->in);
1946 fprintf (file, " gen \t");
1947 dump_bitmap (file, bb_info->gen);
1948 fprintf (file, " kill\t");
1949 dump_bitmap (file, bb_info->kill);
1950 fprintf (file, " out \t");
1951 dump_bitmap (file, bb_info->out);
1955 /* All of the information associated with every instance of the problem. */
1957 static struct df_problem problem_UR =
1959 DF_UR, /* Problem id. */
1960 DF_FORWARD, /* Direction. */
1961 df_ur_alloc, /* Allocate the problem specific data. */
1962 df_ur_free_bb_info, /* Free basic block info. */
1963 df_ur_local_compute, /* Local compute function. */
1964 df_ur_init, /* Init the solution specific data. */
1965 df_iterative_dataflow, /* Iterative solver. */
1966 NULL, /* Confluence operator 0. */
1967 df_ur_confluence_n, /* Confluence operator n. */
1968 df_ur_transfer_function, /* Transfer function. */
1969 df_ur_local_finalize, /* Finalize function. */
1970 df_ur_free, /* Free all of the problem information. */
1971 df_ur_dump, /* Debugging. */
1972 &problem_LR /* Dependent problem. */
1976 /* Create a new DATAFLOW instance and add it to an existing instance
1977 of DF. The returned structure is what is used to get at the
1981 df_ur_add_problem (struct df *df)
1983 return df_add_problem (df, &problem_UR);
1988 /*----------------------------------------------------------------------------
1989 UNINITIALIZED REGISTERS WITH EARLYCLOBBER
1991 Find the set of uses for registers that are reachable from the entry
1992 block without passing thru a definition.
1994 This is a variant of the UR problem above that has a lot of special
1995 features just for the register allocation phase.
1996 ----------------------------------------------------------------------------*/
1998 struct df_urec_problem_data
2000 bool earlyclobbers_found; /* True if any instruction contains an
2003 bitmap stack_regs; /* Registers that may be allocated to a STACK_REGS. */
2008 /* Get basic block info. */
2010 struct df_urec_bb_info *
2011 df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
2013 return (struct df_urec_bb_info *) dflow->block_info[index];
2017 /* Set basic block info. */
2020 df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
2021 struct df_urec_bb_info *bb_info)
2023 dflow->block_info[index] = bb_info;
2027 /* Free basic block info. */
2030 df_urec_free_bb_info (struct dataflow *dflow, void *vbb_info)
2032 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
2035 BITMAP_FREE (bb_info->gen);
2036 BITMAP_FREE (bb_info->kill);
2037 BITMAP_FREE (bb_info->in);
2038 BITMAP_FREE (bb_info->out);
2039 BITMAP_FREE (bb_info->earlyclobber);
2040 pool_free (dflow->block_pool, bb_info);
2045 /* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
2046 not touched unless the block is new. */
2049 df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
2051 unsigned int bb_index;
2053 struct df_urec_problem_data *problem_data =
2054 (struct df_urec_problem_data *) dflow->problem_data;
2056 if (! dflow->block_pool)
2057 dflow->block_pool = create_alloc_pool ("df_urec_block pool",
2058 sizeof (struct df_urec_bb_info), 50);
2060 if (!dflow->problem_data)
2062 problem_data = xmalloc (sizeof (struct df_urec_problem_data));
2063 dflow->problem_data = problem_data;
2065 problem_data->earlyclobbers_found = false;
2067 df_grow_bb_info (dflow);
2069 /* Because of the clustering of all def sites for the same pseudo,
2070 we have to process all of the blocks before doing the
2073 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
2075 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2078 bitmap_clear (bb_info->kill);
2079 bitmap_clear (bb_info->gen);
2080 bitmap_clear (bb_info->earlyclobber);
2084 bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
2085 df_urec_set_bb_info (dflow, bb_index, bb_info);
2086 bb_info->kill = BITMAP_ALLOC (NULL);
2087 bb_info->gen = BITMAP_ALLOC (NULL);
2088 bb_info->in = BITMAP_ALLOC (NULL);
2089 bb_info->out = BITMAP_ALLOC (NULL);
2090 bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2096 /* The function modifies local info for register REG being changed in
2097 SETTER. DATA is used to pass the current basic block info. */
2100 df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
2105 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2107 if (GET_CODE (reg) == SUBREG)
2108 reg = SUBREG_REG (reg);
2114 endregno = regno = REGNO (reg);
2115 if (regno < FIRST_PSEUDO_REGISTER)
2117 endregno +=hard_regno_nregs[regno][GET_MODE (reg)];
2119 for (i = regno; i < endregno; i++)
2121 bitmap_set_bit (bb_info->kill, i);
2123 if (GET_CODE (setter) != CLOBBER)
2124 bitmap_set_bit (bb_info->gen, i);
2126 bitmap_clear_bit (bb_info->gen, i);
2131 bitmap_set_bit (bb_info->kill, regno);
2133 if (GET_CODE (setter) != CLOBBER)
2134 bitmap_set_bit (bb_info->gen, regno);
2136 bitmap_clear_bit (bb_info->gen, regno);
2139 /* Classes of registers which could be early clobbered in the current
2143 DEF_VEC_ALLOC_I(int,heap);
2145 static VEC(int,heap) *earlyclobber_regclass;
2147 /* This function finds and stores register classes that could be early
2148 clobbered in INSN. If any earlyclobber classes are found, the function
2149 returns TRUE, in all other cases it returns FALSE. */
2152 df_urec_check_earlyclobber (rtx insn)
2157 extract_insn (insn);
2159 VEC_truncate (int, earlyclobber_regclass, 0);
2160 for (opno = 0; opno < recog_data.n_operands; opno++)
2165 enum reg_class class;
2166 const char *p = recog_data.constraints[opno];
2175 case '=': case '+': case '?':
2178 case 'm': case '<': case '>': case 'V': case 'o':
2179 case 'E': case 'F': case 'G': case 'H':
2180 case 's': case 'i': case 'n':
2181 case 'I': case 'J': case 'K': case 'L':
2182 case 'M': case 'N': case 'O': case 'P':
2184 case '0': case '1': case '2': case '3': case '4':
2185 case '5': case '6': case '7': case '8': case '9':
2186 /* These don't say anything we care about. */
2194 if (amp_p && class != NO_REGS)
2200 VEC_iterate (int, earlyclobber_regclass, i, rc);
2203 if (rc == (int) class)
2207 /* We use VEC_quick_push here because
2208 earlyclobber_regclass holds no more than
2209 N_REG_CLASSES elements. */
2210 VEC_quick_push (int, earlyclobber_regclass, (int) class);
2220 class = GENERAL_REGS;
2224 class = REG_CLASS_FROM_CONSTRAINT (c, p);
2229 p += CONSTRAINT_LEN (c, p);
2236 /* The function checks that pseudo-register *X has a class
2237 intersecting with the class of pseudo-register could be early
2238 clobbered in the same insn.
2240 This function is a no-op if earlyclobber_regclass is empty.
2242 Reload can assign the same hard register to uninitialized
2243 pseudo-register and early clobbered pseudo-register in an insn if
2244 the pseudo-register is used first time in given BB and not lived at
2245 the BB start. To prevent this we don't change life information for
2246 such pseudo-registers. */
2249 df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
2251 enum reg_class pref_class, alt_class;
2253 struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
2255 if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2260 if (bitmap_bit_p (bb_info->kill, regno)
2261 || bitmap_bit_p (bb_info->gen, regno))
2263 pref_class = reg_preferred_class (regno);
2264 alt_class = reg_alternate_class (regno);
2265 for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2267 if (reg_classes_intersect_p (rc, pref_class)
2269 && reg_classes_intersect_p (rc, alt_class)))
2271 bitmap_set_bit (bb_info->earlyclobber, regno);
2279 /* The function processes all pseudo-registers in *X with the aid of
2280 previous function. */
2283 df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2285 for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
2289 /* Compute local uninitialized register info for basic block BB. */
2292 df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
2294 struct df *df = dflow->df;
2295 basic_block bb = BASIC_BLOCK (bb_index);
2296 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2300 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2302 unsigned int regno = DF_REF_REGNO (def);
2303 bitmap_set_bit (bb_info->gen, regno);
2306 FOR_BB_INSNS (bb, insn)
2310 note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
2311 if (df_state & (DF_SCAN_GLOBAL | DF_SCAN_POST_ALLOC)
2312 && df_urec_check_earlyclobber (insn))
2314 struct df_urec_problem_data *problem_data =
2315 (struct df_urec_problem_data *) dflow->problem_data;
2316 problem_data->earlyclobbers_found = true;
2317 note_uses (&PATTERN (insn),
2318 df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
2325 /* Compute local uninitialized register info. */
2328 df_urec_local_compute (struct dataflow *dflow,
2329 bitmap all_blocks ATTRIBUTE_UNUSED,
2330 bitmap rescan_blocks)
2332 unsigned int bb_index;
2336 HARD_REG_SET zero, stack_hard_regs, used;
2337 struct df_urec_problem_data *problem_data =
2338 (struct df_urec_problem_data *) dflow->problem_data;
2340 /* Any register that MAY be allocated to a register stack (like the
2341 387) is treated poorly. Each such register is marked as being
2342 live everywhere. This keeps the register allocator and the
2343 subsequent passes from doing anything useful with these values.
2345 FIXME: This seems like an incredibly poor idea. */
2347 CLEAR_HARD_REG_SET (zero);
2348 CLEAR_HARD_REG_SET (stack_hard_regs);
2349 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
2350 SET_HARD_REG_BIT (stack_hard_regs, i);
2351 problem_data->stack_regs = BITMAP_ALLOC (NULL);
2352 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2354 COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
2355 IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
2356 AND_HARD_REG_SET (used, stack_hard_regs);
2357 GO_IF_HARD_REG_EQUAL (used, zero, skip);
2358 bitmap_set_bit (problem_data->stack_regs, i);
2364 /* We know that earlyclobber_regclass holds no more than
2365 N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
2366 earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2368 EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
2370 df_urec_bb_local_compute (dflow, bb_index);
2373 VEC_free (int, heap, earlyclobber_regclass);
2377 /* Initialize the solution vectors. */
2380 df_urec_init (struct dataflow *dflow, bitmap all_blocks)
2382 unsigned int bb_index;
2385 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2387 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2389 /* FIXME: This is a hack, it has been copied over from
2390 make_accurate_live_analysis by Vlad. Most likely it is necessary
2391 because the generation of gen and kill information for hardware
2392 registers in ur is a subset of what is really necessary and what
2393 is done for the lr problem. */
2395 /* Inside the register allocator, partial availability is only
2396 allowed for the psuedo registers. To implement this, the rr is
2397 initially iored with a mask ones for the hard registers and zeros
2398 for the pseudos before being iterated. This means that each
2399 hardware register will be live unless explicitly killed by some
2400 statement. Eventually most of these bit will die because the
2401 results of rr are anded with the results of lr before being used.
2402 Outside of register allocation, a more conservative strategy of
2403 completely ignoring the unintialized registers is imployed in the
2404 finalizer function. */
2405 if (df_state & DF_SCAN_GLOBAL)
2407 bitmap_ior (bb_info->out, bb_info->gen, df_all_hard_regs);
2408 bitmap_copy (bb_info->in, df_all_hard_regs);
2412 bitmap_copy (bb_info->out, bb_info->gen);
2413 bitmap_clear (bb_info->in);
2419 /* Or in the stack regs, hard regs and early clobber regs into the the
2420 ur_in sets of all of the blocks. */
2423 df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
2425 struct df *df = dflow->df;
2426 struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
2427 bitmap tmp = BITMAP_ALLOC (NULL);
2429 unsigned int bb_index;
2430 struct df_urec_problem_data *problem_data =
2431 (struct df_urec_problem_data *) dflow->problem_data;
2433 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2435 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2436 struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
2438 if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
2440 if (problem_data->earlyclobbers_found)
2441 bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
2444 /* We can not use the same stack register for uninitialized
2445 pseudo-register and another living pseudo-register
2446 because if the uninitialized pseudo-register dies,
2447 subsequent pass reg-stack will be confused (it will
2448 believe that the other register dies). */
2449 bitmap_ior_into (bb_info->in, problem_data->stack_regs);
2450 bitmap_ior_into (bb_info->out, problem_data->stack_regs);
2454 if (!(df_state & DF_SCAN_GLOBAL))
2456 bitmap_ior_into (bb_info->in, df_all_hard_regs);
2457 bitmap_ior_into (bb_info->out, df_all_hard_regs);
2460 /* No register may reach a location where it is not used. Thus
2461 we trim the rr result to the places where it is used. */
2462 bitmap_and_into (bb_info->in, bb_lr_info->in);
2463 bitmap_and_into (bb_info->out, bb_lr_info->out);
2466 /* Hard registers may still stick in the ur_out set, but not
2467 be in the ur_in set, if their only mention was in a call
2468 in this block. This is because a call kills in the lr
2469 problem but does not kill in the rr problem. To clean
2470 this up, we execute the transfer function on the lr_in
2471 set and then use that to knock bits out of ur_out. */
2472 bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
2474 bitmap_and_into (bb_info->out, tmp);
2479 BITMAP_FREE (problem_data->stack_regs);
2485 /* Confluence function that ignores fake edges. */
2488 df_urec_confluence_n (struct dataflow *dflow, edge e)
2490 bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
2491 bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
2493 if (e->flags & EDGE_FAKE)
2496 bitmap_ior_into (op1, op2);
2500 /* Transfer function. */
2503 df_urec_transfer_function (struct dataflow *dflow, int bb_index)
2505 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
2506 bitmap in = bb_info->in;
2507 bitmap out = bb_info->out;
2508 bitmap gen = bb_info->gen;
2509 bitmap kill = bb_info->kill;
2511 return bitmap_ior_and_compl (out, gen, in, kill);
2515 /* Free all storage associated with the problem. */
2518 df_urec_free (struct dataflow *dflow)
2522 for (i = 0; i < dflow->block_info_size; i++)
2524 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
2527 BITMAP_FREE (bb_info->gen);
2528 BITMAP_FREE (bb_info->kill);
2529 BITMAP_FREE (bb_info->in);
2530 BITMAP_FREE (bb_info->out);
2531 BITMAP_FREE (bb_info->earlyclobber);
2535 free_alloc_pool (dflow->block_pool);
2537 dflow->block_info_size = 0;
2538 free (dflow->block_info);
2539 free (dflow->problem_data);
2544 /* Debugging info. */
2547 df_urec_dump (struct dataflow *dflow, FILE *file)
2551 fprintf (file, "Undefined regs:\n");
2555 struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
2556 df_print_bb_index (bb, file);
2561 fprintf (file, " in \t");
2562 dump_bitmap (file, bb_info->in);
2563 fprintf (file, " gen \t");
2564 dump_bitmap (file, bb_info->gen);
2565 fprintf (file, " kill\t");
2566 dump_bitmap (file, bb_info->kill);
2567 fprintf (file, " ec\t");
2568 dump_bitmap (file, bb_info->earlyclobber);
2569 fprintf (file, " out \t");
2570 dump_bitmap (file, bb_info->out);
2574 /* All of the information associated with every instance of the problem. */
2576 static struct df_problem problem_UREC =
2578 DF_UREC, /* Problem id. */
2579 DF_FORWARD, /* Direction. */
2580 df_urec_alloc, /* Allocate the problem specific data. */
2581 df_urec_free_bb_info, /* Free basic block info. */
2582 df_urec_local_compute, /* Local compute function. */
2583 df_urec_init, /* Init the solution specific data. */
2584 df_iterative_dataflow, /* Iterative solver. */
2585 NULL, /* Confluence operator 0. */
2586 df_urec_confluence_n, /* Confluence operator n. */
2587 df_urec_transfer_function, /* Transfer function. */
2588 df_urec_local_finalize, /* Finalize function. */
2589 df_urec_free, /* Free all of the problem information. */
2590 df_urec_dump, /* Debugging. */
2591 &problem_LR /* Dependent problem. */
2595 /* Create a new DATAFLOW instance and add it to an existing instance
2596 of DF. The returned structure is what is used to get at the
2600 df_urec_add_problem (struct df *df)
2602 return df_add_problem (df, &problem_UREC);
/*----------------------------------------------------------------------------
   CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS

   Link either the defs to the uses and / or the uses to the defs.

   These problems are set up like the other dataflow problems so that
   they nicely fit into the framework.  They are much simpler and only
   involve a single traversal of instructions and an examination of
   the reaching defs information (the dependent problem).
----------------------------------------------------------------------------*/
/* Problem data for the chain problem.  FLAGS holds DF_DU_CHAIN and/or
   DF_UD_CHAIN, selecting which chain direction(s) to build.  */
struct df_chain_problem_data
{
  int flags;
};
2624 /* Create def-use or use-def chains. */
2627 df_chain_alloc (struct dataflow *dflow,
2628 bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2630 struct df *df = dflow->df;
2632 struct df_chain_problem_data *problem_data =
2633 (struct df_chain_problem_data *) dflow->problem_data;
2635 /* Wholesale destruction of the old chains. */
2636 if (dflow->block_pool)
2637 free_alloc_pool (dflow->block_pool);
2639 dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
2640 sizeof (struct df_link), 100);
2642 if (problem_data->flags & DF_DU_CHAIN)
2644 if (!df->def_info.refs_organized)
2645 df_reorganize_refs (&df->def_info);
2647 /* Clear out the pointers from the refs. */
2648 for (i = 0; i < DF_DEFS_SIZE (df); i++)
2650 struct df_ref *ref = df->def_info.refs[i];
2651 DF_REF_CHAIN (ref) = NULL;
2655 if (problem_data->flags & DF_UD_CHAIN)
2657 if (!df->use_info.refs_organized)
2658 df_reorganize_refs (&df->use_info);
2659 for (i = 0; i < DF_USES_SIZE (df); i++)
2661 struct df_ref *ref = df->use_info.refs[i];
2662 DF_REF_CHAIN (ref) = NULL;
2668 /* Create the chains for a list of USEs. */
2671 df_chain_create_bb_process_use (struct dataflow *dflow,
2672 struct df_chain_problem_data *problem_data,
2675 enum df_ref_flags top_flag)
2677 struct df *df = dflow->df;
2679 unsigned int def_index;
2683 /* Do not want to go thur this for an uninitialized var. */
2684 unsigned int uregno = DF_REF_REGNO (use);
2685 int count = DF_REG_DEF_GET (df, uregno)->n_refs;
2688 if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
2690 unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
2691 unsigned int last_index = first_index + count - 1;
2693 EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
2696 if (def_index > last_index)
2699 def = DF_DEFS_GET (df, def_index);
2700 if (problem_data->flags & DF_DU_CHAIN)
2701 df_chain_create (dflow, def, use);
2702 if (problem_data->flags & DF_UD_CHAIN)
2703 df_chain_create (dflow, use, def);
2707 use = use->next_ref;
/* Reset the storage pool that the def-use or use-def chains have been
   allocated in.  We do not need to re-adjust the pointers in the refs;
   these have already been cleaned out.  */
2715 /* Create chains from reaching defs bitmaps for basic block BB. */
2717 df_chain_create_bb (struct dataflow *dflow,
2718 struct dataflow *rd_dflow,
2719 unsigned int bb_index)
2721 basic_block bb = BASIC_BLOCK (bb_index);
2722 struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
2724 bitmap cpy = BITMAP_ALLOC (NULL);
2725 struct df *df = dflow->df;
2726 struct df_chain_problem_data *problem_data =
2727 (struct df_chain_problem_data *) dflow->problem_data;
2730 bitmap_copy (cpy, bb_info->in);
2732 /* Since we are going forwards, process the artificial uses first
2733 then the artificial defs second. */
2736 /* Create the chains for the artificial uses from the EH_USES at the
2737 beginning of the block. */
2738 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2739 df_get_artificial_uses (df, bb->index),
2743 for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
2745 unsigned int dregno = DF_REF_REGNO (def);
2746 bitmap_clear_range (cpy,
2747 DF_REG_DEF_GET (df, dregno)->begin,
2748 DF_REG_DEF_GET (df, dregno)->n_refs);
2749 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2750 bitmap_set_bit (cpy, DF_REF_ID (def));
2753 /* Process the regular instructions next. */
2754 FOR_BB_INSNS (bb, insn)
2757 unsigned int uid = INSN_UID (insn);
2759 if (! INSN_P (insn))
2762 /* Now scan the uses and link them up with the defs that remain
2763 in the cpy vector. */
2765 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2766 DF_INSN_UID_GET (df, uid)->uses, 0);
2768 /* Since we are going forwards, process the defs second. This
2769 pass only changes the bits in cpy. */
2770 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
2772 unsigned int dregno = DF_REF_REGNO (def);
2773 bitmap_clear_range (cpy,
2774 DF_REG_DEF_GET (df, dregno)->begin,
2775 DF_REG_DEF_GET (df, dregno)->n_refs);
2776 if (! (DF_REF_FLAGS (def) & DF_REF_CLOBBER))
2777 bitmap_set_bit (cpy, DF_REF_ID (def));
2781 /* Create the chains for the artificial uses of the hard registers
2782 at the end of the block. */
2783 df_chain_create_bb_process_use (dflow, problem_data, cpy,
2784 df_get_artificial_uses (df, bb->index), 0);
2787 /* Create def-use chains from reaching use bitmaps for basic blocks
2791 df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
2793 unsigned int bb_index;
2795 struct df *df = dflow->df;
2796 struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
2798 EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
2800 df_chain_create_bb (dflow, rd_dflow, bb_index);
2805 /* Free all storage associated with the problem. */
2808 df_chain_free (struct dataflow *dflow)
2810 free_alloc_pool (dflow->block_pool);
2811 free (dflow->problem_data);
2816 /* Debugging info. */
2819 df_chains_dump (struct dataflow *dflow, FILE *file)
2821 struct df *df = dflow->df;
2823 struct df_chain_problem_data *problem_data =
2824 (struct df_chain_problem_data *) dflow->problem_data;
2826 if (problem_data->flags & DF_DU_CHAIN)
2828 fprintf (file, "Def-use chains:\n");
2829 for (j = 0; j < df->def_info.bitmap_size; j++)
2831 struct df_ref *def = DF_DEFS_GET (df, j);
2834 fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
2835 j, DF_REF_BBNO (def),
2836 DF_INSN_LUID (df, DF_REF_INSN (def)),
2837 DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
2838 DF_REF_REGNO (def));
2839 if (def->flags & DF_REF_READ_WRITE)
2840 fprintf (file, "read/write ");
2841 df_chain_dump (df, DF_REF_CHAIN (def), file);
2842 fprintf (file, "\n");
2847 if (problem_data->flags & DF_UD_CHAIN)
2849 fprintf (file, "Use-def chains:\n");
2850 for (j = 0; j < df->use_info.bitmap_size; j++)
2852 struct df_ref *use = DF_USES_GET (df, j);
2855 fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
2856 j, DF_REF_BBNO (use),
2858 DF_INSN_LUID (df, DF_REF_INSN (use))
2860 DF_REF_INSN (DF_USES_GET (df, j)) ?
2861 DF_REF_INSN_UID (DF_USES_GET (df,j))
2863 DF_REF_REGNO (use));
2864 if (use->flags & DF_REF_READ_WRITE)
2865 fprintf (file, "read/write ");
2866 if (use->flags & DF_REF_STRIPPED)
2867 fprintf (file, "stripped ");
2868 if (use->flags & DF_REF_IN_NOTE)
2869 fprintf (file, "note ");
2870 df_chain_dump (df, DF_REF_CHAIN (use), file);
2871 fprintf (file, "\n");
2878 static struct df_problem problem_CHAIN =
2880 DF_CHAIN, /* Problem id. */
2881 DF_NONE, /* Direction. */
2882 df_chain_alloc, /* Allocate the problem specific data. */
2883 NULL, /* Free basic block info. */
2884 NULL, /* Local compute function. */
2885 NULL, /* Init the solution specific data. */
2886 NULL, /* Iterative solver. */
2887 NULL, /* Confluence operator 0. */
2888 NULL, /* Confluence operator n. */
2889 NULL, /* Transfer function. */
2890 df_chain_finalize, /* Finalize function. */
2891 df_chain_free, /* Free all of the problem information. */
2892 df_chains_dump, /* Debugging. */
2893 &problem_RD /* Dependent problem. */
2897 /* Create a new DATAFLOW instance and add it to an existing instance
2898 of DF. The returned structure is what is used to get at the
2902 df_chain_add_problem (struct df *df, int flags)
2904 struct df_chain_problem_data *problem_data =
2905 xmalloc (sizeof (struct df_chain_problem_data));
2906 struct dataflow *dflow = df_add_problem (df, &problem_CHAIN);
2908 dflow->problem_data = problem_data;
2909 problem_data->flags = flags;
/*----------------------------------------------------------------------------
   REGISTER INFORMATION

   Currently this consists of only lifetime information.  But the plan is
   to enhance it so that it produces all of the register information needed
   by the register allocators.
----------------------------------------------------------------------------*/
/* Problem data for the register-info problem.  LIFETIME is an array,
   indexed by register number, of insn counts over which each register
   is live.  */
struct df_ri_problem_data
{
  int *lifetime;
};
2930 /* Allocate the lifetime information. */
2933 df_ri_alloc (struct dataflow *dflow, bitmap blocks_to_rescan ATTRIBUTE_UNUSED)
2935 struct df_ri_problem_data *problem_data =
2936 (struct df_ri_problem_data *) dflow->problem_data;
2938 if (!dflow->problem_data)
2940 struct df_ri_problem_data *problem_data =
2941 xmalloc (sizeof (struct df_ri_problem_data));
2942 dflow->problem_data = problem_data;
2945 problem_data->lifetime = xrealloc (problem_data->lifetime,
2946 max_reg_num () *sizeof (int));
2947 memset (problem_data->lifetime, 0, max_reg_num () *sizeof (int));
2950 /* Compute register info: lifetime, bb, and number of defs and uses
2951 for basic block BB. */
2954 df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index, bitmap live)
2956 struct df *df = dflow->df;
2957 struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
2958 struct df_ri_problem_data *problem_data =
2959 (struct df_ri_problem_data *) dflow->problem_data;
2960 basic_block bb = BASIC_BLOCK (bb_index);
2963 bitmap_copy (live, bb_info->out);
2965 FOR_BB_INSNS_REVERSE (bb, insn)
2967 unsigned int uid = INSN_UID (insn);
2973 if (! INSN_P (insn))
2976 for (def = DF_INSN_UID_GET (df, uid)->defs; def; def = def->next_ref)
2978 unsigned int dregno = DF_REF_REGNO (def);
2980 /* Kill this register. */
2981 bitmap_clear_bit (live, dregno);
2984 for (use = DF_INSN_UID_GET (df, uid)->uses; use; use = use->next_ref)
2986 unsigned int uregno = DF_REF_REGNO (use);
2988 /* This register is now live. */
2989 bitmap_set_bit (live, uregno);
2992 /* Increment lifetimes of all live registers. */
2993 EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
2995 problem_data->lifetime[regno]++;
3001 /* Compute register info: lifetime, bb, and number of defs and uses. */
3003 df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
3004 bitmap blocks_to_scan)
3006 unsigned int bb_index;
3010 live = BITMAP_ALLOC (NULL);
3012 EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
3014 df_ri_bb_compute (dflow, bb_index, live);
3021 /* Free all storage associated with the problem. */
3024 df_ri_free (struct dataflow *dflow)
3026 struct df_ri_problem_data *problem_data =
3027 (struct df_ri_problem_data *) dflow->problem_data;
3029 free (problem_data->lifetime);
3030 free (dflow->problem_data);
3035 /* Debugging info. */
3038 df_ri_dump (struct dataflow *dflow, FILE *file)
3040 struct df_ri_problem_data *problem_data =
3041 (struct df_ri_problem_data *) dflow->problem_data;
3044 fprintf (file, "Register info:\n");
3045 for (j = 0; j < max_reg_num (); j++)
3047 fprintf (file, "reg %d life %d\n", j, problem_data->lifetime[j]);
3051 /* All of the information associated every instance of the problem. */
3053 static struct df_problem problem_RI =
3055 DF_RI, /* Problem id. */
3056 DF_NONE, /* Direction. */
3057 df_ri_alloc, /* Allocate the problem specific data. */
3058 NULL, /* Free basic block info. */
3059 df_ri_compute, /* Local compute function. */
3060 NULL, /* Init the solution specific data. */
3061 NULL, /* Iterative solver. */
3062 NULL, /* Confluence operator 0. */
3063 NULL, /* Confluence operator n. */
3064 NULL, /* Transfer function. */
3065 NULL, /* Finalize function. */
3066 df_ri_free, /* Free all of the problem information. */
3067 df_ri_dump, /* Debugging. */
3068 &problem_UR /* Dependent problem. */
3072 /* Create a new DATAFLOW instance and add it to an existing instance
3073 of DF. The returned structure is what is used to get at the
3077 df_ri_add_problem (struct df *df)
3079 return df_add_problem (df, &problem_RI);
3083 /* Return total lifetime (in insns) of REG. */
3085 df_reg_lifetime (struct df *df, rtx reg)
3087 struct dataflow *dflow = df->problems_by_index[DF_RI];
3088 struct df_ri_problem_data *problem_data =
3089 (struct df_ri_problem_data *) dflow->problem_data;
3090 return problem_data->lifetime[REGNO (reg)];