1 /* FIXME: We need to go back and add the warning messages about code
2 moved across setjmp. */
5 /* Scanning of rtl for dataflow analysis.
6 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
7 Free Software Foundation, Inc.
8 Originally contributed by Michael P. Hayes
9 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
10 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
11 and Kenneth Zadeck (zadeck@naturalbridge.com).
13 This file is part of GCC.
15 GCC is free software; you can redistribute it and/or modify it under
16 the terms of the GNU General Public License as published by the Free
17 Software Foundation; either version 2, or (at your option) any later
20 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
21 WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
25 You should have received a copy of the GNU General Public License
26 along with GCC; see the file COPYING. If not, write to the Free
27 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
33 #include "coretypes.h"
37 #include "insn-config.h"
42 #include "alloc-pool.h"
44 #include "hard-reg-set.h"
45 #include "basic-block.h"
51 #include "target-def.h"
55 #define HAVE_epilogue 0
58 #define HAVE_prologue 0
60 #ifndef HAVE_sibcall_epilogue
61 #define HAVE_sibcall_epilogue 0
65 #define EPILOGUE_USES(REGNO) 0
68 /* The bitmap_obstack is used to hold some static variables that
69 should not be reset after each function is compiled. */
71 static bitmap_obstack persistent_obstack;
73 /* The set of hard registers in eliminables[i].from. */
75 static HARD_REG_SET elim_reg_set;
77 /* This is a bitmap copy of regs_invalidated_by_call so that we can
78 easily add it into bitmaps, etc. */
80 bitmap df_invalidated_by_call = NULL;
82 /* Initialize ur_in and ur_out as if all hard registers were partially
/* Forward declarations for the static scanning helpers defined below.
   Kept here so the definitions can appear in reading order.  */
85 static void df_ref_record (struct dataflow *, rtx, rtx *,
86 basic_block, rtx, enum df_ref_type,
87 enum df_ref_flags, bool record_live);
88 static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
89 enum df_ref_flags, bool record_live);
90 static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
91 static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
92 basic_block, rtx, enum df_ref_flags);
94 static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
95 static void df_bb_refs_record (struct dataflow *, basic_block);
96 static void df_refs_record (struct dataflow *, bitmap);
97 static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
98 basic_block, rtx, enum df_ref_type,
100 static void df_record_entry_block_defs (struct dataflow *);
101 static void df_record_exit_block_uses (struct dataflow *);
102 static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
103 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
104 static void df_grow_insn_info (struct df *);
107 /*----------------------------------------------------------------------------
108 SCANNING DATAFLOW PROBLEM
110 There are several ways in which scanning looks just like the other
111 dataflow problems. It shares the all the mechanisms for local info
112 as well as basic block info. Where it differs is when and how often
113 it gets run. It also has no need for the iterative solver.
114 ----------------------------------------------------------------------------*/
116 /* Problem data for the scanning dataflow function.  Holds the
   allocation pools from which all scan-owned objects (insn records,
   multiword-hardreg records and their link lists) are carved.
   NOTE(review): df_scan_free_internal also frees ref_pool and
   reg_pool members of this struct -- confirm they are declared here.  */
117 struct df_scan_problem_data
120 alloc_pool insn_pool;
122 alloc_pool mw_reg_pool;
123 alloc_pool mw_link_pool;
/* Convenience pointer typedef for per-basic-block scan info.  */
126 typedef struct df_scan_bb_info *df_scan_bb_info_t;
/* Free all storage owned by the scan problem instance DFLOW: the
   def/use ref tables and reg tables, the per-block info array, the
   entry/exit/hardware-reg bitmaps, and every allocation pool.  The
   pools free all refs/insn records in one shot, so individual
   records are not walked here.  */
129 df_scan_free_internal (struct dataflow *dflow)
131 struct df *df = dflow->df;
132 struct df_scan_problem_data *problem_data
133 = (struct df_scan_problem_data *) dflow->problem_data;
/* Release the organized def table and zero the bookkeeping struct.  */
135 free (df->def_info.regs);
136 free (df->def_info.refs);
137 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
/* Same for the use side.  */
139 free (df->use_info.regs);
140 free (df->use_info.refs);
141 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
143 free (dflow->block_info);
164 /* Get basic block info. */
166 struct df_scan_bb_info *
167 df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
169 gcc_assert (index < dflow->block_info_size);
170 return (struct df_scan_bb_info *) dflow->block_info[index];
174 /* Set basic block info. */
177 df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
178 struct df_scan_bb_info *bb_info)
180 gcc_assert (index < dflow->block_info_size);
181 dflow->block_info[index] = (void *) bb_info;
185 /* Free basic block info.  Deletes all refs recorded for the block,
   then returns the bb_info record to the block pool.  */
188 df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
190 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
193 df_bb_refs_delete (dflow, bb->index);
194 pool_free (dflow->block_pool, bb_info);
199 /* Allocate the problem data for the scanning problem. This should be
200 called when the problem is created or when the entire function is to
/* be rescanned.  Frees any prior problem data first, then recreates
   every allocation pool and grows the def/use/insn/bb tables to fit
   the current function, sized 25% over the current insn count so
   inline growth is rare.  ALL_BLOCKS is currently unused.  */
204 df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
205 bitmap all_blocks ATTRIBUTE_UNUSED)
207 struct df *df = dflow->df;
208 struct df_scan_problem_data *problem_data;
209 unsigned int insn_num = get_max_uid () + 1;
210 unsigned int block_size = 50;
211 unsigned int bb_index;
214 /* Given the number of pools, this is really faster than tearing
216 if (dflow->problem_data)
217 df_scan_free_internal (dflow);
220 = create_alloc_pool ("df_scan_block pool",
221 sizeof (struct df_scan_bb_info),
224 problem_data = XNEW (struct df_scan_problem_data);
225 dflow->problem_data = problem_data;
227 problem_data->ref_pool
228 = create_alloc_pool ("df_scan_ref pool",
229 sizeof (struct df_ref), block_size);
230 problem_data->insn_pool
231 = create_alloc_pool ("df_scan_insn pool",
232 sizeof (struct df_insn_info), block_size);
233 problem_data->reg_pool
234 = create_alloc_pool ("df_scan_reg pool",
235 sizeof (struct df_reg_info), block_size);
236 problem_data->mw_reg_pool
237 = create_alloc_pool ("df_scan_mw_reg pool",
238 sizeof (struct df_mw_hardreg), block_size);
239 problem_data->mw_link_pool
240 = create_alloc_pool ("df_scan_mw_link pool",
241 sizeof (struct df_link), block_size);
/* Over-allocate by 25% to amortize future growth; uses get twice the
   slots of defs since insns typically have more uses than defs.  */
243 insn_num += insn_num / 4;
244 df_grow_reg_info (dflow, &df->def_info);
245 df_grow_ref_info (&df->def_info, insn_num);
247 df_grow_reg_info (dflow, &df->use_info);
248 df_grow_ref_info (&df->use_info, insn_num *2);
250 df_grow_insn_info (df);
251 df_grow_bb_info (dflow);
/* Ensure every block to be rescanned has a (reset) bb_info record.  */
253 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
255 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
258 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
259 df_scan_set_bb_info (dflow, bb_index, bb_info);
261 bb_info->artificial_defs = NULL;
262 bb_info->artificial_uses = NULL;
265 df->hardware_regs_used = BITMAP_ALLOC (NULL);
266 df->entry_block_defs = BITMAP_ALLOC (NULL);
267 df->exit_block_uses = BITMAP_ALLOC (NULL);
271 /* Free all of the data associated with the scan problem.  Frees the
   internal tables/pools (if allocated) plus the two block bitmaps
   owned by DF itself.  */
274 df_scan_free (struct dataflow *dflow)
276 struct df *df = dflow->df;
278 if (dflow->problem_data)
280 df_scan_free_internal (dflow);
281 free (dflow->problem_data);
/* BITMAP_FREE is guarded because these may never have been set.  */
284 if (df->blocks_to_scan)
285 BITMAP_FREE (df->blocks_to_scan);
287 if (df->blocks_to_analyze)
288 BITMAP_FREE (df->blocks_to_analyze);
/* Debug dump of the scan problem's global state to FILE.  */
294 df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
296 struct df *df = dflow->df;
299 fprintf (file, "  invalidated by call \t");
300 dump_bitmap (file, df_invalidated_by_call);
301 fprintf (file, "  hardware regs used \t");
302 dump_bitmap (file, df->hardware_regs_used);
/* NOTE(review): label says "uses" but this dumps entry_block_defs --
   the label is likely a copy-paste error (should read "entry block
   defs"); left unchanged here since it alters program output.  */
303 fprintf (file, "  entry block uses \t");
304 dump_bitmap (file, df->entry_block_defs);
305 fprintf (file, "  exit block uses \t");
306 dump_bitmap (file, df->exit_block_uses);
307 fprintf (file, "  regs ever live \t");
308 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
309 if (regs_ever_live[i])
310 fprintf (file, "%d ", i);
311 fprintf (file, "\n");
/* The scanning "problem" descriptor.  Scanning plugs into the generic
   dataflow framework but has no iterative solution, so most of the
   solver hooks are NULL.  */
314 static struct df_problem problem_SCAN =
316 DF_SCAN, /* Problem id. */
317 DF_NONE, /* Direction. */
318 df_scan_alloc, /* Allocate the problem specific data. */
319 NULL, /* Reset global information. */
320 df_scan_free_bb_info, /* Free basic block info. */
321 NULL, /* Local compute function. */
322 NULL, /* Init the solution specific data. */
323 NULL, /* Iterative solver. */
324 NULL, /* Confluence operator 0. */
325 NULL, /* Confluence operator n. */
326 NULL, /* Transfer function. */
327 NULL, /* Finalize function. */
328 df_scan_free, /* Free all of the problem information. */
329 df_scan_dump, /* Debugging. */
330 NULL, /* Dependent problem. */
331 0 /* Changeable flags. */
335 /* Create a new DATAFLOW instance and add it to an existing instance
336 of DF. The returned structure is what is used to get at the
/* scanning results.  FLAGS are the DF_* changeable flags for it.  */
340 df_scan_add_problem (struct df *df, int flags)
342 return df_add_problem (df, &problem_SCAN, flags);
345 /*----------------------------------------------------------------------------
346 Storage Allocation Utilities
347 ----------------------------------------------------------------------------*/
350 /* First, grow the reg_info information. If the current size is less than
351 the number of pseudos, grow to 25% more than the number of
/* pseudos so repeated calls stay amortized-cheap.
354 Second, assure that all of the slots up to max_reg_num have been
355 filled with reg_info structures. */
358 df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
360 unsigned int max_reg = max_reg_num ();
361 unsigned int new_size = max_reg;
362 struct df_scan_problem_data *problem_data
363 = (struct df_scan_problem_data *) dflow->problem_data;
366 if (ref_info->regs_size < new_size)
368 new_size += new_size / 4;
369 ref_info->regs = xrealloc (ref_info->regs,
370 new_size *sizeof (struct df_reg_info*));
371 ref_info->regs_size = new_size;
/* Zero-fill reg_info records for any newly-seen register numbers.  */
374 for (i = ref_info->regs_inited; i < max_reg; i++)
376 struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
377 memset (reg_info, 0, sizeof (struct df_reg_info));
378 ref_info->regs[i] = reg_info;
381 ref_info->regs_inited = max_reg;
385 /* Grow the flat ref array to at least NEW_SIZE slots, zeroing the
   newly added tail.  */
388 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
390 if (ref_info->refs_size < new_size)
392 ref_info->refs = xrealloc (ref_info->refs,
393 new_size *sizeof (struct df_ref *));
394 memset (ref_info->refs + ref_info->refs_size, 0,
395 (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
396 ref_info->refs_size = new_size;
401 /* Grow the insn info.  If the current size is less than the
402 number of instructions, grow to 25% more than the number of
/* instructions, zero-filling the new entries.  */
406 df_grow_insn_info (struct df *df)
408 unsigned int new_size = get_max_uid () + 1;
409 if (df->insns_size < new_size)
411 new_size += new_size / 4;
412 df->insns = xrealloc (df->insns,
413 new_size *sizeof (struct df_insn_info *));
414 memset (df->insns + df->insns_size, 0,
415 (new_size - df->insns_size) *sizeof (struct df_insn_info *));
416 df->insns_size = new_size;
423 /*----------------------------------------------------------------------------
424 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
425 ----------------------------------------------------------------------------*/
427 /* Rescan some BLOCKS or all the blocks defined by the last call to
428 df_set_blocks if BLOCKS is NULL); */
431 df_rescan_blocks (struct df *df, bitmap blocks)
433 bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);
435 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
/* Force the organized ref tables to be rebuilt from scratch.  */
438 df->def_info.refs_organized_size = 0;
439 df->use_info.refs_organized_size = 0;
444 unsigned int bb_index;
446 bool cleared_bits = false;
448 /* Need to assure that there are space in all of the tables. */
449 unsigned int insn_num = get_max_uid () + 1;
450 insn_num += insn_num / 4;
452 df_grow_reg_info (dflow, &df->def_info);
453 df_grow_ref_info (&df->def_info, insn_num);
455 df_grow_reg_info (dflow, &df->use_info);
456 df_grow_ref_info (&df->use_info, insn_num *2);
458 df_grow_insn_info (df);
459 df_grow_bb_info (dflow);
461 bitmap_copy (local_blocks_to_scan, blocks);
/* Drop any requested blocks that no longer exist in the CFG.  */
463 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
465 basic_block bb = BASIC_BLOCK (bb_index);
468 bitmap_clear_bit (local_blocks_to_scan, bb_index);
474 bitmap_copy (blocks, local_blocks_to_scan);
476 df->def_info.add_refs_inline = true;
477 df->use_info.add_refs_inline = true;
/* Let each registered problem reset its info for the blocks that
   will be rescanned, before the old refs are deleted.  */
479 for (i = df->num_problems_defined; i; i--)
481 bitmap blocks_to_reset = NULL;
482 if (dflow->problem->reset_fun)
484 if (!blocks_to_reset)
486 blocks_to_reset = BITMAP_ALLOC (NULL);
487 bitmap_copy (blocks_to_reset, local_blocks_to_scan);
488 if (df->blocks_to_scan)
489 bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
491 dflow->problem->reset_fun (dflow, blocks_to_reset);
494 BITMAP_FREE (blocks_to_reset);
497 df_refs_delete (dflow, local_blocks_to_scan);
499 /* This may be a mistake, but if an explicit blocks is passed in
500 and the set of blocks to analyze has been explicitly set, add
501 the extra blocks to blocks_to_analyze. The alternative is to
502 put an assert here. We do not want this to just go by
503 silently or else we may get storage leaks. */
504 if (df->blocks_to_analyze)
505 bitmap_ior_into (df->blocks_to_analyze, blocks)
538 /* Create a new ref of type DF_REF_TYPE for register REG at address
539 LOC within INSN of BB. */
542 df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
544 enum df_ref_type ref_type,
545 enum df_ref_flags ref_flags)
547 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
548 struct df_scan_bb_info *bb_info;
/* Make sure the tables can hold a new ref before creating it.  */
550 df_grow_reg_info (dflow, &df->use_info);
551 df_grow_reg_info (dflow, &df->def_info);
552 df_grow_bb_info (dflow);
554 /* Make sure there is the bb_info for this block. */
555 bb_info = df_scan_get_bb_info (dflow, bb->index);
558 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
559 df_scan_set_bb_info (dflow, bb->index, bb_info);
560 bb_info->artificial_defs = NULL;
561 bb_info->artificial_uses = NULL;
/* Incrementally-added refs must go into the flat ref arrays too.  */
564 if (ref_type == DF_REF_REG_DEF)
565 df->def_info.add_refs_inline = true;
567 df->use_info.add_refs_inline = true;
569 return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags);
574 /*----------------------------------------------------------------------------
575 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
576 ----------------------------------------------------------------------------*/
579 /* Get the artificial defs for a basic block. */
582 df_get_artificial_defs (struct df *df, unsigned int bb_index)
584 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
585 return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
589 /* Get the artificial uses for a basic block. */
592 df_get_artificial_uses (struct df *df, unsigned int bb_index)
594 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
595 return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
599 /* Link REF at the front of reg_use or reg_def chain for REGNO.
   The per-register chain is doubly linked via DF_REF_NEXT_REG /
   DF_REF_PREV_REG; the old head (if any) gets REF as its prev.  */
602 df_reg_chain_create (struct df_reg_info *reg_info,
605 struct df_ref *head = reg_info->reg_chain;
606 reg_info->reg_chain = ref;
608 DF_REF_NEXT_REG (ref) = head;
610 /* We cannot actually link to the head of the chain. */
611 DF_REF_PREV_REG (ref) = NULL;
614 DF_REF_PREV_REG (head) = ref;
618 /* Remove REF from the CHAIN. Return the head of the chain. This
619 will be CHAIN unless the REF was at the beginning of the chain.
   The chain here is the singly-linked next_ref list (insn defs/uses
   or artificial defs/uses), not the per-register chain.  */
621 static struct df_ref *
622 df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
624 struct df_ref *orig_chain = chain;
625 struct df_ref *prev = NULL;
/* REF found mid-chain: splice it out.  */
632 prev->next_ref = ref->next_ref;
633 ref->next_ref = NULL;
/* REF is the head: new head is its successor.  */
638 chain = ref->next_ref;
639 ref->next_ref = NULL;
645 chain = chain->next_ref;
648 /* Someone passed in a ref that was not in the chain. */
654 /* Unlink and delete REF at the reg_use or reg_def chain. Also delete
655 the def-use or use-def chain if it exists. Returns the next ref in
656 uses or defs chain. */
659 df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
661 struct df *df = dflow->df;
662 struct df_ref *next = DF_REF_NEXT_REG (ref);
663 struct df_ref *prev = DF_REF_PREV_REG (ref);
664 struct df_scan_problem_data *problem_data
665 = (struct df_scan_problem_data *) dflow->problem_data;
666 struct df_reg_info *reg_info;
667 struct df_ref *next_ref = ref->next_ref;
668 unsigned int id = DF_REF_ID (ref);
/* Clear REF's slot in the flat def or use table, if it has one.  */
670 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
672 reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
673 df->def_info.bitmap_size--;
674 if (df->def_info.refs && (id < df->def_info.refs_size))
675 DF_DEFS_SET (df, id, NULL);
679 reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
680 df->use_info.bitmap_size--;
681 if (df->use_info.refs && (id < df->use_info.refs_size))
682 DF_USES_SET (df, id, NULL);
685 /* Delete any def-use or use-def chains that start here. */
686 if (DF_REF_CHAIN (ref))
687 df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);
691 /* Unlink from the reg chain. If there is no prev, this is the
692 first of the list. If not, just join the next and prev. */
695 DF_REF_NEXT_REG (prev) = next;
697 DF_REF_PREV_REG (next) = prev;
701 reg_info->reg_chain = next;
703 DF_REF_PREV_REG (next) = NULL;
/* Finally return the ref's storage to the pool.  */
706 pool_free (problem_data->ref_pool, ref);
711 /* Unlink REF from all def-use/use-def chains, etc.  Removes it from
   the owning insn's (or block's artificial) list, clears its slot in
   the flat ref table when inline additions are active, and then
   deletes it from the per-register chain.  */
714 df_ref_remove (struct df *df, struct df_ref *ref)
716 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
717 if (DF_REF_REG_DEF_P (ref))
719 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
721 struct df_scan_bb_info *bb_info
722 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
723 bb_info->artificial_defs
724 = df_ref_unlink (bb_info->artificial_defs, ref);
727 DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref))
728 = df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);
730 if (df->def_info.add_refs_inline)
731 DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
/* Use side mirrors the def side above.  */
735 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
737 struct df_scan_bb_info *bb_info
738 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
739 bb_info->artificial_uses
740 = df_ref_unlink (bb_info->artificial_uses, ref);
743 DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref))
744 = df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);
746 if (df->use_info.add_refs_inline)
747 DF_USES_SET (df, DF_REF_ID (ref), NULL);
750 df_reg_chain_unlink (dflow, ref);
754 /* Create the insn record for INSN. If there was one there, zero it out.
   The record comes from the scan problem's insn pool and is always
   returned zeroed.  */
756 static struct df_insn_info *
757 df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
759 struct df *df = dflow->df;
760 struct df_scan_problem_data *problem_data
761 = (struct df_scan_problem_data *) dflow->problem_data;
763 struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
766 insn_rec = pool_alloc (problem_data->insn_pool);
767 DF_INSN_SET (df, insn, insn_rec);
769 memset (insn_rec, 0, sizeof (struct df_insn_info));
775 /* Delete all of the refs information from INSN: its multiword
   hardreg records (and their link lists), its def and use chains,
   and finally the insn record itself.  */
778 df_insn_refs_delete (struct dataflow *dflow, rtx insn)
780 struct df *df = dflow->df;
781 unsigned int uid = INSN_UID (insn);
782 struct df_insn_info *insn_info = NULL;
784 struct df_scan_problem_data *problem_data
785 = (struct df_scan_problem_data *) dflow->problem_data;
/* Guard: the insn table may not have grown to cover UID yet.  */
787 if (uid < df->insns_size)
788 insn_info = DF_INSN_UID_GET (df, uid);
/* Return all mw_hardreg records and their df_link lists to pools.  */
792 struct df_mw_hardreg *hardregs = insn_info->mw_hardregs;
796 struct df_mw_hardreg *next_hr = hardregs->next;
797 struct df_link *link = hardregs->regs;
800 struct df_link *next_l = link->next;
801 pool_free (problem_data->mw_link_pool, link);
805 pool_free (problem_data->mw_reg_pool, hardregs);
/* df_reg_chain_unlink returns the next ref, so these are loops.  */
809 ref = insn_info->defs;
811 ref = df_reg_chain_unlink (dflow, ref);
813 ref = insn_info->uses;
815 ref = df_reg_chain_unlink (dflow, ref);
817 pool_free (problem_data->insn_pool, insn_info);
818 DF_INSN_SET (df, insn, NULL);
823 /* Delete all of the refs information from basic_block with BB_INDEX. */
826 df_bb_refs_delete (struct dataflow *dflow, int bb_index)
831 struct df_scan_bb_info *bb_info
832 = df_scan_get_bb_info (dflow, bb_index);
834 basic_block bb = BASIC_BLOCK (bb_index);
835 FOR_BB_INSNS (bb, insn)
839 /* Delete the refs recorded for INSN. */
840 df_insn_refs_delete (dflow, insn);
844 /* Get rid of any artificial uses or defs. */
847 def = bb_info->artificial_defs;
849 def = df_reg_chain_unlink (dflow, def);
850 bb_info->artificial_defs = NULL;
851 use = bb_info->artificial_uses;
853 use = df_reg_chain_unlink (dflow, use);
854 bb_info->artificial_uses = NULL;
859 /* Delete all of the refs information from BLOCKS, one block at a
   time via df_bb_refs_delete.  */
862 df_refs_delete (struct dataflow *dflow, bitmap blocks)
865 unsigned int bb_index;
867 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
869 df_bb_refs_delete (dflow, bb_index);
874 /* Build the flat ref table for either the uses or defs from the reg-use
875 or reg-def chains.  Refs end up grouped by register number, with
   each reg_info recording its begin offset and ref count, and each
   ref's DF_REF_ID renumbered to its table slot.  */
878 df_reorganize_refs (struct df_ref_info *ref_info)
880 unsigned int m = ref_info->regs_inited;
882 unsigned int offset = 0;
883 unsigned int size = 0;
/* Already organized: nothing to do.  */
885 if (ref_info->refs_organized_size)
888 if (ref_info->refs_size < ref_info->bitmap_size)
890 int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
891 df_grow_ref_info (ref_info, new_size);
894 for (regno = 0; regno < m; regno++)
896 struct df_reg_info *reg_info = ref_info->regs[regno];
900 struct df_ref *ref = reg_info->reg_chain;
901 reg_info->begin = offset;
904 ref_info->refs[offset] = ref;
905 DF_REF_ID (ref) = offset++;
906 ref = DF_REF_NEXT_REG (ref);
910 reg_info->n_refs = count;
914 /* The bitmap size is not decremented when refs are deleted. So
915 reset it now that we have squished out all of the empty
917 ref_info->bitmap_size = size;
918 ref_info->refs_organized_size = size;
919 ref_info->add_refs_inline = true;
923 /*----------------------------------------------------------------------------
924 Hard core instruction scanning code. No external interfaces here,
925 just a lot of routines that look inside insns.
926 ----------------------------------------------------------------------------*/
928 /* Create a ref and add it to the reg-def or reg-use chains.  Also
   appends the ref to the flat def/use table when inline additions
   are enabled, and to the owning insn's (or block's artificial)
   list.  Returns the new ref.  */
930 static struct df_ref *
931 df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
932 basic_block bb, rtx insn,
933 enum df_ref_type ref_type,
934 enum df_ref_flags ref_flags)
936 struct df_ref *this_ref;
937 struct df *df = dflow->df;
938 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
939 struct df_scan_problem_data *problem_data
940 = (struct df_scan_problem_data *) dflow->problem_data;
942 this_ref = pool_alloc (problem_data->ref_pool);
943 DF_REF_REG (this_ref) = reg;
944 DF_REF_REGNO (this_ref) = regno;
945 DF_REF_LOC (this_ref) = loc;
946 DF_REF_INSN (this_ref) = insn;
947 DF_REF_CHAIN (this_ref) = NULL;
948 DF_REF_TYPE (this_ref) = ref_type;
949 DF_REF_FLAGS (this_ref) = ref_flags;
950 DF_REF_DATA (this_ref) = NULL;
951 DF_REF_BB (this_ref) = bb;
953 /* Link the ref into the reg_def and reg_use chains and keep a count
959 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
960 unsigned int size = df->def_info.refs_organized_size
961 ? df->def_info.refs_organized_size
962 : df->def_info.bitmap_size;
964 /* Add the ref to the reg_def chain. */
966 df_reg_chain_create (reg_info, this_ref);
967 DF_REF_ID (this_ref) = size;
968 if (df->def_info.add_refs_inline)
970 if (size >= df->def_info.refs_size)
972 int new_size = size + size / 4;
973 df_grow_ref_info (&df->def_info, new_size);
975 /* Add the ref to the big array of defs. */
976 DF_DEFS_SET (df, size, this_ref);
977 if (df->def_info.refs_organized_size)
978 df->def_info.refs_organized_size++;
981 df->def_info.bitmap_size++;
983 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
985 struct df_scan_bb_info *bb_info
986 = df_scan_get_bb_info (dflow, bb->index);
987 this_ref->next_ref = bb_info->artificial_defs;
988 bb_info->artificial_defs = this_ref;
992 this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
993 DF_INSN_GET (df, insn)->defs = this_ref;
998 case DF_REF_REG_MEM_LOAD:
999 case DF_REF_REG_MEM_STORE:
1000 case DF_REF_REG_USE:
1002 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
1003 unsigned int size = df->use_info.refs_organized_size
1004 ? df->use_info.refs_organized_size
1005 : df->use_info.bitmap_size;
1007 /* Add the ref to the reg_use chain. */
1009 df_reg_chain_create (reg_info, this_ref);
1010 DF_REF_ID (this_ref) = size;
1011 if (df->use_info.add_refs_inline)
1013 if (size >= df->use_info.refs_size)
1015 int new_size = size + size / 4;
1016 df_grow_ref_info (&df->use_info, new_size);
1018 /* Add the ref to the big array of uses. */
1019 DF_USES_SET (df, size, this_ref);
/* NOTE(review): the next two lines test and increment
   def_info.refs_organized_size inside the USE branch -- this looks
   like a copy-paste bug from the def case above; use_info is almost
   certainly intended.  Left as-is pending confirmation.  */
1020 if (df->def_info.refs_organized_size)
1021 df->def_info.refs_organized_size++;
1024 df->use_info.bitmap_size++;
1025 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
1027 struct df_scan_bb_info *bb_info
1028 = df_scan_get_bb_info (dflow, bb->index);
1029 this_ref->next_ref = bb_info->artificial_uses;
1030 bb_info->artificial_uses = this_ref;
1034 this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
1035 DF_INSN_GET (df, insn)->uses = this_ref;
1048 /* Create new references of type DF_REF_TYPE for each part of register REG
1049 at address LOC within INSN of BB.  Hard registers spanning several
   machine registers get one ref per hard reg (plus a df_mw_hardreg
   record); pseudos get a single ref.  RECORD_LIVE controls whether
   defs update regs_ever_live.  */
1052 df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
1053 basic_block bb, rtx insn,
1054 enum df_ref_type ref_type,
1055 enum df_ref_flags ref_flags,
1058 struct df *df = dflow->df;
1062 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
1064 /* For the reg allocator we are interested in some SUBREG rtx's, but not
1065 all. Notably only those representing a word extraction from a multi-word
1066 reg. As written in the docu those should have the form
1067 (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
1068 XXX Is that true? We could also use the global word_mode variable. */
1069 if ((dflow->flags & DF_SUBREGS) == 0
1070 && GET_CODE (reg) == SUBREG
1071 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
1072 || GET_MODE_SIZE (GET_MODE (reg))
1073 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
/* Strip the uninteresting SUBREG and record against the inner reg.  */
1075 loc = &SUBREG_REG (reg);
1077 ref_flags |= DF_REF_STRIPPED;
1080 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
1081 if (regno < FIRST_PSEUDO_REGISTER)
1084 unsigned int endregno;
1085 struct df_mw_hardreg *hardreg = NULL;
1086 struct df_scan_problem_data *problem_data
1087 = (struct df_scan_problem_data *) dflow->problem_data;
/* Hard-register refs are only recorded when DF_HARD_REGS is set.  */
1089 if (!(dflow->flags & DF_HARD_REGS))
1092 if (GET_CODE (reg) == SUBREG)
1094 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
1095 SUBREG_BYTE (reg), GET_MODE (reg));
1096 endregno = subreg_nregs (reg);
1099 endregno = hard_regno_nregs[regno][GET_MODE (reg)];
1102 /* If this is a multiword hardreg, we create some extra datastructures that
1103 will enable us to easily build REG_DEAD and REG_UNUSED notes. */
1104 if ((endregno != regno + 1) && insn)
1106 struct df_insn_info *insn_info = DF_INSN_GET (df, insn);
1107 /* Sets to a subreg of a multiword register are partial.
1108 Sets to a non-subreg of a multiword register are not. */
1109 if (GET_CODE (oldreg) == SUBREG)
1110 ref_flags |= DF_REF_PARTIAL;
1111 ref_flags |= DF_REF_MW_HARDREG;
1112 hardreg = pool_alloc (problem_data->mw_reg_pool);
1113 hardreg->next = insn_info->mw_hardregs;
1114 insn_info->mw_hardregs = hardreg;
1115 hardreg->type = ref_type;
1116 hardreg->flags = ref_flags;
1117 hardreg->mw_reg = reg;
1118 hardreg->regs = NULL;
/* One ref per hard register covered by REG.  */
1122 for (i = regno; i < endregno; i++)
1126 /* Calls are handled at call site because regs_ever_live
1127 doesn't include clobbered regs, only used ones. */
1128 if (ref_type == DF_REF_REG_DEF && record_live)
1129 regs_ever_live[i] = 1;
1130 else if ((ref_type == DF_REF_REG_USE
1131 || ref_type == DF_REF_REG_MEM_STORE
1132 || ref_type == DF_REF_REG_MEM_LOAD)
1133 && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
1135 /* Set regs_ever_live on uses of non-eliminable frame
1136 pointers and arg pointers. */
/* NOTE(review): this tests REGNO (the first hard reg of the span)
   rather than the loop index I -- confirm this is intended for
   multiword spans.  */
1137 if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
1138 && (regno == FRAME_POINTER_REGNUM
1139 || regno == ARG_POINTER_REGNUM)))
1140 regs_ever_live[i] = 1;
1143 ref = df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
1144 bb, insn, ref_type, ref_flags);
/* For multiword hardregs, also chain the ref onto the mw record.  */
1147 struct df_link *link = pool_alloc (problem_data->mw_link_pool);
1149 link->next = hardreg->regs;
1151 hardreg->regs = link;
/* Pseudo register: a single ref suffices.  */
1157 df_ref_create_structure (dflow, reg, loc,
1158 bb, insn, ref_type, ref_flags);
1163 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
1164 covered by the outer mode is smaller than that covered by the inner mode,
1165 is a read-modify-write operation.
1166 This function returns true iff the SUBREG X is such a SUBREG. */
1169 df_read_modify_subreg_p (rtx x)
1171 unsigned int isize, osize;
1172 if (GET_CODE (x) != SUBREG)
1174 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
1175 osize = GET_MODE_SIZE (GET_MODE (x));
/* RMW iff the outer mode is strictly narrower than the inner one and
   the inner reg spans more than one word.  */
1176 return (isize > osize && isize > UNITS_PER_WORD);
1180 /* Process all the registers defined in the rtx, X.
1181 Autoincrement/decrement definitions will be picked up by
/* the use-scanning code; here we handle SET/CLOBBER destinations,
   recursing through PARALLEL destinations and stripping
   STRICT_LOW_PART / ZERO_EXTRACT / read-mod-write SUBREG wrappers.  */
1185 df_def_record_1 (struct dataflow *dflow, rtx x,
1186 basic_block bb, rtx insn,
1187 enum df_ref_flags flags, bool record_live)
1191 bool dst_in_strict_lowpart = false;
1193 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
1195 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
1198 loc = &SET_DEST (x);
1201 /* It is legal to have a set destination be a parallel. */
1202 if (GET_CODE (dst) == PARALLEL)
1206 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
1208 rtx temp = XVECEXP (dst, 0, i);
1209 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
1210 || GET_CODE (temp) == SET)
1211 df_def_record_1 (dflow, temp, bb, insn,
1212 GET_CODE (temp) == CLOBBER
1213 ? flags | DF_REF_MUST_CLOBBER : flags,
1219 /* Maybe, we should flag the use of STRICT_LOW_PART somehow. It might
1220 be handy for the reg allocator. */
1221 while (GET_CODE (dst) == STRICT_LOW_PART
1222 || GET_CODE (dst) == ZERO_EXTRACT
1223 || df_read_modify_subreg_p (dst))
1226 /* Strict low part always contains SUBREG, but we do not want to make
1227 it appear outside, as whole register is always considered. */
1228 if (GET_CODE (dst) == STRICT_LOW_PART)
1230 loc = &XEXP (dst, 0);
1234 loc = &XEXP (dst, 0);
1235 if (GET_CODE (dst) == STRICT_LOW_PART)
1236 dst_in_strict_lowpart = true;
/* Anything reached through these wrappers is a read-modify-write.  */
1238 flags |= DF_REF_READ_WRITE;
1242 /* Sets to a subreg of a single word register are partial sets if
1243 they are wrapped in a strict lowpart, and not partial otherwise.
1245 if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))
1246 && dst_in_strict_lowpart)
1247 flags |= DF_REF_PARTIAL;
1250 || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
1251 df_ref_record (dflow, dst, loc, bb, insn,
1252 DF_REF_REG_DEF, flags, record_live);
1256 /* Process all the registers defined in the pattern rtx, X. */
/* NOTE(review): elided view -- return type line and some braces are not
   visible.  Dispatches on the top-level code of the pattern.  */
1259 df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
1261 RTX_CODE code = GET_CODE (x);
1263 if (code == SET || code == CLOBBER)
1265 /* Mark the single def within the pattern. */
1266 df_def_record_1 (dflow, x, bb, insn,
1267 code == CLOBBER ? DF_REF_MUST_CLOBBER : 0, true);
/* For a conditional execution wrapper, scan the wrapped body.  */
1269 else if (code == COND_EXEC)
1271 df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
1273 else if (code == PARALLEL)
1277 /* Mark the multiple defs within the pattern. */
1278 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1279 df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
1284 /* Process all the registers used in the rtx at address LOC. */
/* NOTE(review): elided view -- the switch statement's case labels and
   several braces are missing between the visible lines; comments are
   limited to what the visible fragments show.  */
1287 df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
1288 basic_block bb, rtx insn, enum df_ref_flags flags)
1296 code = GET_CODE (x);
1312 /* If we are clobbering a MEM, mark any registers inside the address
1314 if (MEM_P (XEXP (x, 0)))
1315 df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
1316 DF_REF_REG_MEM_STORE, bb, insn, flags);
1318 /* If we're clobbering a REG then we have a def so ignore. */
/* MEM case (presumably): the address expression is a load-context use;
   only DF_REF_IN_NOTE is propagated from the incoming flags.  */
1322 df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
1323 flags & DF_REF_IN_NOTE);
1327 /* While we're here, optimize this case. */
1328 flags |= DF_REF_PARTIAL;
1329 /* In case the SUBREG is not of a REG, do not optimize. */
1330 if (!REG_P (SUBREG_REG (x)))
/* SUBREG-of-REG: record the use against the inner register.  */
1332 loc = &SUBREG_REG (x);
1333 df_uses_record (dflow, loc, ref_type, bb, insn, flags);
1336 /* ... Fall through ... */
/* REG case (presumably): record the plain register use.  */
1339 df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
/* SET case: sources are uses; the destination needs special handling
   for SUBREG / MEM / STRICT_LOW_PART / extract forms below.  */
1344 rtx dst = SET_DEST (x);
1345 gcc_assert (!(flags & DF_REF_IN_NOTE));
1346 df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);
1348 switch (GET_CODE (dst))
/* A read-modify SUBREG destination also reads the inner register.  */
1351 if (df_read_modify_subreg_p (dst))
1353 df_uses_record (dflow, &SUBREG_REG (dst),
1355 insn, flags | DF_REF_READ_WRITE);
/* A MEM destination uses the registers in its address.  */
1366 df_uses_record (dflow, &XEXP (dst, 0),
1367 DF_REF_REG_MEM_STORE,
1370 case STRICT_LOW_PART:
1372 rtx *temp = &XEXP (dst, 0);
1373 /* A strict_low_part uses the whole REG and not just the
1375 dst = XEXP (dst, 0);
1376 df_uses_record (dflow,
/* If the wrapped operand is a SUBREG, point at the inner register so
   the whole register is seen as read-written.  */
1377 (GET_CODE (dst) == SUBREG)
1378 ? &SUBREG_REG (dst) : temp,
1380 insn, DF_REF_READ_WRITE);
/* ZERO_EXTRACT/SIGN_EXTRACT destination (presumably): the extracted
   object and both position/width operands are all uses.  */
1385 df_uses_record (dflow, &XEXP (dst, 0),
1386 DF_REF_REG_USE, bb, insn,
1388 df_uses_record (dflow, &XEXP (dst, 1),
1389 DF_REF_REG_USE, bb, insn, flags);
1390 df_uses_record (dflow, &XEXP (dst, 2),
1391 DF_REF_REG_USE, bb, insn, flags);
1392 dst = XEXP (dst, 0);
1404 case UNSPEC_VOLATILE:
1408 /* Traditional and volatile asm instructions must be
1409 considered to use and clobber all hard registers, all
1410 pseudo-registers and all of memory. So must TRAP_IF and
1411 UNSPEC_VOLATILE operations.
1413 Consider for instance a volatile asm that changes the fpu
1414 rounding mode. An insn should not be moved across this
1415 even if it only uses pseudo-regs because it might give an
1416 incorrectly rounded result.
1418 However, flow.c's liveness computation did *not* do this,
1419 giving the reasoning as " ?!? Unfortunately, marking all
1420 hard registers as live causes massive problems for the
1421 register allocator and marking all pseudos as live creates
1422 mountains of uninitialized variable warnings."
1424 In order to maintain the status quo with regard to liveness
1425 and uses, we do what flow.c did and just mark any regs we
1426 can find in ASM_OPERANDS as used. Later on, when liveness
1427 is computed, asm insns are scanned and regs_asm_clobbered
1430 For all ASM_OPERANDS, we must traverse the vector of input
1431 operands. We can not just fall through here since then we
1432 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
1433 which do not indicate traditional asms unlike their normal
1435 if (code == ASM_OPERANDS)
1439 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
1440 df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
1441 DF_REF_REG_USE, bb, insn, flags);
/* Auto-inc/dec style modifications (presumably PRE_/POST_ codes):
   the modified register is both defined and used.  */
1453 /* Catch the def of the register being modified. */
1454 flags |= DF_REF_READ_WRITE;
1455 df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
1456 DF_REF_REG_DEF, flags, true);
1458 /* ... Fall through to handle uses ... */
1464 /* Recursively scan the operands of this expression. */
1466 const char *fmt = GET_RTX_FORMAT (code);
1469 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1473 /* Tail recursive case: save a function call level. */
1479 df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
1481 else if (fmt[i] == 'E')
/* Vector operand: recurse on every element.  */
1484 for (j = 0; j < XVECLEN (x, i); j++)
1485 df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
1492 /* Return true if *LOC contains an asm. */
/* for_each_rtx callback; the return statements are elided from view.  */
1495 df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1499 if (GET_CODE (*loc) == ASM_OPERANDS)
1505 /* Return true if INSN contains an ASM. */
1508 df_insn_contains_asm (rtx insn)
/* Walk every sub-rtx of INSN looking for an ASM_OPERANDS node.  */
1510 return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
1515 /* Record all the refs for DF within INSN of basic block BB. */
/* NOTE(review): elided view -- conditionals, braces and the CALL_P test
   around the call-insn section are not all visible.  */
1518 df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
1520 struct df *df = dflow->df;
/* Remember asm-containing insns so liveness can treat them specially.  */
1527 if (df_insn_contains_asm (insn))
1528 DF_INSN_CONTAINS_ASM (df, insn) = true;
1530 /* Record register defs. */
1531 df_defs_record (dflow, PATTERN (insn), bb, insn);
/* Optionally scan REG_EQUIV/REG_EQUAL notes for uses, tagged with
   DF_REF_IN_NOTE so clients can tell them apart.  */
1533 if (dflow->flags & DF_EQUIV_NOTES)
1534 for (note = REG_NOTES (insn); note;
1535 note = XEXP (note, 1))
1537 switch (REG_NOTE_KIND (note))
1541 df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
1542 bb, insn, DF_REF_IN_NOTE)
1552 /* Record the registers used to pass arguments, and explicitly
1553 noted as clobbered. */
1554 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
1555 note = XEXP (note, 1))
1557 if (GET_CODE (XEXP (note, 0)) == USE)
1558 df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
1561 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
1563 df_defs_record (dflow, XEXP (note, 0), bb, insn);
1564 if (REG_P (XEXP (XEXP (note, 0), 0)))
1566 rtx reg = XEXP (XEXP (note, 0), 0);
1571 regno_last = regno_first = REGNO (reg);
/* For a hard register, mark every component register live.  */
1572 if (regno_first < FIRST_PSEUDO_REGISTER)
1574 += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
1575 for (i = regno_first; i <= regno_last; i++)
1576 regs_ever_live[i] = 1;
1581 /* The stack ptr is used (honorarily) by a CALL insn. */
/* NOTE(review): "®no_reg_rtx" looks mis-encoded; presumably
   "&regno_reg_rtx" -- verify against the original source.  */
1582 df_uses_record (dflow, ®no_reg_rtx[STACK_POINTER_REGNUM],
1583 DF_REF_REG_USE, bb, insn,
1586 if (dflow->flags & DF_HARD_REGS)
1590 /* Calls may also reference any of the global registers,
1591 so they are recorded as used. */
1592 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1594 df_uses_record (dflow, ®no_reg_rtx[i],
1595 DF_REF_REG_USE, bb, insn,
/* Registers invalidated by the call are recorded as may-clobber defs.  */
1597 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
1598 df_ref_record (dflow, regno_reg_rtx[ui], ®no_reg_rtx[ui], bb,
1599 insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER, false);
1603 /* Record the register uses. */
1604 df_uses_record (dflow, &PATTERN (insn),
1605 DF_REF_REG_USE, bb, insn, 0);
/* Return true if any predecessor edge of BB is an EH edge (return
   statements elided from view).  */
1611 df_has_eh_preds (basic_block bb)
1616 FOR_EACH_EDGE (e, ei, bb->preds)
1618 if (e->flags & EDGE_EH)
1624 /* Record all the refs within the basic block BB. */
/* NOTE(review): elided view -- loop bodies and several braces are not
   visible between the lines below.  */
1627 df_bb_refs_record (struct dataflow *dflow, basic_block bb)
1629 struct df *df = dflow->df;
1632 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);
/* Registers collected here become artificial uses at the bottom of BB,
   emitted in one batch at the end of this function.  */
1633 bitmap artificial_uses_at_bottom = NULL;
1635 if (dflow->flags & DF_HARD_REGS)
1636 artificial_uses_at_bottom = BITMAP_ALLOC (NULL);
1638 /* Need to make sure that there is a record in the basic block info. */
1641 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
1642 df_scan_set_bb_info (dflow, bb->index, bb_info);
1643 bb_info->artificial_defs = NULL;
1644 bb_info->artificial_uses = NULL;
1647 /* Scan the block an insn at a time from beginning to end. */
1648 FOR_BB_INSNS (bb, insn)
1650 df_insn_create_insn_record (dflow, insn);
1653 /* Record defs within INSN. */
1654 DF_INSN_LUID (df, insn) = luid++;
1655 df_insn_refs_record (dflow, bb, insn);
1657 DF_INSN_LUID (df, insn) = luid;
1660 #ifdef EH_RETURN_DATA_REGNO
1661 if ((dflow->flags & DF_HARD_REGS)
1662 && df_has_eh_preds (bb))
1665 /* Mark the registers that will contain data for the handler. */
1668 unsigned regno = EH_RETURN_DATA_REGNO (i);
1669 if (regno == INVALID_REGNUM)
/* NOTE(review): "®no_reg_rtx" looks mis-encoded; presumably
   "&regno_reg_rtx" -- verify against the original source.  */
1671 df_ref_record (dflow, regno_reg_rtx[regno], ®no_reg_rtx[regno],
1673 DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
1680 if ((dflow->flags & DF_HARD_REGS)
1681 && df_has_eh_preds (bb))
1685 /* This code is putting in a artificial ref for the use at the
1686 TOP of the block that receives the exception. It is too
1687 cumbersome to actually put the ref on the edge. We could
1688 either model this at the top of the receiver block or the
1689 bottom of the sender block.
1691 The bottom of the sender block is problematic because not all
1692 out-edges of the a block are eh-edges. However, it is true
1693 that all edges into a block are either eh-edges or none of
1694 them are eh-edges. Thus, we can model this at the top of the
1695 eh-receiver for all of the edges at once. */
1696 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1698 df_uses_record (dflow, ®no_reg_rtx[i],
1699 DF_REF_REG_USE, bb, NULL,
1700 DF_REF_ARTIFICIAL | DF_REF_AT_TOP)
1703 /* The following code (down thru the arg_pointer setting APPEARS
1704 to be necessary because there is nothing that actually
1705 describes what the exception handling code may actually need
1707 if (reload_completed)
1709 if (frame_pointer_needed)
1711 bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);
1712 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1713 bitmap_set_bit (artificial_uses_at_bottom, HARD_FRAME_POINTER_REGNUM);
1716 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1717 if (fixed_regs[ARG_POINTER_REGNUM])
1718 bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
/* Skip the fake ENTRY/EXIT blocks for the forced-live registers.  */
1723 if ((dflow->flags & DF_HARD_REGS)
1724 && bb->index >= NUM_FIXED_BLOCKS)
1726 /* Before reload, there are a few registers that must be forced
1727 live everywhere -- which might not already be the case for
1728 blocks within infinite loops. */
1729 if (!reload_completed)
1732 /* Any reference to any pseudo before reload is a potential
1733 reference of the frame pointer. */
1734 bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);
1736 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1737 /* Pseudos with argument area equivalences may require
1738 reloading via the argument pointer. */
1739 if (fixed_regs[ARG_POINTER_REGNUM])
1740 bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
1743 /* Any constant, or pseudo with constant equivalences, may
1744 require reloading from memory using the pic register. */
1745 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1746 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1747 bitmap_set_bit (artificial_uses_at_bottom, PIC_OFFSET_TABLE_REGNUM);
1749 /* The all-important stack pointer must always be live. */
1750 bitmap_set_bit (artificial_uses_at_bottom, STACK_POINTER_REGNUM);
/* Emit the batched bottom-of-block artificial uses and free the bitmap.  */
1753 if (dflow->flags & DF_HARD_REGS)
1758 EXECUTE_IF_SET_IN_BITMAP (artificial_uses_at_bottom, 0, regno, bi)
1760 df_uses_record (dflow, ®no_reg_rtx[regno],
1761 DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
1764 BITMAP_FREE (artificial_uses_at_bottom);
1769 /* Record all the refs in the basic blocks specified by BLOCKS. */
1772 df_refs_record (struct dataflow *dflow, bitmap blocks)
1774 unsigned int bb_index;
/* Scan every requested block, ... */
1777 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
1779 basic_block bb = BASIC_BLOCK (bb_index);
1780 df_bb_refs_record (dflow, bb);
/* ...then add the artificial refs of the fake exit/entry blocks when
   those are included in the request.  */
1783 if (bitmap_bit_p (blocks, EXIT_BLOCK))
1784 df_record_exit_block_uses (dflow);
1786 if (bitmap_bit_p (blocks, ENTRY_BLOCK))
1787 df_record_entry_block_defs (dflow);
1791 /*----------------------------------------------------------------------------
1792 Specialized hard register scanning functions.
1793 ----------------------------------------------------------------------------*/
1795 /* Mark a register in SET. Hard registers in large modes get all
1796 of their component registers set as well. */
/* Callback suitable for diddle_return_value; VSET is really a bitmap.  */
1799 df_mark_reg (rtx reg, void *vset)
1801 bitmap set = (bitmap) vset;
1802 int regno = REGNO (reg);
1804 gcc_assert (GET_MODE (reg) != BLKmode);
1806 bitmap_set_bit (set, regno);
/* For a hard register, also mark the remaining component registers
   (the loop construct between these lines is elided from view).  */
1807 if (regno < FIRST_PSEUDO_REGISTER)
1809 int n = hard_regno_nregs[regno][GET_MODE (reg)];
1811 bitmap_set_bit (set, regno + n);
1816 /* Record the (conservative) set of hard registers that are defined on
1817 entry to the function. */
/* NOTE(review): elided view -- early-return and several braces/#else
   lines are not visible.  Rebuilds df->entry_block_defs, then emits an
   artificial def ref for each member.  */
1820 df_record_entry_block_defs (struct dataflow *dflow)
1825 struct df *df = dflow->df;
1827 bitmap_clear (df->entry_block_defs);
/* Nothing to do unless hard register tracking was requested.  */
1829 if (!(dflow->flags & DF_HARD_REGS))
1832 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1834 if (FUNCTION_ARG_REGNO_P (i))
1835 #ifdef INCOMING_REGNO
1836 bitmap_set_bit (df->entry_block_defs, INCOMING_REGNO (i));
1838 bitmap_set_bit (df->entry_block_defs, i);
1842 /* Once the prologue has been generated, all of these registers
1843 should just show up in the first regular block. */
1844 if (HAVE_prologue && epilogue_completed)
1846 /* Defs for the callee saved registers are inserted so that the
1847 pushes have some defining location. */
1848 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1849 if ((call_used_regs[i] == 0) && (regs_ever_live[i]))
1850 bitmap_set_bit (df->entry_block_defs, i);
1854 /* The always important stack pointer. */
1855 bitmap_set_bit (df->entry_block_defs, STACK_POINTER_REGNUM);
1857 #ifdef INCOMING_RETURN_ADDR_RTX
1858 if (REG_P (INCOMING_RETURN_ADDR_RTX))
1859 bitmap_set_bit (df->entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
1862 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
1863 only STATIC_CHAIN_REGNUM is defined. If they are different,
1864 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
1865 #ifdef STATIC_CHAIN_INCOMING_REGNUM
1866 bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
1868 #ifdef STATIC_CHAIN_REGNUM
1869 bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_REGNUM);
/* A register holding the incoming struct-return pointer counts as
   defined on entry as well.  */
1873 r = TARGET_STRUCT_VALUE_RTX (current_function_decl, true);
1875 bitmap_set_bit (df->entry_block_defs, REGNO (r));
1878 if ((!reload_completed) || frame_pointer_needed)
1880 /* Any reference to any pseudo before reload is a potential
1881 reference of the frame pointer. */
1882 bitmap_set_bit (df->entry_block_defs, FRAME_POINTER_REGNUM);
1883 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1884 /* If they are different, also mark the hard frame pointer as live. */
1885 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
1886 bitmap_set_bit (df->entry_block_defs, HARD_FRAME_POINTER_REGNUM);
1890 /* These registers are live everywhere. */
1891 if (!reload_completed)
1894 /* The ia-64, the only machine that uses this, does not define these
1895 until after reload. */
1896 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1899 bitmap_set_bit (df->entry_block_defs, i);
1903 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1904 /* Pseudos with argument area equivalences may require
1905 reloading via the argument pointer. */
1906 if (fixed_regs[ARG_POINTER_REGNUM])
1907 bitmap_set_bit (df->entry_block_defs, ARG_POINTER_REGNUM);
1910 #ifdef PIC_OFFSET_TABLE_REGNUM
1911 /* Any constant, or pseudo with constant equivalences, may
1912 require reloading from memory using the pic register. */
1913 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1914 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1915 bitmap_set_bit (df->entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
/* Let the target add any further live-on-entry registers.  */
1919 targetm.live_on_entry (df->entry_block_defs);
/* Finally emit one artificial def ref per collected register.
   NOTE(review): "®no_reg_rtx" looks mis-encoded; presumably
   "&regno_reg_rtx" -- verify against the original source.  */
1921 EXECUTE_IF_SET_IN_BITMAP (df->entry_block_defs, 0, i, bi)
1923 df_ref_record (dflow, regno_reg_rtx[i], ®no_reg_rtx[i],
1924 ENTRY_BLOCK_PTR, NULL,
1925 DF_REF_REG_DEF, DF_REF_ARTIFICIAL , false);
1930 /* Record the set of hard registers that are used in the exit block. */
/* NOTE(review): elided view -- early-return and several braces are not
   visible.  Rebuilds df->exit_block_uses, then emits an artificial use
   ref for each member.  */
1933 df_record_exit_block_uses (struct dataflow *dflow)
1937 struct df *df = dflow->df;
1939 bitmap_clear (df->exit_block_uses);
/* Nothing to do unless hard register tracking was requested.  */
1941 if (!(dflow->flags & DF_HARD_REGS))
1944 /* If exiting needs the right stack value, consider the stack
1945 pointer live at the end of the function. */
1946 if ((HAVE_epilogue && epilogue_completed)
1947 || !EXIT_IGNORE_STACK
1948 || (!FRAME_POINTER_REQUIRED
1949 && !current_function_calls_alloca
1950 && flag_omit_frame_pointer)
1951 || current_function_sp_is_unchanging)
1953 bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
1956 /* Mark the frame pointer if needed at the end of the function.
1957 If we end up eliminating it, it will be removed from the live
1958 list of each basic block by reload. */
1960 if ((!reload_completed) || frame_pointer_needed)
1962 bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
1963 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1964 /* If they are different, also mark the hard frame pointer as live. */
1965 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
1966 bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
1970 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
1971 /* Many architectures have a GP register even without flag_pic.
1972 Assume the pic register is not in use, or will be handled by
1973 other means, if it is not fixed. */
1974 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1975 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1976 bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
1979 /* Mark all global registers, and all registers used by the
1980 epilogue as being live at the end of the function since they
1981 may be referenced by our caller. */
1982 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1983 if (global_regs[i] || EPILOGUE_USES (i))
1984 bitmap_set_bit (df->exit_block_uses, i);
1986 if (HAVE_epilogue && epilogue_completed)
1988 /* Mark all call-saved registers that we actually used. */
1989 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1990 if (regs_ever_live[i] && !LOCAL_REGNO (i)
1991 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
1992 bitmap_set_bit (df->exit_block_uses, i);
1995 #ifdef EH_RETURN_DATA_REGNO
1996 /* Mark the registers that will contain data for the handler. */
1997 if (reload_completed && current_function_calls_eh_return)
2000 unsigned regno = EH_RETURN_DATA_REGNO (i);
2001 if (regno == INVALID_REGNUM)
2003 bitmap_set_bit (df->exit_block_uses, regno);
2007 #ifdef EH_RETURN_STACKADJ_RTX
/* Before the epilogue exists, the EH stack adjustment register (if it
   is a REG) is still live at exit.  */
2008 if ((!HAVE_epilogue || ! epilogue_completed)
2009 && current_function_calls_eh_return)
2011 rtx tmp = EH_RETURN_STACKADJ_RTX;
2012 if (tmp && REG_P (tmp))
2013 df_mark_reg (tmp, df->exit_block_uses);
2017 #ifdef EH_RETURN_HANDLER_RTX
2018 if ((!HAVE_epilogue || ! epilogue_completed)
2019 && current_function_calls_eh_return)
2021 rtx tmp = EH_RETURN_HANDLER_RTX;
2022 if (tmp && REG_P (tmp))
2023 df_mark_reg (tmp, df->exit_block_uses);
2027 /* Mark function return value. */
2028 diddle_return_value (df_mark_reg, (void*) df->exit_block_uses);
/* Finally emit one artificial use ref per collected register.
   NOTE(review): "®no_reg_rtx" looks mis-encoded; presumably
   "&regno_reg_rtx" -- verify against the original source.  */
2030 if (dflow->flags & DF_HARD_REGS)
2031 EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
2032 df_uses_record (dflow, ®no_reg_rtx[i],
2033 DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
/* Guards df_hard_reg_init so its one-time setup runs only once
   (the early-return that tests it is elided from view).  */
2037 static bool initialized = false;
2039 /* Initialize some platform specific structures. */
2042 df_hard_reg_init (void)
2045 #ifdef ELIMINABLE_REGS
2046 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2048 /* After reload, some ports add certain bits to regs_ever_live so
2049 this cannot be reset. */
2051 if (!reload_completed)
2052 memset (regs_ever_live, 0, sizeof (regs_ever_live));
/* Persistent obstack backs bitmaps that outlive a single function.  */
2057 bitmap_obstack_initialize (&persistent_obstack);
2059 /* Record which registers will be eliminated. We use this in
2061 CLEAR_HARD_REG_SET (elim_reg_set);
2063 #ifdef ELIMINABLE_REGS
2064 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2065 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2067 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2070 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
2072 /* Inconveniently, this is only readily available in hard reg set
/* Copy regs_invalidated_by_call (a HARD_REG_SET) into bitmap form so it
   can be combined with other bitmaps cheaply.  */
2074 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2075 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2076 bitmap_set_bit (df_invalidated_by_call, i);