1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
3 2008 Free Software Foundation, Inc.
4 Originally contributed by Michael P. Hayes
5 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
6 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
7 and Kenneth Zadeck (zadeck@naturalbridge.com).
9 This file is part of GCC.
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
31 #include "insn-config.h"
36 #include "alloc-pool.h"
38 #include "hard-reg-set.h"
39 #include "basic-block.h"
45 #include "target-def.h"
47 #include "tree-pass.h"
50 #define HAVE_epilogue 0
53 #define HAVE_prologue 0
55 #ifndef HAVE_sibcall_epilogue
56 #define HAVE_sibcall_epilogue 0
60 #define EPILOGUE_USES(REGNO) 0
63 /* The bitmap_obstack is used to hold some static variables that
64 should not be reset after each function is compiled. */
66 static bitmap_obstack persistent_obstack;
68 /* The set of hard registers in eliminables[i].from. */
70 static HARD_REG_SET elim_reg_set;
72 /* This is a bitmap copy of regs_invalidated_by_call so that we can
73 easily add it into bitmaps, etc. */
75 bitmap df_invalidated_by_call = NULL;
77 /* Initialize ur_in and ur_out as if all hard registers were partially
80 struct df_collection_rec
82 struct df_ref ** def_vec;
83 unsigned int next_def;
84 struct df_ref ** use_vec;
85 unsigned int next_use;
86 struct df_ref ** eq_use_vec;
87 unsigned int next_eq_use;
88 struct df_mw_hardreg **mw_vec;
92 static struct df_ref * df_null_ref_rec[1];
93 static struct df_mw_hardreg * df_null_mw_rec[1];
95 static void df_ref_record (struct df_collection_rec *,
97 basic_block, rtx, enum df_ref_type,
98 enum df_ref_flags, int, int, enum machine_mode);
99 static void df_def_record_1 (struct df_collection_rec *,
100 rtx, basic_block, rtx,
102 static void df_defs_record (struct df_collection_rec *,
103 rtx, basic_block, rtx,
105 static void df_uses_record (struct df_collection_rec *,
106 rtx *, enum df_ref_type,
107 basic_block, rtx, enum df_ref_flags,
108 int, int, enum machine_mode);
110 static struct df_ref *df_ref_create_structure (struct df_collection_rec *, rtx, rtx *,
111 basic_block, rtx, enum df_ref_type,
113 int, int, enum machine_mode);
115 static void df_insn_refs_collect (struct df_collection_rec*,
117 static void df_canonize_collection_rec (struct df_collection_rec *);
119 static void df_get_regular_block_artificial_uses (bitmap);
120 static void df_get_eh_block_artificial_uses (bitmap);
122 static void df_record_entry_block_defs (bitmap);
123 static void df_record_exit_block_uses (bitmap);
124 static void df_get_exit_block_use_set (bitmap);
125 static void df_get_entry_block_def_set (bitmap);
126 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
127 static void df_ref_chain_delete_du_chain (struct df_ref **);
128 static void df_ref_chain_delete (struct df_ref **);
130 static void df_refs_add_to_chains (struct df_collection_rec *,
133 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
134 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
135 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
136 static void df_install_ref (struct df_ref *, struct df_reg_info *,
137 struct df_ref_info *, bool);
139 static int df_ref_compare (const void *, const void *);
140 static int df_mw_compare (const void *, const void *);
142 /* Indexed by hardware reg number, is true if that register is ever
143 used in the current function.
145 In df-scan.c, this is set up to record the hard regs used
146 explicitly. Reload adds in the hard regs used for holding pseudo
147 regs. Final uses it to generate the code in the function prologue
148 and epilogue to save and restore registers as needed. */
150 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
152 /*----------------------------------------------------------------------------
153 SCANNING DATAFLOW PROBLEM
155 There are several ways in which scanning looks just like the other
156 dataflow problems. It shares the all the mechanisms for local info
157 as well as basic block info. Where it differs is when and how often
158 it gets run. It also has no need for the iterative solver.
159 ----------------------------------------------------------------------------*/
161 /* Problem data for the scanning dataflow function. */
162 struct df_scan_problem_data
165 alloc_pool ref_extract_pool;
166 alloc_pool insn_pool;
168 alloc_pool mw_reg_pool;
169 bitmap_obstack reg_bitmaps;
170 bitmap_obstack insn_bitmaps;
173 typedef struct df_scan_bb_info *df_scan_bb_info_t;
176 df_scan_free_internal (void)
178 struct df_scan_problem_data *problem_data
179 = (struct df_scan_problem_data *) df_scan->problem_data;
181 free (df->def_info.refs);
182 free (df->def_info.begin);
183 free (df->def_info.count);
184 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
186 free (df->use_info.refs);
187 free (df->use_info.begin);
188 free (df->use_info.count);
189 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
195 free (df->eq_use_regs);
196 df->eq_use_regs = NULL;
204 free (df_scan->block_info);
205 df_scan->block_info = NULL;
206 df_scan->block_info_size = 0;
208 BITMAP_FREE (df->hardware_regs_used);
209 BITMAP_FREE (df->regular_block_artificial_uses);
210 BITMAP_FREE (df->eh_block_artificial_uses);
211 BITMAP_FREE (df->entry_block_defs);
212 BITMAP_FREE (df->exit_block_uses);
213 BITMAP_FREE (df->insns_to_delete);
214 BITMAP_FREE (df->insns_to_rescan);
215 BITMAP_FREE (df->insns_to_notes_rescan);
217 free_alloc_pool (df_scan->block_pool);
218 free_alloc_pool (problem_data->ref_pool);
219 free_alloc_pool (problem_data->ref_extract_pool);
220 free_alloc_pool (problem_data->insn_pool);
221 free_alloc_pool (problem_data->reg_pool);
222 free_alloc_pool (problem_data->mw_reg_pool);
223 bitmap_obstack_release (&problem_data->reg_bitmaps);
224 bitmap_obstack_release (&problem_data->insn_bitmaps);
225 free (df_scan->problem_data);
229 /* Set basic block info. */
232 df_scan_set_bb_info (unsigned int index,
233 struct df_scan_bb_info *bb_info)
235 gcc_assert (df_scan);
236 df_grow_bb_info (df_scan);
237 df_scan->block_info[index] = (void *) bb_info;
241 /* Free basic block info. */
244 df_scan_free_bb_info (basic_block bb, void *vbb_info)
246 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
247 unsigned int bb_index = bb->index;
251 FOR_BB_INSNS (bb, insn)
254 /* Record defs within INSN. */
255 df_insn_delete (bb, INSN_UID (insn));
258 if (bb_index < df_scan->block_info_size)
259 bb_info = df_scan_get_bb_info (bb_index);
261 /* Get rid of any artificial uses or defs. */
262 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
263 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
264 df_ref_chain_delete (bb_info->artificial_defs);
265 df_ref_chain_delete (bb_info->artificial_uses);
266 bb_info->artificial_defs = NULL;
267 bb_info->artificial_uses = NULL;
268 pool_free (df_scan->block_pool, bb_info);
273 /* Allocate the problem data for the scanning problem. This should be
274 called when the problem is created or when the entire function is to
277 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
279 struct df_scan_problem_data *problem_data;
280 unsigned int insn_num = get_max_uid () + 1;
281 unsigned int block_size = 400;
284 /* Given the number of pools, this is really faster than tearing
286 if (df_scan->problem_data)
287 df_scan_free_internal ();
290 = create_alloc_pool ("df_scan_block pool",
291 sizeof (struct df_scan_bb_info),
294 problem_data = XNEW (struct df_scan_problem_data);
295 df_scan->problem_data = problem_data;
296 df_scan->computed = true;
298 problem_data->ref_pool
299 = create_alloc_pool ("df_scan_ref pool",
300 sizeof (struct df_ref), block_size);
301 problem_data->ref_extract_pool
302 = create_alloc_pool ("df_scan_ref extract pool",
303 sizeof (struct df_ref_extract), block_size);
304 problem_data->insn_pool
305 = create_alloc_pool ("df_scan_insn pool",
306 sizeof (struct df_insn_info), block_size);
307 problem_data->reg_pool
308 = create_alloc_pool ("df_scan_reg pool",
309 sizeof (struct df_reg_info), block_size);
310 problem_data->mw_reg_pool
311 = create_alloc_pool ("df_scan_mw_reg pool",
312 sizeof (struct df_mw_hardreg), block_size);
314 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
315 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
317 insn_num += insn_num / 4;
320 df_grow_insn_info ();
321 df_grow_bb_info (df_scan);
325 unsigned int bb_index = bb->index;
326 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
329 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
330 df_scan_set_bb_info (bb_index, bb_info);
332 bb_info->artificial_defs = NULL;
333 bb_info->artificial_uses = NULL;
336 df->hardware_regs_used = BITMAP_ALLOC (&problem_data->reg_bitmaps);
337 df->regular_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
338 df->eh_block_artificial_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
339 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
340 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
341 df->insns_to_delete = BITMAP_ALLOC (&problem_data->insn_bitmaps);
342 df->insns_to_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
343 df->insns_to_notes_rescan = BITMAP_ALLOC (&problem_data->insn_bitmaps);
344 df_scan->optional_p = false;
348 /* Free all of the data associated with the scan problem. */
353 if (df_scan->problem_data)
354 df_scan_free_internal ();
356 if (df->blocks_to_analyze)
358 BITMAP_FREE (df->blocks_to_analyze);
359 df->blocks_to_analyze = NULL;
365 /* Dump the preamble for DF_SCAN dump. */
367 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
371 fprintf (file, ";; invalidated by call \t");
372 df_print_regset (file, df_invalidated_by_call);
373 fprintf (file, ";; hardware regs used \t");
374 df_print_regset (file, df->hardware_regs_used);
375 fprintf (file, ";; regular block artificial uses \t");
376 df_print_regset (file, df->regular_block_artificial_uses);
377 fprintf (file, ";; eh block artificial uses \t");
378 df_print_regset (file, df->eh_block_artificial_uses);
379 fprintf (file, ";; entry block defs \t");
380 df_print_regset (file, df->entry_block_defs);
381 fprintf (file, ";; exit block uses \t");
382 df_print_regset (file, df->exit_block_uses);
383 fprintf (file, ";; regs ever live \t");
384 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
385 if (df_regs_ever_live_p (i))
386 fprintf (file, " %d[%s]", i, reg_names[i]);
388 fprintf (file, "\n");
391 /* Dump the bb_info for a given basic block. */
393 df_scan_start_block (basic_block bb, FILE *file)
395 struct df_scan_bb_info *bb_info
396 = df_scan_get_bb_info (bb->index);
400 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
401 df_refs_chain_dump (bb_info->artificial_defs, true, file);
402 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
403 df_refs_chain_dump (bb_info->artificial_uses, true, file);
404 fprintf (file, "\n");
409 FOR_BB_INSNS (bb, insn)
411 df_insn_debug (insn, false, file);
416 static struct df_problem problem_SCAN =
418 DF_SCAN, /* Problem id. */
419 DF_NONE, /* Direction. */
420 df_scan_alloc, /* Allocate the problem specific data. */
421 NULL, /* Reset global information. */
422 df_scan_free_bb_info, /* Free basic block info. */
423 NULL, /* Local compute function. */
424 NULL, /* Init the solution specific data. */
425 NULL, /* Iterative solver. */
426 NULL, /* Confluence operator 0. */
427 NULL, /* Confluence operator n. */
428 NULL, /* Transfer function. */
429 NULL, /* Finalize function. */
430 df_scan_free, /* Free all of the problem information. */
431 NULL, /* Remove this problem from the stack of dataflow problems. */
432 df_scan_start_dump, /* Debugging. */
433 df_scan_start_block, /* Debugging start block. */
434 NULL, /* Debugging end block. */
435 NULL, /* Incremental solution verify start. */
436 NULL, /* Incremental solution verify end. */
437 NULL, /* Dependent problem. */
438 TV_DF_SCAN, /* Timing variable. */
439 false /* Reset blocks on dropping out of blocks_to_analyze. */
443 /* Create a new DATAFLOW instance and add it to an existing instance
444 of DF. The returned structure is what is used to get at the
448 df_scan_add_problem (void)
450 df_add_problem (&problem_SCAN);
454 /*----------------------------------------------------------------------------
455 Storage Allocation Utilities
456 ----------------------------------------------------------------------------*/
459 /* First, grow the reg_info information. If the current size is less than
460 the number of pseudos, grow to 25% more than the number of
463 Second, assure that all of the slots up to max_reg_num have been
464 filled with reg_info structures. */
467 df_grow_reg_info (void)
469 unsigned int max_reg = max_reg_num ();
470 unsigned int new_size = max_reg;
471 struct df_scan_problem_data *problem_data
472 = (struct df_scan_problem_data *) df_scan->problem_data;
475 if (df->regs_size < new_size)
477 new_size += new_size / 4;
478 df->def_regs = xrealloc (df->def_regs,
479 new_size *sizeof (struct df_reg_info*));
480 df->use_regs = xrealloc (df->use_regs,
481 new_size *sizeof (struct df_reg_info*));
482 df->eq_use_regs = xrealloc (df->eq_use_regs,
483 new_size *sizeof (struct df_reg_info*));
484 df->def_info.begin = xrealloc (df->def_info.begin,
485 new_size *sizeof (int));
486 df->def_info.count = xrealloc (df->def_info.count,
487 new_size *sizeof (int));
488 df->use_info.begin = xrealloc (df->use_info.begin,
489 new_size *sizeof (int));
490 df->use_info.count = xrealloc (df->use_info.count,
491 new_size *sizeof (int));
492 df->regs_size = new_size;
495 for (i = df->regs_inited; i < max_reg; i++)
497 struct df_reg_info *reg_info;
499 reg_info = pool_alloc (problem_data->reg_pool);
500 memset (reg_info, 0, sizeof (struct df_reg_info));
501 df->def_regs[i] = reg_info;
502 reg_info = pool_alloc (problem_data->reg_pool);
503 memset (reg_info, 0, sizeof (struct df_reg_info));
504 df->use_regs[i] = reg_info;
505 reg_info = pool_alloc (problem_data->reg_pool);
506 memset (reg_info, 0, sizeof (struct df_reg_info));
507 df->eq_use_regs[i] = reg_info;
508 df->def_info.begin[i] = 0;
509 df->def_info.count[i] = 0;
510 df->use_info.begin[i] = 0;
511 df->use_info.count[i] = 0;
514 df->regs_inited = max_reg;
518 /* Grow the ref information. */
521 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
523 if (ref_info->refs_size < new_size)
525 ref_info->refs = xrealloc (ref_info->refs,
526 new_size *sizeof (struct df_ref *));
527 memset (ref_info->refs + ref_info->refs_size, 0,
528 (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
529 ref_info->refs_size = new_size;
534 /* Check and grow the ref information if necessary. This routine
535 guarantees total_size + BITMAP_ADDEND amount of entries in refs
536 array. It updates ref_info->refs_size only and does not change
537 ref_info->total_size. */
540 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
541 unsigned bitmap_addend)
543 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
545 int new_size = ref_info->total_size + bitmap_addend;
546 new_size += ref_info->total_size / 4;
547 df_grow_ref_info (ref_info, new_size);
552 /* Grow the ref information. If the current size is less than the
553 number of instructions, grow to 25% more than the number of
557 df_grow_insn_info (void)
559 unsigned int new_size = get_max_uid () + 1;
560 if (DF_INSN_SIZE () < new_size)
562 new_size += new_size / 4;
563 df->insns = xrealloc (df->insns,
564 new_size *sizeof (struct df_insn_info *));
565 memset (df->insns + df->insns_size, 0,
566 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
567 DF_INSN_SIZE () = new_size;
574 /*----------------------------------------------------------------------------
575 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
576 ----------------------------------------------------------------------------*/
578 /* Rescan all of the block_to_analyze or all of the blocks in the
579 function if df_set_blocks if blocks_to_analyze is NULL; */
582 df_scan_blocks (void)
586 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
587 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
589 df_get_regular_block_artificial_uses (df->regular_block_artificial_uses);
590 df_get_eh_block_artificial_uses (df->eh_block_artificial_uses);
592 bitmap_ior_into (df->eh_block_artificial_uses,
593 df->regular_block_artificial_uses);
595 /* ENTRY and EXIT blocks have special defs/uses. */
596 df_get_entry_block_def_set (df->entry_block_defs);
597 df_record_entry_block_defs (df->entry_block_defs);
598 df_get_exit_block_use_set (df->exit_block_uses);
599 df_record_exit_block_uses (df->exit_block_uses);
600 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
601 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
606 unsigned int bb_index = bb->index;
607 df_bb_refs_record (bb_index, true);
612 /* Create a new ref of type DF_REF_TYPE for register REG at address
613 LOC within INSN of BB. This function is only used externally.
615 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
616 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
617 fields if they were constants. Otherwise they should be -1 if
618 those flags were set. */
621 df_ref_create (rtx reg, rtx *loc, rtx insn,
623 enum df_ref_type ref_type,
624 enum df_ref_flags ref_flags,
625 int width, int offset, enum machine_mode mode)
628 struct df_reg_info **reg_info;
629 struct df_ref_info *ref_info;
630 struct df_ref **ref_rec;
631 struct df_ref ***ref_rec_ptr;
632 unsigned int count = 0;
637 /* You cannot hack artificial refs. */
639 ref = df_ref_create_structure (NULL, reg, loc, bb, insn,
641 width, offset, mode);
643 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
645 reg_info = df->def_regs;
646 ref_info = &df->def_info;
647 ref_rec_ptr = &DF_INSN_DEFS (insn);
648 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
650 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
652 reg_info = df->eq_use_regs;
653 ref_info = &df->use_info;
654 ref_rec_ptr = &DF_INSN_EQ_USES (insn);
655 switch (ref_info->ref_order)
657 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
658 case DF_REF_ORDER_BY_REG_WITH_NOTES:
659 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
663 add_to_table = false;
669 reg_info = df->use_regs;
670 ref_info = &df->use_info;
671 ref_rec_ptr = &DF_INSN_USES (insn);
672 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
675 /* Do not add if ref is not in the right blocks. */
676 if (add_to_table && df->analyze_subset)
677 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
679 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
682 switch (ref_info->ref_order)
684 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
685 case DF_REF_ORDER_BY_REG_WITH_NOTES:
686 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
687 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
690 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
694 ref_rec = *ref_rec_ptr;
701 ref_rec = *ref_rec_ptr;
704 ref_rec = xrealloc (ref_rec, (count+2) * sizeof (struct df_ref*));
705 *ref_rec_ptr = ref_rec;
706 ref_rec[count] = ref;
707 ref_rec[count+1] = NULL;
708 qsort (ref_rec, count + 1, sizeof (struct df_ref *), df_ref_compare);
712 struct df_ref **ref_rec = XNEWVEC (struct df_ref*, 2);
715 *ref_rec_ptr = ref_rec;
721 fprintf (dump_file, "adding ref ");
722 df_ref_debug (ref, dump_file);
725 /* By adding the ref directly, df_insn_rescan my not find any
726 differences even though the block will have changed. So we need
727 to mark the block dirty ourselves. */
728 df_set_bb_dirty (bb);
735 /*----------------------------------------------------------------------------
736 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
737 ----------------------------------------------------------------------------*/
740 df_free_ref (struct df_ref *ref)
742 struct df_scan_problem_data *problem_data
743 = (struct df_scan_problem_data *) df_scan->problem_data;
745 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
746 pool_free (problem_data->ref_extract_pool, (struct df_ref_extract *)ref);
748 pool_free (problem_data->ref_pool, ref);
752 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
753 Also delete the def-use or use-def chain if it exists. */
756 df_reg_chain_unlink (struct df_ref *ref)
758 struct df_ref *next = DF_REF_NEXT_REG (ref);
759 struct df_ref *prev = DF_REF_PREV_REG (ref);
760 int id = DF_REF_ID (ref);
761 struct df_reg_info *reg_info;
762 struct df_ref **refs = NULL;
764 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
766 reg_info = DF_REG_DEF_GET (DF_REF_REGNO (ref));
767 refs = df->def_info.refs;
771 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
773 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
774 switch (df->use_info.ref_order)
776 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
777 case DF_REF_ORDER_BY_REG_WITH_NOTES:
778 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
779 refs = df->use_info.refs;
787 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
788 refs = df->use_info.refs;
794 if (df->analyze_subset)
796 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BB (ref)->index))
803 /* Delete any def-use or use-def chains that start here. It is
804 possible that there is trash in this field. This happens for
805 insns that have been deleted when rescanning has been deferred
806 and the chain problem has also been deleted. The chain tear down
807 code skips deleted insns. */
808 if (df_chain && DF_REF_CHAIN (ref))
809 df_chain_unlink (ref);
812 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
814 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
815 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
818 /* Unlink from the reg chain. If there is no prev, this is the
819 first of the list. If not, just join the next and prev. */
821 DF_REF_NEXT_REG (prev) = next;
824 gcc_assert (reg_info->reg_chain == ref);
825 reg_info->reg_chain = next;
828 DF_REF_PREV_REG (next) = prev;
834 /* Remove REF from VEC. */
837 df_ref_compress_rec (struct df_ref ***vec_ptr, struct df_ref *ref)
839 struct df_ref **vec = *vec_ptr;
843 while (*vec && *vec != ref)
855 *vec_ptr = df_null_ref_rec;
860 /* Unlink REF from all def-use/use-def chains, etc. */
863 df_ref_remove (struct df_ref *ref)
868 fprintf (dump_file, "removing ref ");
869 df_ref_debug (ref, dump_file);
873 if (DF_REF_REG_DEF_P (ref))
875 if (DF_REF_IS_ARTIFICIAL (ref))
877 struct df_scan_bb_info *bb_info
878 = df_scan_get_bb_info (DF_REF_BB (ref)->index);
879 df_ref_compress_rec (&bb_info->artificial_defs, ref);
883 unsigned int uid = DF_REF_INSN_UID (ref);
884 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
885 df_ref_compress_rec (&insn_rec->defs, ref);
890 if (DF_REF_IS_ARTIFICIAL (ref))
892 struct df_scan_bb_info *bb_info
893 = df_scan_get_bb_info (DF_REF_BB (ref)->index);
894 df_ref_compress_rec (&bb_info->artificial_uses, ref);
898 unsigned int uid = DF_REF_INSN_UID (ref);
899 struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
901 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
902 df_ref_compress_rec (&insn_rec->eq_uses, ref);
904 df_ref_compress_rec (&insn_rec->uses, ref);
908 /* By deleting the ref directly, df_insn_rescan my not find any
909 differences even though the block will have changed. So we need
910 to mark the block dirty ourselves. */
911 df_set_bb_dirty (DF_REF_BB (ref));
912 df_reg_chain_unlink (ref);
916 /* Create the insn record for INSN. If there was one there, zero it
919 struct df_insn_info *
920 df_insn_create_insn_record (rtx insn)
922 struct df_scan_problem_data *problem_data
923 = (struct df_scan_problem_data *) df_scan->problem_data;
924 struct df_insn_info *insn_rec;
926 df_grow_insn_info ();
927 insn_rec = DF_INSN_GET (insn);
930 insn_rec = pool_alloc (problem_data->insn_pool);
931 DF_INSN_SET (insn, insn_rec);
933 memset (insn_rec, 0, sizeof (struct df_insn_info));
934 insn_rec->insn = insn;
939 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
942 df_ref_chain_delete_du_chain (struct df_ref **ref_rec)
946 struct df_ref *ref = *ref_rec;
947 /* CHAIN is allocated by DF_CHAIN. So make sure to
948 pass df_scan instance for the problem. */
949 if (DF_REF_CHAIN (ref))
950 df_chain_unlink (ref);
956 /* Delete all refs in the ref chain. */
959 df_ref_chain_delete (struct df_ref **ref_rec)
961 struct df_ref **start = ref_rec;
964 df_reg_chain_unlink (*ref_rec);
968 /* If the list is empty, it has a special shared element that is not
975 /* Delete the hardreg chain. */
978 df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
980 struct df_scan_problem_data *problem_data;
985 problem_data = (struct df_scan_problem_data *) df_scan->problem_data;
989 pool_free (problem_data->mw_reg_pool, *hardregs);
995 /* Delete all of the refs information from INSN. BB must be passed in
996 except when called from df_process_deferred_rescans to mark the block
1000 df_insn_delete (basic_block bb, unsigned int uid)
1002 struct df_insn_info *insn_info = NULL;
1006 df_grow_bb_info (df_scan);
1007 df_grow_reg_info ();
1009 /* The block must be marked as dirty now, rather than later as in
1010 df_insn_rescan and df_notes_rescan because it may not be there at
1011 rescanning time and the mark would blow up. */
1013 df_set_bb_dirty (bb);
1015 insn_info = DF_INSN_UID_SAFE_GET (uid);
1017 /* The client has deferred rescanning. */
1018 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1022 bitmap_clear_bit (df->insns_to_rescan, uid);
1023 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1024 bitmap_set_bit (df->insns_to_delete, uid);
1027 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
1032 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
1034 bitmap_clear_bit (df->insns_to_delete, uid);
1035 bitmap_clear_bit (df->insns_to_rescan, uid);
1036 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1039 struct df_scan_problem_data *problem_data
1040 = (struct df_scan_problem_data *) df_scan->problem_data;
1042 /* In general, notes do not have the insn_info fields
1043 initialized. However, combine deletes insns by changing them
1044 to notes. How clever. So we cannot just check if it is a
1045 valid insn before short circuiting this code, we need to see
1046 if we actually initialized it. */
1047 if (insn_info->defs)
1049 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1053 df_ref_chain_delete_du_chain (insn_info->defs);
1054 df_ref_chain_delete_du_chain (insn_info->uses);
1055 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1058 df_ref_chain_delete (insn_info->defs);
1059 df_ref_chain_delete (insn_info->uses);
1060 df_ref_chain_delete (insn_info->eq_uses);
1062 pool_free (problem_data->insn_pool, insn_info);
1063 DF_INSN_UID_SET (uid, NULL);
1068 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
1071 df_free_collection_rec (struct df_collection_rec *collection_rec)
1073 struct df_scan_problem_data *problem_data
1074 = (struct df_scan_problem_data *) df_scan->problem_data;
1075 struct df_ref **ref;
1076 struct df_mw_hardreg **mw;
1078 if (collection_rec->def_vec)
1079 for (ref = collection_rec->def_vec; *ref; ref++)
1081 if (collection_rec->use_vec)
1082 for (ref = collection_rec->use_vec; *ref; ref++)
1084 if (collection_rec->eq_use_vec)
1085 for (ref = collection_rec->eq_use_vec; *ref; ref++)
1087 if (collection_rec->mw_vec)
1088 for (mw = collection_rec->mw_vec; *mw; mw++)
1089 pool_free (problem_data->mw_reg_pool, *mw);
1093 /* Rescan INSN. Return TRUE if the rescanning produced any changes. */
1096 df_insn_rescan (rtx insn)
1098 unsigned int uid = INSN_UID (insn);
1099 struct df_insn_info *insn_info = NULL;
1100 basic_block bb = BLOCK_FOR_INSN (insn);
1101 struct df_collection_rec collection_rec;
1102 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
1103 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
1104 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
1105 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
1107 if ((!df) || (!INSN_P (insn)))
1113 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1117 /* The client has disabled rescanning and plans to do it itself. */
1118 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1121 df_grow_bb_info (df_scan);
1122 df_grow_reg_info ();
1124 insn_info = DF_INSN_UID_SAFE_GET (uid);
1126 /* The client has deferred rescanning. */
1127 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1131 insn_info = df_insn_create_insn_record (insn);
1132 insn_info->defs = df_null_ref_rec;
1133 insn_info->uses = df_null_ref_rec;
1134 insn_info->eq_uses = df_null_ref_rec;
1135 insn_info->mw_hardregs = df_null_mw_rec;
1138 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
1140 bitmap_clear_bit (df->insns_to_delete, uid);
1141 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1142 bitmap_set_bit (df->insns_to_rescan, INSN_UID (insn));
1146 bitmap_clear_bit (df->insns_to_delete, uid);
1147 bitmap_clear_bit (df->insns_to_rescan, uid);
1148 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
1151 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1152 /* If there's no change, return false. */
1155 df_free_collection_rec (&collection_rec);
1157 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1161 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1163 /* There's change - we need to delete the existing info. */
1164 df_insn_delete (NULL, uid);
1165 df_insn_create_insn_record (insn);
1169 df_insn_create_insn_record (insn);
1170 df_insn_refs_collect (&collection_rec, bb, insn);
1172 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
1175 df_refs_add_to_chains (&collection_rec, bb, insn);
1176 df_set_bb_dirty (bb);
1181 /* Rescan all of the insns in the function. Note that the artificial
1182 uses and defs are not touched. This function will destroy def-se
1183 or use-def chains. */
1186 df_insn_rescan_all (void)
1188 bool no_insn_rescan = false;
1189 bool defer_insn_rescan = false;
1193 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1195 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1197 df_clear_flags (DF_NO_INSN_RESCAN);
1198 no_insn_rescan = true;
1201 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1203 df_clear_flags (DF_DEFER_INSN_RESCAN);
1204 defer_insn_rescan = true;
1207 bitmap_copy (tmp, df->insns_to_delete);
1208 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1210 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1212 df_insn_delete (NULL, uid);
1216 bitmap_clear (df->insns_to_delete);
1217 bitmap_clear (df->insns_to_rescan);
1218 bitmap_clear (df->insns_to_notes_rescan);
1223 FOR_BB_INSNS (bb, insn)
1225 df_insn_rescan (insn);
1230 df_set_flags (DF_NO_INSN_RESCAN);
1231 if (defer_insn_rescan)
1232 df_set_flags (DF_DEFER_INSN_RESCAN);
1236 /* Process all of the deferred rescans or deletions. */
1239 df_process_deferred_rescans (void)
1241 bool no_insn_rescan = false;
1242 bool defer_insn_rescan = false;
1245 bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
1247 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1249 df_clear_flags (DF_NO_INSN_RESCAN);
1250 no_insn_rescan = true;
1253 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1255 df_clear_flags (DF_DEFER_INSN_RESCAN);
1256 defer_insn_rescan = true;
1260 fprintf (dump_file, "starting the processing of deferred insns\n");
1262 bitmap_copy (tmp, df->insns_to_delete);
1263 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1265 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1267 df_insn_delete (NULL, uid);
1270 bitmap_copy (tmp, df->insns_to_rescan);
1271 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1273 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1275 df_insn_rescan (insn_info->insn);
1278 bitmap_copy (tmp, df->insns_to_notes_rescan);
1279 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1281 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1283 df_notes_rescan (insn_info->insn);
1287 fprintf (dump_file, "ending the processing of deferred insns\n");
1290 bitmap_clear (df->insns_to_delete);
1291 bitmap_clear (df->insns_to_rescan);
1292 bitmap_clear (df->insns_to_notes_rescan);
1295 df_set_flags (DF_NO_INSN_RESCAN);
1296 if (defer_insn_rescan)
1297 df_set_flags (DF_DEFER_INSN_RESCAN);
1299 /* If someone changed regs_ever_live during this pass, fix up the
1300 entry and exit blocks. */
1301 if (df->redo_entry_and_exit)
1303 df_update_entry_exit_and_calls ();
1304 df->redo_entry_and_exit = false;
1309 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1310 the uses if INCLUDE_USES. Include the eq_uses if
1314 df_count_refs (bool include_defs, bool include_uses,
1315 bool include_eq_uses)
1319 unsigned int m = df->regs_inited;
1321 for (regno = 0; regno < m; regno++)
1324 size += DF_REG_DEF_COUNT (regno);
1326 size += DF_REG_USE_COUNT (regno);
1327 if (include_eq_uses)
1328 size += DF_REG_EQ_USE_COUNT (regno);
1334 /* Take build ref table for either the uses or defs from the reg-use
1335 or reg-def chains. This version processes the refs in reg order
1336 which is likely to be best if processing the whole function. */
1339 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1342 bool include_eq_uses)
1344 unsigned int m = df->regs_inited;
1346 unsigned int offset = 0;
1349 if (df->changeable_flags & DF_NO_HARD_REGS)
1351 start = FIRST_PSEUDO_REGISTER;
1352 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1353 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1358 ref_info->total_size
1359 = df_count_refs (include_defs, include_uses, include_eq_uses);
1361 df_check_and_grow_ref_info (ref_info, 1);
1363 for (regno = start; regno < m; regno++)
1366 ref_info->begin[regno] = offset;
1369 struct df_ref *ref = DF_REG_DEF_CHAIN (regno);
1372 ref_info->refs[offset] = ref;
1373 DF_REF_ID (ref) = offset++;
1375 ref = DF_REF_NEXT_REG (ref);
1376 gcc_assert (offset < ref_info->refs_size);
1381 struct df_ref *ref = DF_REG_USE_CHAIN (regno);
1384 ref_info->refs[offset] = ref;
1385 DF_REF_ID (ref) = offset++;
1387 ref = DF_REF_NEXT_REG (ref);
1388 gcc_assert (offset < ref_info->refs_size);
1391 if (include_eq_uses)
1393 struct df_ref *ref = DF_REG_EQ_USE_CHAIN (regno);
1396 ref_info->refs[offset] = ref;
1397 DF_REF_ID (ref) = offset++;
1399 ref = DF_REF_NEXT_REG (ref);
1400 gcc_assert (offset < ref_info->refs_size);
1403 ref_info->count[regno] = count;
1406 /* The bitmap size is not decremented when refs are deleted. So
1407 reset it now that we have squished out all of the empty
1409 ref_info->table_size = offset;
1413 /* Take build ref table for either the uses or defs from the reg-use
1414 or reg-def chains. This version processes the refs in insn order
1415 which is likely to be best if processing some segment of the
1419 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1422 bool include_eq_uses)
1425 unsigned int bb_index;
1426 unsigned int m = df->regs_inited;
1427 unsigned int offset = 0;
1430 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1432 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1433 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1435 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1436 df_check_and_grow_ref_info (ref_info, 1);
1438 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1440 basic_block bb = BASIC_BLOCK (bb_index);
1442 struct df_ref **ref_rec;
1445 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1447 unsigned int regno = DF_REF_REGNO (*ref_rec);
1448 ref_info->count[regno]++;
1451 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1453 unsigned int regno = DF_REF_REGNO (*ref_rec);
1454 ref_info->count[regno]++;
1457 FOR_BB_INSNS (bb, insn)
1461 unsigned int uid = INSN_UID (insn);
1464 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1466 unsigned int regno = DF_REF_REGNO (*ref_rec);
1467 ref_info->count[regno]++;
1470 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1472 unsigned int regno = DF_REF_REGNO (*ref_rec);
1473 ref_info->count[regno]++;
1475 if (include_eq_uses)
1476 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1478 unsigned int regno = DF_REF_REGNO (*ref_rec);
1479 ref_info->count[regno]++;
1485 for (r = start; r < m; r++)
1487 ref_info->begin[r] = offset;
1488 offset += ref_info->count[r];
1489 ref_info->count[r] = 0;
1492 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1494 basic_block bb = BASIC_BLOCK (bb_index);
1496 struct df_ref **ref_rec;
1499 for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
1501 struct df_ref *ref = *ref_rec;
1502 unsigned int regno = DF_REF_REGNO (ref);
1506 = ref_info->begin[regno] + ref_info->count[regno]++;
1507 DF_REF_ID (ref) = id;
1508 ref_info->refs[id] = ref;
1512 for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
1514 struct df_ref *ref = *ref_rec;
1515 unsigned int regno = DF_REF_REGNO (ref);
1519 = ref_info->begin[regno] + ref_info->count[regno]++;
1520 DF_REF_ID (ref) = id;
1521 ref_info->refs[id] = ref;
1525 FOR_BB_INSNS (bb, insn)
1529 unsigned int uid = INSN_UID (insn);
1532 for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
1534 struct df_ref *ref = *ref_rec;
1535 unsigned int regno = DF_REF_REGNO (ref);
1539 = ref_info->begin[regno] + ref_info->count[regno]++;
1540 DF_REF_ID (ref) = id;
1541 ref_info->refs[id] = ref;
1545 for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
1547 struct df_ref *ref = *ref_rec;
1548 unsigned int regno = DF_REF_REGNO (ref);
1552 = ref_info->begin[regno] + ref_info->count[regno]++;
1553 DF_REF_ID (ref) = id;
1554 ref_info->refs[id] = ref;
1557 if (include_eq_uses)
1558 for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
1560 struct df_ref *ref = *ref_rec;
1561 unsigned int regno = DF_REF_REGNO (ref);
1565 = ref_info->begin[regno] + ref_info->count[regno]++;
1566 DF_REF_ID (ref) = id;
1567 ref_info->refs[id] = ref;
1574 /* The bitmap size is not decremented when refs are deleted. So
1575 reset it now that we have squished out all of the empty
1578 ref_info->table_size = offset;
1581 /* Take build ref table for either the uses or defs from the reg-use
1582 or reg-def chains. */
1585 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1588 bool include_eq_uses)
1590 if (df->analyze_subset)
1591 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1592 include_uses, include_eq_uses);
1594 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1595 include_uses, include_eq_uses);
1599 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1601 df_add_refs_to_table (unsigned int offset,
1602 struct df_ref_info *ref_info,
1603 struct df_ref **ref_vec)
1607 struct df_ref *ref = *ref_vec;
1608 if ((!(df->changeable_flags & DF_NO_HARD_REGS))
1609 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1611 ref_info->refs[offset] = ref;
1612 DF_REF_ID (*ref_vec) = offset++;
1620 /* Count the number of refs in all of the insns of BB. Include the
1621 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1622 eq_uses if INCLUDE_EQ_USES. */
1625 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1626 struct df_ref_info *ref_info,
1627 bool include_defs, bool include_uses,
1628 bool include_eq_uses)
1633 offset = df_add_refs_to_table (offset, ref_info,
1634 df_get_artificial_defs (bb->index));
1636 offset = df_add_refs_to_table (offset, ref_info,
1637 df_get_artificial_uses (bb->index));
1639 FOR_BB_INSNS (bb, insn)
1642 unsigned int uid = INSN_UID (insn);
1644 offset = df_add_refs_to_table (offset, ref_info,
1645 DF_INSN_UID_DEFS (uid));
1647 offset = df_add_refs_to_table (offset, ref_info,
1648 DF_INSN_UID_USES (uid));
1649 if (include_eq_uses)
1650 offset = df_add_refs_to_table (offset, ref_info,
1651 DF_INSN_UID_EQ_USES (uid));
1657 /* Organize the refs by insn into the table in REF_INFO. If
1658 blocks_to_analyze is defined, use that set, otherwise the entire
1659 program. Include the defs if INCLUDE_DEFS. Include the uses if
1660 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1663 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1664 bool include_defs, bool include_uses,
1665 bool include_eq_uses)
1668 unsigned int offset = 0;
1670 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1671 df_check_and_grow_ref_info (ref_info, 1);
1672 if (df->blocks_to_analyze)
1677 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1679 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
1680 include_defs, include_uses,
1684 ref_info->table_size = offset;
1689 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1690 include_defs, include_uses,
1692 ref_info->table_size = offset;
1697 /* If the use refs in DF are not organized, reorganize them. */
1700 df_maybe_reorganize_use_refs (enum df_ref_order order)
1702 if (order == df->use_info.ref_order)
1707 case DF_REF_ORDER_BY_REG:
1708 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1711 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1712 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1715 case DF_REF_ORDER_BY_INSN:
1716 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1719 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1720 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1723 case DF_REF_ORDER_NO_TABLE:
1724 free (df->use_info.refs);
1725 df->use_info.refs = NULL;
1726 df->use_info.refs_size = 0;
1729 case DF_REF_ORDER_UNORDERED:
1730 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1735 df->use_info.ref_order = order;
1739 /* If the def refs in DF are not organized, reorganize them. */
1742 df_maybe_reorganize_def_refs (enum df_ref_order order)
1744 if (order == df->def_info.ref_order)
1749 case DF_REF_ORDER_BY_REG:
1750 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1753 case DF_REF_ORDER_BY_INSN:
1754 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1757 case DF_REF_ORDER_NO_TABLE:
1758 free (df->def_info.refs);
1759 df->def_info.refs = NULL;
1760 df->def_info.refs_size = 0;
1763 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1764 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1765 case DF_REF_ORDER_UNORDERED:
1766 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1771 df->def_info.ref_order = order;
1775 /* Change the BB of all refs in the ref chain from OLD_BB to NEW_BB.
1776 Assumes that all refs in the chain have the same BB. */
1779 df_ref_chain_change_bb (struct df_ref **ref_rec,
1785 struct df_ref *ref = *ref_rec;
1787 gcc_assert (DF_REF_BB (ref) == old_bb);
1788 DF_REF_BB (ref) = new_bb;
1794 /* Change all of the basic block references in INSN to use the insn's
1795 current basic block. This function is called from routines that move
1796 instructions from one block to another. */
1799 df_insn_change_bb (rtx insn, basic_block new_bb)
1801 basic_block old_bb = BLOCK_FOR_INSN (insn);
1802 struct df_insn_info *insn_info;
1803 unsigned int uid = INSN_UID (insn);
1805 if (old_bb == new_bb)
1808 set_block_for_insn (insn, new_bb);
1814 fprintf (dump_file, "changing bb of uid %d\n", uid);
1816 insn_info = DF_INSN_UID_SAFE_GET (uid);
1817 if (insn_info == NULL)
1820 fprintf (dump_file, " unscanned insn\n");
1821 df_insn_rescan (insn);
1828 df_ref_chain_change_bb (insn_info->defs, old_bb, new_bb);
1829 df_ref_chain_change_bb (insn_info->uses, old_bb, new_bb);
1830 df_ref_chain_change_bb (insn_info->eq_uses, old_bb, new_bb);
1832 df_set_bb_dirty (new_bb);
1836 fprintf (dump_file, " from %d to %d\n",
1837 old_bb->index, new_bb->index);
1838 df_set_bb_dirty (old_bb);
1842 fprintf (dump_file, " to %d\n", new_bb->index);
1846 /* Helper function for df_ref_change_reg_with_loc. */
1849 df_ref_change_reg_with_loc_1 (struct df_reg_info *old, struct df_reg_info *new,
1850 int new_regno, rtx loc)
1852 struct df_ref *the_ref = old->reg_chain;
1856 if (DF_REF_LOC(the_ref) && (*DF_REF_LOC(the_ref) == loc))
1858 struct df_ref *next_ref = the_ref->next_reg;
1859 struct df_ref *prev_ref = the_ref->prev_reg;
1860 struct df_ref **ref_vec, **ref_vec_t;
1861 unsigned int count = 0;
1863 DF_REF_REGNO (the_ref) = new_regno;
1864 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
1866 /* Pull the_ref out of the old regno chain. */
1868 prev_ref->next_reg = next_ref;
1870 old->reg_chain = next_ref;
1872 next_ref->prev_reg = prev_ref;
1875 /* Put the ref into the new regno chain. */
1876 the_ref->prev_reg = NULL;
1877 the_ref->next_reg = new->reg_chain;
1879 new->reg_chain->prev_reg = the_ref;
1880 new->reg_chain = the_ref;
1882 df_set_bb_dirty (DF_REF_BB (the_ref));
1884 /* Need to resort the record that the ref was in because the
1885 regno is a sorting key. First, find the right record. */
1886 if (DF_REF_IS_ARTIFICIAL (the_ref))
1888 unsigned int bb_index = DF_REF_BB (the_ref)->index;
1889 if (DF_REF_REG_DEF_P (the_ref))
1890 ref_vec = df_get_artificial_defs (bb_index);
1892 ref_vec = df_get_artificial_uses (bb_index);
1896 struct df_insn_info *insn_info
1897 = DF_INSN_GET (DF_REF_INSN (the_ref));
1898 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
1899 ref_vec = insn_info->eq_uses;
1901 ref_vec = insn_info->uses;
1903 fprintf (dump_file, "changing reg in insn %d\n",
1904 INSN_UID (DF_REF_INSN (the_ref)));
1906 ref_vec_t = ref_vec;
1908 /* Find the length. */
1914 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
1919 the_ref = the_ref->next_reg;
1924 /* Change the regno of all refs that contained LOC from OLD_REGNO to
1925 NEW_REGNO. Refs that do not match LOC are not changed. This call
1926 is to support the SET_REGNO macro. */
1929 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
1931 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
1934 df_grow_reg_info ();
1936 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
1937 DF_REG_DEF_GET (new_regno), new_regno, loc);
1938 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
1939 DF_REG_USE_GET (new_regno), new_regno, loc);
1940 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
1941 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
1945 /* Delete the mw_hardregs that point into the eq_notes. */
1948 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1950 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
1951 unsigned int deleted = 0;
1952 unsigned int count = 0;
1953 struct df_scan_problem_data *problem_data
1954 = (struct df_scan_problem_data *) df_scan->problem_data;
1961 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
1963 struct df_mw_hardreg **temp_vec = mw_vec;
1965 pool_free (problem_data->mw_reg_pool, *mw_vec);
1967 /* Shove the remaining ones down one to fill the gap. While
1968 this looks n**2, it is highly unusual to have any mw regs
1969 in eq_notes and the chances of more than one are almost
1973 *temp_vec = *(temp_vec + 1);
1987 free (insn_info->mw_hardregs);
1988 insn_info->mw_hardregs = df_null_mw_rec;
1995 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1998 df_notes_rescan (rtx insn)
2000 struct df_insn_info *insn_info;
2001 unsigned int uid = INSN_UID (insn);
2006 /* The client has disabled rescanning and plans to do it itself. */
2007 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2010 /* Do nothing if the insn hasn't been emitted yet. */
2011 if (!BLOCK_FOR_INSN (insn))
2014 df_grow_bb_info (df_scan);
2015 df_grow_reg_info ();
2017 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID(insn));
2019 /* The client has deferred rescanning. */
2020 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2024 insn_info = df_insn_create_insn_record (insn);
2025 insn_info->defs = df_null_ref_rec;
2026 insn_info->uses = df_null_ref_rec;
2027 insn_info->eq_uses = df_null_ref_rec;
2028 insn_info->mw_hardregs = df_null_mw_rec;
2031 bitmap_clear_bit (df->insns_to_delete, uid);
2032 /* If the insn is set to be rescanned, it does not need to also
2033 be notes rescanned. */
2034 if (!bitmap_bit_p (df->insns_to_rescan, uid))
2035 bitmap_set_bit (df->insns_to_notes_rescan, INSN_UID (insn));
2039 bitmap_clear_bit (df->insns_to_delete, uid);
2040 bitmap_clear_bit (df->insns_to_notes_rescan, uid);
2044 basic_block bb = BLOCK_FOR_INSN (insn);
2046 struct df_collection_rec collection_rec;
2047 unsigned int num_deleted;
2049 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2050 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
2051 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 1000);
2053 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2054 df_ref_chain_delete (insn_info->eq_uses);
2055 insn_info->eq_uses = NULL;
2057 /* Process REG_EQUIV/REG_EQUAL notes */
2058 for (note = REG_NOTES (insn); note;
2059 note = XEXP (note, 1))
2061 switch (REG_NOTE_KIND (note))
2065 df_uses_record (&collection_rec,
2066 &XEXP (note, 0), DF_REF_REG_USE,
2067 bb, insn, DF_REF_IN_NOTE, -1, -1, 0);
2073 /* Find some place to put any new mw_hardregs. */
2074 df_canonize_collection_rec (&collection_rec);
2075 if (collection_rec.next_mw)
2077 unsigned int count = 0;
2078 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2087 /* Append to the end of the existing record after
2088 expanding it if necessary. */
2089 if (collection_rec.next_mw > num_deleted)
2091 insn_info->mw_hardregs =
2092 xrealloc (insn_info->mw_hardregs,
2093 (count + 1 + collection_rec.next_mw)
2094 * sizeof (struct df_ref*));
2096 memcpy (&insn_info->mw_hardregs[count], collection_rec.mw_vec,
2097 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2098 qsort (insn_info->mw_hardregs, count + collection_rec.next_mw,
2099 sizeof (struct df_mw_hardreg *), df_mw_compare);
2103 /* No vector there. */
2104 insn_info->mw_hardregs
2105 = XNEWVEC (struct df_mw_hardreg*,
2106 count + 1 + collection_rec.next_mw);
2107 memcpy (insn_info->mw_hardregs, collection_rec.mw_vec,
2108 (collection_rec.next_mw + 1) * sizeof (struct df_mw_hardreg *));
2111 /* Get rid of the mw_rec so that df_refs_add_to_chains will
2113 collection_rec.mw_vec = NULL;
2114 collection_rec.next_mw = 0;
2115 df_refs_add_to_chains (&collection_rec, bb, insn);
2118 df_insn_rescan (insn);
2123 /*----------------------------------------------------------------------------
2124 Hard core instruction scanning code. No external interfaces here,
2125 just a lot of routines that look inside insns.
2126 ----------------------------------------------------------------------------*/
2129 /* Return true if the contents of two df_ref's are identical.
2130 It ignores DF_REF_MARKER. */
2133 df_ref_equal_p (struct df_ref *ref1, struct df_ref *ref2)
2138 /* The two flag tests here are only to make sure we do not look at
2139 the offset and width if they are not there. The flags are
2140 compared in the next set of tests. */
2141 if ((DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2142 && (DF_REF_FLAGS_IS_SET (ref2, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2143 && ((DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2144 || (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2145 || (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))))
2148 return (ref1 == ref2) ||
2149 (DF_REF_REG (ref1) == DF_REF_REG (ref2)
2150 && DF_REF_REGNO (ref1) == DF_REF_REGNO (ref2)
2151 && DF_REF_LOC (ref1) == DF_REF_LOC (ref2)
2152 && DF_REF_INSN (ref1) == DF_REF_INSN (ref2)
2153 && DF_REF_TYPE (ref1) == DF_REF_TYPE (ref2)
2154 && ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2155 == (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2156 && DF_REF_BB (ref1) == DF_REF_BB (ref2));
2160 /* Compare REF1 and REF2 for sorting. This is only called from places
2161 where all of the refs are of the same type, in the same insn, and
2162 have the same bb. So these fields are not checked. */
2165 df_ref_compare (const void *r1, const void *r2)
2167 const struct df_ref *const ref1 = *(const struct df_ref *const*)r1;
2168 const struct df_ref *const ref2 = *(const struct df_ref *const*)r2;
2173 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2174 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2176 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2177 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2179 if ((DF_REF_REG (ref1) != DF_REF_REG (ref2))
2180 || (DF_REF_LOC (ref1) != DF_REF_LOC (ref2)))
2181 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2183 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2185 /* If two refs are identical except that one of them has is from
2186 a mw and one is not, we need to have the one with the mw
2188 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2189 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2190 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2191 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2197 /* The flags are the same at this point so it is safe to only look
2199 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2201 if (DF_REF_EXTRACT_OFFSET (ref1) != DF_REF_EXTRACT_OFFSET (ref2))
2202 return DF_REF_EXTRACT_OFFSET (ref1) - DF_REF_EXTRACT_OFFSET (ref2);
2203 if (DF_REF_EXTRACT_WIDTH (ref1) != DF_REF_EXTRACT_WIDTH (ref2))
2204 return DF_REF_EXTRACT_WIDTH (ref1) - DF_REF_EXTRACT_WIDTH (ref2);
2205 if (DF_REF_EXTRACT_MODE (ref1) != DF_REF_EXTRACT_MODE (ref2))
2206 return DF_REF_EXTRACT_MODE (ref1) - DF_REF_EXTRACT_MODE (ref2);
2212 df_swap_refs (struct df_ref **ref_vec, int i, int j)
2214 struct df_ref *tmp = ref_vec[i];
2215 ref_vec[i] = ref_vec[j];
2219 /* Sort and compress a set of refs. */
2222 df_sort_and_compress_refs (struct df_ref **ref_vec, unsigned int count)
2225 unsigned int dist = 0;
2227 ref_vec[count] = NULL;
2228 /* If there are 1 or 0 elements, there is nothing to do. */
2231 else if (count == 2)
2233 if (df_ref_compare (&ref_vec[0], &ref_vec[1]) > 0)
2234 df_swap_refs (ref_vec, 0, 1);
2238 for (i = 0; i < count - 1; i++)
2239 if (df_ref_compare (&ref_vec[i], &ref_vec[i+1]) >= 0)
2241 /* If the array is already strictly ordered,
2242 which is the most common case for large COUNT case
2243 (which happens for CALL INSNs),
2244 no need to sort and filter out duplicate.
2245 Simply return the count.
2246 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2247 of DF_REF_COMPARE. */
2250 qsort (ref_vec, count, sizeof (struct df_ref *), df_ref_compare);
2253 for (i=0; i<count-dist; i++)
2255 /* Find the next ref that is not equal to the current ref. */
2256 while (df_ref_equal_p (ref_vec[i], ref_vec[i + dist + 1]))
2258 df_free_ref (ref_vec[i + dist + 1]);
2261 /* Copy it down to the next position. */
2263 ref_vec[i+1] = ref_vec[i + dist + 1];
2267 ref_vec[count] = NULL;
2272 /* Return true if the contents of two df_ref's are identical.
2273 It ignores DF_REF_MARKER. */
2276 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2280 return (mw1 == mw2) ||
2281 (mw1->mw_reg == mw2->mw_reg
2282 && mw1->type == mw2->type
2283 && mw1->flags == mw2->flags
2284 && mw1->start_regno == mw2->start_regno
2285 && mw1->end_regno == mw2->end_regno);
2289 /* Compare MW1 and MW2 for sorting. */
2292 df_mw_compare (const void *m1, const void *m2)
2294 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2295 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2300 if (mw1->type != mw2->type)
2301 return mw1->type - mw2->type;
2303 if (mw1->flags != mw2->flags)
2304 return mw1->flags - mw2->flags;
2306 if (mw1->start_regno != mw2->start_regno)
2307 return mw1->start_regno - mw2->start_regno;
2309 if (mw1->end_regno != mw2->end_regno)
2310 return mw1->end_regno - mw2->end_regno;
2312 if (mw1->mw_reg != mw2->mw_reg)
2313 return mw1->mw_order - mw2->mw_order;
2319 /* Sort and compress a set of refs. */
2322 df_sort_and_compress_mws (struct df_mw_hardreg **mw_vec, unsigned int count)
2324 struct df_scan_problem_data *problem_data
2325 = (struct df_scan_problem_data *) df_scan->problem_data;
2327 unsigned int dist = 0;
2328 mw_vec[count] = NULL;
2332 else if (count == 2)
2334 if (df_mw_compare (&mw_vec[0], &mw_vec[1]) > 0)
2336 struct df_mw_hardreg *tmp = mw_vec[0];
2337 mw_vec[0] = mw_vec[1];
2342 qsort (mw_vec, count, sizeof (struct df_mw_hardreg *), df_mw_compare);
2344 for (i=0; i<count-dist; i++)
2346 /* Find the next ref that is not equal to the current ref. */
2347 while (df_mw_equal_p (mw_vec[i], mw_vec[i + dist + 1]))
2349 pool_free (problem_data->mw_reg_pool, mw_vec[i + dist + 1]);
2352 /* Copy it down to the next position. */
2354 mw_vec[i+1] = mw_vec[i + dist + 1];
2358 mw_vec[count] = NULL;
2363 /* Sort and remove duplicates from the COLLECTION_REC. */
2366 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2368 if (collection_rec->def_vec)
2369 collection_rec->next_def
2370 = df_sort_and_compress_refs (collection_rec->def_vec,
2371 collection_rec->next_def);
2372 if (collection_rec->use_vec)
2373 collection_rec->next_use
2374 = df_sort_and_compress_refs (collection_rec->use_vec,
2375 collection_rec->next_use);
2376 if (collection_rec->eq_use_vec)
2377 collection_rec->next_eq_use
2378 = df_sort_and_compress_refs (collection_rec->eq_use_vec,
2379 collection_rec->next_eq_use);
2380 if (collection_rec->mw_vec)
2381 collection_rec->next_mw
2382 = df_sort_and_compress_mws (collection_rec->mw_vec,
2383 collection_rec->next_mw);
2387 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2390 df_install_ref (struct df_ref *this_ref,
2391 struct df_reg_info *reg_info,
2392 struct df_ref_info *ref_info,
2395 unsigned int regno = DF_REF_REGNO (this_ref);
2396 /* Add the ref to the reg_{def,use,eq_use} chain. */
2397 struct df_ref *head = reg_info->reg_chain;
2399 reg_info->reg_chain = this_ref;
2402 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2404 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2405 df->hard_regs_live_count[regno]++;
2408 gcc_assert (DF_REF_NEXT_REG (this_ref) == NULL);
2409 gcc_assert (DF_REF_PREV_REG (this_ref) == NULL);
2411 DF_REF_NEXT_REG (this_ref) = head;
2413 /* We cannot actually link to the head of the chain. */
2414 DF_REF_PREV_REG (this_ref) = NULL;
2417 DF_REF_PREV_REG (head) = this_ref;
2421 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2422 df_check_and_grow_ref_info (ref_info, 1);
2423 DF_REF_ID (this_ref) = ref_info->table_size;
2424 /* Add the ref to the big array of defs. */
2425 ref_info->refs[ref_info->table_size] = this_ref;
2426 ref_info->table_size++;
2429 DF_REF_ID (this_ref) = -1;
2431 ref_info->total_size++;
2435 /* This function takes one of the groups of refs (defs, uses or
2436 eq_uses) and installs the entire group into the insn. It also adds
2437 each of these refs into the appropriate chains. */
2439 static struct df_ref **
2440 df_install_refs (basic_block bb,
2441 struct df_ref **old_vec, unsigned int count,
2442 struct df_reg_info **reg_info,
2443 struct df_ref_info *ref_info,
2449 struct df_ref **new_vec = XNEWVEC (struct df_ref*, count + 1);
2452 switch (ref_info->ref_order)
2454 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2455 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2456 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2457 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2458 add_to_table = true;
2460 case DF_REF_ORDER_UNORDERED:
2461 case DF_REF_ORDER_BY_REG:
2462 case DF_REF_ORDER_BY_INSN:
2463 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2464 add_to_table = !is_notes;
2467 add_to_table = false;
2471 /* Do not add if ref is not in the right blocks. */
2472 if (add_to_table && df->analyze_subset)
2473 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2475 for (i = 0; i < count; i++)
2477 struct df_ref *this_ref = old_vec[i];
2478 new_vec[i] = this_ref;
2479 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2480 ref_info, add_to_table);
2483 new_vec[count] = NULL;
2487 return df_null_ref_rec;
2491 /* This function takes the mws installs the entire group into the
2494 static struct df_mw_hardreg **
2495 df_install_mws (struct df_mw_hardreg **old_vec, unsigned int count)
2499 struct df_mw_hardreg **new_vec
2500 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2501 memcpy (new_vec, old_vec,
2502 sizeof (struct df_mw_hardreg*) * (count + 1));
2506 return df_null_mw_rec;
2510 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2511 chains and update other necessary information. */
2514 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2515 basic_block bb, rtx insn)
2519 struct df_insn_info *insn_rec = DF_INSN_GET (insn);
2520 /* If there is a vector in the collection rec, add it to the
2521 insn. A null rec is a signal that the caller will handle the
2523 if (collection_rec->def_vec)
2525 if (insn_rec->defs && *insn_rec->defs)
2526 free (insn_rec->defs);
2528 = df_install_refs (bb, collection_rec->def_vec,
2529 collection_rec->next_def,
2531 &df->def_info, false);
2533 if (collection_rec->use_vec)
2535 if (insn_rec->uses && *insn_rec->uses)
2536 free (insn_rec->uses);
2538 = df_install_refs (bb, collection_rec->use_vec,
2539 collection_rec->next_use,
2541 &df->use_info, false);
2543 if (collection_rec->eq_use_vec)
2545 if (insn_rec->eq_uses && *insn_rec->eq_uses)
2546 free (insn_rec->eq_uses);
2548 = df_install_refs (bb, collection_rec->eq_use_vec,
2549 collection_rec->next_eq_use,
2551 &df->use_info, true);
2553 if (collection_rec->mw_vec)
2555 if (insn_rec->mw_hardregs && *insn_rec->mw_hardregs)
2556 free (insn_rec->mw_hardregs);
2557 insn_rec->mw_hardregs
2558 = df_install_mws (collection_rec->mw_vec,
2559 collection_rec->next_mw);
2564 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2566 if (bb_info->artificial_defs && *bb_info->artificial_defs)
2567 free (bb_info->artificial_defs);
2568 bb_info->artificial_defs
2569 = df_install_refs (bb, collection_rec->def_vec,
2570 collection_rec->next_def,
2572 &df->def_info, false);
2573 if (bb_info->artificial_uses && *bb_info->artificial_uses)
2574 free (bb_info->artificial_uses);
2575 bb_info->artificial_uses
2576 = df_install_refs (bb, collection_rec->use_vec,
2577 collection_rec->next_use,
2579 &df->use_info, false);
2584 /* Allocate a ref and initialize its fields.
2586 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
2587 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the fields
2588 if they were constants. Otherwise they should be -1 if those flags
2591 static struct df_ref *
2592 df_ref_create_structure (struct df_collection_rec *collection_rec,
2594 basic_block bb, rtx insn,
2595 enum df_ref_type ref_type,
2596 enum df_ref_flags ref_flags,
2597 int width, int offset, enum machine_mode mode)
2599 struct df_ref *this_ref;
2600 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2601 struct df_scan_problem_data *problem_data
2602 = (struct df_scan_problem_data *) df_scan->problem_data;
2604 if (ref_flags & (DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
2606 this_ref = pool_alloc (problem_data->ref_extract_pool);
2607 DF_REF_EXTRACT_WIDTH (this_ref) = width;
2608 DF_REF_EXTRACT_OFFSET (this_ref) = offset;
2609 DF_REF_EXTRACT_MODE (this_ref) = mode;
2612 this_ref = pool_alloc (problem_data->ref_pool);
2613 DF_REF_ID (this_ref) = -1;
2614 DF_REF_REG (this_ref) = reg;
2615 DF_REF_REGNO (this_ref) = regno;
2616 DF_REF_LOC (this_ref) = loc;
2617 DF_REF_INSN (this_ref) = insn;
2618 DF_REF_CHAIN (this_ref) = NULL;
2619 DF_REF_TYPE (this_ref) = ref_type;
2620 DF_REF_FLAGS (this_ref) = ref_flags;
2621 DF_REF_BB (this_ref) = bb;
2622 DF_REF_NEXT_REG (this_ref) = NULL;
2623 DF_REF_PREV_REG (this_ref) = NULL;
2624 DF_REF_ORDER (this_ref) = df->ref_order++;
2626 /* We need to clear this bit because fwprop, and in the future
2627 possibly other optimizations sometimes create new refs using ond
2628 refs as the model. */
2629 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2631 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2632 if ((regno < FIRST_PSEUDO_REGISTER)
2633 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2635 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2637 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2638 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2640 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2641 && (regno == FRAME_POINTER_REGNUM
2642 || regno == ARG_POINTER_REGNUM)))
2643 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2648 if (DF_REF_TYPE (this_ref) == DF_REF_REG_DEF)
2649 collection_rec->def_vec[collection_rec->next_def++] = this_ref;
2650 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2651 collection_rec->eq_use_vec[collection_rec->next_eq_use++] = this_ref;
2653 collection_rec->use_vec[collection_rec->next_use++] = this_ref;
2660 /* Create new references of type DF_REF_TYPE for each part of register REG
2661 at address LOC within INSN of BB.
2663 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
2664 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
2665 fields if they were constants. Otherwise they should be -1 if
2666 those flags were set. */
2670 df_ref_record (struct df_collection_rec *collection_rec,
2672 basic_block bb, rtx insn,
2673 enum df_ref_type ref_type,
2674 enum df_ref_flags ref_flags,
2675 int width, int offset, enum machine_mode mode)
2679 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2681 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2682 if (regno < FIRST_PSEUDO_REGISTER)
2684 struct df_mw_hardreg *hardreg = NULL;
2685 struct df_scan_problem_data *problem_data
2686 = (struct df_scan_problem_data *) df_scan->problem_data;
2688 unsigned int endregno;
2691 if (GET_CODE (reg) == SUBREG)
2693 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2694 SUBREG_BYTE (reg), GET_MODE (reg));
2695 endregno = regno + subreg_nregs (reg);
2698 endregno = END_HARD_REGNO (reg);
2700 /* If this is a multiword hardreg, we create some extra
2701 datastructures that will enable us to easily build REG_DEAD
2702 and REG_UNUSED notes. */
2703 if ((endregno != regno + 1) && insn)
2705 /* Sets to a subreg of a multiword register are partial.
2706 Sets to a non-subreg of a multiword register are not. */
2707 if (GET_CODE (reg) == SUBREG)
2708 ref_flags |= DF_REF_PARTIAL;
2709 ref_flags |= DF_REF_MW_HARDREG;
2711 hardreg = pool_alloc (problem_data->mw_reg_pool);
2712 hardreg->type = ref_type;
2713 hardreg->flags = ref_flags;
2714 hardreg->mw_reg = reg;
2715 hardreg->start_regno = regno;
2716 hardreg->end_regno = endregno - 1;
2717 hardreg->mw_order = df->ref_order++;
2718 collection_rec->mw_vec[collection_rec->next_mw++] = hardreg;
2721 for (i = regno; i < endregno; i++)
2723 ref = df_ref_create_structure (collection_rec, regno_reg_rtx[i], loc,
2724 bb, insn, ref_type, ref_flags,
2725 width, offset, mode);
2727 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2733 ref = df_ref_create_structure (collection_rec, reg, loc, bb, insn,
2734 ref_type, ref_flags, width, offset, mode);
2739 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2740 covered by the outer mode is smaller than that covered by the inner mode,
2741 is a read-modify-write operation.
2742 This function returns true iff the SUBREG X is such a SUBREG. */
2745 df_read_modify_subreg_p (rtx x)
2747 unsigned int isize, osize;
2748 if (GET_CODE (x) != SUBREG)
2750 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2751 osize = GET_MODE_SIZE (GET_MODE (x));
2752 return isize > osize
2753 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2757 /* Process all the registers defined in the rtx, X.
2758 Autoincrement/decrement definitions will be picked up by
2762 df_def_record_1 (struct df_collection_rec *collection_rec,
2763 rtx x, basic_block bb, rtx insn,
2764 enum df_ref_flags flags)
2770 enum machine_mode mode = 0;
2772 /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
2774 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2777 loc = &SET_DEST (x);
2780 /* It is legal to have a set destination be a parallel. */
2781 if (GET_CODE (dst) == PARALLEL)
2785 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2787 rtx temp = XVECEXP (dst, 0, i);
2788 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2789 || GET_CODE (temp) == SET)
2790 df_def_record_1 (collection_rec,
2792 GET_CODE (temp) == CLOBBER
2793 ? flags | DF_REF_MUST_CLOBBER : flags);
2798 if (GET_CODE (dst) == STRICT_LOW_PART)
2800 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2802 loc = &XEXP (dst, 0);
2806 if (GET_CODE (dst) == ZERO_EXTRACT)
2808 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2810 if (GET_CODE (XEXP (dst, 1)) == CONST_INT
2811 && GET_CODE (XEXP (dst, 2)) == CONST_INT)
2813 width = INTVAL (XEXP (dst, 1));
2814 offset = INTVAL (XEXP (dst, 2));
2815 mode = GET_MODE (dst);
2818 loc = &XEXP (dst, 0);
2822 /* At this point if we do not have a reg or a subreg, just return. */
2825 df_ref_record (collection_rec,
2826 dst, loc, bb, insn, DF_REF_REG_DEF, flags,
2827 width, offset, mode);
2829 /* We want to keep sp alive everywhere - by making all
2830 writes to sp also use of sp. */
2831 if (REGNO (dst) == STACK_POINTER_REGNUM)
2832 df_ref_record (collection_rec,
2833 dst, NULL, bb, insn, DF_REF_REG_USE, flags,
2834 width, offset, mode);
2836 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2838 if (df_read_modify_subreg_p (dst))
2839 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2841 flags |= DF_REF_SUBREG;
2843 df_ref_record (collection_rec,
2844 dst, loc, bb, insn, DF_REF_REG_DEF, flags,
2845 width, offset, mode);
2850 /* Process all the registers defined in the pattern rtx, X. */
2853 df_defs_record (struct df_collection_rec *collection_rec,
2854 rtx x, basic_block bb, rtx insn, enum df_ref_flags flags)
2856 RTX_CODE code = GET_CODE (x);
2858 if (code == SET || code == CLOBBER)
2860 /* Mark the single def within the pattern. */
2861 enum df_ref_flags clobber_flags = flags;
2862 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2863 df_def_record_1 (collection_rec, x, bb, insn, clobber_flags);
2865 else if (code == COND_EXEC)
2867 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2868 bb, insn, DF_REF_CONDITIONAL);
2870 else if (code == PARALLEL)
2874 /* Mark the multiple defs within the pattern. */
2875 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2876 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn, flags);
2881 /* Process all the registers used in the rtx at address LOC.
2883 If the REF_FLAGS field contain DF_REF_SIGN_EXTRACT or
2884 DF_REF_ZERO_EXTRACT. WIDTH, OFFSET and MODE are used to access the
2885 fields if they were constants. Otherwise they should be -1 if
2886 those flags were set. */
2889 df_uses_record (struct df_collection_rec *collection_rec,
2890 rtx *loc, enum df_ref_type ref_type,
2891 basic_block bb, rtx insn, enum df_ref_flags flags,
2892 int width, int offset, enum machine_mode mode)
2901 code = GET_CODE (x);
2918 /* If we are clobbering a MEM, mark any registers inside the address
2920 if (MEM_P (XEXP (x, 0)))
2921 df_uses_record (collection_rec,
2922 &XEXP (XEXP (x, 0), 0),
2923 DF_REF_REG_MEM_STORE, bb, insn, flags,
2924 width, offset, mode);
2926 /* If we're clobbering a REG then we have a def so ignore. */
2930 df_uses_record (collection_rec,
2931 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2932 bb, insn, flags & DF_REF_IN_NOTE,
2933 width, offset, mode);
2937 /* While we're here, optimize this case. */
2938 flags |= DF_REF_PARTIAL;
2939 /* In case the SUBREG is not of a REG, do not optimize. */
2940 if (!REG_P (SUBREG_REG (x)))
2942 loc = &SUBREG_REG (x);
2943 df_uses_record (collection_rec, loc, ref_type, bb, insn, flags,
2944 width, offset, mode);
2947 /* ... Fall through ... */
2950 df_ref_record (collection_rec,
2951 x, loc, bb, insn, ref_type, flags,
2952 width, offset, mode);
2958 /* If the parameters to the zero or sign extract are
2959 constants, strip them off and recurse, otherwise there is
2960 no information that we can gain from this operation. */
2961 if (GET_CODE (XEXP (x, 1)) == CONST_INT
2962 && GET_CODE (XEXP (x, 2)) == CONST_INT)
2964 width = INTVAL (XEXP (x, 1));
2965 offset = INTVAL (XEXP (x, 2));
2966 mode = GET_MODE (x);
2968 if (code == ZERO_EXTRACT)
2969 flags |= DF_REF_ZERO_EXTRACT;
2971 flags |= DF_REF_SIGN_EXTRACT;
2973 df_uses_record (collection_rec,
2974 &XEXP (x, 0), ref_type, bb, insn, flags,
2975 width, offset, mode);
2983 rtx dst = SET_DEST (x);
2984 gcc_assert (!(flags & DF_REF_IN_NOTE));
2985 df_uses_record (collection_rec,
2986 &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags,
2987 width, offset, mode);
2989 switch (GET_CODE (dst))
2992 if (df_read_modify_subreg_p (dst))
2994 df_uses_record (collection_rec, &SUBREG_REG (dst),
2995 DF_REF_REG_USE, bb, insn,
2996 flags | DF_REF_READ_WRITE | DF_REF_SUBREG,
2997 width, offset, mode);
3008 df_uses_record (collection_rec, &XEXP (dst, 0),
3009 DF_REF_REG_MEM_STORE, bb, insn, flags,
3010 width, offset, mode);
3012 case STRICT_LOW_PART:
3014 rtx *temp = &XEXP (dst, 0);
3015 /* A strict_low_part uses the whole REG and not just the
3017 dst = XEXP (dst, 0);
3018 df_uses_record (collection_rec,
3019 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3020 DF_REF_REG_USE, bb, insn,
3021 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART,
3022 width, offset, mode);
3027 if (GET_CODE (XEXP (dst, 1)) == CONST_INT
3028 && GET_CODE (XEXP (dst, 2)) == CONST_INT)
3030 width = INTVAL (XEXP (dst, 1));
3031 offset = INTVAL (XEXP (dst, 2));
3032 mode = GET_MODE (dst);
3036 df_uses_record (collection_rec, &XEXP (dst, 1),
3037 DF_REF_REG_USE, bb, insn, flags,
3038 width, offset, mode);
3039 df_uses_record (collection_rec, &XEXP (dst, 2),
3040 DF_REF_REG_USE, bb, insn, flags,
3041 width, offset, mode);
3044 df_uses_record (collection_rec, &XEXP (dst, 0),
3045 DF_REF_REG_USE, bb, insn,
3046 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT,
3047 width, offset, mode);
3061 case UNSPEC_VOLATILE:
3065 /* Traditional and volatile asm instructions must be
3066 considered to use and clobber all hard registers, all
3067 pseudo-registers and all of memory. So must TRAP_IF and
3068 UNSPEC_VOLATILE operations.
3070 Consider for instance a volatile asm that changes the fpu
3071 rounding mode. An insn should not be moved across this
3072 even if it only uses pseudo-regs because it might give an
3073 incorrectly rounded result.
3075 However, flow.c's liveness computation did *not* do this,
3076 giving the reasoning as " ?!? Unfortunately, marking all
3077 hard registers as live causes massive problems for the
3078 register allocator and marking all pseudos as live creates
3079 mountains of uninitialized variable warnings."
3081 In order to maintain the status quo with regard to liveness
3082 and uses, we do what flow.c did and just mark any regs we
3083 can find in ASM_OPERANDS as used. In global asm insns are
3084 scanned and regs_asm_clobbered is filled out.
3086 For all ASM_OPERANDS, we must traverse the vector of input
3087 operands. We can not just fall through here since then we
3088 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3089 which do not indicate traditional asms unlike their normal
3091 if (code == ASM_OPERANDS)
3095 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3096 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
3097 DF_REF_REG_USE, bb, insn, flags,
3098 width, offset, mode);
3110 /* Catch the def of the register being modified. */
3111 df_ref_record (collection_rec, XEXP (x, 0), &XEXP (x, 0), bb, insn,
3113 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY,
3114 width, offset, mode);
3116 /* ... Fall through to handle uses ... */
3122 /* Recursively scan the operands of this expression. */
3124 const char *fmt = GET_RTX_FORMAT (code);
3127 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3131 /* Tail recursive case: save a function call level. */
3137 df_uses_record (collection_rec, &XEXP (x, i), ref_type,
3139 width, offset, mode);
3141 else if (fmt[i] == 'E')
3144 for (j = 0; j < XVECLEN (x, i); j++)
3145 df_uses_record (collection_rec,
3146 &XVECEXP (x, i, j), ref_type,
3148 width, offset, mode);
3157 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses. */
3160 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3163 for (i = 0; i < collection_rec->next_def; i++)
3165 struct df_ref *ref = collection_rec->def_vec[i];
3166 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3170 enum machine_mode mode = 0;
3173 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_SIGN_EXTRACT | DF_REF_ZERO_EXTRACT))
3175 width = DF_REF_EXTRACT_WIDTH (ref);
3176 offset = DF_REF_EXTRACT_OFFSET (ref);
3177 mode = DF_REF_EXTRACT_MODE (ref);
3180 use = df_ref_create_structure (collection_rec, DF_REF_REG (ref),
3181 DF_REF_LOC (ref), DF_REF_BB (ref),
3182 DF_REF_INSN (ref), DF_REF_REG_USE,
3183 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL,
3184 width, offset, mode);
3185 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3191 /* Get call's extra defs and uses. */
3194 df_get_call_refs (struct df_collection_rec * collection_rec,
3197 enum df_ref_flags flags)
3202 bool is_sibling_call;
3204 bitmap defs_generated = BITMAP_ALLOC (&df_bitmap_obstack);
3206 /* Do not generate clobbers for registers that are the result of the
3207 call. This causes ordering problems in the chain building code
3208 depending on which def is seen first. */
3209 for (i=0; i<collection_rec->next_def; i++)
3211 struct df_ref *def = collection_rec->def_vec[i];
3212 bitmap_set_bit (defs_generated, DF_REF_REGNO (def));
3215 /* Record the registers used to pass arguments, and explicitly
3216 noted as clobbered. */
3217 for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
3218 note = XEXP (note, 1))
3220 if (GET_CODE (XEXP (note, 0)) == USE)
3221 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3222 DF_REF_REG_USE, bb, insn, flags, -1, -1, 0);
3223 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3225 if (REG_P (XEXP (XEXP (note, 0), 0)))
3227 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3228 if (!bitmap_bit_p (defs_generated, regno))
3229 df_defs_record (collection_rec, XEXP (note, 0), bb,
3233 df_uses_record (collection_rec, &XEXP (note, 0),
3234 DF_REF_REG_USE, bb, insn, flags, -1, -1, 0);
3238 /* The stack ptr is used (honorarily) by a CALL insn. */
3239 df_ref_record (collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3240 NULL, bb, insn, DF_REF_REG_USE, DF_REF_CALL_STACK_USAGE | flags,
3243 /* Calls may also reference any of the global registers,
3244 so they are recorded as used. */
3245 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3248 df_ref_record (collection_rec, regno_reg_rtx[i],
3249 NULL, bb, insn, DF_REF_REG_USE, flags, -1, -1, 0);
3250 df_ref_record (collection_rec, regno_reg_rtx[i],
3251 NULL, bb, insn, DF_REF_REG_DEF, flags, -1, -1, 0);
3254 is_sibling_call = SIBLING_CALL_P (insn);
3255 EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
3257 if (!global_regs[ui]
3258 && (!bitmap_bit_p (defs_generated, ui))
3259 && (!is_sibling_call
3260 || !bitmap_bit_p (df->exit_block_uses, ui)
3261 || refers_to_regno_p (ui, ui+1,
3262 crtl->return_rtx, NULL)))
3263 df_ref_record (collection_rec, regno_reg_rtx[ui],
3264 NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags,
3268 BITMAP_FREE (defs_generated);
3272 /* Collect all refs in the INSN. This function is free of any
3273 side-effect - it will create and return a lists of df_ref's in the
3274 COLLECTION_REC without putting those refs into existing ref chains
3278 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3279 basic_block bb, rtx insn)
3282 bool is_cond_exec = (GET_CODE (PATTERN (insn)) == COND_EXEC);
3284 /* Clear out the collection record. */
3285 collection_rec->next_def = 0;
3286 collection_rec->next_use = 0;
3287 collection_rec->next_eq_use = 0;
3288 collection_rec->next_mw = 0;
3290 /* Record register defs. */
3291 df_defs_record (collection_rec, PATTERN (insn), bb, insn, 0);
3293 /* Process REG_EQUIV/REG_EQUAL notes */
3294 for (note = REG_NOTES (insn); note;
3295 note = XEXP (note, 1))
3297 switch (REG_NOTE_KIND (note))
3301 df_uses_record (collection_rec,
3302 &XEXP (note, 0), DF_REF_REG_USE,
3303 bb, insn, DF_REF_IN_NOTE, -1, -1, 0);
3305 case REG_NON_LOCAL_GOTO:
3306 /* The frame ptr is used by a non-local goto. */
3307 df_ref_record (collection_rec,
3308 regno_reg_rtx[FRAME_POINTER_REGNUM],
3311 DF_REF_REG_USE, 0, -1, -1, 0);
3312 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3313 df_ref_record (collection_rec,
3314 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3317 DF_REF_REG_USE, 0, -1, -1, 0);
3326 df_get_call_refs (collection_rec, bb, insn,
3327 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3329 /* Record the register uses. */
3330 df_uses_record (collection_rec,
3331 &PATTERN (insn), DF_REF_REG_USE, bb, insn, 0,
3334 /* DF_REF_CONDITIONAL needs corresponding USES. */
3336 df_get_conditional_uses (collection_rec);
3338 df_canonize_collection_rec (collection_rec);
3341 /* Recompute the luids for the insns in BB. */
3344 df_recompute_luids (basic_block bb)
3349 df_grow_insn_info ();
3351 /* Scan the block an insn at a time from beginning to end. */
3352 FOR_BB_INSNS (bb, insn)
3354 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3355 /* Inserting labels does not always trigger the incremental
3359 gcc_assert (!INSN_P (insn));
3360 df_insn_create_insn_record (insn);
3363 DF_INSN_LUID (insn) = luid;
3370 /* Returns true if the function entry needs to
3371 define the static chain register. */
3374 df_need_static_chain_reg (struct function *fun)
3376 tree fun_context = decl_function_context (fun->decl);
3378 && DECL_NO_STATIC_CHAIN (fun_context) == false;
3382 /* Collect all artificial refs at the block level for BB and add them
3383 to COLLECTION_REC. */
3386 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3388 collection_rec->next_def = 0;
3389 collection_rec->next_use = 0;
3390 collection_rec->next_eq_use = 0;
3391 collection_rec->next_mw = 0;
3393 if (bb->index == ENTRY_BLOCK)
3395 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3398 else if (bb->index == EXIT_BLOCK)
3400 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3404 #ifdef EH_RETURN_DATA_REGNO
3405 if (bb_has_eh_pred (bb))
3408 /* Mark the registers that will contain data for the handler. */
3411 unsigned regno = EH_RETURN_DATA_REGNO (i);
3412 if (regno == INVALID_REGNUM)
3414 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3415 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
3422 if (bb_has_eh_pred (bb))
3425 /* This code is putting in an artificial ref for the use at the
3426 TOP of the block that receives the exception. It is too
3427 cumbersome to actually put the ref on the edge. We could
3428 either model this at the top of the receiver block or the
3429 bottom of the sender block.
3431 The bottom of the sender block is problematic because not all
3432 out-edges of a block are eh-edges. However, it is true
3433 that all edges into a block are either eh-edges or none of
3434 them are eh-edges. Thus, we can model this at the top of the
3435 eh-receiver for all of the edges at once. */
3436 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3438 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3439 bb, NULL, DF_REF_REG_USE, DF_REF_AT_TOP, -1, -1, 0);
3443 /* Add the hard_frame_pointer if this block is the target of a
3445 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3446 df_ref_record (collection_rec, hard_frame_pointer_rtx, NULL,
3447 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP, -1, -1, 0);
3449 /* Add the artificial uses. */
3450 if (bb->index >= NUM_FIXED_BLOCKS)
3454 bitmap au = bb_has_eh_pred (bb)
3455 ? df->eh_block_artificial_uses
3456 : df->regular_block_artificial_uses;
3458 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3460 df_ref_record (collection_rec, regno_reg_rtx[regno], NULL,
3461 bb, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
3465 df_canonize_collection_rec (collection_rec);
3469 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3472 df_bb_refs_record (int bb_index, bool scan_insns)
3474 basic_block bb = BASIC_BLOCK (bb_index);
3477 struct df_scan_bb_info *bb_info;
3478 struct df_collection_rec collection_rec;
3479 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
3480 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
3481 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
3482 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
3487 bb_info = df_scan_get_bb_info (bb_index);
3489 /* Need to make sure that there is a record in the basic block info. */
3492 bb_info = (struct df_scan_bb_info *) pool_alloc (df_scan->block_pool);
3493 df_scan_set_bb_info (bb_index, bb_info);
3494 bb_info->artificial_defs = NULL;
3495 bb_info->artificial_uses = NULL;
3499 /* Scan the block an insn at a time from beginning to end. */
3500 FOR_BB_INSNS (bb, insn)
3502 struct df_insn_info *insn_info = DF_INSN_GET (insn);
3503 gcc_assert (!insn_info);
3505 df_insn_create_insn_record (insn);
3508 /* Record refs within INSN. */
3509 DF_INSN_LUID (insn) = luid++;
3510 df_insn_refs_collect (&collection_rec, bb, insn);
3511 df_refs_add_to_chains (&collection_rec, bb, insn);
3513 DF_INSN_LUID (insn) = luid;
3516 /* Other block level artificial refs */
3517 df_bb_refs_collect (&collection_rec, bb);
3518 df_refs_add_to_chains (&collection_rec, bb, NULL);
3520 /* Now that the block has been processed, set the block as dirty so
3521 LR and LIVE will get it processed. */
3522 df_set_bb_dirty (bb);
3526 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3530 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3532 bitmap_clear (regular_block_artificial_uses);
3534 if (reload_completed)
3536 if (frame_pointer_needed)
3537 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3540 /* Before reload, there are a few registers that must be forced
3541 live everywhere -- which might not already be the case for
3542 blocks within infinite loops. */
3544 /* Any reference to any pseudo before reload is a potential
3545 reference of the frame pointer. */
3546 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3548 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3549 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3552 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3553 /* Pseudos with argument area equivalences may require
3554 reloading via the argument pointer. */
3555 if (fixed_regs[ARG_POINTER_REGNUM])
3556 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3559 /* Any constant, or pseudo with constant equivalences, may
3560 require reloading from memory using the pic register. */
3561 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3562 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3563 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3565 /* The all-important stack pointer must always be live. */
3566 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3570 /* Get the artificial use set for an eh block. */
3573 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3575 bitmap_clear (eh_block_artificial_uses);
3577 /* The following code (down thru the arg_pointer setting APPEARS
3578 to be necessary because there is nothing that actually
3579 describes what the exception handling code may actually need
3581 if (reload_completed)
3583 if (frame_pointer_needed)
3585 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3586 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3587 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3590 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3591 if (fixed_regs[ARG_POINTER_REGNUM])
3592 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3599 /*----------------------------------------------------------------------------
3600 Specialized hard register scanning functions.
3601 ----------------------------------------------------------------------------*/
3604 /* Mark a register in SET. Hard registers in large modes get all
3605 of their component registers set as well. */
3608 df_mark_reg (rtx reg, void *vset)
3610 bitmap set = (bitmap) vset;
3611 int regno = REGNO (reg);
3613 gcc_assert (GET_MODE (reg) != BLKmode);
3615 bitmap_set_bit (set, regno);
3616 if (regno < FIRST_PSEUDO_REGISTER)
3618 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3620 bitmap_set_bit (set, regno + n);
3625 /* Set the bit for regs that are considered being defined at the entry. */
3628 df_get_entry_block_def_set (bitmap entry_block_defs)
3633 bitmap_clear (entry_block_defs);
3635 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3637 if (FUNCTION_ARG_REGNO_P (i))
3638 #ifdef INCOMING_REGNO
3639 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3641 bitmap_set_bit (entry_block_defs, i);
3645 /* Once the prologue has been generated, all of these registers
3646 should just show up in the first regular block. */
3647 if (HAVE_prologue && epilogue_completed)
3649 /* Defs for the callee saved registers are inserted so that the
3650 pushes have some defining location. */
3651 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3652 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3653 bitmap_set_bit (entry_block_defs, i);
3657 /* The always important stack pointer. */
3658 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3660 /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
3661 only STATIC_CHAIN_REGNUM is defined. If they are different,
3662 we only care about the STATIC_CHAIN_INCOMING_REGNUM. */
3663 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3664 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3666 #ifdef STATIC_CHAIN_REGNUM
3667 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3672 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3674 bitmap_set_bit (entry_block_defs, REGNO (r));
3676 if ((!reload_completed) || frame_pointer_needed)
3678 /* Any reference to any pseudo before reload is a potential
3679 reference of the frame pointer. */
3680 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3681 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3682 /* If they are different, also mark the hard frame pointer as live. */
3683 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3684 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3688 /* These registers are live everywhere. */
3689 if (!reload_completed)
3692 /* The ia-64, the only machine that uses this, does not define these
3693 until after reload. */
3694 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3697 bitmap_set_bit (entry_block_defs, i);
3701 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3702 /* Pseudos with argument area equivalences may require
3703 reloading via the argument pointer. */
3704 if (fixed_regs[ARG_POINTER_REGNUM])
3705 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3708 #ifdef PIC_OFFSET_TABLE_REGNUM
3709 /* Any constant, or pseudo with constant equivalences, may
3710 require reloading from memory using the pic register. */
3711 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3712 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3713 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3717 #ifdef INCOMING_RETURN_ADDR_RTX
3718 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3719 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3722 targetm.live_on_entry (entry_block_defs);
3724 /* If the function has an incoming STATIC_CHAIN,
3725 it has to show up in the entry def set. */
3726 if (df_need_static_chain_reg (cfun))
3728 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3729 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
3731 #ifdef STATIC_CHAIN_REGNUM
3732 bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
3739 /* Return the (conservative) set of hard registers that are defined on
3740 entry to the function.
3741 It uses df->entry_block_defs to determine which register
3742 reference to include. */
3745 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3746 bitmap entry_block_defs)
3751 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3753 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3754 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0, -1, -1, 0);
3757 df_canonize_collection_rec (collection_rec);
3761 /* Record the (conservative) set of hard registers that are defined on
3762 entry to the function. */
3765 df_record_entry_block_defs (bitmap entry_block_defs)
3767 struct df_collection_rec collection_rec;
3768 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3769 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3771 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3773 /* Process bb_refs chain */
3774 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3778 /* Update the defs in the entry block. */
3781 df_update_entry_block_defs (void)
3783 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3784 bool changed = false;
3786 df_get_entry_block_def_set (refs);
3787 if (df->entry_block_defs)
3789 if (!bitmap_equal_p (df->entry_block_defs, refs))
3791 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3792 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3793 df_ref_chain_delete (bb_info->artificial_defs);
3794 bb_info->artificial_defs = NULL;
3800 struct df_scan_problem_data *problem_data
3801 = (struct df_scan_problem_data *) df_scan->problem_data;
3802 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3808 df_record_entry_block_defs (refs);
3809 bitmap_copy (df->entry_block_defs, refs);
3810 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3816 /* Set the bit for regs that are considered being used at the exit. */
3819 df_get_exit_block_use_set (bitmap exit_block_uses)
3823 bitmap_clear (exit_block_uses);
3825 /* Stack pointer is always live at the exit. */
3826 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3828 /* Mark the frame pointer if needed at the end of the function.
3829 If we end up eliminating it, it will be removed from the live
3830 list of each basic block by reload. */
3832 if ((!reload_completed) || frame_pointer_needed)
3834 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3835 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3836 /* If they are different, also mark the hard frame pointer as live. */
3837 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3838 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3842 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3843 /* Many architectures have a GP register even without flag_pic.
3844 Assume the pic register is not in use, or will be handled by
3845 other means, if it is not fixed. */
3846 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3847 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3848 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3851 /* Mark all global registers, and all registers used by the
3852 epilogue as being live at the end of the function since they
3853 may be referenced by our caller. */
3854 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3855 if (global_regs[i] || EPILOGUE_USES (i))
3856 bitmap_set_bit (exit_block_uses, i);
3858 if (HAVE_epilogue && epilogue_completed)
3860 /* Mark all call-saved registers that we actually used. */
3861 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3862 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3863 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3864 bitmap_set_bit (exit_block_uses, i);
3867 #ifdef EH_RETURN_DATA_REGNO
3868 /* Mark the registers that will contain data for the handler. */
3869 if (reload_completed && crtl->calls_eh_return)
3872 unsigned regno = EH_RETURN_DATA_REGNO (i);
3873 if (regno == INVALID_REGNUM)
3875 bitmap_set_bit (exit_block_uses, regno);
3879 #ifdef EH_RETURN_STACKADJ_RTX
3880 if ((!HAVE_epilogue || ! epilogue_completed)
3881 && crtl->calls_eh_return)
3883 rtx tmp = EH_RETURN_STACKADJ_RTX;
3884 if (tmp && REG_P (tmp))
3885 df_mark_reg (tmp, exit_block_uses);
3889 #ifdef EH_RETURN_HANDLER_RTX
3890 if ((!HAVE_epilogue || ! epilogue_completed)
3891 && crtl->calls_eh_return)
3893 rtx tmp = EH_RETURN_HANDLER_RTX;
3894 if (tmp && REG_P (tmp))
3895 df_mark_reg (tmp, exit_block_uses);
3899 /* Mark function return value. */
3900 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3904 /* Return the refs of hard registers that are used in the exit block.
3905 It uses df->exit_block_uses to determine register to include. */
3908 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3913 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3914 df_ref_record (collection_rec, regno_reg_rtx[i], NULL,
3915 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
3917 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3918 /* It is deliberate that this is not put in the exit block uses but
3919 I do not know why. */
3920 if (reload_completed
3921 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3922 && bb_has_eh_pred (EXIT_BLOCK_PTR)
3923 && fixed_regs[ARG_POINTER_REGNUM])
3924 df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3925 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0, -1, -1, 0);
3928 df_canonize_collection_rec (collection_rec);
3932 /* Record the set of hard registers that are used in the exit block.
3933 It uses df->exit_block_uses to determine which bit to include. */
3936 df_record_exit_block_uses (bitmap exit_block_uses)
3938 struct df_collection_rec collection_rec;
3939 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3940 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * FIRST_PSEUDO_REGISTER);
3942 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3944 /* Process bb_refs chain */
3945 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3949 /* Update the uses in the exit block. */
3952 df_update_exit_block_uses (void)
3954 bitmap refs = BITMAP_ALLOC (&df_bitmap_obstack);
3955 bool changed = false;
3957 df_get_exit_block_use_set (refs);
3958 if (df->exit_block_uses)
3960 if (!bitmap_equal_p (df->exit_block_uses, refs))
3962 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3963 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3964 df_ref_chain_delete (bb_info->artificial_uses);
3965 bb_info->artificial_uses = NULL;
3971 struct df_scan_problem_data *problem_data
3972 = (struct df_scan_problem_data *) df_scan->problem_data;
3973 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3979 df_record_exit_block_uses (refs);
3980 bitmap_copy (df->exit_block_uses, refs);
3981 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
3986 static bool initialized = false;
3989 /* Initialize some platform specific structures. */
3992 df_hard_reg_init (void)
3995 #ifdef ELIMINABLE_REGS
3996 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
4001 bitmap_obstack_initialize (&persistent_obstack);
4003 /* Record which registers will be eliminated. We use this in
4005 CLEAR_HARD_REG_SET (elim_reg_set);
4007 #ifdef ELIMINABLE_REGS
4008 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4009 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4011 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4014 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
4016 /* Inconveniently, this is only readily available in hard reg set
4018 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4019 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
4020 bitmap_set_bit (df_invalidated_by_call, i);
4026 /* Recompute the parts of scanning that are based on regs_ever_live
4027 because something changed in that array. */
4030 df_update_entry_exit_and_calls (void)
4034 df_update_entry_block_defs ();
4035 df_update_exit_block_uses ();
4037 /* The call insns need to be rescanned because there may be changes
4038 in the set of registers clobbered across the call. */
4042 FOR_BB_INSNS (bb, insn)
4044 if (INSN_P (insn) && CALL_P (insn))
4045 df_insn_rescan (insn);
4051 /* Return true if hard REG is actually used in the some instruction.
4052 There are a fair number of conditions that affect the setting of
4053 this array. See the comment in df.h for df->hard_regs_live_count
4054 for the conditions that this array is set. */
4057 df_hard_reg_used_p (unsigned int reg)
4060 return df->hard_regs_live_count[reg] != 0;
4064 /* A count of the number of times REG is actually used in the some
4065 instruction. There are a fair number of conditions that affect the
4066 setting of this array. See the comment in df.h for
4067 df->hard_regs_live_count for the conditions that this array is
4072 df_hard_reg_used_count (unsigned int reg)
4075 return df->hard_regs_live_count[reg];
4079 /* Get the value of regs_ever_live[REGNO]. */
4082 df_regs_ever_live_p (unsigned int regno)
4084 return regs_ever_live[regno];
4088 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
4089 to change, schedule that change for the next update. */
4092 df_set_regs_ever_live (unsigned int regno, bool value)
4094 if (regs_ever_live[regno] == value)
4097 regs_ever_live[regno] = value;
4099 df->redo_entry_and_exit = true;
4103 /* Compute "regs_ever_live" information from the underlying df
4104 information. Set the vector to all false if RESET. */
4107 df_compute_regs_ever_live (bool reset)
4110 bool changed = df->redo_entry_and_exit;
4113 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4115 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4116 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4118 regs_ever_live[i] = true;
4122 df_update_entry_exit_and_calls ();
4123 df->redo_entry_and_exit = false;
4127 /*----------------------------------------------------------------------------
4128 Dataflow ref information verification functions.
4130 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4131 df_reg_chain_verify_unmarked (refs)
4132 df_refs_verify (ref*, ref*, bool)
4133 df_mws_verify (mw*, mw*, bool)
4134 df_insn_refs_verify (collection_rec, bb, insn, bool)
4135 df_bb_refs_verify (bb, refs, bool)
4137 df_exit_block_bitmap_verify (bool)
4138 df_entry_block_bitmap_verify (bool)
4140 ----------------------------------------------------------------------------*/
4143 /* Mark all refs in the reg chain. Verify that all of the registers
4144 are in the correct chain. */
4147 df_reg_chain_mark (struct df_ref *refs, unsigned int regno,
4148 bool is_def, bool is_eq_use)
4150 unsigned int count = 0;
4152 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4154 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4156 /* If there are no def-use or use-def chains, make sure that all
4157 of the chains are clear. */
4159 gcc_assert (!DF_REF_CHAIN (ref));
4161 /* Check to make sure the ref is in the correct chain. */
4162 gcc_assert (DF_REF_REGNO (ref) == regno);
4164 gcc_assert (DF_REF_TYPE(ref) == DF_REF_REG_DEF);
4166 gcc_assert (DF_REF_TYPE(ref) != DF_REF_REG_DEF);
4169 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4171 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4174 gcc_assert (ref->next_reg->prev_reg == ref);
4176 DF_REF_REG_MARK (ref);
4182 /* Verify that all of the registers in the chain are unmarked. */
4185 df_reg_chain_verify_unmarked (struct df_ref *refs)
4188 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4189 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4193 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4196 df_refs_verify (struct df_ref **new_rec, struct df_ref **old_rec,
4199 while ((*new_rec) && (*old_rec))
4201 if (!df_ref_equal_p (*new_rec, *old_rec))
4209 /* Abort if fail is called from the function level verifier. If
4210 that is the context, mark this reg as being seem. */
4213 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4214 DF_REF_REG_UNMARK (*old_rec);
4222 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4224 return ((*new_rec == NULL) && (*old_rec == NULL));
4229 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4232 df_mws_verify (struct df_mw_hardreg **new_rec, struct df_mw_hardreg **old_rec,
4235 while ((*new_rec) && (*old_rec))
4237 if (!df_mw_equal_p (*new_rec, *old_rec))
4249 gcc_assert ((*new_rec == NULL) && (*old_rec == NULL));
4251 return ((*new_rec == NULL) && (*old_rec == NULL));
4256 /* Return true if the existing insn refs information is complete and
4257 correct. Otherwise (i.e. if there's any missing or extra refs),
4258 return the correct df_ref chain in REFS_RETURN.
4260 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4261 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4262 verification mode instead of the whole function, so unmark
4265 If ABORT_IF_FAIL is set, this function never returns false. */
4268 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4273 bool ret1, ret2, ret3, ret4;
4274 unsigned int uid = INSN_UID (insn);
4276 df_insn_refs_collect (collection_rec, bb, insn);
4278 if (!DF_INSN_UID_DEFS (uid))
4280 /* The insn_rec was created but it was never filled out. */
4287 /* Unfortunately we cannot opt out early if one of these is not
4288 right because the marks will not get cleared. */
4289 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4291 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4293 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4295 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4297 return (ret1 && ret2 && ret3 && ret4);
4301 /* Return true if all refs in the basic block are correct and complete.
4302 Due to df_ref_chain_verify, it will cause all refs
4303 that are verified to have DF_REF_MARK bit set. */
4306 df_bb_verify (basic_block bb)
4309 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4310 struct df_collection_rec collection_rec;
4312 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4313 collection_rec.def_vec = alloca (sizeof (struct df_ref*) * 1000);
4314 collection_rec.use_vec = alloca (sizeof (struct df_ref*) * 1000);
4315 collection_rec.eq_use_vec = alloca (sizeof (struct df_ref*) * 1000);
4316 collection_rec.mw_vec = alloca (sizeof (struct df_mw_hardreg*) * 100);
4318 gcc_assert (bb_info);
4320 /* Scan the block an insn at a time from beginning to end. */
4321 FOR_BB_INSNS_REVERSE (bb, insn)
4325 df_insn_refs_verify (&collection_rec, bb, insn, true);
4326 df_free_collection_rec (&collection_rec);
4329 /* Do the artificial defs and uses. */
4330 df_bb_refs_collect (&collection_rec, bb);
4331 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4332 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4333 df_free_collection_rec (&collection_rec);
4339 /* Returns true if the entry block has correct and complete df_ref set.
4340 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4343 df_entry_block_bitmap_verify (bool abort_if_fail)
4345 bitmap entry_block_defs = BITMAP_ALLOC (&df_bitmap_obstack);
4348 df_get_entry_block_def_set (entry_block_defs);
4350 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4352 if (!is_eq && abort_if_fail)
4354 print_current_pass (stderr);
4355 fprintf (stderr, "entry_block_defs = ");
4356 df_print_regset (stderr, entry_block_defs);
4357 fprintf (stderr, "df->entry_block_defs = ");
4358 df_print_regset (stderr, df->entry_block_defs);
4362 BITMAP_FREE (entry_block_defs);
4368 /* Returns true if the exit block has correct and complete df_ref set.
4369 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4372 df_exit_block_bitmap_verify (bool abort_if_fail)
4374 bitmap exit_block_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4377 df_get_exit_block_use_set (exit_block_uses);
4379 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4381 if (!is_eq && abort_if_fail)
4383 print_current_pass (stderr);
4384 fprintf (stderr, "exit_block_uses = ");
4385 df_print_regset (stderr, exit_block_uses);
4386 fprintf (stderr, "df->exit_block_uses = ");
4387 df_print_regset (stderr, df->exit_block_uses);
4391 BITMAP_FREE (exit_block_uses);
4397 /* Return true if df_ref information for all insns in all blocks are
4398 correct and complete. */
4401 df_scan_verify (void)
4405 bitmap regular_block_artificial_uses;
4406 bitmap eh_block_artificial_uses;
4411 /* Verification is a 4 step process. */
4413 /* (1) All of the refs are marked by going thru the reg chains. */
4414 for (i = 0; i < DF_REG_SIZE (df); i++)
4416 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4417 == DF_REG_DEF_COUNT(i));
4418 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4419 == DF_REG_USE_COUNT(i));
4420 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4421 == DF_REG_EQ_USE_COUNT(i));
4424 /* (2) There are various bitmaps whose value may change over the
4425 course of the compilation. This step recomputes them to make
4426 sure that they have not slipped out of date. */
4427 regular_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4428 eh_block_artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
4430 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4431 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4433 bitmap_ior_into (eh_block_artificial_uses,
4434 regular_block_artificial_uses);
4436 /* Check artificial_uses bitmaps didn't change. */
4437 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4438 df->regular_block_artificial_uses));
4439 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4440 df->eh_block_artificial_uses));
4442 BITMAP_FREE (regular_block_artificial_uses);
4443 BITMAP_FREE (eh_block_artificial_uses);
4445 /* Verify entry block and exit block. These only verify the bitmaps,
4446 the refs are verified in df_bb_verify. */
4447 df_entry_block_bitmap_verify (true);
4448 df_exit_block_bitmap_verify (true);
4450 /* (3) All of the insns in all of the blocks are traversed and the
4451 marks are cleared both in the artificial refs attached to the
4452 blocks and the real refs inside the insns. It is a failure to
4453 clear a mark that has not been set as this means that the ref in
4454 the block or insn was not in the reg chain. */
4459 /* (4) See if all reg chains are traversed a second time. This time
4460 a check is made that the marks are clear. A set mark would be a
4461 from a reg that is not in any insn or basic block. */
4463 for (i = 0; i < DF_REG_SIZE (df); i++)
4465 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4466 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4467 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));